diff --git a/deps/npm/docs/content/using-npm/scripts.md b/deps/npm/docs/content/using-npm/scripts.md
index 8fd5c5c0dbc9d8..2f2d53c1c2b64b 100644
--- a/deps/npm/docs/content/using-npm/scripts.md
+++ b/deps/npm/docs/content/using-npm/scripts.md
@@ -203,6 +203,19 @@ will default the `start` command to `node server.js`. `prestart` and
 * `test`
 * `posttest`
 
+#### A Note on a lack of [`npm uninstall`](/commands/npm-uninstall) scripts
+
+While npm v6 had `uninstall` lifecycle scripts, npm v7 does not. Removal of a package can happen for a wide variety of reasons, and there's currently no clear way to give the script enough context to be useful.
+
+Reasons for a package removal include:
+
+* a user directly uninstalled this package
+* a user uninstalled a dependent package and so this dependency is being uninstalled
+* a user uninstalled a dependent package but another package also depends on this version
+* this version has been merged as a duplicate with another version
+* etc.
+
+Due to the lack of necessary context, `uninstall` lifecycle scripts are not implemented and will not function.
 
 ### User
diff --git a/deps/npm/docs/output/commands/npm-ls.html b/deps/npm/docs/output/commands/npm-ls.html
index fb05ec983d1b12..e45846324faac9 100644
--- a/deps/npm/docs/output/commands/npm-ls.html
+++ b/deps/npm/docs/output/commands/npm-ls.html
@@ -159,7 +159,7 @@

 Description
 the results to only the paths to the packages named. Note that nested packages will also show the paths to the specified packages. For example, running npm ls promzard in npm's source tree will show:
-npm@7.21.0 /path/to/npm
+npm@7.21.1 /path/to/npm
 └─┬ init-package-json@0.0.4
   └── promzard@0.1.5
diff --git a/deps/npm/docs/output/commands/npm.html b/deps/npm/docs/output/commands/npm.html
index cb00df323df54f..a2feb42e43b6da 100644
--- a/deps/npm/docs/output/commands/npm.html
+++ b/deps/npm/docs/output/commands/npm.html
@@ -148,7 +148,7 @@
 Table of contents
 npm <command> [args]
 Version
-7.21.0
+7.21.1
 Description
 npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency
diff --git a/deps/npm/docs/output/using-npm/scripts.html b/deps/npm/docs/output/using-npm/scripts.html
index 9b1a2e322e3621..6dcf9a0416e4c2 100644
--- a/deps/npm/docs/output/using-npm/scripts.html
+++ b/deps/npm/docs/output/using-npm/scripts.html
@@ -141,7 +141,7 @@
 scripts
 Table of contents
-
+
 Description
@@ -337,6 +337,17 @@
 npm test
 • test
 • posttest
+A Note on a lack of npm uninstall scripts
+While npm v6 had uninstall lifecycle scripts, npm v7 does not. Removal of a package can happen for a wide variety of reasons, and there's currently no clear way to give the script enough context to be useful.
+Reasons for a package removal include:
+• a user directly uninstalled this package
+• a user uninstalled a dependent package and so this dependency is being uninstalled
+• a user uninstalled a dependent package but another package also depends on this version
+• this version has been merged as a duplicate with another version
+• etc.
+Due to the lack of necessary context, uninstall lifecycle scripts are not implemented and will not function.
 User
 When npm is run as root, scripts are always run with the effective uid and gid of the working directory owner.
    diff --git a/deps/npm/lib/config.js b/deps/npm/lib/config.js index a56dd92ffbde6a..2df7bf513437cc 100644 --- a/deps/npm/lib/config.js +++ b/deps/npm/lib/config.js @@ -121,7 +121,7 @@ class Config extends BaseCommand { break case 'list': case 'ls': - await (this.npm.config.get('json') ? this.listJson() : this.list()) + await (this.npm.flatOptions.json ? this.listJson() : this.list()) break case 'edit': await this.edit() @@ -138,7 +138,7 @@ class Config extends BaseCommand { if (!args.length) throw this.usageError() - const where = this.npm.config.get('location') + const where = this.npm.flatOptions.location for (const [key, val] of Object.entries(keyValues(args))) { this.npm.log.info('config', 'set %j %j', key, val) this.npm.config.set(key, val || '', where) @@ -168,15 +168,15 @@ class Config extends BaseCommand { if (!keys.length) throw this.usageError() - const where = this.npm.config.get('location') + const where = this.npm.flatOptions.location for (const key of keys) this.npm.config.delete(key, where) await this.npm.config.save(where) } async edit () { - const e = this.npm.config.get('editor') - const where = this.npm.config.get('location') + const e = this.npm.flatOptions.editor + const where = this.npm.flatOptions.location const file = this.npm.config.data.get(where).source // save first, just to make sure it's synced up @@ -232,6 +232,7 @@ ${defData} async list () { const msg = [] + // long does not have a flattener const long = this.npm.config.get('long') for (const [where, { data, source }] of this.npm.config.data.entries()) { if (where === 'default' && !long) diff --git a/deps/npm/lib/utils/config/definitions.js b/deps/npm/lib/utils/config/definitions.js index c71781627872a8..092e0fc435cb4e 100644 --- a/deps/npm/lib/utils/config/definitions.js +++ b/deps/npm/lib/utils/config/definitions.js @@ -804,7 +804,11 @@ define('global', { * bin files are linked to \`{prefix}/bin\` * man pages are linked to \`{prefix}/share/man\` `, - flatten, + flatten: (key, obj, flatOptions) => { + flatten(key, obj, flatOptions) + if (flatOptions.global) + flatOptions.location = 'global' + }, }) define('global-style', { @@ -1131,14 +1135,10 @@ define('location', { description: ` When passed to \`npm config\` this refers to which config file to use. `, - // NOTE: the flattener here deliberately does not alter the value of global - // for now, this is to avoid inadvertently causing any breakage. the value of - // global, however, does modify this flag. - flatten (key, obj, flatOptions) { - // if global is set, we override ourselves - if (obj.global) - obj.location = 'global' - flatOptions.location = obj.location + flatten: (key, obj, flatOptions) => { + flatten(key, obj, flatOptions) + if (flatOptions.global) + flatOptions.location = 'global' }, }) @@ -1359,7 +1359,11 @@ define('package-lock', { modules will also be disabled. To remove extraneous modules with package-locks disabled use \`npm prune\`. `, - flatten, + flatten: (key, obj, flatOptions) => { + flatten(key, obj, flatOptions) + if (flatOptions.packageLockOnly) + flatOptions.packageLock = true + }, }) define('package-lock-only', { @@ -1375,7 +1379,11 @@ define('package-lock-only', { For \`list\` this means the output will be based on the tree described by the \`package-lock.json\`, rather than the contents of \`node_modules\`. 
`, - flatten, + flatten: (key, obj, flatOptions) => { + flatten(key, obj, flatOptions) + if (flatOptions.packageLockOnly) + flatOptions.packageLock = true + }, }) define('pack-destination', { diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1 index 8dd999493aa23f..00d30a51c933cd 100644 --- a/deps/npm/man/man1/npm-ls.1 +++ b/deps/npm/man/man1/npm-ls.1 @@ -26,7 +26,7 @@ example, running \fBnpm ls promzard\fP in npm's source tree will show: .P .RS 2 .nf -npm@7\.21\.0 /path/to/npm +npm@7\.21\.1 /path/to/npm └─┬ init\-package\-json@0\.0\.4 └── promzard@0\.1\.5 .fi diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1 index a4ac45b5ef163d..e915deec084429 100644 --- a/deps/npm/man/man1/npm.1 +++ b/deps/npm/man/man1/npm.1 @@ -10,7 +10,7 @@ npm [args] .RE .SS Version .P -7\.21\.0 +7\.21\.1 .SS Description .P npm is the package manager for the Node JavaScript platform\. It puts diff --git a/deps/npm/man/man7/scripts.7 b/deps/npm/man/man7/scripts.7 index 086f7289b5117b..47e5879639b814 100644 --- a/deps/npm/man/man7/scripts.7 +++ b/deps/npm/man/man7/scripts.7 @@ -290,6 +290,26 @@ will default the \fBstart\fP command to \fBnode server\.js\fP\|\. \fBprestart\f \fBposttest\fP .RE +.SS A Note on a lack of npm help \fBuninstall\fP scripts +.P +While npm v6 had \fBuninstall\fP lifecycle scripts, npm v7 does not\. Removal of a package can happen for a wide variety of reasons, and there's no clear way to currently give the script enough context to be useful\. +.P +Reasons for a package removal include: +.RS 0 +.IP \(bu 2 +a user directly uninstalled this package +.IP \(bu 2 +a user uninstalled a dependant package and so this dependency is being uninstalled +.IP \(bu 2 +a user uninstalled a dependant package but another package also depends on this version +.IP \(bu 2 +this version has been merged as a duplicate with another version +.IP \(bu 2 +etc\. + +.RE +.P +Due to the lack of necessary context, \fBuninstall\fP lifecycle scripts are not implemented and will not function\. 
.SS User .P When npm is run as root, scripts are always run with the effective uid diff --git a/deps/npm/node_modules/@gar/promisify/index.js b/deps/npm/node_modules/@gar/promisify/index.js new file mode 100644 index 00000000000000..d0be95f6fec610 --- /dev/null +++ b/deps/npm/node_modules/@gar/promisify/index.js @@ -0,0 +1,36 @@ +'use strict' + +const { promisify } = require('util') + +const handler = { + get: function (target, prop, receiver) { + if (typeof target[prop] !== 'function') { + return target[prop] + } + if (target[prop][promisify.custom]) { + return function () { + return Reflect.get(target, prop, receiver)[promisify.custom].apply(target, arguments) + } + } + return function () { + return new Promise((resolve, reject) => { + Reflect.get(target, prop, receiver).apply(target, [...arguments, function (err, result) { + if (err) { + return reject(err) + } + resolve(result) + }]) + }) + } + } +} + +module.exports = function (thingToPromisify) { + if (typeof thingToPromisify === 'function') { + return promisify(thingToPromisify) + } + if (typeof thingToPromisify === 'object') { + return new Proxy(thingToPromisify, handler) + } + throw new TypeError('Can only promisify functions or objects') +} diff --git a/deps/npm/node_modules/@gar/promisify/package.json b/deps/npm/node_modules/@gar/promisify/package.json new file mode 100644 index 00000000000000..b5140876c2cb78 --- /dev/null +++ b/deps/npm/node_modules/@gar/promisify/package.json @@ -0,0 +1,32 @@ +{ + "name": "@gar/promisify", + "version": "1.1.2", + "description": "Promisify an entire class or object", + "main": "index.js", + "repository": { + "type": "git", + "url": "https://github.com/wraithgar/gar-promisify.git" + }, + "scripts": { + "lint": "standard", + "lint:fix": "standard --fix", + "test": "lab -a @hapi/code -t 100", + "posttest": "npm run lint" + }, + "files": [ + "index.js" + ], + "keywords": [ + "promisify", + "all", + "class", + "object" + ], + "author": "Gar ", + "license": "MIT", + "devDependencies": { + "@hapi/code": "^8.0.1", + "@hapi/lab": "^24.1.0", + "standard": "^16.0.3" + } +} diff --git a/deps/npm/node_modules/@npmcli/fs/LICENSE.md b/deps/npm/node_modules/@npmcli/fs/LICENSE.md new file mode 100644 index 00000000000000..845be76f64e789 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/fs/LICENSE.md @@ -0,0 +1,18 @@ +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
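The new `@gar/promisify` module added above wraps an entire object in a Proxy so that each of its callback-style methods can be awaited; this is how the new `@npmcli/fs` package consumes it (its `lib/fs.js` below simply exports `promisify(fs)`). A minimal usage sketch, not part of this diff and shown only for illustration:

```js
'use strict'

const promisify = require('@gar/promisify')
const fs = require('fs')

// wrapping the core fs module: every function property is lazily wrapped so a
// call returns a Promise (or defers to its util.promisify.custom variant when
// one exists), exactly as the Proxy handler in index.js above implements
const pfs = promisify(fs)

async function main () {
  // fs.stat normally takes an (err, stats) callback; through the proxy it can be awaited
  const stats = await pfs.stat(__filename)
  console.log(stats.isFile()) // true

  // non-function properties pass through unchanged
  console.log(typeof pfs.constants) // 'object'
}

main().catch(console.error)
```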
diff --git a/deps/npm/node_modules/@npmcli/fs/lib/common/file-url-to-path/index.js b/deps/npm/node_modules/@npmcli/fs/lib/common/file-url-to-path/index.js new file mode 100644 index 00000000000000..7755d1c10e6d0f --- /dev/null +++ b/deps/npm/node_modules/@npmcli/fs/lib/common/file-url-to-path/index.js @@ -0,0 +1,17 @@ +const url = require('url') + +const node = require('../node.js') +const polyfill = require('./polyfill.js') + +const useNative = node.satisfies('>=10.12.0') + +const fileURLToPath = (path) => { + // the polyfill is tested separately from this module, no need to hack + // process.version to try to trigger it just for coverage + // istanbul ignore next + return useNative + ? url.fileURLToPath(path) + : polyfill(path) +} + +module.exports = fileURLToPath diff --git a/deps/npm/node_modules/@npmcli/fs/lib/common/file-url-to-path/polyfill.js b/deps/npm/node_modules/@npmcli/fs/lib/common/file-url-to-path/polyfill.js new file mode 100644 index 00000000000000..794d9bba415aef --- /dev/null +++ b/deps/npm/node_modules/@npmcli/fs/lib/common/file-url-to-path/polyfill.js @@ -0,0 +1,120 @@ +const { URL, domainToUnicode } = require('url') + +const CHAR_LOWERCASE_A = 97 +const CHAR_LOWERCASE_Z = 122 + +const isWindows = process.platform === 'win32' + +class ERR_INVALID_FILE_URL_HOST extends TypeError { + constructor (platform) { + super(`File URL host must be "localhost" or empty on ${platform}`) + this.code = 'ERR_INVALID_FILE_URL_HOST' + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } +} + +class ERR_INVALID_FILE_URL_PATH extends TypeError { + constructor (msg) { + super(`File URL path ${msg}`) + this.code = 'ERR_INVALID_FILE_URL_PATH' + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } +} + +class ERR_INVALID_ARG_TYPE extends TypeError { + constructor (name, actual) { + super(`The "${name}" argument must be one of type string or an instance of URL. 
Received type ${typeof actual} ${actual}`) + this.code = 'ERR_INVALID_ARG_TYPE' + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } +} + +class ERR_INVALID_URL_SCHEME extends TypeError { + constructor (expected) { + super(`The URL must be of scheme ${expected}`) + this.code = 'ERR_INVALID_URL_SCHEME' + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } +} + +const isURLInstance = (input) => { + return input != null && input.href && input.origin +} + +const getPathFromURLWin32 = (url) => { + const hostname = url.hostname + let pathname = url.pathname + for (let n = 0; n < pathname.length; n++) { + if (pathname[n] === '%') { + const third = pathname.codePointAt(n + 2) | 0x20 + if ((pathname[n + 1] === '2' && third === 102) || + (pathname[n + 1] === '5' && third === 99)) { + throw new ERR_INVALID_FILE_URL_PATH('must not include encoded \\ or / characters') + } + } + } + + pathname = pathname.replace(/\//g, '\\') + pathname = decodeURIComponent(pathname) + if (hostname !== '') { + return `\\\\${domainToUnicode(hostname)}${pathname}` + } + + const letter = pathname.codePointAt(1) | 0x20 + const sep = pathname[2] + if (letter < CHAR_LOWERCASE_A || letter > CHAR_LOWERCASE_Z || + (sep !== ':')) { + throw new ERR_INVALID_FILE_URL_PATH('must be absolute') + } + + return pathname.slice(1) +} + +const getPathFromURLPosix = (url) => { + if (url.hostname !== '') { + throw new ERR_INVALID_FILE_URL_HOST(process.platform) + } + + const pathname = url.pathname + + for (let n = 0; n < pathname.length; n++) { + if (pathname[n] === '%') { + const third = pathname.codePointAt(n + 2) | 0x20 + if (pathname[n + 1] === '2' && third === 102) { + throw new ERR_INVALID_FILE_URL_PATH('must not include encoded / characters') + } + } + } + + return decodeURIComponent(pathname) +} + +const fileURLToPath = (path) => { + if (typeof path === 'string') { + path = new URL(path) + } else if (!isURLInstance(path)) { + throw new ERR_INVALID_ARG_TYPE('path', ['string', 'URL'], path) + } + + if (path.protocol !== 'file:') { + throw new ERR_INVALID_URL_SCHEME('file') + } + + return isWindows + ? getPathFromURLWin32(path) + : getPathFromURLPosix(path) +} + +module.exports = fileURLToPath diff --git a/deps/npm/node_modules/@npmcli/fs/lib/common/get-options.js b/deps/npm/node_modules/@npmcli/fs/lib/common/get-options.js new file mode 100644 index 00000000000000..cb5982f79077ac --- /dev/null +++ b/deps/npm/node_modules/@npmcli/fs/lib/common/get-options.js @@ -0,0 +1,20 @@ +// given an input that may or may not be an object, return an object that has +// a copy of every defined property listed in 'copy'. 
if the input is not an +// object, assign it to the property named by 'wrap' +const getOptions = (input, { copy, wrap }) => { + const result = {} + + if (input && typeof input === 'object') { + for (const prop of copy) { + if (input[prop] !== undefined) { + result[prop] = input[prop] + } + } + } else { + result[wrap] = input + } + + return result +} + +module.exports = getOptions diff --git a/deps/npm/node_modules/@npmcli/fs/lib/common/node.js b/deps/npm/node_modules/@npmcli/fs/lib/common/node.js new file mode 100644 index 00000000000000..4d13bc037359d7 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/fs/lib/common/node.js @@ -0,0 +1,9 @@ +const semver = require('semver') + +const satisfies = (range) => { + return semver.satisfies(process.version, range, { includePrerelease: true }) +} + +module.exports = { + satisfies, +} diff --git a/deps/npm/node_modules/@npmcli/fs/lib/common/owner.js b/deps/npm/node_modules/@npmcli/fs/lib/common/owner.js new file mode 100644 index 00000000000000..e3468b077d00ec --- /dev/null +++ b/deps/npm/node_modules/@npmcli/fs/lib/common/owner.js @@ -0,0 +1,92 @@ +const { dirname, resolve } = require('path') + +const fileURLToPath = require('./file-url-to-path/index.js') +const fs = require('../fs.js') + +// given a path, find the owner of the nearest parent +const find = async (path) => { + // if we have no getuid, permissions are irrelevant on this platform + if (!process.getuid) { + return {} + } + + // fs methods accept URL objects with a scheme of file: so we need to unwrap + // those into an actual path string before we can resolve it + const resolved = path != null && path.href && path.origin + ? resolve(fileURLToPath(path)) + : resolve(path) + + let stat + + try { + stat = await fs.lstat(resolved) + } finally { + // if we got a stat, return its contents + if (stat) { + return { uid: stat.uid, gid: stat.gid } + } + + // try the parent directory + if (resolved !== dirname(resolved)) { + return find(dirname(resolved)) + } + + // no more parents, never got a stat, just return an empty object + return {} + } +} + +// given a path, uid, and gid update the ownership of the path if necessary +const update = async (path, uid, gid) => { + // nothing to update, just exit + if (uid === undefined && gid === undefined) { + return + } + + try { + // see if the permissions are already the same, if they are we don't + // need to do anything, so return early + const stat = await fs.stat(path) + if (uid === stat.uid && gid === stat.gid) { + return + } + } catch (err) {} + + try { + await fs.chown(path, uid, gid) + } catch (err) {} +} + +// accepts a `path` and the `owner` property of an options object and normalizes +// it into an object with numerical `uid` and `gid` +const validate = async (path, input) => { + let uid + let gid + + if (typeof input === 'string' || typeof input === 'number') { + uid = input + gid = input + } else if (input && typeof input === 'object') { + uid = input.uid + gid = input.gid + } + + if (uid === 'inherit' || gid === 'inherit') { + const owner = await find(path) + if (uid === 'inherit') { + uid = owner.uid + } + + if (gid === 'inherit') { + gid = owner.gid + } + } + + return { uid, gid } +} + +module.exports = { + find, + update, + validate, +} diff --git a/deps/npm/node_modules/@npmcli/fs/lib/copy-file.js b/deps/npm/node_modules/@npmcli/fs/lib/copy-file.js new file mode 100644 index 00000000000000..d9875aba11f793 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/fs/lib/copy-file.js @@ -0,0 +1,22 @@ +const fs = require('./fs.js') +const 
getOptions = require('./common/get-options.js') +const owner = require('./common/owner.js') + +const copyFile = async (src, dest, opts) => { + const options = getOptions(opts, { + copy: ['mode', 'owner'], + wrap: 'mode', + }) + + const { uid, gid } = await owner.validate(dest, options.owner) + + // the node core method as of 16.5.0 does not support the mode being in an + // object, so we have to pass the mode value directly + const result = await fs.copyFile(src, dest, options.mode) + + await owner.update(dest, uid, gid) + + return result +} + +module.exports = copyFile diff --git a/deps/npm/node_modules/@npmcli/fs/lib/fs.js b/deps/npm/node_modules/@npmcli/fs/lib/fs.js new file mode 100644 index 00000000000000..29e5fb57356836 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/fs/lib/fs.js @@ -0,0 +1,8 @@ +const fs = require('fs') +const promisify = require('@gar/promisify') + +// this module returns the core fs module wrapped in a proxy that promisifies +// method calls within the getter. we keep it in a separate module so that the +// overridden methods have a consistent way to get to promisified fs methods +// without creating a circular dependency +module.exports = promisify(fs) diff --git a/deps/npm/node_modules/@npmcli/fs/lib/index.js b/deps/npm/node_modules/@npmcli/fs/lib/index.js new file mode 100644 index 00000000000000..f669efc1a91e0b --- /dev/null +++ b/deps/npm/node_modules/@npmcli/fs/lib/index.js @@ -0,0 +1,9 @@ +module.exports = { + ...require('./fs.js'), + copyFile: require('./copy-file.js'), + mkdir: require('./mkdir/index.js'), + mkdtemp: require('./mkdtemp.js'), + rm: require('./rm/index.js'), + withTempDir: require('./with-temp-dir.js'), + writeFile: require('./write-file.js'), +} diff --git a/deps/npm/node_modules/@npmcli/fs/lib/mkdir/index.js b/deps/npm/node_modules/@npmcli/fs/lib/mkdir/index.js new file mode 100644 index 00000000000000..04ff4479034545 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/fs/lib/mkdir/index.js @@ -0,0 +1,32 @@ +const fs = require('../fs.js') +const getOptions = require('../common/get-options.js') +const node = require('../common/node.js') +const owner = require('../common/owner.js') + +const polyfill = require('./polyfill.js') + +// node 10.12.0 added the options parameter, which allows recursive and mode +// properties to be passed +const useNative = node.satisfies('>=10.12.0') + +// extends mkdir with the ability to specify an owner of the new dir +const mkdir = async (path, opts) => { + const options = getOptions(opts, { + copy: ['mode', 'recursive', 'owner'], + wrap: 'mode', + }) + const { uid, gid } = await owner.validate(path, options.owner) + + // the polyfill is tested separately from this module, no need to hack + // process.version to try to trigger it just for coverage + // istanbul ignore next + const result = useNative + ? 
await fs.mkdir(path, options) + : await polyfill(path, options) + + await owner.update(path, uid, gid) + + return result +} + +module.exports = mkdir diff --git a/deps/npm/node_modules/@npmcli/fs/lib/mkdir/polyfill.js b/deps/npm/node_modules/@npmcli/fs/lib/mkdir/polyfill.js new file mode 100644 index 00000000000000..4f8e6f006a30ec --- /dev/null +++ b/deps/npm/node_modules/@npmcli/fs/lib/mkdir/polyfill.js @@ -0,0 +1,81 @@ +const { dirname } = require('path') + +const fileURLToPath = require('../common/file-url-to-path/index.js') +const fs = require('../fs.js') + +const defaultOptions = { + mode: 0o777, + recursive: false, +} + +const mkdir = async (path, opts) => { + const options = { ...defaultOptions, ...opts } + + // if we're not in recursive mode, just call the real mkdir with the path and + // the mode option only + if (!options.recursive) { + return fs.mkdir(path, options.mode) + } + + const makeDirectory = async (dir, mode) => { + // we can't use dirname directly since these functions support URL + // objects with the file: protocol as the path input, so first we get a + // string path, then we can call dirname on that + const parent = dir != null && dir.href && dir.origin + ? dirname(fileURLToPath(dir)) + : dirname(dir) + + // if the parent is the dir itself, try to create it. anything but EISDIR + // should be rethrown + if (parent === dir) { + try { + await fs.mkdir(dir, opts) + } catch (err) { + if (err.code !== 'EISDIR') { + throw err + } + } + return undefined + } + + try { + await fs.mkdir(dir, mode) + return dir + } catch (err) { + // ENOENT means the parent wasn't there, so create that + if (err.code === 'ENOENT') { + const made = await makeDirectory(parent, mode) + await makeDirectory(dir, mode) + // return the shallowest path we created, i.e. the result of creating + // the parent + return made + } + + // an EEXIST means there's already something there + // an EROFS means we have a read-only filesystem and can't create a dir + // any other error is fatal and we should give up now + if (err.code !== 'EEXIST' && err.code !== 'EROFS') { + throw err + } + + // stat the directory, if the result is a directory, then we successfully + // created this one so return its path. otherwise, we reject with the + // original error by ignoring the error in the catch + try { + const stat = await fs.stat(dir) + if (stat.isDirectory()) { + // if it already existed, we didn't create anything so return + // undefined + return undefined + } + } catch (_) {} + + // if the thing that's there isn't a directory, then just re-throw + throw err + } + } + + return makeDirectory(path, options.mode) +} + +module.exports = mkdir diff --git a/deps/npm/node_modules/@npmcli/fs/lib/mkdtemp.js b/deps/npm/node_modules/@npmcli/fs/lib/mkdtemp.js new file mode 100644 index 00000000000000..b7f078029d1111 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/fs/lib/mkdtemp.js @@ -0,0 +1,28 @@ +const { dirname, sep } = require('path') + +const fs = require('./fs.js') +const getOptions = require('./common/get-options.js') +const owner = require('./common/owner.js') + +const mkdtemp = async (prefix, opts) => { + const options = getOptions(opts, { + copy: ['encoding', 'owner'], + wrap: 'encoding', + }) + + // mkdtemp relies on the trailing path separator to indicate if it should + // create a directory inside of the prefix. 
if that's the case then the root + // we infer ownership from is the prefix itself, otherwise it's the dirname + // /tmp -> /tmpABCDEF, infers from / + // /tmp/ -> /tmp/ABCDEF, infers from /tmp + const root = prefix.endsWith(sep) ? prefix : dirname(prefix) + const { uid, gid } = await owner.validate(root, options.owner) + + const result = await fs.mkdtemp(prefix, options) + + await owner.update(result, uid, gid) + + return result +} + +module.exports = mkdtemp diff --git a/deps/npm/node_modules/@npmcli/fs/lib/rm/index.js b/deps/npm/node_modules/@npmcli/fs/lib/rm/index.js new file mode 100644 index 00000000000000..cb81fbdf8cc479 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/fs/lib/rm/index.js @@ -0,0 +1,22 @@ +const fs = require('../fs.js') +const getOptions = require('../common/get-options.js') +const node = require('../common/node.js') +const polyfill = require('./polyfill.js') + +// node 14.14.0 added fs.rm, which allows both the force and recursive options +const useNative = node.satisfies('>=14.14.0') + +const rm = async (path, opts) => { + const options = getOptions(opts, { + copy: ['retryDelay', 'maxRetries', 'recursive', 'force'], + }) + + // the polyfill is tested separately from this module, no need to hack + // process.version to try to trigger it just for coverage + // istanbul ignore next + return useNative + ? fs.rm(path, options) + : polyfill(path, options) +} + +module.exports = rm diff --git a/deps/npm/node_modules/@npmcli/fs/lib/rm/polyfill.js b/deps/npm/node_modules/@npmcli/fs/lib/rm/polyfill.js new file mode 100644 index 00000000000000..77196b76beb06c --- /dev/null +++ b/deps/npm/node_modules/@npmcli/fs/lib/rm/polyfill.js @@ -0,0 +1,238 @@ +// this file is a modified version of the code in node core >=14.14.0 +// which is, in turn, a modified version of the rimraf module on npm +// node core changes: +// - Use of the assert module has been replaced with core's error system. +// - All code related to the glob dependency has been removed. +// - Bring your own custom fs module is not currently supported. +// - Some basic code cleanup. 
+// changes here: +// - remove all callback related code +// - drop sync support +// - change assertions back to non-internal methods (see options.js) +// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows +const errnos = require('os').constants.errno +const { join } = require('path') +const fs = require('../fs.js') + +// error codes that mean we need to remove contents +const notEmptyCodes = new Set([ + 'ENOTEMPTY', + 'EEXIST', + 'EPERM', +]) + +// error codes we can retry later +const retryCodes = new Set([ + 'EBUSY', + 'EMFILE', + 'ENFILE', + 'ENOTEMPTY', + 'EPERM', +]) + +const isWindows = process.platform === 'win32' + +const defaultOptions = { + retryDelay: 100, + maxRetries: 0, + recursive: false, + force: false, +} + +// this is drastically simplified, but should be roughly equivalent to what +// node core throws +class ERR_FS_EISDIR extends Error { + constructor (path) { + super() + this.info = { + code: 'EISDIR', + message: 'is a directory', + path, + syscall: 'rm', + errno: errnos.EISDIR, + } + this.name = 'SystemError' + this.code = 'ERR_FS_EISDIR' + this.errno = errnos.EISDIR + this.syscall = 'rm' + this.path = path + this.message = `Path is a directory: ${this.syscall} returned ${this.info.code} (is a directory) ${path}` + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } +} + +class ENOTDIR extends Error { + constructor (path) { + super() + this.name = 'Error' + this.code = 'ENOTDIR' + this.errno = errnos.ENOTDIR + this.syscall = 'rmdir' + this.path = path + this.message = `not a directory, ${this.syscall} '${this.path}'` + } + + toString () { + return `${this.name}: ${this.code}: ${this.message}` + } +} + +// force is passed separately here because we respect it for the first entry +// into rimraf only, any further calls that are spawned as a result (i.e. to +// delete content within the target) will ignore ENOENT errors +const rimraf = async (path, options, isTop = false) => { + const force = isTop ? options.force : true + const stat = await fs.lstat(path) + .catch((err) => { + // we only ignore ENOENT if we're forcing this call + if (err.code === 'ENOENT' && force) { + return + } + + if (isWindows && err.code === 'EPERM') { + return fixEPERM(path, options, err, isTop) + } + + throw err + }) + + // no stat object here means either lstat threw an ENOENT, or lstat threw + // an EPERM and the fixPERM function took care of things. either way, we're + // already done, so return early + if (!stat) { + return + } + + if (stat.isDirectory()) { + return rmdir(path, options, null, isTop) + } + + return fs.unlink(path) + .catch((err) => { + if (err.code === 'ENOENT' && force) { + return + } + + if (err.code === 'EISDIR') { + return rmdir(path, options, err, isTop) + } + + if (err.code === 'EPERM') { + // in windows, we handle this through fixEPERM which will also try to + // delete things again. everywhere else since deleting the target as a + // file didn't work we go ahead and try to delete it as a directory + return isWindows + ? fixEPERM(path, options, err, isTop) + : rmdir(path, options, err, isTop) + } + + throw err + }) +} + +const fixEPERM = async (path, options, originalErr, isTop) => { + const force = isTop ? options.force : true + const targetMissing = await fs.chmod(path, 0o666) + .catch((err) => { + if (err.code === 'ENOENT' && force) { + return true + } + + throw originalErr + }) + + // got an ENOENT above, return now. 
no file = no problem + if (targetMissing) { + return + } + + // this function does its own lstat rather than calling rimraf again to avoid + // infinite recursion for a repeating EPERM + const stat = await fs.lstat(path) + .catch((err) => { + if (err.code === 'ENOENT' && force) { + return + } + + throw originalErr + }) + + if (!stat) { + return + } + + if (stat.isDirectory()) { + return rmdir(path, options, originalErr, isTop) + } + + return fs.unlink(path) +} + +const rmdir = async (path, options, originalErr, isTop) => { + if (!options.recursive && isTop) { + throw originalErr || new ERR_FS_EISDIR(path) + } + const force = isTop ? options.force : true + + return fs.rmdir(path) + .catch(async (err) => { + // in Windows, calling rmdir on a file path will fail with ENOENT rather + // than ENOTDIR. to determine if that's what happened, we have to do + // another lstat on the path. if the path isn't actually gone, we throw + // away the ENOENT and replace it with our own ENOTDIR + if (isWindows && err.code === 'ENOENT') { + const stillExists = await fs.lstat(path).then(() => true, () => false) + if (stillExists) { + err = new ENOTDIR(path) + } + } + + // not there, not a problem + if (err.code === 'ENOENT' && force) { + return + } + + // we may not have originalErr if lstat tells us our target is a + // directory but that changes before we actually remove it, so + // only throw it here if it's set + if (originalErr && err.code === 'ENOTDIR') { + throw originalErr + } + + // the directory isn't empty, remove the contents and try again + if (notEmptyCodes.has(err.code)) { + const files = await fs.readdir(path) + await Promise.all(files.map((file) => { + const target = join(path, file) + return rimraf(target, options) + })) + return fs.rmdir(path) + } + + throw err + }) +} + +const rm = async (path, opts) => { + const options = { ...defaultOptions, ...opts } + let retries = 0 + + const errHandler = async (err) => { + if (retryCodes.has(err.code) && ++retries < options.maxRetries) { + const delay = retries * options.retryDelay + await promiseTimeout(delay) + return rimraf(path, options, true).catch(errHandler) + } + + throw err + } + + return rimraf(path, options, true).catch(errHandler) +} + +const promiseTimeout = (ms) => new Promise((r) => setTimeout(r, ms)) + +module.exports = rm diff --git a/deps/npm/node_modules/@npmcli/fs/lib/with-temp-dir.js b/deps/npm/node_modules/@npmcli/fs/lib/with-temp-dir.js new file mode 100644 index 00000000000000..353d5555d10f69 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/fs/lib/with-temp-dir.js @@ -0,0 +1,39 @@ +const { join, sep } = require('path') + +const getOptions = require('./common/get-options.js') +const mkdir = require('./mkdir/index.js') +const mkdtemp = require('./mkdtemp.js') +const rm = require('./rm/index.js') + +// create a temp directory, ensure its permissions match its parent, then call +// the supplied function passing it the path to the directory. 
clean up after +// the function finishes, whether it throws or not +const withTempDir = async (root, fn, opts) => { + const options = getOptions(opts, { + copy: ['tmpPrefix'], + }) + // create the directory, and fix its ownership + await mkdir(root, { recursive: true, owner: 'inherit' }) + + const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || ''), { owner: 'inherit' }) + let err + let result + + try { + result = await fn(target) + } catch (_err) { + err = _err + } + + try { + await rm(target, { force: true, recursive: true }) + } catch (err) {} + + if (err) { + throw err + } + + return result +} + +module.exports = withTempDir diff --git a/deps/npm/node_modules/@npmcli/fs/lib/write-file.js b/deps/npm/node_modules/@npmcli/fs/lib/write-file.js new file mode 100644 index 00000000000000..01de531d980c4d --- /dev/null +++ b/deps/npm/node_modules/@npmcli/fs/lib/write-file.js @@ -0,0 +1,19 @@ +const fs = require('./fs.js') +const getOptions = require('./common/get-options.js') +const owner = require('./common/owner.js') + +const writeFile = async (file, data, opts) => { + const options = getOptions(opts, { + copy: ['encoding', 'mode', 'flag', 'signal', 'owner'], + wrap: 'encoding', + }) + const { uid, gid } = await owner.validate(file, options.owner) + + const result = await fs.writeFile(file, data, options) + + await owner.update(file, uid, gid) + + return result +} + +module.exports = writeFile diff --git a/deps/npm/node_modules/@npmcli/fs/package.json b/deps/npm/node_modules/@npmcli/fs/package.json new file mode 100644 index 00000000000000..b114b73d24e9e8 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/fs/package.json @@ -0,0 +1,36 @@ +{ + "name": "@npmcli/fs", + "version": "1.0.0", + "description": "filesystem utilities for the npm cli", + "main": "lib/index.js", + "files": [ + "lib", + "bin" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "snap": "tap", + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "npm run npmclilint -- \"lib/**/*.*js\" \"test/**/*.*js\"", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint --", + "postsnap": "npm run lintfix --" + }, + "keywords": [ + "npm", + "oss" + ], + "author": "GitHub Inc.", + "license": "ISC", + "devDependencies": { + "@npmcli/lint": "^1.0.1", + "tap": "^15.0.9" + }, + "dependencies": { + "@gar/promisify": "^1.0.1", + "semver": "^7.3.5" + } +} diff --git a/deps/npm/node_modules/cacache/get.js b/deps/npm/node_modules/cacache/get.js index fe710bbd68def3..4e905e7cf861c1 100644 --- a/deps/npm/node_modules/cacache/get.js +++ b/deps/npm/node_modules/cacache/get.js @@ -1,119 +1,112 @@ 'use strict' -const util = require('util') +const Collect = require('minipass-collect') +const Minipass = require('minipass') +const Pipeline = require('minipass-pipeline') const fs = require('fs') +const util = require('util') + const index = require('./lib/entry-index') const memo = require('./lib/memoization') const read = require('./lib/content/read') -const Minipass = require('minipass') -const Collect = require('minipass-collect') -const Pipeline = require('minipass-pipeline') - const writeFile = util.promisify(fs.writeFile) -module.exports = function get (cache, key, opts) { - return getData(false, cache, key, opts) -} -module.exports.byDigest = function getByDigest (cache, digest, opts) { - return getData(true, cache, digest, opts) -} - -function getData (byDigest, cache, key, opts = {}) { +function getData (cache, key, opts = {}) { 
const { integrity, memoize, size } = opts - const memoized = byDigest - ? memo.get.byDigest(cache, key, opts) - : memo.get(cache, key, opts) + const memoized = memo.get(cache, key, opts) if (memoized && memoize !== false) { - return Promise.resolve( - byDigest - ? memoized - : { - metadata: memoized.entry.metadata, - data: memoized.data, - integrity: memoized.entry.integrity, - size: memoized.entry.size, - } - ) + return Promise.resolve({ + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + }) } - return (byDigest ? Promise.resolve(null) : index.find(cache, key, opts)).then( - (entry) => { - if (!entry && !byDigest) - throw new index.NotFoundError(cache, key) - return read(cache, byDigest ? key : entry.integrity, { - integrity, - size, - }) - .then((data) => - byDigest - ? data - : { - data, - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } - ) - .then((res) => { - if (memoize && byDigest) - memo.put.byDigest(cache, key, res, opts) - else if (memoize) - memo.put(cache, entry, res.data, opts) - - return res - }) - } - ) -} + return index.find(cache, key, opts).then((entry) => { + if (!entry) + throw new index.NotFoundError(cache, key) + + return read(cache, entry.integrity, { integrity, size }).then((data) => { + if (memoize) + memo.put(cache, entry, data, opts) -module.exports.sync = function get (cache, key, opts) { - return getDataSync(false, cache, key, opts) + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } + }) + }) } -module.exports.sync.byDigest = function getByDigest (cache, digest, opts) { - return getDataSync(true, cache, digest, opts) +module.exports = getData + +function getDataByDigest (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, key, opts) + if (memoized && memoize !== false) + return Promise.resolve(memoized) + + return read(cache, key, { integrity, size }).then((res) => { + if (memoize) + memo.put.byDigest(cache, key, res, opts) + return res + }) } +module.exports.byDigest = getDataByDigest -function getDataSync (byDigest, cache, key, opts = {}) { +function getDataSync (cache, key, opts = {}) { const { integrity, memoize, size } = opts - const memoized = byDigest - ? memo.get.byDigest(cache, key, opts) - : memo.get(cache, key, opts) + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { - return byDigest - ? memoized - : { - metadata: memoized.entry.metadata, - data: memoized.data, - integrity: memoized.entry.integrity, - size: memoized.entry.size, - } + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } } - const entry = !byDigest && index.find.sync(cache, key, opts) - if (!entry && !byDigest) + const entry = index.find.sync(cache, key, opts) + if (!entry) throw new index.NotFoundError(cache, key) - - const data = read.sync(cache, byDigest ? key : entry.integrity, { + const data = read.sync(cache, entry.integrity, { integrity: integrity, size: size, }) - const res = byDigest - ? 
data - : { - metadata: entry.metadata, - data: data, - size: entry.size, - integrity: entry.integrity, - } - if (memoize && byDigest) - memo.put.byDigest(cache, key, res, opts) - else if (memoize) + const res = { + metadata: entry.metadata, + data: data, + size: entry.size, + integrity: entry.integrity, + } + if (memoize) memo.put(cache, entry, res.data, opts) return res } -module.exports.stream = getStream +module.exports.sync = getDataSync + +function getDataByDigestSync (cache, digest, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, digest, opts) + + if (memoized && memoize !== false) + return memoized + + const res = read.sync(cache, digest, { + integrity: integrity, + size: size, + }) + if (memoize) + memo.put.byDigest(cache, digest, res, opts) + + return res +} +module.exports.sync.byDigest = getDataByDigestSync const getMemoizedStream = (memoized) => { const stream = new Minipass() @@ -166,7 +159,7 @@ function getStream (cache, key, opts = {}) { return stream } -module.exports.stream.byDigest = getStreamDigest +module.exports.stream = getStream function getStreamDigest (cache, integrity, opts = {}) { const { memoize } = opts @@ -191,7 +184,7 @@ function getStreamDigest (cache, integrity, opts = {}) { } } -module.exports.info = info +module.exports.stream.byDigest = getStreamDigest function info (cache, key, opts = {}) { const { memoize } = opts @@ -201,53 +194,44 @@ function info (cache, key, opts = {}) { else return index.find(cache, key) } +module.exports.info = info -module.exports.hasContent = read.hasContent - -function cp (cache, key, dest, opts) { - return copy(false, cache, key, dest, opts) -} - -module.exports.copy = cp - -function cpDigest (cache, digest, dest, opts) { - return copy(true, cache, digest, dest, opts) -} - -module.exports.copy.byDigest = cpDigest - -function copy (byDigest, cache, key, dest, opts = {}) { +function copy (cache, key, dest, opts = {}) { if (read.copy) { - return (byDigest - ? Promise.resolve(null) - : index.find(cache, key, opts) - ).then((entry) => { - if (!entry && !byDigest) + return index.find(cache, key, opts).then((entry) => { + if (!entry) throw new index.NotFoundError(cache, key) - - return read - .copy(cache, byDigest ? key : entry.integrity, dest, opts) + return read.copy(cache, entry.integrity, dest, opts) .then(() => { - return byDigest - ? key - : { - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } }) }) } - return getData(byDigest, cache, key, opts).then((res) => { - return writeFile(dest, byDigest ? res : res.data).then(() => { - return byDigest - ? 
key - : { - metadata: res.metadata, - size: res.size, - integrity: res.integrity, - } + return getData(cache, key, opts).then((res) => { + return writeFile(dest, res.data).then(() => { + return { + metadata: res.metadata, + size: res.size, + integrity: res.integrity, + } }) }) } +module.exports.copy = copy + +function copyByDigest (cache, key, dest, opts = {}) { + if (read.copy) + return read.copy(cache, key, dest, opts).then(() => key) + + return getDataByDigest(cache, key, opts).then((res) => { + return writeFile(dest, res).then(() => key) + }) +} +module.exports.copy.byDigest = copyByDigest + +module.exports.hasContent = read.hasContent diff --git a/deps/npm/node_modules/cacache/lib/util/tmp.js b/deps/npm/node_modules/cacache/lib/util/tmp.js index fbcd2ab132eae5..0a5a50eba30618 100644 --- a/deps/npm/node_modules/cacache/lib/util/tmp.js +++ b/deps/npm/node_modules/cacache/lib/util/tmp.js @@ -1,21 +1,21 @@ 'use strict' -const util = require('util') +const fs = require('@npmcli/fs') const fixOwner = require('./fix-owner') const path = require('path') -const rimraf = util.promisify(require('rimraf')) -const uniqueFilename = require('unique-filename') -const { disposer } = require('./disposer') module.exports.mkdir = mktmpdir function mktmpdir (cache, opts = {}) { const { tmpPrefix } = opts - const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), tmpPrefix) - return fixOwner.mkdirfix(cache, tmpTarget).then(() => { - return tmpTarget - }) + const tmpDir = path.join(cache, 'tmp') + return fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + .then(() => { + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) + }) } module.exports.withTmp = withTmp @@ -25,7 +25,7 @@ function withTmp (cache, opts, cb) { cb = opts opts = {} } - return disposer(mktmpdir(cache, opts), rimraf, cb) + return fs.withTempDir(path.join(cache, 'tmp'), cb, opts) } module.exports.fix = fixtmpdir diff --git a/deps/npm/node_modules/cacache/package.json b/deps/npm/node_modules/cacache/package.json index 3c2e65c0404a0d..6cb4140159af8c 100644 --- a/deps/npm/node_modules/cacache/package.json +++ b/deps/npm/node_modules/cacache/package.json @@ -1,6 +1,6 @@ { "name": "cacache", - "version": "15.2.0", + "version": "15.3.0", "cache-version": { "content": "2", "index": "5" @@ -43,6 +43,7 @@ ], "license": "ISC", "dependencies": { + "@npmcli/fs": "^1.0.0", "@npmcli/move-file": "^1.0.1", "chownr": "^2.0.0", "fs-minipass": "^2.0.0", diff --git a/deps/npm/node_modules/make-fetch-happen/lib/agent.js b/deps/npm/node_modules/make-fetch-happen/lib/agent.js index 873d69cf4760b8..3675dd8ae981a9 100644 --- a/deps/npm/node_modules/make-fetch-happen/lib/agent.js +++ b/deps/npm/node_modules/make-fetch-happen/lib/agent.js @@ -33,7 +33,7 @@ function getAgent (uri, opts) { ? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}` : '>no-proxy<', `local-address:${opts.localAddress || '>no-local-address<'}`, - `strict-ssl:${isHttps ? !!opts.strictSSL : '>no-strict-ssl<'}`, + `strict-ssl:${isHttps ? 
opts.rejectUnauthorized : '>no-strict-ssl<'}`, `ca:${(isHttps && opts.ca) || '>no-ca<'}`, `cert:${(isHttps && opts.cert) || '>no-cert<'}`, `key:${(isHttps && opts.key) || '>no-key<'}`, @@ -72,7 +72,7 @@ function getAgent (uri, opts) { cert: opts.cert, key: opts.key, localAddress: opts.localAddress, - rejectUnauthorized: opts.strictSSL, + rejectUnauthorized: opts.rejectUnauthorized, timeout: agentTimeout, }) : new HttpAgent({ maxSockets: agentMaxSockets, @@ -173,7 +173,7 @@ function getProxy (proxyUrl, opts, isHttps) { timeout: getAgentTimeout(opts.timeout), localAddress: opts.localAddress, maxSockets: getMaxSockets(opts.maxSockets), - rejectUnauthorized: opts.strictSSL, + rejectUnauthorized: opts.rejectUnauthorized, } if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') { diff --git a/deps/npm/node_modules/make-fetch-happen/lib/options.js b/deps/npm/node_modules/make-fetch-happen/lib/options.js index 08891754868a50..f6138e6e1d13a6 100644 --- a/deps/npm/node_modules/make-fetch-happen/lib/options.js +++ b/deps/npm/node_modules/make-fetch-happen/lib/options.js @@ -7,10 +7,9 @@ const conditionalHeaders = [ ] const configureOptions = (opts) => { - const options = { ...opts } + const {strictSSL, ...options} = { ...opts } options.method = options.method ? options.method.toUpperCase() : 'GET' - if (Object.prototype.hasOwnProperty.call(options, 'strictSSL')) - options.rejectUnauthorized = options.strictSSL + options.rejectUnauthorized = strictSSL !== false if (!options.retry) options.retry = { retries: 0 } diff --git a/deps/npm/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/make-fetch-happen/package.json index 013156756e9772..dae7b37da40691 100644 --- a/deps/npm/node_modules/make-fetch-happen/package.json +++ b/deps/npm/node_modules/make-fetch-happen/package.json @@ -1,6 +1,6 @@ { "name": "make-fetch-happen", - "version": "9.0.5", + "version": "9.1.0", "description": "Opinionated, caching, retrying fetch client", "main": "lib/index.js", "files": [ diff --git a/deps/npm/node_modules/read-package-json/package.json b/deps/npm/node_modules/read-package-json/package.json index 6589be8063714a..5ca535cfd14adb 100644 --- a/deps/npm/node_modules/read-package-json/package.json +++ b/deps/npm/node_modules/read-package-json/package.json @@ -1,6 +1,6 @@ { "name": "read-package-json", - "version": "4.0.0", + "version": "4.0.1", "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", "description": "The thing npm uses to read package.json files with semantics and defaults and validation", "repository": { @@ -11,9 +11,13 @@ "scripts": { "prerelease": "npm t", "postrelease": "npm publish && git push --follow-tags", - "pretest": "standard", "release": "standard-version -s", - "test": "tap --nyc-arg=--all --coverage test/*.js" + "test": "tap --nyc-arg=--all --coverage test/*.js --branches 68 --functions 83 --lines 76 --statements 77", + "npmclilint": "npmcli-lint", + "lint": "npm run npmclilint -- --ignore-pattern test/fixtures \"*.*js\" \"test/**/*.*js\"", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint --", + "postsnap": "npm run lintfix --" }, "dependencies": { "glob": "^7.1.1", @@ -22,9 +26,9 @@ "npm-normalize-package-bin": "^1.0.0" }, "devDependencies": { - "standard": "^11.0.0", - "standard-version": "^4.3.0", - "tap": "^11.1.2" + "@npmcli/lint": "^1.0.2", + "standard-version": "^9.3.1", + "tap": "^15.0.9" }, "license": "ISC", "files": [ diff --git a/deps/npm/node_modules/read-package-json/read-json.js b/deps/npm/node_modules/read-package-json/read-json.js index 4226e77c0ff38c..04d22e3af7e211 100644 --- a/deps/npm/node_modules/read-package-json/read-json.js +++ b/deps/npm/node_modules/read-package-json/read-json.js @@ -20,7 +20,7 @@ readJson.extraSet = [ readme, mans, bins, - githead + githead, ] var typoWarned = {} @@ -36,7 +36,9 @@ function readJson (file, log_, strict_, cb_) { } } - if (!log) log = function () {} + if (!log) { + log = function () {} + } cb = arguments[arguments.length - 1] readJson_(file, log, strict, cb) @@ -52,7 +54,9 @@ function stripBOM (content) { // Remove byte order marker. This catches EF BB BF (the UTF-8 BOM) // because the buffer-to-string conversion in `fs.readFileSync()` // translates it to FEFF, the UTF-16 BOM. 
- if (content.charCodeAt(0) === 0xFEFF) content = content.slice(1) + if (content.charCodeAt(0) === 0xFEFF) { + content = content.slice(1) + } return content } @@ -87,9 +91,13 @@ function parseJson (file, er, d, log, strict, cb) { } }) } - if (er) return cb(er) + if (er) { + return cb(er) + } - if (cache[d]) return cb(null, jsonClone(cache[d])) + if (cache[d]) { + return cb(null, jsonClone(cache[d])) + } var data @@ -102,7 +110,9 @@ function parseJson (file, er, d, log, strict, cb) { } } catch (er) { data = parseIndex(d) - if (!data) return cb(parseError(er, file)) + if (!data) { + return cb(parseError(er, file)) + } } extrasCached(file, d, data, log, strict, cb) @@ -118,16 +128,24 @@ function extrasCached (file, d, data, log, strict, cb) { } function indexjs (file, er, log, strict, cb) { - if (path.basename(file) === 'index.js') return cb(er) + if (path.basename(file) === 'index.js') { + return cb(er) + } var index = path.resolve(path.dirname(file), 'index.js') fs.readFile(index, 'utf8', function (er2, d) { - if (er2) return cb(er) + if (er2) { + return cb(er) + } - if (cache[d]) return cb(null, cache[d]) + if (cache[d]) { + return cb(null, cache[d]) + } var data = parseIndex(d) - if (!data) return cb(er) + if (!data) { + return cb(er) + } extrasCached(file, d, data, log, strict, cb) }) @@ -144,7 +162,9 @@ function extras (file, data, log_, strict_, cb_) { } } - if (!log) log = function () {} + if (!log) { + log = function () {} + } cb = arguments[i] var set = readJson.extraSet @@ -155,15 +175,23 @@ function extras (file, data, log_, strict_, cb_) { }) function then (er) { - if (errState) return - if (er) return cb(errState = er) - if (--n > 0) return + if (errState) { + return + } + if (er) { + return cb(errState = er) + } + if (--n > 0) { + return + } final(file, data, log, strict, cb) } } function scriptpath (file, data, cb) { - if (!data.scripts) return cb(null, data) + if (!data.scripts) { + return cb(null, data) + } var k = Object.keys(data.scripts) k.forEach(scriptpath_, data.scripts) cb(null, data) @@ -172,7 +200,9 @@ function scriptpath (file, data, cb) { function scriptpath_ (key) { var s = this[key] // This is never allowed, and only causes problems - if (typeof s !== 'string') return delete this[key] + if (typeof s !== 'string') { + return delete this[key] + } var spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/ if (s.match(spre)) { @@ -183,17 +213,25 @@ function scriptpath_ (key) { function gypfile (file, data, cb) { var dir = path.dirname(file) var s = data.scripts || {} - if (s.install || s.preinstall) return cb(null, data) + if (s.install || s.preinstall) { + return cb(null, data) + } glob('*.gyp', { cwd: dir }, function (er, files) { - if (er) return cb(er) - if (data.gypfile === false) return cb(null, data) + if (er) { + return cb(er) + } + if (data.gypfile === false) { + return cb(null, data) + } gypfile_(file, data, files, cb) }) } function gypfile_ (file, data, files, cb) { - if (!files.length) return cb(null, data) + if (!files.length) { + return cb(null, data) + } var s = data.scripts || {} s.install = 'node-gyp rebuild' data.scripts = s @@ -204,15 +242,21 @@ function gypfile_ (file, data, files, cb) { function serverjs (file, data, cb) { var dir = path.dirname(file) var s = data.scripts || {} - if (s.start) return cb(null, data) + if (s.start) { + return cb(null, data) + } glob('server.js', { cwd: dir }, function (er, files) { - if (er) return cb(er) + if (er) { + return cb(er) + } serverjs_(file, data, files, cb) }) } function serverjs_ (file, data, files, cb) { - if 
(!files.length) return cb(null, data) + if (!files.length) { + return cb(null, data) + } var s = data.scripts || {} s.start = 'node server.js' data.scripts = s @@ -220,11 +264,15 @@ function serverjs_ (file, data, files, cb) { } function authors (file, data, cb) { - if (data.contributors) return cb(null, data) + if (data.contributors) { + return cb(null, data) + } var af = path.resolve(path.dirname(file), 'AUTHORS') fs.readFile(af, 'utf8', function (er, ad) { // ignore error. just checking it. - if (er) return cb(null, data) + if (er) { + return cb(null, data) + } authors_(file, data, ad, cb) }) } @@ -240,16 +288,22 @@ function authors_ (file, data, ad, cb) { } function readme (file, data, cb) { - if (data.readme) return cb(null, data) + if (data.readme) { + return cb(null, data) + } var dir = path.dirname(file) var globOpts = { cwd: dir, nocase: true, mark: true } glob('{README,README.*}', globOpts, function (er, files) { - if (er) return cb(er) + if (er) { + return cb(er) + } // don't accept directories. files = files.filter(function (file) { return !file.match(/\/$/) }) - if (!files.length) return cb() + if (!files.length) { + return cb() + } var fn = preferMarkdownReadme(files) var rm = path.resolve(dir, fn) readme_(file, data, rm, cb) @@ -275,7 +329,9 @@ function readme_ (file, data, rm, cb) { var rmfn = path.basename(rm) fs.readFile(rm, 'utf8', function (er, rm) { // maybe not readable, or something. - if (er) return cb() + if (er) { + return cb() + } data.readme = rm data.readmeFilename = rmfn return cb(er, data) @@ -284,31 +340,32 @@ function readme_ (file, data, rm, cb) { function mans (file, data, cb) { var m = data.directories && data.directories.man - if (data.man || !m) return cb(null, data) + if (data.man || !m) { + return cb(null, data) + } m = path.resolve(path.dirname(file), m) glob('**/*.[0-9]', { cwd: m }, function (er, mans) { - if (er) return cb(er) - mans_(file, data, mans, cb) - }) -} - -function mans_ (file, data, mans, cb) { - var m = data.directories && data.directories.man - data.man = mans.map(function (mf) { - return path.resolve(path.dirname(file), m, mf) + if (er) { + return cb(er) + } + data.man = mans + return cb(null, data) }) - return cb(null, data) } function bins (file, data, cb) { data = normalizePackageBin(data) var m = data.directories && data.directories.bin - if (data.bin || !m) return cb(null, data) + if (data.bin || !m) { + return cb(null, data) + } m = path.resolve(path.dirname(file), m) glob('**', { cwd: m }, function (er, bins) { - if (er) return cb(er) + if (er) { + return cb(er) + } bins_(file, data, bins, cb) }) } @@ -330,11 +387,14 @@ function bundleDependencies (file, data, cb) { var bdd = 'bundledDependencies' // normalize key name if (data[bdd] !== undefined) { - if (data[bd] === undefined) data[bd] = data[bdd] + if (data[bd] === undefined) { + data[bd] = data[bdd] + } delete data[bdd] } - if (data[bd] === false) delete data[bd] - else if (data[bd] === true) { + if (data[bd] === false) { + delete data[bd] + } else if (data[bd] === true) { data[bd] = Object.keys(data.dependencies || {}) } else if (data[bd] !== undefined && !Array.isArray(data[bd])) { delete data[bd] @@ -343,16 +403,24 @@ function bundleDependencies (file, data, cb) { } function githead (file, data, cb) { - if (data.gitHead) return cb(null, data) + if (data.gitHead) { + return cb(null, data) + } var dir = path.dirname(file) var head = path.resolve(dir, '.git/HEAD') fs.readFile(head, 'utf8', function (er, head) { - if (er) return cb(null, data) - githead_(file, data, dir, 
head, cb) + if (er) { + var parent = path.dirname(dir) + if (parent === dir) { + return cb(null, data) + } + return githead(dir, data, cb) + } + githead_(data, dir, head, cb) }) } -function githead_ (file, data, dir, head, cb) { +function githead_ (data, dir, head, cb) { if (!head.match(/^ref: /)) { data.gitHead = head.trim() return cb(null, data) @@ -388,16 +456,24 @@ function githead_ (file, data, dir, head, cb) { * normalize-package-data if it had access to the file path. */ function checkBinReferences_ (file, data, warn, cb) { - if (!(data.bin instanceof Object)) return cb() + if (!(data.bin instanceof Object)) { + return cb() + } var keys = Object.keys(data.bin) var keysLeft = keys.length - if (!keysLeft) return cb() + if (!keysLeft) { + return cb() + } function handleExists (relName, result) { keysLeft-- - if (!result) warn('No bin file found at ' + relName) - if (!keysLeft) cb() + if (!result) { + warn('No bin file found at ' + relName) + } + if (!keysLeft) { + cb() + } } keys.forEach(function (key) { @@ -421,8 +497,12 @@ function final (file, data, log, strict, cb) { var pId = makePackageId(data) function warn (msg) { - if (typoWarned[pId]) return - if (log) log('package.json', pId, msg) + if (typoWarned[pId]) { + return + } + if (log) { + log('package.json', pId, msg) + } } try { @@ -451,11 +531,15 @@ function cleanString (str) { function parseIndex (data) { data = data.split(/^\/\*\*package(?:\s|$)/m) - if (data.length < 2) return null + if (data.length < 2) { + return null + } data = data[1] data = data.split(/\*\*\/$/m) - if (data.length < 2) return null + if (data.length < 2) { + return null + } data = data[0] data = data.replace(/^\s*\*/mg, '') diff --git a/deps/npm/node_modules/tar/lib/normalize-unicode.js b/deps/npm/node_modules/tar/lib/normalize-unicode.js new file mode 100644 index 00000000000000..4aeb1d50db9e19 --- /dev/null +++ b/deps/npm/node_modules/tar/lib/normalize-unicode.js @@ -0,0 +1,11 @@ +// warning: extremely hot code path. +// This has been meticulously optimized for use +// within npm install on large package trees. +// Do not edit without careful benchmarking. +const normalizeCache = Object.create(null) +const {hasOwnProperty} = Object.prototype +module.exports = s => { + if (!hasOwnProperty.call(normalizeCache, s)) + normalizeCache[s] = s.normalize('NFKD') + return normalizeCache[s] +} diff --git a/deps/npm/node_modules/tar/lib/path-reservations.js b/deps/npm/node_modules/tar/lib/path-reservations.js index 8d0ead9b6017f1..8183c45f8535c9 100644 --- a/deps/npm/node_modules/tar/lib/path-reservations.js +++ b/deps/npm/node_modules/tar/lib/path-reservations.js @@ -7,7 +7,7 @@ // while still allowing maximal safe parallelization. const assert = require('assert') -const normPath = require('./normalize-windows-path.js') +const normalize = require('./normalize-unicode.js') const stripSlashes = require('./strip-trailing-slashes.js') const { join } = require('path') @@ -28,7 +28,7 @@ module.exports = () => { const getDirs = path => { const dirs = path.split('/').slice(0, -1).reduce((set, path) => { if (set.length) - path = normPath(join(set[set.length - 1], path)) + path = join(set[set.length - 1], path) set.push(path || '/') return set }, []) @@ -116,9 +116,8 @@ module.exports = () => { // So, we just pretend that every path matches every other path here, // effectively removing all parallelization on windows. paths = isWindows ? 
['win32 parallelization disabled'] : paths.map(p => { - return stripSlashes(normPath(join(p))) - .normalize('NFKD') - .toLowerCase() + // don't need normPath, because we skip this entirely for windows + return normalize(stripSlashes(join(p))).toLowerCase() }) const dirs = new Set( diff --git a/deps/npm/node_modules/tar/lib/strip-trailing-slashes.js b/deps/npm/node_modules/tar/lib/strip-trailing-slashes.js index f702ed5a5c0ce5..3e3ecec5a402b8 100644 --- a/deps/npm/node_modules/tar/lib/strip-trailing-slashes.js +++ b/deps/npm/node_modules/tar/lib/strip-trailing-slashes.js @@ -1,24 +1,13 @@ -// this is the only approach that was significantly faster than using -// str.replace(/\/+$/, '') for strings ending with a lot of / chars and -// containing multiple / chars. -const batchStrings = [ - '/'.repeat(1024), - '/'.repeat(512), - '/'.repeat(256), - '/'.repeat(128), - '/'.repeat(64), - '/'.repeat(32), - '/'.repeat(16), - '/'.repeat(8), - '/'.repeat(4), - '/'.repeat(2), - '/', -] - +// warning: extremely hot code path. +// This has been meticulously optimized for use +// within npm install on large package trees. +// Do not edit without careful benchmarking. module.exports = str => { - for (const s of batchStrings) { - while (str.length >= s.length && str.slice(-1 * s.length) === s) - str = str.slice(0, -1 * s.length) + let i = str.length - 1 + let slashesStart = -1 + while (i > -1 && str.charAt(i) === '/') { + slashesStart = i + i-- } - return str + return slashesStart === -1 ? str : str.slice(0, slashesStart) } diff --git a/deps/npm/node_modules/tar/lib/unpack.js b/deps/npm/node_modules/tar/lib/unpack.js index 7f397f10379211..7d39dc0f7e79fd 100644 --- a/deps/npm/node_modules/tar/lib/unpack.js +++ b/deps/npm/node_modules/tar/lib/unpack.js @@ -17,6 +17,7 @@ const pathReservations = require('./path-reservations.js') const stripAbsolutePath = require('./strip-absolute-path.js') const normPath = require('./normalize-windows-path.js') const stripSlash = require('./strip-trailing-slashes.js') +const normalize = require('./normalize-unicode.js') const ONENTRY = Symbol('onEntry') const CHECKFS = Symbol('checkFs') @@ -101,8 +102,7 @@ const uint32 = (a, b, c) => // Note that on windows, we always drop the entire cache whenever a // symbolic link is encountered, because 8.3 filenames are impossible // to reason about, and collisions are hazards rather than just failures. -const cacheKeyNormalize = path => stripSlash(normPath(path)) - .normalize('NFKD') +const cacheKeyNormalize = path => normalize(stripSlash(normPath(path))) .toLowerCase() const pruneCache = (cache, abs) => { diff --git a/deps/npm/node_modules/tar/package.json b/deps/npm/node_modules/tar/package.json index a10cdac85ea8f8..9f9977a0ca99b0 100644 --- a/deps/npm/node_modules/tar/package.json +++ b/deps/npm/node_modules/tar/package.json @@ -2,7 +2,7 @@ "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", "name": "tar", "description": "tar for node", - "version": "6.1.10", + "version": "6.1.11", "repository": { "type": "git", "url": "https://github.com/npm/node-tar.git" diff --git a/deps/npm/package.json b/deps/npm/package.json index cb8b11ff5e61f5..d5f3cf54cf89c6 100644 --- a/deps/npm/package.json +++ b/deps/npm/package.json @@ -1,5 +1,5 @@ { - "version": "7.21.0", + "version": "7.21.1", "name": "npm", "description": "a package manager for JavaScript", "workspaces": [ @@ -63,7 +63,7 @@ "ansicolors": "~0.3.2", "ansistyles": "~0.1.3", "archy": "~1.0.0", - "cacache": "^15.2.0", + "cacache": "^15.3.0", "chalk": "^4.1.2", "chownr": "^2.0.0", "cli-columns": "^3.1.2", @@ -88,7 +88,7 @@ "libnpmsearch": "^3.1.1", "libnpmteam": "^2.0.3", "libnpmversion": "^1.2.1", - "make-fetch-happen": "^9.0.5", + "make-fetch-happen": "^9.1.0", "minipass": "^3.1.3", "minipass-pipeline": "^1.2.4", "mkdirp": "^1.0.4", @@ -108,13 +108,13 @@ "parse-conflict-json": "^1.1.1", "qrcode-terminal": "^0.12.0", "read": "~1.0.7", - "read-package-json": "^4.0.0", + "read-package-json": "^4.0.1", "read-package-json-fast": "^2.0.3", "readdir-scoped-modules": "^1.1.0", "rimraf": "^3.0.2", "semver": "^7.3.5", "ssri": "^8.0.1", - "tar": "^6.1.10", + "tar": "^6.1.11", "text-table": "~0.2.0", "tiny-relative-date": "^1.3.0", "treeverse": "^1.0.4", @@ -209,7 +209,7 @@ "test": "tap", "check-coverage": "tap", "snap": "tap", - "postsnap": "make -s docs/content/*/*.md", + "postsnap": "make -s mandocs", "test:nocleanup": "NO_TEST_CLEANUP=1 npm run test --", "sudotest": "sudo npm run test --", "sudotest:nocleanup": "sudo NO_TEST_CLEANUP=1 npm run test --", diff --git a/deps/npm/tap-snapshots/test/lib/config.js.test.cjs b/deps/npm/tap-snapshots/test/lib/config.js.test.cjs index a094bd32d56dfc..8f349a6f54249e 100644 --- a/deps/npm/tap-snapshots/test/lib/config.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/config.js.test.cjs @@ -5,158 +5,334 @@ * Make sure to inspect the output below. Do not ignore changes! */ 'use strict' -exports[`test/lib/config.js TAP config edit --location=global > should write global config file 1`] = ` -;;;; -; npm globalconfig file: /etc/npmrc -; this is a simple ini-formatted file -; lines that start with semi-colons are comments -; run \`npm help 7 config\` for documentation of the various options -; -; Configs like \`@scope:registry\` map a scope to a given registry url. -; -; Configs like \`///:_authToken\` are auth that is restricted -; to the registry host specified. - -init.author.name=Foo - -;;;; -; all available options shown below with default values -;;;; - - -; init-author-name= -; init-version=1.0.0 -; init.author.name= -; init.version=1.0.0 - -` - -exports[`test/lib/config.js TAP config edit > should write config file 1`] = ` -;;;; -; npm userconfig file: ~/.npmrc -; this is a simple ini-formatted file -; lines that start with semi-colons are comments -; run \`npm help 7 config\` for documentation of the various options -; -; Configs like \`@scope:registry\` map a scope to a given registry url. -; -; Configs like \`///:_authToken\` are auth that is restricted -; to the registry host specified. 
- -//registry.npmjs.org/:_authToken=0000000 -init.author.name=Foo -sign-git-commit=true - -;;;; -; all available options shown below with default values -;;;; - - -; init-author-name= -; init-version=1.0.0 -; init.author.name= -; init.version=1.0.0 - -` - -exports[`test/lib/config.js TAP config edit > should write config file 2`] = ` -;;;; -; npm userconfig file: ~/.npmrc -; this is a simple ini-formatted file -; lines that start with semi-colons are comments -; run \`npm help 7 config\` for documentation of the various options -; -; Configs like \`@scope:registry\` map a scope to a given registry url. -; -; Configs like \`///:_authToken\` are auth that is restricted -; to the registry host specified. - - - -;;;; -; all available options shown below with default values -;;;; - - -; init-author-name= -; init-version=1.0.0 -; init.author.name= -; init.version=1.0.0 - -` - -exports[`test/lib/config.js TAP config get no args > should list configs on config get no args 1`] = ` -; "cli" config from command line options - -cat = true -chai = true -dog = true -editor = "vi" -json = false -location = "user" -long = false - -; node bin location = /path/to/node -; cwd = {CWD} -; HOME = ~/ -; Run \`npm config ls -l\` to show all defaults. +exports[`test/lib/config.js TAP config list --json > output matches snapshot 1`] = ` +{ + "prefix": "{LOCALPREFIX}", + "userconfig": "{HOME}/.npmrc", + "json": true, + "projectloaded": "yes", + "userloaded": "yes", + "globalloaded": "yes", + "access": null, + "all": false, + "allow-same-version": false, + "also": null, + "audit": true, + "audit-level": null, + "auth-type": "legacy", + "before": null, + "bin-links": true, + "browser": null, + "ca": null, + "cache": "{CACHE}", + "cache-max": null, + "cache-min": 0, + "cafile": null, + "call": "", + "cert": null, + "ci-name": null, + "cidr": null, + "color": true, + "commit-hooks": true, + "depth": null, + "description": true, + "dev": false, + "diff": [], + "diff-ignore-all-space": false, + "diff-name-only": false, + "diff-no-prefix": false, + "diff-dst-prefix": "b/", + "diff-src-prefix": "a/", + "diff-text": false, + "diff-unified": 3, + "dry-run": false, + "editor": "{EDITOR}", + "engine-strict": false, + "fetch-retries": 2, + "fetch-retry-factor": 10, + "fetch-retry-maxtimeout": 60000, + "fetch-retry-mintimeout": 10000, + "fetch-timeout": 300000, + "force": false, + "foreground-scripts": false, + "format-package-lock": true, + "fund": true, + "git": "git", + "git-tag-version": true, + "global": false, + "global-style": false, + "globalconfig": "{GLOBALPREFIX}/npmrc", + "heading": "npm", + "https-proxy": null, + "if-present": false, + "ignore-scripts": false, + "include": [], + "include-staged": false, + "init-author-email": "", + "init-author-name": "", + "init-author-url": "", + "init-license": "ISC", + "init-module": "{HOME}/.npm-init.js", + "init-version": "1.0.0", + "init.author.email": "", + "init.author.name": "", + "init.author.url": "", + "init.license": "ISC", + "init.module": "{HOME}/.npm-init.js", + "init.version": "1.0.0", + "key": null, + "legacy-bundling": false, + "legacy-peer-deps": false, + "link": false, + "local-address": null, + "location": "user", + "loglevel": "notice", + "logs-max": 10, + "long": false, + "maxsockets": 15, + "message": "%s", + "node-options": null, + "node-version": "{NODE-VERSION}", + "noproxy": [ + "" + ], + "npm-version": "{NPM-VERSION}", + "offline": false, + "omit": [], + "only": null, + "optional": null, + "otp": null, + "package": [], + "package-lock": true, + 
"package-lock-only": false, + "pack-destination": ".", + "parseable": false, + "prefer-offline": false, + "prefer-online": false, + "preid": "", + "production": null, + "progress": true, + "proxy": null, + "read-only": false, + "rebuild-bundle": true, + "registry": "https://registry.npmjs.org/", + "save": true, + "save-bundle": false, + "save-dev": false, + "save-exact": false, + "save-optional": false, + "save-peer": false, + "save-prefix": "^", + "save-prod": false, + "scope": "", + "script-shell": null, + "searchexclude": "", + "searchlimit": 20, + "searchopts": "", + "searchstaleness": 900, + "shell": "{SHELL}", + "shrinkwrap": true, + "sign-git-commit": false, + "sign-git-tag": false, + "sso-poll-frequency": 500, + "sso-type": "oauth", + "strict-peer-deps": false, + "strict-ssl": true, + "tag": "latest", + "tag-version-prefix": "v", + "timing": false, + "tmp": "{TMP}", + "umask": 0, + "unicode": false, + "update-notifier": true, + "usage": false, + "user-agent": "npm/{NPM-VERSION} node/{NODE-VERSION} {PLATFORM} {ARCH} workspaces/false", + "version": false, + "versions": false, + "viewer": "{VIEWER}", + "which": null, + "workspace": [], + "workspaces": false, + "yes": null, + "metrics-registry": "https://registry.npmjs.org/" +} ` -exports[`test/lib/config.js TAP config list --long > should list all configs 1`] = ` +exports[`test/lib/config.js TAP config list --long > output matches snapshot 1`] = ` ; "default" config from default values +_auth = (protected) +access = null +all = false +allow-same-version = false +also = null +audit = true +audit-level = null +auth-type = "legacy" +before = null +bin-links = true +browser = null +ca = null +cache = "{CACHE}" +cache-max = null +cache-min = 0 +cafile = null +call = "" +cert = null +ci-name = null +cidr = null +color = true +commit-hooks = true +depth = null +description = true +dev = false +diff = [] +diff-dst-prefix = "b/" +diff-ignore-all-space = false +diff-name-only = false +diff-no-prefix = false +diff-src-prefix = "a/" +diff-text = false +diff-unified = 3 +dry-run = false +editor = "{EDITOR}" +engine-strict = false +fetch-retries = 2 +fetch-retry-factor = 10 +fetch-retry-maxtimeout = 60000 +fetch-retry-mintimeout = 10000 +fetch-timeout = 300000 +force = false +foreground-scripts = false +format-package-lock = true +fund = true +git = "git" +git-tag-version = true +global = false +global-style = false +globalconfig = "{GLOBALPREFIX}/npmrc" +heading = "npm" +https-proxy = null +if-present = false +ignore-scripts = false +include = [] +include-staged = false +init-author-email = "" init-author-name = "" +init-author-url = "" +init-license = "ISC" +init-module = "{HOME}/.npm-init.js" init-version = "1.0.0" +init.author.email = "" init.author.name = "" +init.author.url = "" +init.license = "ISC" +init.module = "{HOME}/.npm-init.js" init.version = "1.0.0" - -; "cli" config from command line options - -cat = true -chai = true -dog = true -editor = "vi" json = false +key = null +legacy-bundling = false +legacy-peer-deps = false +link = false +local-address = null location = "user" -long = true -` +loglevel = "notice" +logs-max = 10 +; long = false ; overridden by cli +maxsockets = 15 +message = "%s" +metrics-registry = "https://registry.npmjs.org/" +node-options = null +node-version = "{NODE-VERSION}" +noproxy = [""] +npm-version = "{NPM-VERSION}" +offline = false +omit = [] +only = null +optional = null +otp = null +pack-destination = "." 
+package = [] +package-lock = true +package-lock-only = false +parseable = false +prefer-offline = false +prefer-online = false +; prefix = "{REALGLOBALREFIX}" ; overridden by cli +preid = "" +production = null +progress = true +proxy = null +read-only = false +rebuild-bundle = true +registry = "https://registry.npmjs.org/" +save = true +save-bundle = false +save-dev = false +save-exact = false +save-optional = false +save-peer = false +save-prefix = "^" +save-prod = false +scope = "" +script-shell = null +searchexclude = "" +searchlimit = 20 +searchopts = "" +searchstaleness = 900 +shell = "{SHELL}" +shrinkwrap = true +sign-git-commit = false +sign-git-tag = false +sso-poll-frequency = 500 +sso-type = "oauth" +strict-peer-deps = false +strict-ssl = true +tag = "latest" +tag-version-prefix = "v" +timing = false +tmp = "{TMP}" +umask = 0 +unicode = false +update-notifier = true +usage = false +user-agent = "npm/{NPM-VERSION} node/{NODE-VERSION} {PLATFORM} {ARCH} workspaces/false" +; userconfig = "{HOME}/.npmrc" ; overridden by cli +version = false +versions = false +viewer = "{VIEWER}" +which = null +workspace = [] +workspaces = false +yes = null + +; "global" config from {GLOBALPREFIX}/npmrc + +globalloaded = "yes" + +; "user" config from {HOME}/.npmrc + +userloaded = "yes" + +; "project" config from {LOCALPREFIX}/.npmrc + +projectloaded = "yes" -exports[`test/lib/config.js TAP config list > should list configs 1`] = ` ; "cli" config from command line options -cat = true -chai = true -dog = true -editor = "vi" -json = false -location = "user" -long = false - -; node bin location = /path/to/node -; cwd = {CWD} -; HOME = ~/ -; Run \`npm config ls -l\` to show all defaults. +long = true +prefix = "{LOCALPREFIX}" +userconfig = "{HOME}/.npmrc" ` -exports[`test/lib/config.js TAP config list overrides > should list overridden configs 1`] = ` +exports[`test/lib/config.js TAP config list > output matches snapshot 1`] = ` ; "cli" config from command line options -cat = true -chai = true -dog = true -editor = "vi" -init.author.name = "Bar" -json = false -location = "user" -long = false - -; "user" config from ~/.npmrc - -; //private-reg.npmjs.org/:_authThoken = (protected) ; overridden by cli -; init.author.name = "Foo" ; overridden by cli +prefix = "{LOCALPREFIX}" +userconfig = "{HOME}/.npmrc" -; node bin location = /path/to/node -; cwd = {CWD} -; HOME = ~/ +; node bin location = {EXECPATH} +; cwd = {NPMDIR} +; HOME = {HOME} ; Run \`npm config ls -l\` to show all defaults. 
` diff --git a/deps/npm/tap-snapshots/test/lib/publish.js.test.cjs b/deps/npm/tap-snapshots/test/lib/publish.js.test.cjs index 7a7502e02e338a..13e8f7d4b49fa0 100644 --- a/deps/npm/tap-snapshots/test/lib/publish.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/publish.js.test.cjs @@ -15,6 +15,7 @@ exports[`test/lib/publish.js TAP private workspaces colorless > should publish a Array [ Object { "_id": "@npmcli/b@1.0.0", + "gitHead": "{GITHEAD}", "name": "@npmcli/b", "readme": "ERROR: No README data found!", "version": "1.0.0", @@ -32,6 +33,7 @@ exports[`test/lib/publish.js TAP private workspaces with color > should publish Array [ Object { "_id": "@npmcli/b@1.0.0", + "gitHead": "{GITHEAD}", "name": "@npmcli/b", "readme": "ERROR: No README data found!", "version": "1.0.0", @@ -70,6 +72,7 @@ exports[`test/lib/publish.js TAP workspaces all workspaces > should publish all Array [ Object { "_id": "workspace-a@1.2.3-a", + "gitHead": "{GITHEAD}", "name": "workspace-a", "readme": "ERROR: No README data found!", "repository": Object { @@ -83,6 +86,7 @@ Array [ "bugs": Object { "url": "https://github.com/npm/workspace-b/issues", }, + "gitHead": "{GITHEAD}", "homepage": "https://github.com/npm/workspace-b#readme", "name": "workspace-b", "readme": "ERROR: No README data found!", @@ -94,6 +98,7 @@ Array [ }, Object { "_id": "workspace-n@1.2.3-n", + "gitHead": "{GITHEAD}", "name": "workspace-n", "readme": "ERROR: No README data found!", "version": "1.2.3-n", @@ -123,6 +128,7 @@ exports[`test/lib/publish.js TAP workspaces json > should publish all workspaces Array [ Object { "_id": "workspace-a@1.2.3-a", + "gitHead": "{GITHEAD}", "name": "workspace-a", "readme": "ERROR: No README data found!", "repository": Object { @@ -136,6 +142,7 @@ Array [ "bugs": Object { "url": "https://github.com/npm/workspace-b/issues", }, + "gitHead": "{GITHEAD}", "homepage": "https://github.com/npm/workspace-b#readme", "name": "workspace-b", "readme": "ERROR: No README data found!", @@ -147,6 +154,7 @@ Array [ }, Object { "_id": "workspace-n@1.2.3-n", + "gitHead": "{GITHEAD}", "name": "workspace-n", "readme": "ERROR: No README data found!", "version": "1.2.3-n", @@ -164,6 +172,7 @@ exports[`test/lib/publish.js TAP workspaces one workspace > should publish given Array [ Object { "_id": "workspace-a@1.2.3-a", + "gitHead": "{GITHEAD}", "name": "workspace-a", "readme": "ERROR: No README data found!", "repository": Object { diff --git a/deps/npm/test/fixtures/sandbox.js b/deps/npm/test/fixtures/sandbox.js new file mode 100644 index 00000000000000..4cdd9b70dbc6f9 --- /dev/null +++ b/deps/npm/test/fixtures/sandbox.js @@ -0,0 +1,366 @@ +const { createHook, executionAsyncId } = require('async_hooks') +const { EventEmitter } = require('events') +const { homedir, tmpdir } = require('os') +const { dirname, join } = require('path') +const { promisify } = require('util') +const mkdirp = require('mkdirp-infer-owner') +const npmlog = require('npmlog') +const rimraf = promisify(require('rimraf')) +const t = require('tap') + +let active = null +const chain = new Map() +const sandboxes = new Map() + +// keep a reference to the real process +const _process = process + +const processHook = createHook({ + init: (asyncId, type, triggerAsyncId, resource) => { + // track parentage of asyncIds + chain.set(asyncId, triggerAsyncId) + }, + before: (asyncId) => { + // find the nearest parent id that has a sandbox + let parent = asyncId + while (chain.has(parent) && !sandboxes.has(parent)) { + parent = chain.get(parent) + } + + process = 
sandboxes.has(parent) + ? sandboxes.get(parent) + : _process + }, +}).enable() + +for (const level in npmlog.levels) { + npmlog[`_${level}`] = npmlog[level] + npmlog[level] = (...args) => { + process._logs = process._logs || {} + process._logs[level] = process._logs[level] || [] + process._logs[level].push(args) + const _level = npmlog.level + npmlog.level = 'silent' + npmlog[`_${level}`](...args) + npmlog.level = _level + } +} + +const _data = Symbol('sandbox.data') +const _dirs = Symbol('sandbox.dirs') +const _test = Symbol('sandbox.test') +const _mocks = Symbol('sandbox.mocks') +const _npm = Symbol('sandbox.npm') +const _parent = Symbol('sandbox.parent') +const _output = Symbol('sandbox.output') +const _proxy = Symbol('sandbox.proxy') +const _get = Symbol('sandbox.proxy.get') +const _set = Symbol('sandbox.proxy.set') + +// these config keys can be redacted widely +const redactedDefaults = [ + 'node-version', + 'npm-version', + 'tmp', +] + +// we can't just replace these values everywhere because they're known to be +// very short strings that could be present all over the place, so we only +// replace them if they're located within quotes for now +const vagueRedactedDefaults = [ + 'editor', + 'shell', +] + +const normalize = (str) => str + .replace(/\r\n/g, '\n') // normalize line endings (for ini) + .replace(/[A-z]:\\/g, '\\') // turn windows roots to posix ones + .replace(/\\+/g, '/') // replace \ with / + +class Sandbox extends EventEmitter { + constructor (test, options = {}) { + super() + + this[_test] = test + this[_mocks] = options.mocks || {} + this[_data] = new Map() + this[_output] = [] + const tempDir = `${test.testdirName}-sandbox` + this[_dirs] = { + temp: tempDir, + global: options.global || join(tempDir, 'global'), + home: options.home || join(tempDir, 'home'), + project: options.project || join(tempDir, 'project'), + } + + this[_proxy] = new Proxy(_process, { + get: this[_get].bind(this), + set: this[_set].bind(this), + }) + this[_proxy].env = {} + this[_proxy].argv = [] + + test.cleanSnapshot = this.cleanSnapshot.bind(this) + test.afterEach(() => this.reset()) + test.teardown(() => this.teardown()) + } + + get config () { + return this[_npm] && this[_npm].config + } + + get logs () { + return this[_proxy]._logs + } + + get global () { + return this[_dirs].global + } + + get home () { + return this[_dirs].home + } + + get project () { + return this[_dirs].project + } + + get process () { + return this[_proxy] + } + + get output () { + return this[_output].map((line) => line.join(' ')).join('\n') + } + + cleanSnapshot (snapshot) { + let clean = normalize(snapshot) + + const viewer = _process.platform === 'win32' + ? /"browser"([^:]+|$)/g + : /"man"([^:]+|$)/g + + // the global prefix is platform dependent + const realGlobalPrefix = _process.platform === 'win32' + ? dirname(_process.execPath) + : dirname(dirname(_process.execPath)) + + const cache = _process.platform === 'win32' + ? 
/\{HOME\}\/npm-cache(\r?\n|"|\/|$)/g + : /\{HOME\}\/\.npm(\n|"|\/|$)/g + + // and finally replace some paths we know could be present + clean = clean + .replace(viewer, '"{VIEWER}"$1') + .split(normalize(this[_proxy].execPath)).join('{EXECPATH}') + .split(normalize(_process.execPath)).join('{REALEXECPATH}') + .split(normalize(this.global)).join('{GLOBALPREFIX}') + .split(normalize(realGlobalPrefix)).join('{REALGLOBALREFIX}') + .split(normalize(this.project)).join('{LOCALPREFIX}') + .split(normalize(this.home)).join('{HOME}') + .replace(cache, '{CACHE}$1') + .split(normalize(dirname(dirname(__dirname)))).join('{NPMDIR}') + .split(normalize(tmpdir())).join('{TMP}') + .split(normalize(homedir())).join('{REALHOME}') + .split(this[_proxy].platform).join('{PLATFORM}') + .split(this[_proxy].arch).join('{ARCH}') + + // We do the defaults after everything else so that they don't cause the + // other cleaners to miss values we would have clobbered here. For + // instance if execPath is /home/user/.nvm/versions/node/1.0.0/bin/node, + // and we replaced the node version first, the real execPath we're trying + // to replace would no longer be represented, and be missed. + if (this[_npm]) { + // replace default config values with placeholders + for (const name of redactedDefaults) { + let value = this[_npm].config.defaults[name] + clean = clean.split(value).join(`{${name.toUpperCase()}}`) + } + + // replace vague default config values that are present within quotes + // with placeholders + for (const name of vagueRedactedDefaults) { + const value = this[_npm].config.defaults[name] + clean = clean.split(`"${value}"`).join(`"{${name.toUpperCase()}}"`) + } + } + + return clean + } + + // test.afterEach hook + reset () { + this.removeAllListeners() + this[_parent] = undefined + this[_output] = [] + this[_data].clear() + this[_proxy].env = {} + this[_proxy].argv = [] + this[_npm] = undefined + } + + // test.teardown hook + teardown () { + if (this[_parent]) { + sandboxes.delete(this[_parent]) + } + return rimraf(this[_dirs].temp).catch(() => null) + } + + // proxy get handler + [_get] (target, prop, receiver) { + if (this[_data].has(prop)) { + return this[_data].get(prop) + } + + if (this[prop] !== undefined) { + return Reflect.get(this, prop, this) + } + + const actual = Reflect.get(target, prop, receiver) + if (typeof actual === 'function') { + // in node 10.1 there's an interesting bug where if a function on process + // is called without explicitly forcing the 'this' arg to something, we + // get 'Illegal invocation' errors. wrapping function properties in their + // own proxy so that we can make sure the context is right fixes it + return new Proxy(actual, { + apply: (target, context, args) => { + return Reflect.apply(target, _process, args) + }, + }) + } + + return actual + } + + // proxy set handler + [_set] (target, prop, value) { + if (prop === 'env') { + value = { + ...value, + HOME: this.home, + } + } + + if (prop === 'argv') { + value = [ + process.execPath, + join(dirname(process.execPath), 'npm'), + ...value, + ] + } + + return this[_data].set(prop, value) + } + + async run (command, argv = []) { + await Promise.all([ + mkdirp(this.project), + mkdirp(this.home), + mkdirp(this.global), + ]) + + // attach the sandbox process now, doing it after the promise above is + // necessary to make sure that only async calls spawned as part of this + // call to run will receive the sandbox. 
if we attach it too early, we + // end up interfering with tap + this[_parent] = executionAsyncId() + this[_data].set('_asyncId', this[_parent]) + sandboxes.set(this[_parent], this[_proxy]) + process = this[_proxy] + + this[_proxy].argv = [ + '--prefix', this.project, + '--userconfig', join(this.home, '.npmrc'), + '--globalconfig', join(this.global, 'npmrc'), + command, + ...argv, + ] + + this[_npm] = this[_test].mock('../../lib/npm.js', this[_mocks]) + this[_npm].output = (...args) => this[_output].push(args) + await this[_npm].load() + // in some node versions (later 10.x) our executionAsyncId at this point + // will for some reason appear to have been triggered by a different parent + // so immediately after load, if we can see that we lost our ancestry, we + // fix it here with a hammer + if (chain.get(executionAsyncId()) !== this[_parent]) { + chain.set(executionAsyncId(), this[_parent]) + process = this[_proxy] + } + + const cmd = this[_npm].argv.shift() + const impl = this[_npm].commands[cmd] + if (!impl) { + throw new Error(`Unknown command: ${cmd}`) + } + + return new Promise((resolve, reject) => { + impl(this[_npm].argv, (err) => { + if (err) { + return reject(err) + } + + return resolve() + }) + }) + } + + async complete (command, argv, partial) { + if (!Array.isArray(argv)) { + partial = argv + argv = [] + } + + await Promise.all([ + mkdirp(this.project), + mkdirp(this.home), + mkdirp(this.global), + ]) + + // attach the sandbox process now, doing it after the promise above is + // necessary to make sure that only async calls spawned as part of this + // call to run will receive the sandbox. if we attach it too early, we + // end up interfering with tap + this[_parent] = executionAsyncId() + this[_data].set('_asyncId', this[_parent]) + sandboxes.set(this[_parent], this[_proxy]) + process = this[_proxy] + + this[_proxy].argv = [ + '--prefix', this.project, + '--userconfig', join(this.home, '.npmrc'), + '--globalconfig', join(this.global, 'npmrc'), + command, + ...argv, + ] + + this[_npm] = this[_test].mock('../../lib/npm.js', this[_mocks]) + this[_npm].output = (...args) => this[_output].push(args) + await this[_npm].load() + // in some node versions (later 10.x) our executionAsyncId at this point + // will for some reason appear to have been triggered by a different parent + // so immediately after load, if we can see that we lost our ancestry, we + // fix it here with a hammer + if (chain.get(executionAsyncId()) !== this[_parent]) { + chain.set(executionAsyncId(), this[_parent]) + process = this[_proxy] + } + + const impl = this[_npm].commands[command] + if (!impl) { + throw new Error(`Unknown command: ${cmd}`) + } + + return impl.completion({ + partialWord: partial, + conf: { + argv: { + remain: ['npm', command, ...argv], + }, + }, + }) + } +} + +module.exports = Sandbox diff --git a/deps/npm/test/lib/config.js b/deps/npm/test/lib/config.js index 8a1e7d85e09aa3..ba47fa11d0bbc7 100644 --- a/deps/npm/test/lib/config.js +++ b/deps/npm/test/lib/config.js @@ -1,659 +1,379 @@ +const { join } = require('path') +const { promisify } = require('util') +const fs = require('fs') +const spawk = require('spawk') const t = require('tap') -const { EventEmitter } = require('events') - -const redactCwd = (path) => { - const normalizePath = p => p - .replace(/\\+/g, '/') - .replace(/\r\n/g, '\n') - const replaceCwd = p => p - .replace(new RegExp(normalizePath(process.cwd()), 'g'), '{CWD}') - const cleanupWinPaths = p => p - .replace(normalizePath(process.execPath), '/path/to/node') - 
.replace(normalizePath(process.env.HOME), '~/') - - return cleanupWinPaths( - replaceCwd( - normalizePath(path) - ) - ) -} - -t.cleanSnapshot = (str) => redactCwd(str) - -let result = '' - -const configDefs = require('../../lib/utils/config') -const definitions = Object.entries(configDefs.definitions) - .filter(([key, def]) => { - return [ - 'init-author-name', - 'init.author.name', - 'init-version', - 'init.version', - ].includes(key) - }).reduce((defs, [key, def]) => { - defs[key] = def - return defs - }, {}) - -const defaults = { - 'init-author-name': '', - 'init-version': '1.0.0', - 'init.author.name': '', - 'init.version': '1.0.0', -} - -const cliConfig = { - editor: 'vi', - json: false, - location: 'user', - long: false, - cat: true, - chai: true, - dog: true, -} - -const npm = { - log: { - warn: () => null, - info: () => null, - enableProgress: () => null, - disableProgress: () => null, - }, - config: { - data: new Map(Object.entries({ - default: { data: defaults, source: 'default values' }, - global: { data: {}, source: '/etc/npmrc' }, - cli: { data: cliConfig, source: 'command line options' }, - })), - get (key) { - return cliConfig[key] - }, - validate () { - return true - }, - }, - output: msg => { - result = msg - }, -} - -const usageUtil = () => 'usage instructions' - -const mocks = { - '../../lib/utils/config/index.js': { defaults, definitions }, - '../../lib/utils/usage.js': usageUtil, -} - -const Config = t.mock('../../lib/config.js', mocks) -const config = new Config(npm) - -t.test('config no args', t => { - config.exec([], (err) => { - t.match(err, /usage instructions/, 'should not error out on empty locations') - t.end() - }) -}) +spawk.preventUnmatched() -t.test('config ignores workspaces', t => { - npm.log.warn = (title, msg) => { - t.equal(title, 'config', 'should warn with expected title') - t.equal( - msg, - 'This command does not support workspaces.', - 'should warn with unsupported option msg' - ) - } - config.execWorkspaces([], [], (err) => { - t.match(err, /usage instructions/, 'should not error out when workspaces are defined') - npm.log.warn = () => null - t.end() - }) +const readFile = promisify(fs.readFile) + +const Sandbox = require('../fixtures/sandbox.js') + +t.test('config no args', async (t) => { + const sandbox = new Sandbox(t) + + await t.rejects(sandbox.run('config', []), { + code: 'EUSAGE', + }, 'rejects with usage') }) -t.test('config list', t => { - t.plan(2) +t.test('config ignores workspaces', async (t) => { + const sandbox = new Sandbox(t) - npm.config.find = () => 'cli' - result = '' - t.teardown(() => { - result = '' - delete npm.config.find - }) + await t.rejects(sandbox.run('config', ['--workspaces']), { + code: 'EUSAGE', + }, 'rejects with usage') - config.exec(['list'], (err) => { - t.error(err, 'npm config list') - t.matchSnapshot(result, 'should list configs') - }) + t.match(sandbox.logs.warn, [['config', 'This command does not support workspaces.']], 'logged the warning') }) -t.test('config list overrides', t => { - t.plan(2) +t.test('config list', async (t) => { + const sandbox = new Sandbox(t) - npm.config.data.set('user', { - data: { - 'init.author.name': 'Foo', - '//private-reg.npmjs.org/:_authThoken': 'f00ba1', + const temp = t.testdir({ + global: { + npmrc: 'globalloaded=yes', + }, + project: { + '.npmrc': 'projectloaded=yes', + }, + home: { + '.npmrc': 'userloaded=yes', }, - source: '~/.npmrc', - }) - cliConfig['init.author.name'] = 'Bar' - npm.config.find = () => 'cli' - result = '' - t.teardown(() => { - result = '' - 
npm.config.data.delete('user') - delete cliConfig['init.author.name'] - delete npm.config.find }) + const global = join(temp, 'global') + const project = join(temp, 'project') + const home = join(temp, 'home') - config.exec(['list'], (err) => { - t.error(err, 'npm config list') - t.matchSnapshot(result, 'should list overridden configs') - }) -}) + await sandbox.run('config', ['list'], { global, project, home }) -t.test('config list --long', t => { - t.plan(2) + t.matchSnapshot(sandbox.output, 'output matches snapshot') +}) - npm.config.find = key => key in cliConfig ? 'cli' : 'default' - cliConfig.long = true - result = '' - t.teardown(() => { - delete npm.config.find - cliConfig.long = false - result = '' +t.test('config list --long', async (t) => { + const temp = t.testdir({ + global: { + npmrc: 'globalloaded=yes', + }, + project: { + '.npmrc': 'projectloaded=yes', + }, + home: { + '.npmrc': 'userloaded=yes', + }, }) + const global = join(temp, 'global') + const project = join(temp, 'project') + const home = join(temp, 'home') - config.exec(['list'], (err) => { - t.error(err, 'npm config list --long') - t.matchSnapshot(result, 'should list all configs') - }) + const sandbox = new Sandbox(t, { global, project, home }) + await sandbox.run('config', ['list', '--long']) + + t.matchSnapshot(sandbox.output, 'output matches snapshot') }) -t.test('config list --json', t => { - t.plan(2) +t.test('config list --json', async (t) => { + const temp = t.testdir({ + global: { + npmrc: 'globalloaded=yes', + }, + project: { + '.npmrc': 'projectloaded=yes', + }, + home: { + '.npmrc': 'userloaded=yes', + }, + }) + const global = join(temp, 'global') + const project = join(temp, 'project') + const home = join(temp, 'home') - cliConfig.json = true - result = '' - npm.config.list = [{ - '//private-reg.npmjs.org/:_authThoken': 'f00ba1', - ...npm.config.data.get('cli').data, - }] - const npmConfigGet = npm.config.get - npm.config.get = key => npm.config.list[0][key] + const sandbox = new Sandbox(t, { global, project, home }) + await sandbox.run('config', ['list', '--json']) - t.teardown(() => { - delete npm.config.list - cliConfig.json = false - npm.config.get = npmConfigGet - result = '' - }) + t.matchSnapshot(sandbox.output, 'output matches snapshot') +}) - config.exec(['list'], (err) => { - t.error(err, 'npm config list --json') - t.same( - JSON.parse(result), - { - editor: 'vi', - json: true, - location: 'user', - long: false, - cat: true, - chai: true, - dog: true, - }, - 'should list configs usin json' - ) - }) +t.test('config delete no args', async (t) => { + const sandbox = new Sandbox(t) + + await t.rejects(sandbox.run('config', ['delete']), { + code: 'EUSAGE', + }, 'rejects with usage') }) -t.test('config delete no args', t => { - config.exec(['delete'], (err) => { - t.match(err, { message: '\nUsage: usage instructions' }) - t.end() +t.test('config delete single key', async (t) => { + // location defaults to user, so we work with a userconfig + const home = t.testdir({ + '.npmrc': 'foo=bar\nbar=baz', }) -}) -t.test('config delete key', t => { - t.plan(4) + const sandbox = new Sandbox(t) + await sandbox.run('config', ['delete', 'foo'], { home }) - npm.config.delete = (key, where) => { - t.equal(key, 'foo', 'should delete expected keyword') - t.equal(where, 'user', 'should delete key from user config by default') - } + t.equal(sandbox.config.get('foo'), undefined, 'foo should no longer be set') - npm.config.save = where => { - t.equal(where, 'user', 'should save user config post-delete') - } + const 
contents = await readFile(join(home, '.npmrc'), { encoding: 'utf8' }) + t.not(contents.includes('foo='), 'foo was removed on disk') +}) - config.exec(['delete', 'foo'], (err) => { - t.error(err, 'npm config delete key') +t.test('config delete multiple keys', async (t) => { + const home = t.testdir({ + '.npmrc': 'foo=bar\nbar=baz\nbaz=buz', }) - t.teardown(() => { - delete npm.config.delete - delete npm.config.save - }) -}) + const sandbox = new Sandbox(t) + await sandbox.run('config', ['delete', 'foo', 'bar'], { home }) -t.test('config delete multiple key', t => { - t.plan(6) + t.equal(sandbox.config.get('foo'), undefined, 'foo should no longer be set') + t.equal(sandbox.config.get('bar'), undefined, 'bar should no longer be set') - const expect = [ - 'foo', - 'bar', - ] + const contents = await readFile(join(home, '.npmrc'), { encoding: 'utf8' }) + t.not(contents.includes('foo='), 'foo was removed on disk') + t.not(contents.includes('bar='), 'bar was removed on disk') +}) - npm.config.delete = (key, where) => { - t.equal(key, expect.shift(), 'should delete expected keyword') - t.equal(where, 'user', 'should delete key from user config by default') - } +t.test('config delete key --location=global', async (t) => { + const global = t.testdir({ + npmrc: 'foo=bar\nbar=baz', + }) - npm.config.save = where => { - t.equal(where, 'user', 'should save user config post-delete') - } + const sandbox = new Sandbox(t) + await sandbox.run('config', ['delete', 'foo', '--location=global'], { global }) - config.exec(['delete', 'foo', 'bar'], (err) => { - t.error(err, 'npm config delete keys') - }) + t.equal(sandbox.config.get('foo', 'global'), undefined, 'foo should no longer be set') - t.teardown(() => { - delete npm.config.delete - delete npm.config.save - }) + const contents = await readFile(join(global, 'npmrc'), { encoding: 'utf8' }) + t.not(contents.includes('foo='), 'foo was removed on disk') }) -t.test('config delete key --location=global', t => { - t.plan(4) +t.test('config delete key --global', async (t) => { + const global = t.testdir({ + npmrc: 'foo=bar\nbar=baz', + }) + + const sandbox = new Sandbox(t) + await sandbox.run('config', ['delete', 'foo', '--global'], { global }) - npm.config.delete = (key, where) => { - t.equal(key, 'foo', 'should delete expected keyword from global configs') - t.equal(where, 'global', 'should delete key from global config by default') - } + t.equal(sandbox.config.get('foo', 'global'), undefined, 'foo should no longer be set') - npm.config.save = where => { - t.equal(where, 'global', 'should save global config post-delete') - } + const contents = await readFile(join(global, 'npmrc'), { encoding: 'utf8' }) + t.not(contents.includes('foo='), 'foo was removed on disk') +}) - cliConfig.location = 'global' - config.exec(['delete', 'foo'], (err) => { - t.error(err, 'npm config delete key --location=global') - }) +t.test('config set no args', async (t) => { + const sandbox = new Sandbox(t) - t.teardown(() => { - cliConfig.location = 'user' - delete npm.config.delete - delete npm.config.save - }) + await t.rejects(sandbox.run('config', ['set']), { + code: 'EUSAGE', + }, 'rejects with usage') }) -t.test('config set no args', t => { - config.exec(['set'], (err) => { - t.match(err, { message: '\nUsage: usage instructions' }) - t.end() +t.test('config set key', async (t) => { + const home = t.testdir({ + '.npmrc': 'foo=bar', }) -}) -t.test('config set key', t => { - t.plan(5) + const sandbox = new Sandbox(t, { home }) - npm.config.set = (key, val, where) => { - t.equal(key, 
'foo', 'should set expected key to user config') - t.equal(val, 'bar', 'should set expected value to user config') - t.equal(where, 'user', 'should set key/val in user config by default') - } + await sandbox.run('config', ['set', 'foo']) - npm.config.save = where => { - t.equal(where, 'user', 'should save user config') - } + t.equal(sandbox.config.get('foo'), '', 'set the value for foo') - config.exec(['set', 'foo', 'bar'], (err) => { - t.error(err, 'npm config set key') - }) + const contents = await readFile(join(home, '.npmrc'), { encoding: 'utf8' }) + t.ok(contents.includes('foo='), 'wrote foo to disk') +}) - t.teardown(() => { - delete npm.config.set - delete npm.config.save +t.test('config set key value', async (t) => { + const home = t.testdir({ + '.npmrc': 'foo=bar', }) -}) -t.test('config set key=val', t => { - t.plan(5) + const sandbox = new Sandbox(t, { home }) - npm.config.set = (key, val, where) => { - t.equal(key, 'foo', 'should set expected key to user config') - t.equal(val, 'bar', 'should set expected value to user config') - t.equal(where, 'user', 'should set key/val in user config by default') - } + await sandbox.run('config', ['set', 'foo', 'baz']) - npm.config.save = where => { - t.equal(where, 'user', 'should save user config') - } + t.equal(sandbox.config.get('foo'), 'baz', 'set the value for foo') - config.exec(['set', 'foo=bar'], (err) => { - t.error(err, 'npm config set key') - }) + const contents = await readFile(join(home, '.npmrc'), { encoding: 'utf8' }) + t.ok(contents.includes('foo=baz'), 'wrote foo to disk') +}) - t.teardown(() => { - delete npm.config.set - delete npm.config.save +t.test('config set key=value', async (t) => { + const home = t.testdir({ + '.npmrc': 'foo=bar', }) -}) -t.test('config set multiple keys', t => { - t.plan(11) - - const expect = [ - ['foo', 'bar'], - ['bar', 'baz'], - ['asdf', ''], - ] - const args = ['foo', 'bar', 'bar=baz', 'asdf'] - - npm.config.set = (key, val, where) => { - const [expectKey, expectVal] = expect.shift() - t.equal(key, expectKey, 'should set expected key to user config') - t.equal(val, expectVal, 'should set expected value to user config') - t.equal(where, 'user', 'should set key/val in user config by default') - } + const sandbox = new Sandbox(t, { home }) - npm.config.save = where => { - t.equal(where, 'user', 'should save user config') - } + await sandbox.run('config', ['set', 'foo=baz']) - config.exec(['set', ...args], (err) => { - t.error(err, 'npm config set key') - }) + t.equal(sandbox.config.get('foo'), 'baz', 'set the value for foo') - t.teardown(() => { - delete npm.config.set - delete npm.config.save - }) + const contents = await readFile(join(home, '.npmrc'), { encoding: 'utf8' }) + t.ok(contents.includes('foo=baz'), 'wrote foo to disk') }) -t.test('config set key to empty value', t => { - t.plan(5) - - npm.config.set = (key, val, where) => { - t.equal(key, 'foo', 'should set expected key to user config') - t.equal(val, '', 'should set "" to user config') - t.equal(where, 'user', 'should set key/val in user config by default') - } +t.test('config set key1 value1 key2=value2 key3', async (t) => { + const home = t.testdir({ + '.npmrc': 'foo=bar\nbar=baz\nbaz=foo', + }) - npm.config.save = where => { - t.equal(where, 'user', 'should save user config') - } + const sandbox = new Sandbox(t, { home }) + await sandbox.run('config', ['set', 'foo', 'oof', 'bar=rab', 'baz']) - config.exec(['set', 'foo'], (err) => { - t.error(err, 'npm config set key to empty value') - }) + t.equal(sandbox.config.get('foo'), 
'oof', 'foo was set') + t.equal(sandbox.config.get('bar'), 'rab', 'bar was set') + t.equal(sandbox.config.get('baz'), '', 'baz was set') - t.teardown(() => { - delete npm.config.set - delete npm.config.save - }) + const contents = await readFile(join(home, '.npmrc'), { encoding: 'utf8' }) + t.ok(contents.includes('foo=oof'), 'foo was written to disk') + t.ok(contents.includes('bar=rab'), 'bar was written to disk') + t.ok(contents.includes('baz='), 'baz was written to disk') }) -t.test('config set invalid key', t => { - t.plan(3) +t.test('config set invalid key logs warning', async (t) => { + const sandbox = new Sandbox(t) - const npmConfigValidate = npm.config.validate - npm.config.save = () => null - npm.config.set = () => null - npm.config.validate = () => false - npm.log.warn = (title, msg) => { - t.equal(title, 'config', 'should warn with expected title') - t.equal(msg, 'omitting invalid config values', 'should use expected msg') - } - t.teardown(() => { - npm.config.validate = npmConfigValidate - delete npm.config.save - delete npm.config.set - npm.log.warn = () => null - }) - - config.exec(['set', 'foo', 'bar'], (err) => { - t.error(err, 'npm config set invalid key') - }) + // this doesn't reject, it only logs a warning + await sandbox.run('config', ['set', 'access=foo']) + t.match(sandbox.logs.warn, [ + ['invalid config', 'access="foo"', `set in ${join(sandbox.home, '.npmrc')}`], + ], 'logged warning') }) -t.test('config set key --location=global', t => { - t.plan(5) +t.test('config set key=value --location=global', async (t) => { + const global = t.testdir({ + npmrc: 'foo=bar\nbar=baz', + }) - npm.config.set = (key, val, where) => { - t.equal(key, 'foo', 'should set expected key to global config') - t.equal(val, 'bar', 'should set expected value to global config') - t.equal(where, 'global', 'should set key/val in global config') - } + const sandbox = new Sandbox(t, { global }) + await sandbox.run('config', ['set', 'foo=buzz', '--location=global']) - npm.config.save = where => { - t.equal(where, 'global', 'should save global config') - } + t.equal(sandbox.config.get('foo', 'global'), 'buzz', 'foo should be set') - cliConfig.location = 'global' - config.exec(['set', 'foo', 'bar'], (err) => { - t.error(err, 'npm config set key --location=global') - }) + const contents = await readFile(join(global, 'npmrc'), { encoding: 'utf8' }) + t.not(contents.includes('foo=buzz'), 'foo was saved on disk') +}) - t.teardown(() => { - cliConfig.location = 'user' - delete npm.config.set - delete npm.config.save +t.test('config set key=value --global', async (t) => { + const global = t.testdir({ + npmrc: 'foo=bar\nbar=baz', }) -}) -t.test('config get no args', t => { - t.plan(2) + const sandbox = new Sandbox(t, { global }) + await sandbox.run('config', ['set', 'foo=buzz', '--global']) - npm.config.find = () => 'cli' - result = '' - t.teardown(() => { - result = '' - delete npm.config.find - }) + t.equal(sandbox.config.get('foo', 'global'), 'buzz', 'foo should be set') - config.exec(['get'], (err) => { - t.error(err, 'npm config get no args') - t.matchSnapshot(result, 'should list configs on config get no args') - }) + const contents = await readFile(join(global, 'npmrc'), { encoding: 'utf8' }) + t.not(contents.includes('foo=buzz'), 'foo was saved on disk') }) -t.test('config get key', t => { - t.plan(2) +t.test('config get no args', async (t) => { + const sandbox = new Sandbox(t) - const npmConfigGet = npm.config.get - npm.config.get = (key) => { - t.equal(key, 'foo', 'should use expected key') - return 
'bar' - } + await sandbox.run('config', ['get']) + const getOutput = sandbox.output - npm.config.save = where => { - throw new Error('should not save') - } + sandbox.reset() - config.exec(['get', 'foo'], (err) => { - t.error(err, 'npm config get key') - }) + await sandbox.run('config', ['list']) + const listOutput = sandbox.output - t.teardown(() => { - npm.config.get = npmConfigGet - delete npm.config.save - }) + t.equal(listOutput, getOutput, 'get with no args outputs list') }) -t.test('config get multiple keys', t => { - t.plan(4) - - const expect = [ - 'foo', - 'bar', - ] +t.test('config get single key', async (t) => { + const sandbox = new Sandbox(t) - const npmConfigGet = npm.config.get - npm.config.get = (key) => { - t.equal(key, expect.shift(), 'should use expected key') - return 'asdf' - } - - npm.config.save = where => { - throw new Error('should not save') - } + await sandbox.run('config', ['get', 'node-version']) + t.equal(sandbox.output, sandbox.config.get('node-version'), 'should get the value') +}) - config.exec(['get', 'foo', 'bar'], (err) => { - t.error(err, 'npm config get multiple keys') - t.equal(result, 'foo=asdf\nbar=asdf') - }) +t.test('config get multiple keys', async (t) => { + const sandbox = new Sandbox(t) - t.teardown(() => { - result = '' - npm.config.get = npmConfigGet - delete npm.config.save - }) + await sandbox.run('config', ['get', 'node-version', 'npm-version']) + t.ok(sandbox.output.includes(`node-version=${sandbox.config.get('node-version')}`), 'outputs node-version') + t.ok(sandbox.output.includes(`npm-version=${sandbox.config.get('npm-version')}`), 'outputs npm-version') }) -t.test('config get private key', t => { - config.exec(['get', '//private-reg.npmjs.org/:_authThoken'], (err) => { - t.match( - err, - /The \/\/private-reg.npmjs.org\/:_authThoken option is protected, and cannot be retrieved in this way/, - 'should throw unable to retrieve error' - ) - t.end() - }) +t.test('config get private key', async (t) => { + const sandbox = new Sandbox(t) + + await t.rejects(sandbox.run('config', ['get', '_authToken']), '_authToken is protected', 'rejects with protected string') }) -t.test('config edit', t => { - t.plan(12) - const npmrc = `//registry.npmjs.org/:_authToken=0000000 -init.author.name=Foo -sign-git-commit=true` - npm.config.data.set('user', { - source: '~/.npmrc', - }) - npm.config.save = async where => { - t.equal(where, 'user', 'should save to user config by default') - } - const editMocks = { - ...mocks, - 'mkdirp-infer-owner': async () => null, - fs: { - readFile (path, encoding, cb) { - cb(null, npmrc) - }, - writeFile (file, data, encoding, cb) { - t.equal(file, '~/.npmrc', 'should save to expected file location') - t.matchSnapshot(data, 'should write config file') - cb() - }, - }, - child_process: { - spawn: (bin, args) => { - t.equal(bin, 'vi', 'should use default editor') - t.strictSame(args, ['~/.npmrc'], 'should match user source data') - const ee = new EventEmitter() - process.nextTick(() => { - ee.emit('exit', 0) - }) - return ee - }, - }, - } - const Config = t.mock('../../lib/config.js', editMocks) - const config = new Config(npm) - - config.exec(['edit'], (err) => { - t.error(err, 'npm config edit') - - // test no config file result - editMocks.fs.readFile = (p, e, cb) => { - cb(new Error('ERR')) - } - const Config = t.mock('../../lib/config.js', editMocks) - const config = new Config(npm) - config.exec(['edit'], (err) => { - t.error(err, 'npm config edit') - }) +t.test('config edit', async (t) => { + const home = t.testdir({ + 
'.npmrc': 'foo=bar\nbar=baz', }) t.teardown(() => { - npm.config.data.delete('user') - delete npm.config.save + spawk.clean() }) -}) -t.test('config edit --location=global', t => { - t.plan(6) + const EDITOR = 'vim' + const editor = spawk.spawn(EDITOR).exit(0) - cliConfig.location = 'global' - const npmrc = 'init.author.name=Foo' - npm.config.data.set('global', { - source: '/etc/npmrc', - }) - npm.config.save = async where => { - t.equal(where, 'global', 'should save to global config') - } - const editMocks = { - ...mocks, - 'mkdirp-infer-owner': async () => null, - fs: { - readFile (path, encoding, cb) { - cb(null, npmrc) - }, - writeFile (file, data, encoding, cb) { - t.equal(file, '/etc/npmrc', 'should save to global file location') - t.matchSnapshot(data, 'should write global config file') - cb() - }, - }, - child_process: { - spawn: (bin, args, cb) => { - t.equal(bin, 'vi', 'should use default editor') - t.strictSame(args, ['/etc/npmrc'], 'should match global source data') - const ee = new EventEmitter() - process.nextTick(() => { - ee.emit('exit', 137) - }) - return ee - }, - }, - } - const Config = t.mock('../../lib/config.js', editMocks) - const config = new Config(npm) - config.exec(['edit'], (err) => { - t.match(err, /exited with code: 137/, 'propagated exit code from editor') - }) + const sandbox = new Sandbox(t, { home }) + sandbox.process.env.EDITOR = EDITOR + await sandbox.run('config', ['edit']) + + t.ok(editor.called, 'editor was spawned') + t.same(editor.calledWith.args, [join(sandbox.home, '.npmrc')], 'editor opened the user config file') + + const contents = await readFile(join(home, '.npmrc'), { encoding: 'utf8' }) + t.ok(contents.includes('foo=bar'), 'kept foo') + t.ok(contents.includes('bar=baz'), 'kept bar') + t.ok(contents.includes('shown below with default values'), 'appends defaults to file') +}) +t.test('config edit - editor exits non-0', async (t) => { t.teardown(() => { - cliConfig.location = 'user' - npm.config.data.delete('user') - delete npm.config.save + spawk.clean() }) -}) -t.test('completion', t => { - const { completion } = config + const EDITOR = 'vim' + const editor = spawk.spawn(EDITOR).exit(1) - const testComp = (argv, expect) => { - t.resolveMatch(completion({ conf: { argv: { remain: argv } } }), expect, argv.join(' ')) - } + const sandbox = new Sandbox(t) + sandbox.process.env.EDITOR = EDITOR + await t.rejects(sandbox.run('config', ['edit']), { + message: 'editor process exited with code: 1', + }, 'rejects with error about editor code') - testComp(['npm', 'foo'], []) - testComp(['npm', 'config'], ['get', 'set', 'delete', 'ls', 'rm', 'edit', 'list']) - testComp(['npm', 'config', 'set', 'foo'], []) + t.ok(editor.called, 'editor was spawned') + t.same(editor.calledWith.args, [join(sandbox.home, '.npmrc')], 'editor opened the user config file') +}) - const possibleConfigKeys = [...Object.keys(definitions)] - testComp(['npm', 'config', 'get'], possibleConfigKeys) - testComp(['npm', 'config', 'set'], possibleConfigKeys) - testComp(['npm', 'config', 'delete'], possibleConfigKeys) - testComp(['npm', 'config', 'rm'], possibleConfigKeys) - testComp(['npm', 'config', 'edit'], []) - testComp(['npm', 'config', 'list'], []) - testComp(['npm', 'config', 'ls'], []) +t.test('completion', async (t) => { + const sandbox = new Sandbox(t) - const partial = completion({conf: { argv: { remain: ['npm', 'config'] } }, partialWord: 'l'}) - t.resolveMatch(partial, ['get', 'set', 'delete', 'ls', 'rm', 'edit'], 'npm config') + let allKeys + const testComp = async (argv, 
expect) => { + t.match(await sandbox.complete('config', argv), expect, argv.join(' ')) + if (!allKeys) + allKeys = Object.keys(sandbox.config.definitions) + sandbox.reset() + } - t.end() + await testComp([], ['get', 'set', 'delete', 'ls', 'rm', 'edit', 'list']) + await testComp(['set', 'foo'], []) + await testComp(['get'], allKeys) + await testComp(['set'], allKeys) + await testComp(['delete'], allKeys) + await testComp(['rm'], allKeys) + await testComp(['edit'], []) + await testComp(['list'], []) + await testComp(['ls'], []) + + const getCommand = await sandbox.complete('get') + t.match(getCommand, allKeys, 'also works for just npm get') + sandbox.reset() + + const partial = await sandbox.complete('config', 'l') + t.match(partial, ['get', 'set', 'delete', 'ls', 'rm', 'edit'], 'and works on partials') }) diff --git a/deps/npm/test/lib/publish.js b/deps/npm/test/lib/publish.js index 4aa3e5592751e0..6e0075835c2691 100644 --- a/deps/npm/test/lib/publish.js +++ b/deps/npm/test/lib/publish.js @@ -9,6 +9,14 @@ const fs = require('fs') const log = require('npmlog') log.level = 'silent' +const cleanGithead = (result) => { + return result.map((r) => { + if (r.gitHead) + r.gitHead = '{GITHEAD}' + + return r + }) +} const {definitions} = require('../../lib/utils/config') const defaults = Object.entries(definitions).reduce((defaults, [key, def]) => { defaults[key] = def.default @@ -581,7 +589,7 @@ t.test('workspaces', (t) => { log.level = 'info' publish.execWorkspaces([], [], (err) => { t.notOk(err) - t.matchSnapshot(publishes, 'should publish all workspaces') + t.matchSnapshot(cleanGithead(publishes), 'should publish all workspaces') t.matchSnapshot(outputs, 'should output all publishes') t.end() }) @@ -591,7 +599,7 @@ t.test('workspaces', (t) => { log.level = 'info' publish.execWorkspaces([], ['workspace-a'], (err) => { t.notOk(err) - t.matchSnapshot(publishes, 'should publish given workspace') + t.matchSnapshot(cleanGithead(publishes), 'should publish given workspace') t.matchSnapshot(outputs, 'should output one publish') t.end() }) @@ -610,7 +618,7 @@ t.test('workspaces', (t) => { npm.config.set('json', true) publish.execWorkspaces([], [], (err) => { t.notOk(err) - t.matchSnapshot(publishes, 'should publish all workspaces') + t.matchSnapshot(cleanGithead(publishes), 'should publish all workspaces') t.matchSnapshot(outputs, 'should output all publishes as json') t.end() }) @@ -699,7 +707,7 @@ t.test('private workspaces', (t) => { npm.color = true publish.execWorkspaces([], [], (err) => { t.notOk(err) - t.matchSnapshot(publishes, 'should publish all non-private workspaces') + t.matchSnapshot(cleanGithead(publishes), 'should publish all non-private workspaces') t.matchSnapshot(outputs, 'should output all publishes') npm.color = false t.end() @@ -726,7 +734,7 @@ t.test('private workspaces', (t) => { publish.execWorkspaces([], [], (err) => { t.notOk(err) - t.matchSnapshot(publishes, 'should publish all non-private workspaces') + t.matchSnapshot(cleanGithead(publishes), 'should publish all non-private workspaces') t.matchSnapshot(outputs, 'should output all publishes') t.end() }) diff --git a/deps/npm/test/lib/utils/config/definitions.js b/deps/npm/test/lib/utils/config/definitions.js index 63d9bbd195ab28..65193020d050c5 100644 --- a/deps/npm/test/lib/utils/config/definitions.js +++ b/deps/npm/test/lib/utils/config/definitions.js @@ -812,19 +812,44 @@ t.test('location', t => { location: 'user', } const flat = {} + // the global flattener is what sets location, so run that + 
definitions.global.flatten('global', obj, flat) definitions.location.flatten('location', obj, flat) // global = true sets location in both places to global - t.strictSame(flat, { location: 'global' }) - t.strictSame(obj, { global: true, location: 'global' }) + t.strictSame(flat, { global: true, location: 'global' }) + // location here is still 'user' because flattening doesn't modify the object + t.strictSame(obj, { global: true, location: 'user' }) obj.global = false obj.location = 'user' delete flat.global delete flat.location + definitions.global.flatten('global', obj, flat) definitions.location.flatten('location', obj, flat) // global = false leaves location unaltered - t.strictSame(flat, { location: 'user' }) + t.strictSame(flat, { global: false, location: 'user' }) t.strictSame(obj, { global: false, location: 'user' }) t.end() }) + +t.test('package-lock-only', t => { + const obj = { + 'package-lock': false, + 'package-lock-only': true, + } + const flat = {} + + definitions['package-lock-only'].flatten('package-lock-only', obj, flat) + definitions['package-lock'].flatten('package-lock', obj, flat) + t.strictSame(flat, { packageLock: true, packageLockOnly: true }) + + obj['package-lock-only'] = false + delete flat.packageLock + delete flat.packageLockOnly + + definitions['package-lock-only'].flatten('package-lock-only', obj, flat) + definitions['package-lock'].flatten('package-lock', obj, flat) + t.strictSame(flat, { packageLock: false, packageLockOnly: false }) + t.end() +})
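
Editorial note (not part of the patch): the new `location` and `package-lock-only` tests above assert a "run the default flattener, then derive a second flag" pattern. Below is a minimal, self-contained sketch of that pattern under stated assumptions — `baseFlatten` and `camelCase` are placeholder names for illustration only, not npm internals.

// sketch: how a specialised flattener can compose with a default one,
// mutating only flatOptions and leaving the source object untouched
const camelCase = (key) => key.replace(/-([a-z])/g, (_, c) => c.toUpperCase())

// default behaviour: copy the option onto flatOptions under its camelCase name
const baseFlatten = (key, obj, flatOptions) => {
  flatOptions[camelCase(key)] = obj[key]
}

const sketchDefinitions = {
  global: {
    flatten: (key, obj, flatOptions) => {
      baseFlatten(key, obj, flatOptions)
      if (flatOptions.global)
        flatOptions.location = 'global'
    },
  },
  'package-lock-only': { flatten: baseFlatten },
  'package-lock': {
    flatten: (key, obj, flatOptions) => {
      baseFlatten(key, obj, flatOptions)
      if (flatOptions.packageLockOnly)
        flatOptions.packageLock = true
    },
  },
}

// mirrors the shape of the assertions in the tests above
const flat = {}
sketchDefinitions.global.flatten('global', { global: true, location: 'user' }, flat)
console.log(flat) // { global: true, location: 'global' }

const flat2 = {}
const obj2 = { 'package-lock': false, 'package-lock-only': true }
sketchDefinitions['package-lock-only'].flatten('package-lock-only', obj2, flat2)
sketchDefinitions['package-lock'].flatten('package-lock', obj2, flat2)
console.log(flat2) // { packageLockOnly: true, packageLock: true }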