Mirror of https://github.com/nodejs/node.git (synced 2025-08-15 13:48:44 +02:00)

commit 9843885e93 (parent a749c1f20c)

deps: upgrade npm to 8.1.1

PR-URL: https://github.com/nodejs/node/pull/40554
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Rich Trott <rtrott@gmail.com>
Reviewed-By: Richard Lau <rlau@redhat.com>

34 changed files with 224 additions and 1556 deletions
deps/npm/docs/output/commands/npm-ls.html (vendored) — 2 changes

@@ -159,7 +159,7 @@ tree at all, use <a href="../commands/npm-explain.html"><code>npm explain</code>
 the results to only the paths to the packages named. Note that nested
 packages will <em>also</em> show the paths to the specified packages. For
 example, running <code>npm ls promzard</code> in npm’s source tree will show:</p>
-<pre lang="bash"><code>npm@8.1.0 /path/to/npm
+<pre lang="bash"><code>npm@8.1.1 /path/to/npm
 └─┬ init-package-json@0.0.4
 └── promzard@0.1.5
 </code></pre>
deps/npm/docs/output/commands/npm.html (vendored) — 2 changes

@@ -148,7 +148,7 @@ npm command-line interface
 <pre lang="bash"><code>npm <command> [args]
 </code></pre>
 <h3 id="version">Version</h3>
-<p>8.1.0</p>
+<p>8.1.1</p>
 <h3 id="description">Description</h3>
 <p>npm is the package manager for the Node JavaScript platform. It puts
 modules in place so that node can find them, and manages dependency
deps/npm/man/man1/npm-ls.1 (vendored) — 2 changes

@@ -26,7 +26,7 @@ example, running \fBnpm ls promzard\fP in npm's source tree will show:
 .P
 .RS 2
 .nf
-npm@8\.1\.0 /path/to/npm
+npm@8\.1\.1 /path/to/npm
 └─┬ init\-package\-json@0\.0\.4
 └── promzard@0\.1\.5
 .fi
deps/npm/man/man1/npm.1 (vendored) — 2 changes

@@ -10,7 +10,7 @@ npm <command> [args]
 .RE
 .SS Version
 .P
-8\.1\.0
+8\.1\.1
 .SS Description
 .P
 npm is the package manager for the Node JavaScript platform\. It puts
deps/npm/node_modules/@npmcli/arborist/lib/can-place-dep.js (generated, vendored) — 15 changes

@@ -145,7 +145,12 @@ class CanPlaceDep {
       return CONFLICT
     }

-    if (targetEdge && !dep.satisfies(targetEdge) && targetEdge !== this.edge) {
+    // skip this test if there's a current node, because we might be able
+    // to dedupe against it anyway
+    if (!current &&
+      targetEdge &&
+      !dep.satisfies(targetEdge) &&
+      targetEdge !== this.edge) {
       return CONFLICT
     }

@@ -167,10 +172,10 @@
     const { version: newVer } = dep
     const tryReplace = curVer && newVer && semver.gte(newVer, curVer)
     if (tryReplace && dep.canReplace(current)) {
-      /* XXX-istanbul ignore else - It's extremely rare that a replaceable
-       * node would be a conflict, if the current one wasn't a conflict,
-       * but it is theoretically possible if peer deps are pinned. In
-       * that case we treat it like any other conflict, and keep trying */
+      // It's extremely rare that a replaceable node would be a conflict, if
+      // the current one wasn't a conflict, but it is theoretically possible
+      // if peer deps are pinned. In that case we treat it like any other
+      // conflict, and keep trying.
       const cpp = this.canPlacePeers(REPLACE)
       if (cpp !== CONFLICT) {
         return cpp
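In plain terms, the Arborist hunk above narrows when a target-edge mismatch is reported: the check is now skipped whenever a `current` node exists, because the resolver may still be able to dedupe against it. A minimal, illustrative reduction of the new guard (not Arborist's real API surface; `current`, `dep`, `targetEdge`, and `edge` stand in for the class's internals):

    // Sketch only: report a CONFLICT for the target edge solely when there is
    // no existing node ("current") that the dependency could dedupe against.
    function conflictsWithTargetEdge ({ current, dep, targetEdge, edge }) {
      return Boolean(
        !current &&                   // nothing already placed to dedupe against
        targetEdge &&                 // the target already has an edge for this name
        !dep.satisfies(targetEdge) && // ...whose range the incoming node fails
        targetEdge !== edge           // ...and it is not the edge being resolved
      )
    }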
deps/npm/node_modules/@npmcli/arborist/package.json (generated, vendored) — 2 changes

@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/arborist",
-  "version": "4.0.1",
+  "version": "4.0.2",
   "description": "Manage node_modules trees",
   "dependencies": {
     "@isaacs/string-locale-compare": "^1.0.1",
deps/npm/node_modules/node-gyp/.github/workflows/tests.yml (generated, vendored) — 2 changes

@@ -9,7 +9,7 @@ jobs:
       max-parallel: 15
       matrix:
         node: [12.x, 14.x, 16.x]
-        python: [3.6, 3.8, 3.9]
+        python: ["3.6", "3.8", "3.10"]
         os: [macos-latest, ubuntu-latest, windows-latest]
     runs-on: ${{ matrix.os }}
     steps:
deps/npm/node_modules/node-gyp/CHANGELOG.md (generated, vendored) — 18 additions

@@ -1,5 +1,23 @@
 # Changelog

+## [8.3.0](https://www.github.com/nodejs/node-gyp/compare/v8.2.0...v8.3.0) (2021-10-11)
+
+
+### Features
+
+* **gyp:** update gyp to v0.10.0 ([#2521](https://www.github.com/nodejs/node-gyp/issues/2521)) ([5585792](https://www.github.com/nodejs/node-gyp/commit/5585792922a97f0629f143c560efd74470eae87f))
+
+
+### Tests
+
+* Python 3.10 was release on Oct. 4th ([#2504](https://www.github.com/nodejs/node-gyp/issues/2504)) ([0a67dcd](https://www.github.com/nodejs/node-gyp/commit/0a67dcd1307f3560495219253241eafcbf4e2a69))
+
+
+### Miscellaneous
+
+* **deps:** bump make-fetch-happen from 8.0.14 to 9.1.0 ([b05b4fe](https://www.github.com/nodejs/node-gyp/commit/b05b4fe9891f718f40edf547e9b50e982826d48a))
+* refactor the creation of config.gypi file ([f2ad87f](https://www.github.com/nodejs/node-gyp/commit/f2ad87ff65f98ad66daa7225ad59d99b759a2b07))
+
 ## [8.2.0](https://www.github.com/nodejs/node-gyp/compare/v8.1.0...v8.2.0) (2021-08-23)
deps/npm/node_modules/node-gyp/gyp/CHANGELOG.md (generated, vendored) — 7 additions

@@ -1,5 +1,12 @@
 # Changelog

+## [0.10.0](https://www.github.com/nodejs/gyp-next/compare/v0.9.6...v0.10.0) (2021-08-26)
+
+
+### Features
+
+* **msvs:** add support for Visual Studio 2022 ([#124](https://www.github.com/nodejs/gyp-next/issues/124)) ([4bd9215](https://www.github.com/nodejs/gyp-next/commit/4bd9215c44d300f06e916aec1d6327c22b78272d))
+
 ### [0.9.6](https://www.github.com/nodejs/gyp-next/compare/v0.9.5...v0.9.6) (2021-08-23)
deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py (generated, vendored) — 17 changes

@@ -269,6 +269,18 @@ def _CreateVersion(name, path, sdk_based=False):
     if path:
         path = os.path.normpath(path)
     versions = {
+        "2022": VisualStudioVersion(
+            "2022",
+            "Visual Studio 2022",
+            solution_version="12.00",
+            project_version="17.0",
+            flat_sln=False,
+            uses_vcxproj=True,
+            path=path,
+            sdk_based=sdk_based,
+            default_toolset="v143",
+            compatible_sdks=["v8.1", "v10.0"],
+        ),
         "2019": VisualStudioVersion(
             "2019",
             "Visual Studio 2019",
@@ -436,6 +448,7 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
       2015 - Visual Studio 2015 (14)
       2017 - Visual Studio 2017 (15)
       2019 - Visual Studio 2019 (16)
+      2022 - Visual Studio 2022 (17)
     Where (e) is e for express editions of MSVS and blank otherwise.
     """
     version_to_year = {
@@ -447,6 +460,7 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
         "14.0": "2015",
         "15.0": "2017",
         "16.0": "2019",
+        "17.0": "2022",
     }
     versions = []
     for version in versions_to_check:
@@ -522,7 +536,7 @@ def SelectVisualStudioVersion(version="auto", allow_fallback=True):
     if version == "auto":
        version = os.environ.get("GYP_MSVS_VERSION", "auto")
     version_map = {
-        "auto": ("16.0", "15.0", "14.0", "12.0", "10.0", "9.0", "8.0", "11.0"),
+        "auto": ("17.0", "16.0", "15.0", "14.0", "12.0", "10.0", "9.0", "8.0", "11.0"),
         "2005": ("8.0",),
         "2005e": ("8.0",),
         "2008": ("9.0",),
@@ -536,6 +550,7 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
         "2015": ("14.0",),
         "2017": ("15.0",),
         "2019": ("16.0",),
+        "2022": ("17.0",),
     }
     override_path = os.environ.get("GYP_MSVS_OVERRIDE_PATH")
     if override_path:
deps/npm/node_modules/node-gyp/gyp/setup.py (generated, vendored) — 2 changes

@@ -15,7 +15,7 @@ with open(path.join(here, "README.md")) as in_file:

 setup(
     name="gyp-next",
-    version="0.9.6",
+    version="0.10.0",
     description="A fork of the GYP build system for use in the Node.js projects",
     long_description=long_description,
     long_description_content_type="text/markdown",
deps/npm/node_modules/node-gyp/lib/configure.js (generated, vendored) — 102 changes

@@ -7,6 +7,7 @@ const os = require('os')
|
|||
const processRelease = require('./process-release')
|
||||
const win = process.platform === 'win32'
|
||||
const findNodeDirectory = require('./find-node-directory')
|
||||
const createConfigGypi = require('./create-config-gypi')
|
||||
const msgFormat = require('util').format
|
||||
var findPython = require('./find-python')
|
||||
if (win) {
|
||||
|
@ -92,107 +93,14 @@ function configure (gyp, argv, callback) {
|
|||
if (err) {
|
||||
return callback(err)
|
||||
}
|
||||
|
||||
var configFilename = 'config.gypi'
|
||||
var configPath = path.resolve(buildDir, configFilename)
|
||||
|
||||
log.verbose('build/' + configFilename, 'creating config file')
|
||||
|
||||
var config = process.config ? JSON.parse(JSON.stringify(process.config)) : {}
|
||||
var defaults = config.target_defaults
|
||||
var variables = config.variables
|
||||
|
||||
// default "config.variables"
|
||||
if (!variables) {
|
||||
variables = config.variables = {}
|
||||
}
|
||||
|
||||
// default "config.defaults"
|
||||
if (!defaults) {
|
||||
defaults = config.target_defaults = {}
|
||||
}
|
||||
|
||||
// don't inherit the "defaults" from node's `process.config` object.
|
||||
// doing so could cause problems in cases where the `node` executable was
|
||||
// compiled on a different machine (with different lib/include paths) than
|
||||
// the machine where the addon is being built to
|
||||
defaults.cflags = []
|
||||
defaults.defines = []
|
||||
defaults.include_dirs = []
|
||||
defaults.libraries = []
|
||||
|
||||
// set the default_configuration prop
|
||||
if ('debug' in gyp.opts) {
|
||||
defaults.default_configuration = gyp.opts.debug ? 'Debug' : 'Release'
|
||||
}
|
||||
|
||||
if (!defaults.default_configuration) {
|
||||
defaults.default_configuration = 'Release'
|
||||
}
|
||||
|
||||
// set the target_arch variable
|
||||
variables.target_arch = gyp.opts.arch || process.arch || 'ia32'
|
||||
if (variables.target_arch === 'arm64') {
|
||||
defaults.msvs_configuration_platform = 'ARM64'
|
||||
defaults.xcode_configuration_platform = 'arm64'
|
||||
}
|
||||
|
||||
// set the node development directory
|
||||
variables.nodedir = nodeDir
|
||||
|
||||
// disable -T "thin" static archives by default
|
||||
variables.standalone_static_library = gyp.opts.thin ? 0 : 1
|
||||
|
||||
if (win) {
|
||||
if (process.platform === 'win32') {
|
||||
process.env.GYP_MSVS_VERSION = Math.min(vsInfo.versionYear, 2015)
|
||||
process.env.GYP_MSVS_OVERRIDE_PATH = vsInfo.path
|
||||
defaults.msbuild_toolset = vsInfo.toolset
|
||||
if (vsInfo.sdk) {
|
||||
defaults.msvs_windows_target_platform_version = vsInfo.sdk
|
||||
}
|
||||
if (variables.target_arch === 'arm64') {
|
||||
if (vsInfo.versionMajor > 15 ||
|
||||
(vsInfo.versionMajor === 15 && vsInfo.versionMajor >= 9)) {
|
||||
defaults.msvs_enable_marmasm = 1
|
||||
} else {
|
||||
log.warn('Compiling ARM64 assembly is only available in\n' +
|
||||
'Visual Studio 2017 version 15.9 and above')
|
||||
}
|
||||
}
|
||||
variables.msbuild_path = vsInfo.msBuild
|
||||
}
|
||||
|
||||
// loop through the rest of the opts and add the unknown ones as variables.
|
||||
// this allows for module-specific configure flags like:
|
||||
//
|
||||
// $ node-gyp configure --shared-libxml2
|
||||
Object.keys(gyp.opts).forEach(function (opt) {
|
||||
if (opt === 'argv') {
|
||||
return
|
||||
}
|
||||
if (opt in gyp.configDefs) {
|
||||
return
|
||||
}
|
||||
variables[opt.replace(/-/g, '_')] = gyp.opts[opt]
|
||||
createConfigGypi({ gyp, buildDir, nodeDir, vsInfo }, (err, configPath) => {
|
||||
configs.push(configPath)
|
||||
findConfigs(err)
|
||||
})
|
||||
|
||||
// ensures that any boolean values from `process.config` get stringified
|
||||
function boolsToString (k, v) {
|
||||
if (typeof v === 'boolean') {
|
||||
return String(v)
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
log.silly('build/' + configFilename, config)
|
||||
|
||||
// now write out the config.gypi file to the build/ dir
|
||||
var prefix = '# Do not edit. File was generated by node-gyp\'s "configure" step'
|
||||
|
||||
var json = JSON.stringify(config, boolsToString, 2)
|
||||
log.verbose('build/' + configFilename, 'writing out config file: %s', configPath)
|
||||
configs.push(configPath)
|
||||
fs.writeFile(configPath, [prefix, json, ''].join('\n'), findConfigs)
|
||||
}
|
||||
|
||||
function findConfigs (err) {
|
||||
|
|
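The net effect of the configure.js hunk above is that the inline assembly of the config object moves into the new lib/create-config-gypi.js (added below), leaving configure() with a single callback-style call. A minimal sketch of that call shape; `gyp`, `vsInfo`, and the directories here are illustrative stand-ins, not values from this diff:

    // Assumed to run from inside node-gyp's lib/ directory, as configure.js does.
    const createConfigGypi = require('./create-config-gypi')

    const gyp = { opts: { arch: 'x64', thin: false }, configDefs: {} } // illustrative stub
    const vsInfo = {} // only consulted on win32

    createConfigGypi({ gyp, buildDir: 'build', nodeDir: '/path/to/node', vsInfo }, (err, configPath) => {
      if (err) throw err
      // configure() pushes configPath (e.g. build/config.gypi) and then calls findConfigs(err)
      console.log('wrote', configPath)
    })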
deps/npm/node_modules/node-gyp/lib/create-config-gypi.js (generated, vendored, new file) — 119 additions

@@ -0,0 +1,119 @@
|
|||
'use strict'
|
||||
|
||||
const fs = require('graceful-fs')
|
||||
const log = require('npmlog')
|
||||
const path = require('path')
|
||||
|
||||
function getBaseConfigGypi () {
|
||||
const config = JSON.parse(JSON.stringify(process.config))
|
||||
if (!config.target_defaults) {
|
||||
config.target_defaults = {}
|
||||
}
|
||||
if (!config.variables) {
|
||||
config.variables = {}
|
||||
}
|
||||
return config
|
||||
}
|
||||
|
||||
function getCurrentConfigGypi ({ gyp, nodeDir, vsInfo }) {
|
||||
const config = getBaseConfigGypi()
|
||||
const defaults = config.target_defaults
|
||||
const variables = config.variables
|
||||
|
||||
// don't inherit the "defaults" from the base config.gypi.
|
||||
// doing so could cause problems in cases where the `node` executable was
|
||||
// compiled on a different machine (with different lib/include paths) than
|
||||
// the machine where the addon is being built to
|
||||
defaults.cflags = []
|
||||
defaults.defines = []
|
||||
defaults.include_dirs = []
|
||||
defaults.libraries = []
|
||||
|
||||
// set the default_configuration prop
|
||||
if ('debug' in gyp.opts) {
|
||||
defaults.default_configuration = gyp.opts.debug ? 'Debug' : 'Release'
|
||||
}
|
||||
|
||||
if (!defaults.default_configuration) {
|
||||
defaults.default_configuration = 'Release'
|
||||
}
|
||||
|
||||
// set the target_arch variable
|
||||
variables.target_arch = gyp.opts.arch || process.arch || 'ia32'
|
||||
if (variables.target_arch === 'arm64') {
|
||||
defaults.msvs_configuration_platform = 'ARM64'
|
||||
defaults.xcode_configuration_platform = 'arm64'
|
||||
}
|
||||
|
||||
// set the node development directory
|
||||
variables.nodedir = nodeDir
|
||||
|
||||
// disable -T "thin" static archives by default
|
||||
variables.standalone_static_library = gyp.opts.thin ? 0 : 1
|
||||
|
||||
if (process.platform === 'win32') {
|
||||
defaults.msbuild_toolset = vsInfo.toolset
|
||||
if (vsInfo.sdk) {
|
||||
defaults.msvs_windows_target_platform_version = vsInfo.sdk
|
||||
}
|
||||
if (variables.target_arch === 'arm64') {
|
||||
if (vsInfo.versionMajor > 15 ||
|
||||
(vsInfo.versionMajor === 15 && vsInfo.versionMajor >= 9)) {
|
||||
defaults.msvs_enable_marmasm = 1
|
||||
} else {
|
||||
log.warn('Compiling ARM64 assembly is only available in\n' +
|
||||
'Visual Studio 2017 version 15.9 and above')
|
||||
}
|
||||
}
|
||||
variables.msbuild_path = vsInfo.msBuild
|
||||
}
|
||||
|
||||
// loop through the rest of the opts and add the unknown ones as variables.
|
||||
// this allows for module-specific configure flags like:
|
||||
//
|
||||
// $ node-gyp configure --shared-libxml2
|
||||
Object.keys(gyp.opts).forEach(function (opt) {
|
||||
if (opt === 'argv') {
|
||||
return
|
||||
}
|
||||
if (opt in gyp.configDefs) {
|
||||
return
|
||||
}
|
||||
variables[opt.replace(/-/g, '_')] = gyp.opts[opt]
|
||||
})
|
||||
|
||||
return config
|
||||
}
|
||||
|
||||
function createConfigGypi ({ gyp, buildDir, nodeDir, vsInfo }, callback) {
|
||||
const configFilename = 'config.gypi'
|
||||
const configPath = path.resolve(buildDir, configFilename)
|
||||
|
||||
log.verbose('build/' + configFilename, 'creating config file')
|
||||
|
||||
const config = getCurrentConfigGypi({ gyp, nodeDir, vsInfo })
|
||||
|
||||
// ensures that any boolean values in config.gypi get stringified
|
||||
function boolsToString (k, v) {
|
||||
if (typeof v === 'boolean') {
|
||||
return String(v)
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
log.silly('build/' + configFilename, config)
|
||||
|
||||
// now write out the config.gypi file to the build/ dir
|
||||
const prefix = '# Do not edit. File was generated by node-gyp\'s "configure" step'
|
||||
|
||||
const json = JSON.stringify(config, boolsToString, 2)
|
||||
log.verbose('build/' + configFilename, 'writing out config file: %s', configPath)
|
||||
fs.writeFile(configPath, [prefix, json, ''].join('\n'), (err) => {
|
||||
callback(err, configPath)
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = createConfigGypi
|
||||
module.exports.test = {
|
||||
getCurrentConfigGypi: getCurrentConfigGypi
|
||||
}
|
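The `module.exports.test` hook at the end of the new file exposes `getCurrentConfigGypi` so the generated config object can be inspected without writing anything to disk. A minimal sketch of that kind of use, with hypothetical inputs (none of these values come from the diff):

    // Assumed to run from inside node-gyp's lib/ directory.
    const { test } = require('./create-config-gypi')

    const config = test.getCurrentConfigGypi({
      gyp: { opts: { arch: 'arm64', thin: true }, configDefs: {} }, // illustrative stub
      nodeDir: '/path/to/node',
      vsInfo: {}, // only consulted on win32
    })

    console.log(config.variables.target_arch)                  // 'arm64'
    console.log(config.variables.standalone_static_library)    // 0, i.e. thin archives enabled
    console.log(config.target_defaults.default_configuration)  // 'Release'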
deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE (generated, vendored) — file removed, 16 deletions

@@ -1,16 +0,0 @@
|
|||
ISC License
|
||||
|
||||
Copyright (c) npm, Inc.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for
|
||||
any purpose with or without fee is hereby granted, provided that the
|
||||
above copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
|
||||
ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
|
||||
COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
|
||||
CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
|
||||
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
|
||||
OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
|
||||
USE OR PERFORMANCE OF THIS SOFTWARE.
|
deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/agent.js (generated, vendored) — file removed, 209 deletions

@@ -1,209 +0,0 @@
|
|||
'use strict'
|
||||
const LRU = require('lru-cache')
|
||||
const url = require('url')
|
||||
const isLambda = require('is-lambda')
|
||||
|
||||
const AGENT_CACHE = new LRU({ max: 50 })
|
||||
let HttpsAgent
|
||||
let HttpAgent
|
||||
|
||||
module.exports = getAgent
|
||||
|
||||
const getAgentTimeout = timeout =>
|
||||
typeof timeout !== 'number' || !timeout ? 0 : timeout + 1
|
||||
|
||||
const getMaxSockets = maxSockets => maxSockets || 15
|
||||
|
||||
function getAgent (uri, opts) {
|
||||
const parsedUri = new url.URL(typeof uri === 'string' ? uri : uri.url)
|
||||
const isHttps = parsedUri.protocol === 'https:'
|
||||
const pxuri = getProxyUri(parsedUri.href, opts)
|
||||
|
||||
// If opts.timeout is zero, set the agentTimeout to zero as well. A timeout
|
||||
// of zero disables the timeout behavior (OS limits still apply). Else, if
|
||||
// opts.timeout is a non-zero value, set it to timeout + 1, to ensure that
|
||||
// the node-fetch-npm timeout will always fire first, giving us more
|
||||
// consistent errors.
|
||||
const agentTimeout = getAgentTimeout(opts.timeout)
|
||||
const agentMaxSockets = getMaxSockets(opts.maxSockets)
|
||||
|
||||
const key = [
|
||||
`https:${isHttps}`,
|
||||
pxuri
|
||||
? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}`
|
||||
: '>no-proxy<',
|
||||
`local-address:${opts.localAddress || '>no-local-address<'}`,
|
||||
`strict-ssl:${isHttps ? !!opts.strictSSL : '>no-strict-ssl<'}`,
|
||||
`ca:${(isHttps && opts.ca) || '>no-ca<'}`,
|
||||
`cert:${(isHttps && opts.cert) || '>no-cert<'}`,
|
||||
`key:${(isHttps && opts.key) || '>no-key<'}`,
|
||||
`timeout:${agentTimeout}`,
|
||||
`maxSockets:${agentMaxSockets}`,
|
||||
].join(':')
|
||||
|
||||
if (opts.agent != null) { // `agent: false` has special behavior!
|
||||
return opts.agent
|
||||
}
|
||||
|
||||
// keep alive in AWS lambda makes no sense
|
||||
const lambdaAgent = !isLambda ? null
|
||||
: isHttps ? require('https').globalAgent
|
||||
: require('http').globalAgent
|
||||
|
||||
if (isLambda && !pxuri)
|
||||
return lambdaAgent
|
||||
|
||||
if (AGENT_CACHE.peek(key))
|
||||
return AGENT_CACHE.get(key)
|
||||
|
||||
if (pxuri) {
|
||||
const pxopts = isLambda ? {
|
||||
...opts,
|
||||
agent: lambdaAgent,
|
||||
} : opts
|
||||
const proxy = getProxy(pxuri, pxopts, isHttps)
|
||||
AGENT_CACHE.set(key, proxy)
|
||||
return proxy
|
||||
}
|
||||
|
||||
if (!HttpsAgent) {
|
||||
HttpAgent = require('agentkeepalive')
|
||||
HttpsAgent = HttpAgent.HttpsAgent
|
||||
}
|
||||
|
||||
const agent = isHttps ? new HttpsAgent({
|
||||
maxSockets: agentMaxSockets,
|
||||
ca: opts.ca,
|
||||
cert: opts.cert,
|
||||
key: opts.key,
|
||||
localAddress: opts.localAddress,
|
||||
rejectUnauthorized: opts.strictSSL,
|
||||
timeout: agentTimeout,
|
||||
}) : new HttpAgent({
|
||||
maxSockets: agentMaxSockets,
|
||||
localAddress: opts.localAddress,
|
||||
timeout: agentTimeout,
|
||||
})
|
||||
AGENT_CACHE.set(key, agent)
|
||||
return agent
|
||||
}
|
||||
|
||||
function checkNoProxy (uri, opts) {
|
||||
const host = new url.URL(uri).hostname.split('.').reverse()
|
||||
let noproxy = (opts.noProxy || getProcessEnv('no_proxy'))
|
||||
if (typeof noproxy === 'string')
|
||||
noproxy = noproxy.split(/\s*,\s*/g)
|
||||
|
||||
return noproxy && noproxy.some(no => {
|
||||
const noParts = no.split('.').filter(x => x).reverse()
|
||||
if (!noParts.length)
|
||||
return false
|
||||
for (let i = 0; i < noParts.length; i++) {
|
||||
if (host[i] !== noParts[i])
|
||||
return false
|
||||
}
|
||||
return true
|
||||
})
|
||||
}
|
||||
|
||||
module.exports.getProcessEnv = getProcessEnv
|
||||
|
||||
function getProcessEnv (env) {
|
||||
if (!env)
|
||||
return
|
||||
|
||||
let value
|
||||
|
||||
if (Array.isArray(env)) {
|
||||
for (const e of env) {
|
||||
value = process.env[e] ||
|
||||
process.env[e.toUpperCase()] ||
|
||||
process.env[e.toLowerCase()]
|
||||
if (typeof value !== 'undefined')
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof env === 'string') {
|
||||
value = process.env[env] ||
|
||||
process.env[env.toUpperCase()] ||
|
||||
process.env[env.toLowerCase()]
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
|
||||
module.exports.getProxyUri = getProxyUri
|
||||
function getProxyUri (uri, opts) {
|
||||
const protocol = new url.URL(uri).protocol
|
||||
|
||||
const proxy = opts.proxy ||
|
||||
(
|
||||
protocol === 'https:' &&
|
||||
getProcessEnv('https_proxy')
|
||||
) ||
|
||||
(
|
||||
protocol === 'http:' &&
|
||||
getProcessEnv(['https_proxy', 'http_proxy', 'proxy'])
|
||||
)
|
||||
if (!proxy)
|
||||
return null
|
||||
|
||||
const parsedProxy = (typeof proxy === 'string') ? new url.URL(proxy) : proxy
|
||||
|
||||
return !checkNoProxy(uri, opts) && parsedProxy
|
||||
}
|
||||
|
||||
const getAuth = u =>
|
||||
u.username && u.password ? `${u.username}:${u.password}`
|
||||
: u.username ? u.username
|
||||
: null
|
||||
|
||||
const getPath = u => u.pathname + u.search + u.hash
|
||||
|
||||
let HttpProxyAgent
|
||||
let HttpsProxyAgent
|
||||
let SocksProxyAgent
|
||||
module.exports.getProxy = getProxy
|
||||
function getProxy (proxyUrl, opts, isHttps) {
|
||||
const popts = {
|
||||
host: proxyUrl.hostname,
|
||||
port: proxyUrl.port,
|
||||
protocol: proxyUrl.protocol,
|
||||
path: getPath(proxyUrl),
|
||||
auth: getAuth(proxyUrl),
|
||||
ca: opts.ca,
|
||||
cert: opts.cert,
|
||||
key: opts.key,
|
||||
timeout: getAgentTimeout(opts.timeout),
|
||||
localAddress: opts.localAddress,
|
||||
maxSockets: getMaxSockets(opts.maxSockets),
|
||||
rejectUnauthorized: opts.strictSSL,
|
||||
}
|
||||
|
||||
if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') {
|
||||
if (!isHttps) {
|
||||
if (!HttpProxyAgent)
|
||||
HttpProxyAgent = require('http-proxy-agent')
|
||||
|
||||
return new HttpProxyAgent(popts)
|
||||
} else {
|
||||
if (!HttpsProxyAgent)
|
||||
HttpsProxyAgent = require('https-proxy-agent')
|
||||
|
||||
return new HttpsProxyAgent(popts)
|
||||
}
|
||||
} else if (proxyUrl.protocol.startsWith('socks')) {
|
||||
if (!SocksProxyAgent)
|
||||
SocksProxyAgent = require('socks-proxy-agent')
|
||||
|
||||
return new SocksProxyAgent(popts)
|
||||
} else {
|
||||
throw Object.assign(
|
||||
new Error(`unsupported proxy protocol: '${proxyUrl.protocol}'`),
|
||||
{
|
||||
url: proxyUrl.href,
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/cache.js (generated, vendored) — file removed, 260 deletions

@@ -1,260 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
const fetch = require('minipass-fetch')
|
||||
const cacache = require('cacache')
|
||||
const ssri = require('ssri')
|
||||
const url = require('url')
|
||||
|
||||
const Minipass = require('minipass')
|
||||
const MinipassFlush = require('minipass-flush')
|
||||
const MinipassCollect = require('minipass-collect')
|
||||
const MinipassPipeline = require('minipass-pipeline')
|
||||
|
||||
const MAX_MEM_SIZE = 5 * 1024 * 1024 // 5MB
|
||||
|
||||
// some headers should never be stored in the cache, either because
|
||||
// they're a security footgun to leave lying around, or because we
|
||||
// just don't need them taking up space.
|
||||
// set to undefined so they're omitted from the JSON.stringify
|
||||
const pruneHeaders = {
|
||||
authorization: undefined,
|
||||
'npm-session': undefined,
|
||||
'set-cookie': undefined,
|
||||
'cf-ray': undefined,
|
||||
'cf-cache-status': undefined,
|
||||
'cf-request-id': undefined,
|
||||
'x-fetch-attempts': undefined,
|
||||
}
|
||||
|
||||
function cacheKey (req) {
|
||||
const parsed = new url.URL(req.url)
|
||||
return `make-fetch-happen:request-cache:${
|
||||
url.format({
|
||||
protocol: parsed.protocol,
|
||||
slashes: true,
|
||||
port: parsed.port,
|
||||
hostname: parsed.hostname,
|
||||
pathname: parsed.pathname,
|
||||
search: parsed.search,
|
||||
})
|
||||
}`
|
||||
}
|
||||
|
||||
// This is a cacache-based implementation of the Cache standard,
|
||||
// using node-fetch.
|
||||
// docs: https://developer.mozilla.org/en-US/docs/Web/API/Cache
|
||||
//
|
||||
module.exports = class Cache {
|
||||
constructor (path, opts) {
|
||||
this._path = path
|
||||
this.Promise = (opts && opts.Promise) || Promise
|
||||
}
|
||||
|
||||
static get pruneHeaders () {
|
||||
// exposed for testing, not modifiable
|
||||
return { ...pruneHeaders }
|
||||
}
|
||||
|
||||
// Returns a Promise that resolves to the response associated with the first
|
||||
// matching request in the Cache object.
|
||||
match (req, opts) {
|
||||
const key = cacheKey(req)
|
||||
return cacache.get.info(this._path, key).then(info => {
|
||||
return info && cacache.get.hasContent(
|
||||
this._path, info.integrity, opts
|
||||
).then(exists => exists && info)
|
||||
}).then(info => {
|
||||
if (info && info.metadata && matchDetails(req, {
|
||||
url: info.metadata.url,
|
||||
reqHeaders: new fetch.Headers(info.metadata.reqHeaders),
|
||||
resHeaders: new fetch.Headers(info.metadata.resHeaders),
|
||||
cacheIntegrity: info.integrity,
|
||||
integrity: opts && opts.integrity,
|
||||
})) {
|
||||
const resHeaders = new fetch.Headers(info.metadata.resHeaders)
|
||||
addCacheHeaders(resHeaders, this._path, key, info.integrity, info.time)
|
||||
if (req.method === 'HEAD') {
|
||||
return new fetch.Response(null, {
|
||||
url: req.url,
|
||||
headers: resHeaders,
|
||||
status: 200,
|
||||
})
|
||||
}
|
||||
const cachePath = this._path
|
||||
// avoid opening cache file handles until a user actually tries to
|
||||
// read from it.
|
||||
const body = new Minipass()
|
||||
const fitInMemory = info.size < MAX_MEM_SIZE
|
||||
const removeOnResume = () => body.removeListener('resume', onResume)
|
||||
const onResume =
|
||||
opts.memoize !== false && fitInMemory
|
||||
? () => {
|
||||
const c = cacache.get.stream.byDigest(cachePath, info.integrity, {
|
||||
memoize: opts.memoize,
|
||||
})
|
||||
c.on('error', /* istanbul ignore next */ err => {
|
||||
body.emit('error', err)
|
||||
})
|
||||
c.pipe(body)
|
||||
}
|
||||
: () => {
|
||||
removeOnResume()
|
||||
cacache.get.byDigest(cachePath, info.integrity, {
|
||||
memoize: opts.memoize,
|
||||
})
|
||||
.then(data => body.end(data))
|
||||
.catch(/* istanbul ignore next */ err => {
|
||||
body.emit('error', err)
|
||||
})
|
||||
}
|
||||
body.once('resume', onResume)
|
||||
body.once('end', () => removeOnResume)
|
||||
return this.Promise.resolve(new fetch.Response(body, {
|
||||
url: req.url,
|
||||
headers: resHeaders,
|
||||
status: 200,
|
||||
size: info.size,
|
||||
}))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Takes both a request and its response and adds it to the given cache.
|
||||
put (req, response, opts) {
|
||||
opts = opts || {}
|
||||
const size = response.headers.get('content-length')
|
||||
const fitInMemory = !!size && opts.memoize !== false && size < MAX_MEM_SIZE
|
||||
const ckey = cacheKey(req)
|
||||
const cacheOpts = {
|
||||
algorithms: opts.algorithms,
|
||||
metadata: {
|
||||
url: req.url,
|
||||
reqHeaders: {
|
||||
...req.headers.raw(),
|
||||
...pruneHeaders,
|
||||
},
|
||||
resHeaders: {
|
||||
...response.headers.raw(),
|
||||
...pruneHeaders,
|
||||
},
|
||||
},
|
||||
size,
|
||||
memoize: fitInMemory && opts.memoize,
|
||||
}
|
||||
if (req.method === 'HEAD' || response.status === 304) {
|
||||
// Update metadata without writing
|
||||
return cacache.get.info(this._path, ckey).then(info => {
|
||||
// Providing these will bypass content write
|
||||
cacheOpts.integrity = info.integrity
|
||||
addCacheHeaders(
|
||||
response.headers, this._path, ckey, info.integrity, info.time
|
||||
)
|
||||
|
||||
return new MinipassPipeline(
|
||||
cacache.get.stream.byDigest(this._path, info.integrity, cacheOpts),
|
||||
cacache.put.stream(this._path, ckey, cacheOpts)
|
||||
).promise().then(() => {
|
||||
return response
|
||||
})
|
||||
})
|
||||
}
|
||||
const oldBody = response.body
|
||||
// the flush is the last thing in the pipeline. Build the pipeline
|
||||
// back-to-front so we don't consume the data before we use it!
|
||||
// We unshift in either a tee-stream to the cache put stream,
|
||||
// or a collecter that dumps it to cache in one go, then the
|
||||
// old body to bring in the data.
|
||||
const newBody = new MinipassPipeline(new MinipassFlush({
|
||||
flush () {
|
||||
return cacheWritePromise
|
||||
},
|
||||
}))
|
||||
|
||||
let cacheWriteResolve, cacheWriteReject
|
||||
const cacheWritePromise = new Promise((resolve, reject) => {
|
||||
cacheWriteResolve = resolve
|
||||
cacheWriteReject = reject
|
||||
})
|
||||
const cachePath = this._path
|
||||
|
||||
if (fitInMemory) {
|
||||
const collecter = new MinipassCollect.PassThrough()
|
||||
collecter.on('collect', data => {
|
||||
cacache.put(
|
||||
cachePath,
|
||||
ckey,
|
||||
data,
|
||||
cacheOpts
|
||||
).then(cacheWriteResolve, cacheWriteReject)
|
||||
})
|
||||
newBody.unshift(collecter)
|
||||
} else {
|
||||
const tee = new Minipass()
|
||||
const cacheStream = cacache.put.stream(
|
||||
cachePath,
|
||||
ckey,
|
||||
cacheOpts
|
||||
)
|
||||
tee.pipe(cacheStream)
|
||||
cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
|
||||
newBody.unshift(tee)
|
||||
}
|
||||
|
||||
newBody.unshift(oldBody)
|
||||
return Promise.resolve(new fetch.Response(newBody, response))
|
||||
}
|
||||
|
||||
// Finds the Cache entry whose key is the request, and if found, deletes the
|
||||
// Cache entry and returns a Promise that resolves to true. If no Cache entry
|
||||
// is found, it returns false.
|
||||
'delete' (req, opts) {
|
||||
opts = opts || {}
|
||||
if (typeof opts.memoize === 'object') {
|
||||
if (opts.memoize.reset)
|
||||
opts.memoize.reset()
|
||||
else if (opts.memoize.clear)
|
||||
opts.memoize.clear()
|
||||
else {
|
||||
Object.keys(opts.memoize).forEach(k => {
|
||||
opts.memoize[k] = null
|
||||
})
|
||||
}
|
||||
}
|
||||
return cacache.rm.entry(
|
||||
this._path,
|
||||
cacheKey(req)
|
||||
// TODO - true/false
|
||||
).then(() => false)
|
||||
}
|
||||
}
|
||||
|
||||
function matchDetails (req, cached) {
|
||||
const reqUrl = new url.URL(req.url)
|
||||
const cacheUrl = new url.URL(cached.url)
|
||||
const vary = cached.resHeaders.get('Vary')
|
||||
// https://tools.ietf.org/html/rfc7234#section-4.1
|
||||
if (vary) {
|
||||
if (vary.match(/\*/))
|
||||
return false
|
||||
else {
|
||||
const fieldsMatch = vary.split(/\s*,\s*/).every(field => {
|
||||
return cached.reqHeaders.get(field) === req.headers.get(field)
|
||||
})
|
||||
if (!fieldsMatch)
|
||||
return false
|
||||
}
|
||||
}
|
||||
if (cached.integrity)
|
||||
return ssri.parse(cached.integrity).match(cached.cacheIntegrity)
|
||||
|
||||
reqUrl.hash = null
|
||||
cacheUrl.hash = null
|
||||
return url.format(reqUrl) === url.format(cacheUrl)
|
||||
}
|
||||
|
||||
function addCacheHeaders (resHeaders, path, key, hash, time) {
|
||||
resHeaders.set('X-Local-Cache', encodeURIComponent(path))
|
||||
resHeaders.set('X-Local-Cache-Key', encodeURIComponent(key))
|
||||
resHeaders.set('X-Local-Cache-Hash', encodeURIComponent(hash))
|
||||
resHeaders.set('X-Local-Cache-Time', new Date(time).toUTCString())
|
||||
}
|
deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/index.js (generated, vendored) — file removed, 457 deletions

@@ -1,457 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
const url = require('url')
|
||||
const fetch = require('minipass-fetch')
|
||||
const pkg = require('./package.json')
|
||||
const retry = require('promise-retry')
|
||||
let ssri
|
||||
|
||||
const Minipass = require('minipass')
|
||||
const MinipassPipeline = require('minipass-pipeline')
|
||||
const getAgent = require('./agent')
|
||||
const setWarning = require('./warning')
|
||||
|
||||
const configureOptions = require('./utils/configure-options')
|
||||
const iterableToObject = require('./utils/iterable-to-object')
|
||||
const makePolicy = require('./utils/make-policy')
|
||||
|
||||
const isURL = /^https?:/
|
||||
const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
|
||||
|
||||
const RETRY_ERRORS = [
|
||||
'ECONNRESET', // remote socket closed on us
|
||||
'ECONNREFUSED', // remote host refused to open connection
|
||||
'EADDRINUSE', // failed to bind to a local port (proxy?)
|
||||
'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW
|
||||
// Known codes we do NOT retry on:
|
||||
// ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
|
||||
]
|
||||
|
||||
const RETRY_TYPES = [
|
||||
'request-timeout',
|
||||
]
|
||||
|
||||
// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch
|
||||
module.exports = cachingFetch
|
||||
cachingFetch.defaults = function (_uri, _opts) {
|
||||
const fetch = this
|
||||
if (typeof _uri === 'object') {
|
||||
_opts = _uri
|
||||
_uri = null
|
||||
}
|
||||
|
||||
function defaultedFetch (uri, opts) {
|
||||
const finalOpts = Object.assign({}, _opts || {}, opts || {})
|
||||
return fetch(uri || _uri, finalOpts)
|
||||
}
|
||||
|
||||
defaultedFetch.defaults = fetch.defaults
|
||||
defaultedFetch.delete = fetch.delete
|
||||
return defaultedFetch
|
||||
}
|
||||
|
||||
cachingFetch.delete = cacheDelete
|
||||
function cacheDelete (uri, opts) {
|
||||
opts = configureOptions(opts)
|
||||
if (opts.cacheManager) {
|
||||
const req = new fetch.Request(uri, {
|
||||
method: opts.method,
|
||||
headers: opts.headers,
|
||||
})
|
||||
return opts.cacheManager.delete(req, opts)
|
||||
}
|
||||
}
|
||||
|
||||
function initializeSsri () {
|
||||
if (!ssri)
|
||||
ssri = require('ssri')
|
||||
}
|
||||
|
||||
function cachingFetch (uri, _opts) {
|
||||
const opts = configureOptions(_opts)
|
||||
|
||||
if (opts.integrity) {
|
||||
initializeSsri()
|
||||
// if verifying integrity, fetch must not decompress
|
||||
opts.compress = false
|
||||
}
|
||||
|
||||
const isCachable = (
|
||||
(
|
||||
opts.method === 'GET' ||
|
||||
opts.method === 'HEAD'
|
||||
) &&
|
||||
Boolean(opts.cacheManager) &&
|
||||
opts.cache !== 'no-store' &&
|
||||
opts.cache !== 'reload'
|
||||
)
|
||||
|
||||
if (isCachable) {
|
||||
const req = new fetch.Request(uri, {
|
||||
method: opts.method,
|
||||
headers: opts.headers,
|
||||
})
|
||||
|
||||
return opts.cacheManager.match(req, opts).then(res => {
|
||||
if (res) {
|
||||
const warningCode = (res.headers.get('Warning') || '').match(/^\d+/)
|
||||
if (warningCode && +warningCode >= 100 && +warningCode < 200) {
|
||||
// https://tools.ietf.org/html/rfc7234#section-4.3.4
|
||||
//
|
||||
// If a stored response is selected for update, the cache MUST:
|
||||
//
|
||||
// * delete any Warning header fields in the stored response with
|
||||
// warn-code 1xx (see Section 5.5);
|
||||
//
|
||||
// * retain any Warning header fields in the stored response with
|
||||
// warn-code 2xx;
|
||||
//
|
||||
res.headers.delete('Warning')
|
||||
}
|
||||
|
||||
if (opts.cache === 'default' && !isStale(req, res))
|
||||
return res
|
||||
|
||||
if (opts.cache === 'default' || opts.cache === 'no-cache')
|
||||
return conditionalFetch(req, res, opts)
|
||||
|
||||
if (opts.cache === 'force-cache' || opts.cache === 'only-if-cached') {
|
||||
// 112 Disconnected operation
|
||||
// SHOULD be included if the cache is intentionally disconnected from
|
||||
// the rest of the network for a period of time.
|
||||
// (https://tools.ietf.org/html/rfc2616#section-14.46)
|
||||
setWarning(res, 112, 'Disconnected operation')
|
||||
return res
|
||||
}
|
||||
}
|
||||
|
||||
if (!res && opts.cache === 'only-if-cached') {
|
||||
const errorMsg = `request to ${
|
||||
uri
|
||||
} failed: cache mode is 'only-if-cached' but no cached response available.`
|
||||
|
||||
const err = new Error(errorMsg)
|
||||
err.code = 'ENOTCACHED'
|
||||
throw err
|
||||
}
|
||||
|
||||
// Missing cache entry, or mode is default (if stale), reload, no-store
|
||||
return remoteFetch(req.url, opts)
|
||||
})
|
||||
}
|
||||
return remoteFetch(uri, opts)
|
||||
}
|
||||
|
||||
// https://tools.ietf.org/html/rfc7234#section-4.2
|
||||
function isStale (req, res) {
|
||||
const _req = {
|
||||
url: req.url,
|
||||
method: req.method,
|
||||
headers: iterableToObject(req.headers),
|
||||
}
|
||||
|
||||
const policy = makePolicy(req, res)
|
||||
|
||||
const responseTime = res.headers.get('x-local-cache-time') ||
|
||||
/* istanbul ignore next - would be weird to get a 'stale'
|
||||
* response that didn't come from cache with a cache time header */
|
||||
(res.headers.get('date') || 0)
|
||||
|
||||
policy._responseTime = new Date(responseTime)
|
||||
|
||||
const bool = !policy.satisfiesWithoutRevalidation(_req)
|
||||
const headers = policy.responseHeaders()
|
||||
if (headers.warning && /^113\b/.test(headers.warning)) {
|
||||
// Possible to pick up a rfc7234 warning at this point.
|
||||
// This is kind of a weird place to stick this, should probably go
|
||||
// in cachingFetch. But by putting it here, we save an extra
|
||||
// CachePolicy object construction.
|
||||
res.headers.append('warning', headers.warning)
|
||||
}
|
||||
return bool
|
||||
}
|
||||
|
||||
function mustRevalidate (res) {
|
||||
return (res.headers.get('cache-control') || '').match(/must-revalidate/i)
|
||||
}
|
||||
|
||||
function conditionalFetch (req, cachedRes, opts) {
|
||||
const _req = {
|
||||
url: req.url,
|
||||
method: req.method,
|
||||
headers: Object.assign({}, opts.headers || {}),
|
||||
}
|
||||
|
||||
const policy = makePolicy(req, cachedRes)
|
||||
opts.headers = policy.revalidationHeaders(_req)
|
||||
|
||||
return remoteFetch(req.url, opts)
|
||||
.then(condRes => {
|
||||
const revalidatedPolicy = policy.revalidatedPolicy(_req, {
|
||||
status: condRes.status,
|
||||
headers: iterableToObject(condRes.headers),
|
||||
})
|
||||
|
||||
if (condRes.status >= 500 && !mustRevalidate(cachedRes)) {
|
||||
// 111 Revalidation failed
|
||||
// MUST be included if a cache returns a stale response because an
|
||||
// attempt to revalidate the response failed, due to an inability to
|
||||
// reach the server.
|
||||
// (https://tools.ietf.org/html/rfc2616#section-14.46)
|
||||
setWarning(cachedRes, 111, 'Revalidation failed')
|
||||
return cachedRes
|
||||
}
|
||||
|
||||
if (condRes.status === 304) { // 304 Not Modified
|
||||
// Create a synthetic response from the cached body and original req
|
||||
const synthRes = new fetch.Response(cachedRes.body, condRes)
|
||||
return opts.cacheManager.put(req, synthRes, opts)
|
||||
.then(newRes => {
|
||||
// Get the list first, because if we delete while iterating,
|
||||
// it'll throw off the count and not make it through all
|
||||
// of them.
|
||||
const newHeaders = revalidatedPolicy.policy.responseHeaders()
|
||||
const toDelete = [...newRes.headers.keys()]
|
||||
.filter(k => !newHeaders[k])
|
||||
for (const key of toDelete)
|
||||
newRes.headers.delete(key)
|
||||
|
||||
for (const [key, val] of Object.entries(newHeaders))
|
||||
newRes.headers.set(key, val)
|
||||
|
||||
return newRes
|
||||
})
|
||||
}
|
||||
|
||||
return condRes
|
||||
})
|
||||
.then(res => res)
|
||||
.catch(err => {
|
||||
if (mustRevalidate(cachedRes))
|
||||
throw err
|
||||
else {
|
||||
// 111 Revalidation failed
|
||||
// MUST be included if a cache returns a stale response because an
|
||||
// attempt to revalidate the response failed, due to an inability to
|
||||
// reach the server.
|
||||
// (https://tools.ietf.org/html/rfc2616#section-14.46)
|
||||
setWarning(cachedRes, 111, 'Revalidation failed')
|
||||
// 199 Miscellaneous warning
|
||||
// The warning text MAY include arbitrary information to be presented to
|
||||
// a human user, or logged. A system receiving this warning MUST NOT take
|
||||
// any automated action, besides presenting the warning to the user.
|
||||
// (https://tools.ietf.org/html/rfc2616#section-14.46)
|
||||
setWarning(
|
||||
cachedRes,
|
||||
199,
|
||||
`Miscellaneous Warning ${err.code}: ${err.message}`
|
||||
)
|
||||
|
||||
return cachedRes
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function remoteFetchHandleIntegrity (res, integrity) {
|
||||
if (res.status !== 200)
|
||||
return res // Error responses aren't subject to integrity checks.
|
||||
|
||||
const oldBod = res.body
|
||||
const newBod = ssri.integrityStream({
|
||||
integrity,
|
||||
})
|
||||
return new fetch.Response(new MinipassPipeline(oldBod, newBod), res)
|
||||
}
|
||||
|
||||
function remoteFetch (uri, opts) {
|
||||
const agent = getAgent(uri, opts)
|
||||
const headers = opts.headers instanceof fetch.Headers
|
||||
? opts.headers
|
||||
: new fetch.Headers(opts.headers)
|
||||
if (!headers.get('connection'))
|
||||
headers.set('connection', agent ? 'keep-alive' : 'close')
|
||||
|
||||
if (!headers.get('user-agent'))
|
||||
headers.set('user-agent', USER_AGENT)
|
||||
|
||||
const reqOpts = {
|
||||
agent,
|
||||
body: opts.body,
|
||||
compress: opts.compress,
|
||||
follow: opts.follow,
|
||||
headers,
|
||||
method: opts.method,
|
||||
redirect: 'manual',
|
||||
size: opts.size,
|
||||
counter: opts.counter,
|
||||
timeout: opts.timeout,
|
||||
ca: opts.ca,
|
||||
cert: opts.cert,
|
||||
key: opts.key,
|
||||
rejectUnauthorized: opts.strictSSL,
|
||||
}
|
||||
|
||||
return retry(
|
||||
(retryHandler, attemptNum) => {
|
||||
const req = new fetch.Request(uri, reqOpts)
|
||||
return fetch(req)
|
||||
.then((res) => {
|
||||
if (opts.integrity)
|
||||
res = remoteFetchHandleIntegrity(res, opts.integrity)
|
||||
|
||||
res.headers.set('x-fetch-attempts', attemptNum)
|
||||
|
||||
const isStream = Minipass.isStream(req.body)
|
||||
|
||||
if (opts.cacheManager) {
|
||||
const isMethodGetHead = (
|
||||
req.method === 'GET' ||
|
||||
req.method === 'HEAD'
|
||||
)
|
||||
|
||||
const isCachable = (
|
||||
opts.cache !== 'no-store' &&
|
||||
isMethodGetHead &&
|
||||
makePolicy(req, res).storable() &&
|
||||
res.status === 200 // No other statuses should be stored!
|
||||
)
|
||||
|
||||
if (isCachable)
|
||||
return opts.cacheManager.put(req, res, opts)
|
||||
|
||||
if (!isMethodGetHead) {
|
||||
return opts.cacheManager.delete(req).then(() => {
|
||||
if (res.status >= 500 && req.method !== 'POST' && !isStream) {
|
||||
if (typeof opts.onRetry === 'function')
|
||||
opts.onRetry(res)
|
||||
|
||||
return retryHandler(res)
|
||||
}
|
||||
|
||||
return res
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const isRetriable = (
|
||||
req.method !== 'POST' &&
|
||||
!isStream &&
|
||||
(
|
||||
res.status === 408 || // Request Timeout
|
||||
res.status === 420 || // Enhance Your Calm (usually Twitter rate-limit)
|
||||
res.status === 429 || // Too Many Requests ("standard" rate-limiting)
|
||||
res.status >= 500 // Assume server errors are momentary hiccups
|
||||
)
|
||||
)
|
||||
|
||||
if (isRetriable) {
|
||||
if (typeof opts.onRetry === 'function')
|
||||
opts.onRetry(res)
|
||||
|
||||
return retryHandler(res)
|
||||
}
|
||||
|
||||
if (!fetch.isRedirect(res.status))
|
||||
return res
|
||||
|
||||
if (opts.redirect === 'manual')
|
||||
return res
|
||||
|
||||
// if (!fetch.isRedirect(res.status) || opts.redirect === 'manual') {
|
||||
// return res
|
||||
// }
|
||||
|
||||
// handle redirects - matches behavior of fetch: https://github.com/bitinn/node-fetch
|
||||
if (opts.redirect === 'error') {
|
||||
const err = new fetch.FetchError(`redirect mode is set to error: ${uri}`, 'no-redirect', { code: 'ENOREDIRECT' })
|
||||
throw err
|
||||
}
|
||||
|
||||
if (!res.headers.get('location')) {
|
||||
const err = new fetch.FetchError(`redirect location header missing at: ${uri}`, 'no-location', { code: 'EINVALIDREDIRECT' })
|
||||
throw err
|
||||
}
|
||||
|
||||
if (req.counter >= req.follow) {
|
||||
const err = new fetch.FetchError(`maximum redirect reached at: ${uri}`, 'max-redirect', { code: 'EMAXREDIRECT' })
|
||||
throw err
|
||||
}
|
||||
|
||||
const resolvedUrlParsed = new url.URL(res.headers.get('location'), req.url)
|
||||
const resolvedUrl = url.format(resolvedUrlParsed)
|
||||
const redirectURL = (isURL.test(res.headers.get('location')))
|
||||
? new url.URL(res.headers.get('location'))
|
||||
: resolvedUrlParsed
|
||||
|
||||
// Comment below is used under the following license:
|
||||
// Copyright (c) 2010-2012 Mikeal Rogers
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an "AS
|
||||
// IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
// express or implied. See the License for the specific language
|
||||
// governing permissions and limitations under the License.
|
||||
|
||||
// Remove authorization if changing hostnames (but not if just
|
||||
// changing ports or protocols). This matches the behavior of request:
|
||||
// https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
|
||||
if (new url.URL(req.url).hostname !== redirectURL.hostname)
|
||||
req.headers.delete('authorization')
|
||||
|
||||
// for POST request with 301/302 response, or any request with 303 response,
|
||||
// use GET when following redirect
|
||||
if (
|
||||
res.status === 303 ||
|
||||
(
|
||||
req.method === 'POST' &&
|
||||
(
|
||||
res.status === 301 ||
|
||||
res.status === 302
|
||||
)
|
||||
)
|
||||
) {
|
||||
opts.method = 'GET'
|
||||
opts.body = null
|
||||
req.headers.delete('content-length')
|
||||
}
|
||||
|
||||
opts.headers = {}
|
||||
req.headers.forEach((value, name) => {
|
||||
opts.headers[name] = value
|
||||
})
|
||||
|
||||
opts.counter = ++req.counter
|
||||
return cachingFetch(resolvedUrl, opts)
|
||||
})
|
||||
.catch(err => {
|
||||
const code = (err.code === 'EPROMISERETRY')
|
||||
? err.retried.code
|
||||
: err.code
|
||||
|
||||
const isRetryError = (
|
||||
RETRY_ERRORS.indexOf(code) === -1 &&
|
||||
RETRY_TYPES.indexOf(err.type) === -1
|
||||
)
|
||||
|
||||
if (req.method === 'POST' || isRetryError)
|
||||
throw err
|
||||
|
||||
if (typeof opts.onRetry === 'function')
|
||||
opts.onRetry(err)
|
||||
|
||||
return retryHandler(err)
|
||||
})
|
||||
},
|
||||
opts.retry
|
||||
).catch(err => {
|
||||
if (err.status >= 400 && err.type !== 'system') {
|
||||
// this is an HTTP response "error" that we care about
|
||||
return err
|
||||
}
|
||||
|
||||
throw err
|
||||
})
|
||||
}
|
deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json (generated, vendored) — file removed, 72 deletions

@@ -1,72 +0,0 @@
|
|||
{
|
||||
"name": "make-fetch-happen",
|
||||
"version": "8.0.14",
|
||||
"description": "Opinionated, caching, retrying fetch client",
|
||||
"main": "index.js",
|
||||
"files": [
|
||||
"*.js",
|
||||
"lib",
|
||||
"utils"
|
||||
],
|
||||
"scripts": {
|
||||
"preversion": "npm t",
|
||||
"postversion": "npm publish",
|
||||
"prepublishOnly": "git push --follow-tags",
|
||||
"test": "tap test/*.js",
|
||||
"posttest": "npm run lint",
|
||||
"eslint": "eslint",
|
||||
"lint": "npm run eslint -- *.js utils test",
|
||||
"lintfix": "npm run lint -- --fix"
|
||||
},
|
||||
"repository": "https://github.com/npm/make-fetch-happen",
|
||||
"keywords": [
|
||||
"http",
|
||||
"request",
|
||||
"fetch",
|
||||
"mean girls",
|
||||
"caching",
|
||||
"cache",
|
||||
"subresource integrity"
|
||||
],
|
||||
"author": {
|
||||
"name": "Kat Marchán",
|
||||
"email": "kzm@zkat.tech",
|
||||
"twitter": "maybekatz"
|
||||
},
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"agentkeepalive": "^4.1.3",
|
||||
"cacache": "^15.0.5",
|
||||
"http-cache-semantics": "^4.1.0",
|
||||
"http-proxy-agent": "^4.0.1",
|
||||
"https-proxy-agent": "^5.0.0",
|
||||
"is-lambda": "^1.0.1",
|
||||
"lru-cache": "^6.0.0",
|
||||
"minipass": "^3.1.3",
|
||||
"minipass-collect": "^1.0.2",
|
||||
"minipass-fetch": "^1.3.2",
|
||||
"minipass-flush": "^1.0.5",
|
||||
"minipass-pipeline": "^1.2.4",
|
||||
"promise-retry": "^2.0.1",
|
||||
"socks-proxy-agent": "^5.0.0",
|
||||
"ssri": "^8.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"eslint": "^7.14.0",
|
||||
"eslint-plugin-import": "^2.22.1",
|
||||
"eslint-plugin-node": "^11.1.0",
|
||||
"eslint-plugin-promise": "^4.2.1",
|
||||
"eslint-plugin-standard": "^5.0.0",
|
||||
"mkdirp": "^1.0.4",
|
||||
"nock": "^11.9.1",
|
||||
"npmlog": "^4.1.2",
|
||||
"require-inject": "^1.4.2",
|
||||
"rimraf": "^2.7.1",
|
||||
"safe-buffer": "^5.2.1",
|
||||
"standard-version": "^7.1.0",
|
||||
"tap": "^14.11.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
}
|
deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/utils/configure-options.js (generated, vendored) — file removed, 32 deletions

@@ -1,32 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
const initializeCache = require('./initialize-cache')
|
||||
|
||||
module.exports = function configureOptions (_opts) {
|
||||
const opts = Object.assign({}, _opts || {})
|
||||
opts.method = (opts.method || 'GET').toUpperCase()
|
||||
|
||||
if (!opts.retry) {
|
||||
// opts.retry was falsy; set default
|
||||
opts.retry = { retries: 0 }
|
||||
} else {
|
||||
if (typeof opts.retry !== 'object') {
|
||||
// Shorthand
|
||||
if (typeof opts.retry === 'number')
|
||||
opts.retry = { retries: opts.retry }
|
||||
|
||||
if (typeof opts.retry === 'string') {
|
||||
const value = parseInt(opts.retry, 10)
|
||||
opts.retry = (value) ? { retries: value } : { retries: 0 }
|
||||
}
|
||||
} else {
|
||||
// Set default retries
|
||||
opts.retry = Object.assign({}, { retries: 0 }, opts.retry)
|
||||
}
|
||||
}
|
||||
|
||||
if (opts.cacheManager)
|
||||
initializeCache(opts)
|
||||
|
||||
return opts
|
||||
}
|
deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/utils/initialize-cache.js (generated, vendored) — file removed, 26 deletions

@@ -1,26 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
const isHeaderConditional = require('./is-header-conditional')
|
||||
// Default cacache-based cache
|
||||
const Cache = require('../cache')
|
||||
|
||||
module.exports = function initializeCache (opts) {
|
||||
/**
|
||||
* NOTE: `opts.cacheManager` is the path to cache
|
||||
* We're making the assumption that if `opts.cacheManager` *isn't* a string,
|
||||
* it's a cache object
|
||||
*/
|
||||
if (typeof opts.cacheManager === 'string') {
|
||||
// Need to make a cache object
|
||||
opts.cacheManager = new Cache(opts.cacheManager, opts)
|
||||
}
|
||||
|
||||
opts.cache = opts.cache || 'default'
|
||||
|
||||
if (opts.cache === 'default' && isHeaderConditional(opts.headers)) {
|
||||
// If header list contains `If-Modified-Since`, `If-None-Match`,
|
||||
// `If-Unmodified-Since`, `If-Match`, or `If-Range`, fetch will set cache
|
||||
// mode to "no-store" if it is "default".
|
||||
opts.cache = 'no-store'
|
||||
}
|
||||
}
|
|
@@ -1,17 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
module.exports = function isHeaderConditional (headers) {
|
||||
if (!headers || typeof headers !== 'object')
|
||||
return false
|
||||
|
||||
const modifiers = [
|
||||
'if-modified-since',
|
||||
'if-none-match',
|
||||
'if-unmodified-since',
|
||||
'if-match',
|
||||
'if-range',
|
||||
]
|
||||
|
||||
return Object.keys(headers)
|
||||
.some(h => modifiers.indexOf(h.toLowerCase()) !== -1)
|
||||
}
|
|
@@ -1,9 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
module.exports = function iterableToObject (iter) {
|
||||
const obj = {}
|
||||
for (const k of iter.keys())
|
||||
obj[k] = iter.get(k)
|
||||
|
||||
return obj
|
||||
}
|
deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/utils/make-policy.js (generated, vendored) — file removed, 19 deletions

@@ -1,19 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
const CachePolicy = require('http-cache-semantics')
|
||||
|
||||
const iterableToObject = require('./iterable-to-object')
|
||||
|
||||
module.exports = function makePolicy (req, res) {
|
||||
const _req = {
|
||||
url: req.url,
|
||||
method: req.method,
|
||||
headers: iterableToObject(req.headers),
|
||||
}
|
||||
const _res = {
|
||||
status: res.status,
|
||||
headers: iterableToObject(res.headers),
|
||||
}
|
||||
|
||||
return new CachePolicy(_req, _res, { shared: false })
|
||||
}
|
deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/warning.js (generated, vendored) — file removed, 24 deletions

@@ -1,24 +0,0 @@
|
|||
const url = require('url')
|
||||
|
||||
module.exports = setWarning
|
||||
|
||||
function setWarning (reqOrRes, code, message, replace) {
|
||||
// Warning = "Warning" ":" 1#warning-value
|
||||
// warning-value = warn-code SP warn-agent SP warn-text [SP warn-date]
|
||||
// warn-code = 3DIGIT
|
||||
// warn-agent = ( host [ ":" port ] ) | pseudonym
|
||||
// ; the name or pseudonym of the server adding
|
||||
// ; the Warning header, for use in debugging
|
||||
// warn-text = quoted-string
|
||||
// warn-date = <"> HTTP-date <">
|
||||
// (https://tools.ietf.org/html/rfc2616#section-14.46)
|
||||
const host = new url.URL(reqOrRes.url).host
|
||||
const jsonMessage = JSON.stringify(message)
|
||||
const jsonDate = JSON.stringify(new Date().toUTCString())
|
||||
const header = replace ? 'set' : 'append'
|
||||
|
||||
reqOrRes.headers[header](
|
||||
'Warning',
|
||||
`${code} ${host} ${jsonMessage} ${jsonDate}`
|
||||
)
|
||||
}
|
21
deps/npm/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/agent.d.ts
generated
vendored
@ -1,21 +0,0 @@
/// <reference types="node" />
import net from 'net';
import { Agent, ClientRequest, RequestOptions } from 'agent-base';
import { SocksProxyAgentOptions } from '.';
/**
 * The `SocksProxyAgent`.
 *
 * @api public
 */
export default class SocksProxyAgent extends Agent {
    private lookup;
    private proxy;
    constructor(_opts: string | SocksProxyAgentOptions);
    /**
     * Initiates a SOCKS connection to the specified SOCKS proxy server,
     * which in turn connects to the specified remote host and port.
     *
     * @api protected
     */
    callback(req: ClientRequest, opts: RequestOptions): Promise<net.Socket>;
}
180
deps/npm/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/agent.js
generated
vendored
@ -1,180 +0,0 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const dns_1 = __importDefault(require("dns"));
const tls_1 = __importDefault(require("tls"));
const url_1 = __importDefault(require("url"));
const debug_1 = __importDefault(require("debug"));
const agent_base_1 = require("agent-base");
const socks_1 = require("socks");
const debug = debug_1.default('socks-proxy-agent');
function dnsLookup(host) {
    return new Promise((resolve, reject) => {
        dns_1.default.lookup(host, (err, res) => {
            if (err) {
                reject(err);
            }
            else {
                resolve(res);
            }
        });
    });
}
function parseSocksProxy(opts) {
    let port = 0;
    let lookup = false;
    let type = 5;
    // Prefer `hostname` over `host`, because of `url.parse()`
    const host = opts.hostname || opts.host;
    if (!host) {
        throw new TypeError('No "host"');
    }
    if (typeof opts.port === 'number') {
        port = opts.port;
    }
    else if (typeof opts.port === 'string') {
        port = parseInt(opts.port, 10);
    }
    // From RFC 1928, Section 3: https://tools.ietf.org/html/rfc1928#section-3
    // "The SOCKS service is conventionally located on TCP port 1080"
    if (!port) {
        port = 1080;
    }
    // figure out if we want socks v4 or v5, based on the "protocol" used.
    // Defaults to 5.
    if (opts.protocol) {
        switch (opts.protocol.replace(':', '')) {
            case 'socks4':
                lookup = true;
            // pass through
            case 'socks4a':
                type = 4;
                break;
            case 'socks5':
                lookup = true;
            // pass through
            case 'socks': // no version specified, default to 5h
            case 'socks5h':
                type = 5;
                break;
            default:
                throw new TypeError(`A "socks" protocol must be specified! Got: ${opts.protocol}`);
        }
    }
    if (typeof opts.type !== 'undefined') {
        if (opts.type === 4 || opts.type === 5) {
            type = opts.type;
        }
        else {
            throw new TypeError(`"type" must be 4 or 5, got: ${opts.type}`);
        }
    }
    const proxy = {
        host,
        port,
        type
    };
    let userId = opts.userId || opts.username;
    let password = opts.password;
    if (opts.auth) {
        const auth = opts.auth.split(':');
        userId = auth[0];
        password = auth[1];
    }
    if (userId) {
        Object.defineProperty(proxy, 'userId', {
            value: userId,
            enumerable: false
        });
    }
    if (password) {
        Object.defineProperty(proxy, 'password', {
            value: password,
            enumerable: false
        });
    }
    return { lookup, proxy };
}
/**
 * The `SocksProxyAgent`.
 *
 * @api public
 */
class SocksProxyAgent extends agent_base_1.Agent {
    constructor(_opts) {
        let opts;
        if (typeof _opts === 'string') {
            opts = url_1.default.parse(_opts);
        }
        else {
            opts = _opts;
        }
        if (!opts) {
            throw new TypeError('a SOCKS proxy server `host` and `port` must be specified!');
        }
        super(opts);
        const parsedProxy = parseSocksProxy(opts);
        this.lookup = parsedProxy.lookup;
        this.proxy = parsedProxy.proxy;
    }
    /**
     * Initiates a SOCKS connection to the specified SOCKS proxy server,
     * which in turn connects to the specified remote host and port.
     *
     * @api protected
     */
    callback(req, opts) {
        return __awaiter(this, void 0, void 0, function* () {
            const { lookup, proxy } = this;
            let { host, port, timeout } = opts;
            if (!host) {
                throw new Error('No `host` defined!');
            }
            if (lookup) {
                // Client-side DNS resolution for "4" and "5" socks proxy versions.
                host = yield dnsLookup(host);
            }
            const socksOpts = {
                proxy,
                destination: { host, port },
                command: 'connect',
                timeout
            };
            debug('Creating socks proxy connection: %o', socksOpts);
            const { socket } = yield socks_1.SocksClient.createConnection(socksOpts);
            debug('Successfully created socks proxy connection');
            if (opts.secureEndpoint) {
                // The proxy is connecting to a TLS server, so upgrade
                // this socket connection to a TLS connection.
                debug('Upgrading socket connection to TLS');
                const servername = opts.servername || host;
                return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket,
                    servername }));
            }
            return socket;
        });
    }
}
exports.default = SocksProxyAgent;
function omit(obj, ...keys) {
    const ret = {};
    let key;
    for (key in obj) {
        if (!keys.includes(key)) {
            ret[key] = obj[key];
        }
    }
    return ret;
}
//# sourceMappingURL=agent.js.map
1
deps/npm/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/agent.js.map
generated
vendored
@ -1 +0,0 @@
{"version":3,"file":"agent.js","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,8CAAsB;AAEtB,8CAAsB;AACtB,8CAAsB;AACtB,kDAAgC;AAChC,2CAAkE;AAClE,iCAAoE;AAGpE,MAAM,KAAK,GAAG,eAAW,CAAC,mBAAmB,CAAC,CAAC;AAE/C,SAAS,SAAS,CAAC,IAAY;IAC9B,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACtC,aAAG,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;YAC7B,IAAI,GAAG,EAAE;gBACR,MAAM,CAAC,GAAG,CAAC,CAAC;aACZ;iBAAM;gBACN,OAAO,CAAC,GAAG,CAAC,CAAC;aACb;QACF,CAAC,CAAC,CAAC;IACJ,CAAC,CAAC,CAAC;AACJ,CAAC;AAED,SAAS,eAAe,CACvB,IAA4B;IAE5B,IAAI,IAAI,GAAG,CAAC,CAAC;IACb,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,IAAI,IAAI,GAAuB,CAAC,CAAC;IAEjC,0DAA0D;IAC1D,MAAM,IAAI,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,IAAI,CAAC;IACxC,IAAI,CAAC,IAAI,EAAE;QACV,MAAM,IAAI,SAAS,CAAC,WAAW,CAAC,CAAC;KACjC;IAED,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,EAAE;QAClC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC;KACjB;SAAM,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,EAAE;QACzC,IAAI,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;KAC/B;IAED,0EAA0E;IAC1E,iEAAiE;IACjE,IAAI,CAAC,IAAI,EAAE;QACV,IAAI,GAAG,IAAI,CAAC;KACZ;IAED,sEAAsE;IACtE,iBAAiB;IACjB,IAAI,IAAI,CAAC,QAAQ,EAAE;QAClB,QAAQ,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE;YACvC,KAAK,QAAQ;gBACZ,MAAM,GAAG,IAAI,CAAC;YACf,eAAe;YACf,KAAK,SAAS;gBACb,IAAI,GAAG,CAAC,CAAC;gBACT,MAAM;YACP,KAAK,QAAQ;gBACZ,MAAM,GAAG,IAAI,CAAC;YACf,eAAe;YACf,KAAK,OAAO,CAAC,CAAC,sCAAsC;YACpD,KAAK,SAAS;gBACb,IAAI,GAAG,CAAC,CAAC;gBACT,MAAM;YACP;gBACC,MAAM,IAAI,SAAS,CAClB,8CAA8C,IAAI,CAAC,QAAQ,EAAE,CAC7D,CAAC;SACH;KACD;IAED,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,WAAW,EAAE;QACrC,IAAI,IAAI,CAAC,IAAI,KAAK,CAAC,IAAI,IAAI,CAAC,IAAI,KAAK,CAAC,EAAE;YACvC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC;SACjB;aAAM;YACN,MAAM,IAAI,SAAS,CAAC,+BAA+B,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;SAChE;KACD;IAED,MAAM,KAAK,GAAe;QACzB,IAAI;QACJ,IAAI;QACJ,IAAI;KACJ,CAAC;IAEF,IAAI,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,QAAQ,CAAC;IAC1C,IAAI,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;IAC7B,IAAI,IAAI,CAAC,IAAI,EAAE;QACd,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAClC,MAAM,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;QACjB,QAAQ,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;KACnB;IACD,IAAI,MAAM,EAAE;QACX,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,QAAQ,EAAE;YACtC,KAAK,EAAE,MAAM;YACb,UAAU,EAAE,KAAK;SACjB,CAAC,CAAC;KACH;IACD,IAAI,QAAQ,EAAE;QACb,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,UAAU,EAAE;YACxC,KAAK,EAAE,QAAQ;YACf,UAAU,EAAE,KAAK;SACjB,CAAC,CAAC;KACH;IAED,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC;AAC1B,CAAC;AAED;;;;GAIG;AACH,MAAqB,eAAgB,SAAQ,kBAAK;IAIjD,YAAY,KAAsC;QACjD,IAAI,IAA4B,CAAC;QACjC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC9B,IAAI,GAAG,aAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACxB;aAAM;YACN,IAAI,GAAG,KAAK,CAAC;SACb;QACD,IAAI,CAAC,IAAI,EAAE;YACV,MAAM,IAAI,SAAS,CAClB,2DAA2D,CAC3D,CAAC;SACF;QACD,KAAK,CAAC,IAAI,CAAC,CAAC;QAEZ,MAAM,WAAW,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;QAC1C,IAAI,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAC;QACjC,IAAI,CAAC,KAAK,GAAG,WAAW,CAAC,KAAK,CAAC;IAChC,CAAC;IAED;;;;;OAKG;IACG,QAAQ,CACb,GAAkB,EAClB,IAAoB;;YAEpB,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,GAAG,IAAI,CAAC;YAC/B,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC;YAEnC,IAAI,CAAC,IAAI,EAAE;gBACV,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC,CAAC;aACtC;YAED,IAAI,MAAM,EAAE;gBACX,mEAAmE;gBACnE,IAAI,GAAG,MAAM,SAAS,CAAC,IAAI,CAAC,CAAC;aAC7B;YAED,MAAM,SAAS,GAAuB;gBACrC,KAAK;gBACL,WAAW,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE;gBAC3B,OAAO,EAAE,SAAS;gBAClB,OAAO;aACP,CAAC;YACF,KAAK,CAAC,qCAAqC,EAAE,SAAS,CAAC,CAAC;YACxD,MAAM,EAAE,MAAM,EAAE,GAAG,MAAM,mBAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,CAAC;YACjE,KAAK,CAAC,6CAA6C,CAAC,CAAC;YAErD,IAAI,IAAI,CAAC,cAAc,EAAE;gBACxB,sDAAsD;gBACtD,8CAA8C;gBAC9C,KAAK,CAAC,oCAAoC,CAAC,CAAC;gBA
C5C,MAAM,UAAU,GAAG,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC;gBAC3C,OAAO,aAAG,CAAC,OAAO,iCACd,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,CAAC,KACjD,MAAM;oBACN,UAAU,IACT,CAAC;aACH;YAED,OAAO,MAAM,CAAC;QACf,CAAC;KAAA;CACD;AArED,kCAqEC;AAED,SAAS,IAAI,CACZ,GAAM,EACN,GAAG,IAAO;IAIV,MAAM,GAAG,GAAG,EAEX,CAAC;IACF,IAAI,GAAqB,CAAC;IAC1B,KAAK,GAAG,IAAI,GAAG,EAAE;QAChB,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;YACxB,GAAG,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;SACpB;KACD;IACD,OAAO,GAAG,CAAC;AACZ,CAAC"}
19
deps/npm/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.d.ts
generated
vendored
@ -1,19 +0,0 @@
/// <reference types="node" />
import { Url } from 'url';
import { SocksProxy } from 'socks';
import { AgentOptions } from 'agent-base';
import _SocksProxyAgent from './agent';
declare function createSocksProxyAgent(opts: string | createSocksProxyAgent.SocksProxyAgentOptions): _SocksProxyAgent;
declare namespace createSocksProxyAgent {
    interface BaseSocksProxyAgentOptions {
        host?: string | null;
        port?: string | number | null;
        username?: string | null;
    }
    export interface SocksProxyAgentOptions extends AgentOptions, BaseSocksProxyAgentOptions, Partial<Omit<Url & SocksProxy, keyof BaseSocksProxyAgentOptions>> {
    }
    export type SocksProxyAgent = _SocksProxyAgent;
    export const SocksProxyAgent: typeof _SocksProxyAgent;
    export {};
}
export = createSocksProxyAgent;
14
deps/npm/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.js
generated
vendored
@ -1,14 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
const agent_1 = __importDefault(require("./agent"));
function createSocksProxyAgent(opts) {
    return new agent_1.default(opts);
}
(function (createSocksProxyAgent) {
    createSocksProxyAgent.SocksProxyAgent = agent_1.default;
    createSocksProxyAgent.prototype = agent_1.default.prototype;
})(createSocksProxyAgent || (createSocksProxyAgent = {}));
module.exports = createSocksProxyAgent;
//# sourceMappingURL=index.js.map
1
deps/npm/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.js.map
generated
vendored
@ -1 +0,0 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;AAGA,oDAAuC;AAEvC,SAAS,qBAAqB,CAC7B,IAA2D;IAE3D,OAAO,IAAI,eAAgB,CAAC,IAAI,CAAC,CAAC;AACnC,CAAC;AAED,WAAU,qBAAqB;IAajB,qCAAe,GAAG,eAAgB,CAAC;IAEhD,qBAAqB,CAAC,SAAS,GAAG,eAAgB,CAAC,SAAS,CAAC;AAC9D,CAAC,EAhBS,qBAAqB,KAArB,qBAAqB,QAgB9B;AAED,iBAAS,qBAAqB,CAAC"}
64
deps/npm/node_modules/node-gyp/node_modules/socks-proxy-agent/package.json
generated
vendored
@ -1,64 +0,0 @@
{
  "name": "socks-proxy-agent",
  "version": "5.0.1",
  "description": "A SOCKS proxy `http.Agent` implementation for HTTP and HTTPS",
  "main": "dist/index",
  "typings": "dist/index",
  "files": [
    "dist"
  ],
  "scripts": {
    "prebuild": "rimraf dist",
    "build": "tsc",
    "test": "mocha --reporter spec",
    "test-lint": "eslint src --ext .js,.ts",
    "prepublishOnly": "npm run build"
  },
  "repository": {
    "type": "git",
    "url": "git://github.com/TooTallNate/node-socks-proxy-agent.git"
  },
  "keywords": [
    "socks",
    "socks4",
    "socks4a",
    "socks5",
    "socks5h",
    "proxy",
    "http",
    "https",
    "agent"
  ],
  "author": "Nathan Rajlich <nathan@tootallnate.net> (http://n8.io/)",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/TooTallNate/node-socks-proxy-agent/issues"
  },
  "dependencies": {
    "agent-base": "^6.0.2",
    "debug": "4",
    "socks": "^2.3.3"
  },
  "devDependencies": {
    "@types/debug": "4",
    "@types/node": "^12.12.11",
    "@typescript-eslint/eslint-plugin": "1.6.0",
    "@typescript-eslint/parser": "1.1.0",
    "eslint": "5.16.0",
    "eslint-config-airbnb": "17.1.0",
    "eslint-config-prettier": "4.1.0",
    "eslint-import-resolver-typescript": "1.1.1",
    "eslint-plugin-import": "2.16.0",
    "eslint-plugin-jsx-a11y": "6.2.1",
    "eslint-plugin-react": "7.12.4",
    "mocha": "^6.2.2",
    "proxy": "1",
    "raw-body": "^2.3.2",
    "rimraf": "^3.0.0",
    "socksv5": "TooTallNate/socksv5#fix/dstSock-close-event",
    "typescript": "^3.5.3"
  },
  "engines": {
    "node": ">= 6"
  }
}
4
deps/npm/node_modules/node-gyp/package.json
generated
vendored
@ -11,7 +11,7 @@
    "bindings",
    "gyp"
  ],
  "version": "8.2.0",
  "version": "8.3.0",
  "installVersion": 9,
  "author": "Nathan Rajlich <nathan@tootallnate.net> (http://tootallnate.net)",
  "repository": {
@ -25,7 +25,7 @@
    "env-paths": "^2.2.0",
    "glob": "^7.1.4",
    "graceful-fs": "^4.2.6",
    "make-fetch-happen": "^8.0.14",
    "make-fetch-happen": "^9.1.0",
    "nopt": "^5.0.0",
    "npmlog": "^4.1.2",
    "rimraf": "^3.0.2",
37
deps/npm/node_modules/node-gyp/test/test-create-config-gypi.js
generated
vendored
Normal file
@ -0,0 +1,37 @@
'use strict'

const { test } = require('tap')
const gyp = require('../lib/node-gyp')
const createConfigGypi = require('../lib/create-config-gypi')
const { getCurrentConfigGypi } = createConfigGypi.test

test('config.gypi with no options', function (t) {
  t.plan(2)

  const prog = gyp()
  prog.parseArgv([])

  const config = getCurrentConfigGypi({ gyp: prog, vsInfo: {} })
  t.equal(config.target_defaults.default_configuration, 'Release')
  t.equal(config.variables.target_arch, process.arch)
})

test('config.gypi with --debug', function (t) {
  t.plan(1)

  const prog = gyp()
  prog.parseArgv(['_', '_', '--debug'])

  const config = getCurrentConfigGypi({ gyp: prog, vsInfo: {} })
  t.equal(config.target_defaults.default_configuration, 'Debug')
})

test('config.gypi with custom options', function (t) {
  t.plan(1)

  const prog = gyp()
  prog.parseArgv(['_', '_', '--shared-libxml2'])

  const config = getCurrentConfigGypi({ gyp: prog, vsInfo: {} })
  t.equal(config.variables.shared_libxml2, true)
})
6
deps/npm/package.json
vendored
@ -1,5 +1,5 @@
{
  "version": "8.1.0",
  "version": "8.1.1",
  "name": "npm",
  "description": "a package manager for JavaScript",
  "workspaces": [
@ -55,7 +55,7 @@
  },
  "dependencies": {
    "@isaacs/string-locale-compare": "^1.1.0",
    "@npmcli/arborist": "^4.0.1",
    "@npmcli/arborist": "^4.0.2",
    "@npmcli/ci-detect": "^1.4.0",
    "@npmcli/config": "^2.3.0",
    "@npmcli/map-workspaces": "^2.0.0",
@ -96,7 +96,7 @@
    "mkdirp": "^1.0.4",
    "mkdirp-infer-owner": "^2.0.0",
    "ms": "^2.1.2",
    "node-gyp": "^8.2.0",
    "node-gyp": "^8.3.0",
    "nopt": "^5.0.0",
    "npm-audit-report": "^2.1.5",
    "npm-install-checks": "^4.0.0",