
Patch to fix the Dependabot alert. (#744)

* Patch to fix the Dependabot alert.
Tingluo Huang 2022-04-05 13:01:33 -04:00 committed by GitHub
parent 5126516654
commit add3486cc3
6 changed files with 42 additions and 16 deletions


@@ -1,6 +1,6 @@
 ---
 name: node-fetch
-version: 2.6.5
+version: 2.6.7
 type: npm
 summary: A light-weight module that brings window.fetch to node.js
 homepage: https://github.com/bitinn/node-fetch

dist/index.js vendored

@@ -10195,7 +10195,7 @@ Object.defineProperty(Response.prototype, Symbol.toStringTag, {
 });
 const INTERNALS$2 = Symbol('Request internals');
-const URL = whatwgUrl.URL;
+const URL = Url.URL || whatwgUrl.URL;
 // fix an issue where "format", "parse" aren't a named export for node <10
 const parse_url = Url.parse;
@@ -10458,9 +10458,17 @@ AbortError.prototype = Object.create(Error.prototype);
 AbortError.prototype.constructor = AbortError;
 AbortError.prototype.name = 'AbortError';
 
+const URL$1 = Url.URL || whatwgUrl.URL;
+
 // fix an issue where "PassThrough", "resolve" aren't a named export for node <10
 const PassThrough$1 = Stream.PassThrough;
-const resolve_url = Url.resolve;
+
+const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {
+    const orig = new URL$1(original).hostname;
+    const dest = new URL$1(destination).hostname;
+
+    return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);
+};
 
 /**
  * Fetch function
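
For reference, a minimal standalone sketch of the isDomainOrSubdomain helper added in the hunk above. The function body is copied from the diff; the sample URLs are made up, and the call order matches how fetch invokes it further down (request URL first, redirect target second).

// Standalone sketch of the helper above (body copied from the diff; hostnames are hypothetical).
const { URL } = require('url');

const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {
    const orig = new URL(original).hostname;
    const dest = new URL(destination).hostname;

    return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);
};

// fetch calls it as isDomainOrSubdomain(request.url, locationURL):
console.log(isDomainOrSubdomain('https://example.com/login', 'https://example.com/home'));   // true  - same host
console.log(isDomainOrSubdomain('https://example.com/login', 'https://api.example.com/v1')); // true  - redirect stays on a subdomain
console.log(isDomainOrSubdomain('https://example.com/login', 'https://evil.test/steal'));    // false - unrelated host

When it returns false, the last hunk of this file removes the credential-bearing headers before following the redirect.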
@@ -10548,7 +10556,19 @@ function fetch(url, opts) {
             const location = headers.get('Location');
 
             // HTTP fetch step 5.3
-            const locationURL = location === null ? null : resolve_url(request.url, location);
+            let locationURL = null;
+            try {
+                locationURL = location === null ? null : new URL$1(location, request.url).toString();
+            } catch (err) {
+                // error here can only be invalid URL in Location: header
+                // do not throw when options.redirect == manual
+                // let the user extract the errorneous redirect URL
+                if (request.redirect !== 'manual') {
+                    reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));
+                    finalize();
+                    return;
+                }
+            }
 
             // HTTP fetch step 5.5
             switch (request.redirect) {
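
This hunk replaces resolve_url (Node's url.resolve) with the WHATWG URL constructor, so the Location header is both resolved against the request URL and validated; an unparseable Location is now rejected as an 'invalid-redirect' FetchError unless redirect is 'manual'. A small sketch of that behavior, with hypothetical values:

// Sketch of the new locationURL computation (hypothetical request URL and Location values).
const { URL } = require('url');

const requestUrl = 'https://example.com/a/b';

// Relative Location values resolve against the request URL, much as url.resolve did:
console.log(new URL('/login', requestUrl).toString()); // https://example.com/login
console.log(new URL('../c', requestUrl).toString());   // https://example.com/c

// A malformed Location now throws, which the patch converts into a FetchError
// unless options.redirect === 'manual':
try {
    new URL('https://', requestUrl);
} catch (err) {
    console.log('invalid redirect URL:', err.message);
}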
@@ -10596,6 +10616,12 @@ function fetch(url, opts) {
                 size: request.size
             };
 
+            if (!isDomainOrSubdomain(request.url, locationURL)) {
+                for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
+                    requestOpts.headers.delete(name);
+                }
+            }
+
             // HTTP-redirect fetch step 9
             if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
                 reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
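
Together, these hunks are the substance of the node-fetch 2.6.7 fix: when a redirect leaves the original domain and its subdomains, the authorization, www-authenticate, cookie, and cookie2 headers are dropped instead of forwarded. A rough end-to-end sketch of that header handling; headersForRedirect is a hypothetical wrapper (not a node-fetch API), a plain Map stands in for node-fetch's Headers class, and the URLs are made up.

// Rough sketch of the post-patch redirect step (simplified; helper copied from the earlier hunk).
const { URL } = require('url');

const isDomainOrSubdomain = (destination, original) => {
    const orig = new URL(original).hostname;
    const dest = new URL(destination).hostname;
    return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);
};

// Hypothetical wrapper: strip credential headers when the redirect is cross-site.
function headersForRedirect(requestUrl, locationURL, headers) {
    if (!isDomainOrSubdomain(requestUrl, locationURL)) {
        for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
            headers.delete(name);
        }
    }
    return headers;
}

const headers = new Map([
    ['authorization', 'Bearer secret-token'],
    ['accept', 'application/json'],
]);

headersForRedirect('https://example.com/api', 'https://evil.test/landing', headers);
console.log([...headers.keys()]); // [ 'accept' ] - the credential headers are not forwarded cross-site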

package-lock.json generated

@@ -15895,9 +15895,9 @@
       "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ=="
     },
     "node-fetch": {
-      "version": "2.6.5",
-      "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.5.tgz",
-      "integrity": "sha512-mmlIVHJEu5rnIxgEgez6b9GgWXbkZj5YZ7fx+2r94a2E+Uirsp6HsPTPlomfdHtpt/B0cdKviwkoaM6pyvUOpQ==",
+      "version": "2.6.7",
+      "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz",
+      "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==",
       "requires": {
         "whatwg-url": "^5.0.0"
       },


@@ -5,4 +5,4 @@ set -e
 src/misc/licensed-download.sh
 echo 'Running: licensed cached'
-_temp/licensed-3.3.1/licensed status
+_temp/licensed-3.6.0/licensed status


@@ -2,23 +2,23 @@
 set -e
-if [ ! -f _temp/licensed-3.3.1.done ]; then
+if [ ! -f _temp/licensed-3.6.0.done ]; then
     echo 'Clearing temp'
-    rm -rf _temp/licensed-3.3.1 || true
+    rm -rf _temp/licensed-3.6.0 || true
     echo 'Downloading licensed'
-    mkdir -p _temp/licensed-3.3.1
-    pushd _temp/licensed-3.3.1
+    mkdir -p _temp/licensed-3.6.0
+    pushd _temp/licensed-3.6.0
     if [[ "$OSTYPE" == "darwin"* ]]; then
-        curl -Lfs -o licensed.tar.gz https://github.com/github/licensed/releases/download/3.3.1/licensed-3.3.1-darwin-x64.tar.gz
+        curl -Lfs -o licensed.tar.gz https://github.com/github/licensed/releases/download/3.6.0/licensed-3.6.0-darwin-x64.tar.gz
     else
-        curl -Lfs -o licensed.tar.gz https://github.com/github/licensed/releases/download/3.3.1/licensed-3.3.1-linux-x64.tar.gz
+        curl -Lfs -o licensed.tar.gz https://github.com/github/licensed/releases/download/3.6.0/licensed-3.6.0-linux-x64.tar.gz
     fi
     echo 'Extracting licenesed'
     tar -xzf licensed.tar.gz
     popd
-    touch _temp/licensed-3.3.1.done
+    touch _temp/licensed-3.6.0.done
 else
     echo 'Licensed already downloaded'
 fi


@@ -5,4 +5,4 @@ set -e
 src/misc/licensed-download.sh
 echo 'Running: licensed cached'
-_temp/licensed-3.3.1/licensed cache
+_temp/licensed-3.6.0/licensed cache