Mirror of https://github.com/DeterminateSystems/magic-nix-cache-action.git
Synced 2024-12-27 01:30:35 +01:00
12272 lines
401 KiB
JavaScript
import * as fs$2 from 'node:fs/promises';
import * as os$2 from 'node:os';
import os__default from 'node:os';
import * as path$1 from 'node:path';
import { spawn } from 'node:child_process';
import { openSync, writeSync, close, createWriteStream } from 'node:fs';
import { pipeline } from 'node:stream/promises';
import { setTimeout as setTimeout$1 } from 'timers/promises';
import require$$0 from 'os';
import require$$1 from 'fs';
import crypto from 'crypto';
import require$$0$2 from 'path';
import require$$1$2 from 'http';
import require$$2$1 from 'https';
import require$$0$6 from 'net';
import require$$1$1 from 'tls';
import require$$0$1, { errorMonitor } from 'events';
import require$$5 from 'assert';
import require$$6, { types } from 'util';
import EventEmitter$2, { EventEmitter as EventEmitter$3 } from 'node:events';
import process$1 from 'node:process';
import { Buffer as Buffer$1 } from 'node:buffer';
import stream$2, { Readable as Readable$1, PassThrough as PassThrough$1, Duplex } from 'node:stream';
import urlLib, { URL as URL$6, URLSearchParams } from 'node:url';
import http$4, { ServerResponse } from 'node:http';
import crypto$1 from 'node:crypto';
import require$$0$4 from 'buffer';
import require$$0$3 from 'stream';
import require$$1$3 from 'zlib';
import { promisify as promisify$1, inspect } from 'node:util';
import net from 'node:net';
import { checkServerIdentity } from 'node:tls';
import https$4 from 'node:https';
import { lookup, V4MAPPED, ALL, ADDRCONFIG, promises } from 'node:dns';
import require$$3 from 'http2';
import require$$0$5 from 'url';

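// Note: the helpers below (commonjsGlobal, getDefaultExportFromCjs, getAugmentedNamespace)
// appear to be the CommonJS-interop shims emitted by the bundler so that CommonJS
// dependencies can be consumed from this single-file ESM build.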
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};

function getDefaultExportFromCjs (x) {
    return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x;
}

function getAugmentedNamespace(n) {
    if (n.__esModule) return n;
    var f = n.default;
    if (typeof f == "function") {
        var a = function a () {
            if (this instanceof a) {
                var args = [null];
                args.push.apply(args, arguments);
                var Ctor = Function.bind.apply(f, args);
                return new Ctor();
            }
            return f.apply(this, arguments);
        };
        a.prototype = f.prototype;
    } else a = {};
    Object.defineProperty(a, '__esModule', {value: true});
    Object.keys(n).forEach(function (k) {
        var d = Object.getOwnPropertyDescriptor(n, k);
        Object.defineProperty(a, k, d.get ? d : {
            enumerable: true,
            get: function () {
                return n[k];
            }
        });
    });
    return a;
}

var core = {};

var command = {};

var utils = {};

// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object.defineProperty(utils, "__esModule", { value: true });
utils.toCommandProperties = utils.toCommandValue = void 0;
/**
 * Sanitizes an input into a string so it can be passed into issueCommand safely
 * @param input input to sanitize into a string
 */
function toCommandValue(input) {
    if (input === null || input === undefined) {
        return '';
    }
    else if (typeof input === 'string' || input instanceof String) {
        return input;
    }
    return JSON.stringify(input);
}
utils.toCommandValue = toCommandValue;
/**
 *
 * @param annotationProperties
 * @returns The command properties to send with the actual annotation command
 * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
 */
function toCommandProperties(annotationProperties) {
    if (!Object.keys(annotationProperties).length) {
        return {};
    }
    return {
        title: annotationProperties.title,
        file: annotationProperties.file,
        line: annotationProperties.startLine,
        endLine: annotationProperties.endLine,
        col: annotationProperties.startColumn,
        endColumn: annotationProperties.endColumn
    };
}
utils.toCommandProperties = toCommandProperties;

var __createBinding$1 = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault$1 = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar$1 = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding$1(result, mod, k);
    __setModuleDefault$1(result, mod);
    return result;
};
Object.defineProperty(command, "__esModule", { value: true });
command.issue = command.issueCommand = void 0;
const os$1 = __importStar$1(require$$0);
const utils_1$1 = utils;
/**
 * Commands
 *
 * Command Format:
 *   ::name key=value,key=value::message
 *
 * Examples:
 *   ::warning::This is the message
 *   ::set-env name=MY_VAR::some value
 */
function issueCommand(command, properties, message) {
    const cmd = new Command(command, properties, message);
    process.stdout.write(cmd.toString() + os$1.EOL);
}
command.issueCommand = issueCommand;
function issue(name, message = '') {
    issueCommand(name, {}, message);
}
command.issue = issue;
const CMD_STRING = '::';
class Command {
    constructor(command, properties, message) {
        if (!command) {
            command = 'missing.command';
        }
        this.command = command;
        this.properties = properties;
        this.message = message;
    }
    toString() {
        let cmdStr = CMD_STRING + this.command;
        if (this.properties && Object.keys(this.properties).length > 0) {
            cmdStr += ' ';
            let first = true;
            for (const key in this.properties) {
                if (this.properties.hasOwnProperty(key)) {
                    const val = this.properties[key];
                    if (val) {
                        if (first) {
                            first = false;
                        }
                        else {
                            cmdStr += ',';
                        }
                        cmdStr += `${key}=${escapeProperty(val)}`;
                    }
                }
            }
        }
        cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
        return cmdStr;
    }
}
function escapeData(s) {
    return utils_1$1.toCommandValue(s)
        .replace(/%/g, '%25')
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A');
}
function escapeProperty(s) {
    return utils_1$1.toCommandValue(s)
        .replace(/%/g, '%25')
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A')
        .replace(/:/g, '%3A')
        .replace(/,/g, '%2C');
}
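// Note: escapeData/escapeProperty percent-encode '%', CR and LF (and, for properties,
// ':' and ',') because workflow commands are parsed from single stdout lines of the form
// ::name key=value,key=value::message; encoding keeps user data from breaking that syntax.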
var fileCommand = {};

const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate

let poolPtr = rnds8Pool.length;
function rng() {
  if (poolPtr > rnds8Pool.length - 16) {
    crypto.randomFillSync(rnds8Pool);
    poolPtr = 0;
  }

  return rnds8Pool.slice(poolPtr, poolPtr += 16);
}
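// Note: rng() hands out 16 bytes at a time from a 256-byte pool and only calls
// crypto.randomFillSync to refill it once fewer than 16 unused bytes remain,
// amortizing the cost of gathering randomness across many UUIDs.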
var REGEX = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;

function validate(uuid) {
  return typeof uuid === 'string' && REGEX.test(uuid);
}

/**
 * Convert array of 16 byte values to UUID string format of the form:
 * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
 */

const byteToHex = [];

for (let i = 0; i < 256; ++i) {
  byteToHex.push((i + 0x100).toString(16).substr(1));
}

function stringify(arr, offset = 0) {
  // Note: Be careful editing this code! It's been tuned for performance
  // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
  const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one
  // of the following:
  // - One or more input array values don't map to a hex octet (leading to
  // "undefined" in the uuid)
  // - Invalid input values for the RFC `version` or `variant` fields

  if (!validate(uuid)) {
    throw TypeError('Stringified UUID is invalid');
  }

  return uuid;
}

//
// Inspired by https://github.com/LiosK/UUID.js
// and http://docs.python.org/library/uuid.html

let _nodeId;

let _clockseq; // Previous uuid creation time


let _lastMSecs = 0;
let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details

function v1(options, buf, offset) {
  let i = buf && offset || 0;
  const b = buf || new Array(16);
  options = options || {};
  let node = options.node || _nodeId;
  let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not
  // specified. We do this lazily to minimize issues related to insufficient
  // system entropy. See #189

  if (node == null || clockseq == null) {
    const seedBytes = options.random || (options.rng || rng)();

    if (node == null) {
      // Per 4.5, create a 48-bit node id, (47 random bits + multicast bit = 1)
      node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];
    }

    if (clockseq == null) {
      // Per 4.2.2, randomize (14 bit) clockseq
      clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
    }
  } // UUID timestamps are 100 nano-second units since the Gregorian epoch,
  // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
  // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
  // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.


  let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock
  // cycle to simulate higher resolution clock

  let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)

  const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression

  if (dt < 0 && options.clockseq === undefined) {
    clockseq = clockseq + 1 & 0x3fff;
  } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
  // time interval


  if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
    nsecs = 0;
  } // Per 4.2.1.2 Throw error if too many uuids are requested


  if (nsecs >= 10000) {
    throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");
  }

  _lastMSecs = msecs;
  _lastNSecs = nsecs;
  _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch

  msecs += 12219292800000; // `time_low`

  const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
  b[i++] = tl >>> 24 & 0xff;
  b[i++] = tl >>> 16 & 0xff;
  b[i++] = tl >>> 8 & 0xff;
  b[i++] = tl & 0xff; // `time_mid`

  const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;
  b[i++] = tmh >>> 8 & 0xff;
  b[i++] = tmh & 0xff; // `time_high_and_version`

  b[i++] = tmh >>> 24 & 0xf | 0x10; // include version

  b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)

  b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`

  b[i++] = clockseq & 0xff; // `node`

  for (let n = 0; n < 6; ++n) {
    b[i + n] = node[n];
  }

  return buf || stringify(b);
}

function parse(uuid) {
  if (!validate(uuid)) {
    throw TypeError('Invalid UUID');
  }

  let v;
  const arr = new Uint8Array(16); // Parse ########-....-....-....-............

  arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;
  arr[1] = v >>> 16 & 0xff;
  arr[2] = v >>> 8 & 0xff;
  arr[3] = v & 0xff; // Parse ........-####-....-....-............

  arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;
  arr[5] = v & 0xff; // Parse ........-....-####-....-............

  arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;
  arr[7] = v & 0xff; // Parse ........-....-....-####-............

  arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;
  arr[9] = v & 0xff; // Parse ........-....-....-....-############
  // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)

  arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;
  arr[11] = v / 0x100000000 & 0xff;
  arr[12] = v >>> 24 & 0xff;
  arr[13] = v >>> 16 & 0xff;
  arr[14] = v >>> 8 & 0xff;
  arr[15] = v & 0xff;
  return arr;
}

function stringToBytes(str) {
  str = unescape(encodeURIComponent(str)); // UTF8 escape

  const bytes = [];

  for (let i = 0; i < str.length; ++i) {
    bytes.push(str.charCodeAt(i));
  }

  return bytes;
}

const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
const URL$5 = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';
function v35 (name, version, hashfunc) {
  function generateUUID(value, namespace, buf, offset) {
    if (typeof value === 'string') {
      value = stringToBytes(value);
    }

    if (typeof namespace === 'string') {
      namespace = parse(namespace);
    }

    if (namespace.length !== 16) {
      throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');
    } // Compute hash of namespace and value, Per 4.3
    // Future: Use spread syntax when supported on all platforms, e.g. `bytes =
    // hashfunc([...namespace, ... value])`


    let bytes = new Uint8Array(16 + value.length);
    bytes.set(namespace);
    bytes.set(value, namespace.length);
    bytes = hashfunc(bytes);
    bytes[6] = bytes[6] & 0x0f | version;
    bytes[8] = bytes[8] & 0x3f | 0x80;

    if (buf) {
      offset = offset || 0;

      for (let i = 0; i < 16; ++i) {
        buf[offset + i] = bytes[i];
      }

      return buf;
    }

    return stringify(bytes);
  } // Function#name is not settable on some platforms (#270)


  try {
    generateUUID.name = name; // eslint-disable-next-line no-empty
  } catch (err) {} // For CommonJS default export support


  generateUUID.DNS = DNS;
  generateUUID.URL = URL$5;
  return generateUUID;
}

function md5(bytes) {
  if (Array.isArray(bytes)) {
    bytes = Buffer.from(bytes);
  } else if (typeof bytes === 'string') {
    bytes = Buffer.from(bytes, 'utf8');
  }

  return crypto.createHash('md5').update(bytes).digest();
}

const v3 = v35('v3', 0x30, md5);
var v3$1 = v3;

function v4(options, buf, offset) {
  options = options || {};
  const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`

  rnds[6] = rnds[6] & 0x0f | 0x40;
  rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided

  if (buf) {
    offset = offset || 0;

    for (let i = 0; i < 16; ++i) {
      buf[offset + i] = rnds[i];
    }

    return buf;
  }

  return stringify(rnds);
}

function sha1(bytes) {
  if (Array.isArray(bytes)) {
    bytes = Buffer.from(bytes);
  } else if (typeof bytes === 'string') {
    bytes = Buffer.from(bytes, 'utf8');
  }

  return crypto.createHash('sha1').update(bytes).digest();
}

const v5 = v35('v5', 0x50, sha1);
var v5$1 = v5;

var nil = '00000000-0000-0000-0000-000000000000';

function version(uuid) {
  if (!validate(uuid)) {
    throw TypeError('Invalid UUID');
  }

  return parseInt(uuid.substr(14, 1), 16);
}

var esmNode = /*#__PURE__*/Object.freeze({
  __proto__: null,
  NIL: nil,
  parse: parse,
  stringify: stringify,
  v1: v1,
  v3: v3$1,
  v4: v4,
  v5: v5$1,
  validate: validate,
  version: version
});

var require$$2 = /*@__PURE__*/getAugmentedNamespace(esmNode);

// For internal use, subject to change.
var __createBinding = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(fileCommand, "__esModule", { value: true });
fileCommand.prepareKeyValueMessage = fileCommand.issueFileCommand = void 0;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs$1 = __importStar(require$$1);
const os = __importStar(require$$0);
const uuid_1 = require$$2;
const utils_1 = utils;
function issueFileCommand(command, message) {
    const filePath = process.env[`GITHUB_${command}`];
    if (!filePath) {
        throw new Error(`Unable to find environment variable for file command ${command}`);
    }
    if (!fs$1.existsSync(filePath)) {
        throw new Error(`Missing file at path: ${filePath}`);
    }
    fs$1.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
        encoding: 'utf8'
    });
}
fileCommand.issueFileCommand = issueFileCommand;
function prepareKeyValueMessage(key, value) {
    const delimiter = `ghadelimiter_${uuid_1.v4()}`;
    const convertedValue = utils_1.toCommandValue(value);
    // These should realistically never happen, but just in case someone finds a
    // way to exploit uuid generation let's not allow keys or values that contain
    // the delimiter.
    if (key.includes(delimiter)) {
        throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
    }
    if (convertedValue.includes(delimiter)) {
        throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
    }
    return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
}
fileCommand.prepareKeyValueMessage = prepareKeyValueMessage;
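// Note: prepareKeyValueMessage emits the heredoc-style "key<<delimiter / value / delimiter"
// form that the GITHUB_ENV and GITHUB_OUTPUT file commands expect, using a random
// ghadelimiter_<uuid> marker so multi-line values cannot terminate the block early.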
var oidcUtils = {};

var lib = {};

var proxy = {};

Object.defineProperty(proxy, "__esModule", { value: true });
proxy.checkBypass = proxy.getProxyUrl = void 0;
function getProxyUrl(reqUrl) {
    const usingSsl = reqUrl.protocol === 'https:';
    if (checkBypass(reqUrl)) {
        return undefined;
    }
    const proxyVar = (() => {
        if (usingSsl) {
            return process.env['https_proxy'] || process.env['HTTPS_PROXY'];
        }
        else {
            return process.env['http_proxy'] || process.env['HTTP_PROXY'];
        }
    })();
    if (proxyVar) {
        return new URL(proxyVar);
    }
    else {
        return undefined;
    }
}
proxy.getProxyUrl = getProxyUrl;
function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    const reqHost = reqUrl.hostname;
    if (isLoopbackAddress(reqHost)) {
        return true;
    }
    const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    if (!noProxy) {
        return false;
    }
    // Determine the request port
    let reqPort;
    if (reqUrl.port) {
        reqPort = Number(reqUrl.port);
    }
    else if (reqUrl.protocol === 'http:') {
        reqPort = 80;
    }
    else if (reqUrl.protocol === 'https:') {
        reqPort = 443;
    }
    // Format the request hostname and hostname with port
    const upperReqHosts = [reqUrl.hostname.toUpperCase()];
    if (typeof reqPort === 'number') {
        upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
    }
    // Compare request host against noproxy
    for (const upperNoProxyItem of noProxy
        .split(',')
        .map(x => x.trim().toUpperCase())
        .filter(x => x)) {
        if (upperNoProxyItem === '*' ||
            upperReqHosts.some(x => x === upperNoProxyItem ||
                x.endsWith(`.${upperNoProxyItem}`) ||
                (upperNoProxyItem.startsWith('.') &&
                    x.endsWith(`${upperNoProxyItem}`)))) {
            return true;
        }
    }
    return false;
}
proxy.checkBypass = checkBypass;
function isLoopbackAddress(host) {
    const hostLower = host.toLowerCase();
    return (hostLower === 'localhost' ||
        hostLower.startsWith('127.') ||
        hostLower.startsWith('[::1]') ||
        hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
}
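// Note: checkBypass treats NO_PROXY entries case-insensitively and bypasses the proxy when
// an entry is '*', matches the request host exactly, matches "host:port", or is a domain
// suffix (with or without a leading dot); loopback hosts always bypass the proxy.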
var tunnel$1 = {};

var tls$4 = require$$1$1;
var http$3 = require$$1$2;
var https$3 = require$$2$1;
var events$1 = require$$0$1;
var util = require$$6;


tunnel$1.httpOverHttp = httpOverHttp;
tunnel$1.httpsOverHttp = httpsOverHttp;
tunnel$1.httpOverHttps = httpOverHttps;
tunnel$1.httpsOverHttps = httpsOverHttps;


function httpOverHttp(options) {
  var agent = new TunnelingAgent(options);
  agent.request = http$3.request;
  return agent;
}

function httpsOverHttp(options) {
  var agent = new TunnelingAgent(options);
  agent.request = http$3.request;
  agent.createSocket = createSecureSocket;
  agent.defaultPort = 443;
  return agent;
}

function httpOverHttps(options) {
  var agent = new TunnelingAgent(options);
  agent.request = https$3.request;
  return agent;
}

function httpsOverHttps(options) {
  var agent = new TunnelingAgent(options);
  agent.request = https$3.request;
  agent.createSocket = createSecureSocket;
  agent.defaultPort = 443;
  return agent;
}


function TunnelingAgent(options) {
  var self = this;
  self.options = options || {};
  self.proxyOptions = self.options.proxy || {};
  self.maxSockets = self.options.maxSockets || http$3.Agent.defaultMaxSockets;
  self.requests = [];
  self.sockets = [];

  self.on('free', function onFree(socket, host, port, localAddress) {
    var options = toOptions(host, port, localAddress);
    for (var i = 0, len = self.requests.length; i < len; ++i) {
      var pending = self.requests[i];
      if (pending.host === options.host && pending.port === options.port) {
        // Detect the request to connect same origin server,
        // reuse the connection.
        self.requests.splice(i, 1);
        pending.request.onSocket(socket);
        return;
      }
    }
    socket.destroy();
    self.removeSocket(socket);
  });
}
util.inherits(TunnelingAgent, events$1.EventEmitter);

TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
  var self = this;
  var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));

  if (self.sockets.length >= this.maxSockets) {
    // We are over limit so we'll add it to the queue.
    self.requests.push(options);
    return;
  }

  // If we are under maxSockets create a new one.
  self.createSocket(options, function(socket) {
    socket.on('free', onFree);
    socket.on('close', onCloseOrRemove);
    socket.on('agentRemove', onCloseOrRemove);
    req.onSocket(socket);

    function onFree() {
      self.emit('free', socket, options);
    }

    function onCloseOrRemove(err) {
      self.removeSocket(socket);
      socket.removeListener('free', onFree);
      socket.removeListener('close', onCloseOrRemove);
      socket.removeListener('agentRemove', onCloseOrRemove);
    }
  });
};

TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
  var self = this;
  var placeholder = {};
  self.sockets.push(placeholder);

  var connectOptions = mergeOptions({}, self.proxyOptions, {
    method: 'CONNECT',
    path: options.host + ':' + options.port,
    agent: false,
    headers: {
      host: options.host + ':' + options.port
    }
  });
  if (options.localAddress) {
    connectOptions.localAddress = options.localAddress;
  }
  if (connectOptions.proxyAuth) {
    connectOptions.headers = connectOptions.headers || {};
    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
        new Buffer(connectOptions.proxyAuth).toString('base64');
  }

  debug('making CONNECT request');
  var connectReq = self.request(connectOptions);
  connectReq.useChunkedEncodingByDefault = false; // for v0.6
  connectReq.once('response', onResponse); // for v0.6
  connectReq.once('upgrade', onUpgrade); // for v0.6
  connectReq.once('connect', onConnect); // for v0.7 or later
  connectReq.once('error', onError);
  connectReq.end();

  function onResponse(res) {
    // Very hacky. This is necessary to avoid http-parser leaks.
    res.upgrade = true;
  }

  function onUpgrade(res, socket, head) {
    // Hacky.
    process.nextTick(function() {
      onConnect(res, socket, head);
    });
  }

  function onConnect(res, socket, head) {
    connectReq.removeAllListeners();
    socket.removeAllListeners();

    if (res.statusCode !== 200) {
      debug('tunneling socket could not be established, statusCode=%d',
        res.statusCode);
      socket.destroy();
      var error = new Error('tunneling socket could not be established, ' +
        'statusCode=' + res.statusCode);
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    if (head.length > 0) {
      debug('got illegal response body from proxy');
      socket.destroy();
      var error = new Error('got illegal response body from proxy');
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    debug('tunneling connection has established');
    self.sockets[self.sockets.indexOf(placeholder)] = socket;
    return cb(socket);
  }

  function onError(cause) {
    connectReq.removeAllListeners();

    debug('tunneling socket could not be established, cause=%s\n',
      cause.message, cause.stack);
    var error = new Error('tunneling socket could not be established, ' +
      'cause=' + cause.message);
    error.code = 'ECONNRESET';
    options.request.emit('error', error);
    self.removeSocket(placeholder);
  }
};

TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
  var pos = this.sockets.indexOf(socket);
  if (pos === -1) {
    return;
  }
  this.sockets.splice(pos, 1);

  var pending = this.requests.shift();
  if (pending) {
    // If we have pending requests and a socket gets closed a new one
    // needs to be created to take over in the pool for the one that closed.
    this.createSocket(pending, function(socket) {
      pending.request.onSocket(socket);
    });
  }
};

function createSecureSocket(options, cb) {
  var self = this;
  TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
    var hostHeader = options.request.getHeader('host');
    var tlsOptions = mergeOptions({}, self.options, {
      socket: socket,
      servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
    });

    // 0 is dummy port for v0.6
    var secureSocket = tls$4.connect(0, tlsOptions);
    self.sockets[self.sockets.indexOf(socket)] = secureSocket;
    cb(secureSocket);
  });
}


function toOptions(host, port, localAddress) {
  if (typeof host === 'string') { // since v0.10
    return {
      host: host,
      port: port,
      localAddress: localAddress
    };
  }
  return host; // for v0.11 or later
}

function mergeOptions(target) {
  for (var i = 1, len = arguments.length; i < len; ++i) {
    var overrides = arguments[i];
    if (typeof overrides === 'object') {
      var keys = Object.keys(overrides);
      for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {
        var k = keys[j];
        if (overrides[k] !== undefined) {
          target[k] = overrides[k];
        }
      }
    }
  }
  return target;
}


var debug;
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
  debug = function() {
    var args = Array.prototype.slice.call(arguments);
    if (typeof args[0] === 'string') {
      args[0] = 'TUNNEL: ' + args[0];
    } else {
      args.unshift('TUNNEL:');
    }
    console.error.apply(console, args);
  };
} else {
  debug = function() {};
}
tunnel$1.debug = debug; // for test

var tunnel = tunnel$1;

(function (exports) {
/* eslint-disable @typescript-eslint/no-explicit-any */
var __createBinding = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (commonjsGlobal && commonjsGlobal.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
const http = __importStar(require$$1$2);
const https = __importStar(require$$2$1);
const pm = __importStar(proxy);
const tunnel$1 = __importStar(tunnel);
var HttpCodes;
(function (HttpCodes) {
    HttpCodes[HttpCodes["OK"] = 200] = "OK";
    HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
    HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
    HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
    HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
    HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
    HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
    HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
    HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
    HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
    HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
    HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
    HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
    HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
    HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
    HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
    HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
    HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
    HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
    HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
    HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
    HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
    HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
    HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
    HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
    HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
    HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
var Headers;
(function (Headers) {
    Headers["Accept"] = "accept";
    Headers["ContentType"] = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
var MediaTypes;
(function (MediaTypes) {
    MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 */
function getProxyUrl(serverUrl) {
    const proxyUrl = pm.getProxyUrl(new URL(serverUrl));
    return proxyUrl ? proxyUrl.href : '';
}
exports.getProxyUrl = getProxyUrl;
const HttpRedirectCodes = [
    HttpCodes.MovedPermanently,
    HttpCodes.ResourceMoved,
    HttpCodes.SeeOther,
    HttpCodes.TemporaryRedirect,
    HttpCodes.PermanentRedirect
];
const HttpResponseRetryCodes = [
    HttpCodes.BadGateway,
    HttpCodes.ServiceUnavailable,
    HttpCodes.GatewayTimeout
];
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
class HttpClientError extends Error {
    constructor(message, statusCode) {
        super(message);
        this.name = 'HttpClientError';
        this.statusCode = statusCode;
        Object.setPrototypeOf(this, HttpClientError.prototype);
    }
}
exports.HttpClientError = HttpClientError;
class HttpClientResponse {
    constructor(message) {
        this.message = message;
    }
    readBody() {
        return __awaiter(this, void 0, void 0, function* () {
            return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
                let output = Buffer.alloc(0);
                this.message.on('data', (chunk) => {
                    output = Buffer.concat([output, chunk]);
                });
                this.message.on('end', () => {
                    resolve(output.toString());
                });
            }));
        });
    }
}
exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) {
    const parsedUrl = new URL(requestUrl);
    return parsedUrl.protocol === 'https:';
}
exports.isHttps = isHttps;
class HttpClient {
    constructor(userAgent, handlers, requestOptions) {
        this._ignoreSslError = false;
        this._allowRedirects = true;
        this._allowRedirectDowngrade = false;
        this._maxRedirects = 50;
        this._allowRetries = false;
        this._maxRetries = 1;
        this._keepAlive = false;
        this._disposed = false;
        this.userAgent = userAgent;
        this.handlers = handlers || [];
        this.requestOptions = requestOptions;
        if (requestOptions) {
            if (requestOptions.ignoreSslError != null) {
                this._ignoreSslError = requestOptions.ignoreSslError;
            }
            this._socketTimeout = requestOptions.socketTimeout;
            if (requestOptions.allowRedirects != null) {
                this._allowRedirects = requestOptions.allowRedirects;
            }
            if (requestOptions.allowRedirectDowngrade != null) {
                this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
            }
            if (requestOptions.maxRedirects != null) {
                this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
            }
            if (requestOptions.keepAlive != null) {
                this._keepAlive = requestOptions.keepAlive;
            }
            if (requestOptions.allowRetries != null) {
                this._allowRetries = requestOptions.allowRetries;
            }
            if (requestOptions.maxRetries != null) {
                this._maxRetries = requestOptions.maxRetries;
            }
        }
    }
    options(requestUrl, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
        });
    }
    get(requestUrl, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('GET', requestUrl, null, additionalHeaders || {});
        });
    }
    del(requestUrl, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('DELETE', requestUrl, null, additionalHeaders || {});
        });
    }
    post(requestUrl, data, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('POST', requestUrl, data, additionalHeaders || {});
        });
    }
    patch(requestUrl, data, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('PATCH', requestUrl, data, additionalHeaders || {});
        });
    }
    put(requestUrl, data, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('PUT', requestUrl, data, additionalHeaders || {});
        });
    }
    head(requestUrl, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('HEAD', requestUrl, null, additionalHeaders || {});
        });
    }
    sendStream(verb, requestUrl, stream, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request(verb, requestUrl, stream, additionalHeaders);
        });
    }
    /**
     * Gets a typed object from an endpoint
     * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
     */
    getJson(requestUrl, additionalHeaders = {}) {
        return __awaiter(this, void 0, void 0, function* () {
            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
            const res = yield this.get(requestUrl, additionalHeaders);
            return this._processResponse(res, this.requestOptions);
        });
    }
    postJson(requestUrl, obj, additionalHeaders = {}) {
        return __awaiter(this, void 0, void 0, function* () {
            const data = JSON.stringify(obj, null, 2);
            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
            additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
            const res = yield this.post(requestUrl, data, additionalHeaders);
            return this._processResponse(res, this.requestOptions);
        });
    }
    putJson(requestUrl, obj, additionalHeaders = {}) {
        return __awaiter(this, void 0, void 0, function* () {
            const data = JSON.stringify(obj, null, 2);
            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
            additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
            const res = yield this.put(requestUrl, data, additionalHeaders);
            return this._processResponse(res, this.requestOptions);
        });
    }
    patchJson(requestUrl, obj, additionalHeaders = {}) {
        return __awaiter(this, void 0, void 0, function* () {
            const data = JSON.stringify(obj, null, 2);
            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
            additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
            const res = yield this.patch(requestUrl, data, additionalHeaders);
            return this._processResponse(res, this.requestOptions);
        });
    }
    /**
     * Makes a raw http request.
     * All other methods such as get, post, patch, and request ultimately call this.
     * Prefer get, del, post and patch
     */
    request(verb, requestUrl, data, headers) {
        return __awaiter(this, void 0, void 0, function* () {
            if (this._disposed) {
                throw new Error('Client has already been disposed.');
            }
            const parsedUrl = new URL(requestUrl);
            let info = this._prepareRequest(verb, parsedUrl, headers);
            // Only perform retries on reads since writes may not be idempotent.
            const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)
                ? this._maxRetries + 1
                : 1;
            let numTries = 0;
            let response;
            do {
                response = yield this.requestRaw(info, data);
                // Check if it's an authentication challenge
                if (response &&
                    response.message &&
                    response.message.statusCode === HttpCodes.Unauthorized) {
                    let authenticationHandler;
                    for (const handler of this.handlers) {
                        if (handler.canHandleAuthentication(response)) {
                            authenticationHandler = handler;
                            break;
                        }
                    }
                    if (authenticationHandler) {
                        return authenticationHandler.handleAuthentication(this, info, data);
                    }
                    else {
                        // We have received an unauthorized response but have no handlers to handle it.
                        // Let the response return to the caller.
                        return response;
                    }
                }
                let redirectsRemaining = this._maxRedirects;
                while (response.message.statusCode &&
                    HttpRedirectCodes.includes(response.message.statusCode) &&
                    this._allowRedirects &&
                    redirectsRemaining > 0) {
                    const redirectUrl = response.message.headers['location'];
                    if (!redirectUrl) {
                        // if there's no location to redirect to, we won't
                        break;
                    }
                    const parsedRedirectUrl = new URL(redirectUrl);
                    if (parsedUrl.protocol === 'https:' &&
                        parsedUrl.protocol !== parsedRedirectUrl.protocol &&
                        !this._allowRedirectDowngrade) {
                        throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
                    }
                    // we need to finish reading the response before reassigning response
                    // which will leak the open socket.
                    yield response.readBody();
                    // strip authorization header if redirected to a different hostname
                    if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
                        for (const header in headers) {
                            // header names are case insensitive
                            if (header.toLowerCase() === 'authorization') {
                                delete headers[header];
                            }
                        }
                    }
                    // let's make the request with the new redirectUrl
                    info = this._prepareRequest(verb, parsedRedirectUrl, headers);
                    response = yield this.requestRaw(info, data);
                    redirectsRemaining--;
                }
                if (!response.message.statusCode ||
                    !HttpResponseRetryCodes.includes(response.message.statusCode)) {
                    // If not a retry code, return immediately instead of retrying
                    return response;
                }
                numTries += 1;
                if (numTries < maxTries) {
                    yield response.readBody();
                    yield this._performExponentialBackoff(numTries);
                }
            } while (numTries < maxTries);
            return response;
        });
    }
    /**
     * Needs to be called if keepAlive is set to true in request options.
     */
    dispose() {
        if (this._agent) {
            this._agent.destroy();
        }
        this._disposed = true;
    }
    /**
     * Raw request.
     * @param info
     * @param data
     */
    requestRaw(info, data) {
        return __awaiter(this, void 0, void 0, function* () {
            return new Promise((resolve, reject) => {
                function callbackForResult(err, res) {
                    if (err) {
                        reject(err);
                    }
                    else if (!res) {
                        // If `err` is not passed, then `res` must be passed.
                        reject(new Error('Unknown error'));
                    }
                    else {
                        resolve(res);
                    }
                }
                this.requestRawWithCallback(info, data, callbackForResult);
            });
        });
    }
    /**
     * Raw request with callback.
     * @param info
     * @param data
     * @param onResult
     */
    requestRawWithCallback(info, data, onResult) {
        if (typeof data === 'string') {
            if (!info.options.headers) {
                info.options.headers = {};
            }
            info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
        }
        let callbackCalled = false;
        function handleResult(err, res) {
            if (!callbackCalled) {
                callbackCalled = true;
                onResult(err, res);
            }
        }
        const req = info.httpModule.request(info.options, (msg) => {
            const res = new HttpClientResponse(msg);
            handleResult(undefined, res);
        });
        let socket;
        req.on('socket', sock => {
            socket = sock;
        });
        // If we ever get disconnected, we want the socket to timeout eventually
        req.setTimeout(this._socketTimeout || 3 * 60000, () => {
            if (socket) {
                socket.end();
            }
            handleResult(new Error(`Request timeout: ${info.options.path}`));
        });
        req.on('error', function (err) {
            // err has statusCode property
            // res should have headers
            handleResult(err);
        });
        if (data && typeof data === 'string') {
            req.write(data, 'utf8');
        }
        if (data && typeof data !== 'string') {
            data.on('close', function () {
                req.end();
            });
            data.pipe(req);
        }
        else {
            req.end();
        }
    }
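    // Note: handleResult above guards onResult behind the callbackCalled flag so the
    // callback fires only once even if both the socket timeout and the 'error' event trigger.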
    /**
     * Gets an http agent. This function is useful when you need an http agent that handles
     * routing through a proxy server - depending upon the url and proxy environment variables.
     * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
     */
    getAgent(serverUrl) {
        const parsedUrl = new URL(serverUrl);
        return this._getAgent(parsedUrl);
    }
    _prepareRequest(method, requestUrl, headers) {
        const info = {};
        info.parsedUrl = requestUrl;
        const usingSsl = info.parsedUrl.protocol === 'https:';
        info.httpModule = usingSsl ? https : http;
        const defaultPort = usingSsl ? 443 : 80;
        info.options = {};
        info.options.host = info.parsedUrl.hostname;
        info.options.port = info.parsedUrl.port
            ? parseInt(info.parsedUrl.port)
            : defaultPort;
        info.options.path =
            (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
        info.options.method = method;
        info.options.headers = this._mergeHeaders(headers);
        if (this.userAgent != null) {
            info.options.headers['user-agent'] = this.userAgent;
        }
        info.options.agent = this._getAgent(info.parsedUrl);
        // gives handlers an opportunity to participate
        if (this.handlers) {
            for (const handler of this.handlers) {
                handler.prepareRequest(info.options);
            }
        }
        return info;
    }
    _mergeHeaders(headers) {
        if (this.requestOptions && this.requestOptions.headers) {
            return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));
        }
        return lowercaseKeys(headers || {});
    }
    _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
        let clientHeader;
        if (this.requestOptions && this.requestOptions.headers) {
            clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
        }
        return additionalHeaders[header] || clientHeader || _default;
    }
    _getAgent(parsedUrl) {
        let agent;
        const proxyUrl = pm.getProxyUrl(parsedUrl);
        const useProxy = proxyUrl && proxyUrl.hostname;
        if (this._keepAlive && useProxy) {
            agent = this._proxyAgent;
        }
        if (this._keepAlive && !useProxy) {
            agent = this._agent;
        }
        // if agent is already assigned use that agent.
        if (agent) {
            return agent;
        }
        const usingSsl = parsedUrl.protocol === 'https:';
        let maxSockets = 100;
        if (this.requestOptions) {
            maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
        }
        // This is `useProxy` again, but we need to check `proxyUrl` directly for TypeScript's flow analysis.
        if (proxyUrl && proxyUrl.hostname) {
            const agentOptions = {
                maxSockets,
                keepAlive: this._keepAlive,
                proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {
                    proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
                })), { host: proxyUrl.hostname, port: proxyUrl.port })
            };
            let tunnelAgent;
            const overHttps = proxyUrl.protocol === 'https:';
            if (usingSsl) {
                tunnelAgent = overHttps ? tunnel$1.httpsOverHttps : tunnel$1.httpsOverHttp;
            }
            else {
                tunnelAgent = overHttps ? tunnel$1.httpOverHttps : tunnel$1.httpOverHttp;
            }
            agent = tunnelAgent(agentOptions);
            this._proxyAgent = agent;
        }
        // if reusing agent across request and tunneling agent isn't assigned create a new agent
        if (this._keepAlive && !agent) {
            const options = { keepAlive: this._keepAlive, maxSockets };
            agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
            this._agent = agent;
        }
        // if not using private agent and tunnel agent isn't setup then use global agent
        if (!agent) {
            agent = usingSsl ? https.globalAgent : http.globalAgent;
        }
        if (usingSsl && this._ignoreSslError) {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
            // we have to cast it to any and change it directly
            agent.options = Object.assign(agent.options || {}, {
                rejectUnauthorized: false
            });
        }
        return agent;
    }
    _performExponentialBackoff(retryNumber) {
        return __awaiter(this, void 0, void 0, function* () {
            retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
            const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
            return new Promise(resolve => setTimeout(() => resolve(), ms));
        });
    }
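    // Note: the retry delay is ExponentialBackoffTimeSlice * 2^retryNumber milliseconds,
    // with retryNumber capped at ExponentialBackoffCeiling (10), so waits top out around 5.12 s.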
|
||
|
_processResponse(res, options) {
|
||
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
|
||
|
const statusCode = res.message.statusCode || 0;
|
||
|
const response = {
|
||
|
statusCode,
|
||
|
result: null,
|
||
|
headers: {}
|
||
|
};
|
||
|
// not found leads to null obj returned
|
||
|
if (statusCode === HttpCodes.NotFound) {
|
||
|
resolve(response);
|
||
|
}
|
||
|
// get the result from the body
|
||
|
function dateTimeDeserializer(key, value) {
|
||
|
if (typeof value === 'string') {
|
||
|
const a = new Date(value);
|
||
|
if (!isNaN(a.valueOf())) {
|
||
|
return a;
|
||
|
}
|
||
|
}
|
||
|
return value;
|
||
|
}
|
||
|
let obj;
|
||
|
let contents;
|
||
|
try {
|
||
|
contents = yield res.readBody();
|
||
|
if (contents && contents.length > 0) {
|
||
|
if (options && options.deserializeDates) {
|
||
|
obj = JSON.parse(contents, dateTimeDeserializer);
|
||
|
}
|
||
|
else {
|
||
|
obj = JSON.parse(contents);
|
||
|
}
|
||
|
response.result = obj;
|
||
|
}
|
||
|
response.headers = res.message.headers;
|
||
|
}
|
||
|
catch (err) {
|
||
|
// Invalid resource (contents not json); leaving result obj null
|
||
|
}
|
||
|
// note that 3xx redirects are handled by the http layer.
|
||
|
if (statusCode > 299) {
|
||
|
let msg;
|
||
|
// if exception/error in body, attempt to get better error
|
||
|
if (obj && obj.message) {
|
||
|
msg = obj.message;
|
||
|
}
|
||
|
else if (contents && contents.length > 0) {
|
||
|
// it may be the case that the exception is in the body message as string
|
||
|
msg = contents;
|
||
|
}
|
||
|
else {
|
||
|
msg = `Failed request: (${statusCode})`;
|
||
|
}
|
||
|
const err = new HttpClientError(msg, statusCode);
|
||
|
err.result = response.result;
|
||
|
reject(err);
|
||
|
}
|
||
|
else {
|
||
|
resolve(response);
|
||
|
}
|
||
|
}));
|
||
|
});
|
||
|
}
|
||
|
}
|
||
|
exports.HttpClient = HttpClient;
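// Illustrative sketch (hypothetical helper, not part of the upstream http-client sources):
// _performExponentialBackoff above waits ExponentialBackoffTimeSlice * 2^retryNumber milliseconds,
// with retryNumber capped at ExponentialBackoffCeiling; both constants are defined earlier in this
// module. This uncalled helper only mirrors that delay calculation.
function exampleBackoffDelayMs(retryNumber) {
    const capped = Math.min(ExponentialBackoffCeiling, retryNumber);
    return ExponentialBackoffTimeSlice * Math.pow(2, capped);
}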
|
||
|
const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
|
||
|
|
||
|
} (lib));
|
||
|
|
||
|
var auth = {};
|
||
|
|
||
|
var __awaiter = (commonjsGlobal && commonjsGlobal.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
|
});
|
||
|
};
|
||
|
Object.defineProperty(auth, "__esModule", { value: true });
|
||
|
auth.PersonalAccessTokenCredentialHandler = auth.BearerCredentialHandler = auth.BasicCredentialHandler = void 0;
|
||
|
class BasicCredentialHandler {
|
||
|
constructor(username, password) {
|
||
|
this.username = username;
|
||
|
this.password = password;
|
||
|
}
|
||
|
prepareRequest(options) {
|
||
|
if (!options.headers) {
|
||
|
throw Error('The request has no headers');
|
||
|
}
|
||
|
options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;
|
||
|
}
|
||
|
// This handler cannot handle 401
|
||
|
canHandleAuthentication() {
|
||
|
return false;
|
||
|
}
|
||
|
handleAuthentication() {
|
||
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
throw new Error('not implemented');
|
||
|
});
|
||
|
}
|
||
|
}
|
||
|
auth.BasicCredentialHandler = BasicCredentialHandler;
|
||
|
class BearerCredentialHandler {
|
||
|
constructor(token) {
|
||
|
this.token = token;
|
||
|
}
|
||
|
// currently implements pre-authorization
|
||
|
// TODO: support preAuth = false where it hooks on 401
|
||
|
prepareRequest(options) {
|
||
|
if (!options.headers) {
|
||
|
throw Error('The request has no headers');
|
||
|
}
|
||
|
options.headers['Authorization'] = `Bearer ${this.token}`;
|
||
|
}
|
||
|
// This handler cannot handle 401
|
||
|
canHandleAuthentication() {
|
||
|
return false;
|
||
|
}
|
||
|
handleAuthentication() {
|
||
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
throw new Error('not implemented');
|
||
|
});
|
||
|
}
|
||
|
}
|
||
|
auth.BearerCredentialHandler = BearerCredentialHandler;
|
||
|
class PersonalAccessTokenCredentialHandler {
|
||
|
constructor(token) {
|
||
|
this.token = token;
|
||
|
}
|
||
|
// currently implements pre-authorization
|
||
|
// TODO: support preAuth = false where it hooks on 401
|
||
|
prepareRequest(options) {
|
||
|
if (!options.headers) {
|
||
|
throw Error('The request has no headers');
|
||
|
}
|
||
|
options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;
|
||
|
}
|
||
|
// This handler cannot handle 401
|
||
|
canHandleAuthentication() {
|
||
|
return false;
|
||
|
}
|
||
|
handleAuthentication() {
|
||
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
throw new Error('not implemented');
|
||
|
});
|
||
|
}
|
||
|
}
|
||
|
auth.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
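// Illustrative sketch (hypothetical usage, not part of the upstream sources): each credential
// handler above only mutates options.headers in prepareRequest; the token value here is a placeholder.
function exampleAuthorizationHeader() {
    const handler = new BearerCredentialHandler('example-token');
    const request = { headers: {} };
    handler.prepareRequest(request); // sets request.headers['Authorization'] = 'Bearer example-token'
    return request.headers['Authorization'];
}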
|
||
|
|
||
|
var hasRequiredOidcUtils;
|
||
|
|
||
|
function requireOidcUtils () {
|
||
|
if (hasRequiredOidcUtils) return oidcUtils;
|
||
|
hasRequiredOidcUtils = 1;
|
||
|
var __awaiter = (commonjsGlobal && commonjsGlobal.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
|
});
|
||
|
};
|
||
|
Object.defineProperty(oidcUtils, "__esModule", { value: true });
|
||
|
oidcUtils.OidcClient = void 0;
|
||
|
const http_client_1 = lib;
|
||
|
const auth_1 = auth;
|
||
|
const core_1 = requireCore();
|
||
|
class OidcClient {
|
||
|
static createHttpClient(allowRetry = true, maxRetry = 10) {
|
||
|
const requestOptions = {
|
||
|
allowRetries: allowRetry,
|
||
|
maxRetries: maxRetry
|
||
|
};
|
||
|
return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);
|
||
|
}
|
||
|
static getRequestToken() {
|
||
|
const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];
|
||
|
if (!token) {
|
||
|
throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');
|
||
|
}
|
||
|
return token;
|
||
|
}
|
||
|
static getIDTokenUrl() {
|
||
|
const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];
|
||
|
if (!runtimeUrl) {
|
||
|
throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');
|
||
|
}
|
||
|
return runtimeUrl;
|
||
|
}
|
||
|
static getCall(id_token_url) {
|
||
|
var _a;
|
||
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
const httpclient = OidcClient.createHttpClient();
|
||
|
const res = yield httpclient
|
||
|
.getJson(id_token_url)
|
||
|
.catch(error => {
|
||
|
throw new Error(`Failed to get ID Token. \n
|
||
|
Error Code : ${error.statusCode}\n
|
||
|
Error Message: ${error.result.message}`);
|
||
|
});
|
||
|
const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
|
||
|
if (!id_token) {
|
||
|
throw new Error('Response JSON body does not have an ID Token field');
|
||
|
}
|
||
|
return id_token;
|
||
|
});
|
||
|
}
|
||
|
static getIDToken(audience) {
|
||
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
try {
|
||
|
// New ID Token is requested from action service
|
||
|
let id_token_url = OidcClient.getIDTokenUrl();
|
||
|
if (audience) {
|
||
|
const encodedAudience = encodeURIComponent(audience);
|
||
|
id_token_url = `${id_token_url}&audience=${encodedAudience}`;
|
||
|
}
|
||
|
core_1.debug(`ID token url is ${id_token_url}`);
|
||
|
const id_token = yield OidcClient.getCall(id_token_url);
|
||
|
core_1.setSecret(id_token);
|
||
|
return id_token;
|
||
|
}
|
||
|
catch (error) {
|
||
|
throw new Error(`Error message: ${error.message}`);
|
||
|
}
|
||
|
});
|
||
|
}
|
||
|
}
|
||
|
oidcUtils.OidcClient = OidcClient;
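// Illustrative sketch (hypothetical helper, not part of the upstream sources): getIDToken above
// only appends the requested audience as a query parameter to ACTIONS_ID_TOKEN_REQUEST_URL before
// calling the runtime; this uncalled helper shows that URL construction.
function exampleIdTokenUrl(audience) {
    const base = OidcClient.getIDTokenUrl(); // throws unless ACTIONS_ID_TOKEN_REQUEST_URL is set
    return audience ? `${base}&audience=${encodeURIComponent(audience)}` : base;
}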
|
||
|
|
||
|
return oidcUtils;
|
||
|
}
|
||
|
|
||
|
var summary = {};
|
||
|
|
||
|
var hasRequiredSummary;
|
||
|
|
||
|
function requireSummary () {
|
||
|
if (hasRequiredSummary) return summary;
|
||
|
hasRequiredSummary = 1;
|
||
|
(function (exports) {
|
||
|
var __awaiter = (commonjsGlobal && commonjsGlobal.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
|
});
|
||
|
};
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;
|
||
|
const os_1 = require$$0;
|
||
|
const fs_1 = require$$1;
|
||
|
const { access, appendFile, writeFile } = fs_1.promises;
|
||
|
exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
|
||
|
exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
|
||
|
class Summary {
|
||
|
constructor() {
|
||
|
this._buffer = '';
|
||
|
}
|
||
|
/**
|
||
|
* Finds the summary file path from the environment, rejects if env var is not found or file does not exist
|
||
|
* Also checks r/w permissions.
|
||
|
*
|
||
|
* @returns step summary file path
|
||
|
*/
|
||
|
filePath() {
|
||
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
if (this._filePath) {
|
||
|
return this._filePath;
|
||
|
}
|
||
|
const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];
|
||
|
if (!pathFromEnv) {
|
||
|
throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
|
||
|
}
|
||
|
try {
|
||
|
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
|
||
|
}
|
||
|
catch (_a) {
|
||
|
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
|
||
|
}
|
||
|
this._filePath = pathFromEnv;
|
||
|
return this._filePath;
|
||
|
});
|
||
|
}
|
||
|
/**
|
||
|
* Wraps content in an HTML tag, adding any HTML attributes
|
||
|
*
|
||
|
* @param {string} tag HTML tag to wrap
|
||
|
* @param {string | null} content content within the tag
|
||
|
* @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
|
||
|
*
|
||
|
* @returns {string} content wrapped in HTML element
|
||
|
*/
|
||
|
wrap(tag, content, attrs = {}) {
|
||
|
const htmlAttrs = Object.entries(attrs)
|
||
|
.map(([key, value]) => ` ${key}="${value}"`)
|
||
|
.join('');
|
||
|
if (!content) {
|
||
|
return `<${tag}${htmlAttrs}>`;
|
||
|
}
|
||
|
return `<${tag}${htmlAttrs}>${content}</${tag}>`;
|
||
|
}
|
||
|
/**
|
||
|
* Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
|
||
|
*
|
||
|
* @param {SummaryWriteOptions} [options] (optional) options for write operation
|
||
|
*
|
||
|
* @returns {Promise<Summary>} summary instance
|
||
|
*/
|
||
|
write(options) {
|
||
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
|
||
|
const filePath = yield this.filePath();
|
||
|
const writeFunc = overwrite ? writeFile : appendFile;
|
||
|
yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });
|
||
|
return this.emptyBuffer();
|
||
|
});
|
||
|
}
|
||
|
/**
|
||
|
* Clears the summary buffer and wipes the summary file
|
||
|
*
|
||
|
* @returns {Summary} summary instance
|
||
|
*/
|
||
|
clear() {
|
||
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
return this.emptyBuffer().write({ overwrite: true });
|
||
|
});
|
||
|
}
|
||
|
/**
|
||
|
* Returns the current summary buffer as a string
|
||
|
*
|
||
|
* @returns {string} string of summary buffer
|
||
|
*/
|
||
|
stringify() {
|
||
|
return this._buffer;
|
||
|
}
|
||
|
/**
|
||
|
* If the summary buffer is empty
|
||
|
*
|
||
|
* @returns {boolean} true if the buffer is empty
|
||
|
*/
|
||
|
isEmptyBuffer() {
|
||
|
return this._buffer.length === 0;
|
||
|
}
|
||
|
/**
|
||
|
* Resets the summary buffer without writing to summary file
|
||
|
*
|
||
|
* @returns {Summary} summary instance
|
||
|
*/
|
||
|
emptyBuffer() {
|
||
|
this._buffer = '';
|
||
|
return this;
|
||
|
}
|
||
|
/**
|
||
|
* Adds raw text to the summary buffer
|
||
|
*
|
||
|
* @param {string} text content to add
|
||
|
* @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
|
||
|
*
|
||
|
* @returns {Summary} summary instance
|
||
|
*/
|
||
|
addRaw(text, addEOL = false) {
|
||
|
this._buffer += text;
|
||
|
return addEOL ? this.addEOL() : this;
|
||
|
}
|
||
|
/**
|
||
|
* Adds the operating system-specific end-of-line marker to the buffer
|
||
|
*
|
||
|
* @returns {Summary} summary instance
|
||
|
*/
|
||
|
addEOL() {
|
||
|
return this.addRaw(os_1.EOL);
|
||
|
}
|
||
|
/**
|
||
|
* Adds an HTML codeblock to the summary buffer
|
||
|
*
|
||
|
* @param {string} code content to render within fenced code block
|
||
|
* @param {string} lang (optional) language to syntax highlight code
|
||
|
*
|
||
|
* @returns {Summary} summary instance
|
||
|
*/
|
||
|
addCodeBlock(code, lang) {
|
||
|
const attrs = Object.assign({}, (lang && { lang }));
|
||
|
const element = this.wrap('pre', this.wrap('code', code), attrs);
|
||
|
return this.addRaw(element).addEOL();
|
||
|
}
|
||
|
/**
|
||
|
* Adds an HTML list to the summary buffer
|
||
|
*
|
||
|
* @param {string[]} items list of items to render
|
||
|
* @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
|
||
|
*
|
||
|
* @returns {Summary} summary instance
|
||
|
*/
|
||
|
addList(items, ordered = false) {
|
||
|
const tag = ordered ? 'ol' : 'ul';
|
||
|
const listItems = items.map(item => this.wrap('li', item)).join('');
|
||
|
const element = this.wrap(tag, listItems);
|
||
|
return this.addRaw(element).addEOL();
|
||
|
}
|
||
|
/**
|
||
|
* Adds an HTML table to the summary buffer
|
||
|
*
|
||
|
* @param {SummaryTableCell[]} rows table rows
|
||
|
*
|
||
|
* @returns {Summary} summary instance
|
||
|
*/
|
||
|
addTable(rows) {
|
||
|
const tableBody = rows
|
||
|
.map(row => {
|
||
|
const cells = row
|
||
|
.map(cell => {
|
||
|
if (typeof cell === 'string') {
|
||
|
return this.wrap('td', cell);
|
||
|
}
|
||
|
const { header, data, colspan, rowspan } = cell;
|
||
|
const tag = header ? 'th' : 'td';
|
||
|
const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));
|
||
|
return this.wrap(tag, data, attrs);
|
||
|
})
|
||
|
.join('');
|
||
|
return this.wrap('tr', cells);
|
||
|
})
|
||
|
.join('');
|
||
|
const element = this.wrap('table', tableBody);
|
||
|
return this.addRaw(element).addEOL();
|
||
|
}
|
||
|
/**
|
||
|
* Adds a collapsible HTML details element to the summary buffer
|
||
|
*
|
||
|
* @param {string} label text for the closed state
|
||
|
* @param {string} content collapsible content
|
||
|
*
|
||
|
* @returns {Summary} summary instance
|
||
|
*/
|
||
|
addDetails(label, content) {
|
||
|
const element = this.wrap('details', this.wrap('summary', label) + content);
|
||
|
return this.addRaw(element).addEOL();
|
||
|
}
|
||
|
/**
|
||
|
* Adds an HTML image tag to the summary buffer
|
||
|
*
|
||
|
* @param {string} src path to the image you want to embed
|
||
|
* @param {string} alt text description of the image
|
||
|
* @param {SummaryImageOptions} options (optional) additional image attributes
|
||
|
*
|
||
|
* @returns {Summary} summary instance
|
||
|
*/
|
||
|
addImage(src, alt, options) {
|
||
|
const { width, height } = options || {};
|
||
|
const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));
|
||
|
const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));
|
||
|
return this.addRaw(element).addEOL();
|
||
|
}
|
||
|
/**
|
||
|
* Adds an HTML section heading element
|
||
|
*
|
||
|
* @param {string} text heading text
|
||
|
* @param {number | string} [level=1] (optional) the heading level, default: 1
|
||
|
*
|
||
|
* @returns {Summary} summary instance
|
||
|
*/
|
||
|
addHeading(text, level) {
|
||
|
const tag = `h${level}`;
|
||
|
const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)
|
||
|
? tag
|
||
|
: 'h1';
|
||
|
const element = this.wrap(allowedTag, text);
|
||
|
return this.addRaw(element).addEOL();
|
||
|
}
|
||
|
/**
|
||
|
* Adds an HTML thematic break (<hr>) to the summary buffer
|
||
|
*
|
||
|
* @returns {Summary} summary instance
|
||
|
*/
|
||
|
addSeparator() {
|
||
|
const element = this.wrap('hr', null);
|
||
|
return this.addRaw(element).addEOL();
|
||
|
}
|
||
|
/**
|
||
|
* Adds an HTML line break (<br>) to the summary buffer
|
||
|
*
|
||
|
* @returns {Summary} summary instance
|
||
|
*/
|
||
|
addBreak() {
|
||
|
const element = this.wrap('br', null);
|
||
|
return this.addRaw(element).addEOL();
|
||
|
}
|
||
|
/**
|
||
|
* Adds an HTML blockquote to the summary buffer
|
||
|
*
|
||
|
* @param {string} text quote text
|
||
|
* @param {string} cite (optional) citation url
|
||
|
*
|
||
|
* @returns {Summary} summary instance
|
||
|
*/
|
||
|
addQuote(text, cite) {
|
||
|
const attrs = Object.assign({}, (cite && { cite }));
|
||
|
const element = this.wrap('blockquote', text, attrs);
|
||
|
return this.addRaw(element).addEOL();
|
||
|
}
|
||
|
/**
|
||
|
* Adds an HTML anchor tag to the summary buffer
|
||
|
*
|
||
|
* @param {string} text link text/content
|
||
|
* @param {string} href hyperlink
|
||
|
*
|
||
|
* @returns {Summary} summary instance
|
||
|
*/
|
||
|
addLink(text, href) {
|
||
|
const element = this.wrap('a', text, { href });
|
||
|
return this.addRaw(element).addEOL();
|
||
|
}
|
||
|
}
|
||
|
const _summary = new Summary();
|
||
|
/**
|
||
|
* @deprecated use `core.summary`
|
||
|
*/
|
||
|
exports.markdownSummary = _summary;
|
||
|
exports.summary = _summary;
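// Illustrative sketch (hypothetical usage, not part of the upstream sources): the Summary builder
// above is chainable, and only write() touches the GITHUB_STEP_SUMMARY file, so stringify() can be
// used to inspect the generated HTML without writing. The headings, table contents, and URL below
// are placeholders.
function exampleSummaryMarkup() {
    return new Summary()
        .addHeading('Build results', 2)
        .addTable([
            [{ data: 'Job', header: true }, { data: 'Status', header: true }],
            ['build', 'passed']
        ])
        .addLink('Full logs', 'https://example.com/logs')
        .stringify();
}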
|
||
|
|
||
|
} (summary));
|
||
|
return summary;
|
||
|
}
|
||
|
|
||
|
var pathUtils = {};
|
||
|
|
||
|
var hasRequiredPathUtils;
|
||
|
|
||
|
function requirePathUtils () {
|
||
|
if (hasRequiredPathUtils) return pathUtils;
|
||
|
hasRequiredPathUtils = 1;
|
||
|
var __createBinding = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||
|
if (k2 === undefined) k2 = k;
|
||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||
|
}) : (function(o, m, k, k2) {
|
||
|
if (k2 === undefined) k2 = k;
|
||
|
o[k2] = m[k];
|
||
|
}));
|
||
|
var __setModuleDefault = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||
|
}) : function(o, v) {
|
||
|
o["default"] = v;
|
||
|
});
|
||
|
var __importStar = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) {
|
||
|
if (mod && mod.__esModule) return mod;
|
||
|
var result = {};
|
||
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||
|
__setModuleDefault(result, mod);
|
||
|
return result;
|
||
|
};
|
||
|
Object.defineProperty(pathUtils, "__esModule", { value: true });
|
||
|
pathUtils.toPlatformPath = pathUtils.toWin32Path = pathUtils.toPosixPath = void 0;
|
||
|
const path = __importStar(require$$0$2);
|
||
|
/**
|
||
|
* toPosixPath converts the given path to the posix form. On Windows, \\ will be
|
||
|
* replaced with /.
|
||
|
*
|
||
|
* @param pth. Path to transform.
|
||
|
* @return string Posix path.
|
||
|
*/
|
||
|
function toPosixPath(pth) {
|
||
|
return pth.replace(/[\\]/g, '/');
|
||
|
}
|
||
|
pathUtils.toPosixPath = toPosixPath;
|
||
|
/**
|
||
|
* toWin32Path converts the given path to the win32 form. On Linux, / will be
|
||
|
* replaced with \\.
|
||
|
*
|
||
|
* @param pth. Path to transform.
|
||
|
* @return string Win32 path.
|
||
|
*/
|
||
|
function toWin32Path(pth) {
|
||
|
return pth.replace(/[/]/g, '\\');
|
||
|
}
|
||
|
pathUtils.toWin32Path = toWin32Path;
|
||
|
/**
|
||
|
* toPlatformPath converts the given path to a platform-specific path. It does
|
||
|
* this by replacing instances of / and \ with the platform-specific path
|
||
|
* separator.
|
||
|
*
|
||
|
* @param pth The path to platformize.
|
||
|
* @return string The platform-specific path.
|
||
|
*/
|
||
|
function toPlatformPath(pth) {
|
||
|
return pth.replace(/[/\\]/g, path.sep);
|
||
|
}
|
||
|
pathUtils.toPlatformPath = toPlatformPath;
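// Illustrative sketch (hypothetical helper, not part of the upstream sources): the three helpers
// above only swap separator characters; they do not normalize or resolve paths.
function examplePathConversions() {
    return {
        posix: toPosixPath('dir\\sub\\file.txt'),      // 'dir/sub/file.txt'
        win32: toWin32Path('dir/sub/file.txt'),        // 'dir\\sub\\file.txt'
        platform: toPlatformPath('dir/sub\\file.txt'), // separators become path.sep for the current OS
    };
}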
|
||
|
|
||
|
return pathUtils;
|
||
|
}
|
||
|
|
||
|
var hasRequiredCore;
|
||
|
|
||
|
function requireCore () {
|
||
|
if (hasRequiredCore) return core;
|
||
|
hasRequiredCore = 1;
|
||
|
(function (exports) {
|
||
|
var __createBinding = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||
|
if (k2 === undefined) k2 = k;
|
||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||
|
}) : (function(o, m, k, k2) {
|
||
|
if (k2 === undefined) k2 = k;
|
||
|
o[k2] = m[k];
|
||
|
}));
|
||
|
var __setModuleDefault = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||
|
}) : function(o, v) {
|
||
|
o["default"] = v;
|
||
|
});
|
||
|
var __importStar = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) {
|
||
|
if (mod && mod.__esModule) return mod;
|
||
|
var result = {};
|
||
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||
|
__setModuleDefault(result, mod);
|
||
|
return result;
|
||
|
};
|
||
|
var __awaiter = (commonjsGlobal && commonjsGlobal.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
|
});
|
||
|
};
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
|
||
|
const command_1 = command;
|
||
|
const file_command_1 = fileCommand;
|
||
|
const utils_1 = utils;
|
||
|
const os = __importStar(require$$0);
|
||
|
const path = __importStar(require$$0$2);
|
||
|
const oidc_utils_1 = requireOidcUtils();
|
||
|
/**
|
||
|
* The code to exit an action
|
||
|
*/
|
||
|
var ExitCode;
|
||
|
(function (ExitCode) {
|
||
|
/**
|
||
|
* A code indicating that the action was successful
|
||
|
*/
|
||
|
ExitCode[ExitCode["Success"] = 0] = "Success";
|
||
|
/**
|
||
|
* A code indicating that the action was a failure
|
||
|
*/
|
||
|
ExitCode[ExitCode["Failure"] = 1] = "Failure";
|
||
|
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
|
||
|
//-----------------------------------------------------------------------
|
||
|
// Variables
|
||
|
//-----------------------------------------------------------------------
|
||
|
/**
|
||
|
* Sets env variable for this action and future actions in the job
|
||
|
* @param name the name of the variable to set
|
||
|
* @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
|
||
|
*/
|
||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||
|
function exportVariable(name, val) {
|
||
|
const convertedVal = utils_1.toCommandValue(val);
|
||
|
process.env[name] = convertedVal;
|
||
|
const filePath = process.env['GITHUB_ENV'] || '';
|
||
|
if (filePath) {
|
||
|
return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));
|
||
|
}
|
||
|
command_1.issueCommand('set-env', { name }, convertedVal);
|
||
|
}
|
||
|
exports.exportVariable = exportVariable;
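// Illustrative sketch (hypothetical usage, not part of the upstream sources): exportVariable above
// sets the variable for the current process immediately and also hands it to the runner (GITHUB_ENV
// file or set-env command) so later steps see it. The variable name and value are placeholders.
function exampleExportVariable() {
    exportVariable('EXAMPLE_MODE', 'fast');
    return process.env['EXAMPLE_MODE']; // 'fast' for this process; later steps receive it via the runner
}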
|
||
|
/**
|
||
|
* Registers a secret which will get masked from logs
|
||
|
* @param secret value of the secret
|
||
|
*/
|
||
|
function setSecret(secret) {
|
||
|
command_1.issueCommand('add-mask', {}, secret);
|
||
|
}
|
||
|
exports.setSecret = setSecret;
|
||
|
/**
|
||
|
* Prepends inputPath to the PATH (for this action and future actions)
|
||
|
* @param inputPath
|
||
|
*/
|
||
|
function addPath(inputPath) {
|
||
|
const filePath = process.env['GITHUB_PATH'] || '';
|
||
|
if (filePath) {
|
||
|
file_command_1.issueFileCommand('PATH', inputPath);
|
||
|
}
|
||
|
else {
|
||
|
command_1.issueCommand('add-path', {}, inputPath);
|
||
|
}
|
||
|
process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
|
||
|
}
|
||
|
exports.addPath = addPath;
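// Illustrative sketch (hypothetical usage, not part of the upstream sources): addPath above both
// prepends the directory to PATH for the current process and tells the runner (GITHUB_PATH file or
// add-path command) to do the same for later steps. The directory here is a placeholder.
function examplePrependToolDir() {
    addPath('/opt/example-tool/bin');
    return process.env['PATH'].startsWith('/opt/example-tool/bin'); // true: PATH is updated for this process too
}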
|
||
|
/**
|
||
|
* Gets the value of an input.
|
||
|
* Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.
|
||
|
* Returns an empty string if the value is not defined.
|
||
|
*
|
||
|
* @param name name of the input to get
|
||
|
* @param options optional. See InputOptions.
|
||
|
* @returns string
|
||
|
*/
|
||
|
function getInput(name, options) {
|
||
|
const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
|
||
|
if (options && options.required && !val) {
|
||
|
throw new Error(`Input required and not supplied: ${name}`);
|
||
|
}
|
||
|
if (options && options.trimWhitespace === false) {
|
||
|
return val;
|
||
|
}
|
||
|
return val.trim();
|
||
|
}
|
||
|
exports.getInput = getInput;
|
||
|
/**
|
||
|
* Gets the values of a multiline input. Each value is also trimmed.
|
||
|
*
|
||
|
* @param name name of the input to get
|
||
|
* @param options optional. See InputOptions.
|
||
|
* @returns string[]
|
||
|
*
|
||
|
*/
|
||
|
function getMultilineInput(name, options) {
|
||
|
const inputs = getInput(name, options)
|
||
|
.split('\n')
|
||
|
.filter(x => x !== '');
|
||
|
if (options && options.trimWhitespace === false) {
|
||
|
return inputs;
|
||
|
}
|
||
|
return inputs.map(input => input.trim());
|
||
|
}
|
||
|
exports.getMultilineInput = getMultilineInput;
|
||
|
/**
|
||
|
* Gets the input value of the boolean type in the YAML 1.2 "core schema" specification.
|
||
|
* Supported boolean input list: `true | True | TRUE | false | False | FALSE`.
|
||
|
* The return value is also in boolean type.
|
||
|
* ref: https://yaml.org/spec/1.2/spec.html#id2804923
|
||
|
*
|
||
|
* @param name name of the input to get
|
||
|
* @param options optional. See InputOptions.
|
||
|
* @returns boolean
|
||
|
*/
|
||
|
function getBooleanInput(name, options) {
|
||
|
const trueValue = ['true', 'True', 'TRUE'];
|
||
|
const falseValue = ['false', 'False', 'FALSE'];
|
||
|
const val = getInput(name, options);
|
||
|
if (trueValue.includes(val))
|
||
|
return true;
|
||
|
if (falseValue.includes(val))
|
||
|
return false;
|
||
|
throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` +
|
||
|
`Support boolean input list: \`true | True | TRUE | false | False | FALSE\``);
|
||
|
}
|
||
|
exports.getBooleanInput = getBooleanInput;
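// Illustrative sketch (hypothetical usage, not part of the upstream sources): getInput above looks
// inputs up as INPUT_<NAME> environment variables, replacing spaces with underscores and upper-casing
// the name; 'example flag' is a made-up input name used only to show that mapping.
function exampleReadBooleanInput() {
    process.env['INPUT_EXAMPLE_FLAG'] = 'true'; // what the runner would set for an input named 'example flag'
    return getBooleanInput('example flag');     // true
}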
|
||
|
/**
|
||
|
* Sets the value of an output.
|
||
|
*
|
||
|
* @param name name of the output to set
|
||
|
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
||
|
*/
|
||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||
|
function setOutput(name, value) {
|
||
|
const filePath = process.env['GITHUB_OUTPUT'] || '';
|
||
|
if (filePath) {
|
||
|
return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));
|
||
|
}
|
||
|
process.stdout.write(os.EOL);
|
||
|
command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));
|
||
|
}
|
||
|
exports.setOutput = setOutput;
|
||
|
/**
|
||
|
* Enables or disables the echoing of commands into stdout for the rest of the step.
|
||
|
* Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
|
||
|
*
|
||
|
*/
|
||
|
function setCommandEcho(enabled) {
|
||
|
command_1.issue('echo', enabled ? 'on' : 'off');
|
||
|
}
|
||
|
exports.setCommandEcho = setCommandEcho;
|
||
|
//-----------------------------------------------------------------------
|
||
|
// Results
|
||
|
//-----------------------------------------------------------------------
|
||
|
/**
|
||
|
* Sets the action status to failed.
|
||
|
* When the action exits, it will do so with an exit code of 1
|
||
|
* @param message add error issue message
|
||
|
*/
|
||
|
function setFailed(message) {
|
||
|
process.exitCode = ExitCode.Failure;
|
||
|
error(message);
|
||
|
}
|
||
|
exports.setFailed = setFailed;
|
||
|
//-----------------------------------------------------------------------
|
||
|
// Logging Commands
|
||
|
//-----------------------------------------------------------------------
|
||
|
/**
|
||
|
* Gets whether Actions Step Debug is on or not
|
||
|
*/
|
||
|
function isDebug() {
|
||
|
return process.env['RUNNER_DEBUG'] === '1';
|
||
|
}
|
||
|
exports.isDebug = isDebug;
|
||
|
/**
|
||
|
* Writes debug message to user log
|
||
|
* @param message debug message
|
||
|
*/
|
||
|
function debug(message) {
|
||
|
command_1.issueCommand('debug', {}, message);
|
||
|
}
|
||
|
exports.debug = debug;
|
||
|
/**
|
||
|
* Adds an error issue
|
||
|
* @param message error issue message. Errors will be converted to string via toString()
|
||
|
* @param properties optional properties to add to the annotation.
|
||
|
*/
|
||
|
function error(message, properties = {}) {
|
||
|
command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
|
||
|
}
|
||
|
exports.error = error;
|
||
|
/**
|
||
|
* Adds a warning issue
|
||
|
* @param message warning issue message. Errors will be converted to string via toString()
|
||
|
* @param properties optional properties to add to the annotation.
|
||
|
*/
|
||
|
function warning(message, properties = {}) {
|
||
|
command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
|
||
|
}
|
||
|
exports.warning = warning;
|
||
|
/**
|
||
|
* Adds a notice issue
|
||
|
* @param message notice issue message. Errors will be converted to string via toString()
|
||
|
* @param properties optional properties to add to the annotation.
|
||
|
*/
|
||
|
function notice(message, properties = {}) {
|
||
|
command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
|
||
|
}
|
||
|
exports.notice = notice;
|
||
|
/**
|
||
|
* Writes info to log with console.log.
|
||
|
* @param message info message
|
||
|
*/
|
||
|
function info(message) {
|
||
|
process.stdout.write(message + os.EOL);
|
||
|
}
|
||
|
exports.info = info;
|
||
|
/**
|
||
|
* Begin an output group.
|
||
|
*
|
||
|
* Output until the next `groupEnd` will be foldable in this group
|
||
|
*
|
||
|
* @param name The name of the output group
|
||
|
*/
|
||
|
function startGroup(name) {
|
||
|
command_1.issue('group', name);
|
||
|
}
|
||
|
exports.startGroup = startGroup;
|
||
|
/**
|
||
|
* End an output group.
|
||
|
*/
|
||
|
function endGroup() {
|
||
|
command_1.issue('endgroup');
|
||
|
}
|
||
|
exports.endGroup = endGroup;
|
||
|
/**
|
||
|
* Wrap an asynchronous function call in a group.
|
||
|
*
|
||
|
* Returns the same type as the function itself.
|
||
|
*
|
||
|
* @param name The name of the group
|
||
|
* @param fn The function to wrap in the group
|
||
|
*/
|
||
|
function group(name, fn) {
|
||
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
startGroup(name);
|
||
|
let result;
|
||
|
try {
|
||
|
result = yield fn();
|
||
|
}
|
||
|
finally {
|
||
|
endGroup();
|
||
|
}
|
||
|
return result;
|
||
|
});
|
||
|
}
|
||
|
exports.group = group;
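// Illustrative sketch (hypothetical usage, not part of the upstream sources): group() above wraps an
// async callback between startGroup and endGroup so its output folds in the runner log, and endGroup
// runs even if the callback throws. The group name and log line are placeholders.
async function exampleGroupedStep() {
    return group('Example group', async () => {
        info('doing some work...'); // log lines emitted here are foldable under the group
        return true;
    });
}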
|
||
|
//-----------------------------------------------------------------------
|
||
|
// Wrapper action state
|
||
|
//-----------------------------------------------------------------------
|
||
|
/**
|
||
|
* Saves state for current action, the state can only be retrieved by this action's post job execution.
|
||
|
*
|
||
|
* @param name name of the state to store
|
||
|
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
||
|
*/
|
||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||
|
function saveState(name, value) {
|
||
|
const filePath = process.env['GITHUB_STATE'] || '';
|
||
|
if (filePath) {
|
||
|
return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
|
||
|
}
|
||
|
command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
|
||
|
}
|
||
|
exports.saveState = saveState;
|
||
|
/**
|
||
|
* Gets the value of a state set by this action's main execution.
|
||
|
*
|
||
|
* @param name name of the state to get
|
||
|
* @returns string
|
||
|
*/
|
||
|
function getState(name) {
|
||
|
return process.env[`STATE_${name}`] || '';
|
||
|
}
|
||
|
exports.getState = getState;
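// Illustrative sketch (hypothetical usage, not part of the upstream sources): saveState above hands a
// value to the runner (GITHUB_STATE file or save-state command), and in the post step the runner
// exposes it back as a STATE_<name> environment variable, which is what getState reads. The state name
// is a placeholder, and the two calls below would really run in different steps of the same job.
function exampleSaveAndReadState() {
    saveState('startedAt', Date.now()); // main step: hand the value to the runner
    return getState('startedAt');       // post step: reads process.env['STATE_startedAt']
}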
|
||
|
function getIDToken(aud) {
|
||
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
return yield oidc_utils_1.OidcClient.getIDToken(aud);
|
||
|
});
|
||
|
}
|
||
|
exports.getIDToken = getIDToken;
|
||
|
/**
|
||
|
* Summary exports
|
||
|
*/
|
||
|
var summary_1 = requireSummary();
|
||
|
Object.defineProperty(exports, "summary", { enumerable: true, get: function () { return summary_1.summary; } });
|
||
|
/**
|
||
|
* @deprecated use core.summary
|
||
|
*/
|
||
|
var summary_2 = requireSummary();
|
||
|
Object.defineProperty(exports, "markdownSummary", { enumerable: true, get: function () { return summary_2.markdownSummary; } });
|
||
|
/**
|
||
|
* Path exports
|
||
|
*/
|
||
|
var path_utils_1 = requirePathUtils();
|
||
|
Object.defineProperty(exports, "toPosixPath", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });
|
||
|
Object.defineProperty(exports, "toWin32Path", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });
|
||
|
Object.defineProperty(exports, "toPlatformPath", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });
|
||
|
|
||
|
} (core));
|
||
|
return core;
|
||
|
}
|
||
|
|
||
|
var coreExports = requireCore();
|
||
|
|
||
|
let events = require$$0$1;
|
||
|
let fs = require$$1;
|
||
|
let path = require$$0$2;
|
||
|
|
||
|
// const environment = process.env['NODE_ENV'] || 'development'
|
||
|
|
||
|
class devNull {
|
||
|
info() { };
|
||
|
error() { };
|
||
|
}
|
||
|
class Tail extends events.EventEmitter {
|
||
|
|
||
|
constructor(filename, options = {}) {
|
||
|
super();
|
||
|
this.filename = filename;
|
||
|
this.absPath = path.dirname(this.filename);
|
||
|
this.separator = (options.separator !== undefined) ? options.separator : /[\r]{0,1}\n/;// null is a valid param
|
||
|
this.fsWatchOptions = options.fsWatchOptions || {};
|
||
|
this.follow = options['follow'] != undefined ? options['follow'] : true;
|
||
|
this.logger = options.logger || new devNull();
|
||
|
this.useWatchFile = options.useWatchFile || false;
|
||
|
this.flushAtEOF = options.flushAtEOF || false;
|
||
|
this.encoding = options.encoding || 'utf-8';
|
||
|
const fromBeginning = options.fromBeginning || false;
|
||
|
this.nLines = options.nLines || undefined;
|
||
|
|
||
|
this.logger.info(`Tail starting...`);
|
||
|
this.logger.info(`filename: ${this.filename}`);
|
||
|
this.logger.info(`encoding: ${this.encoding}`);
|
||
|
|
||
|
try {
|
||
|
fs.accessSync(this.filename, fs.constants.F_OK);
|
||
|
} catch (err) {
|
||
|
if (err.code == 'ENOENT') {
|
||
|
throw err
|
||
|
}
|
||
|
}
|
||
|
|
||
|
this.buffer = '';
|
||
|
this.internalDispatcher = new events.EventEmitter();
|
||
|
this.queue = [];
|
||
|
this.isWatching = false;
|
||
|
this.pos = 0;
|
||
|
|
||
|
// this.internalDispatcher.on('next',this.readBlock);
|
||
|
this.internalDispatcher.on('next', () => {
|
||
|
this.readBlock();
|
||
|
});
|
||
|
|
||
|
let cursor;
|
||
|
|
||
|
this.logger.info(`fromBeginning: ${fromBeginning}`);
|
||
|
if (fromBeginning) {
|
||
|
cursor = 0;
|
||
|
} else if (this.nLines <= 0) {
|
||
|
cursor = 0;
|
||
|
} else if (this.nLines !== undefined) {
|
||
|
cursor = this.getPositionAtNthLine(this.nLines);
|
||
|
} else {
|
||
|
cursor = this.latestPosition();
|
||
|
}
|
||
|
|
||
|
if (cursor === undefined) throw new Error("Tail can't initialize.");
|
||
|
|
||
|
const flush = fromBeginning || (this.nLines != undefined);
|
||
|
try {
|
||
|
this.watch(cursor, flush);
|
||
|
} catch (err) {
|
||
|
this.logger.error(`watch for ${this.filename} failed: ${err}`);
|
||
|
this.emit("error", `watch for ${this.filename} failed: ${err}`);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
/**
|
||
|
* Grabs the index of the last line of text in the format /.*(\n)?/.
|
||
|
* Returns null if a full line can not be found.
|
||
|
* @param {string} text
|
||
|
* @returns {number | null}
|
||
|
*/
|
||
|
getIndexOfLastLine(text) {
|
||
|
|
||
|
/**
|
||
|
* Helper function get the last match as string
|
||
|
* @param {string} haystack
|
||
|
* @param {string | RegExp} needle
|
||
|
* @returns {string | undefined}
|
||
|
*/
|
||
|
const getLastMatch = (haystack, needle) => {
|
||
|
const matches = haystack.match(needle);
|
||
|
if (matches === null) {
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
return matches[matches.length - 1];
|
||
|
};
|
||
|
|
||
|
const endSep = getLastMatch(text, this.separator);
|
||
|
|
||
|
if (!endSep) return null;
|
||
|
|
||
|
const endSepIndex = text.lastIndexOf(endSep);
|
||
|
let lastLine;
|
||
|
|
||
|
if (text.endsWith(endSep)) {
|
||
|
// If the text ends with a separator, look back further to find the next
|
||
|
// separator to complete the line
|
||
|
|
||
|
const trimmed = text.substring(0, endSepIndex);
|
||
|
const startSep = getLastMatch(trimmed, this.separator);
|
||
|
|
||
|
// If there isn't another separator, the line isn't complete so
|
||
|
// return null to get more data
|
||
|
|
||
|
if (!startSep) {
|
||
|
return null;
|
||
|
}
|
||
|
|
||
|
const startSepIndex = trimmed.lastIndexOf(startSep);
|
||
|
|
||
|
// Exclude the starting separator, include the ending separator
|
||
|
|
||
|
lastLine = text.substring(
|
||
|
startSepIndex + startSep.length,
|
||
|
endSepIndex + endSep.length
|
||
|
);
|
||
|
} else {
|
||
|
// If the text does not end with a separator, grab everything after
|
||
|
// the last separator
|
||
|
lastLine = text.substring(endSepIndex + endSep.length);
|
||
|
}
|
||
|
|
||
|
return text.lastIndexOf(lastLine);
|
||
|
}
|
||
|
|
||
|
/**
|
||
|
* Returns the position of the start of the `nLines`th line from the bottom.
|
||
|
* Returns 0 if `nLines` is greater than the total number of lines in the file.
|
||
|
* @param {number} nLines
|
||
|
* @returns {number}
|
||
|
*/
|
||
|
getPositionAtNthLine(nLines) {
|
||
|
const { size } = fs.statSync(this.filename);
|
||
|
|
||
|
if (size === 0) {
|
||
|
return 0;
|
||
|
}
|
||
|
|
||
|
const fd = fs.openSync(this.filename, 'r');
|
||
|
// Start from the end of the file and work backwards in specific chunks
|
||
|
let currentReadPosition = size;
|
||
|
const chunkSizeBytes = Math.min(1024, size);
|
||
|
const lineBytes = [];
|
||
|
|
||
|
let remaining = '';
|
||
|
|
||
|
while (lineBytes.length < nLines) {
|
||
|
// Shift the current read position backward to the amount we're about to read
|
||
|
currentReadPosition -= chunkSizeBytes;
|
||
|
|
||
|
// If negative, we've reached the beginning of the file and we should stop and return 0, starting the
|
||
|
// stream at the beginning.
|
||
|
if (currentReadPosition < 0) {
|
||
|
return 0;
|
||
|
}
|
||
|
|
||
|
// Read a chunk of the file and prepend it to the working buffer
|
||
|
const buffer = Buffer.alloc(chunkSizeBytes);
|
||
|
const bytesRead = fs.readSync(fd, buffer,
|
||
|
0, // position in buffer to write to
|
||
|
chunkSizeBytes, // number of bytes to read
|
||
|
currentReadPosition // position in file to read from
|
||
|
);
|
||
|
|
||
|
// .subarray returns Uint8Array in node versions < 16.x and Buffer
|
||
|
// in versions >= 16.x. To support both, allocate a new buffer with
|
||
|
// Buffer.from which accepts both types
|
||
|
const readArray = buffer.subarray(0, bytesRead);
|
||
|
remaining = Buffer.from(readArray).toString(this.encoding) + remaining;
|
||
|
|
||
|
let index = this.getIndexOfLastLine(remaining);
|
||
|
|
||
|
while (index !== null && lineBytes.length < nLines) {
|
||
|
const line = remaining.substring(index);
|
||
|
|
||
|
lineBytes.push(Buffer.byteLength(line));
|
||
|
remaining = remaining.substring(0, index);
|
||
|
|
||
|
index = this.getIndexOfLastLine(remaining);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
fs.closeSync(fd);
|
||
|
|
||
|
return size - lineBytes.reduce((acc, cur) => acc + cur, 0)
|
||
|
}
|
||
|
|
||
|
latestPosition() {
|
||
|
try {
|
||
|
return fs.statSync(this.filename).size;
|
||
|
} catch (err) {
|
||
|
this.logger.error(`size check for ${this.filename} failed: ${err}`);
|
||
|
this.emit("error", `size check for ${this.filename} failed: ${err}`);
|
||
|
throw err;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
readBlock() {
|
||
|
if (this.queue.length >= 1) {
|
||
|
const block = this.queue[0];
|
||
|
if (block.end > block.start) {
|
||
|
let stream = fs.createReadStream(this.filename, { start: block.start, end: block.end - 1, encoding: this.encoding });
|
||
|
stream.on('error', (error) => {
|
||
|
this.logger.error(`Tail error: ${error}`);
|
||
|
this.emit('error', error);
|
||
|
});
|
||
|
stream.on('end', () => {
|
||
|
this.queue.shift();
|
||
|
if (this.queue.length > 0) {
|
||
|
this.internalDispatcher.emit('next');
|
||
|
}
|
||
|
if (this.flushAtEOF && this.buffer.length > 0) {
|
||
|
this.emit('line', this.buffer);
|
||
|
this.buffer = "";
|
||
|
}
|
||
|
});
|
||
|
stream.on('data', (d) => {
|
||
|
if (this.separator === null) {
|
||
|
this.emit("line", d);
|
||
|
} else {
|
||
|
this.buffer += d;
|
||
|
let parts = this.buffer.split(this.separator);
|
||
|
this.buffer = parts.pop();
|
||
|
for (const chunk of parts) {
|
||
|
this.emit("line", chunk);
|
||
|
}
|
||
|
}
|
||
|
});
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
change() {
|
||
|
let p = this.latestPosition();
|
||
|
if (p < this.currentCursorPos) {//scenario where text is not appended but it's actually a w+
|
||
|
this.currentCursorPos = p;
|
||
|
} else if (p > this.currentCursorPos) {
|
||
|
this.queue.push({ start: this.currentCursorPos, end: p });
|
||
|
this.currentCursorPos = p;
|
||
|
if (this.queue.length == 1) {
|
||
|
this.internalDispatcher.emit("next");
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
watch(startingCursor, flush) {
|
||
|
if (this.isWatching) return;
|
||
|
this.logger.info(`filesystem.watch present? ${fs.watch != undefined}`);
|
||
|
this.logger.info(`useWatchFile: ${this.useWatchFile}`);
|
||
|
|
||
|
this.isWatching = true;
|
||
|
this.currentCursorPos = startingCursor;
|
||
|
// force a file flush if either the fromBeginning or nLines flags were passed.
|
||
|
if (flush) this.change();
|
||
|
|
||
|
if (!this.useWatchFile && fs.watch) {
|
||
|
this.logger.info(`watch strategy: watch`);
|
||
|
this.watcher = fs.watch(this.filename, this.fsWatchOptions, (e, filename) => { this.watchEvent(e, filename); });
|
||
|
} else {
|
||
|
this.logger.info(`watch strategy: watchFile`);
|
||
|
fs.watchFile(this.filename, this.fsWatchOptions, (curr, prev) => { this.watchFileEvent(curr, prev); });
|
||
|
}
|
||
|
}
|
||
|
|
||
|
rename(filename) {
|
||
|
//TODO
|
||
|
//MacOS sometimes throws a rename event for no reason.
|
||
|
//Different platforms might behave differently.
|
||
|
//see https://nodejs.org/api/fs.html#fs_fs_watch_filename_options_listener
|
||
|
//filename might not be present.
|
||
|
//https://nodejs.org/api/fs.html#fs_filename_argument
|
||
|
//Better solution would be check inode but it will require a timeout and
|
||
|
// a sync file read.
|
||
|
if (filename === undefined || filename !== this.filename) {
|
||
|
this.unwatch();
|
||
|
if (this.follow) {
|
||
|
this.filename = path.join(this.absPath, filename);
|
||
|
this.rewatchId = setTimeout((() => {
|
||
|
try {
|
||
|
this.watch(this.currentCursorPos);
|
||
|
} catch (ex) {
|
||
|
this.logger.error(`'rename' event for ${this.filename}. File not available anymore.`);
|
||
|
this.emit("error", ex);
|
||
|
}
|
||
|
}), 1000);
|
||
|
} else {
|
||
|
this.logger.error(`'rename' event for ${this.filename}. File not available anymore.`);
|
||
|
this.emit("error", `'rename' event for ${this.filename}. File not available anymore.`);
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
watchEvent(e, evtFilename) {
|
||
|
try {
|
||
|
if (e === 'change') {
|
||
|
this.change();
|
||
|
} else if (e === 'rename') {
|
||
|
this.rename(evtFilename);
|
||
|
}
|
||
|
} catch (err) {
|
||
|
this.logger.error(`watchEvent for ${this.filename} failed: ${err}`);
|
||
|
this.emit("error", `watchEvent for ${this.filename} failed: ${err}`);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
watchFileEvent(curr, prev) {
|
||
|
if (curr.size > prev.size) {
|
||
|
this.currentCursorPos = curr.size; // Update this.currentCursorPos so that a consumer can determine if the entire file has been handled
|
||
|
this.queue.push({ start: prev.size, end: curr.size });
|
||
|
if (this.queue.length == 1) {
|
||
|
this.internalDispatcher.emit("next");
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
unwatch() {
|
||
|
if (this.watcher) {
|
||
|
this.watcher.close();
|
||
|
} else {
|
||
|
fs.unwatchFile(this.filename);
|
||
|
}
|
||
|
if (this.rewatchId) {
|
||
|
clearTimeout(this.rewatchId);
|
||
|
this.rewatchId = undefined;
|
||
|
}
|
||
|
this.isWatching = false;
|
||
|
this.queue = [];// TODO: is this correct behaviour?
|
||
|
if (this.logger) {
|
||
|
this.logger.info(`Unwatch ${this.filename}`);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
}
|
||
|
|
||
|
var Tail_1 = Tail;
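// Illustrative sketch (hypothetical usage, not part of the upstream sources): typical use of the Tail
// class above. The log file path is a placeholder and must already exist; 'line' fires once per
// separator-delimited chunk appended to the file.
function exampleTailFile() {
    const tail = new Tail('/tmp/example.log', { fromBeginning: false });
    tail.on('line', (line) => console.log(line));
    tail.on('error', (err) => console.error(err));
    return tail; // call tail.unwatch() to stop watching
}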
|
||
|
|
||
|
const typedArrayTypeNames = [
|
||
|
'Int8Array',
|
||
|
'Uint8Array',
|
||
|
'Uint8ClampedArray',
|
||
|
'Int16Array',
|
||
|
'Uint16Array',
|
||
|
'Int32Array',
|
||
|
'Uint32Array',
|
||
|
'Float32Array',
|
||
|
'Float64Array',
|
||
|
'BigInt64Array',
|
||
|
'BigUint64Array',
|
||
|
];
|
||
|
function isTypedArrayName(name) {
|
||
|
return typedArrayTypeNames.includes(name);
|
||
|
}
|
||
|
const objectTypeNames = [
|
||
|
'Function',
|
||
|
'Generator',
|
||
|
'AsyncGenerator',
|
||
|
'GeneratorFunction',
|
||
|
'AsyncGeneratorFunction',
|
||
|
'AsyncFunction',
|
||
|
'Observable',
|
||
|
'Array',
|
||
|
'Buffer',
|
||
|
'Blob',
|
||
|
'Object',
|
||
|
'RegExp',
|
||
|
'Date',
|
||
|
'Error',
|
||
|
'Map',
|
||
|
'Set',
|
||
|
'WeakMap',
|
||
|
'WeakSet',
|
||
|
'WeakRef',
|
||
|
'ArrayBuffer',
|
||
|
'SharedArrayBuffer',
|
||
|
'DataView',
|
||
|
'Promise',
|
||
|
'URL',
|
||
|
'FormData',
|
||
|
'URLSearchParams',
|
||
|
'HTMLElement',
|
||
|
'NaN',
|
||
|
...typedArrayTypeNames,
|
||
|
];
|
||
|
function isObjectTypeName(name) {
|
||
|
return objectTypeNames.includes(name);
|
||
|
}
|
||
|
const primitiveTypeNames = [
|
||
|
'null',
|
||
|
'undefined',
|
||
|
'string',
|
||
|
'number',
|
||
|
'bigint',
|
||
|
'boolean',
|
||
|
'symbol',
|
||
|
];
|
||
|
function isPrimitiveTypeName(name) {
|
||
|
return primitiveTypeNames.includes(name);
|
||
|
}
|
||
|
// eslint-disable-next-line @typescript-eslint/ban-types
|
||
|
function isOfType(type) {
|
||
|
return (value) => typeof value === type;
|
||
|
}
|
||
|
const { toString } = Object.prototype;
|
||
|
const getObjectType = (value) => {
|
||
|
const objectTypeName = toString.call(value).slice(8, -1);
|
||
|
if (/HTML\w+Element/.test(objectTypeName) && is.domElement(value)) {
|
||
|
return 'HTMLElement';
|
||
|
}
|
||
|
if (isObjectTypeName(objectTypeName)) {
|
||
|
return objectTypeName;
|
||
|
}
|
||
|
return undefined;
|
||
|
};
|
||
|
const isObjectOfType = (type) => (value) => getObjectType(value) === type;
|
||
|
function is(value) {
|
||
|
if (value === null) {
|
||
|
return 'null';
|
||
|
}
|
||
|
switch (typeof value) {
|
||
|
case 'undefined':
|
||
|
return 'undefined';
|
||
|
case 'string':
|
||
|
return 'string';
|
||
|
case 'number':
|
||
|
return Number.isNaN(value) ? 'NaN' : 'number';
|
||
|
case 'boolean':
|
||
|
return 'boolean';
|
||
|
case 'function':
|
||
|
return 'Function';
|
||
|
case 'bigint':
|
||
|
return 'bigint';
|
||
|
case 'symbol':
|
||
|
return 'symbol';
|
||
|
}
|
||
|
if (is.observable(value)) {
|
||
|
return 'Observable';
|
||
|
}
|
||
|
if (is.array(value)) {
|
||
|
return 'Array';
|
||
|
}
|
||
|
if (is.buffer(value)) {
|
||
|
return 'Buffer';
|
||
|
}
|
||
|
const tagType = getObjectType(value);
|
||
|
if (tagType) {
|
||
|
return tagType;
|
||
|
}
|
||
|
if (value instanceof String || value instanceof Boolean || value instanceof Number) {
|
||
|
throw new TypeError('Please don\'t use object wrappers for primitive types');
|
||
|
}
|
||
|
return 'Object';
|
||
|
}
|
||
|
is.undefined = isOfType('undefined');
|
||
|
is.string = isOfType('string');
|
||
|
const isNumberType = isOfType('number');
|
||
|
is.number = (value) => isNumberType(value) && !is.nan(value);
|
||
|
is.bigint = isOfType('bigint');
|
||
|
// eslint-disable-next-line @typescript-eslint/ban-types
|
||
|
is.function_ = isOfType('function');
|
||
|
// eslint-disable-next-line @typescript-eslint/ban-types
|
||
|
is.null_ = (value) => value === null;
|
||
|
is.class_ = (value) => is.function_(value) && value.toString().startsWith('class ');
|
||
|
is.boolean = (value) => value === true || value === false;
|
||
|
is.symbol = isOfType('symbol');
|
||
|
is.numericString = (value) => is.string(value) && !is.emptyStringOrWhitespace(value) && !Number.isNaN(Number(value));
|
||
|
is.array = (value, assertion) => {
|
||
|
if (!Array.isArray(value)) {
|
||
|
return false;
|
||
|
}
|
||
|
if (!is.function_(assertion)) {
|
||
|
return true;
|
||
|
}
|
||
|
return value.every(element => assertion(element));
|
||
|
};
|
||
|
// eslint-disable-next-line @typescript-eslint/no-unsafe-return, @typescript-eslint/no-unsafe-call
|
||
|
is.buffer = (value) => value?.constructor?.isBuffer?.(value) ?? false;
|
||
|
is.blob = (value) => isObjectOfType('Blob')(value);
|
||
|
is.nullOrUndefined = (value) => is.null_(value) || is.undefined(value); // eslint-disable-line @typescript-eslint/ban-types
|
||
|
is.object = (value) => !is.null_(value) && (typeof value === 'object' || is.function_(value)); // eslint-disable-line @typescript-eslint/ban-types
|
||
|
is.iterable = (value) => is.function_(value?.[Symbol.iterator]);
|
||
|
is.asyncIterable = (value) => is.function_(value?.[Symbol.asyncIterator]);
|
||
|
is.generator = (value) => is.iterable(value) && is.function_(value?.next) && is.function_(value?.throw);
|
||
|
is.asyncGenerator = (value) => is.asyncIterable(value) && is.function_(value.next) && is.function_(value.throw);
|
||
|
is.nativePromise = (value) => isObjectOfType('Promise')(value);
|
||
|
const hasPromiseApi = (value) => is.function_(value?.then)
|
||
|
&& is.function_(value?.catch);
|
||
|
is.promise = (value) => is.nativePromise(value) || hasPromiseApi(value);
|
||
|
is.generatorFunction = isObjectOfType('GeneratorFunction');
|
||
|
is.asyncGeneratorFunction = (value) => getObjectType(value) === 'AsyncGeneratorFunction';
|
||
|
is.asyncFunction = (value) => getObjectType(value) === 'AsyncFunction';
|
||
|
// eslint-disable-next-line no-prototype-builtins, @typescript-eslint/ban-types
|
||
|
is.boundFunction = (value) => is.function_(value) && !value.hasOwnProperty('prototype');
|
||
|
is.regExp = isObjectOfType('RegExp');
|
||
|
is.date = isObjectOfType('Date');
|
||
|
is.error = isObjectOfType('Error');
|
||
|
is.map = (value) => isObjectOfType('Map')(value);
|
||
|
is.set = (value) => isObjectOfType('Set')(value);
|
||
|
is.weakMap = (value) => isObjectOfType('WeakMap')(value); // eslint-disable-line @typescript-eslint/ban-types
|
||
|
is.weakSet = (value) => isObjectOfType('WeakSet')(value); // eslint-disable-line @typescript-eslint/ban-types
|
||
|
is.weakRef = (value) => isObjectOfType('WeakRef')(value); // eslint-disable-line @typescript-eslint/ban-types
|
||
|
is.int8Array = isObjectOfType('Int8Array');
|
||
|
is.uint8Array = isObjectOfType('Uint8Array');
|
||
|
is.uint8ClampedArray = isObjectOfType('Uint8ClampedArray');
|
||
|
is.int16Array = isObjectOfType('Int16Array');
|
||
|
is.uint16Array = isObjectOfType('Uint16Array');
|
||
|
is.int32Array = isObjectOfType('Int32Array');
|
||
|
is.uint32Array = isObjectOfType('Uint32Array');
|
||
|
is.float32Array = isObjectOfType('Float32Array');
|
||
|
is.float64Array = isObjectOfType('Float64Array');
|
||
|
is.bigInt64Array = isObjectOfType('BigInt64Array');
|
||
|
is.bigUint64Array = isObjectOfType('BigUint64Array');
|
||
|
is.arrayBuffer = isObjectOfType('ArrayBuffer');
|
||
|
is.sharedArrayBuffer = isObjectOfType('SharedArrayBuffer');
|
||
|
is.dataView = isObjectOfType('DataView');
|
||
|
is.enumCase = (value, targetEnum) => Object.values(targetEnum).includes(value);
|
||
|
is.directInstanceOf = (instance, class_) => Object.getPrototypeOf(instance) === class_.prototype;
|
||
|
is.urlInstance = (value) => isObjectOfType('URL')(value);
|
||
|
is.urlString = (value) => {
|
||
|
if (!is.string(value)) {
|
||
|
return false;
|
||
|
}
|
||
|
try {
|
||
|
new URL(value); // eslint-disable-line no-new
|
||
|
return true;
|
||
|
}
|
||
|
catch {
|
||
|
return false;
|
||
|
}
|
||
|
};
|
||
|
// Example: `is.truthy = (value: unknown): value is (not false | not 0 | not '' | not undefined | not null) => Boolean(value);`
|
||
|
is.truthy = (value) => Boolean(value); // eslint-disable-line unicorn/prefer-native-coercion-functions
|
||
|
// Example: `is.falsy = (value: unknown): value is (not true | 0 | '' | undefined | null) => Boolean(value);`
|
||
|
is.falsy = (value) => !value;
|
||
|
is.nan = (value) => Number.isNaN(value);
|
||
|
is.primitive = (value) => is.null_(value) || isPrimitiveTypeName(typeof value);
|
||
|
is.integer = (value) => Number.isInteger(value);
|
||
|
is.safeInteger = (value) => Number.isSafeInteger(value);
|
||
|
is.plainObject = (value) => {
|
||
|
// From: https://github.com/sindresorhus/is-plain-obj/blob/main/index.js
|
||
|
if (typeof value !== 'object' || value === null) {
|
||
|
return false;
|
||
|
}
|
||
|
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
|
||
|
const prototype = Object.getPrototypeOf(value);
|
||
|
return (prototype === null || prototype === Object.prototype || Object.getPrototypeOf(prototype) === null) && !(Symbol.toStringTag in value) && !(Symbol.iterator in value);
|
||
|
};
|
||
|
is.typedArray = (value) => isTypedArrayName(getObjectType(value));
|
||
|
const isValidLength = (value) => is.safeInteger(value) && value >= 0;
|
||
|
is.arrayLike = (value) => !is.nullOrUndefined(value) && !is.function_(value) && isValidLength(value.length);
|
||
|
is.inRange = (value, range) => {
|
||
|
if (is.number(range)) {
|
||
|
return value >= Math.min(0, range) && value <= Math.max(range, 0);
|
||
|
}
|
||
|
if (is.array(range) && range.length === 2) {
|
||
|
return value >= Math.min(...range) && value <= Math.max(...range);
|
||
|
}
|
||
|
throw new TypeError(`Invalid range: ${JSON.stringify(range)}`);
|
||
|
};
|
||
|
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||
|
const NODE_TYPE_ELEMENT = 1;
|
||
|
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||
|
const DOM_PROPERTIES_TO_CHECK = [
|
||
|
'innerHTML',
|
||
|
'ownerDocument',
|
||
|
'style',
|
||
|
'attributes',
|
||
|
'nodeValue',
|
||
|
];
|
||
|
is.domElement = (value) => is.object(value)
|
||
|
&& value.nodeType === NODE_TYPE_ELEMENT
|
||
|
&& is.string(value.nodeName)
|
||
|
&& !is.plainObject(value)
|
||
|
&& DOM_PROPERTIES_TO_CHECK.every(property => property in value);
|
||
|
is.observable = (value) => {
|
||
|
if (!value) {
|
||
|
return false;
|
||
|
}
|
||
|
// eslint-disable-next-line no-use-extend-native/no-use-extend-native, @typescript-eslint/no-unsafe-call
|
||
|
if (value === value[Symbol.observable]?.()) {
|
||
|
return true;
|
||
|
}
|
||
|
// eslint-disable-next-line @typescript-eslint/no-unsafe-call
|
||
|
if (value === value['@@observable']?.()) {
|
||
|
return true;
|
||
|
}
|
||
|
return false;
|
||
|
};
|
||
|
is.nodeStream = (value) => is.object(value) && is.function_(value.pipe) && !is.observable(value);
|
||
|
is.infinite = (value) => value === Number.POSITIVE_INFINITY || value === Number.NEGATIVE_INFINITY;
|
||
|
const isAbsoluteMod2 = (remainder) => (value) => is.integer(value) && Math.abs(value % 2) === remainder;
|
||
|
is.evenInteger = isAbsoluteMod2(0);
|
||
|
is.oddInteger = isAbsoluteMod2(1);
|
||
|
is.emptyArray = (value) => is.array(value) && value.length === 0;
|
||
|
is.nonEmptyArray = (value) => is.array(value) && value.length > 0;
|
||
|
is.emptyString = (value) => is.string(value) && value.length === 0;
|
||
|
const isWhiteSpaceString = (value) => is.string(value) && !/\S/.test(value);
|
||
|
is.emptyStringOrWhitespace = (value) => is.emptyString(value) || isWhiteSpaceString(value);
|
||
|
// TODO: Use `not ''` when the `not` operator is available.
|
||
|
is.nonEmptyString = (value) => is.string(value) && value.length > 0;
|
||
|
// TODO: Use `not ''` when the `not` operator is available.
|
||
|
is.nonEmptyStringAndNotWhitespace = (value) => is.string(value) && !is.emptyStringOrWhitespace(value);
|
||
|
// eslint-disable-next-line unicorn/no-array-callback-reference
|
||
|
is.emptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length === 0;
|
||
|
// TODO: Use `not` operator here to remove `Map` and `Set` from type guard:
|
||
|
// - https://github.com/Microsoft/TypeScript/pull/29317
|
||
|
// eslint-disable-next-line unicorn/no-array-callback-reference
|
||
|
is.nonEmptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length > 0;
|
||
|
is.emptySet = (value) => is.set(value) && value.size === 0;
|
||
|
is.nonEmptySet = (value) => is.set(value) && value.size > 0;
|
||
|
// eslint-disable-next-line unicorn/no-array-callback-reference
|
||
|
is.emptyMap = (value) => is.map(value) && value.size === 0;
|
||
|
// eslint-disable-next-line unicorn/no-array-callback-reference
|
||
|
is.nonEmptyMap = (value) => is.map(value) && value.size > 0;
|
||
|
// `PropertyKey` is any value that can be used as an object key (string, number, or symbol)
|
||
|
is.propertyKey = (value) => is.any([is.string, is.number, is.symbol], value);
|
||
|
is.formData = (value) => isObjectOfType('FormData')(value);
|
||
|
is.urlSearchParams = (value) => isObjectOfType('URLSearchParams')(value);
|
||
|
const predicateOnArray = (method, predicate, values) => {
|
||
|
if (!is.function_(predicate)) {
|
||
|
throw new TypeError(`Invalid predicate: ${JSON.stringify(predicate)}`);
|
||
|
}
|
||
|
if (values.length === 0) {
|
||
|
throw new TypeError('Invalid number of values');
|
||
|
}
|
||
|
return method.call(values, predicate);
|
||
|
};
|
||
|
is.any = (predicate, ...values) => {
|
||
|
const predicates = is.array(predicate) ? predicate : [predicate];
|
||
|
return predicates.some(singlePredicate => predicateOnArray(Array.prototype.some, singlePredicate, values));
|
||
|
};
|
||
|
is.all = (predicate, ...values) => predicateOnArray(Array.prototype.every, predicate, values);
|
||
|
const assertType = (condition, description, value, options = {}) => {
|
||
|
if (!condition) {
|
||
|
const { multipleValues } = options;
|
||
|
const valuesMessage = multipleValues
|
||
|
? `received values of types ${[
|
||
|
...new Set(value.map(singleValue => `\`${is(singleValue)}\``)),
|
||
|
].join(', ')}`
|
||
|
: `received value of type \`${is(value)}\``;
|
||
|
throw new TypeError(`Expected value which is \`${description}\`, ${valuesMessage}.`);
|
||
|
}
|
||
|
};
|
||
|
/* eslint-disable @typescript-eslint/no-confusing-void-expression */
|
||
|
const assert$1 = {
|
||
|
// Unknowns.
|
||
|
undefined: (value) => assertType(is.undefined(value), 'undefined', value),
|
||
|
string: (value) => assertType(is.string(value), 'string', value),
|
||
|
number: (value) => assertType(is.number(value), 'number', value),
|
||
|
bigint: (value) => assertType(is.bigint(value), 'bigint', value),
|
||
|
// eslint-disable-next-line @typescript-eslint/ban-types
|
||
|
function_: (value) => assertType(is.function_(value), 'Function', value),
|
||
|
null_: (value) => assertType(is.null_(value), 'null', value),
|
||
|
class_: (value) => assertType(is.class_(value), "Class" /* AssertionTypeDescription.class_ */, value),
|
||
|
boolean: (value) => assertType(is.boolean(value), 'boolean', value),
|
||
|
symbol: (value) => assertType(is.symbol(value), 'symbol', value),
|
||
|
numericString: (value) => assertType(is.numericString(value), "string with a number" /* AssertionTypeDescription.numericString */, value),
|
||
|
array: (value, assertion) => {
|
||
|
const assert = assertType;
|
||
|
assert(is.array(value), 'Array', value);
|
||
|
if (assertion) {
|
||
|
// eslint-disable-next-line unicorn/no-array-for-each, unicorn/no-array-callback-reference
|
||
|
value.forEach(assertion);
|
||
|
}
|
||
|
},
|
||
|
buffer: (value) => assertType(is.buffer(value), 'Buffer', value),
|
||
|
blob: (value) => assertType(is.blob(value), 'Blob', value),
|
||
|
nullOrUndefined: (value) => assertType(is.nullOrUndefined(value), "null or undefined" /* AssertionTypeDescription.nullOrUndefined */, value),
|
||
|
object: (value) => assertType(is.object(value), 'Object', value),
|
||
|
iterable: (value) => assertType(is.iterable(value), "Iterable" /* AssertionTypeDescription.iterable */, value),
|
||
|
asyncIterable: (value) => assertType(is.asyncIterable(value), "AsyncIterable" /* AssertionTypeDescription.asyncIterable */, value),
|
||
|
generator: (value) => assertType(is.generator(value), 'Generator', value),
|
||
|
asyncGenerator: (value) => assertType(is.asyncGenerator(value), 'AsyncGenerator', value),
|
||
|
nativePromise: (value) => assertType(is.nativePromise(value), "native Promise" /* AssertionTypeDescription.nativePromise */, value),
|
||
|
promise: (value) => assertType(is.promise(value), 'Promise', value),
|
||
|
generatorFunction: (value) => assertType(is.generatorFunction(value), 'GeneratorFunction', value),
|
||
|
asyncGeneratorFunction: (value) => assertType(is.asyncGeneratorFunction(value), 'AsyncGeneratorFunction', value),
|
||
|
// eslint-disable-next-line @typescript-eslint/ban-types
|
||
|
asyncFunction: (value) => assertType(is.asyncFunction(value), 'AsyncFunction', value),
|
||
|
// eslint-disable-next-line @typescript-eslint/ban-types
|
||
|
boundFunction: (value) => assertType(is.boundFunction(value), 'Function', value),
|
||
|
regExp: (value) => assertType(is.regExp(value), 'RegExp', value),
|
||
|
date: (value) => assertType(is.date(value), 'Date', value),
|
||
|
error: (value) => assertType(is.error(value), 'Error', value),
|
||
|
map: (value) => assertType(is.map(value), 'Map', value),
|
||
|
set: (value) => assertType(is.set(value), 'Set', value),
|
||
|
weakMap: (value) => assertType(is.weakMap(value), 'WeakMap', value),
|
||
|
weakSet: (value) => assertType(is.weakSet(value), 'WeakSet', value),
|
||
|
weakRef: (value) => assertType(is.weakRef(value), 'WeakRef', value),
|
||
|
int8Array: (value) => assertType(is.int8Array(value), 'Int8Array', value),
|
||
|
uint8Array: (value) => assertType(is.uint8Array(value), 'Uint8Array', value),
|
||
|
uint8ClampedArray: (value) => assertType(is.uint8ClampedArray(value), 'Uint8ClampedArray', value),
|
||
|
int16Array: (value) => assertType(is.int16Array(value), 'Int16Array', value),
|
||
|
uint16Array: (value) => assertType(is.uint16Array(value), 'Uint16Array', value),
|
||
|
int32Array: (value) => assertType(is.int32Array(value), 'Int32Array', value),
|
||
|
uint32Array: (value) => assertType(is.uint32Array(value), 'Uint32Array', value),
|
||
|
float32Array: (value) => assertType(is.float32Array(value), 'Float32Array', value),
|
||
|
float64Array: (value) => assertType(is.float64Array(value), 'Float64Array', value),
|
||
|
bigInt64Array: (value) => assertType(is.bigInt64Array(value), 'BigInt64Array', value),
|
||
|
bigUint64Array: (value) => assertType(is.bigUint64Array(value), 'BigUint64Array', value),
|
||
|
arrayBuffer: (value) => assertType(is.arrayBuffer(value), 'ArrayBuffer', value),
|
||
|
sharedArrayBuffer: (value) => assertType(is.sharedArrayBuffer(value), 'SharedArrayBuffer', value),
|
||
|
dataView: (value) => assertType(is.dataView(value), 'DataView', value),
|
||
|
enumCase: (value, targetEnum) => assertType(is.enumCase(value, targetEnum), 'EnumCase', value),
|
||
|
urlInstance: (value) => assertType(is.urlInstance(value), 'URL', value),
|
||
|
urlString: (value) => assertType(is.urlString(value), "string with a URL" /* AssertionTypeDescription.urlString */, value),
|
||
|
truthy: (value) => assertType(is.truthy(value), "truthy" /* AssertionTypeDescription.truthy */, value),
|
||
|
falsy: (value) => assertType(is.falsy(value), "falsy" /* AssertionTypeDescription.falsy */, value),
|
||
|
nan: (value) => assertType(is.nan(value), "NaN" /* AssertionTypeDescription.nan */, value),
|
||
|
primitive: (value) => assertType(is.primitive(value), "primitive" /* AssertionTypeDescription.primitive */, value),
|
||
|
integer: (value) => assertType(is.integer(value), "integer" /* AssertionTypeDescription.integer */, value),
|
||
|
safeInteger: (value) => assertType(is.safeInteger(value), "integer" /* AssertionTypeDescription.safeInteger */, value),
|
||
|
plainObject: (value) => assertType(is.plainObject(value), "plain object" /* AssertionTypeDescription.plainObject */, value),
|
||
|
typedArray: (value) => assertType(is.typedArray(value), "TypedArray" /* AssertionTypeDescription.typedArray */, value),
|
||
|
arrayLike: (value) => assertType(is.arrayLike(value), "array-like" /* AssertionTypeDescription.arrayLike */, value),
|
||
|
domElement: (value) => assertType(is.domElement(value), "HTMLElement" /* AssertionTypeDescription.domElement */, value),
|
||
|
observable: (value) => assertType(is.observable(value), 'Observable', value),
|
||
|
nodeStream: (value) => assertType(is.nodeStream(value), "Node.js Stream" /* AssertionTypeDescription.nodeStream */, value),
|
||
|
infinite: (value) => assertType(is.infinite(value), "infinite number" /* AssertionTypeDescription.infinite */, value),
|
||
|
emptyArray: (value) => assertType(is.emptyArray(value), "empty array" /* AssertionTypeDescription.emptyArray */, value),
|
||
|
nonEmptyArray: (value) => assertType(is.nonEmptyArray(value), "non-empty array" /* AssertionTypeDescription.nonEmptyArray */, value),
|
||
|
emptyString: (value) => assertType(is.emptyString(value), "empty string" /* AssertionTypeDescription.emptyString */, value),
|
||
|
emptyStringOrWhitespace: (value) => assertType(is.emptyStringOrWhitespace(value), "empty string or whitespace" /* AssertionTypeDescription.emptyStringOrWhitespace */, value),
|
||
|
nonEmptyString: (value) => assertType(is.nonEmptyString(value), "non-empty string" /* AssertionTypeDescription.nonEmptyString */, value),
|
||
|
nonEmptyStringAndNotWhitespace: (value) => assertType(is.nonEmptyStringAndNotWhitespace(value), "non-empty string and not whitespace" /* AssertionTypeDescription.nonEmptyStringAndNotWhitespace */, value),
|
||
|
emptyObject: (value) => assertType(is.emptyObject(value), "empty object" /* AssertionTypeDescription.emptyObject */, value),
|
||
|
nonEmptyObject: (value) => assertType(is.nonEmptyObject(value), "non-empty object" /* AssertionTypeDescription.nonEmptyObject */, value),
|
||
|
emptySet: (value) => assertType(is.emptySet(value), "empty set" /* AssertionTypeDescription.emptySet */, value),
|
||
|
nonEmptySet: (value) => assertType(is.nonEmptySet(value), "non-empty set" /* AssertionTypeDescription.nonEmptySet */, value),
|
||
|
emptyMap: (value) => assertType(is.emptyMap(value), "empty map" /* AssertionTypeDescription.emptyMap */, value),
|
||
|
nonEmptyMap: (value) => assertType(is.nonEmptyMap(value), "non-empty map" /* AssertionTypeDescription.nonEmptyMap */, value),
|
||
|
propertyKey: (value) => assertType(is.propertyKey(value), 'PropertyKey', value),
|
||
|
formData: (value) => assertType(is.formData(value), 'FormData', value),
|
||
|
urlSearchParams: (value) => assertType(is.urlSearchParams(value), 'URLSearchParams', value),
|
||
|
// Numbers.
|
||
|
evenInteger: (value) => assertType(is.evenInteger(value), "even integer" /* AssertionTypeDescription.evenInteger */, value),
|
||
|
oddInteger: (value) => assertType(is.oddInteger(value), "odd integer" /* AssertionTypeDescription.oddInteger */, value),
|
||
|
// Two arguments.
|
||
|
directInstanceOf: (instance, class_) => assertType(is.directInstanceOf(instance, class_), "T" /* AssertionTypeDescription.directInstanceOf */, instance),
|
||
|
inRange: (value, range) => assertType(is.inRange(value, range), "in range" /* AssertionTypeDescription.inRange */, value),
|
||
|
// Variadic functions.
|
||
|
any: (predicate, ...values) => assertType(is.any(predicate, ...values), "predicate returns truthy for any value" /* AssertionTypeDescription.any */, values, { multipleValues: true }),
|
||
|
all: (predicate, ...values) => assertType(is.all(predicate, ...values), "predicate returns truthy for all values" /* AssertionTypeDescription.all */, values, { multipleValues: true }),
|
||
|
};
|
||
|
/* eslint-enable @typescript-eslint/no-confusing-void-expression */
|
||
|
// Some few keywords are reserved, but we'll populate them for Node.js users
|
||
|
// See https://github.com/Microsoft/TypeScript/issues/2536
|
||
|
Object.defineProperties(is, {
|
||
|
class: {
|
||
|
value: is.class_,
|
||
|
},
|
||
|
function: {
|
||
|
value: is.function_,
|
||
|
},
|
||
|
null: {
|
||
|
value: is.null_,
|
||
|
},
|
||
|
});
|
||
|
Object.defineProperties(assert$1, {
|
||
|
class: {
|
||
|
value: assert$1.class_,
|
||
|
},
|
||
|
function: {
|
||
|
value: assert$1.function_,
|
||
|
},
|
||
|
null: {
|
||
|
value: assert$1.null_,
|
||
|
},
|
||
|
});
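// Illustrative sketch (not part of the original bundle): each `assert$1.*` helper
// wraps the matching `is.*` predicate and throws a TypeError naming the expected
// type when the check fails; the reserved-word aliases (`class`, `function`,
// `null`) are wired up by the `Object.defineProperties` calls above.
//
//     assert$1.nonEmptyString('hello');  // passes silently
//     assert$1.plainObject([]);          // throws: expected `plain object`, received `Array`
//     assert$1.null(null);               // alias for assert$1.null_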
|
||
|
|
||
|
let CancelError$1 = class CancelError extends Error {
    constructor(reason) {
        super(reason || 'Promise was canceled');
        this.name = 'CancelError';
    }

    get isCanceled() {
        return true;
    }
};
|
||
|
|
||
|
// TODO: Use private class fields when ESLint 8 is out.
|
||
|
|
||
|
class PCancelable {
|
||
|
static fn(userFunction) {
|
||
|
return (...arguments_) => {
|
||
|
return new PCancelable((resolve, reject, onCancel) => {
|
||
|
arguments_.push(onCancel);
|
||
|
// eslint-disable-next-line promise/prefer-await-to-then
|
||
|
userFunction(...arguments_).then(resolve, reject);
|
||
|
});
|
||
|
};
|
||
|
}
|
||
|
|
||
|
constructor(executor) {
|
||
|
this._cancelHandlers = [];
|
||
|
this._isPending = true;
|
||
|
this._isCanceled = false;
|
||
|
this._rejectOnCancel = true;
|
||
|
|
||
|
this._promise = new Promise((resolve, reject) => {
|
||
|
this._reject = reject;
|
||
|
|
||
|
const onResolve = value => {
|
||
|
if (!this._isCanceled || !onCancel.shouldReject) {
|
||
|
this._isPending = false;
|
||
|
resolve(value);
|
||
|
}
|
||
|
};
|
||
|
|
||
|
const onReject = error => {
|
||
|
this._isPending = false;
|
||
|
reject(error);
|
||
|
};
|
||
|
|
||
|
const onCancel = handler => {
|
||
|
if (!this._isPending) {
|
||
|
throw new Error('The `onCancel` handler was attached after the promise settled.');
|
||
|
}
|
||
|
|
||
|
this._cancelHandlers.push(handler);
|
||
|
};
|
||
|
|
||
|
Object.defineProperties(onCancel, {
|
||
|
shouldReject: {
|
||
|
get: () => this._rejectOnCancel,
|
||
|
set: boolean => {
|
||
|
this._rejectOnCancel = boolean;
|
||
|
}
|
||
|
}
|
||
|
});
|
||
|
|
||
|
executor(onResolve, onReject, onCancel);
|
||
|
});
|
||
|
}
|
||
|
|
||
|
then(onFulfilled, onRejected) {
|
||
|
// eslint-disable-next-line promise/prefer-await-to-then
|
||
|
return this._promise.then(onFulfilled, onRejected);
|
||
|
}
|
||
|
|
||
|
catch(onRejected) {
|
||
|
// eslint-disable-next-line promise/prefer-await-to-then
|
||
|
return this._promise.catch(onRejected);
|
||
|
}
|
||
|
|
||
|
finally(onFinally) {
|
||
|
// eslint-disable-next-line promise/prefer-await-to-then
|
||
|
return this._promise.finally(onFinally);
|
||
|
}
|
||
|
|
||
|
cancel(reason) {
|
||
|
if (!this._isPending || this._isCanceled) {
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
this._isCanceled = true;
|
||
|
|
||
|
if (this._cancelHandlers.length > 0) {
|
||
|
try {
|
||
|
for (const handler of this._cancelHandlers) {
|
||
|
handler();
|
||
|
}
|
||
|
} catch (error) {
|
||
|
this._reject(error);
|
||
|
return;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
if (this._rejectOnCancel) {
|
||
|
this._reject(new CancelError$1(reason));
|
||
|
}
|
||
|
}
|
||
|
|
||
|
get isCanceled() {
|
||
|
return this._isCanceled;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
Object.setPrototypeOf(PCancelable.prototype, Promise.prototype);
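// Illustrative sketch (not part of the original bundle): a PCancelable behaves
// like a Promise but adds `cancel()`; the executor's third argument registers
// cleanup handlers and exposes `onCancel.shouldReject` to opt out of rejection.
//
//     const delayed = new PCancelable((resolve, reject, onCancel) => {
//         const timeout = setTimeout(resolve, 1000);
//         onCancel(() => clearTimeout(timeout));
//     });
//     delayed.catch(error => console.log(error.isCanceled)); // true after cancel()
//     delayed.cancel('no longer needed');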
|
||
|
|
||
|
// A hacky check to prevent circular references.
|
||
|
function isRequest(x) {
|
||
|
return is.object(x) && '_onResponse' in x;
|
||
|
}
|
||
|
/**
|
||
|
An error to be thrown when a request fails.
|
||
|
Contains a `code` property with error class code, like `ECONNREFUSED`.
|
||
|
*/
|
||
|
let RequestError$1 = class RequestError extends Error {
|
||
|
constructor(message, error, self) {
|
||
|
super(message);
|
||
|
Object.defineProperty(this, "input", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "code", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "stack", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "response", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "request", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "timings", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Error.captureStackTrace(this, this.constructor);
|
||
|
this.name = 'RequestError';
|
||
|
this.code = error.code ?? 'ERR_GOT_REQUEST_ERROR';
|
||
|
this.input = error.input;
|
||
|
if (isRequest(self)) {
|
||
|
Object.defineProperty(this, 'request', {
|
||
|
enumerable: false,
|
||
|
value: self,
|
||
|
});
|
||
|
Object.defineProperty(this, 'response', {
|
||
|
enumerable: false,
|
||
|
value: self.response,
|
||
|
});
|
||
|
this.options = self.options;
|
||
|
}
|
||
|
else {
|
||
|
this.options = self;
|
||
|
}
|
||
|
this.timings = this.request?.timings;
|
||
|
// Recover the original stacktrace
|
||
|
if (is.string(error.stack) && is.string(this.stack)) {
|
||
|
const indexOfMessage = this.stack.indexOf(this.message) + this.message.length;
|
||
|
const thisStackTrace = this.stack.slice(indexOfMessage).split('\n').reverse();
|
||
|
const errorStackTrace = error.stack.slice(error.stack.indexOf(error.message) + error.message.length).split('\n').reverse();
|
||
|
// Remove duplicated traces
|
||
|
while (errorStackTrace.length > 0 && errorStackTrace[0] === thisStackTrace[0]) {
|
||
|
thisStackTrace.shift();
|
||
|
}
|
||
|
this.stack = `${this.stack.slice(0, indexOfMessage)}${thisStackTrace.reverse().join('\n')}${errorStackTrace.reverse().join('\n')}`;
|
||
|
}
|
||
|
}
|
||
|
};
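// Illustrative sketch (not part of the original bundle): downstream code can
// treat every failure from the HTTP client assembled later in this bundle as a
// RequestError and branch on `code`, while `options`, `response` and `timings`
// stay reachable when they exist. The `got(...)` call is assumed for the sake of
// the example and is not defined at this point in the file.
//
//     try {
//         await got('https://example.com');
//     } catch (error) {
//         if (error instanceof RequestError$1) {
//             console.log(error.code, error.response?.statusCode, error.timings?.phases.total);
//         }
//     }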
|
||
|
/**
|
||
|
An error to be thrown when the server redirects you more than ten times.
|
||
|
Includes a `response` property.
|
||
|
*/
|
||
|
class MaxRedirectsError extends RequestError$1 {
|
||
|
constructor(request) {
|
||
|
super(`Redirected ${request.options.maxRedirects} times. Aborting.`, {}, request);
|
||
|
this.name = 'MaxRedirectsError';
|
||
|
this.code = 'ERR_TOO_MANY_REDIRECTS';
|
||
|
}
|
||
|
}
|
||
|
/**
|
||
|
An error to be thrown when the server response code is not 2xx, nor 3xx when `options.followRedirect` is `true`; it is never thrown for 304.
|
||
|
Includes a `response` property.
|
||
|
*/
|
||
|
// eslint-disable-next-line @typescript-eslint/naming-convention
class HTTPError extends RequestError$1 {
    constructor(response) {
        super(`Response code ${response.statusCode} (${response.statusMessage})`, {}, response.request);
        this.name = 'HTTPError';
        this.code = 'ERR_NON_2XX_3XX_RESPONSE';
    }
}
|
||
|
/**
|
||
|
An error to be thrown when a cache method fails.
|
||
|
For example, if the database goes down or there's a filesystem error.
|
||
|
*/
|
||
|
let CacheError$1 = class CacheError extends RequestError$1 {
|
||
|
constructor(error, request) {
|
||
|
super(error.message, error, request);
|
||
|
this.name = 'CacheError';
|
||
|
this.code = this.code === 'ERR_GOT_REQUEST_ERROR' ? 'ERR_CACHE_ACCESS' : this.code;
|
||
|
}
|
||
|
};
|
||
|
/**
|
||
|
An error to be thrown when the request body is a stream and an error occurs while reading from that stream.
|
||
|
*/
|
||
|
class UploadError extends RequestError$1 {
|
||
|
constructor(error, request) {
|
||
|
super(error.message, error, request);
|
||
|
this.name = 'UploadError';
|
||
|
this.code = this.code === 'ERR_GOT_REQUEST_ERROR' ? 'ERR_UPLOAD' : this.code;
|
||
|
}
|
||
|
}
|
||
|
/**
|
||
|
An error to be thrown when the request is aborted due to a timeout.
|
||
|
Includes an `event` and `timings` property.
|
||
|
*/
|
||
|
let TimeoutError$1 = class TimeoutError extends RequestError$1 {
|
||
|
constructor(error, timings, request) {
|
||
|
super(error.message, error, request);
|
||
|
Object.defineProperty(this, "timings", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "event", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
this.name = 'TimeoutError';
|
||
|
this.event = error.event;
|
||
|
this.timings = timings;
|
||
|
}
|
||
|
};
|
||
|
/**
|
||
|
An error to be thrown when reading from response stream fails.
|
||
|
*/
|
||
|
class ReadError extends RequestError$1 {
|
||
|
constructor(error, request) {
|
||
|
super(error.message, error, request);
|
||
|
this.name = 'ReadError';
|
||
|
this.code = this.code === 'ERR_GOT_REQUEST_ERROR' ? 'ERR_READING_RESPONSE_STREAM' : this.code;
|
||
|
}
|
||
|
}
|
||
|
/**
|
||
|
An error which always triggers a new retry when thrown.
|
||
|
*/
|
||
|
class RetryError extends RequestError$1 {
|
||
|
constructor(request) {
|
||
|
super('Retrying', {}, request);
|
||
|
this.name = 'RetryError';
|
||
|
this.code = 'ERR_RETRYING';
|
||
|
}
|
||
|
}
|
||
|
/**
|
||
|
An error to be thrown when the request is aborted by AbortController.
|
||
|
*/
|
||
|
class AbortError extends RequestError$1 {
|
||
|
constructor(request) {
|
||
|
super('This operation was aborted.', {}, request);
|
||
|
this.code = 'ERR_ABORTED';
|
||
|
this.name = 'AbortError';
|
||
|
}
|
||
|
}
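// Summary sketch (not part of the original bundle) of the `name`/`code` pairs the
// subclasses above set, which is what callers use to tell failure modes apart.
// CacheError, UploadError and ReadError only apply their code when the wrapped
// error did not already carry one.
//
//     HTTPError         -> 'ERR_NON_2XX_3XX_RESPONSE'
//     MaxRedirectsError -> 'ERR_TOO_MANY_REDIRECTS'
//     CacheError        -> 'ERR_CACHE_ACCESS'
//     UploadError       -> 'ERR_UPLOAD'
//     ReadError         -> 'ERR_READING_RESPONSE_STREAM'
//     RetryError        -> 'ERR_RETRYING'
//     AbortError        -> 'ERR_ABORTED'
//     TimeoutError      -> keeps the wrapped error's code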
|
||
|
|
||
|
var source$1 = {exports: {}};
|
||
|
|
||
|
(function (module, exports) {
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
function isTLSSocket(socket) {
|
||
|
return socket.encrypted;
|
||
|
}
|
||
|
const deferToConnect = (socket, fn) => {
|
||
|
let listeners;
|
||
|
if (typeof fn === 'function') {
|
||
|
const connect = fn;
|
||
|
listeners = { connect };
|
||
|
}
|
||
|
else {
|
||
|
listeners = fn;
|
||
|
}
|
||
|
const hasConnectListener = typeof listeners.connect === 'function';
|
||
|
const hasSecureConnectListener = typeof listeners.secureConnect === 'function';
|
||
|
const hasCloseListener = typeof listeners.close === 'function';
|
||
|
const onConnect = () => {
|
||
|
if (hasConnectListener) {
|
||
|
listeners.connect();
|
||
|
}
|
||
|
if (isTLSSocket(socket) && hasSecureConnectListener) {
|
||
|
if (socket.authorized) {
|
||
|
listeners.secureConnect();
|
||
|
}
|
||
|
else if (!socket.authorizationError) {
|
||
|
socket.once('secureConnect', listeners.secureConnect);
|
||
|
}
|
||
|
}
|
||
|
if (hasCloseListener) {
|
||
|
socket.once('close', listeners.close);
|
||
|
}
|
||
|
};
|
||
|
if (socket.writable && !socket.connecting) {
|
||
|
onConnect();
|
||
|
}
|
||
|
else if (socket.connecting) {
|
||
|
socket.once('connect', onConnect);
|
||
|
}
|
||
|
else if (socket.destroyed && hasCloseListener) {
|
||
|
listeners.close(socket._hadError);
|
||
|
}
|
||
|
};
|
||
|
exports.default = deferToConnect;
|
||
|
// For CommonJS default export support
|
||
|
module.exports = deferToConnect;
|
||
|
module.exports.default = deferToConnect;
|
||
|
} (source$1, source$1.exports));
|
||
|
|
||
|
var sourceExports = source$1.exports;
|
||
|
var deferToConnect = /*@__PURE__*/getDefaultExportFromCjs(sourceExports);
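// Illustrative sketch (not part of the original bundle): `deferToConnect` invokes
// the listeners once the socket is actually usable; `secureConnect` only fires
// for TLS sockets and `close` receives the socket's `_hadError` flag. The
// `socket` variable is a placeholder for an existing net.Socket / tls.TLSSocket.
//
//     deferToConnect(socket, {
//         connect: () => console.log('TCP connection established'),
//         secureConnect: () => console.log('TLS handshake finished'),
//         close: hadError => console.log('socket closed, hadError =', hadError),
//     });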
|
||
|
|
||
|
const timer = (request) => {
|
||
|
if (request.timings) {
|
||
|
return request.timings;
|
||
|
}
|
||
|
const timings = {
|
||
|
start: Date.now(),
|
||
|
socket: undefined,
|
||
|
lookup: undefined,
|
||
|
connect: undefined,
|
||
|
secureConnect: undefined,
|
||
|
upload: undefined,
|
||
|
response: undefined,
|
||
|
end: undefined,
|
||
|
error: undefined,
|
||
|
abort: undefined,
|
||
|
phases: {
|
||
|
wait: undefined,
|
||
|
dns: undefined,
|
||
|
tcp: undefined,
|
||
|
tls: undefined,
|
||
|
request: undefined,
|
||
|
firstByte: undefined,
|
||
|
download: undefined,
|
||
|
total: undefined,
|
||
|
},
|
||
|
};
|
||
|
request.timings = timings;
|
||
|
const handleError = (origin) => {
|
||
|
origin.once(errorMonitor, () => {
|
||
|
timings.error = Date.now();
|
||
|
timings.phases.total = timings.error - timings.start;
|
||
|
});
|
||
|
};
|
||
|
handleError(request);
|
||
|
const onAbort = () => {
|
||
|
timings.abort = Date.now();
|
||
|
timings.phases.total = timings.abort - timings.start;
|
||
|
};
|
||
|
request.prependOnceListener('abort', onAbort);
|
||
|
const onSocket = (socket) => {
|
||
|
timings.socket = Date.now();
|
||
|
timings.phases.wait = timings.socket - timings.start;
|
||
|
if (types.isProxy(socket)) {
|
||
|
return;
|
||
|
}
|
||
|
const lookupListener = () => {
|
||
|
timings.lookup = Date.now();
|
||
|
timings.phases.dns = timings.lookup - timings.socket;
|
||
|
};
|
||
|
socket.prependOnceListener('lookup', lookupListener);
|
||
|
deferToConnect(socket, {
|
||
|
connect: () => {
|
||
|
timings.connect = Date.now();
|
||
|
if (timings.lookup === undefined) {
|
||
|
socket.removeListener('lookup', lookupListener);
|
||
|
timings.lookup = timings.connect;
|
||
|
timings.phases.dns = timings.lookup - timings.socket;
|
||
|
}
|
||
|
timings.phases.tcp = timings.connect - timings.lookup;
|
||
|
},
|
||
|
secureConnect: () => {
|
||
|
timings.secureConnect = Date.now();
|
||
|
timings.phases.tls = timings.secureConnect - timings.connect;
|
||
|
},
|
||
|
});
|
||
|
};
|
||
|
if (request.socket) {
|
||
|
onSocket(request.socket);
|
||
|
}
|
||
|
else {
|
||
|
request.prependOnceListener('socket', onSocket);
|
||
|
}
|
||
|
const onUpload = () => {
|
||
|
timings.upload = Date.now();
|
||
|
timings.phases.request = timings.upload - (timings.secureConnect ?? timings.connect);
|
||
|
};
|
||
|
if (request.writableFinished) {
|
||
|
onUpload();
|
||
|
}
|
||
|
else {
|
||
|
request.prependOnceListener('finish', onUpload);
|
||
|
}
|
||
|
request.prependOnceListener('response', (response) => {
|
||
|
timings.response = Date.now();
|
||
|
timings.phases.firstByte = timings.response - timings.upload;
|
||
|
response.timings = timings;
|
||
|
handleError(response);
|
||
|
response.prependOnceListener('end', () => {
|
||
|
request.off('abort', onAbort);
|
||
|
response.off('aborted', onAbort);
|
||
|
if (timings.phases.total) {
|
||
|
// Aborted or errored
|
||
|
return;
|
||
|
}
|
||
|
timings.end = Date.now();
|
||
|
timings.phases.download = timings.end - timings.response;
|
||
|
timings.phases.total = timings.end - timings.start;
|
||
|
});
|
||
|
response.prependOnceListener('aborted', onAbort);
|
||
|
});
|
||
|
return timings;
|
||
|
};
|
||
|
var timer$1 = timer;
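// Illustrative sketch (not part of the original bundle): `timer$1(request)`
// attaches a `timings` object to a ClientRequest and fills in the phases as the
// request progresses (wait -> dns -> tcp -> tls -> request -> firstByte ->
// download, with `phases.total` set on 'end', 'abort' or error). `nodeHttps`
// stands for Node's `https` module here.
//
//     const request = nodeHttps.request('https://example.com');
//     const timings = timer$1(request);
//     request.once('response', response => {
//         response.resume();
//         response.once('end', () => console.log(timings.phases));
//     });
//     request.end();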
|
||
|
|
||
|
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
|
||
|
const DATA_URL_DEFAULT_MIME_TYPE = 'text/plain';
|
||
|
const DATA_URL_DEFAULT_CHARSET = 'us-ascii';
|
||
|
|
||
|
const testParameter = (name, filters) => filters.some(filter => filter instanceof RegExp ? filter.test(name) : filter === name);
|
||
|
|
||
|
const supportedProtocols = new Set([
|
||
|
'https:',
|
||
|
'http:',
|
||
|
'file:',
|
||
|
]);
|
||
|
|
||
|
const hasCustomProtocol = urlString => {
|
||
|
try {
|
||
|
const {protocol} = new URL(urlString);
|
||
|
return protocol.endsWith(':') && !supportedProtocols.has(protocol);
|
||
|
} catch {
|
||
|
return false;
|
||
|
}
|
||
|
};
|
||
|
|
||
|
const normalizeDataURL = (urlString, {stripHash}) => {
|
||
|
const match = /^data:(?<type>[^,]*?),(?<data>[^#]*?)(?:#(?<hash>.*))?$/.exec(urlString);
|
||
|
|
||
|
if (!match) {
|
||
|
throw new Error(`Invalid URL: ${urlString}`);
|
||
|
}
|
||
|
|
||
|
let {type, data, hash} = match.groups;
|
||
|
const mediaType = type.split(';');
|
||
|
hash = stripHash ? '' : hash;
|
||
|
|
||
|
let isBase64 = false;
|
||
|
if (mediaType[mediaType.length - 1] === 'base64') {
|
||
|
mediaType.pop();
|
||
|
isBase64 = true;
|
||
|
}
|
||
|
|
||
|
// Lowercase MIME type
|
||
|
const mimeType = mediaType.shift()?.toLowerCase() ?? '';
|
||
|
const attributes = mediaType
|
||
|
.map(attribute => {
|
||
|
let [key, value = ''] = attribute.split('=').map(string => string.trim());
|
||
|
|
||
|
// Lowercase `charset`
|
||
|
if (key === 'charset') {
|
||
|
value = value.toLowerCase();
|
||
|
|
||
|
if (value === DATA_URL_DEFAULT_CHARSET) {
|
||
|
return '';
|
||
|
}
|
||
|
}
|
||
|
|
||
|
return `${key}${value ? `=${value}` : ''}`;
|
||
|
})
|
||
|
.filter(Boolean);
|
||
|
|
||
|
const normalizedMediaType = [
|
||
|
...attributes,
|
||
|
];
|
||
|
|
||
|
if (isBase64) {
|
||
|
normalizedMediaType.push('base64');
|
||
|
}
|
||
|
|
||
|
if (normalizedMediaType.length > 0 || (mimeType && mimeType !== DATA_URL_DEFAULT_MIME_TYPE)) {
|
||
|
normalizedMediaType.unshift(mimeType);
|
||
|
}
|
||
|
|
||
|
return `data:${normalizedMediaType.join(';')},${isBase64 ? data.trim() : data}${hash ? `#${hash}` : ''}`;
|
||
|
};
|
||
|
|
||
|
function normalizeUrl(urlString, options) {
|
||
|
options = {
|
||
|
defaultProtocol: 'http',
|
||
|
normalizeProtocol: true,
|
||
|
forceHttp: false,
|
||
|
forceHttps: false,
|
||
|
stripAuthentication: true,
|
||
|
stripHash: false,
|
||
|
stripTextFragment: true,
|
||
|
stripWWW: true,
|
||
|
removeQueryParameters: [/^utm_\w+/i],
|
||
|
removeTrailingSlash: true,
|
||
|
removeSingleSlash: true,
|
||
|
removeDirectoryIndex: false,
|
||
|
removeExplicitPort: false,
|
||
|
sortQueryParameters: true,
|
||
|
...options,
|
||
|
};
|
||
|
|
||
|
// Legacy: Append `:` to the protocol if missing.
|
||
|
if (typeof options.defaultProtocol === 'string' && !options.defaultProtocol.endsWith(':')) {
|
||
|
options.defaultProtocol = `${options.defaultProtocol}:`;
|
||
|
}
|
||
|
|
||
|
urlString = urlString.trim();
|
||
|
|
||
|
// Data URL
|
||
|
if (/^data:/i.test(urlString)) {
|
||
|
return normalizeDataURL(urlString, options);
|
||
|
}
|
||
|
|
||
|
if (hasCustomProtocol(urlString)) {
|
||
|
return urlString;
|
||
|
}
|
||
|
|
||
|
const hasRelativeProtocol = urlString.startsWith('//');
|
||
|
const isRelativeUrl = !hasRelativeProtocol && /^\.*\//.test(urlString);
|
||
|
|
||
|
// Prepend protocol
|
||
|
if (!isRelativeUrl) {
|
||
|
urlString = urlString.replace(/^(?!(?:\w+:)?\/\/)|^\/\//, options.defaultProtocol);
|
||
|
}
|
||
|
|
||
|
const urlObject = new URL(urlString);
|
||
|
|
||
|
if (options.forceHttp && options.forceHttps) {
|
||
|
throw new Error('The `forceHttp` and `forceHttps` options cannot be used together');
|
||
|
}
|
||
|
|
||
|
if (options.forceHttp && urlObject.protocol === 'https:') {
|
||
|
urlObject.protocol = 'http:';
|
||
|
}
|
||
|
|
||
|
if (options.forceHttps && urlObject.protocol === 'http:') {
|
||
|
urlObject.protocol = 'https:';
|
||
|
}
|
||
|
|
||
|
// Remove auth
|
||
|
if (options.stripAuthentication) {
|
||
|
urlObject.username = '';
|
||
|
urlObject.password = '';
|
||
|
}
|
||
|
|
||
|
// Remove hash
|
||
|
if (options.stripHash) {
|
||
|
urlObject.hash = '';
|
||
|
} else if (options.stripTextFragment) {
|
||
|
urlObject.hash = urlObject.hash.replace(/#?:~:text.*?$/i, '');
|
||
|
}
|
||
|
|
||
|
// Remove duplicate slashes if not preceded by a protocol
|
||
|
// NOTE: This could be implemented using a single negative lookbehind
|
||
|
// regex, but we avoid that to maintain compatibility with older js engines
|
||
|
// which do not have support for that feature.
|
||
|
if (urlObject.pathname) {
|
||
|
// TODO: Replace everything below with `urlObject.pathname = urlObject.pathname.replace(/(?<!\b[a-z][a-z\d+\-.]{1,50}:)\/{2,}/g, '/');` when Safari supports negative lookbehind.
|
||
|
|
||
|
// Split the string by occurrences of this protocol regex, and perform
|
||
|
// duplicate-slash replacement on the strings between those occurrences
|
||
|
// (if any).
|
||
|
const protocolRegex = /\b[a-z][a-z\d+\-.]{1,50}:\/\//g;
|
||
|
|
||
|
let lastIndex = 0;
|
||
|
let result = '';
|
||
|
for (;;) {
|
||
|
const match = protocolRegex.exec(urlObject.pathname);
|
||
|
if (!match) {
|
||
|
break;
|
||
|
}
|
||
|
|
||
|
const protocol = match[0];
|
||
|
const protocolAtIndex = match.index;
|
||
|
const intermediate = urlObject.pathname.slice(lastIndex, protocolAtIndex);
|
||
|
|
||
|
result += intermediate.replace(/\/{2,}/g, '/');
|
||
|
result += protocol;
|
||
|
lastIndex = protocolAtIndex + protocol.length;
|
||
|
}
|
||
|
|
||
|
const remnant = urlObject.pathname.slice(lastIndex, urlObject.pathname.length);
|
||
|
result += remnant.replace(/\/{2,}/g, '/');
|
||
|
|
||
|
urlObject.pathname = result;
|
||
|
}
|
||
|
|
||
|
// Decode URI octets
|
||
|
if (urlObject.pathname) {
|
||
|
try {
|
||
|
urlObject.pathname = decodeURI(urlObject.pathname);
|
||
|
} catch {}
|
||
|
}
|
||
|
|
||
|
// Remove directory index
|
||
|
if (options.removeDirectoryIndex === true) {
|
||
|
options.removeDirectoryIndex = [/^index\.[a-z]+$/];
|
||
|
}
|
||
|
|
||
|
if (Array.isArray(options.removeDirectoryIndex) && options.removeDirectoryIndex.length > 0) {
|
||
|
let pathComponents = urlObject.pathname.split('/');
|
||
|
const lastComponent = pathComponents[pathComponents.length - 1];
|
||
|
|
||
|
if (testParameter(lastComponent, options.removeDirectoryIndex)) {
|
||
|
pathComponents = pathComponents.slice(0, -1);
|
||
|
urlObject.pathname = pathComponents.slice(1).join('/') + '/';
|
||
|
}
|
||
|
}
|
||
|
|
||
|
if (urlObject.hostname) {
|
||
|
// Remove trailing dot
|
||
|
urlObject.hostname = urlObject.hostname.replace(/\.$/, '');
|
||
|
|
||
|
// Remove `www.`
|
||
|
if (options.stripWWW && /^www\.(?!www\.)[a-z\-\d]{1,63}\.[a-z.\-\d]{2,63}$/.test(urlObject.hostname)) {
|
||
|
// Each label should be at most 63 characters long (min: 1).
|
||
|
// Source: https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names
|
||
|
// Each TLD should be up to 63 characters long (min: 2).
|
||
|
// It is technically possible to have a single character TLD, but none currently exist.
|
||
|
urlObject.hostname = urlObject.hostname.replace(/^www\./, '');
|
||
|
}
|
||
|
}
|
||
|
|
||
|
// Remove query unwanted parameters
|
||
|
if (Array.isArray(options.removeQueryParameters)) {
|
||
|
// eslint-disable-next-line unicorn/no-useless-spread -- We are intentionally spreading to get a copy.
|
||
|
for (const key of [...urlObject.searchParams.keys()]) {
|
||
|
if (testParameter(key, options.removeQueryParameters)) {
|
||
|
urlObject.searchParams.delete(key);
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
if (!Array.isArray(options.keepQueryParameters) && options.removeQueryParameters === true) {
|
||
|
urlObject.search = '';
|
||
|
}
|
||
|
|
||
|
// Keep wanted query parameters
|
||
|
if (Array.isArray(options.keepQueryParameters) && options.keepQueryParameters.length > 0) {
|
||
|
// eslint-disable-next-line unicorn/no-useless-spread -- We are intentionally spreading to get a copy.
|
||
|
for (const key of [...urlObject.searchParams.keys()]) {
|
||
|
if (!testParameter(key, options.keepQueryParameters)) {
|
||
|
urlObject.searchParams.delete(key);
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
// Sort query parameters
|
||
|
if (options.sortQueryParameters) {
|
||
|
urlObject.searchParams.sort();
|
||
|
|
||
|
// Calling `.sort()` encodes the search parameters, so we need to decode them again.
|
||
|
try {
|
||
|
urlObject.search = decodeURIComponent(urlObject.search);
|
||
|
} catch {}
|
||
|
}
|
||
|
|
||
|
if (options.removeTrailingSlash) {
|
||
|
urlObject.pathname = urlObject.pathname.replace(/\/$/, '');
|
||
|
}
|
||
|
|
||
|
// Remove an explicit port number, excluding a default port number, if applicable
|
||
|
if (options.removeExplicitPort && urlObject.port) {
|
||
|
urlObject.port = '';
|
||
|
}
|
||
|
|
||
|
const oldUrlString = urlString;
|
||
|
|
||
|
// Take advantage of many of the Node `url` normalizations
|
||
|
urlString = urlObject.toString();
|
||
|
|
||
|
if (!options.removeSingleSlash && urlObject.pathname === '/' && !oldUrlString.endsWith('/') && urlObject.hash === '') {
|
||
|
urlString = urlString.replace(/\/$/, '');
|
||
|
}
|
||
|
|
||
|
// Remove ending `/` unless removeSingleSlash is false
|
||
|
if ((options.removeTrailingSlash || urlObject.pathname === '/') && urlObject.hash === '' && options.removeSingleSlash) {
|
||
|
urlString = urlString.replace(/\/$/, '');
|
||
|
}
|
||
|
|
||
|
// Restore relative protocol, if applicable
|
||
|
if (hasRelativeProtocol && !options.normalizeProtocol) {
|
||
|
urlString = urlString.replace(/^http:\/\//, '//');
|
||
|
}
|
||
|
|
||
|
// Remove http/https
|
||
|
if (options.stripProtocol) {
|
||
|
urlString = urlString.replace(/^(?:https?:)?\/\//, '');
|
||
|
}
|
||
|
|
||
|
return urlString;
|
||
|
}
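// Illustrative sketch (not part of the original bundle): with the defaults above,
// `normalizeUrl` prepends a protocol, strips `www.`, drops `utm_*` query
// parameters, sorts the remaining ones and trims trailing slashes.
//
//     normalizeUrl('www.example.com/path/?b=2&a=1&utm_source=x');
//     //=> 'http://example.com/path?a=1&b=2'
//     normalizeUrl('//example.com', {normalizeProtocol: false});
//     //=> '//example.com'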
|
||
|
|
||
|
var getStream$3 = {exports: {}};
|
||
|
|
||
|
const {PassThrough: PassThroughStream} = require$$0$3;
|
||
|
|
||
|
var bufferStream$1 = options => {
|
||
|
options = {...options};
|
||
|
|
||
|
const {array} = options;
|
||
|
let {encoding} = options;
|
||
|
const isBuffer = encoding === 'buffer';
|
||
|
let objectMode = false;
|
||
|
|
||
|
if (array) {
|
||
|
objectMode = !(encoding || isBuffer);
|
||
|
} else {
|
||
|
encoding = encoding || 'utf8';
|
||
|
}
|
||
|
|
||
|
if (isBuffer) {
|
||
|
encoding = null;
|
||
|
}
|
||
|
|
||
|
const stream = new PassThroughStream({objectMode});
|
||
|
|
||
|
if (encoding) {
|
||
|
stream.setEncoding(encoding);
|
||
|
}
|
||
|
|
||
|
let length = 0;
|
||
|
const chunks = [];
|
||
|
|
||
|
stream.on('data', chunk => {
|
||
|
chunks.push(chunk);
|
||
|
|
||
|
if (objectMode) {
|
||
|
length = chunks.length;
|
||
|
} else {
|
||
|
length += chunk.length;
|
||
|
}
|
||
|
});
|
||
|
|
||
|
stream.getBufferedValue = () => {
|
||
|
if (array) {
|
||
|
return chunks;
|
||
|
}
|
||
|
|
||
|
return isBuffer ? Buffer.concat(chunks, length) : chunks.join('');
|
||
|
};
|
||
|
|
||
|
stream.getBufferedLength = () => length;
|
||
|
|
||
|
return stream;
|
||
|
};
|
||
|
|
||
|
const {constants: BufferConstants} = require$$0$4;
|
||
|
const stream$1 = require$$0$3;
|
||
|
const {promisify} = require$$6;
|
||
|
const bufferStream = bufferStream$1;
|
||
|
|
||
|
const streamPipelinePromisified = promisify(stream$1.pipeline);
|
||
|
|
||
|
class MaxBufferError extends Error {
    constructor() {
        super('maxBuffer exceeded');
        this.name = 'MaxBufferError';
    }
}
|
||
|
|
||
|
async function getStream$1(inputStream, options) {
|
||
|
if (!inputStream) {
|
||
|
throw new Error('Expected a stream');
|
||
|
}
|
||
|
|
||
|
options = {
|
||
|
maxBuffer: Infinity,
|
||
|
...options
|
||
|
};
|
||
|
|
||
|
const {maxBuffer} = options;
|
||
|
const stream = bufferStream(options);
|
||
|
|
||
|
await new Promise((resolve, reject) => {
|
||
|
const rejectPromise = error => {
|
||
|
// Don't retrieve an oversized buffer.
|
||
|
if (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) {
|
||
|
error.bufferedData = stream.getBufferedValue();
|
||
|
}
|
||
|
|
||
|
reject(error);
|
||
|
};
|
||
|
|
||
|
(async () => {
|
||
|
try {
|
||
|
await streamPipelinePromisified(inputStream, stream);
|
||
|
resolve();
|
||
|
} catch (error) {
|
||
|
rejectPromise(error);
|
||
|
}
|
||
|
})();
|
||
|
|
||
|
stream.on('data', () => {
|
||
|
if (stream.getBufferedLength() > maxBuffer) {
|
||
|
rejectPromise(new MaxBufferError());
|
||
|
}
|
||
|
});
|
||
|
});
|
||
|
|
||
|
return stream.getBufferedValue();
|
||
|
}
|
||
|
|
||
|
getStream$3.exports = getStream$1;
|
||
|
var buffer = getStream$3.exports.buffer = (stream, options) => getStream$1(stream, {...options, encoding: 'buffer'});
|
||
|
getStream$3.exports.array = (stream, options) => getStream$1(stream, {...options, array: true});
|
||
|
getStream$3.exports.MaxBufferError = MaxBufferError;
|
||
|
|
||
|
var getStreamExports = getStream$3.exports;
|
||
|
var getStream$2 = /*@__PURE__*/getDefaultExportFromCjs(getStreamExports);
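// Illustrative sketch (not part of the original bundle): `getStream$2` buffers a
// readable stream into a string (or a Buffer via the `buffer` helper above) and
// rejects with MaxBufferError once more than `maxBuffer` bytes have been read.
// The `readable` variables are placeholders.
//
//     const text = await getStream$2(readable, {maxBuffer: 1024 * 1024});
//     const raw = await buffer(anotherReadable); // Buffer instead of string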
|
||
|
|
||
|
// rfc7231 6.1
|
||
|
const statusCodeCacheableByDefault = new Set([
|
||
|
200,
|
||
|
203,
|
||
|
204,
|
||
|
206,
|
||
|
300,
|
||
|
301,
|
||
|
308,
|
||
|
404,
|
||
|
405,
|
||
|
410,
|
||
|
414,
|
||
|
501,
|
||
|
]);
|
||
|
|
||
|
// This implementation does not understand partial responses (206)
|
||
|
const understoodStatuses = new Set([
|
||
|
200,
|
||
|
203,
|
||
|
204,
|
||
|
300,
|
||
|
301,
|
||
|
302,
|
||
|
303,
|
||
|
307,
|
||
|
308,
|
||
|
404,
|
||
|
405,
|
||
|
410,
|
||
|
414,
|
||
|
501,
|
||
|
]);
|
||
|
|
||
|
const errorStatusCodes = new Set([
|
||
|
500,
|
||
|
502,
|
||
|
503,
|
||
|
504,
|
||
|
]);
|
||
|
|
||
|
const hopByHopHeaders = {
|
||
|
date: true, // included, because we add Age update Date
|
||
|
connection: true,
|
||
|
'keep-alive': true,
|
||
|
'proxy-authenticate': true,
|
||
|
'proxy-authorization': true,
|
||
|
te: true,
|
||
|
trailer: true,
|
||
|
'transfer-encoding': true,
|
||
|
upgrade: true,
|
||
|
};
|
||
|
|
||
|
const excludedFromRevalidationUpdate = {
|
||
|
// Since the old body is reused, it doesn't make sense to change properties of the body
|
||
|
'content-length': true,
|
||
|
'content-encoding': true,
|
||
|
'transfer-encoding': true,
|
||
|
'content-range': true,
|
||
|
};
|
||
|
|
||
|
function toNumberOrZero(s) {
    const n = parseInt(s, 10);
    return isFinite(n) ? n : 0;
}

// RFC 5861
function isErrorResponse(response) {
    // consider undefined response as faulty
    if (!response) {
        return true;
    }
    return errorStatusCodes.has(response.status);
}
|
||
|
|
||
|
function parseCacheControl(header) {
    const cc = {};
    if (!header) return cc;

    // TODO: When there is more than one value present for a given directive (e.g., two Expires header fields, multiple Cache-Control: max-age directives),
    // the directive's value is considered invalid. Caches are encouraged to consider responses that have invalid freshness information to be stale
    const parts = header.trim().split(/,/);
    for (const part of parts) {
        const [k, v] = part.split(/=/, 2);
        cc[k.trim()] = v === undefined ? true : v.trim().replace(/^"|"$/g, '');
    }

    return cc;
}

function formatCacheControl(cc) {
    let parts = [];
    for (const k in cc) {
        const v = cc[k];
        parts.push(v === true ? k : k + '=' + v);
    }
    if (!parts.length) {
        return undefined;
    }
    return parts.join(', ');
}
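// Illustrative sketch (not part of the original bundle): the two helpers above
// round-trip a Cache-Control header between its string form and a plain object.
//
//     parseCacheControl('public, max-age=300, no-transform');
//     //=> { public: true, 'max-age': '300', 'no-transform': true }
//     formatCacheControl({ public: true, 'max-age': '300' });
//     //=> 'public, max-age=300'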
|
||
|
|
||
|
var httpCacheSemantics = class CachePolicy {
|
||
|
constructor(
|
||
|
req,
|
||
|
res,
|
||
|
{
|
||
|
shared,
|
||
|
cacheHeuristic,
|
||
|
immutableMinTimeToLive,
|
||
|
ignoreCargoCult,
|
||
|
_fromObject,
|
||
|
} = {}
|
||
|
) {
|
||
|
if (_fromObject) {
|
||
|
this._fromObject(_fromObject);
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
if (!res || !res.headers) {
|
||
|
throw Error('Response headers missing');
|
||
|
}
|
||
|
this._assertRequestHasHeaders(req);
|
||
|
|
||
|
this._responseTime = this.now();
|
||
|
this._isShared = shared !== false;
|
||
|
this._cacheHeuristic =
|
||
|
undefined !== cacheHeuristic ? cacheHeuristic : 0.1; // 10% matches IE
|
||
|
this._immutableMinTtl =
|
||
|
undefined !== immutableMinTimeToLive
|
||
|
? immutableMinTimeToLive
|
||
|
: 24 * 3600 * 1000;
|
||
|
|
||
|
this._status = 'status' in res ? res.status : 200;
|
||
|
this._resHeaders = res.headers;
|
||
|
this._rescc = parseCacheControl(res.headers['cache-control']);
|
||
|
this._method = 'method' in req ? req.method : 'GET';
|
||
|
this._url = req.url;
|
||
|
this._host = req.headers.host;
|
||
|
this._noAuthorization = !req.headers.authorization;
|
||
|
this._reqHeaders = res.headers.vary ? req.headers : null; // Don't keep all request headers if they won't be used
|
||
|
this._reqcc = parseCacheControl(req.headers['cache-control']);
|
||
|
|
||
|
// Assume that if someone uses legacy, non-standard, unnecessary options they don't understand caching,
// so there's no point strictly adhering to the blindly copy&pasted directives.
|
||
|
if (
|
||
|
ignoreCargoCult &&
|
||
|
'pre-check' in this._rescc &&
|
||
|
'post-check' in this._rescc
|
||
|
) {
|
||
|
delete this._rescc['pre-check'];
|
||
|
delete this._rescc['post-check'];
|
||
|
delete this._rescc['no-cache'];
|
||
|
delete this._rescc['no-store'];
|
||
|
delete this._rescc['must-revalidate'];
|
||
|
this._resHeaders = Object.assign({}, this._resHeaders, {
|
||
|
'cache-control': formatCacheControl(this._rescc),
|
||
|
});
|
||
|
delete this._resHeaders.expires;
|
||
|
delete this._resHeaders.pragma;
|
||
|
}
|
||
|
|
||
|
// When the Cache-Control header field is not present in a request, caches MUST consider the no-cache request pragma-directive
|
||
|
// as having the same effect as if "Cache-Control: no-cache" were present (see Section 5.2.1).
|
||
|
if (
|
||
|
res.headers['cache-control'] == null &&
|
||
|
/no-cache/.test(res.headers.pragma)
|
||
|
) {
|
||
|
this._rescc['no-cache'] = true;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
now() {
|
||
|
return Date.now();
|
||
|
}
|
||
|
|
||
|
storable() {
|
||
|
// The "no-store" request directive indicates that a cache MUST NOT store any part of either this request or any response to it.
|
||
|
return !!(
|
||
|
!this._reqcc['no-store'] &&
|
||
|
// A cache MUST NOT store a response to any request, unless:
|
||
|
// The request method is understood by the cache and defined as being cacheable, and
|
||
|
('GET' === this._method ||
|
||
|
'HEAD' === this._method ||
|
||
|
('POST' === this._method && this._hasExplicitExpiration())) &&
|
||
|
// the response status code is understood by the cache, and
|
||
|
understoodStatuses.has(this._status) &&
|
||
|
// the "no-store" cache directive does not appear in request or response header fields, and
|
||
|
!this._rescc['no-store'] &&
|
||
|
// the "private" response directive does not appear in the response, if the cache is shared, and
|
||
|
(!this._isShared || !this._rescc.private) &&
|
||
|
// the Authorization header field does not appear in the request, if the cache is shared,
|
||
|
(!this._isShared ||
|
||
|
this._noAuthorization ||
|
||
|
this._allowsStoringAuthenticated()) &&
|
||
|
// the response either:
|
||
|
// contains an Expires header field, or
|
||
|
(this._resHeaders.expires ||
|
||
|
// contains a max-age response directive, or
|
||
|
// contains a s-maxage response directive and the cache is shared, or
|
||
|
// contains a public response directive.
|
||
|
this._rescc['max-age'] ||
|
||
|
(this._isShared && this._rescc['s-maxage']) ||
|
||
|
this._rescc.public ||
|
||
|
// has a status code that is defined as cacheable by default
|
||
|
statusCodeCacheableByDefault.has(this._status))
|
||
|
);
|
||
|
}
|
||
|
|
||
|
_hasExplicitExpiration() {
|
||
|
// 4.2.1 Calculating Freshness Lifetime
|
||
|
return (
|
||
|
(this._isShared && this._rescc['s-maxage']) ||
|
||
|
this._rescc['max-age'] ||
|
||
|
this._resHeaders.expires
|
||
|
);
|
||
|
}
|
||
|
|
||
|
_assertRequestHasHeaders(req) {
|
||
|
if (!req || !req.headers) {
|
||
|
throw Error('Request headers missing');
|
||
|
}
|
||
|
}
|
||
|
|
||
|
satisfiesWithoutRevalidation(req) {
|
||
|
this._assertRequestHasHeaders(req);
|
||
|
|
||
|
// When presented with a request, a cache MUST NOT reuse a stored response, unless:
|
||
|
// the presented request does not contain the no-cache pragma (Section 5.4), nor the no-cache cache directive,
|
||
|
// unless the stored response is successfully validated (Section 4.3), and
|
||
|
const requestCC = parseCacheControl(req.headers['cache-control']);
|
||
|
if (requestCC['no-cache'] || /no-cache/.test(req.headers.pragma)) {
|
||
|
return false;
|
||
|
}
|
||
|
|
||
|
if (requestCC['max-age'] && this.age() > requestCC['max-age']) {
|
||
|
return false;
|
||
|
}
|
||
|
|
||
|
if (
|
||
|
requestCC['min-fresh'] &&
|
||
|
this.timeToLive() < 1000 * requestCC['min-fresh']
|
||
|
) {
|
||
|
return false;
|
||
|
}
|
||
|
|
||
|
// the stored response is either:
|
||
|
// fresh, or allowed to be served stale
|
||
|
if (this.stale()) {
|
||
|
const allowsStale =
|
||
|
requestCC['max-stale'] &&
|
||
|
!this._rescc['must-revalidate'] &&
|
||
|
(true === requestCC['max-stale'] ||
|
||
|
requestCC['max-stale'] > this.age() - this.maxAge());
|
||
|
if (!allowsStale) {
|
||
|
return false;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
return this._requestMatches(req, false);
|
||
|
}
|
||
|
|
||
|
_requestMatches(req, allowHeadMethod) {
|
||
|
// The presented effective request URI and that of the stored response match, and
|
||
|
return (
|
||
|
(!this._url || this._url === req.url) &&
|
||
|
this._host === req.headers.host &&
|
||
|
// the request method associated with the stored response allows it to be used for the presented request, and
|
||
|
(!req.method ||
|
||
|
this._method === req.method ||
|
||
|
(allowHeadMethod && 'HEAD' === req.method)) &&
|
||
|
// selecting header fields nominated by the stored response (if any) match those presented, and
|
||
|
this._varyMatches(req)
|
||
|
);
|
||
|
}
|
||
|
|
||
|
_allowsStoringAuthenticated() {
|
||
|
// following Cache-Control response directives (Section 5.2.2) have such an effect: must-revalidate, public, and s-maxage.
|
||
|
return (
|
||
|
this._rescc['must-revalidate'] ||
|
||
|
this._rescc.public ||
|
||
|
this._rescc['s-maxage']
|
||
|
);
|
||
|
}
|
||
|
|
||
|
_varyMatches(req) {
|
||
|
if (!this._resHeaders.vary) {
|
||
|
return true;
|
||
|
}
|
||
|
|
||
|
// A Vary header field-value of "*" always fails to match
|
||
|
if (this._resHeaders.vary === '*') {
|
||
|
return false;
|
||
|
}
|
||
|
|
||
|
const fields = this._resHeaders.vary
|
||
|
.trim()
|
||
|
.toLowerCase()
|
||
|
.split(/\s*,\s*/);
|
||
|
for (const name of fields) {
|
||
|
if (req.headers[name] !== this._reqHeaders[name]) return false;
|
||
|
}
|
||
|
return true;
|
||
|
}
|
||
|
|
||
|
_copyWithoutHopByHopHeaders(inHeaders) {
|
||
|
const headers = {};
|
||
|
for (const name in inHeaders) {
|
||
|
if (hopByHopHeaders[name]) continue;
|
||
|
headers[name] = inHeaders[name];
|
||
|
}
|
||
|
// 9.1. Connection
|
||
|
if (inHeaders.connection) {
|
||
|
const tokens = inHeaders.connection.trim().split(/\s*,\s*/);
|
||
|
for (const name of tokens) {
|
||
|
delete headers[name];
|
||
|
}
|
||
|
}
|
||
|
if (headers.warning) {
|
||
|
const warnings = headers.warning.split(/,/).filter(warning => {
|
||
|
return !/^\s*1[0-9][0-9]/.test(warning);
|
||
|
});
|
||
|
if (!warnings.length) {
|
||
|
delete headers.warning;
|
||
|
} else {
|
||
|
headers.warning = warnings.join(',').trim();
|
||
|
}
|
||
|
}
|
||
|
return headers;
|
||
|
}
|
||
|
|
||
|
responseHeaders() {
|
||
|
const headers = this._copyWithoutHopByHopHeaders(this._resHeaders);
|
||
|
const age = this.age();
|
||
|
|
||
|
// A cache SHOULD generate 113 warning if it heuristically chose a freshness
|
||
|
// lifetime greater than 24 hours and the response's age is greater than 24 hours.
|
||
|
if (
|
||
|
age > 3600 * 24 &&
|
||
|
!this._hasExplicitExpiration() &&
|
||
|
this.maxAge() > 3600 * 24
|
||
|
) {
|
||
|
headers.warning =
|
||
|
(headers.warning ? `${headers.warning}, ` : '') +
|
||
|
'113 - "rfc7234 5.5.4"';
|
||
|
}
|
||
|
headers.age = `${Math.round(age)}`;
|
||
|
headers.date = new Date(this.now()).toUTCString();
|
||
|
return headers;
|
||
|
}
|
||
|
|
||
|
/**
|
||
|
* Value of the Date response header or current time if Date was invalid
|
||
|
* @return timestamp
|
||
|
*/
|
||
|
date() {
|
||
|
const serverDate = Date.parse(this._resHeaders.date);
|
||
|
if (isFinite(serverDate)) {
|
||
|
return serverDate;
|
||
|
}
|
||
|
return this._responseTime;
|
||
|
}
|
||
|
|
||
|
/**
|
||
|
* Value of the Age header, in seconds, updated for the current time.
|
||
|
* May be fractional.
|
||
|
*
|
||
|
* @return Number
|
||
|
*/
|
||
|
age() {
|
||
|
let age = this._ageValue();
|
||
|
|
||
|
const residentTime = (this.now() - this._responseTime) / 1000;
|
||
|
return age + residentTime;
|
||
|
}
|
||
|
|
||
|
_ageValue() {
|
||
|
return toNumberOrZero(this._resHeaders.age);
|
||
|
}
|
||
|
|
||
|
/**
|
||
|
* Value of applicable max-age (or heuristic equivalent) in seconds. This counts since response's `Date`.
|
||
|
*
|
||
|
* For an up-to-date value, see `timeToLive()`.
|
||
|
*
|
||
|
* @return Number
|
||
|
*/
|
||
|
maxAge() {
|
||
|
if (!this.storable() || this._rescc['no-cache']) {
|
||
|
return 0;
|
||
|
}
|
||
|
|
||
|
// Shared responses with cookies are cacheable according to the RFC, but IMHO it'd be unwise to do so by default
|
||
|
// so this implementation requires explicit opt-in via public header
|
||
|
if (
|
||
|
this._isShared &&
|
||
|
(this._resHeaders['set-cookie'] &&
|
||
|
!this._rescc.public &&
|
||
|
!this._rescc.immutable)
|
||
|
) {
|
||
|
return 0;
|
||
|
}
|
||
|
|
||
|
if (this._resHeaders.vary === '*') {
|
||
|
return 0;
|
||
|
}
|
||
|
|
||
|
if (this._isShared) {
|
||
|
if (this._rescc['proxy-revalidate']) {
|
||
|
return 0;
|
||
|
}
|
||
|
// if a response includes the s-maxage directive, a shared cache recipient MUST ignore the Expires field.
|
||
|
if (this._rescc['s-maxage']) {
|
||
|
return toNumberOrZero(this._rescc['s-maxage']);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
// If a response includes a Cache-Control field with the max-age directive, a recipient MUST ignore the Expires field.
|
||
|
if (this._rescc['max-age']) {
|
||
|
return toNumberOrZero(this._rescc['max-age']);
|
||
|
}
|
||
|
|
||
|
const defaultMinTtl = this._rescc.immutable ? this._immutableMinTtl : 0;
|
||
|
|
||
|
const serverDate = this.date();
|
||
|
if (this._resHeaders.expires) {
|
||
|
const expires = Date.parse(this._resHeaders.expires);
|
||
|
// A cache recipient MUST interpret invalid date formats, especially the value "0", as representing a time in the past (i.e., "already expired").
|
||
|
if (Number.isNaN(expires) || expires < serverDate) {
|
||
|
return 0;
|
||
|
}
|
||
|
return Math.max(defaultMinTtl, (expires - serverDate) / 1000);
|
||
|
}
|
||
|
|
||
|
if (this._resHeaders['last-modified']) {
|
||
|
const lastModified = Date.parse(this._resHeaders['last-modified']);
|
||
|
if (isFinite(lastModified) && serverDate > lastModified) {
|
||
|
return Math.max(
|
||
|
defaultMinTtl,
|
||
|
((serverDate - lastModified) / 1000) * this._cacheHeuristic
|
||
|
);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
return defaultMinTtl;
|
||
|
}
|
||
|
|
||
|
timeToLive() {
|
||
|
const age = this.maxAge() - this.age();
|
||
|
const staleIfErrorAge = age + toNumberOrZero(this._rescc['stale-if-error']);
|
||
|
const staleWhileRevalidateAge = age + toNumberOrZero(this._rescc['stale-while-revalidate']);
|
||
|
return Math.max(0, age, staleIfErrorAge, staleWhileRevalidateAge) * 1000;
|
||
|
}
|
||
|
|
||
|
stale() {
|
||
|
return this.maxAge() <= this.age();
|
||
|
}
|
||
|
|
||
|
_useStaleIfError() {
|
||
|
return this.maxAge() + toNumberOrZero(this._rescc['stale-if-error']) > this.age();
|
||
|
}
|
||
|
|
||
|
useStaleWhileRevalidate() {
|
||
|
return this.maxAge() + toNumberOrZero(this._rescc['stale-while-revalidate']) > this.age();
|
||
|
}
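// Illustrative sketch (not part of the original bundle): a typical freshness
// check with the methods above. `req`, `res`, `cache`, `key`, `body` and
// `newRequest` are placeholders for the caller's own request/response objects
// and cache store.
//
//     const policy = new httpCacheSemantics(req, res, {shared: false});
//     if (policy.storable()) {
//         cache.set(key, {policy, body}, policy.timeToLive());
//     }
//     // later, for a new request:
//     if (!policy.stale() && policy.satisfiesWithoutRevalidation(newRequest)) {
//         return {headers: policy.responseHeaders(), body};
//     }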
|
||
|
|
||
|
static fromObject(obj) {
|
||
|
return new this(undefined, undefined, { _fromObject: obj });
|
||
|
}
|
||
|
|
||
|
_fromObject(obj) {
|
||
|
if (this._responseTime) throw Error('Reinitialized');
|
||
|
if (!obj || obj.v !== 1) throw Error('Invalid serialization');
|
||
|
|
||
|
this._responseTime = obj.t;
|
||
|
this._isShared = obj.sh;
|
||
|
this._cacheHeuristic = obj.ch;
|
||
|
this._immutableMinTtl =
|
||
|
obj.imm !== undefined ? obj.imm : 24 * 3600 * 1000;
|
||
|
this._status = obj.st;
|
||
|
this._resHeaders = obj.resh;
|
||
|
this._rescc = obj.rescc;
|
||
|
this._method = obj.m;
|
||
|
this._url = obj.u;
|
||
|
this._host = obj.h;
|
||
|
this._noAuthorization = obj.a;
|
||
|
this._reqHeaders = obj.reqh;
|
||
|
this._reqcc = obj.reqcc;
|
||
|
}
|
||
|
|
||
|
toObject() {
|
||
|
return {
|
||
|
v: 1,
|
||
|
t: this._responseTime,
|
||
|
sh: this._isShared,
|
||
|
ch: this._cacheHeuristic,
|
||
|
imm: this._immutableMinTtl,
|
||
|
st: this._status,
|
||
|
resh: this._resHeaders,
|
||
|
rescc: this._rescc,
|
||
|
m: this._method,
|
||
|
u: this._url,
|
||
|
h: this._host,
|
||
|
a: this._noAuthorization,
|
||
|
reqh: this._reqHeaders,
|
||
|
reqcc: this._reqcc,
|
||
|
};
|
||
|
}
|
||
|
|
||
|
/**
|
||
|
* Headers for sending to the origin server to revalidate stale response.
|
||
|
* Allows server to return 304 to allow reuse of the previous response.
|
||
|
*
|
||
|
* Hop by hop headers are always stripped.
|
||
|
* Revalidation headers may be added or removed, depending on request.
|
||
|
*/
|
||
|
revalidationHeaders(incomingReq) {
|
||
|
this._assertRequestHasHeaders(incomingReq);
|
||
|
const headers = this._copyWithoutHopByHopHeaders(incomingReq.headers);
|
||
|
|
||
|
// This implementation does not understand range requests
|
||
|
delete headers['if-range'];
|
||
|
|
||
|
if (!this._requestMatches(incomingReq, true) || !this.storable()) {
|
||
|
// revalidation allowed via HEAD
|
||
|
// not for the same resource, or wasn't allowed to be cached anyway
|
||
|
delete headers['if-none-match'];
|
||
|
delete headers['if-modified-since'];
|
||
|
return headers;
|
||
|
}
|
||
|
|
||
|
/* MUST send that entity-tag in any cache validation request (using If-Match or If-None-Match) if an entity-tag has been provided by the origin server. */
|
||
|
if (this._resHeaders.etag) {
|
||
|
headers['if-none-match'] = headers['if-none-match']
|
||
|
? `${headers['if-none-match']}, ${this._resHeaders.etag}`
|
||
|
: this._resHeaders.etag;
|
||
|
}
|
||
|
|
||
|
// Clients MAY issue simple (non-subrange) GET requests with either weak validators or strong validators. Clients MUST NOT use weak validators in other forms of request.
|
||
|
const forbidsWeakValidators =
|
||
|
headers['accept-ranges'] ||
|
||
|
headers['if-match'] ||
|
||
|
headers['if-unmodified-since'] ||
|
||
|
(this._method && this._method != 'GET');
|
||
|
|
||
|
/* SHOULD send the Last-Modified value in non-subrange cache validation requests (using If-Modified-Since) if only a Last-Modified value has been provided by the origin server.
|
||
|
Note: This implementation does not understand partial responses (206) */
|
||
|
if (forbidsWeakValidators) {
|
||
|
delete headers['if-modified-since'];
|
||
|
|
||
|
if (headers['if-none-match']) {
|
||
|
const etags = headers['if-none-match']
|
||
|
.split(/,/)
|
||
|
.filter(etag => {
|
||
|
return !/^\s*W\//.test(etag);
|
||
|
});
|
||
|
if (!etags.length) {
|
||
|
delete headers['if-none-match'];
|
||
|
} else {
|
||
|
headers['if-none-match'] = etags.join(',').trim();
|
||
|
}
|
||
|
}
|
||
|
} else if (
|
||
|
this._resHeaders['last-modified'] &&
|
||
|
!headers['if-modified-since']
|
||
|
) {
|
||
|
headers['if-modified-since'] = this._resHeaders['last-modified'];
|
||
|
}
|
||
|
|
||
|
return headers;
|
||
|
}
|
||
|
|
||
|
/**
|
||
|
* Creates new CachePolicy with information combined from the previews response,
|
||
|
* and the new revalidation response.
|
||
|
*
|
||
|
* Returns {policy, modified} where modified is a boolean indicating
|
||
|
* whether the response body has been modified, and old cached body can't be used.
|
||
|
*
|
||
|
* @return {Object} {policy: CachePolicy, modified: Boolean}
|
||
|
*/
|
||
|
revalidatedPolicy(request, response) {
|
||
|
this._assertRequestHasHeaders(request);
|
||
|
if(this._useStaleIfError() && isErrorResponse(response)) { // I consider the revalidation request unsuccessful
|
||
|
return {
|
||
|
modified: false,
|
||
|
matches: false,
|
||
|
policy: this,
|
||
|
};
|
||
|
}
|
||
|
if (!response || !response.headers) {
|
||
|
throw Error('Response headers missing');
|
||
|
}
|
||
|
|
||
|
// These aren't going to be supported exactly, since one CachePolicy object
|
||
|
// doesn't know about all the other cached objects.
|
||
|
let matches = false;
|
||
|
if (response.status !== undefined && response.status != 304) {
|
||
|
matches = false;
|
||
|
} else if (
|
||
|
response.headers.etag &&
|
||
|
!/^\s*W\//.test(response.headers.etag)
|
||
|
) {
|
||
|
// "All of the stored responses with the same strong validator are selected.
|
||
|
// If none of the stored responses contain the same strong validator,
|
||
|
// then the cache MUST NOT use the new response to update any stored responses."
|
||
|
matches =
|
||
|
this._resHeaders.etag &&
|
||
|
this._resHeaders.etag.replace(/^\s*W\//, '') ===
|
||
|
response.headers.etag;
|
||
|
} else if (this._resHeaders.etag && response.headers.etag) {
|
||
|
// "If the new response contains a weak validator and that validator corresponds
|
||
|
// to one of the cache's stored responses,
|
||
|
// then the most recent of those matching stored responses is selected for update."
|
||
|
matches =
|
||
|
this._resHeaders.etag.replace(/^\s*W\//, '') ===
|
||
|
response.headers.etag.replace(/^\s*W\//, '');
|
||
|
} else if (this._resHeaders['last-modified']) {
|
||
|
matches =
|
||
|
this._resHeaders['last-modified'] ===
|
||
|
response.headers['last-modified'];
|
||
|
} else {
|
||
|
// If the new response does not include any form of validator (such as in the case where
|
||
|
// a client generates an If-Modified-Since request from a source other than the Last-Modified
|
||
|
// response header field), and there is only one stored response, and that stored response also
|
||
|
// lacks a validator, then that stored response is selected for update.
|
||
|
if (
|
||
|
!this._resHeaders.etag &&
|
||
|
!this._resHeaders['last-modified'] &&
|
||
|
!response.headers.etag &&
|
||
|
!response.headers['last-modified']
|
||
|
) {
|
||
|
matches = true;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
if (!matches) {
|
||
|
return {
|
||
|
policy: new this.constructor(request, response),
|
||
|
// Client receiving 304 without body, even if it's invalid/mismatched has no option
|
||
|
// but to reuse a cached body. We don't have a good way to tell clients to do
|
||
|
// error recovery in such case.
|
||
|
modified: response.status != 304,
|
||
|
matches: false,
|
||
|
};
|
||
|
}
|
||
|
|
||
|
// use other header fields provided in the 304 (Not Modified) response to replace all instances
|
||
|
// of the corresponding header fields in the stored response.
|
||
|
const headers = {};
|
||
|
for (const k in this._resHeaders) {
|
||
|
headers[k] =
|
||
|
k in response.headers && !excludedFromRevalidationUpdate[k]
|
||
|
? response.headers[k]
|
||
|
: this._resHeaders[k];
|
||
|
}
|
||
|
|
||
|
const newResponse = Object.assign({}, response, {
|
||
|
status: this._status,
|
||
|
method: this._method,
|
||
|
headers,
|
||
|
});
|
||
|
return {
|
||
|
policy: new this.constructor(request, newResponse, {
|
||
|
shared: this._isShared,
|
||
|
cacheHeuristic: this._cacheHeuristic,
|
||
|
immutableMinTimeToLive: this._immutableMinTtl,
|
||
|
}),
|
||
|
modified: false,
|
||
|
matches: true,
|
||
|
};
|
||
|
}
|
||
|
};

var CachePolicy = /*@__PURE__*/getDefaultExportFromCjs(httpCacheSemantics);
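
// Typical flow for the cache wrapper below: build a policy from a request/response pair,
// persist policy.toObject() next to the body, and on a later request check
// satisfiesWithoutRevalidation() before either replaying the cached body or sending
// revalidationHeaders() to the origin. Illustrative sketch only; `save`, `key` and `saved`
// are hypothetical names, not part of the bundled code:
//   const policy = new CachePolicy(req, res, { shared: true });
//   if (policy.storable()) { save(key, { policy: policy.toObject(), body }); }
//   if (CachePolicy.fromObject(saved.policy).satisfiesWithoutRevalidation(newReq)) { /* serve cached */ }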

function lowercaseKeys(object) {
    return Object.fromEntries(Object.entries(object).map(([key, value]) => [key.toLowerCase(), value]));
}

class Response extends Readable$1 {
    statusCode;
    headers;
    body;
    url;

    constructor({statusCode, headers, body, url}) {
        if (typeof statusCode !== 'number') {
            throw new TypeError('Argument `statusCode` should be a number');
        }

        if (typeof headers !== 'object') {
            throw new TypeError('Argument `headers` should be an object');
        }

        if (!(body instanceof Uint8Array)) {
            throw new TypeError('Argument `body` should be a buffer');
        }

        if (typeof url !== 'string') {
            throw new TypeError('Argument `url` should be a string');
        }

        super({
            read() {
                this.push(body);
                this.push(null);
            },
        });

        this.statusCode = statusCode;
        this.headers = lowercaseKeys(headers);
        this.body = body;
        this.url = url;
    }
}
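
// Response replays a cached body as a readable stream: the custom read() pushes the whole
// buffer followed by end-of-stream, and header names are lowercased so later lookups such
// as headers['content-encoding'] behave the same for cached and live responses.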

function commonjsRequire(path) {
    throw new Error('Could not dynamically require "' + path + '". Please configure the dynamicRequireTargets or/and ignoreDynamicRequires option of @rollup/plugin-commonjs appropriately for this require call to work.');
}
|
||
|
|
||
|
var jsonBuffer = {};
|
||
|
|
||
|
//TODO: handle reviver/dehydrate function like normal
|
||
|
//and handle indentation, like normal.
|
||
|
//if anyone needs this... please send pull request.
|
||
|
|
||
|
jsonBuffer.stringify = function stringify (o) {
|
||
|
if('undefined' == typeof o) return o
|
||
|
|
||
|
if(o && Buffer.isBuffer(o))
|
||
|
return JSON.stringify(':base64:' + o.toString('base64'))
|
||
|
|
||
|
if(o && o.toJSON)
|
||
|
o = o.toJSON();
|
||
|
|
||
|
if(o && 'object' === typeof o) {
|
||
|
var s = '';
|
||
|
var array = Array.isArray(o);
|
||
|
s = array ? '[' : '{';
|
||
|
var first = true;
|
||
|
|
||
|
for(var k in o) {
|
||
|
var ignore = 'function' == typeof o[k] || (!array && 'undefined' === typeof o[k]);
|
||
|
if(Object.hasOwnProperty.call(o, k) && !ignore) {
|
||
|
if(!first)
|
||
|
s += ',';
|
||
|
first = false;
|
||
|
if (array) {
|
||
|
if(o[k] == undefined)
|
||
|
s += 'null';
|
||
|
else
|
||
|
s += stringify(o[k]);
|
||
|
} else if (o[k] !== void(0)) {
|
||
|
s += stringify(k) + ':' + stringify(o[k]);
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
s += array ? ']' : '}';
|
||
|
|
||
|
return s
|
||
|
} else if ('string' === typeof o) {
|
||
|
return JSON.stringify(/^:/.test(o) ? ':' + o : o)
|
||
|
} else if ('undefined' === typeof o) {
|
||
|
return 'null';
|
||
|
} else
|
||
|
return JSON.stringify(o)
|
||
|
};

jsonBuffer.parse = function (s) {
  return JSON.parse(s, function (key, value) {
    if('string' === typeof value) {
      if(/^:base64:/.test(value))
        return Buffer.from(value.substring(8), 'base64')
      else
        return /^:/.test(value) ? value.substring(1) : value
    }
    return value
  })
};
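
// json-buffer round-trips Buffers through JSON by prefixing their base64 form with
// ':base64:' and escaping ordinary strings that happen to start with ':'. Illustrative
// round trip (not part of the bundled code):
//   jsonBuffer.parse(jsonBuffer.stringify(Buffer.from('hi'))); // -> <Buffer 68 69>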
|
||
|
|
||
|
const EventEmitter$1 = require$$0$1;
|
||
|
const JSONB = jsonBuffer;
|
||
|
|
||
|
const loadStore = options => {
|
||
|
const adapters = {
|
||
|
redis: '@keyv/redis',
|
||
|
rediss: '@keyv/redis',
|
||
|
mongodb: '@keyv/mongo',
|
||
|
mongo: '@keyv/mongo',
|
||
|
sqlite: '@keyv/sqlite',
|
||
|
postgresql: '@keyv/postgres',
|
||
|
postgres: '@keyv/postgres',
|
||
|
mysql: '@keyv/mysql',
|
||
|
etcd: '@keyv/etcd',
|
||
|
offline: '@keyv/offline',
|
||
|
tiered: '@keyv/tiered',
|
||
|
};
|
||
|
if (options.adapter || options.uri) {
|
||
|
const adapter = options.adapter || /^[^:+]*/.exec(options.uri)[0];
|
||
|
return new (commonjsRequire(adapters[adapter]))(options);
|
||
|
}
|
||
|
|
||
|
return new Map();
|
||
|
};
|
||
|
|
||
|
const iterableAdapters = [
|
||
|
'sqlite',
|
||
|
'postgres',
|
||
|
'mysql',
|
||
|
'mongo',
|
||
|
'redis',
|
||
|
'tiered',
|
||
|
];
|
||
|
|
||
|
class Keyv extends EventEmitter$1 {
|
||
|
constructor(uri, {emitErrors = true, ...options} = {}) {
|
||
|
super();
|
||
|
this.opts = {
|
||
|
namespace: 'keyv',
|
||
|
serialize: JSONB.stringify,
|
||
|
deserialize: JSONB.parse,
|
||
|
...((typeof uri === 'string') ? {uri} : uri),
|
||
|
...options,
|
||
|
};
|
||
|
|
||
|
if (!this.opts.store) {
|
||
|
const adapterOptions = {...this.opts};
|
||
|
this.opts.store = loadStore(adapterOptions);
|
||
|
}
|
||
|
|
||
|
if (this.opts.compression) {
|
||
|
const compression = this.opts.compression;
|
||
|
this.opts.serialize = compression.serialize.bind(compression);
|
||
|
this.opts.deserialize = compression.deserialize.bind(compression);
|
||
|
}
|
||
|
|
||
|
if (typeof this.opts.store.on === 'function' && emitErrors) {
|
||
|
this.opts.store.on('error', error => this.emit('error', error));
|
||
|
}
|
||
|
|
||
|
this.opts.store.namespace = this.opts.namespace;
|
||
|
|
||
|
const generateIterator = iterator => async function * () {
|
||
|
for await (const [key, raw] of typeof iterator === 'function'
|
||
|
? iterator(this.opts.store.namespace)
|
||
|
: iterator) {
|
||
|
const data = this.opts.deserialize(raw);
|
||
|
if (this.opts.store.namespace && !key.includes(this.opts.store.namespace)) {
|
||
|
continue;
|
||
|
}
|
||
|
|
||
|
if (typeof data.expires === 'number' && Date.now() > data.expires) {
|
||
|
this.delete(key);
|
||
|
continue;
|
||
|
}
|
||
|
|
||
|
yield [this._getKeyUnprefix(key), data.value];
|
||
|
}
|
||
|
};
|
||
|
|
||
|
// Attach iterators
|
||
|
if (typeof this.opts.store[Symbol.iterator] === 'function' && this.opts.store instanceof Map) {
|
||
|
this.iterator = generateIterator(this.opts.store);
|
||
|
} else if (typeof this.opts.store.iterator === 'function' && this.opts.store.opts
|
||
|
&& this._checkIterableAdaptar()) {
|
||
|
this.iterator = generateIterator(this.opts.store.iterator.bind(this.opts.store));
|
||
|
}
|
||
|
}
|
||
|
|
||
|
_checkIterableAdaptar() {
|
||
|
return iterableAdapters.includes(this.opts.store.opts.dialect)
|
||
|
|| iterableAdapters.findIndex(element => this.opts.store.opts.url.includes(element)) >= 0;
|
||
|
}

    _getKeyPrefix(key) {
        return `${this.opts.namespace}:${key}`;
    }

    _getKeyPrefixArray(keys) {
        return keys.map(key => `${this.opts.namespace}:${key}`);
    }

    _getKeyUnprefix(key) {
        return key
            .split(':')
            .splice(1)
            .join(':');
    }
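
    // Every key is stored as `${namespace}:${key}` (the namespace defaults to 'keyv'), and
    // _getKeyUnprefix strips only the first ':'-separated segment, so user keys that
    // themselves contain ':' survive a round trip through the store unchanged.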
|
||
|
|
||
|
get(key, options) {
|
||
|
const {store} = this.opts;
|
||
|
const isArray = Array.isArray(key);
|
||
|
const keyPrefixed = isArray ? this._getKeyPrefixArray(key) : this._getKeyPrefix(key);
|
||
|
if (isArray && store.getMany === undefined) {
|
||
|
const promises = [];
|
||
|
for (const key of keyPrefixed) {
|
||
|
promises.push(Promise.resolve()
|
||
|
.then(() => store.get(key))
|
||
|
.then(data => (typeof data === 'string') ? this.opts.deserialize(data) : (this.opts.compression ? this.opts.deserialize(data) : data))
|
||
|
.then(data => {
|
||
|
if (data === undefined || data === null) {
|
||
|
return undefined;
|
||
|
}
|
||
|
|
||
|
if (typeof data.expires === 'number' && Date.now() > data.expires) {
|
||
|
return this.delete(key).then(() => undefined);
|
||
|
}
|
||
|
|
||
|
return (options && options.raw) ? data : data.value;
|
||
|
}),
|
||
|
);
|
||
|
}
|
||
|
|
||
|
return Promise.allSettled(promises)
|
||
|
.then(values => {
|
||
|
const data = [];
|
||
|
for (const value of values) {
|
||
|
data.push(value.value);
|
||
|
}
|
||
|
|
||
|
return data;
|
||
|
});
|
||
|
}
|
||
|
|
||
|
return Promise.resolve()
|
||
|
.then(() => isArray ? store.getMany(keyPrefixed) : store.get(keyPrefixed))
|
||
|
.then(data => (typeof data === 'string') ? this.opts.deserialize(data) : (this.opts.compression ? this.opts.deserialize(data) : data))
|
||
|
.then(data => {
|
||
|
if (data === undefined || data === null) {
|
||
|
return undefined;
|
||
|
}
|
||
|
|
||
|
if (isArray) {
|
||
|
const result = [];
|
||
|
|
||
|
for (let row of data) {
|
||
|
if ((typeof row === 'string')) {
|
||
|
row = this.opts.deserialize(row);
|
||
|
}
|
||
|
|
||
|
if (row === undefined || row === null) {
|
||
|
result.push(undefined);
|
||
|
continue;
|
||
|
}
|
||
|
|
||
|
if (typeof row.expires === 'number' && Date.now() > row.expires) {
|
||
|
this.delete(key).then(() => undefined);
|
||
|
result.push(undefined);
|
||
|
} else {
|
||
|
result.push((options && options.raw) ? row : row.value);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
return result;
|
||
|
}
|
||
|
|
||
|
if (typeof data.expires === 'number' && Date.now() > data.expires) {
|
||
|
return this.delete(key).then(() => undefined);
|
||
|
}
|
||
|
|
||
|
return (options && options.raw) ? data : data.value;
|
||
|
});
|
||
|
}
|
||
|
|
||
|
set(key, value, ttl) {
|
||
|
const keyPrefixed = this._getKeyPrefix(key);
|
||
|
if (typeof ttl === 'undefined') {
|
||
|
ttl = this.opts.ttl;
|
||
|
}
|
||
|
|
||
|
if (ttl === 0) {
|
||
|
ttl = undefined;
|
||
|
}
|
||
|
|
||
|
const {store} = this.opts;
|
||
|
|
||
|
return Promise.resolve()
|
||
|
.then(() => {
|
||
|
const expires = (typeof ttl === 'number') ? (Date.now() + ttl) : null;
|
||
|
if (typeof value === 'symbol') {
|
||
|
this.emit('error', 'symbol cannot be serialized');
|
||
|
}
|
||
|
|
||
|
value = {value, expires};
|
||
|
return this.opts.serialize(value);
|
||
|
})
|
||
|
.then(value => store.set(keyPrefixed, value, ttl))
|
||
|
.then(() => true);
|
||
|
}
|
||
|
|
||
|
delete(key) {
|
||
|
const {store} = this.opts;
|
||
|
if (Array.isArray(key)) {
|
||
|
const keyPrefixed = this._getKeyPrefixArray(key);
|
||
|
if (store.deleteMany === undefined) {
|
||
|
const promises = [];
|
||
|
for (const key of keyPrefixed) {
|
||
|
promises.push(store.delete(key));
|
||
|
}
|
||
|
|
||
|
return Promise.allSettled(promises)
|
||
|
.then(values => values.every(x => x.value === true));
|
||
|
}
|
||
|
|
||
|
return Promise.resolve()
|
||
|
.then(() => store.deleteMany(keyPrefixed));
|
||
|
}
|
||
|
|
||
|
const keyPrefixed = this._getKeyPrefix(key);
|
||
|
return Promise.resolve()
|
||
|
.then(() => store.delete(keyPrefixed));
|
||
|
}
|
||
|
|
||
|
clear() {
|
||
|
const {store} = this.opts;
|
||
|
return Promise.resolve()
|
||
|
.then(() => store.clear());
|
||
|
}
|
||
|
|
||
|
has(key) {
|
||
|
const keyPrefixed = this._getKeyPrefix(key);
|
||
|
const {store} = this.opts;
|
||
|
return Promise.resolve()
|
||
|
.then(async () => {
|
||
|
if (typeof store.has === 'function') {
|
||
|
return store.has(keyPrefixed);
|
||
|
}
|
||
|
|
||
|
const value = await store.get(keyPrefixed);
|
||
|
return value !== undefined;
|
||
|
});
|
||
|
}
|
||
|
|
||
|
disconnect() {
|
||
|
const {store} = this.opts;
|
||
|
if (typeof store.disconnect === 'function') {
|
||
|
return store.disconnect();
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
var src = Keyv;
|
||
|
|
||
|
var Keyv$1 = /*@__PURE__*/getDefaultExportFromCjs(src);
|
||
|
|
||
|
// We define these manually to ensure they're always copied
|
||
|
// even if they would move up the prototype chain
|
||
|
// https://nodejs.org/api/http.html#http_class_http_incomingmessage
|
||
|
const knownProperties$1 = [
|
||
|
'aborted',
|
||
|
'complete',
|
||
|
'headers',
|
||
|
'httpVersion',
|
||
|
'httpVersionMinor',
|
||
|
'httpVersionMajor',
|
||
|
'method',
|
||
|
'rawHeaders',
|
||
|
'rawTrailers',
|
||
|
'setTimeout',
|
||
|
'socket',
|
||
|
'statusCode',
|
||
|
'statusMessage',
|
||
|
'trailers',
|
||
|
'url',
|
||
|
];
|
||
|
|
||
|
function mimicResponse$2(fromStream, toStream) {
|
||
|
if (toStream._readableState.autoDestroy) {
|
||
|
throw new Error('The second stream must have the `autoDestroy` option set to `false`');
|
||
|
}
|
||
|
|
||
|
const fromProperties = new Set([...Object.keys(fromStream), ...knownProperties$1]);
|
||
|
|
||
|
const properties = {};
|
||
|
|
||
|
for (const property of fromProperties) {
|
||
|
// Don't overwrite existing properties.
|
||
|
if (property in toStream) {
|
||
|
continue;
|
||
|
}
|
||
|
|
||
|
properties[property] = {
|
||
|
get() {
|
||
|
const value = fromStream[property];
|
||
|
const isFunction = typeof value === 'function';
|
||
|
|
||
|
return isFunction ? value.bind(fromStream) : value;
|
||
|
},
|
||
|
set(value) {
|
||
|
fromStream[property] = value;
|
||
|
},
|
||
|
enumerable: true,
|
||
|
configurable: false,
|
||
|
};
|
||
|
}
|
||
|
|
||
|
Object.defineProperties(toStream, properties);
|
||
|
|
||
|
fromStream.once('aborted', () => {
|
||
|
toStream.destroy();
|
||
|
|
||
|
toStream.emit('aborted');
|
||
|
});
|
||
|
|
||
|
fromStream.once('close', () => {
|
||
|
if (fromStream.complete) {
|
||
|
if (toStream.readable) {
|
||
|
toStream.once('end', () => {
|
||
|
toStream.emit('close');
|
||
|
});
|
||
|
} else {
|
||
|
toStream.emit('close');
|
||
|
}
|
||
|
} else {
|
||
|
toStream.emit('close');
|
||
|
}
|
||
|
});
|
||
|
|
||
|
return toStream;
|
||
|
}
|
||
|
|
||
|
// Type definitions for cacheable-request 6.0
|
||
|
// Project: https://github.com/lukechilds/cacheable-request#readme
|
||
|
// Definitions by: BendingBender <https://github.com/BendingBender>
|
||
|
// Paul Melnikow <https://github.com/paulmelnikow>
|
||
|
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||
|
// TypeScript Version: 2.3
|
||
|
class RequestError extends Error {
|
||
|
constructor(error) {
|
||
|
super(error.message);
|
||
|
Object.assign(this, error);
|
||
|
}
|
||
|
}
|
||
|
class CacheError extends Error {
|
||
|
constructor(error) {
|
||
|
super(error.message);
|
||
|
Object.assign(this, error);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
class CacheableRequest {
|
||
|
constructor(cacheRequest, cacheAdapter) {
|
||
|
this.hooks = new Map();
|
||
|
this.request = () => (options, cb) => {
|
||
|
let url;
|
||
|
if (typeof options === 'string') {
|
||
|
url = normalizeUrlObject(urlLib.parse(options));
|
||
|
options = {};
|
||
|
}
|
||
|
else if (options instanceof urlLib.URL) {
|
||
|
url = normalizeUrlObject(urlLib.parse(options.toString()));
|
||
|
options = {};
|
||
|
}
|
||
|
else {
|
||
|
const [pathname, ...searchParts] = (options.path ?? '').split('?');
|
||
|
const search = searchParts.length > 0
|
||
|
? `?${searchParts.join('?')}`
|
||
|
: '';
|
||
|
url = normalizeUrlObject({ ...options, pathname, search });
|
||
|
}
|
||
|
options = {
|
||
|
headers: {},
|
||
|
method: 'GET',
|
||
|
cache: true,
|
||
|
strictTtl: false,
|
||
|
automaticFailover: false,
|
||
|
...options,
|
||
|
...urlObjectToRequestOptions(url),
|
||
|
};
|
||
|
options.headers = Object.fromEntries(entries(options.headers).map(([key, value]) => [key.toLowerCase(), value]));
|
||
|
const ee = new EventEmitter$2();
|
||
|
const normalizedUrlString = normalizeUrl(urlLib.format(url), {
|
||
|
stripWWW: false,
|
||
|
removeTrailingSlash: false,
|
||
|
stripAuthentication: false,
|
||
|
});
|
||
|
let key = `${options.method}:${normalizedUrlString}`;
|
||
|
// POST, PATCH, and PUT requests may be cached, depending on the response
|
||
|
// cache-control headers. As a result, the body of the request should be
|
||
|
// added to the cache key in order to avoid collisions.
|
||
|
if (options.body && options.method !== undefined && ['POST', 'PATCH', 'PUT'].includes(options.method)) {
|
||
|
if (options.body instanceof stream$2.Readable) {
|
||
|
// Streamed bodies should completely skip the cache because they may
|
||
|
// or may not be hashable and in either case the stream would need to
|
||
|
// close before the cache key could be generated.
|
||
|
options.cache = false;
|
||
|
}
|
||
|
else {
|
||
|
key += `:${crypto$1.createHash('md5').update(options.body).digest('hex')}`;
|
||
|
}
|
||
|
}
|
||
|
let revalidate = false;
|
||
|
let madeRequest = false;
|
||
|
const makeRequest = (options_) => {
|
||
|
madeRequest = true;
|
||
|
let requestErrored = false;
|
||
|
let requestErrorCallback = () => { };
|
||
|
const requestErrorPromise = new Promise(resolve => {
|
||
|
requestErrorCallback = () => {
|
||
|
if (!requestErrored) {
|
||
|
requestErrored = true;
|
||
|
resolve();
|
||
|
}
|
||
|
};
|
||
|
});
|
||
|
const handler = async (response) => {
|
||
|
if (revalidate) {
|
||
|
response.status = response.statusCode;
|
||
|
const revalidatedPolicy = CachePolicy.fromObject(revalidate.cachePolicy).revalidatedPolicy(options_, response);
|
||
|
if (!revalidatedPolicy.modified) {
|
||
|
response.resume();
|
||
|
await new Promise(resolve => {
|
||
|
// Skipping 'error' handler cause 'error' event should't be emitted for 304 response
|
||
|
response
|
||
|
.once('end', resolve);
|
||
|
});
|
||
|
const headers = convertHeaders(revalidatedPolicy.policy.responseHeaders());
|
||
|
response = new Response({ statusCode: revalidate.statusCode, headers, body: revalidate.body, url: revalidate.url });
|
||
|
response.cachePolicy = revalidatedPolicy.policy;
|
||
|
response.fromCache = true;
|
||
|
}
|
||
|
}
|
||
|
if (!response.fromCache) {
|
||
|
response.cachePolicy = new CachePolicy(options_, response, options_);
|
||
|
response.fromCache = false;
|
||
|
}
|
||
|
let clonedResponse;
|
||
|
if (options_.cache && response.cachePolicy.storable()) {
|
||
|
clonedResponse = cloneResponse(response);
|
||
|
(async () => {
|
||
|
try {
|
||
|
const bodyPromise = getStream$2.buffer(response);
|
||
|
await Promise.race([
|
||
|
requestErrorPromise,
|
||
|
new Promise(resolve => response.once('end', resolve)),
|
||
|
new Promise(resolve => response.once('close', resolve)), // eslint-disable-line no-promise-executor-return
|
||
|
]);
|
||
|
const body = await bodyPromise;
|
||
|
let value = {
|
||
|
url: response.url,
|
||
|
statusCode: response.fromCache ? revalidate.statusCode : response.statusCode,
|
||
|
body,
|
||
|
cachePolicy: response.cachePolicy.toObject(),
|
||
|
};
|
||
|
let ttl = options_.strictTtl ? response.cachePolicy.timeToLive() : undefined;
|
||
|
if (options_.maxTtl) {
|
||
|
ttl = ttl ? Math.min(ttl, options_.maxTtl) : options_.maxTtl;
|
||
|
}
|
||
|
if (this.hooks.size > 0) {
|
||
|
/* eslint-disable no-await-in-loop */
|
||
|
for (const key_ of this.hooks.keys()) {
|
||
|
value = await this.runHook(key_, value, response);
|
||
|
}
|
||
|
/* eslint-enable no-await-in-loop */
|
||
|
}
|
||
|
await this.cache.set(key, value, ttl);
|
||
|
}
|
||
|
catch (error) {
|
||
|
ee.emit('error', new CacheError(error));
|
||
|
}
|
||
|
})();
|
||
|
}
|
||
|
else if (options_.cache && revalidate) {
|
||
|
(async () => {
|
||
|
try {
|
||
|
await this.cache.delete(key);
|
||
|
}
|
||
|
catch (error) {
|
||
|
ee.emit('error', new CacheError(error));
|
||
|
}
|
||
|
})();
|
||
|
}
|
||
|
ee.emit('response', clonedResponse ?? response);
|
||
|
if (typeof cb === 'function') {
|
||
|
cb(clonedResponse ?? response);
|
||
|
}
|
||
|
};
|
||
|
try {
|
||
|
const request_ = this.cacheRequest(options_, handler);
|
||
|
request_.once('error', requestErrorCallback);
|
||
|
request_.once('abort', requestErrorCallback);
|
||
|
request_.once('destroy', requestErrorCallback);
|
||
|
ee.emit('request', request_);
|
||
|
}
|
||
|
catch (error) {
|
||
|
ee.emit('error', new RequestError(error));
|
||
|
}
|
||
|
};
|
||
|
(async () => {
|
||
|
const get = async (options_) => {
|
||
|
await Promise.resolve();
|
||
|
const cacheEntry = options_.cache ? await this.cache.get(key) : undefined;
|
||
|
if (cacheEntry === undefined && !options_.forceRefresh) {
|
||
|
makeRequest(options_);
|
||
|
return;
|
||
|
}
|
||
|
const policy = CachePolicy.fromObject(cacheEntry.cachePolicy);
|
||
|
if (policy.satisfiesWithoutRevalidation(options_) && !options_.forceRefresh) {
|
||
|
const headers = convertHeaders(policy.responseHeaders());
|
||
|
const response = new Response({ statusCode: cacheEntry.statusCode, headers, body: cacheEntry.body, url: cacheEntry.url });
|
||
|
response.cachePolicy = policy;
|
||
|
response.fromCache = true;
|
||
|
ee.emit('response', response);
|
||
|
if (typeof cb === 'function') {
|
||
|
cb(response);
|
||
|
}
|
||
|
}
|
||
|
else if (policy.satisfiesWithoutRevalidation(options_) && Date.now() >= policy.timeToLive() && options_.forceRefresh) {
|
||
|
await this.cache.delete(key);
|
||
|
options_.headers = policy.revalidationHeaders(options_);
|
||
|
makeRequest(options_);
|
||
|
}
|
||
|
else {
|
||
|
revalidate = cacheEntry;
|
||
|
options_.headers = policy.revalidationHeaders(options_);
|
||
|
makeRequest(options_);
|
||
|
}
|
||
|
};
|
||
|
const errorHandler = (error) => ee.emit('error', new CacheError(error));
|
||
|
if (this.cache instanceof Keyv$1) {
|
||
|
const cachek = this.cache;
|
||
|
cachek.once('error', errorHandler);
|
||
|
ee.on('error', () => cachek.removeListener('error', errorHandler));
|
||
|
ee.on('response', () => cachek.removeListener('error', errorHandler));
|
||
|
}
|
||
|
try {
|
||
|
await get(options);
|
||
|
}
|
||
|
catch (error) {
|
||
|
if (options.automaticFailover && !madeRequest) {
|
||
|
makeRequest(options);
|
||
|
}
|
||
|
ee.emit('error', new CacheError(error));
|
||
|
}
|
||
|
})();
|
||
|
return ee;
|
||
|
};
|
||
|
this.addHook = (name, fn) => {
|
||
|
if (!this.hooks.has(name)) {
|
||
|
this.hooks.set(name, fn);
|
||
|
}
|
||
|
};
|
||
|
this.removeHook = (name) => this.hooks.delete(name);
|
||
|
this.getHook = (name) => this.hooks.get(name);
|
||
|
this.runHook = async (name, ...args) => this.hooks.get(name)?.(...args);
|
||
|
if (cacheAdapter instanceof Keyv$1) {
|
||
|
this.cache = cacheAdapter;
|
||
|
}
|
||
|
else if (typeof cacheAdapter === 'string') {
|
||
|
this.cache = new Keyv$1({
|
||
|
uri: cacheAdapter,
|
||
|
namespace: 'cacheable-request',
|
||
|
});
|
||
|
}
|
||
|
else {
|
||
|
this.cache = new Keyv$1({
|
||
|
store: cacheAdapter,
|
||
|
namespace: 'cacheable-request',
|
||
|
});
|
||
|
}
|
||
|
this.request = this.request.bind(this);
|
||
|
this.cacheRequest = cacheRequest;
|
||
|
}
|
||
|
}
|
||
|
const entries = Object.entries;
|
||
|
const cloneResponse = (response) => {
|
||
|
const clone = new PassThrough$1({ autoDestroy: false });
|
||
|
mimicResponse$2(response, clone);
|
||
|
return response.pipe(clone);
|
||
|
};
|
||
|
const urlObjectToRequestOptions = (url) => {
|
||
|
const options = { ...url };
|
||
|
options.path = `${url.pathname || '/'}${url.search || ''}`;
|
||
|
delete options.pathname;
|
||
|
delete options.search;
|
||
|
return options;
|
||
|
};
|
||
|
const normalizeUrlObject = (url) =>
|
||
|
// If url was parsed by url.parse or new URL:
|
||
|
// - hostname will be set
|
||
|
// - host will be hostname[:port]
|
||
|
// - port will be set if it was explicit in the parsed string
|
||
|
// Otherwise, url was from request options:
|
||
|
// - hostname or host may be set
|
||
|
// - host shall not have port encoded
|
||
|
({
|
||
|
protocol: url.protocol,
|
||
|
auth: url.auth,
|
||
|
hostname: url.hostname || url.host || 'localhost',
|
||
|
port: url.port,
|
||
|
pathname: url.pathname,
|
||
|
search: url.search,
|
||
|
});
|
||
|
const convertHeaders = (headers) => {
|
||
|
const result = [];
|
||
|
for (const name of Object.keys(headers)) {
|
||
|
result[name.toLowerCase()] = headers[name];
|
||
|
}
|
||
|
return result;
|
||
|
};
|
||
|
var CacheableRequest$1 = CacheableRequest;
|
||
|
|
||
|
// We define these manually to ensure they're always copied
|
||
|
// even if they would move up the prototype chain
|
||
|
// https://nodejs.org/api/http.html#http_class_http_incomingmessage
|
||
|
const knownProperties = [
|
||
|
'aborted',
|
||
|
'complete',
|
||
|
'headers',
|
||
|
'httpVersion',
|
||
|
'httpVersionMinor',
|
||
|
'httpVersionMajor',
|
||
|
'method',
|
||
|
'rawHeaders',
|
||
|
'rawTrailers',
|
||
|
'setTimeout',
|
||
|
'socket',
|
||
|
'statusCode',
|
||
|
'statusMessage',
|
||
|
'trailers',
|
||
|
'url'
|
||
|
];
|
||
|
|
||
|
var mimicResponse$1 = (fromStream, toStream) => {
|
||
|
if (toStream._readableState.autoDestroy) {
|
||
|
throw new Error('The second stream must have the `autoDestroy` option set to `false`');
|
||
|
}
|
||
|
|
||
|
const fromProperties = new Set(Object.keys(fromStream).concat(knownProperties));
|
||
|
|
||
|
const properties = {};
|
||
|
|
||
|
for (const property of fromProperties) {
|
||
|
// Don't overwrite existing properties.
|
||
|
if (property in toStream) {
|
||
|
continue;
|
||
|
}
|
||
|
|
||
|
properties[property] = {
|
||
|
get() {
|
||
|
const value = fromStream[property];
|
||
|
const isFunction = typeof value === 'function';
|
||
|
|
||
|
return isFunction ? value.bind(fromStream) : value;
|
||
|
},
|
||
|
set(value) {
|
||
|
fromStream[property] = value;
|
||
|
},
|
||
|
enumerable: true,
|
||
|
configurable: false
|
||
|
};
|
||
|
}
|
||
|
|
||
|
Object.defineProperties(toStream, properties);
|
||
|
|
||
|
fromStream.once('aborted', () => {
|
||
|
toStream.destroy();
|
||
|
|
||
|
toStream.emit('aborted');
|
||
|
});
|
||
|
|
||
|
fromStream.once('close', () => {
|
||
|
if (fromStream.complete) {
|
||
|
if (toStream.readable) {
|
||
|
toStream.once('end', () => {
|
||
|
toStream.emit('close');
|
||
|
});
|
||
|
} else {
|
||
|
toStream.emit('close');
|
||
|
}
|
||
|
} else {
|
||
|
toStream.emit('close');
|
||
|
}
|
||
|
});
|
||
|
|
||
|
return toStream;
|
||
|
};
|
||
|
|
||
|
const {Transform, PassThrough} = require$$0$3;
|
||
|
const zlib = require$$1$3;
|
||
|
const mimicResponse = mimicResponse$1;
|
||
|
|
||
|
var decompressResponse = response => {
|
||
|
const contentEncoding = (response.headers['content-encoding'] || '').toLowerCase();
|
||
|
|
||
|
if (!['gzip', 'deflate', 'br'].includes(contentEncoding)) {
|
||
|
return response;
|
||
|
}
|
||
|
|
||
|
// TODO: Remove this when targeting Node.js 12.
|
||
|
const isBrotli = contentEncoding === 'br';
|
||
|
if (isBrotli && typeof zlib.createBrotliDecompress !== 'function') {
|
||
|
response.destroy(new Error('Brotli is not supported on Node.js < 12'));
|
||
|
return response;
|
||
|
}
|
||
|
|
||
|
let isEmpty = true;
|
||
|
|
||
|
const checker = new Transform({
|
||
|
transform(data, _encoding, callback) {
|
||
|
isEmpty = false;
|
||
|
|
||
|
callback(null, data);
|
||
|
},
|
||
|
|
||
|
flush(callback) {
|
||
|
callback();
|
||
|
}
|
||
|
});
|
||
|
|
||
|
const finalStream = new PassThrough({
|
||
|
autoDestroy: false,
|
||
|
destroy(error, callback) {
|
||
|
response.destroy();
|
||
|
|
||
|
callback(error);
|
||
|
}
|
||
|
});
|
||
|
|
||
|
const decompressStream = isBrotli ? zlib.createBrotliDecompress() : zlib.createUnzip();
|
||
|
|
||
|
decompressStream.once('error', error => {
|
||
|
if (isEmpty && !response.readable) {
|
||
|
finalStream.end();
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
finalStream.destroy(error);
|
||
|
});
|
||
|
|
||
|
mimicResponse(response, finalStream);
|
||
|
response.pipe(checker).pipe(decompressStream).pipe(finalStream);
|
||
|
|
||
|
return finalStream;
|
||
|
};
|
||
|
|
||
|
var decompressResponse$1 = /*@__PURE__*/getDefaultExportFromCjs(decompressResponse);
|
||
|
|
||
|
const isFunction = (value) => (typeof value === "function");
|
||
|
|
||
|
const isAsyncIterable = (value) => (isFunction(value[Symbol.asyncIterator]));
|
||
|
async function* readStream(readable) {
|
||
|
const reader = readable.getReader();
|
||
|
while (true) {
|
||
|
const { done, value } = await reader.read();
|
||
|
if (done) {
|
||
|
break;
|
||
|
}
|
||
|
yield value;
|
||
|
}
|
||
|
}
|
||
|
const getStreamIterator = (source) => {
|
||
|
if (isAsyncIterable(source)) {
|
||
|
return source;
|
||
|
}
|
||
|
if (isFunction(source.getReader)) {
|
||
|
return readStream(source);
|
||
|
}
|
||
|
throw new TypeError("Unsupported data source: Expected either ReadableStream or async iterable.");
|
||
|
};
|
||
|
|
||
|
const alphabet = "abcdefghijklmnopqrstuvwxyz0123456789";
|
||
|
function createBoundary() {
|
||
|
let size = 16;
|
||
|
let res = "";
|
||
|
while (size--) {
|
||
|
res += alphabet[(Math.random() * alphabet.length) << 0];
|
||
|
}
|
||
|
return res;
|
||
|
}
|
||
|
|
||
|
const normalizeValue = (value) => String(value)
|
||
|
.replace(/\r|\n/g, (match, i, str) => {
|
||
|
if ((match === "\r" && str[i + 1] !== "\n")
|
||
|
|| (match === "\n" && str[i - 1] !== "\r")) {
|
||
|
return "\r\n";
|
||
|
}
|
||
|
return match;
|
||
|
});
|
||
|
|
||
|
const getType = (value) => (Object.prototype.toString.call(value).slice(8, -1).toLowerCase());
|
||
|
function isPlainObject(value) {
|
||
|
if (getType(value) !== "object") {
|
||
|
return false;
|
||
|
}
|
||
|
const pp = Object.getPrototypeOf(value);
|
||
|
if (pp === null || pp === undefined) {
|
||
|
return true;
|
||
|
}
|
||
|
const Ctor = pp.constructor && pp.constructor.toString();
|
||
|
return Ctor === Object.toString();
|
||
|
}
|
||
|
|
||
|
function getProperty(target, prop) {
|
||
|
if (typeof prop === "string") {
|
||
|
for (const [name, value] of Object.entries(target)) {
|
||
|
if (prop.toLowerCase() === name.toLowerCase()) {
|
||
|
return value;
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
return undefined;
|
||
|
}
|
||
|
const proxyHeaders = (object) => new Proxy(object, {
|
||
|
get: (target, prop) => getProperty(target, prop),
|
||
|
has: (target, prop) => getProperty(target, prop) !== undefined
|
||
|
});
|
||
|
|
||
|
const isFormData$1 = (value) => Boolean(value
|
||
|
&& isFunction(value.constructor)
|
||
|
&& value[Symbol.toStringTag] === "FormData"
|
||
|
&& isFunction(value.append)
|
||
|
&& isFunction(value.getAll)
|
||
|
&& isFunction(value.entries)
|
||
|
&& isFunction(value[Symbol.iterator]));
|
||
|
|
||
|
const escapeName = (name) => String(name)
|
||
|
.replace(/\r/g, "%0D")
|
||
|
.replace(/\n/g, "%0A")
|
||
|
.replace(/"/g, "%22");
|
||
|
|
||
|
const isFile = (value) => Boolean(value
|
||
|
&& typeof value === "object"
|
||
|
&& isFunction(value.constructor)
|
||
|
&& value[Symbol.toStringTag] === "File"
|
||
|
&& isFunction(value.stream)
|
||
|
&& value.name != null);
|
||
|
|
||
|
var __classPrivateFieldSet = (undefined && undefined.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
|
||
|
if (kind === "m") throw new TypeError("Private method is not writable");
|
||
|
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
|
||
|
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
|
||
|
return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
|
||
|
};
|
||
|
var __classPrivateFieldGet = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) {
|
||
|
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
|
||
|
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
|
||
|
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
|
||
|
};
|
||
|
var _FormDataEncoder_instances, _FormDataEncoder_CRLF, _FormDataEncoder_CRLF_BYTES, _FormDataEncoder_CRLF_BYTES_LENGTH, _FormDataEncoder_DASHES, _FormDataEncoder_encoder, _FormDataEncoder_footer, _FormDataEncoder_form, _FormDataEncoder_options, _FormDataEncoder_getFieldHeader, _FormDataEncoder_getContentLength;
|
||
|
const defaultOptions = {
|
||
|
enableAdditionalHeaders: false
|
||
|
};
|
||
|
const readonlyProp = { writable: false, configurable: false };
|
||
|
class FormDataEncoder {
|
||
|
constructor(form, boundaryOrOptions, options) {
|
||
|
_FormDataEncoder_instances.add(this);
|
||
|
_FormDataEncoder_CRLF.set(this, "\r\n");
|
||
|
_FormDataEncoder_CRLF_BYTES.set(this, void 0);
|
||
|
_FormDataEncoder_CRLF_BYTES_LENGTH.set(this, void 0);
|
||
|
_FormDataEncoder_DASHES.set(this, "-".repeat(2));
|
||
|
_FormDataEncoder_encoder.set(this, new TextEncoder());
|
||
|
_FormDataEncoder_footer.set(this, void 0);
|
||
|
_FormDataEncoder_form.set(this, void 0);
|
||
|
_FormDataEncoder_options.set(this, void 0);
|
||
|
if (!isFormData$1(form)) {
|
||
|
throw new TypeError("Expected first argument to be a FormData instance.");
|
||
|
}
|
||
|
let boundary;
|
||
|
if (isPlainObject(boundaryOrOptions)) {
|
||
|
options = boundaryOrOptions;
|
||
|
}
|
||
|
else {
|
||
|
boundary = boundaryOrOptions;
|
||
|
}
|
||
|
if (!boundary) {
|
||
|
boundary = createBoundary();
|
||
|
}
|
||
|
if (typeof boundary !== "string") {
|
||
|
throw new TypeError("Expected boundary argument to be a string.");
|
||
|
}
|
||
|
if (options && !isPlainObject(options)) {
|
||
|
throw new TypeError("Expected options argument to be an object.");
|
||
|
}
|
||
|
__classPrivateFieldSet(this, _FormDataEncoder_form, Array.from(form.entries()), "f");
|
||
|
__classPrivateFieldSet(this, _FormDataEncoder_options, { ...defaultOptions, ...options }, "f");
|
||
|
__classPrivateFieldSet(this, _FormDataEncoder_CRLF_BYTES, __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")), "f");
|
||
|
__classPrivateFieldSet(this, _FormDataEncoder_CRLF_BYTES_LENGTH, __classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES, "f").byteLength, "f");
|
||
|
this.boundary = `form-data-boundary-${boundary}`;
|
||
|
this.contentType = `multipart/form-data; boundary=${this.boundary}`;
|
||
|
__classPrivateFieldSet(this, _FormDataEncoder_footer, __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(`${__classPrivateFieldGet(this, _FormDataEncoder_DASHES, "f")}${this.boundary}${__classPrivateFieldGet(this, _FormDataEncoder_DASHES, "f")}${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f").repeat(2)}`), "f");
|
||
|
const headers = {
|
||
|
"Content-Type": this.contentType
|
||
|
};
|
||
|
const contentLength = __classPrivateFieldGet(this, _FormDataEncoder_instances, "m", _FormDataEncoder_getContentLength).call(this);
|
||
|
if (contentLength) {
|
||
|
this.contentLength = contentLength;
|
||
|
headers["Content-Length"] = contentLength;
|
||
|
}
|
||
|
this.headers = proxyHeaders(Object.freeze(headers));
|
||
|
Object.defineProperties(this, {
|
||
|
boundary: readonlyProp,
|
||
|
contentType: readonlyProp,
|
||
|
contentLength: readonlyProp,
|
||
|
headers: readonlyProp
|
||
|
});
|
||
|
}
|
||
|
getContentLength() {
|
||
|
return this.contentLength == null ? undefined : Number(this.contentLength);
|
||
|
}
|
||
|
*values() {
|
||
|
for (const [name, raw] of __classPrivateFieldGet(this, _FormDataEncoder_form, "f")) {
|
||
|
const value = isFile(raw) ? raw : __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(normalizeValue(raw));
|
||
|
yield __classPrivateFieldGet(this, _FormDataEncoder_instances, "m", _FormDataEncoder_getFieldHeader).call(this, name, value);
|
||
|
yield value;
|
||
|
yield __classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES, "f");
|
||
|
}
|
||
|
yield __classPrivateFieldGet(this, _FormDataEncoder_footer, "f");
|
||
|
}
|
||
|
async *encode() {
|
||
|
for (const part of this.values()) {
|
||
|
if (isFile(part)) {
|
||
|
yield* getStreamIterator(part.stream());
|
||
|
}
|
||
|
else {
|
||
|
yield part;
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
[(_FormDataEncoder_CRLF = new WeakMap(), _FormDataEncoder_CRLF_BYTES = new WeakMap(), _FormDataEncoder_CRLF_BYTES_LENGTH = new WeakMap(), _FormDataEncoder_DASHES = new WeakMap(), _FormDataEncoder_encoder = new WeakMap(), _FormDataEncoder_footer = new WeakMap(), _FormDataEncoder_form = new WeakMap(), _FormDataEncoder_options = new WeakMap(), _FormDataEncoder_instances = new WeakSet(), _FormDataEncoder_getFieldHeader = function _FormDataEncoder_getFieldHeader(name, value) {
|
||
|
let header = "";
|
||
|
header += `${__classPrivateFieldGet(this, _FormDataEncoder_DASHES, "f")}${this.boundary}${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")}`;
|
||
|
header += `Content-Disposition: form-data; name="${escapeName(name)}"`;
|
||
|
if (isFile(value)) {
|
||
|
header += `; filename="${escapeName(value.name)}"${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")}`;
|
||
|
header += `Content-Type: ${value.type || "application/octet-stream"}`;
|
||
|
}
|
||
|
const size = isFile(value) ? value.size : value.byteLength;
|
||
|
if (__classPrivateFieldGet(this, _FormDataEncoder_options, "f").enableAdditionalHeaders === true
|
||
|
&& size != null
|
||
|
&& !isNaN(size)) {
|
||
|
header += `${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")}Content-Length: ${isFile(value) ? value.size : value.byteLength}`;
|
||
|
}
|
||
|
return __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(`${header}${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f").repeat(2)}`);
|
||
|
}, _FormDataEncoder_getContentLength = function _FormDataEncoder_getContentLength() {
|
||
|
let length = 0;
|
||
|
for (const [name, raw] of __classPrivateFieldGet(this, _FormDataEncoder_form, "f")) {
|
||
|
const value = isFile(raw) ? raw : __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(normalizeValue(raw));
|
||
|
const size = isFile(value) ? value.size : value.byteLength;
|
||
|
if (size == null || isNaN(size)) {
|
||
|
return undefined;
|
||
|
}
|
||
|
length += __classPrivateFieldGet(this, _FormDataEncoder_instances, "m", _FormDataEncoder_getFieldHeader).call(this, name, value).byteLength;
|
||
|
length += size;
|
||
|
length += __classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES_LENGTH, "f");
|
||
|
}
|
||
|
return String(length + __classPrivateFieldGet(this, _FormDataEncoder_footer, "f").byteLength);
|
||
|
}, Symbol.iterator)]() {
|
||
|
return this.values();
|
||
|
}
|
||
|
[Symbol.asyncIterator]() {
|
||
|
return this.encode();
|
||
|
}
|
||
|
}
|
||
|
|
||
|
function isFormData(body) {
|
||
|
return is.nodeStream(body) && is.function_(body.getBoundary);
|
||
|
}
|
||
|
|
||
|
async function getBodySize(body, headers) {
|
||
|
if (headers && 'content-length' in headers) {
|
||
|
return Number(headers['content-length']);
|
||
|
}
|
||
|
if (!body) {
|
||
|
return 0;
|
||
|
}
|
||
|
if (is.string(body)) {
|
||
|
return Buffer$1.byteLength(body);
|
||
|
}
|
||
|
if (is.buffer(body)) {
|
||
|
return body.length;
|
||
|
}
|
||
|
if (isFormData(body)) {
|
||
|
return promisify$1(body.getLength.bind(body))();
|
||
|
}
|
||
|
return undefined;
|
||
|
}
|
||
|
|
||
|
function proxyEvents$2(from, to, events) {
|
||
|
const eventFunctions = {};
|
||
|
for (const event of events) {
|
||
|
const eventFunction = (...args) => {
|
||
|
to.emit(event, ...args);
|
||
|
};
|
||
|
eventFunctions[event] = eventFunction;
|
||
|
from.on(event, eventFunction);
|
||
|
}
|
||
|
return () => {
|
||
|
for (const [event, eventFunction] of Object.entries(eventFunctions)) {
|
||
|
from.off(event, eventFunction);
|
||
|
}
|
||
|
};
|
||
|
}
|
||
|
|
||
|
// When attaching listeners, it's very easy to forget about them.
|
||
|
// Especially if you do error handling and set timeouts.
|
||
|
// So instead of checking if it's proper to throw an error on every timeout ever,
|
||
|
// use this simple tool which will remove all listeners you have attached.
|
||
|
function unhandle() {
|
||
|
const handlers = [];
|
||
|
return {
|
||
|
once(origin, event, fn) {
|
||
|
origin.once(event, fn);
|
||
|
handlers.push({ origin, event, fn });
|
||
|
},
|
||
|
unhandleAll() {
|
||
|
for (const handler of handlers) {
|
||
|
const { origin, event, fn } = handler;
|
||
|
origin.removeListener(event, fn);
|
||
|
}
|
||
|
handlers.length = 0;
|
||
|
},
|
||
|
};
|
||
|
}

const reentry = Symbol('reentry');
const noop$1 = () => { };
class TimeoutError extends Error {
    constructor(threshold, event) {
        super(`Timeout awaiting '${event}' for ${threshold}ms`);
        Object.defineProperty(this, "event", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: event
        });
        Object.defineProperty(this, "code", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        this.name = 'TimeoutError';
        this.code = 'ETIMEDOUT';
    }
}
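
// timedOut() below arms one timer per configured phase (lookup, connect, secureConnect,
// send, response, read, request, socket) and destroys the request with a TimeoutError
// (code 'ETIMEDOUT') when a phase overruns; the returned cancelTimeouts() clears every
// pending timer once the request settles.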
|
||
|
function timedOut(request, delays, options) {
|
||
|
if (reentry in request) {
|
||
|
return noop$1;
|
||
|
}
|
||
|
request[reentry] = true;
|
||
|
const cancelers = [];
|
||
|
const { once, unhandleAll } = unhandle();
|
||
|
const addTimeout = (delay, callback, event) => {
|
||
|
const timeout = setTimeout(callback, delay, delay, event);
|
||
|
timeout.unref?.();
|
||
|
const cancel = () => {
|
||
|
clearTimeout(timeout);
|
||
|
};
|
||
|
cancelers.push(cancel);
|
||
|
return cancel;
|
||
|
};
|
||
|
const { host, hostname } = options;
|
||
|
const timeoutHandler = (delay, event) => {
|
||
|
request.destroy(new TimeoutError(delay, event));
|
||
|
};
|
||
|
const cancelTimeouts = () => {
|
||
|
for (const cancel of cancelers) {
|
||
|
cancel();
|
||
|
}
|
||
|
unhandleAll();
|
||
|
};
|
||
|
request.once('error', error => {
|
||
|
cancelTimeouts();
|
||
|
// Save original behavior
|
||
|
/* istanbul ignore next */
|
||
|
if (request.listenerCount('error') === 0) {
|
||
|
throw error;
|
||
|
}
|
||
|
});
|
||
|
if (typeof delays.request !== 'undefined') {
|
||
|
const cancelTimeout = addTimeout(delays.request, timeoutHandler, 'request');
|
||
|
once(request, 'response', (response) => {
|
||
|
once(response, 'end', cancelTimeout);
|
||
|
});
|
||
|
}
|
||
|
if (typeof delays.socket !== 'undefined') {
|
||
|
const { socket } = delays;
|
||
|
const socketTimeoutHandler = () => {
|
||
|
timeoutHandler(socket, 'socket');
|
||
|
};
|
||
|
request.setTimeout(socket, socketTimeoutHandler);
|
||
|
// `request.setTimeout(0)` causes a memory leak.
|
||
|
// We can just remove the listener and forget about the timer - it's unreffed.
|
||
|
// See https://github.com/sindresorhus/got/issues/690
|
||
|
cancelers.push(() => {
|
||
|
request.removeListener('timeout', socketTimeoutHandler);
|
||
|
});
|
||
|
}
|
||
|
const hasLookup = typeof delays.lookup !== 'undefined';
|
||
|
const hasConnect = typeof delays.connect !== 'undefined';
|
||
|
const hasSecureConnect = typeof delays.secureConnect !== 'undefined';
|
||
|
const hasSend = typeof delays.send !== 'undefined';
|
||
|
if (hasLookup || hasConnect || hasSecureConnect || hasSend) {
|
||
|
once(request, 'socket', (socket) => {
|
||
|
const { socketPath } = request;
|
||
|
/* istanbul ignore next: hard to test */
|
||
|
if (socket.connecting) {
|
||
|
const hasPath = Boolean(socketPath ?? net.isIP(hostname ?? host ?? '') !== 0);
|
||
|
if (hasLookup && !hasPath && typeof socket.address().address === 'undefined') {
|
||
|
const cancelTimeout = addTimeout(delays.lookup, timeoutHandler, 'lookup');
|
||
|
once(socket, 'lookup', cancelTimeout);
|
||
|
}
|
||
|
if (hasConnect) {
|
||
|
const timeConnect = () => addTimeout(delays.connect, timeoutHandler, 'connect');
|
||
|
if (hasPath) {
|
||
|
once(socket, 'connect', timeConnect());
|
||
|
}
|
||
|
else {
|
||
|
once(socket, 'lookup', (error) => {
|
||
|
if (error === null) {
|
||
|
once(socket, 'connect', timeConnect());
|
||
|
}
|
||
|
});
|
||
|
}
|
||
|
}
|
||
|
if (hasSecureConnect && options.protocol === 'https:') {
|
||
|
once(socket, 'connect', () => {
|
||
|
const cancelTimeout = addTimeout(delays.secureConnect, timeoutHandler, 'secureConnect');
|
||
|
once(socket, 'secureConnect', cancelTimeout);
|
||
|
});
|
||
|
}
|
||
|
}
|
||
|
if (hasSend) {
|
||
|
const timeRequest = () => addTimeout(delays.send, timeoutHandler, 'send');
|
||
|
/* istanbul ignore next: hard to test */
|
||
|
if (socket.connecting) {
|
||
|
once(socket, 'connect', () => {
|
||
|
once(request, 'upload-complete', timeRequest());
|
||
|
});
|
||
|
}
|
||
|
else {
|
||
|
once(request, 'upload-complete', timeRequest());
|
||
|
}
|
||
|
}
|
||
|
});
|
||
|
}
|
||
|
if (typeof delays.response !== 'undefined') {
|
||
|
once(request, 'upload-complete', () => {
|
||
|
const cancelTimeout = addTimeout(delays.response, timeoutHandler, 'response');
|
||
|
once(request, 'response', cancelTimeout);
|
||
|
});
|
||
|
}
|
||
|
if (typeof delays.read !== 'undefined') {
|
||
|
once(request, 'response', (response) => {
|
||
|
const cancelTimeout = addTimeout(delays.read, timeoutHandler, 'read');
|
||
|
once(response, 'end', cancelTimeout);
|
||
|
});
|
||
|
}
|
||
|
return cancelTimeouts;
|
||
|
}

function urlToOptions(url) {
    // Cast to URL
    url = url;
    const options = {
        protocol: url.protocol,
        hostname: is.string(url.hostname) && url.hostname.startsWith('[') ? url.hostname.slice(1, -1) : url.hostname,
        host: url.host,
        hash: url.hash,
        search: url.search,
        pathname: url.pathname,
        href: url.href,
        path: `${url.pathname || ''}${url.search || ''}`,
    };
    if (is.string(url.port) && url.port.length > 0) {
        options.port = Number(url.port);
    }
    if (url.username || url.password) {
        options.auth = `${url.username || ''}:${url.password || ''}`;
    }
    return options;
}
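
// urlToOptions maps a WHATWG URL onto the legacy http.request options shape: bracketed
// IPv6 hostnames such as '[::1]' lose their brackets, the port is only set when the URL
// carries one explicitly, and credentials are folded back into an 'auth' string.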

class WeakableMap {
    constructor() {
        Object.defineProperty(this, "weakMap", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "map", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        this.weakMap = new WeakMap();
        this.map = new Map();
    }
    set(key, value) {
        if (typeof key === 'object') {
            this.weakMap.set(key, value);
        }
        else {
            this.map.set(key, value);
        }
    }
    get(key) {
        if (typeof key === 'object') {
            return this.weakMap.get(key);
        }
        return this.map.get(key);
    }
    has(key) {
        if (typeof key === 'object') {
            return this.weakMap.has(key);
        }
        return this.map.has(key);
    }
}
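
// WeakableMap dispatches on key type: object keys go into a WeakMap so cached values can
// be garbage-collected together with the key, while primitive keys (which a WeakMap cannot
// hold) fall back to an ordinary Map.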

const calculateRetryDelay = ({ attemptCount, retryOptions, error, retryAfter, computedValue, }) => {
    if (error.name === 'RetryError') {
        return 1;
    }
    if (attemptCount > retryOptions.limit) {
        return 0;
    }
    const hasMethod = retryOptions.methods.includes(error.options.method);
    const hasErrorCode = retryOptions.errorCodes.includes(error.code);
    const hasStatusCode = error.response && retryOptions.statusCodes.includes(error.response.statusCode);
    if (!hasMethod || (!hasErrorCode && !hasStatusCode)) {
        return 0;
    }
    if (error.response) {
        if (retryAfter) {
            // In this case `computedValue` is `options.request.timeout`
            if (retryAfter > computedValue) {
                return 0;
            }
            return retryAfter;
        }
        if (error.response.statusCode === 413) {
            return 0;
        }
    }
    const noise = Math.random() * retryOptions.noise;
    return Math.min(((2 ** (attemptCount - 1)) * 1000), retryOptions.backoffLimit) + noise;
};
var calculateRetryDelay$1 = calculateRetryDelay;
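
// The fallback delay is exponential backoff with jitter: 2^(attemptCount-1) seconds,
// capped at retryOptions.backoffLimit milliseconds, plus up to retryOptions.noise ms of
// random noise. For example, attempts 1..4 yield roughly 1000, 2000, 4000 and 8000 ms
// before the cap and noise; a Retry-After value, when present and within the request
// timeout, always wins over the computed backoff.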
|
||
|
|
||
|
const {Resolver: AsyncResolver} = promises;
|
||
|
|
||
|
const kCacheableLookupCreateConnection = Symbol('cacheableLookupCreateConnection');
|
||
|
const kCacheableLookupInstance = Symbol('cacheableLookupInstance');
|
||
|
const kExpires = Symbol('expires');
|
||
|
|
||
|
const supportsALL = typeof ALL === 'number';
|
||
|
|
||
|
const verifyAgent = agent => {
|
||
|
if (!(agent && typeof agent.createConnection === 'function')) {
|
||
|
throw new Error('Expected an Agent instance as the first argument');
|
||
|
}
|
||
|
};
|
||
|
|
||
|
const map4to6 = entries => {
|
||
|
for (const entry of entries) {
|
||
|
if (entry.family === 6) {
|
||
|
continue;
|
||
|
}
|
||
|
|
||
|
entry.address = `::ffff:${entry.address}`;
|
||
|
entry.family = 6;
|
||
|
}
|
||
|
};
|
||
|
|
||
|
const getIfaceInfo = () => {
|
||
|
let has4 = false;
|
||
|
let has6 = false;
|
||
|
|
||
|
for (const device of Object.values(os__default.networkInterfaces())) {
|
||
|
for (const iface of device) {
|
||
|
if (iface.internal) {
|
||
|
continue;
|
||
|
}
|
||
|
|
||
|
if (iface.family === 'IPv6') {
|
||
|
has6 = true;
|
||
|
} else {
|
||
|
has4 = true;
|
||
|
}
|
||
|
|
||
|
if (has4 && has6) {
|
||
|
return {has4, has6};
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
return {has4, has6};
|
||
|
};
|
||
|
|
||
|
const isIterable = map => {
|
||
|
return Symbol.iterator in map;
|
||
|
};
|
||
|
|
||
|
const ignoreNoResultErrors = dnsPromise => {
|
||
|
return dnsPromise.catch(error => {
|
||
|
if (
|
||
|
error.code === 'ENODATA' ||
|
||
|
error.code === 'ENOTFOUND' ||
|
||
|
error.code === 'ENOENT' // Windows: name exists, but not this record type
|
||
|
) {
|
||
|
return [];
|
||
|
}
|
||
|
|
||
|
throw error;
|
||
|
});
|
||
|
};
|
||
|
|
||
|
const ttl = {ttl: true};
|
||
|
const all = {all: true};
|
||
|
const all4 = {all: true, family: 4};
|
||
|
const all6 = {all: true, family: 6};
|
||
|
|
||
|
class CacheableLookup {
	constructor({
		cache = new Map(),
		maxTtl = Infinity,
		fallbackDuration = 3600,
		errorTtl = 0.15,
		resolver = new AsyncResolver(),
		lookup: lookup$1 = lookup
	} = {}) {
		this.maxTtl = maxTtl;
		this.errorTtl = errorTtl;

		this._cache = cache;
		this._resolver = resolver;
		this._dnsLookup = lookup$1 && promisify$1(lookup$1);
		this.stats = {
			cache: 0,
			query: 0
		};

		if (this._resolver instanceof AsyncResolver) {
			this._resolve4 = this._resolver.resolve4.bind(this._resolver);
			this._resolve6 = this._resolver.resolve6.bind(this._resolver);
		} else {
			this._resolve4 = promisify$1(this._resolver.resolve4.bind(this._resolver));
			this._resolve6 = promisify$1(this._resolver.resolve6.bind(this._resolver));
		}

		this._iface = getIfaceInfo();

		this._pending = {};
		this._nextRemovalTime = false;
		this._hostnamesToFallback = new Set();

		this.fallbackDuration = fallbackDuration;

		if (fallbackDuration > 0) {
			const interval = setInterval(() => {
				this._hostnamesToFallback.clear();
			}, fallbackDuration * 1000);

			/* istanbul ignore next: There is no `interval.unref()` when running inside an Electron renderer */
			if (interval.unref) {
				interval.unref();
			}

			this._fallbackInterval = interval;
		}

		this.lookup = this.lookup.bind(this);
		this.lookupAsync = this.lookupAsync.bind(this);
	}

	set servers(servers) {
		this.clear();

		this._resolver.setServers(servers);
	}

	get servers() {
		return this._resolver.getServers();
	}

	lookup(hostname, options, callback) {
		if (typeof options === 'function') {
			callback = options;
			options = {};
		} else if (typeof options === 'number') {
			options = {
				family: options
			};
		}

		if (!callback) {
			throw new Error('Callback must be a function.');
		}

		// eslint-disable-next-line promise/prefer-await-to-then
		this.lookupAsync(hostname, options).then(result => {
			if (options.all) {
				callback(null, result);
			} else {
				callback(null, result.address, result.family, result.expires, result.ttl, result.source);
			}
		}, callback);
	}

	async lookupAsync(hostname, options = {}) {
		if (typeof options === 'number') {
			options = {
				family: options
			};
		}

		let cached = await this.query(hostname);

		if (options.family === 6) {
			const filtered = cached.filter(entry => entry.family === 6);

			if (options.hints & V4MAPPED) {
				if ((supportsALL && options.hints & ALL) || filtered.length === 0) {
					map4to6(cached);
				} else {
					cached = filtered;
				}
			} else {
				cached = filtered;
			}
		} else if (options.family === 4) {
			cached = cached.filter(entry => entry.family === 4);
		}

		if (options.hints & ADDRCONFIG) {
			const {_iface} = this;
			cached = cached.filter(entry => entry.family === 6 ? _iface.has6 : _iface.has4);
		}

		if (cached.length === 0) {
			const error = new Error(`cacheableLookup ENOTFOUND ${hostname}`);
			error.code = 'ENOTFOUND';
			error.hostname = hostname;

			throw error;
		}

		if (options.all) {
			return cached;
		}

		return cached[0];
	}

	async query(hostname) {
		let source = 'cache';
		let cached = await this._cache.get(hostname);

		if (cached) {
			this.stats.cache++;
		}

		if (!cached) {
			const pending = this._pending[hostname];
			if (pending) {
				this.stats.cache++;
				cached = await pending;
			} else {
				source = 'query';
				const newPromise = this.queryAndCache(hostname);
				this._pending[hostname] = newPromise;
				this.stats.query++;
				try {
					cached = await newPromise;
				} finally {
					delete this._pending[hostname];
				}
			}
		}

		cached = cached.map(entry => {
			return {...entry, source};
		});

		return cached;
	}

	async _resolve(hostname) {
		// ANY is unsafe as it doesn't trigger new queries in the underlying server.
		const [A, AAAA] = await Promise.all([
			ignoreNoResultErrors(this._resolve4(hostname, ttl)),
			ignoreNoResultErrors(this._resolve6(hostname, ttl))
		]);

		let aTtl = 0;
		let aaaaTtl = 0;
		let cacheTtl = 0;

		const now = Date.now();

		for (const entry of A) {
			entry.family = 4;
			entry.expires = now + (entry.ttl * 1000);

			aTtl = Math.max(aTtl, entry.ttl);
		}

		for (const entry of AAAA) {
			entry.family = 6;
			entry.expires = now + (entry.ttl * 1000);

			aaaaTtl = Math.max(aaaaTtl, entry.ttl);
		}

		if (A.length > 0) {
			if (AAAA.length > 0) {
				cacheTtl = Math.min(aTtl, aaaaTtl);
			} else {
				cacheTtl = aTtl;
			}
		} else {
			cacheTtl = aaaaTtl;
		}

		return {
			entries: [
				...A,
				...AAAA
			],
			cacheTtl
		};
	}

	async _lookup(hostname) {
		try {
			const [A, AAAA] = await Promise.all([
				// Passing {all: true} doesn't return all IPv4 and IPv6 entries.
				// See https://github.com/szmarczak/cacheable-lookup/issues/42
				ignoreNoResultErrors(this._dnsLookup(hostname, all4)),
				ignoreNoResultErrors(this._dnsLookup(hostname, all6))
			]);

			return {
				entries: [
					...A,
					...AAAA
				],
				cacheTtl: 0
			};
		} catch {
			return {
				entries: [],
				cacheTtl: 0
			};
		}
	}

	async _set(hostname, data, cacheTtl) {
		if (this.maxTtl > 0 && cacheTtl > 0) {
			cacheTtl = Math.min(cacheTtl, this.maxTtl) * 1000;
			data[kExpires] = Date.now() + cacheTtl;

			try {
				await this._cache.set(hostname, data, cacheTtl);
			} catch (error) {
				this.lookupAsync = async () => {
					const cacheError = new Error('Cache Error. Please recreate the CacheableLookup instance.');
					cacheError.cause = error;

					throw cacheError;
				};
			}

			if (isIterable(this._cache)) {
				this._tick(cacheTtl);
			}
		}
	}

	async queryAndCache(hostname) {
		if (this._hostnamesToFallback.has(hostname)) {
			return this._dnsLookup(hostname, all);
		}

		let query = await this._resolve(hostname);

		if (query.entries.length === 0 && this._dnsLookup) {
			query = await this._lookup(hostname);

			if (query.entries.length !== 0 && this.fallbackDuration > 0) {
				// Use `dns.lookup(...)` for that particular hostname
				this._hostnamesToFallback.add(hostname);
			}
		}

		const cacheTtl = query.entries.length === 0 ? this.errorTtl : query.cacheTtl;
		await this._set(hostname, query.entries, cacheTtl);

		return query.entries;
	}

	_tick(ms) {
		const nextRemovalTime = this._nextRemovalTime;

		if (!nextRemovalTime || ms < nextRemovalTime) {
			clearTimeout(this._removalTimeout);

			this._nextRemovalTime = ms;

			this._removalTimeout = setTimeout(() => {
				this._nextRemovalTime = false;

				let nextExpiry = Infinity;

				const now = Date.now();

				for (const [hostname, entries] of this._cache) {
					const expires = entries[kExpires];

					if (now >= expires) {
						this._cache.delete(hostname);
					} else if (expires < nextExpiry) {
						nextExpiry = expires;
					}
				}

				if (nextExpiry !== Infinity) {
					this._tick(nextExpiry - now);
				}
			}, ms);

			/* istanbul ignore next: There is no `timeout.unref()` when running inside an Electron renderer */
			if (this._removalTimeout.unref) {
				this._removalTimeout.unref();
			}
		}
	}

	install(agent) {
		verifyAgent(agent);

		if (kCacheableLookupCreateConnection in agent) {
			throw new Error('CacheableLookup has been already installed');
		}

		agent[kCacheableLookupCreateConnection] = agent.createConnection;
		agent[kCacheableLookupInstance] = this;

		agent.createConnection = (options, callback) => {
			if (!('lookup' in options)) {
				options.lookup = this.lookup;
			}

			return agent[kCacheableLookupCreateConnection](options, callback);
		};
	}

	uninstall(agent) {
		verifyAgent(agent);

		if (agent[kCacheableLookupCreateConnection]) {
			if (agent[kCacheableLookupInstance] !== this) {
				throw new Error('The agent is not owned by this CacheableLookup instance');
			}

			agent.createConnection = agent[kCacheableLookupCreateConnection];

			delete agent[kCacheableLookupCreateConnection];
			delete agent[kCacheableLookupInstance];
		}
	}

	updateInterfaceInfo() {
		const {_iface} = this;

		this._iface = getIfaceInfo();

		if ((_iface.has4 && !this._iface.has4) || (_iface.has6 && !this._iface.has6)) {
			this._cache.clear();
		}
	}

	clear(hostname) {
		if (hostname) {
			this._cache.delete(hostname);
			return;
		}

		this._cache.clear();
	}
}

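// In short, CacheableLookup (above) answers lookups from an in-memory (or user-supplied)
// cache keyed by hostname, resolves A/AAAA records with their TTLs via
// dns.promises.Resolver, coalesces concurrent queries per hostname, and falls back to
// plain dns.lookup() for hostnames the configured resolver cannot answer.
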
let QuickLRU$2 = class QuickLRU {
|
||
|
constructor(options = {}) {
|
||
|
if (!(options.maxSize && options.maxSize > 0)) {
|
||
|
throw new TypeError('`maxSize` must be a number greater than 0');
|
||
|
}
|
||
|
|
||
|
this.maxSize = options.maxSize;
|
||
|
this.onEviction = options.onEviction;
|
||
|
this.cache = new Map();
|
||
|
this.oldCache = new Map();
|
||
|
this._size = 0;
|
||
|
}
|
||
|
|
||
|
_set(key, value) {
|
||
|
this.cache.set(key, value);
|
||
|
this._size++;
|
||
|
|
||
|
if (this._size >= this.maxSize) {
|
||
|
this._size = 0;
|
||
|
|
||
|
if (typeof this.onEviction === 'function') {
|
||
|
for (const [key, value] of this.oldCache.entries()) {
|
||
|
this.onEviction(key, value);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
this.oldCache = this.cache;
|
||
|
this.cache = new Map();
|
||
|
}
|
||
|
}
|
||
|
|
||
|
get(key) {
|
||
|
if (this.cache.has(key)) {
|
||
|
return this.cache.get(key);
|
||
|
}
|
||
|
|
||
|
if (this.oldCache.has(key)) {
|
||
|
const value = this.oldCache.get(key);
|
||
|
this.oldCache.delete(key);
|
||
|
this._set(key, value);
|
||
|
return value;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
set(key, value) {
|
||
|
if (this.cache.has(key)) {
|
||
|
this.cache.set(key, value);
|
||
|
} else {
|
||
|
this._set(key, value);
|
||
|
}
|
||
|
|
||
|
return this;
|
||
|
}
|
||
|
|
||
|
has(key) {
|
||
|
return this.cache.has(key) || this.oldCache.has(key);
|
||
|
}
|
||
|
|
||
|
peek(key) {
|
||
|
if (this.cache.has(key)) {
|
||
|
return this.cache.get(key);
|
||
|
}
|
||
|
|
||
|
if (this.oldCache.has(key)) {
|
||
|
return this.oldCache.get(key);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
delete(key) {
|
||
|
const deleted = this.cache.delete(key);
|
||
|
if (deleted) {
|
||
|
this._size--;
|
||
|
}
|
||
|
|
||
|
return this.oldCache.delete(key) || deleted;
|
||
|
}
|
||
|
|
||
|
clear() {
|
||
|
this.cache.clear();
|
||
|
this.oldCache.clear();
|
||
|
this._size = 0;
|
||
|
}
|
||
|
|
||
|
* keys() {
|
||
|
for (const [key] of this) {
|
||
|
yield key;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
* values() {
|
||
|
for (const [, value] of this) {
|
||
|
yield value;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
* [Symbol.iterator]() {
|
||
|
for (const item of this.cache) {
|
||
|
yield item;
|
||
|
}
|
||
|
|
||
|
for (const item of this.oldCache) {
|
||
|
const [key] = item;
|
||
|
if (!this.cache.has(key)) {
|
||
|
yield item;
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
get size() {
|
||
|
let oldCacheSize = 0;
|
||
|
for (const key of this.oldCache.keys()) {
|
||
|
if (!this.cache.has(key)) {
|
||
|
oldCacheSize++;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
return Math.min(this._size + oldCacheSize, this.maxSize);
|
||
|
}
|
||
|
};
|
||
|
|
||
|
var quickLru = QuickLRU$2;
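// QuickLRU (above) approximates LRU with two generations: writes go into `cache`, and
// once `maxSize` entries accumulate, `cache` is demoted to `oldCache` (whose entries are
// passed to `onEviction`). A `get()` hit in `oldCache` promotes the entry back.
// Rough usage sketch: new QuickLRU({maxSize: 100}).set('key', 'value');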
|
||
|
|
||
|
var delayAsyncDestroy$2 = stream => {
|
||
|
if (stream.listenerCount('error') !== 0) {
|
||
|
return stream;
|
||
|
}
|
||
|
|
||
|
stream.__destroy = stream._destroy;
|
||
|
stream._destroy = (...args) => {
|
||
|
const callback = args.pop();
|
||
|
|
||
|
stream.__destroy(...args, async error => {
|
||
|
await Promise.resolve();
|
||
|
callback(error);
|
||
|
});
|
||
|
};
|
||
|
|
||
|
const onError = error => {
|
||
|
// eslint-disable-next-line promise/prefer-await-to-then
|
||
|
Promise.resolve().then(() => {
|
||
|
stream.emit('error', error);
|
||
|
});
|
||
|
};
|
||
|
|
||
|
stream.once('error', onError);
|
||
|
|
||
|
// eslint-disable-next-line promise/prefer-await-to-then
|
||
|
Promise.resolve().then(() => {
|
||
|
stream.off('error', onError);
|
||
|
});
|
||
|
|
||
|
return stream;
|
||
|
};
|
||
|
|
||
|
// See https://github.com/facebook/jest/issues/2549
|
||
|
// eslint-disable-next-line node/prefer-global/url
|
||
|
const {URL: URL$4} = require$$0$5;
|
||
|
const EventEmitter = require$$0$1;
|
||
|
const tls$3 = require$$1$1;
|
||
|
const http2$2 = require$$3;
|
||
|
const QuickLRU$1 = quickLru;
|
||
|
const delayAsyncDestroy$1 = delayAsyncDestroy$2;
|
||
|
|
||
|
const kCurrentStreamCount = Symbol('currentStreamCount');
|
||
|
const kRequest = Symbol('request');
|
||
|
const kOriginSet = Symbol('cachedOriginSet');
|
||
|
const kGracefullyClosing = Symbol('gracefullyClosing');
|
||
|
const kLength = Symbol('length');
|
||
|
|
||
|
const nameKeys = [
|
||
|
// Not an Agent option actually
|
||
|
'createConnection',
|
||
|
|
||
|
// `http2.connect()` options
|
||
|
'maxDeflateDynamicTableSize',
|
||
|
'maxSettings',
|
||
|
'maxSessionMemory',
|
||
|
'maxHeaderListPairs',
|
||
|
'maxOutstandingPings',
|
||
|
'maxReservedRemoteStreams',
|
||
|
'maxSendHeaderBlockLength',
|
||
|
'paddingStrategy',
|
||
|
'peerMaxConcurrentStreams',
|
||
|
'settings',
|
||
|
|
||
|
// `tls.connect()` source options
|
||
|
'family',
|
||
|
'localAddress',
|
||
|
'rejectUnauthorized',
|
||
|
|
||
|
// `tls.connect()` secure context options
|
||
|
'pskCallback',
|
||
|
'minDHSize',
|
||
|
|
||
|
// `tls.connect()` destination options
|
||
|
// - `servername` is automatically validated, skip it
|
||
|
// - `host` and `port` just describe the destination server,
|
||
|
'path',
|
||
|
'socket',
|
||
|
|
||
|
// `tls.createSecureContext()` options
|
||
|
'ca',
|
||
|
'cert',
|
||
|
'sigalgs',
|
||
|
'ciphers',
|
||
|
'clientCertEngine',
|
||
|
'crl',
|
||
|
'dhparam',
|
||
|
'ecdhCurve',
|
||
|
'honorCipherOrder',
|
||
|
'key',
|
||
|
'privateKeyEngine',
|
||
|
'privateKeyIdentifier',
|
||
|
'maxVersion',
|
||
|
'minVersion',
|
||
|
'pfx',
|
||
|
'secureOptions',
|
||
|
'secureProtocol',
|
||
|
'sessionIdContext',
|
||
|
'ticketKeys'
|
||
|
];
|
||
|
|
||
|
const getSortedIndex = (array, value, compare) => {
|
||
|
let low = 0;
|
||
|
let high = array.length;
|
||
|
|
||
|
while (low < high) {
|
||
|
const mid = (low + high) >>> 1;
|
||
|
|
||
|
if (compare(array[mid], value)) {
|
||
|
low = mid + 1;
|
||
|
} else {
|
||
|
high = mid;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
return low;
|
||
|
};
|
||
|
|
||
|
const compareSessions = (a, b) => a.remoteSettings.maxConcurrentStreams > b.remoteSettings.maxConcurrentStreams;
|
||
|
|
||
|
// See https://tools.ietf.org/html/rfc8336
|
||
|
const closeCoveredSessions = (where, session) => {
|
||
|
// Clients SHOULD NOT emit new requests on any connection whose Origin
|
||
|
// Set is a proper subset of another connection's Origin Set, and they
|
||
|
// SHOULD close it once all outstanding requests are satisfied.
|
||
|
for (let index = 0; index < where.length; index++) {
|
||
|
const coveredSession = where[index];
|
||
|
|
||
|
if (
|
||
|
// Unfortunately `.every()` returns true for an empty array
|
||
|
coveredSession[kOriginSet].length > 0
|
||
|
|
||
|
// The set is a proper subset when its length is less than the other set.
|
||
|
&& coveredSession[kOriginSet].length < session[kOriginSet].length
|
||
|
|
||
|
// And the other set includes all elements of the subset.
|
||
|
&& coveredSession[kOriginSet].every(origin => session[kOriginSet].includes(origin))
|
||
|
|
||
|
// Makes sure that the session can handle all requests from the covered session.
|
||
|
&& (coveredSession[kCurrentStreamCount] + session[kCurrentStreamCount]) <= session.remoteSettings.maxConcurrentStreams
|
||
|
) {
|
||
|
// This allows pending requests to finish and prevents making new requests.
|
||
|
gracefullyClose(coveredSession);
|
||
|
}
|
||
|
}
|
||
|
};
|
||
|
|
||
|
// This is basically inverted `closeCoveredSessions(...)`.
|
||
|
const closeSessionIfCovered = (where, coveredSession) => {
|
||
|
for (let index = 0; index < where.length; index++) {
|
||
|
const session = where[index];
|
||
|
|
||
|
if (
|
||
|
coveredSession[kOriginSet].length > 0
|
||
|
&& coveredSession[kOriginSet].length < session[kOriginSet].length
|
||
|
&& coveredSession[kOriginSet].every(origin => session[kOriginSet].includes(origin))
|
||
|
&& (coveredSession[kCurrentStreamCount] + session[kCurrentStreamCount]) <= session.remoteSettings.maxConcurrentStreams
|
||
|
) {
|
||
|
gracefullyClose(coveredSession);
|
||
|
|
||
|
return true;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
return false;
|
||
|
};
|
||
|
|
||
|
const gracefullyClose = session => {
|
||
|
session[kGracefullyClosing] = true;
|
||
|
|
||
|
if (session[kCurrentStreamCount] === 0) {
|
||
|
session.close();
|
||
|
}
|
||
|
};
|
||
|
|
||
|
let Agent$4 = class Agent extends EventEmitter {
|
||
|
constructor({timeout = 0, maxSessions = Number.POSITIVE_INFINITY, maxEmptySessions = 10, maxCachedTlsSessions = 100} = {}) {
|
||
|
super();
|
||
|
|
||
|
// SESSIONS[NORMALIZED_OPTIONS] = [];
|
||
|
this.sessions = {};
|
||
|
|
||
|
// The queue for creating new sessions. It looks like this:
|
||
|
// QUEUE[NORMALIZED_OPTIONS][NORMALIZED_ORIGIN] = ENTRY_FUNCTION
|
||
|
//
|
||
|
// It's faster when there are many origins. If there's only one, then QUEUE[`${options}:${origin}`] is faster.
|
||
|
// I guess object creation / deletion is causing the slowdown.
|
||
|
//
|
||
|
// The entry function has `listeners`, `completed` and `destroyed` properties.
|
||
|
// `listeners` is an array of objects containing `resolve` and `reject` functions.
|
||
|
// `completed` is a boolean. It's set to true after ENTRY_FUNCTION is executed.
|
||
|
// `destroyed` is a boolean. If it's set to true, the session will be destroyed if hasn't connected yet.
|
||
|
this.queue = {};
|
||
|
|
||
|
// Each session will use this timeout value.
|
||
|
this.timeout = timeout;
|
||
|
|
||
|
// Max sessions in total
|
||
|
this.maxSessions = maxSessions;
|
||
|
|
||
|
// Max empty sessions in total
|
||
|
this.maxEmptySessions = maxEmptySessions;
|
||
|
|
||
|
this._emptySessionCount = 0;
|
||
|
this._sessionCount = 0;
|
||
|
|
||
|
// We don't support push streams by default.
|
||
|
this.settings = {
|
||
|
enablePush: false,
|
||
|
initialWindowSize: 1024 * 1024 * 32 // 32MB, see https://github.com/nodejs/node/issues/38426
|
||
|
};
|
||
|
|
||
|
// Reusing TLS sessions increases performance.
|
||
|
this.tlsSessionCache = new QuickLRU$1({maxSize: maxCachedTlsSessions});
|
||
|
}
|
||
|
|
||
|
get protocol() {
|
||
|
return 'https:';
|
||
|
}
|
||
|
|
||
|
normalizeOptions(options) {
|
||
|
let normalized = '';
|
||
|
|
||
|
for (let index = 0; index < nameKeys.length; index++) {
|
||
|
const key = nameKeys[index];
|
||
|
|
||
|
normalized += ':';
|
||
|
|
||
|
if (options && options[key] !== undefined) {
|
||
|
normalized += options[key];
|
||
|
}
|
||
|
}
|
||
|
|
||
|
return normalized;
|
||
|
}
|
||
|
|
||
|
_processQueue() {
|
||
|
if (this._sessionCount >= this.maxSessions) {
|
||
|
this.closeEmptySessions(this.maxSessions - this._sessionCount + 1);
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
// eslint-disable-next-line guard-for-in
|
||
|
for (const normalizedOptions in this.queue) {
|
||
|
// eslint-disable-next-line guard-for-in
|
||
|
for (const normalizedOrigin in this.queue[normalizedOptions]) {
|
||
|
const item = this.queue[normalizedOptions][normalizedOrigin];
|
||
|
|
||
|
// The entry function can be run only once.
|
||
|
if (!item.completed) {
|
||
|
item.completed = true;
|
||
|
|
||
|
item();
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
_isBetterSession(thisStreamCount, thatStreamCount) {
|
||
|
return thisStreamCount > thatStreamCount;
|
||
|
}
|
||
|
|
||
|
_accept(session, listeners, normalizedOrigin, options) {
|
||
|
let index = 0;
|
||
|
|
||
|
while (index < listeners.length && session[kCurrentStreamCount] < session.remoteSettings.maxConcurrentStreams) {
|
||
|
// We assume `resolve(...)` calls `request(...)` *directly*,
|
||
|
// otherwise the session will get overloaded.
|
||
|
listeners[index].resolve(session);
|
||
|
|
||
|
index++;
|
||
|
}
|
||
|
|
||
|
listeners.splice(0, index);
|
||
|
|
||
|
if (listeners.length > 0) {
|
||
|
this.getSession(normalizedOrigin, options, listeners);
|
||
|
listeners.length = 0;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
getSession(origin, options, listeners) {
|
||
|
return new Promise((resolve, reject) => {
|
||
|
if (Array.isArray(listeners) && listeners.length > 0) {
|
||
|
listeners = [...listeners];
|
||
|
|
||
|
// Resolve the current promise ASAP, we're just moving the listeners.
|
||
|
// They will be executed at a different time.
|
||
|
resolve();
|
||
|
} else {
|
||
|
listeners = [{resolve, reject}];
|
||
|
}
|
||
|
|
||
|
try {
|
||
|
// Parse origin
|
||
|
if (typeof origin === 'string') {
|
||
|
origin = new URL$4(origin);
|
||
|
} else if (!(origin instanceof URL$4)) {
|
||
|
throw new TypeError('The `origin` argument needs to be a string or an URL object');
|
||
|
}
|
||
|
|
||
|
if (options) {
|
||
|
// Validate servername
|
||
|
const {servername} = options;
|
||
|
const {hostname} = origin;
|
||
|
if (servername && hostname !== servername) {
|
||
|
throw new Error(`Origin ${hostname} differs from servername ${servername}`);
|
||
|
}
|
||
|
}
|
||
|
} catch (error) {
|
||
|
for (let index = 0; index < listeners.length; index++) {
|
||
|
listeners[index].reject(error);
|
||
|
}
|
||
|
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
const normalizedOptions = this.normalizeOptions(options);
|
||
|
const normalizedOrigin = origin.origin;
|
||
|
|
||
|
if (normalizedOptions in this.sessions) {
|
||
|
const sessions = this.sessions[normalizedOptions];
|
||
|
|
||
|
let maxConcurrentStreams = -1;
|
||
|
let currentStreamsCount = -1;
|
||
|
let optimalSession;
|
||
|
|
||
|
// We could just do this.sessions[normalizedOptions].find(...) but that isn't optimal.
|
||
|
// Additionally, we are looking for session which has biggest current pending streams count.
|
||
|
//
|
||
|
// |------------| |------------| |------------| |------------|
|
||
|
// | Session: A | | Session: B | | Session: C | | Session: D |
|
||
|
// | Pending: 5 |-| Pending: 8 |-| Pending: 9 |-| Pending: 4 |
|
||
|
// | Max: 10 | | Max: 10 | | Max: 9 | | Max: 5 |
|
||
|
// |------------| |------------| |------------| |------------|
|
||
|
// ^
|
||
|
// |
|
||
|
// pick this one --
|
||
|
//
|
||
|
for (let index = 0; index < sessions.length; index++) {
|
||
|
const session = sessions[index];
|
||
|
|
||
|
const sessionMaxConcurrentStreams = session.remoteSettings.maxConcurrentStreams;
|
||
|
|
||
|
if (sessionMaxConcurrentStreams < maxConcurrentStreams) {
|
||
|
break;
|
||
|
}
|
||
|
|
||
|
if (!session[kOriginSet].includes(normalizedOrigin)) {
|
||
|
continue;
|
||
|
}
|
||
|
|
||
|
const sessionCurrentStreamsCount = session[kCurrentStreamCount];
|
||
|
|
||
|
if (
|
||
|
sessionCurrentStreamsCount >= sessionMaxConcurrentStreams
|
||
|
|| session[kGracefullyClosing]
|
||
|
// Unfortunately the `close` event isn't called immediately,
|
||
|
// so `session.destroyed` is `true`, but `session.closed` is `false`.
|
||
|
|| session.destroyed
|
||
|
) {
|
||
|
continue;
|
||
|
}
|
||
|
|
||
|
// We only need set this once.
|
||
|
if (!optimalSession) {
|
||
|
maxConcurrentStreams = sessionMaxConcurrentStreams;
|
||
|
}
|
||
|
|
||
|
// Either get the session which has biggest current stream count or the lowest.
|
||
|
if (this._isBetterSession(sessionCurrentStreamsCount, currentStreamsCount)) {
|
||
|
optimalSession = session;
|
||
|
currentStreamsCount = sessionCurrentStreamsCount;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
if (optimalSession) {
|
||
|
this._accept(optimalSession, listeners, normalizedOrigin, options);
|
||
|
return;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
if (normalizedOptions in this.queue) {
|
||
|
if (normalizedOrigin in this.queue[normalizedOptions]) {
|
||
|
// There's already an item in the queue, just attach ourselves to it.
|
||
|
this.queue[normalizedOptions][normalizedOrigin].listeners.push(...listeners);
|
||
|
return;
|
||
|
}
|
||
|
} else {
|
||
|
this.queue[normalizedOptions] = {
|
||
|
[kLength]: 0
|
||
|
};
|
||
|
}
|
||
|
|
||
|
// The entry must be removed from the queue IMMEDIATELY when:
|
||
|
// 1. the session connects successfully,
|
||
|
// 2. an error occurs.
|
||
|
const removeFromQueue = () => {
|
||
|
// Our entry can be replaced. We cannot remove the new one.
|
||
|
if (normalizedOptions in this.queue && this.queue[normalizedOptions][normalizedOrigin] === entry) {
|
||
|
delete this.queue[normalizedOptions][normalizedOrigin];
|
||
|
|
||
|
if (--this.queue[normalizedOptions][kLength] === 0) {
|
||
|
delete this.queue[normalizedOptions];
|
||
|
}
|
||
|
}
|
||
|
};
|
||
|
|
||
|
// The main logic is here
|
||
|
const entry = async () => {
|
||
|
this._sessionCount++;
|
||
|
|
||
|
const name = `${normalizedOrigin}:${normalizedOptions}`;
|
||
|
let receivedSettings = false;
|
||
|
let socket;
|
||
|
|
||
|
try {
|
||
|
const computedOptions = {...options};
|
||
|
|
||
|
if (computedOptions.settings === undefined) {
|
||
|
computedOptions.settings = this.settings;
|
||
|
}
|
||
|
|
||
|
if (computedOptions.session === undefined) {
|
||
|
computedOptions.session = this.tlsSessionCache.get(name);
|
||
|
}
|
||
|
|
||
|
const createConnection = computedOptions.createConnection || this.createConnection;
|
||
|
|
||
|
// A hacky workaround to enable async `createConnection`
|
||
|
socket = await createConnection.call(this, origin, computedOptions);
|
||
|
computedOptions.createConnection = () => socket;
|
||
|
|
||
|
const session = http2$2.connect(origin, computedOptions);
|
||
|
session[kCurrentStreamCount] = 0;
|
||
|
session[kGracefullyClosing] = false;
|
||
|
|
||
|
// Node.js return https://false:443 instead of https://1.1.1.1:443
|
||
|
const getOriginSet = () => {
|
||
|
const {socket} = session;
|
||
|
|
||
|
let originSet;
|
||
|
if (socket.servername === false) {
|
||
|
socket.servername = socket.remoteAddress;
|
||
|
originSet = session.originSet;
|
||
|
socket.servername = false;
|
||
|
} else {
|
||
|
originSet = session.originSet;
|
||
|
}
|
||
|
|
||
|
return originSet;
|
||
|
};
|
||
|
|
||
|
const isFree = () => session[kCurrentStreamCount] < session.remoteSettings.maxConcurrentStreams;
|
||
|
|
||
|
session.socket.once('session', tlsSession => {
|
||
|
this.tlsSessionCache.set(name, tlsSession);
|
||
|
});
|
||
|
|
||
|
session.once('error', error => {
|
||
|
// Listeners are empty when the session successfully connected.
|
||
|
for (let index = 0; index < listeners.length; index++) {
|
||
|
listeners[index].reject(error);
|
||
|
}
|
||
|
|
||
|
// The connection got broken, purge the cache.
|
||
|
this.tlsSessionCache.delete(name);
|
||
|
});
|
||
|
|
||
|
session.setTimeout(this.timeout, () => {
|
||
|
// Terminates all streams owned by this session.
|
||
|
session.destroy();
|
||
|
});
|
||
|
|
||
|
session.once('close', () => {
|
||
|
this._sessionCount--;
|
||
|
|
||
|
if (receivedSettings) {
|
||
|
// Assumes session `close` is emitted after request `close`
|
||
|
this._emptySessionCount--;
|
||
|
|
||
|
// This cannot be moved to the stream logic,
|
||
|
// because there may be a session that hadn't made a single request.
|
||
|
const where = this.sessions[normalizedOptions];
|
||
|
|
||
|
if (where.length === 1) {
|
||
|
delete this.sessions[normalizedOptions];
|
||
|
} else {
|
||
|
where.splice(where.indexOf(session), 1);
|
||
|
}
|
||
|
} else {
|
||
|
// Broken connection
|
||
|
removeFromQueue();
|
||
|
|
||
|
const error = new Error('Session closed without receiving a SETTINGS frame');
|
||
|
error.code = 'HTTP2WRAPPER_NOSETTINGS';
|
||
|
|
||
|
for (let index = 0; index < listeners.length; index++) {
|
||
|
listeners[index].reject(error);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
// There may be another session awaiting.
|
||
|
this._processQueue();
|
||
|
});
|
||
|
|
||
|
// Iterates over the queue and processes listeners.
|
||
|
const processListeners = () => {
|
||
|
const queue = this.queue[normalizedOptions];
|
||
|
if (!queue) {
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
const originSet = session[kOriginSet];
|
||
|
|
||
|
for (let index = 0; index < originSet.length; index++) {
|
||
|
const origin = originSet[index];
|
||
|
|
||
|
if (origin in queue) {
|
||
|
const {listeners, completed} = queue[origin];
|
||
|
|
||
|
let index = 0;
|
||
|
|
||
|
// Prevents session overloading.
|
||
|
while (index < listeners.length && isFree()) {
|
||
|
// We assume `resolve(...)` calls `request(...)` *directly*,
|
||
|
// otherwise the session will get overloaded.
|
||
|
listeners[index].resolve(session);
|
||
|
|
||
|
index++;
|
||
|
}
|
||
|
|
||
|
queue[origin].listeners.splice(0, index);
|
||
|
|
||
|
if (queue[origin].listeners.length === 0 && !completed) {
|
||
|
delete queue[origin];
|
||
|
|
||
|
if (--queue[kLength] === 0) {
|
||
|
delete this.queue[normalizedOptions];
|
||
|
break;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
// We're no longer free, no point in continuing.
|
||
|
if (!isFree()) {
|
||
|
break;
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
};
|
||
|
|
||
|
// The Origin Set cannot shrink. No need to check if it suddenly became covered by another one.
|
||
|
session.on('origin', () => {
|
||
|
session[kOriginSet] = getOriginSet() || [];
|
||
|
session[kGracefullyClosing] = false;
|
||
|
closeSessionIfCovered(this.sessions[normalizedOptions], session);
|
||
|
|
||
|
if (session[kGracefullyClosing] || !isFree()) {
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
processListeners();
|
||
|
|
||
|
if (!isFree()) {
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
// Close covered sessions (if possible).
|
||
|
closeCoveredSessions(this.sessions[normalizedOptions], session);
|
||
|
});
|
||
|
|
||
|
session.once('remoteSettings', () => {
|
||
|
// The Agent could have been destroyed already.
|
||
|
if (entry.destroyed) {
|
||
|
const error = new Error('Agent has been destroyed');
|
||
|
|
||
|
for (let index = 0; index < listeners.length; index++) {
|
||
|
listeners[index].reject(error);
|
||
|
}
|
||
|
|
||
|
session.destroy();
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
// See https://github.com/nodejs/node/issues/38426
|
||
|
if (session.setLocalWindowSize) {
|
||
|
session.setLocalWindowSize(1024 * 1024 * 4); // 4 MB
|
||
|
}
|
||
|
|
||
|
session[kOriginSet] = getOriginSet() || [];
|
||
|
|
||
|
if (session.socket.encrypted) {
|
||
|
const mainOrigin = session[kOriginSet][0];
|
||
|
if (mainOrigin !== normalizedOrigin) {
|
||
|
const error = new Error(`Requested origin ${normalizedOrigin} does not match server ${mainOrigin}`);
|
||
|
|
||
|
for (let index = 0; index < listeners.length; index++) {
|
||
|
listeners[index].reject(error);
|
||
|
}
|
||
|
|
||
|
session.destroy();
|
||
|
return;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
removeFromQueue();
|
||
|
|
||
|
{
|
||
|
const where = this.sessions;
|
||
|
|
||
|
if (normalizedOptions in where) {
|
||
|
const sessions = where[normalizedOptions];
|
||
|
sessions.splice(getSortedIndex(sessions, session, compareSessions), 0, session);
|
||
|
} else {
|
||
|
where[normalizedOptions] = [session];
|
||
|
}
|
||
|
}
|
||
|
|
||
|
receivedSettings = true;
|
||
|
this._emptySessionCount++;
|
||
|
|
||
|
this.emit('session', session);
|
||
|
this._accept(session, listeners, normalizedOrigin, options);
|
||
|
|
||
|
if (session[kCurrentStreamCount] === 0 && this._emptySessionCount > this.maxEmptySessions) {
|
||
|
this.closeEmptySessions(this._emptySessionCount - this.maxEmptySessions);
|
||
|
}
|
||
|
|
||
|
// `session.remoteSettings.maxConcurrentStreams` might get increased
|
||
|
session.on('remoteSettings', () => {
|
||
|
if (!isFree()) {
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
processListeners();
|
||
|
|
||
|
if (!isFree()) {
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
// In case the Origin Set changes
|
||
|
closeCoveredSessions(this.sessions[normalizedOptions], session);
|
||
|
});
|
||
|
});
|
||
|
|
||
|
// Shim `session.request()` in order to catch all streams
|
||
|
session[kRequest] = session.request;
|
||
|
session.request = (headers, streamOptions) => {
|
||
|
if (session[kGracefullyClosing]) {
|
||
|
throw new Error('The session is gracefully closing. No new streams are allowed.');
|
||
|
}
|
||
|
|
||
|
const stream = session[kRequest](headers, streamOptions);
|
||
|
|
||
|
// The process won't exit until the session is closed or all requests are gone.
|
||
|
session.ref();
|
||
|
|
||
|
if (session[kCurrentStreamCount]++ === 0) {
|
||
|
this._emptySessionCount--;
|
||
|
}
|
||
|
|
||
|
stream.once('close', () => {
|
||
|
if (--session[kCurrentStreamCount] === 0) {
|
||
|
this._emptySessionCount++;
|
||
|
session.unref();
|
||
|
|
||
|
if (this._emptySessionCount > this.maxEmptySessions || session[kGracefullyClosing]) {
|
||
|
session.close();
|
||
|
return;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
if (session.destroyed || session.closed) {
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
if (isFree() && !closeSessionIfCovered(this.sessions[normalizedOptions], session)) {
|
||
|
closeCoveredSessions(this.sessions[normalizedOptions], session);
|
||
|
processListeners();
|
||
|
|
||
|
if (session[kCurrentStreamCount] === 0) {
|
||
|
this._processQueue();
|
||
|
}
|
||
|
}
|
||
|
});
|
||
|
|
||
|
return stream;
|
||
|
};
|
||
|
} catch (error) {
|
||
|
removeFromQueue();
|
||
|
this._sessionCount--;
|
||
|
|
||
|
for (let index = 0; index < listeners.length; index++) {
|
||
|
listeners[index].reject(error);
|
||
|
}
|
||
|
}
|
||
|
};
|
||
|
|
||
|
entry.listeners = listeners;
|
||
|
entry.completed = false;
|
||
|
entry.destroyed = false;
|
||
|
|
||
|
this.queue[normalizedOptions][normalizedOrigin] = entry;
|
||
|
this.queue[normalizedOptions][kLength]++;
|
||
|
this._processQueue();
|
||
|
});
|
||
|
}
|
||
|
|
||
|
request(origin, options, headers, streamOptions) {
|
||
|
return new Promise((resolve, reject) => {
|
||
|
this.getSession(origin, options, [{
|
||
|
reject,
|
||
|
resolve: session => {
|
||
|
try {
|
||
|
const stream = session.request(headers, streamOptions);
|
||
|
|
||
|
// Do not throw before `request(...)` has been awaited
|
||
|
delayAsyncDestroy$1(stream);
|
||
|
|
||
|
resolve(stream);
|
||
|
} catch (error) {
|
||
|
reject(error);
|
||
|
}
|
||
|
}
|
||
|
}]);
|
||
|
});
|
||
|
}
|
||
|
|
||
|
async createConnection(origin, options) {
|
||
|
return Agent.connect(origin, options);
|
||
|
}
|
||
|
|
||
|
static connect(origin, options) {
|
||
|
options.ALPNProtocols = ['h2'];
|
||
|
|
||
|
const port = origin.port || 443;
|
||
|
const host = origin.hostname;
|
||
|
|
||
|
if (typeof options.servername === 'undefined') {
|
||
|
options.servername = host;
|
||
|
}
|
||
|
|
||
|
const socket = tls$3.connect(port, host, options);
|
||
|
|
||
|
if (options.socket) {
|
||
|
socket._peername = {
|
||
|
family: undefined,
|
||
|
address: undefined,
|
||
|
port
|
||
|
};
|
||
|
}
|
||
|
|
||
|
return socket;
|
||
|
}
|
||
|
|
||
|
closeEmptySessions(maxCount = Number.POSITIVE_INFINITY) {
|
||
|
let closedCount = 0;
|
||
|
|
||
|
const {sessions} = this;
|
||
|
|
||
|
// eslint-disable-next-line guard-for-in
|
||
|
for (const key in sessions) {
|
||
|
const thisSessions = sessions[key];
|
||
|
|
||
|
for (let index = 0; index < thisSessions.length; index++) {
|
||
|
const session = thisSessions[index];
|
||
|
|
||
|
if (session[kCurrentStreamCount] === 0) {
|
||
|
closedCount++;
|
||
|
session.close();
|
||
|
|
||
|
if (closedCount >= maxCount) {
|
||
|
return closedCount;
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
return closedCount;
|
||
|
}
|
||
|
|
||
|
destroy(reason) {
|
||
|
const {sessions, queue} = this;
|
||
|
|
||
|
// eslint-disable-next-line guard-for-in
|
||
|
for (const key in sessions) {
|
||
|
const thisSessions = sessions[key];
|
||
|
|
||
|
for (let index = 0; index < thisSessions.length; index++) {
|
||
|
thisSessions[index].destroy(reason);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
// eslint-disable-next-line guard-for-in
|
||
|
for (const normalizedOptions in queue) {
|
||
|
const entries = queue[normalizedOptions];
|
||
|
|
||
|
// eslint-disable-next-line guard-for-in
|
||
|
for (const normalizedOrigin in entries) {
|
||
|
entries[normalizedOrigin].destroyed = true;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
// New requests should NOT attach to destroyed sessions
|
||
|
this.queue = {};
|
||
|
this.tlsSessionCache.clear();
|
||
|
}
|
||
|
|
||
|
get emptySessionCount() {
|
||
|
return this._emptySessionCount;
|
||
|
}
|
||
|
|
||
|
get pendingSessionCount() {
|
||
|
return this._sessionCount - this._emptySessionCount;
|
||
|
}
|
||
|
|
||
|
get sessionCount() {
|
||
|
return this._sessionCount;
|
||
|
}
|
||
|
};
|
||
|
|
||
|
Agent$4.kCurrentStreamCount = kCurrentStreamCount;
|
||
|
Agent$4.kGracefullyClosing = kGracefullyClosing;
|
||
|
|
||
|
var agent = {
|
||
|
Agent: Agent$4,
|
||
|
globalAgent: new Agent$4()
|
||
|
};
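// The http2-wrapper Agent exported above pools HTTP/2 sessions per normalized
// connection options, queues session creation per origin, reuses a session for every
// origin it advertises (RFC 8336 ORIGIN frames), and gracefully closes sessions whose
// origin set is covered by another session.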
|
||
|
|
||
|
const {Readable} = require$$0$3;
|
||
|
|
||
|
let IncomingMessage$2 = class IncomingMessage extends Readable {
|
||
|
constructor(socket, highWaterMark) {
|
||
|
super({
|
||
|
emitClose: false,
|
||
|
autoDestroy: true,
|
||
|
highWaterMark
|
||
|
});
|
||
|
|
||
|
this.statusCode = null;
|
||
|
this.statusMessage = '';
|
||
|
this.httpVersion = '2.0';
|
||
|
this.httpVersionMajor = 2;
|
||
|
this.httpVersionMinor = 0;
|
||
|
this.headers = {};
|
||
|
this.trailers = {};
|
||
|
this.req = null;
|
||
|
|
||
|
this.aborted = false;
|
||
|
this.complete = false;
|
||
|
this.upgrade = null;
|
||
|
|
||
|
this.rawHeaders = [];
|
||
|
this.rawTrailers = [];
|
||
|
|
||
|
this.socket = socket;
|
||
|
|
||
|
this._dumped = false;
|
||
|
}
|
||
|
|
||
|
get connection() {
|
||
|
return this.socket;
|
||
|
}
|
||
|
|
||
|
set connection(value) {
|
||
|
this.socket = value;
|
||
|
}
|
||
|
|
||
|
_destroy(error, callback) {
|
||
|
if (!this.readableEnded) {
|
||
|
this.aborted = true;
|
||
|
}
|
||
|
|
||
|
// See https://github.com/nodejs/node/issues/35303
|
||
|
callback();
|
||
|
|
||
|
this.req._request.destroy(error);
|
||
|
}
|
||
|
|
||
|
setTimeout(ms, callback) {
|
||
|
this.req.setTimeout(ms, callback);
|
||
|
return this;
|
||
|
}
|
||
|
|
||
|
_dump() {
|
||
|
if (!this._dumped) {
|
||
|
this._dumped = true;
|
||
|
|
||
|
this.removeAllListeners('data');
|
||
|
this.resume();
|
||
|
}
|
||
|
}
|
||
|
|
||
|
_read() {
|
||
|
if (this.req) {
|
||
|
this.req._request.resume();
|
||
|
}
|
||
|
}
|
||
|
};
|
||
|
|
||
|
var incomingMessage = IncomingMessage$2;
|
||
|
|
||
|
var proxyEvents$1 = (from, to, events) => {
|
||
|
for (const event of events) {
|
||
|
from.on(event, (...args) => to.emit(event, ...args));
|
||
|
}
|
||
|
};
|
||
|
|
||
|
var errors = {exports: {}};
|
||
|
|
||
|
(function (module) {
|
||
|
/* istanbul ignore file: https://github.com/nodejs/node/blob/master/lib/internal/errors.js */
|
||
|
|
||
|
const makeError = (Base, key, getMessage) => {
|
||
|
module.exports[key] = class NodeError extends Base {
|
||
|
constructor(...args) {
|
||
|
super(typeof getMessage === 'string' ? getMessage : getMessage(args));
|
||
|
this.name = `${super.name} [${key}]`;
|
||
|
this.code = key;
|
||
|
}
|
||
|
};
|
||
|
};
|
||
|
|
||
|
makeError(TypeError, 'ERR_INVALID_ARG_TYPE', args => {
|
||
|
const type = args[0].includes('.') ? 'property' : 'argument';
|
||
|
|
||
|
let valid = args[1];
|
||
|
const isManyTypes = Array.isArray(valid);
|
||
|
|
||
|
if (isManyTypes) {
|
||
|
valid = `${valid.slice(0, -1).join(', ')} or ${valid.slice(-1)}`;
|
||
|
}
|
||
|
|
||
|
return `The "${args[0]}" ${type} must be ${isManyTypes ? 'one of' : 'of'} type ${valid}. Received ${typeof args[2]}`;
|
||
|
});
|
||
|
|
||
|
makeError(TypeError, 'ERR_INVALID_PROTOCOL', args =>
|
||
|
`Protocol "${args[0]}" not supported. Expected "${args[1]}"`
|
||
|
);
|
||
|
|
||
|
makeError(Error, 'ERR_HTTP_HEADERS_SENT', args =>
|
||
|
`Cannot ${args[0]} headers after they are sent to the client`
|
||
|
);
|
||
|
|
||
|
makeError(TypeError, 'ERR_INVALID_HTTP_TOKEN', args =>
|
||
|
`${args[0]} must be a valid HTTP token [${args[1]}]`
|
||
|
);
|
||
|
|
||
|
makeError(TypeError, 'ERR_HTTP_INVALID_HEADER_VALUE', args =>
|
||
|
`Invalid value "${args[0]} for header "${args[1]}"`
|
||
|
);
|
||
|
|
||
|
makeError(TypeError, 'ERR_INVALID_CHAR', args =>
|
||
|
`Invalid character in ${args[0]} [${args[1]}]`
|
||
|
);
|
||
|
|
||
|
makeError(
|
||
|
Error,
|
||
|
'ERR_HTTP2_NO_SOCKET_MANIPULATION',
|
||
|
'HTTP/2 sockets should not be directly manipulated (e.g. read and written)'
|
||
|
);
|
||
|
} (errors));
|
||
|
|
||
|
var errorsExports = errors.exports;
|
||
|
|
||
|
var isRequestPseudoHeader$1 = header => {
|
||
|
switch (header) {
|
||
|
case ':method':
|
||
|
case ':scheme':
|
||
|
case ':authority':
|
||
|
case ':path':
|
||
|
return true;
|
||
|
default:
|
||
|
return false;
|
||
|
}
|
||
|
};
|
||
|
|
||
|
const {ERR_INVALID_HTTP_TOKEN} = errorsExports;
|
||
|
const isRequestPseudoHeader = isRequestPseudoHeader$1;
|
||
|
|
||
|
const isValidHttpToken = /^[\^`\-\w!#$%&*+.|~]+$/;
|
||
|
|
||
|
var validateHeaderName$2 = name => {
|
||
|
if (typeof name !== 'string' || (!isValidHttpToken.test(name) && !isRequestPseudoHeader(name))) {
|
||
|
throw new ERR_INVALID_HTTP_TOKEN('Header name', name);
|
||
|
}
|
||
|
};
|
||
|
|
||
|
const {
|
||
|
ERR_HTTP_INVALID_HEADER_VALUE,
|
||
|
ERR_INVALID_CHAR
|
||
|
} = errorsExports;
|
||
|
|
||
|
const isInvalidHeaderValue = /[^\t\u0020-\u007E\u0080-\u00FF]/;
|
||
|
|
||
|
var validateHeaderValue$2 = (name, value) => {
|
||
|
if (typeof value === 'undefined') {
|
||
|
throw new ERR_HTTP_INVALID_HEADER_VALUE(value, name);
|
||
|
}
|
||
|
|
||
|
if (isInvalidHeaderValue.test(value)) {
|
||
|
throw new ERR_INVALID_CHAR('header content', name);
|
||
|
}
|
||
|
};
|
||
|
|
||
|
const {ERR_HTTP2_NO_SOCKET_MANIPULATION} = errorsExports;
|
||
|
|
||
|
/* istanbul ignore file */
|
||
|
/* https://github.com/nodejs/node/blob/6eec858f34a40ffa489c1ec54bb24da72a28c781/lib/internal/http2/compat.js#L195-L272 */
|
||
|
|
||
|
const proxySocketHandler$1 = {
|
||
|
has(stream, property) {
|
||
|
// Replaced [kSocket] with .socket
|
||
|
const reference = stream.session === undefined ? stream : stream.session.socket;
|
||
|
return (property in stream) || (property in reference);
|
||
|
},
|
||
|
|
||
|
get(stream, property) {
|
||
|
switch (property) {
|
||
|
case 'on':
|
||
|
case 'once':
|
||
|
case 'end':
|
||
|
case 'emit':
|
||
|
case 'destroy':
|
||
|
return stream[property].bind(stream);
|
||
|
case 'writable':
|
||
|
case 'destroyed':
|
||
|
return stream[property];
|
||
|
case 'readable':
|
||
|
if (stream.destroyed) {
|
||
|
return false;
|
||
|
}
|
||
|
|
||
|
return stream.readable;
|
||
|
case 'setTimeout': {
|
||
|
const {session} = stream;
|
||
|
if (session !== undefined) {
|
||
|
return session.setTimeout.bind(session);
|
||
|
}
|
||
|
|
||
|
return stream.setTimeout.bind(stream);
|
||
|
}
|
||
|
|
||
|
case 'write':
|
||
|
case 'read':
|
||
|
case 'pause':
|
||
|
case 'resume':
|
||
|
throw new ERR_HTTP2_NO_SOCKET_MANIPULATION();
|
||
|
default: {
|
||
|
// Replaced [kSocket] with .socket
|
||
|
const reference = stream.session === undefined ? stream : stream.session.socket;
|
||
|
const value = reference[property];
|
||
|
|
||
|
return typeof value === 'function' ? value.bind(reference) : value;
|
||
|
}
|
||
|
}
|
||
|
},
|
||
|
|
||
|
getPrototypeOf(stream) {
|
||
|
if (stream.session !== undefined) {
|
||
|
// Replaced [kSocket] with .socket
|
||
|
return Reflect.getPrototypeOf(stream.session.socket);
|
||
|
}
|
||
|
|
||
|
return Reflect.getPrototypeOf(stream);
|
||
|
},
|
||
|
|
||
|
set(stream, property, value) {
|
||
|
switch (property) {
|
||
|
case 'writable':
|
||
|
case 'readable':
|
||
|
case 'destroyed':
|
||
|
case 'on':
|
||
|
case 'once':
|
||
|
case 'end':
|
||
|
case 'emit':
|
||
|
case 'destroy':
|
||
|
stream[property] = value;
|
||
|
return true;
|
||
|
case 'setTimeout': {
|
||
|
const {session} = stream;
|
||
|
if (session === undefined) {
|
||
|
stream.setTimeout = value;
|
||
|
} else {
|
||
|
session.setTimeout = value;
|
||
|
}
|
||
|
|
||
|
return true;
|
||
|
}
|
||
|
|
||
|
case 'write':
|
||
|
case 'read':
|
||
|
case 'pause':
|
||
|
case 'resume':
|
||
|
throw new ERR_HTTP2_NO_SOCKET_MANIPULATION();
|
||
|
default: {
|
||
|
// Replaced [kSocket] with .socket
|
||
|
const reference = stream.session === undefined ? stream : stream.session.socket;
|
||
|
reference[property] = value;
|
||
|
return true;
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
};
|
||
|
|
||
|
var proxySocketHandler_1 = proxySocketHandler$1;
|
||
|
|
||
|
// See https://github.com/facebook/jest/issues/2549
|
||
|
// eslint-disable-next-line node/prefer-global/url
|
||
|
const {URL: URL$3, urlToHttpOptions: urlToHttpOptions$1} = require$$0$5;
|
||
|
const http2$1 = require$$3;
|
||
|
const {Writable} = require$$0$3;
|
||
|
const {Agent: Agent$3, globalAgent: globalAgent$4} = agent;
|
||
|
const IncomingMessage$1 = incomingMessage;
|
||
|
const proxyEvents = proxyEvents$1;
|
||
|
const {
|
||
|
ERR_INVALID_ARG_TYPE,
|
||
|
ERR_INVALID_PROTOCOL,
|
||
|
ERR_HTTP_HEADERS_SENT
|
||
|
} = errorsExports;
|
||
|
const validateHeaderName$1 = validateHeaderName$2;
|
||
|
const validateHeaderValue$1 = validateHeaderValue$2;
|
||
|
const proxySocketHandler = proxySocketHandler_1;
|
||
|
|
||
|
const {
|
||
|
HTTP2_HEADER_STATUS,
|
||
|
HTTP2_HEADER_METHOD,
|
||
|
HTTP2_HEADER_PATH,
|
||
|
HTTP2_HEADER_AUTHORITY,
|
||
|
HTTP2_METHOD_CONNECT
|
||
|
} = http2$1.constants;
|
||
|
|
||
|
const kHeaders = Symbol('headers');
|
||
|
const kOrigin = Symbol('origin');
|
||
|
const kSession = Symbol('session');
|
||
|
const kOptions = Symbol('options');
|
||
|
const kFlushedHeaders = Symbol('flushedHeaders');
|
||
|
const kJobs = Symbol('jobs');
|
||
|
const kPendingAgentPromise = Symbol('pendingAgentPromise');
|
||
|
|
||
|
let ClientRequest$1 = class ClientRequest extends Writable {
|
||
|
constructor(input, options, callback) {
|
||
|
super({
|
||
|
autoDestroy: false,
|
||
|
emitClose: false
|
||
|
});
|
||
|
|
||
|
if (typeof input === 'string') {
|
||
|
input = urlToHttpOptions$1(new URL$3(input));
|
||
|
} else if (input instanceof URL$3) {
|
||
|
input = urlToHttpOptions$1(input);
|
||
|
} else {
|
||
|
input = {...input};
|
||
|
}
|
||
|
|
||
|
if (typeof options === 'function' || options === undefined) {
|
||
|
// (options, callback)
|
||
|
callback = options;
|
||
|
options = input;
|
||
|
} else {
|
||
|
// (input, options, callback)
|
||
|
options = Object.assign(input, options);
|
||
|
}
|
||
|
|
||
|
if (options.h2session) {
|
||
|
this[kSession] = options.h2session;
|
||
|
|
||
|
if (this[kSession].destroyed) {
|
||
|
throw new Error('The session has been closed already');
|
||
|
}
|
||
|
|
||
|
this.protocol = this[kSession].socket.encrypted ? 'https:' : 'http:';
|
||
|
} else if (options.agent === false) {
|
||
|
this.agent = new Agent$3({maxEmptySessions: 0});
|
||
|
} else if (typeof options.agent === 'undefined' || options.agent === null) {
|
||
|
this.agent = globalAgent$4;
|
||
|
} else if (typeof options.agent.request === 'function') {
|
||
|
this.agent = options.agent;
|
||
|
} else {
|
||
|
throw new ERR_INVALID_ARG_TYPE('options.agent', ['http2wrapper.Agent-like Object', 'undefined', 'false'], options.agent);
|
||
|
}
|
||
|
|
||
|
if (this.agent) {
|
||
|
this.protocol = this.agent.protocol;
|
||
|
}
|
||
|
|
||
|
if (options.protocol && options.protocol !== this.protocol) {
|
||
|
throw new ERR_INVALID_PROTOCOL(options.protocol, this.protocol);
|
||
|
}
|
||
|
|
||
|
if (!options.port) {
|
||
|
options.port = options.defaultPort || (this.agent && this.agent.defaultPort) || 443;
|
||
|
}
|
||
|
|
||
|
options.host = options.hostname || options.host || 'localhost';
|
||
|
|
||
|
// Unused
|
||
|
delete options.hostname;
|
||
|
|
||
|
const {timeout} = options;
|
||
|
options.timeout = undefined;
|
||
|
|
||
|
this[kHeaders] = Object.create(null);
|
||
|
this[kJobs] = [];
|
||
|
|
||
|
this[kPendingAgentPromise] = undefined;
|
||
|
|
||
|
this.socket = null;
|
||
|
this.connection = null;
|
||
|
|
||
|
this.method = options.method || 'GET';
|
||
|
|
||
|
if (!(this.method === 'CONNECT' && (options.path === '/' || options.path === undefined))) {
|
||
|
this.path = options.path;
|
||
|
}
|
||
|
|
||
|
this.res = null;
|
||
|
this.aborted = false;
|
||
|
this.reusedSocket = false;
|
||
|
|
||
|
const {headers} = options;
|
||
|
if (headers) {
|
||
|
// eslint-disable-next-line guard-for-in
|
||
|
for (const header in headers) {
|
||
|
this.setHeader(header, headers[header]);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
if (options.auth && !('authorization' in this[kHeaders])) {
|
||
|
this[kHeaders].authorization = 'Basic ' + Buffer.from(options.auth).toString('base64');
|
||
|
}
|
||
|
|
||
|
options.session = options.tlsSession;
|
||
|
options.path = options.socketPath;
|
||
|
|
||
|
this[kOptions] = options;
|
||
|
|
||
|
// Clients that generate HTTP/2 requests directly SHOULD use the :authority pseudo-header field instead of the Host header field.
|
||
|
this[kOrigin] = new URL$3(`${this.protocol}//${options.servername || options.host}:${options.port}`);
|
||
|
|
||
|
// A socket is being reused
|
||
|
const reuseSocket = options._reuseSocket;
|
||
|
if (reuseSocket) {
|
||
|
options.createConnection = (...args) => {
|
||
|
if (reuseSocket.destroyed) {
|
||
|
return this.agent.createConnection(...args);
|
||
|
}
|
||
|
|
||
|
return reuseSocket;
|
||
|
};
|
||
|
|
||
|
// eslint-disable-next-line promise/prefer-await-to-then
|
||
|
this.agent.getSession(this[kOrigin], this[kOptions]).catch(() => {});
|
||
|
}
|
||
|
|
||
|
if (timeout) {
|
||
|
this.setTimeout(timeout);
|
||
|
}
|
||
|
|
||
|
if (callback) {
|
||
|
this.once('response', callback);
|
||
|
}
|
||
|
|
||
|
this[kFlushedHeaders] = false;
|
||
|
}
|
||
|
|
||
|
get method() {
|
||
|
return this[kHeaders][HTTP2_HEADER_METHOD];
|
||
|
}
|
||
|
|
||
|
set method(value) {
|
||
|
if (value) {
|
||
|
this[kHeaders][HTTP2_HEADER_METHOD] = value.toUpperCase();
|
||
|
}
|
||
|
}
|
||
|
|
||
|
get path() {
|
||
|
const header = this.method === 'CONNECT' ? HTTP2_HEADER_AUTHORITY : HTTP2_HEADER_PATH;
|
||
|
|
||
|
return this[kHeaders][header];
|
||
|
}
|
||
|
|
||
|
set path(value) {
|
||
|
if (value) {
|
||
|
const header = this.method === 'CONNECT' ? HTTP2_HEADER_AUTHORITY : HTTP2_HEADER_PATH;
|
||
|
|
||
|
this[kHeaders][header] = value;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
get host() {
|
||
|
return this[kOrigin].hostname;
|
||
|
}
|
||
|
|
||
|
set host(_value) {
|
||
|
// Do nothing as this is read only.
|
||
|
}
|
||
|
|
||
|
get _mustNotHaveABody() {
|
||
|
return this.method === 'GET' || this.method === 'HEAD' || this.method === 'DELETE';
|
||
|
}
|
||
|
|
||
|
_write(chunk, encoding, callback) {
|
||
|
// https://github.com/nodejs/node/blob/654df09ae0c5e17d1b52a900a545f0664d8c7627/lib/internal/http2/util.js#L148-L156
|
||
|
if (this._mustNotHaveABody) {
|
||
|
callback(new Error('The GET, HEAD and DELETE methods must NOT have a body'));
|
||
|
/* istanbul ignore next: Node.js 12 throws directly */
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
this.flushHeaders();
|
||
|
|
||
|
const callWrite = () => this._request.write(chunk, encoding, callback);
|
||
|
if (this._request) {
|
||
|
callWrite();
|
||
|
} else {
|
||
|
this[kJobs].push(callWrite);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
_final(callback) {
|
||
|
this.flushHeaders();
|
||
|
|
||
|
const callEnd = () => {
|
||
|
// For GET, HEAD and DELETE and CONNECT
|
||
|
if (this._mustNotHaveABody || this.method === 'CONNECT') {
|
||
|
callback();
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
this._request.end(callback);
|
||
|
};
|
||
|
|
||
|
if (this._request) {
|
||
|
callEnd();
|
||
|
} else {
|
||
|
this[kJobs].push(callEnd);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
abort() {
|
||
|
if (this.res && this.res.complete) {
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
if (!this.aborted) {
|
||
|
process.nextTick(() => this.emit('abort'));
|
||
|
}
|
||
|
|
||
|
this.aborted = true;
|
||
|
|
||
|
this.destroy();
|
||
|
}
|
||
|
|
||
|
async _destroy(error, callback) {
|
||
|
if (this.res) {
|
||
|
this.res._dump();
|
||
|
}
|
||
|
|
||
|
if (this._request) {
|
||
|
this._request.destroy();
|
||
|
} else {
|
||
|
process.nextTick(() => {
|
||
|
this.emit('close');
|
||
|
});
|
||
|
}
|
||
|
|
||
|
try {
|
||
|
await this[kPendingAgentPromise];
|
||
|
} catch (internalError) {
|
||
|
if (this.aborted) {
|
||
|
error = internalError;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
callback(error);
|
||
|
}
|
||
|
|
||
|
async flushHeaders() {
|
||
|
if (this[kFlushedHeaders] || this.destroyed) {
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
this[kFlushedHeaders] = true;
|
||
|
|
||
|
        const isConnectMethod = this.method === HTTP2_METHOD_CONNECT;

        // The real magic is here
        const onStream = stream => {
            this._request = stream;

            if (this.destroyed) {
                stream.destroy();
                return;
            }

            // Forwards `timeout`, `continue`, `close` and `error` events to this instance.
            if (!isConnectMethod) {
                // TODO: Should we proxy `close` here?
                proxyEvents(stream, this, ['timeout', 'continue']);
            }

            stream.once('error', error => {
                this.destroy(error);
            });

            stream.once('aborted', () => {
                const {res} = this;
                if (res) {
                    res.aborted = true;
                    res.emit('aborted');
                    res.destroy();
                } else {
                    this.destroy(new Error('The server aborted the HTTP/2 stream'));
                }
            });

            const onResponse = (headers, flags, rawHeaders) => {
                // If we were to emit raw request stream, it would be as fast as the native approach.
                // Note that wrapping the raw stream in a Proxy instance won't improve the performance (already tested it).
                const response = new IncomingMessage$1(this.socket, stream.readableHighWaterMark);
                this.res = response;

                // Undocumented, but it is used by `cacheable-request`
                response.url = `${this[kOrigin].origin}${this.path}`;

                response.req = this;
                response.statusCode = headers[HTTP2_HEADER_STATUS];
                response.headers = headers;
                response.rawHeaders = rawHeaders;

                response.once('end', () => {
                    response.complete = true;

                    // Has no effect, just be consistent with the Node.js behavior
                    response.socket = null;
                    response.connection = null;
                });

                if (isConnectMethod) {
                    response.upgrade = true;

                    // The HTTP1 API says the socket is detached here,
                    // but we can't do that so we pass the original HTTP2 request.
                    if (this.emit('connect', response, stream, Buffer.alloc(0))) {
                        this.emit('close');
                    } else {
                        // No listeners attached, destroy the original request.
                        stream.destroy();
                    }
                } else {
                    // Forwards data
                    stream.on('data', chunk => {
                        if (!response._dumped && !response.push(chunk)) {
                            stream.pause();
                        }
                    });

                    stream.once('end', () => {
                        if (!this.aborted) {
                            response.push(null);
                        }
                    });

                    if (!this.emit('response', response)) {
                        // No listeners attached, dump the response.
                        response._dump();
                    }
                }
            };

            // This event tells we are ready to listen for the data.
            stream.once('response', onResponse);

            // Emits `information` event
            stream.once('headers', headers => this.emit('information', {statusCode: headers[HTTP2_HEADER_STATUS]}));

            stream.once('trailers', (trailers, flags, rawTrailers) => {
                const {res} = this;

                // https://github.com/nodejs/node/issues/41251
                if (res === null) {
                    onResponse(trailers, flags, rawTrailers);
                    return;
                }

                // Assigns trailers to the response object.
                res.trailers = trailers;
                res.rawTrailers = rawTrailers;
            });

            stream.once('close', () => {
                const {aborted, res} = this;
                if (res) {
                    if (aborted) {
                        res.aborted = true;
                        res.emit('aborted');
                        res.destroy();
                    }

                    const finish = () => {
                        res.emit('close');

                        this.destroy();
                        this.emit('close');
                    };

                    if (res.readable) {
                        res.once('end', finish);
                    } else {
                        finish();
                    }

                    return;
                }

                if (!this.destroyed) {
                    this.destroy(new Error('The HTTP/2 stream has been early terminated'));
                    this.emit('close');
                    return;
                }

                this.destroy();
                this.emit('close');
            });

            this.socket = new Proxy(stream, proxySocketHandler);

            for (const job of this[kJobs]) {
                job();
            }

            this[kJobs].length = 0;

            this.emit('socket', this.socket);
        };

        if (!(HTTP2_HEADER_AUTHORITY in this[kHeaders]) && !isConnectMethod) {
            this[kHeaders][HTTP2_HEADER_AUTHORITY] = this[kOrigin].host;
        }

        // Makes a HTTP2 request
        if (this[kSession]) {
            try {
                onStream(this[kSession].request(this[kHeaders]));
            } catch (error) {
                this.destroy(error);
            }
        } else {
            this.reusedSocket = true;

            try {
                const promise = this.agent.request(this[kOrigin], this[kOptions], this[kHeaders]);
                this[kPendingAgentPromise] = promise;

                onStream(await promise);

                this[kPendingAgentPromise] = false;
            } catch (error) {
                this[kPendingAgentPromise] = false;

                this.destroy(error);
            }
        }
    }

    get connection() {
        return this.socket;
    }

    set connection(value) {
        this.socket = value;
    }

    getHeaderNames() {
        return Object.keys(this[kHeaders]);
    }

    hasHeader(name) {
        if (typeof name !== 'string') {
            throw new ERR_INVALID_ARG_TYPE('name', 'string', name);
        }

        return Boolean(this[kHeaders][name.toLowerCase()]);
    }

    getHeader(name) {
        if (typeof name !== 'string') {
            throw new ERR_INVALID_ARG_TYPE('name', 'string', name);
        }

        return this[kHeaders][name.toLowerCase()];
    }

    get headersSent() {
        return this[kFlushedHeaders];
    }

    removeHeader(name) {
        if (typeof name !== 'string') {
            throw new ERR_INVALID_ARG_TYPE('name', 'string', name);
        }

        if (this.headersSent) {
            throw new ERR_HTTP_HEADERS_SENT('remove');
        }

        delete this[kHeaders][name.toLowerCase()];
    }

    setHeader(name, value) {
        if (this.headersSent) {
            throw new ERR_HTTP_HEADERS_SENT('set');
        }

        validateHeaderName$1(name);
        validateHeaderValue$1(name, value);

        const lowercased = name.toLowerCase();

        if (lowercased === 'connection') {
            if (value.toLowerCase() === 'keep-alive') {
                return;
            }

            throw new Error(`Invalid 'connection' header: ${value}`);
        }

        if (lowercased === 'host' && this.method === 'CONNECT') {
            this[kHeaders][HTTP2_HEADER_AUTHORITY] = value;
        } else {
            this[kHeaders][lowercased] = value;
        }
    }

    setNoDelay() {
        // HTTP2 sockets cannot be malformed, do nothing.
    }

    setSocketKeepAlive() {
        // HTTP2 sockets cannot be malformed, do nothing.
    }

    setTimeout(ms, callback) {
        const applyTimeout = () => this._request.setTimeout(ms, callback);

        if (this._request) {
            applyTimeout();
        } else {
            this[kJobs].push(applyTimeout);
        }

        return this;
    }

    get maxHeadersCount() {
        if (!this.destroyed && this._request) {
            return this._request.session.localSettings.maxHeaderListSize;
        }

        return undefined;
    }

    set maxHeadersCount(_value) {
        // Updating HTTP2 settings would affect all requests, do nothing.
    }
};

var clientRequest = ClientRequest$1;

var auto$1 = {exports: {}};

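/*
 * Illustrative sketch (not part of the upstream bundle): `ClientRequest$1`
 * above mirrors the Node.js `http.ClientRequest` API on top of an HTTP/2
 * stream, so callers drive it in roughly the same way. The URL here is a
 * placeholder:
 *
 *   const req = new ClientRequest$1('https://example.com', {}, response => {
 *     response.on('data', chunk => {});
 *   });
 *   req.end();
 */
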
const tls$2 = require$$1$1;

var resolveAlpn = (options = {}, connect = tls$2.connect) => new Promise((resolve, reject) => {
    let timeout = false;

    let socket;

    const callback = async () => {
        await socketPromise;

        socket.off('timeout', onTimeout);
        socket.off('error', reject);

        if (options.resolveSocket) {
            resolve({alpnProtocol: socket.alpnProtocol, socket, timeout});

            if (timeout) {
                await Promise.resolve();
                socket.emit('timeout');
            }
        } else {
            socket.destroy();
            resolve({alpnProtocol: socket.alpnProtocol, timeout});
        }
    };

    const onTimeout = async () => {
        timeout = true;
        callback();
    };

    const socketPromise = (async () => {
        try {
            socket = await connect(options, callback);

            socket.on('error', reject);
            socket.once('timeout', onTimeout);
        } catch (error) {
            reject(error);
        }
    })();
});

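/*
 * Rough usage sketch for the ALPN helper above (the hostname is a placeholder):
 *
 *   const {alpnProtocol, timeout} = await resolveAlpn({
 *     host: 'example.com',
 *     port: 443,
 *     ALPNProtocols: ['h2', 'http/1.1'],
 *   });
 *   // alpnProtocol is 'h2' when the server negotiates HTTP/2, otherwise
 *   // 'http/1.1'; `timeout` reports whether the socket timed out.
 */
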
const {isIP} = require$$0$6;
const assert = require$$5;

const getHost = host => {
    if (host[0] === '[') {
        const idx = host.indexOf(']');

        assert(idx !== -1);
        return host.slice(1, idx);
    }

    const idx = host.indexOf(':');
    if (idx === -1) {
        return host;
    }

    return host.slice(0, idx);
};

var calculateServerName$1 = host => {
    const servername = getHost(host);

    if (isIP(servername)) {
        return '';
    }

    return servername;
};

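/*
 * The helper above derives the TLS SNI servername from a Host header value:
 * it strips an optional port (and IPv6 brackets) and returns an empty string
 * for IP literals, since SNI must not carry IP addresses. For illustration:
 *
 *   calculateServerName$1('example.com:443'); //=> 'example.com'
 *   calculateServerName$1('[::1]:443');       //=> ''
 */
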
// See https://github.com/facebook/jest/issues/2549
// eslint-disable-next-line node/prefer-global/url
const {URL: URL$2, urlToHttpOptions} = require$$0$5;
const http$2 = require$$1$2;
const https$2 = require$$2$1;
const resolveALPN = resolveAlpn;
const QuickLRU = quickLru;
const {Agent: Agent$2, globalAgent: globalAgent$3} = agent;
const Http2ClientRequest = clientRequest;
const calculateServerName = calculateServerName$1;
const delayAsyncDestroy = delayAsyncDestroy$2;

const cache = new QuickLRU({maxSize: 100});
const queue = new Map();

const installSocket = (agent, socket, options) => {
    socket._httpMessage = {shouldKeepAlive: true};

    const onFree = () => {
        agent.emit('free', socket, options);
    };

    socket.on('free', onFree);

    const onClose = () => {
        agent.removeSocket(socket, options);
    };

    socket.on('close', onClose);

    const onTimeout = () => {
        const {freeSockets} = agent;

        for (const sockets of Object.values(freeSockets)) {
            if (sockets.includes(socket)) {
                socket.destroy();
                return;
            }
        }
    };

    socket.on('timeout', onTimeout);

    const onRemove = () => {
        agent.removeSocket(socket, options);
        socket.off('close', onClose);
        socket.off('free', onFree);
        socket.off('timeout', onTimeout);
        socket.off('agentRemove', onRemove);
    };

    socket.on('agentRemove', onRemove);

    agent.emit('free', socket, options);
};

const createResolveProtocol = (cache, queue = new Map(), connect = undefined) => {
    return async options => {
        const name = `${options.host}:${options.port}:${options.ALPNProtocols.sort()}`;

        if (!cache.has(name)) {
            if (queue.has(name)) {
                const result = await queue.get(name);
                return {alpnProtocol: result.alpnProtocol};
            }

            const {path} = options;
            options.path = options.socketPath;

            const resultPromise = resolveALPN(options, connect);
            queue.set(name, resultPromise);

            try {
                const result = await resultPromise;

                cache.set(name, result.alpnProtocol);
                queue.delete(name);

                options.path = path;

                return result;
            } catch (error) {
                queue.delete(name);

                options.path = path;

                throw error;
            }
        }

        return {alpnProtocol: cache.get(name)};
    };
};

const defaultResolveProtocol = createResolveProtocol(cache, queue);

auto$1.exports = async (input, options, callback) => {
    if (typeof input === 'string') {
        input = urlToHttpOptions(new URL$2(input));
    } else if (input instanceof URL$2) {
        input = urlToHttpOptions(input);
    } else {
        input = {...input};
    }

    if (typeof options === 'function' || options === undefined) {
        // (options, callback)
        callback = options;
        options = input;
    } else {
        // (input, options, callback)
        options = Object.assign(input, options);
    }

    options.ALPNProtocols = options.ALPNProtocols || ['h2', 'http/1.1'];

    if (!Array.isArray(options.ALPNProtocols) || options.ALPNProtocols.length === 0) {
        throw new Error('The `ALPNProtocols` option must be an Array with at least one entry');
    }

    options.protocol = options.protocol || 'https:';
    const isHttps = options.protocol === 'https:';

    options.host = options.hostname || options.host || 'localhost';
    options.session = options.tlsSession;
    options.servername = options.servername || calculateServerName((options.headers && options.headers.host) || options.host);
    options.port = options.port || (isHttps ? 443 : 80);
    options._defaultAgent = isHttps ? https$2.globalAgent : http$2.globalAgent;

    const resolveProtocol = options.resolveProtocol || defaultResolveProtocol;

    // Note: We don't support `h2session` here

    let {agent} = options;
    if (agent !== undefined && agent !== false && agent.constructor.name !== 'Object') {
        throw new Error('The `options.agent` can be only an object `http`, `https` or `http2` properties');
    }

    if (isHttps) {
        options.resolveSocket = true;

        let {socket, alpnProtocol, timeout} = await resolveProtocol(options);

        if (timeout) {
            if (socket) {
                socket.destroy();
            }

            const error = new Error(`Timed out resolving ALPN: ${options.timeout} ms`);
            error.code = 'ETIMEDOUT';
            error.ms = options.timeout;

            throw error;
        }

        // We can't accept custom `createConnection` because the API is different for HTTP/2
        if (socket && options.createConnection) {
            socket.destroy();
            socket = undefined;
        }

        delete options.resolveSocket;

        const isHttp2 = alpnProtocol === 'h2';

        if (agent) {
            agent = isHttp2 ? agent.http2 : agent.https;
            options.agent = agent;
        }

        if (agent === undefined) {
            agent = isHttp2 ? globalAgent$3 : https$2.globalAgent;
        }

        if (socket) {
            if (agent === false) {
                socket.destroy();
            } else {
                const defaultCreateConnection = (isHttp2 ? Agent$2 : https$2.Agent).prototype.createConnection;

                if (agent.createConnection === defaultCreateConnection) {
                    if (isHttp2) {
                        options._reuseSocket = socket;
                    } else {
                        installSocket(agent, socket, options);
                    }
                } else {
                    socket.destroy();
                }
            }
        }

        if (isHttp2) {
            return delayAsyncDestroy(new Http2ClientRequest(options, callback));
        }
    } else if (agent) {
        options.agent = agent.http;
    }

    return delayAsyncDestroy(http$2.request(options, callback));
};

auto$1.exports.protocolCache = cache;
auto$1.exports.resolveProtocol = defaultResolveProtocol;
auto$1.exports.createResolveProtocol = createResolveProtocol;

var autoExports = auto$1.exports;

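/*
 * Rough usage sketch for the `auto` entry point exported above (the URL is a
 * placeholder): it resolves the ALPN protocol first, then returns either an
 * HTTP/2 or an HTTP/1.1 client request.
 *
 *   const request = await autoExports('https://example.com', response => {
 *     console.log(response.statusCode, response.headers);
 *   });
 *   request.end();
 */
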
const stream = require$$0$3;
const tls$1 = require$$1$1;

// Really awesome hack.
const JSStreamSocket$2 = (new tls$1.TLSSocket(new stream.PassThrough()))._handle._parentWrap.constructor;

var jsStreamSocket = JSStreamSocket$2;

let UnexpectedStatusCodeError$2 = class UnexpectedStatusCodeError extends Error {
    constructor(statusCode, statusMessage = '') {
        super(`The proxy server rejected the request with status code ${statusCode} (${statusMessage || 'empty status message'})`);
        this.statusCode = statusCode;
        this.statusMessage = statusMessage;
    }
};

var unexpectedStatusCodeError = UnexpectedStatusCodeError$2;

const checkType$1 = (name, value, types) => {
    const valid = types.some(type => {
        const typeofType = typeof type;
        if (typeofType === 'string') {
            return typeof value === type;
        }

        return value instanceof type;
    });

    if (!valid) {
        const names = types.map(type => typeof type === 'string' ? type : type.name);

        throw new TypeError(`Expected '${name}' to be a type of ${names.join(' or ')}, got ${typeof value}`);
    }
};

var checkType_1 = checkType$1;

// See https://github.com/facebook/jest/issues/2549
// eslint-disable-next-line node/prefer-global/url
const {URL: URL$1} = require$$0$5;
const checkType = checkType_1;

var initialize$2 = (self, proxyOptions) => {
    checkType('proxyOptions', proxyOptions, ['object']);
    checkType('proxyOptions.headers', proxyOptions.headers, ['object', 'undefined']);
    checkType('proxyOptions.raw', proxyOptions.raw, ['boolean', 'undefined']);
    checkType('proxyOptions.url', proxyOptions.url, [URL$1, 'string']);

    const url = new URL$1(proxyOptions.url);

    self.proxyOptions = {
        raw: true,
        ...proxyOptions,
        headers: {...proxyOptions.headers},
        url
    };
};

var getAuthHeaders = self => {
    const {username, password} = self.proxyOptions.url;

    if (username || password) {
        const data = `${username}:${password}`;
        const authorization = `Basic ${Buffer.from(data).toString('base64')}`;

        return {
            'proxy-authorization': authorization,
            authorization
        };
    }

    return {};
};

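/*
 * For illustration, with proxy credentials embedded in the proxy URL
 * (e.g. `http://user:pass@proxy.example:8080`, a placeholder), the helper
 * above produces Basic credentials under both header spellings:
 *
 *   getAuthHeaders({proxyOptions: {url: new URL('http://user:pass@proxy.example:8080')}});
 *   //=> {'proxy-authorization': 'Basic dXNlcjpwYXNz', authorization: 'Basic dXNlcjpwYXNz'}
 */
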
const tls = require$$1$1;
const http$1 = require$$1$2;
const https$1 = require$$2$1;
const JSStreamSocket$1 = jsStreamSocket;
const {globalAgent: globalAgent$2} = agent;
const UnexpectedStatusCodeError$1 = unexpectedStatusCodeError;
const initialize$1 = initialize$2;
const getAuthorizationHeaders$2 = getAuthHeaders;

const createConnection = (self, options, callback) => {
    (async () => {
        try {
            const {proxyOptions} = self;
            const {url, headers, raw} = proxyOptions;

            const stream = await globalAgent$2.request(url, proxyOptions, {
                ...getAuthorizationHeaders$2(self),
                ...headers,
                ':method': 'CONNECT',
                ':authority': `${options.host}:${options.port}`
            });

            stream.once('error', callback);
            stream.once('response', headers => {
                const statusCode = headers[':status'];

                if (statusCode !== 200) {
                    callback(new UnexpectedStatusCodeError$1(statusCode, ''));
                    return;
                }

                const encrypted = self instanceof https$1.Agent;

                if (raw && encrypted) {
                    options.socket = stream;
                    const secureStream = tls.connect(options);

                    secureStream.once('close', () => {
                        stream.destroy();
                    });

                    callback(null, secureStream);
                    return;
                }

                const socket = new JSStreamSocket$1(stream);
                socket.encrypted = false;
                socket._handle.getpeername = out => {
                    out.family = undefined;
                    out.address = undefined;
                    out.port = undefined;
                };

                callback(null, socket);
            });
        } catch (error) {
            callback(error);
        }
    })();
};

let HttpOverHttp2$1 = class HttpOverHttp2 extends http$1.Agent {
    constructor(options) {
        super(options);

        initialize$1(this, options.proxyOptions);
    }

    createConnection(options, callback) {
        createConnection(this, options, callback);
    }
};

let HttpsOverHttp2$1 = class HttpsOverHttp2 extends https$1.Agent {
    constructor(options) {
        super(options);

        initialize$1(this, options.proxyOptions);
    }

    createConnection(options, callback) {
        createConnection(this, options, callback);
    }
};

var h1OverH2 = {
    HttpOverHttp2: HttpOverHttp2$1,
    HttpsOverHttp2: HttpsOverHttp2$1
};

const {Agent: Agent$1} = agent;
const JSStreamSocket = jsStreamSocket;
const UnexpectedStatusCodeError = unexpectedStatusCodeError;
const initialize = initialize$2;

let Http2OverHttpX$2 = class Http2OverHttpX extends Agent$1 {
    constructor(options) {
        super(options);

        initialize(this, options.proxyOptions);
    }

    async createConnection(origin, options) {
        const authority = `${origin.hostname}:${origin.port || 443}`;

        const [stream, statusCode, statusMessage] = await this._getProxyStream(authority);
        if (statusCode !== 200) {
            throw new UnexpectedStatusCodeError(statusCode, statusMessage);
        }

        if (this.proxyOptions.raw) {
            options.socket = stream;
        } else {
            const socket = new JSStreamSocket(stream);
            socket.encrypted = false;
            socket._handle.getpeername = out => {
                out.family = undefined;
                out.address = undefined;
                out.port = undefined;
            };

            return socket;
        }

        return super.createConnection(origin, options);
    }
};

var h2OverHx = Http2OverHttpX$2;

const {globalAgent: globalAgent$1} = agent;
const Http2OverHttpX$1 = h2OverHx;
const getAuthorizationHeaders$1 = getAuthHeaders;

const getStatusCode = stream => new Promise((resolve, reject) => {
    stream.once('error', reject);
    stream.once('response', headers => {
        stream.off('error', reject);
        resolve(headers[':status']);
    });
});

let Http2OverHttp2$1 = class Http2OverHttp2 extends Http2OverHttpX$1 {
    async _getProxyStream(authority) {
        const {proxyOptions} = this;

        const headers = {
            ...getAuthorizationHeaders$1(this),
            ...proxyOptions.headers,
            ':method': 'CONNECT',
            ':authority': authority
        };

        const stream = await globalAgent$1.request(proxyOptions.url, proxyOptions, headers);
        const statusCode = await getStatusCode(stream);

        return [stream, statusCode, ''];
    }
};

var h2OverH2 = Http2OverHttp2$1;

const http = require$$1$2;
const https = require$$2$1;
const Http2OverHttpX = h2OverHx;
const getAuthorizationHeaders = getAuthHeaders;

const getStream = request => new Promise((resolve, reject) => {
    const onConnect = (response, socket, head) => {
        socket.unshift(head);

        request.off('error', reject);
        resolve([socket, response.statusCode, response.statusMessage]);
    };

    request.once('error', reject);
    request.once('connect', onConnect);
});

let Http2OverHttp$1 = class Http2OverHttp extends Http2OverHttpX {
    async _getProxyStream(authority) {
        const {proxyOptions} = this;
        const {url, headers} = this.proxyOptions;

        const network = url.protocol === 'https:' ? https : http;

        // `new URL('https://localhost/httpbin.org:443')` results in
        // a `/httpbin.org:443` path, which has an invalid leading slash.
        const request = network.request({
            ...proxyOptions,
            hostname: url.hostname,
            port: url.port,
            path: authority,
            headers: {
                ...getAuthorizationHeaders(this),
                ...headers,
                host: authority
            },
            method: 'CONNECT'
        }).end();

        return getStream(request);
    }
};

var h2OverH1 = {
    Http2OverHttp: Http2OverHttp$1,
    Http2OverHttps: Http2OverHttp$1
};

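/*
 * Illustrative sketch (the proxy URL is a placeholder): each proxy class above
 * is constructed with `proxyOptions` describing the proxy endpoint, e.g.
 *
 *   const agent = new Http2OverHttp$1({
 *     proxyOptions: {url: 'http://proxy.example:8080'}
 *   });
 *
 * `Http2OverHttp`/`Http2OverHttps` tunnel HTTP/2 through an HTTP/1 proxy,
 * while the earlier `HttpOverHttp2`/`HttpsOverHttp2` classes do the reverse.
 */
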
const http2 = require$$3;
const {
    Agent,
    globalAgent
} = agent;
const ClientRequest = clientRequest;
const IncomingMessage = incomingMessage;
const auto = autoExports;
const {
    HttpOverHttp2,
    HttpsOverHttp2
} = h1OverH2;
const Http2OverHttp2 = h2OverH2;
const {
    Http2OverHttp,
    Http2OverHttps
} = h2OverH1;
const validateHeaderName = validateHeaderName$2;
const validateHeaderValue = validateHeaderValue$2;

const request = (url, options, callback) => new ClientRequest(url, options, callback);

const get = (url, options, callback) => {
    // eslint-disable-next-line unicorn/prevent-abbreviations
    const req = new ClientRequest(url, options, callback);
    req.end();

    return req;
};

var source = {
    ...http2,
    ClientRequest,
    IncomingMessage,
    Agent,
    globalAgent,
    request,
    get,
    auto,
    proxies: {
        HttpOverHttp2,
        HttpsOverHttp2,
        Http2OverHttp2,
        Http2OverHttp,
        Http2OverHttps
    },
    validateHeaderName,
    validateHeaderValue
};

var http2wrapper = /*@__PURE__*/getDefaultExportFromCjs(source);

function parseLinkHeader(link) {
    const parsed = [];
    const items = link.split(',');
    for (const item of items) {
        // https://tools.ietf.org/html/rfc5988#section-5
        const [rawUriReference, ...rawLinkParameters] = item.split(';');
        const trimmedUriReference = rawUriReference.trim();
        // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with
        if (trimmedUriReference[0] !== '<' || trimmedUriReference[trimmedUriReference.length - 1] !== '>') {
            throw new Error(`Invalid format of the Link header reference: ${trimmedUriReference}`);
        }
        const reference = trimmedUriReference.slice(1, -1);
        const parameters = {};
        if (rawLinkParameters.length === 0) {
            throw new Error(`Unexpected end of Link header parameters: ${rawLinkParameters.join(';')}`);
        }
        for (const rawParameter of rawLinkParameters) {
            const trimmedRawParameter = rawParameter.trim();
            const center = trimmedRawParameter.indexOf('=');
            if (center === -1) {
                throw new Error(`Failed to parse Link header: ${link}`);
            }
            const name = trimmedRawParameter.slice(0, center).trim();
            const value = trimmedRawParameter.slice(center + 1).trim();
            parameters[name] = value;
        }
        parsed.push({
            reference,
            parameters,
        });
    }
    return parsed;
}

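/*
 * For illustration, a typical pagination header parses as follows (the URL is
 * a placeholder); note that quoted parameter values keep their quotes:
 *
 *   parseLinkHeader('<https://example.com/page/2>; rel="next"');
 *   //=> [{reference: 'https://example.com/page/2', parameters: {rel: '"next"'}}]
 */
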
const [major, minor] = process$1.versions.node.split('.').map(Number);
|
||
|
function validateSearchParameters(searchParameters) {
|
||
|
// eslint-disable-next-line guard-for-in
|
||
|
for (const key in searchParameters) {
|
||
|
const value = searchParameters[key];
|
||
|
assert$1.any([is.string, is.number, is.boolean, is.null_, is.undefined], value);
|
||
|
}
|
||
|
}
|
||
|
const globalCache = new Map();
|
||
|
let globalDnsCache;
|
||
|
const getGlobalDnsCache = () => {
|
||
|
if (globalDnsCache) {
|
||
|
return globalDnsCache;
|
||
|
}
|
||
|
globalDnsCache = new CacheableLookup();
|
||
|
return globalDnsCache;
|
||
|
};
|
||
|
const defaultInternals = {
|
||
|
request: undefined,
|
||
|
agent: {
|
||
|
http: undefined,
|
||
|
https: undefined,
|
||
|
http2: undefined,
|
||
|
},
|
||
|
h2session: undefined,
|
||
|
decompress: true,
|
||
|
timeout: {
|
||
|
connect: undefined,
|
||
|
lookup: undefined,
|
||
|
read: undefined,
|
||
|
request: undefined,
|
||
|
response: undefined,
|
||
|
secureConnect: undefined,
|
||
|
send: undefined,
|
||
|
socket: undefined,
|
||
|
},
|
||
|
prefixUrl: '',
|
||
|
body: undefined,
|
||
|
form: undefined,
|
||
|
json: undefined,
|
||
|
cookieJar: undefined,
|
||
|
ignoreInvalidCookies: false,
|
||
|
searchParams: undefined,
|
||
|
dnsLookup: undefined,
|
||
|
dnsCache: undefined,
|
||
|
context: {},
|
||
|
hooks: {
|
||
|
init: [],
|
||
|
beforeRequest: [],
|
||
|
beforeError: [],
|
||
|
beforeRedirect: [],
|
||
|
beforeRetry: [],
|
||
|
afterResponse: [],
|
||
|
},
|
||
|
followRedirect: true,
|
||
|
maxRedirects: 10,
|
||
|
cache: undefined,
|
||
|
throwHttpErrors: true,
|
||
|
username: '',
|
||
|
password: '',
|
||
|
http2: false,
|
||
|
allowGetBody: false,
|
||
|
headers: {
|
||
|
'user-agent': 'got (https://github.com/sindresorhus/got)',
|
||
|
},
|
||
|
methodRewriting: false,
|
||
|
dnsLookupIpVersion: undefined,
|
||
|
parseJson: JSON.parse,
|
||
|
stringifyJson: JSON.stringify,
|
||
|
retry: {
|
||
|
limit: 2,
|
||
|
methods: [
|
||
|
'GET',
|
||
|
'PUT',
|
||
|
'HEAD',
|
||
|
'DELETE',
|
||
|
'OPTIONS',
|
||
|
'TRACE',
|
||
|
],
|
||
|
statusCodes: [
|
||
|
408,
|
||
|
413,
|
||
|
429,
|
||
|
500,
|
||
|
502,
|
||
|
503,
|
||
|
504,
|
||
|
521,
|
||
|
522,
|
||
|
524,
|
||
|
],
|
||
|
errorCodes: [
|
||
|
'ETIMEDOUT',
|
||
|
'ECONNRESET',
|
||
|
'EADDRINUSE',
|
||
|
'ECONNREFUSED',
|
||
|
'EPIPE',
|
||
|
'ENOTFOUND',
|
||
|
'ENETUNREACH',
|
||
|
'EAI_AGAIN',
|
||
|
],
|
||
|
maxRetryAfter: undefined,
|
||
|
calculateDelay: ({ computedValue }) => computedValue,
|
||
|
backoffLimit: Number.POSITIVE_INFINITY,
|
||
|
noise: 100,
|
||
|
},
|
||
|
localAddress: undefined,
|
||
|
method: 'GET',
|
||
|
createConnection: undefined,
|
||
|
cacheOptions: {
|
||
|
shared: undefined,
|
||
|
cacheHeuristic: undefined,
|
||
|
immutableMinTimeToLive: undefined,
|
||
|
ignoreCargoCult: undefined,
|
||
|
},
|
||
|
https: {
|
||
|
alpnProtocols: undefined,
|
||
|
rejectUnauthorized: undefined,
|
||
|
checkServerIdentity: undefined,
|
||
|
certificateAuthority: undefined,
|
||
|
key: undefined,
|
||
|
certificate: undefined,
|
||
|
passphrase: undefined,
|
||
|
pfx: undefined,
|
||
|
ciphers: undefined,
|
||
|
honorCipherOrder: undefined,
|
||
|
minVersion: undefined,
|
||
|
maxVersion: undefined,
|
||
|
signatureAlgorithms: undefined,
|
||
|
tlsSessionLifetime: undefined,
|
||
|
dhparam: undefined,
|
||
|
ecdhCurve: undefined,
|
||
|
certificateRevocationLists: undefined,
|
||
|
},
|
||
|
encoding: undefined,
|
||
|
resolveBodyOnly: false,
|
||
|
isStream: false,
|
||
|
responseType: 'text',
|
||
|
url: undefined,
|
||
|
pagination: {
|
||
|
transform(response) {
|
||
|
if (response.request.options.responseType === 'json') {
|
||
|
return response.body;
|
||
|
}
|
||
|
return JSON.parse(response.body);
|
||
|
},
|
||
|
paginate({ response }) {
|
||
|
const rawLinkHeader = response.headers.link;
|
||
|
if (typeof rawLinkHeader !== 'string' || rawLinkHeader.trim() === '') {
|
||
|
return false;
|
||
|
}
|
||
|
const parsed = parseLinkHeader(rawLinkHeader);
|
||
|
const next = parsed.find(entry => entry.parameters.rel === 'next' || entry.parameters.rel === '"next"');
|
||
|
if (next) {
|
||
|
return {
|
||
|
url: new URL$6(next.reference, response.url),
|
||
|
};
|
||
|
}
|
||
|
return false;
|
||
|
},
|
||
|
filter: () => true,
|
||
|
shouldContinue: () => true,
|
||
|
countLimit: Number.POSITIVE_INFINITY,
|
||
|
backoff: 0,
|
||
|
requestLimit: 10000,
|
||
|
stackAllItems: false,
|
||
|
},
|
||
|
setHost: true,
|
||
|
maxHeaderSize: undefined,
|
||
|
signal: undefined,
|
||
|
enableUnixSockets: true,
|
||
|
};
|
||
|
const cloneInternals = (internals) => {
|
||
|
const { hooks, retry } = internals;
|
||
|
const result = {
|
||
|
...internals,
|
||
|
context: { ...internals.context },
|
||
|
cacheOptions: { ...internals.cacheOptions },
|
||
|
https: { ...internals.https },
|
||
|
agent: { ...internals.agent },
|
||
|
headers: { ...internals.headers },
|
||
|
retry: {
|
||
|
...retry,
|
||
|
errorCodes: [...retry.errorCodes],
|
||
|
methods: [...retry.methods],
|
||
|
statusCodes: [...retry.statusCodes],
|
||
|
},
|
||
|
timeout: { ...internals.timeout },
|
||
|
hooks: {
|
||
|
init: [...hooks.init],
|
||
|
beforeRequest: [...hooks.beforeRequest],
|
||
|
beforeError: [...hooks.beforeError],
|
||
|
beforeRedirect: [...hooks.beforeRedirect],
|
||
|
beforeRetry: [...hooks.beforeRetry],
|
||
|
afterResponse: [...hooks.afterResponse],
|
||
|
},
|
||
|
searchParams: internals.searchParams ? new URLSearchParams(internals.searchParams) : undefined,
|
||
|
pagination: { ...internals.pagination },
|
||
|
};
|
||
|
if (result.url !== undefined) {
|
||
|
result.prefixUrl = '';
|
||
|
}
|
||
|
return result;
|
||
|
};
|
||
|
const cloneRaw = (raw) => {
|
||
|
const { hooks, retry } = raw;
|
||
|
const result = { ...raw };
|
||
|
if (is.object(raw.context)) {
|
||
|
result.context = { ...raw.context };
|
||
|
}
|
||
|
if (is.object(raw.cacheOptions)) {
|
||
|
result.cacheOptions = { ...raw.cacheOptions };
|
||
|
}
|
||
|
if (is.object(raw.https)) {
|
||
|
result.https = { ...raw.https };
|
||
|
}
|
||
|
if (is.object(raw.cacheOptions)) {
|
||
|
result.cacheOptions = { ...result.cacheOptions };
|
||
|
}
|
||
|
if (is.object(raw.agent)) {
|
||
|
result.agent = { ...raw.agent };
|
||
|
}
|
||
|
if (is.object(raw.headers)) {
|
||
|
result.headers = { ...raw.headers };
|
||
|
}
|
||
|
if (is.object(retry)) {
|
||
|
result.retry = { ...retry };
|
||
|
if (is.array(retry.errorCodes)) {
|
||
|
result.retry.errorCodes = [...retry.errorCodes];
|
||
|
}
|
||
|
if (is.array(retry.methods)) {
|
||
|
result.retry.methods = [...retry.methods];
|
||
|
}
|
||
|
if (is.array(retry.statusCodes)) {
|
||
|
result.retry.statusCodes = [...retry.statusCodes];
|
||
|
}
|
||
|
}
|
||
|
if (is.object(raw.timeout)) {
|
||
|
result.timeout = { ...raw.timeout };
|
||
|
}
|
||
|
if (is.object(hooks)) {
|
||
|
result.hooks = {
|
||
|
...hooks,
|
||
|
};
|
||
|
if (is.array(hooks.init)) {
|
||
|
result.hooks.init = [...hooks.init];
|
||
|
}
|
||
|
if (is.array(hooks.beforeRequest)) {
|
||
|
result.hooks.beforeRequest = [...hooks.beforeRequest];
|
||
|
}
|
||
|
if (is.array(hooks.beforeError)) {
|
||
|
result.hooks.beforeError = [...hooks.beforeError];
|
||
|
}
|
||
|
if (is.array(hooks.beforeRedirect)) {
|
||
|
result.hooks.beforeRedirect = [...hooks.beforeRedirect];
|
||
|
}
|
||
|
if (is.array(hooks.beforeRetry)) {
|
||
|
result.hooks.beforeRetry = [...hooks.beforeRetry];
|
||
|
}
|
||
|
if (is.array(hooks.afterResponse)) {
|
||
|
result.hooks.afterResponse = [...hooks.afterResponse];
|
||
|
}
|
||
|
}
|
||
|
// TODO: raw.searchParams
|
||
|
if (is.object(raw.pagination)) {
|
||
|
result.pagination = { ...raw.pagination };
|
||
|
}
|
||
|
return result;
|
||
|
};
|
||
|
const getHttp2TimeoutOption = (internals) => {
|
||
|
const delays = [internals.timeout.socket, internals.timeout.connect, internals.timeout.lookup, internals.timeout.request, internals.timeout.secureConnect].filter(delay => typeof delay === 'number');
|
||
|
if (delays.length > 0) {
|
||
|
return Math.min(...delays);
|
||
|
}
|
||
|
return undefined;
|
||
|
};
|
||
|
const init = (options, withOptions, self) => {
|
||
|
const initHooks = options.hooks?.init;
|
||
|
if (initHooks) {
|
||
|
for (const hook of initHooks) {
|
||
|
hook(withOptions, self);
|
||
|
}
|
||
|
}
|
||
|
};
|
||
|
class Options {
|
||
|
constructor(input, options, defaults) {
|
||
|
Object.defineProperty(this, "_unixOptions", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_internals", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_merging", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_init", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
assert$1.any([is.string, is.urlInstance, is.object, is.undefined], input);
|
||
|
assert$1.any([is.object, is.undefined], options);
|
||
|
assert$1.any([is.object, is.undefined], defaults);
|
||
|
if (input instanceof Options || options instanceof Options) {
|
||
|
throw new TypeError('The defaults must be passed as the third argument');
|
||
|
}
|
||
|
this._internals = cloneInternals(defaults?._internals ?? defaults ?? defaultInternals);
|
||
|
this._init = [...(defaults?._init ?? [])];
|
||
|
this._merging = false;
|
||
|
this._unixOptions = undefined;
|
||
|
// This rule allows `finally` to be considered more important.
|
||
|
// Meaning no matter the error thrown in the `try` block,
|
||
|
// if `finally` throws then the `finally` error will be thrown.
|
||
|
//
|
||
|
// Yes, we want this. If we set `url` first, then the `url.searchParams`
|
||
|
// would get merged. Instead we set the `searchParams` first, then
|
||
|
// `url.searchParams` is overwritten as expected.
|
||
|
//
|
||
|
/* eslint-disable no-unsafe-finally */
|
||
|
try {
|
||
|
if (is.plainObject(input)) {
|
||
|
try {
|
||
|
this.merge(input);
|
||
|
this.merge(options);
|
||
|
}
|
||
|
finally {
|
||
|
this.url = input.url;
|
||
|
}
|
||
|
}
|
||
|
else {
|
||
|
try {
|
||
|
this.merge(options);
|
||
|
}
|
||
|
finally {
|
||
|
if (options?.url !== undefined) {
|
||
|
if (input === undefined) {
|
||
|
this.url = options.url;
|
||
|
}
|
||
|
else {
|
||
|
throw new TypeError('The `url` option is mutually exclusive with the `input` argument');
|
||
|
}
|
||
|
}
|
||
|
else if (input !== undefined) {
|
||
|
this.url = input;
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
catch (error) {
|
||
|
error.options = this;
|
||
|
throw error;
|
||
|
}
|
||
|
/* eslint-enable no-unsafe-finally */
|
||
|
}
|
||
|
merge(options) {
|
||
|
if (!options) {
|
||
|
return;
|
||
|
}
|
||
|
if (options instanceof Options) {
|
||
|
for (const init of options._init) {
|
||
|
this.merge(init);
|
||
|
}
|
||
|
return;
|
||
|
}
|
||
|
options = cloneRaw(options);
|
||
|
init(this, options, this);
|
||
|
init(options, options, this);
|
||
|
this._merging = true;
|
||
|
// Always merge `isStream` first
|
||
|
if ('isStream' in options) {
|
||
|
this.isStream = options.isStream;
|
||
|
}
|
||
|
try {
|
||
|
let push = false;
|
||
|
for (const key in options) {
|
||
|
// `got.extend()` options
|
||
|
if (key === 'mutableDefaults' || key === 'handlers') {
|
||
|
continue;
|
||
|
}
|
||
|
// Never merge `url`
|
||
|
if (key === 'url') {
|
||
|
continue;
|
||
|
}
|
||
|
if (!(key in this)) {
|
||
|
throw new Error(`Unexpected option: ${key}`);
|
||
|
}
|
||
|
// @ts-expect-error Type 'unknown' is not assignable to type 'never'.
|
||
|
this[key] = options[key];
|
||
|
push = true;
|
||
|
}
|
||
|
if (push) {
|
||
|
this._init.push(options);
|
||
|
}
|
||
|
}
|
||
|
finally {
|
||
|
this._merging = false;
|
||
|
}
|
||
|
}
|
||
|
/**
|
||
|
Custom request function.
|
||
|
The main purpose of this is to [support HTTP2 using a wrapper](https://github.com/szmarczak/http2-wrapper).
|
||
|
|
||
|
@default http.request | https.request
|
||
|
*/
|
||
|
get request() {
|
||
|
return this._internals.request;
|
||
|
}
|
||
|
set request(value) {
|
||
|
assert$1.any([is.function_, is.undefined], value);
|
||
|
this._internals.request = value;
|
||
|
}
|
||
|
/**
|
||
|
An object representing `http`, `https` and `http2` keys for [`http.Agent`](https://nodejs.org/api/http.html#http_class_http_agent), [`https.Agent`](https://nodejs.org/api/https.html#https_class_https_agent) and [`http2wrapper.Agent`](https://github.com/szmarczak/http2-wrapper#new-http2agentoptions) instance.
|
||
|
This is necessary because a request to one protocol might redirect to another.
|
||
|
In such a scenario, Got will switch over to the right protocol agent for you.
|
||
|
|
||
|
If a key is not present, it will default to a global agent.
|
||
|
|
||
|
@example
|
||
|
```
|
||
|
import got from 'got';
|
||
|
import HttpAgent from 'agentkeepalive';
|
||
|
|
||
|
const {HttpsAgent} = HttpAgent;
|
||
|
|
||
|
await got('https://sindresorhus.com', {
|
||
|
agent: {
|
||
|
http: new HttpAgent(),
|
||
|
https: new HttpsAgent()
|
||
|
}
|
||
|
});
|
||
|
```
|
||
|
*/
|
||
|
get agent() {
|
||
|
return this._internals.agent;
|
||
|
}
|
||
|
set agent(value) {
|
||
|
assert$1.plainObject(value);
|
||
|
// eslint-disable-next-line guard-for-in
|
||
|
for (const key in value) {
|
||
|
if (!(key in this._internals.agent)) {
|
||
|
throw new TypeError(`Unexpected agent option: ${key}`);
|
||
|
}
|
||
|
// @ts-expect-error - No idea why `value[key]` doesn't work here.
|
||
|
assert$1.any([is.object, is.undefined], value[key]);
|
||
|
}
|
||
|
if (this._merging) {
|
||
|
Object.assign(this._internals.agent, value);
|
||
|
}
|
||
|
else {
|
||
|
this._internals.agent = { ...value };
|
||
|
}
|
||
|
}
|
||
|
get h2session() {
|
||
|
return this._internals.h2session;
|
||
|
}
|
||
|
set h2session(value) {
|
||
|
this._internals.h2session = value;
|
||
|
}
|
||
|
/**
|
||
|
Decompress the response automatically.
|
||
|
|
||
|
This will set the `accept-encoding` header to `gzip, deflate, br` unless you set it yourself.
|
||
|
|
||
|
If this is disabled, a compressed response is returned as a `Buffer`.
|
||
|
This may be useful if you want to handle decompression yourself or stream the raw compressed data.
|
||
|
|
||
|
@default true
|
||
|
*/
|
||
|
get decompress() {
|
||
|
return this._internals.decompress;
|
||
|
}
|
||
|
set decompress(value) {
|
||
|
assert$1.boolean(value);
|
||
|
this._internals.decompress = value;
|
||
|
}
|
||
|
/**
|
||
|
Milliseconds to wait for the server to end the response before aborting the request with `got.TimeoutError` error (a.k.a. `request` property).
|
||
|
By default, there's no timeout.
|
||
|
|
||
|
This also accepts an `object` with the following fields to constrain the duration of each phase of the request lifecycle:
|
||
|
|
||
|
- `lookup` starts when a socket is assigned and ends when the hostname has been resolved.
|
||
|
Does not apply when using a Unix domain socket.
|
||
|
- `connect` starts when `lookup` completes (or when the socket is assigned if lookup does not apply to the request) and ends when the socket is connected.
|
||
|
- `secureConnect` starts when `connect` completes and ends when the handshaking process completes (HTTPS only).
|
||
|
- `socket` starts when the socket is connected. See [request.setTimeout](https://nodejs.org/api/http.html#http_request_settimeout_timeout_callback).
|
||
|
- `response` starts when the request has been written to the socket and ends when the response headers are received.
|
||
|
- `send` starts when the socket is connected and ends with the request has been written to the socket.
|
||
|
- `request` starts when the request is initiated and ends when the response's end event fires.
|
||
|
*/
|
||
|
get timeout() {
|
||
|
// We always return `Delays` here.
|
||
|
// It has to be `Delays | number`, otherwise TypeScript will error because the getter and the setter have incompatible types.
|
||
|
return this._internals.timeout;
|
||
|
}
|
||
|
set timeout(value) {
|
||
|
assert$1.plainObject(value);
|
||
|
// eslint-disable-next-line guard-for-in
|
||
|
for (const key in value) {
|
||
|
if (!(key in this._internals.timeout)) {
|
||
|
throw new Error(`Unexpected timeout option: ${key}`);
|
||
|
}
|
||
|
// @ts-expect-error - No idea why `value[key]` doesn't work here.
|
||
|
assert$1.any([is.number, is.undefined], value[key]);
|
||
|
}
|
||
|
if (this._merging) {
|
||
|
Object.assign(this._internals.timeout, value);
|
||
|
}
|
||
|
else {
|
||
|
this._internals.timeout = { ...value };
|
||
|
}
|
||
|
}
|
||
|
/**
|
||
|
When specified, `prefixUrl` will be prepended to `url`.
|
||
|
The prefix can be any valid URL, either relative or absolute.
|
||
|
A trailing slash `/` is optional - one will be added automatically.
|
||
|
|
||
|
__Note__: `prefixUrl` will be ignored if the `url` argument is a URL instance.
|
||
|
|
||
|
__Note__: Leading slashes in `input` are disallowed when using this option to enforce consistency and avoid confusion.
|
||
|
For example, when the prefix URL is `https://example.com/foo` and the input is `/bar`, there's ambiguity whether the resulting URL would become `https://example.com/foo/bar` or `https://example.com/bar`.
|
||
|
The latter is used by browsers.
|
||
|
|
||
|
__Tip__: Useful when used with `got.extend()` to create niche-specific Got instances.
|
||
|
|
||
|
__Tip__: You can change `prefixUrl` using hooks as long as the URL still includes the `prefixUrl`.
|
||
|
If the URL doesn't include it anymore, it will throw.
|
||
|
|
||
|
@example
|
||
|
```
|
||
|
import got from 'got';
|
||
|
|
||
|
await got('unicorn', {prefixUrl: 'https://cats.com'});
|
||
|
//=> 'https://cats.com/unicorn'
|
||
|
|
||
|
const instance = got.extend({
|
||
|
prefixUrl: 'https://google.com'
|
||
|
});
|
||
|
|
||
|
await instance('unicorn', {
|
||
|
hooks: {
|
||
|
beforeRequest: [
|
||
|
options => {
|
||
|
options.prefixUrl = 'https://cats.com';
|
||
|
}
|
||
|
]
|
||
|
}
|
||
|
});
|
||
|
//=> 'https://cats.com/unicorn'
|
||
|
```
|
||
|
*/
|
||
|
get prefixUrl() {
|
||
|
// We always return `string` here.
|
||
|
// It has to be `string | URL`, otherwise TypeScript will error because the getter and the setter have incompatible types.
|
||
|
return this._internals.prefixUrl;
|
||
|
}
|
||
|
set prefixUrl(value) {
|
||
|
assert$1.any([is.string, is.urlInstance], value);
|
||
|
if (value === '') {
|
||
|
this._internals.prefixUrl = '';
|
||
|
return;
|
||
|
}
|
||
|
value = value.toString();
|
||
|
if (!value.endsWith('/')) {
|
||
|
value += '/';
|
||
|
}
|
||
|
if (this._internals.prefixUrl && this._internals.url) {
|
||
|
const { href } = this._internals.url;
|
||
|
this._internals.url.href = value + href.slice(this._internals.prefixUrl.length);
|
||
|
}
|
||
|
this._internals.prefixUrl = value;
|
||
|
}
|
||
|
/**
|
||
|
__Note #1__: The `body` option cannot be used with the `json` or `form` option.
|
||
|
|
||
|
__Note #2__: If you provide this option, `got.stream()` will be read-only.
|
||
|
|
||
|
__Note #3__: If you provide a payload with the `GET` or `HEAD` method, it will throw a `TypeError` unless the method is `GET` and the `allowGetBody` option is set to `true`.
|
||
|
|
||
|
__Note #4__: This option is not enumerable and will not be merged with the instance defaults.
|
||
|
|
||
|
The `content-length` header will be automatically set if `body` is a `string` / `Buffer` / [`FormData`](https://developer.mozilla.org/en-US/docs/Web/API/FormData) / [`form-data` instance](https://github.com/form-data/form-data), and `content-length` and `transfer-encoding` are not manually set in `options.headers`.
|
||
|
|
||
|
Since Got 12, the `content-length` is not automatically set when `body` is a `fs.createReadStream`.
|
||
|
*/
|
||
|
get body() {
|
||
|
return this._internals.body;
|
||
|
}
|
||
|
set body(value) {
|
||
|
assert$1.any([is.string, is.buffer, is.nodeStream, is.generator, is.asyncGenerator, isFormData$1, is.undefined], value);
|
||
|
if (is.nodeStream(value)) {
|
||
|
assert$1.truthy(value.readable);
|
||
|
}
|
||
|
if (value !== undefined) {
|
||
|
assert$1.undefined(this._internals.form);
|
||
|
assert$1.undefined(this._internals.json);
|
||
|
}
|
||
|
this._internals.body = value;
|
||
|
}
|
||
|
/**
|
||
|
The form body is converted to a query string using [`(new URLSearchParams(object)).toString()`](https://nodejs.org/api/url.html#url_constructor_new_urlsearchparams_obj).
|
||
|
|
||
|
If the `Content-Type` header is not present, it will be set to `application/x-www-form-urlencoded`.
|
||
|
|
||
|
__Note #1__: If you provide this option, `got.stream()` will be read-only.
|
||
|
|
||
|
__Note #2__: This option is not enumerable and will not be merged with the instance defaults.
|
||
|
*/
|
||
|
get form() {
|
||
|
return this._internals.form;
|
||
|
}
|
||
|
set form(value) {
|
||
|
assert$1.any([is.plainObject, is.undefined], value);
|
||
|
if (value !== undefined) {
|
||
|
assert$1.undefined(this._internals.body);
|
||
|
assert$1.undefined(this._internals.json);
|
||
|
}
|
||
|
this._internals.form = value;
|
||
|
}
|
||
|
/**
|
||
|
JSON body. If the `Content-Type` header is not set, it will be set to `application/json`.
|
||
|
|
||
|
__Note #1__: If you provide this option, `got.stream()` will be read-only.
|
||
|
|
||
|
__Note #2__: This option is not enumerable and will not be merged with the instance defaults.
|
||
|
*/
|
||
|
get json() {
|
||
|
return this._internals.json;
|
||
|
}
|
||
|
set json(value) {
|
||
|
if (value !== undefined) {
|
||
|
assert$1.undefined(this._internals.body);
|
||
|
assert$1.undefined(this._internals.form);
|
||
|
}
|
||
|
this._internals.json = value;
|
||
|
}
|
||
|
/**
|
||
|
The URL to request, as a string, a [`https.request` options object](https://nodejs.org/api/https.html#https_https_request_options_callback), or a [WHATWG `URL`](https://nodejs.org/api/url.html#url_class_url).
|
||
|
|
||
|
Properties from `options` will override properties in the parsed `url`.
|
||
|
|
||
|
If no protocol is specified, it will throw a `TypeError`.
|
||
|
|
||
|
__Note__: The query string is **not** parsed as search params.
|
||
|
|
||
|
@example
|
||
|
```
|
||
|
await got('https://example.com/?query=a b'); //=> https://example.com/?query=a%20b
|
||
|
await got('https://example.com/', {searchParams: {query: 'a b'}}); //=> https://example.com/?query=a+b
|
||
|
|
||
|
// The query string is overridden by `searchParams`
|
||
|
await got('https://example.com/?query=a b', {searchParams: {query: 'a b'}}); //=> https://example.com/?query=a+b
|
||
|
```
|
||
|
*/
|
||
|
get url() {
|
||
|
return this._internals.url;
|
||
|
}
|
||
|
set url(value) {
|
||
|
assert$1.any([is.string, is.urlInstance, is.undefined], value);
|
||
|
if (value === undefined) {
|
||
|
this._internals.url = undefined;
|
||
|
return;
|
||
|
}
|
||
|
if (is.string(value) && value.startsWith('/')) {
|
||
|
throw new Error('`url` must not start with a slash');
|
||
|
}
|
||
|
const urlString = `${this.prefixUrl}${value.toString()}`;
|
||
|
const url = new URL$6(urlString);
|
||
|
this._internals.url = url;
|
||
|
if (url.protocol === 'unix:') {
|
||
|
url.href = `http://unix${url.pathname}${url.search}`;
|
||
|
}
|
||
|
if (url.protocol !== 'http:' && url.protocol !== 'https:') {
|
||
|
const error = new Error(`Unsupported protocol: ${url.protocol}`);
|
||
|
error.code = 'ERR_UNSUPPORTED_PROTOCOL';
|
||
|
throw error;
|
||
|
}
|
||
|
if (this._internals.username) {
|
||
|
url.username = this._internals.username;
|
||
|
this._internals.username = '';
|
||
|
}
|
||
|
if (this._internals.password) {
|
||
|
url.password = this._internals.password;
|
||
|
this._internals.password = '';
|
||
|
}
|
||
|
if (this._internals.searchParams) {
|
||
|
url.search = this._internals.searchParams.toString();
|
||
|
this._internals.searchParams = undefined;
|
||
|
}
|
||
|
if (url.hostname === 'unix') {
|
||
|
if (!this._internals.enableUnixSockets) {
|
||
|
throw new Error('Using UNIX domain sockets but option `enableUnixSockets` is not enabled');
|
||
|
}
|
||
|
const matches = /(?<socketPath>.+?):(?<path>.+)/.exec(`${url.pathname}${url.search}`);
|
||
|
if (matches?.groups) {
|
||
|
const { socketPath, path } = matches.groups;
|
||
|
this._unixOptions = {
|
||
|
socketPath,
|
||
|
path,
|
||
|
host: '',
|
||
|
};
|
||
|
}
|
||
|
else {
|
||
|
this._unixOptions = undefined;
|
||
|
}
|
||
|
return;
|
||
|
}
|
||
|
this._unixOptions = undefined;
|
||
|
}
|
||
|
/**
|
||
|
Cookie support. You don't have to care about parsing or how to store them.
|
||
|
|
||
|
__Note__: If you provide this option, `options.headers.cookie` will be overridden.
|
||
|
*/
|
||
|
get cookieJar() {
|
||
|
return this._internals.cookieJar;
|
||
|
}
|
||
|
set cookieJar(value) {
|
||
|
assert$1.any([is.object, is.undefined], value);
|
||
|
if (value === undefined) {
|
||
|
this._internals.cookieJar = undefined;
|
||
|
return;
|
||
|
}
|
||
|
let { setCookie, getCookieString } = value;
|
||
|
assert$1.function_(setCookie);
|
||
|
assert$1.function_(getCookieString);
|
||
|
/* istanbul ignore next: Horrible `tough-cookie` v3 check */
|
||
|
if (setCookie.length === 4 && getCookieString.length === 0) {
|
||
|
setCookie = promisify$1(setCookie.bind(value));
|
||
|
getCookieString = promisify$1(getCookieString.bind(value));
|
||
|
this._internals.cookieJar = {
|
||
|
setCookie,
|
||
|
getCookieString: getCookieString,
|
||
|
};
|
||
|
}
|
||
|
else {
|
||
|
this._internals.cookieJar = value;
|
||
|
}
|
||
|
}
|
||
|
/**
|
||
|
You can abort the `request` using [`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController).
|
||
|
|
||
|
*Requires Node.js 16 or later.*
|
||
|
|
||
|
@example
|
||
|
```
|
||
|
import got from 'got';
|
||
|
|
||
|
const abortController = new AbortController();
|
||
|
|
||
|
const request = got('https://httpbin.org/anything', {
|
||
|
signal: abortController.signal
|
||
|
});
|
||
|
|
||
|
setTimeout(() => {
|
||
|
abortController.abort();
|
||
|
}, 100);
|
||
|
```
|
||
|
*/
|
||
|
// TODO: Replace `any` with `AbortSignal` when targeting Node 16.
|
||
|
get signal() {
|
||
|
return this._internals.signal;
|
||
|
}
|
||
|
// TODO: Replace `any` with `AbortSignal` when targeting Node 16.
|
||
|
set signal(value) {
|
||
|
assert$1.object(value);
|
||
|
this._internals.signal = value;
|
||
|
}
|
||
|
/**
|
||
|
Ignore invalid cookies instead of throwing an error.
|
||
|
Only useful when the `cookieJar` option has been set. Not recommended.
|
||
|
|
||
|
@default false
|
||
|
*/
|
||
|
get ignoreInvalidCookies() {
|
||
|
return this._internals.ignoreInvalidCookies;
|
||
|
}
|
||
|
set ignoreInvalidCookies(value) {
|
||
|
assert$1.boolean(value);
|
||
|
this._internals.ignoreInvalidCookies = value;
|
||
|
}
|
||
|
/**
|
||
|
Query string that will be added to the request URL.
|
||
|
This will override the query string in `url`.
|
||
|
|
||
|
If you need to pass in an array, you can do it using a `URLSearchParams` instance.
|
||
|
|
||
|
@example
|
||
|
```
|
||
|
import got from 'got';
|
||
|
|
||
|
const searchParams = new URLSearchParams([['key', 'a'], ['key', 'b']]);
|
||
|
|
||
|
await got('https://example.com', {searchParams});
|
||
|
|
||
|
console.log(searchParams.toString());
|
||
|
//=> 'key=a&key=b'
|
||
|
```
|
||
|
*/
|
||
|
get searchParams() {
|
||
|
if (this._internals.url) {
|
||
|
return this._internals.url.searchParams;
|
||
|
}
|
||
|
if (this._internals.searchParams === undefined) {
|
||
|
this._internals.searchParams = new URLSearchParams();
|
||
|
}
|
||
|
return this._internals.searchParams;
|
||
|
}
|
||
|
set searchParams(value) {
|
||
|
assert$1.any([is.string, is.object, is.undefined], value);
|
||
|
const url = this._internals.url;
|
||
|
if (value === undefined) {
|
||
|
this._internals.searchParams = undefined;
|
||
|
if (url) {
|
||
|
url.search = '';
|
||
|
}
|
||
|
return;
|
||
|
}
|
||
|
const searchParameters = this.searchParams;
|
||
|
let updated;
|
||
|
if (is.string(value)) {
|
||
|
updated = new URLSearchParams(value);
|
||
|
}
|
||
|
else if (value instanceof URLSearchParams) {
|
||
|
updated = value;
|
||
|
}
|
||
|
else {
|
||
|
validateSearchParameters(value);
|
||
|
updated = new URLSearchParams();
|
||
|
// eslint-disable-next-line guard-for-in
|
||
|
for (const key in value) {
|
||
|
const entry = value[key];
|
||
|
if (entry === null) {
|
||
|
updated.append(key, '');
|
||
|
}
|
||
|
else if (entry === undefined) {
|
||
|
searchParameters.delete(key);
|
||
|
}
|
||
|
else {
|
||
|
updated.append(key, entry);
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
if (this._merging) {
|
||
|
// These keys will be replaced
|
||
|
for (const key of updated.keys()) {
|
||
|
searchParameters.delete(key);
|
||
|
}
|
||
|
for (const [key, value] of updated) {
|
||
|
searchParameters.append(key, value);
|
||
|
}
|
||
|
}
|
||
|
else if (url) {
|
||
|
url.search = searchParameters.toString();
|
||
|
}
|
||
|
else {
|
||
|
this._internals.searchParams = searchParameters;
|
||
|
}
|
||
|
}
|
||
|
get searchParameters() {
|
||
|
throw new Error('The `searchParameters` option does not exist. Use `searchParams` instead.');
|
||
|
}
|
||
|
set searchParameters(_value) {
|
||
|
throw new Error('The `searchParameters` option does not exist. Use `searchParams` instead.');
|
||
|
}
|
||
|
get dnsLookup() {
|
||
|
return this._internals.dnsLookup;
|
||
|
}
|
||
|
set dnsLookup(value) {
|
||
|
assert$1.any([is.function_, is.undefined], value);
|
||
|
this._internals.dnsLookup = value;
|
||
|
}
|
||
|
/**
|
||
|
An instance of [`CacheableLookup`](https://github.com/szmarczak/cacheable-lookup) used for making DNS lookups.
|
||
|
Useful when making lots of requests to different *public* hostnames.
|
||
|
|
||
|
`CacheableLookup` uses `dns.resolver4(..)` and `dns.resolver6(...)` under the hood and fall backs to `dns.lookup(...)` when the first two fail, which may lead to additional delay.
|
||
|
|
||
|
__Note__: This should stay disabled when making requests to internal hostnames such as `localhost`, `database.local` etc.
|
||
|
|
||
|
@default false
|
||
|
*/
|
||
|
get dnsCache() {
|
||
|
return this._internals.dnsCache;
|
||
|
}
|
||
|
set dnsCache(value) {
|
||
|
assert$1.any([is.object, is.boolean, is.undefined], value);
|
||
|
if (value === true) {
|
||
|
this._internals.dnsCache = getGlobalDnsCache();
|
||
|
}
|
||
|
else if (value === false) {
|
||
|
this._internals.dnsCache = undefined;
|
||
|
}
|
||
|
else {
|
||
|
this._internals.dnsCache = value;
|
||
|
}
|
||
|
}
|
||
|
/**
|
||
|
User data. `context` is shallow merged and enumerable. If it contains non-enumerable properties they will NOT be merged.
|
||
|
|
||
|
@example
|
||
|
```
|
||
|
import got from 'got';
|
||
|
|
||
|
const instance = got.extend({
|
||
|
hooks: {
|
||
|
beforeRequest: [
|
||
|
options => {
|
||
|
if (!options.context || !options.context.token) {
|
||
|
throw new Error('Token required');
|
||
|
}
|
||
|
|
||
|
options.headers.token = options.context.token;
|
||
|
}
|
||
|
]
|
||
|
}
|
||
|
});
|
||
|
|
||
|
const context = {
|
||
|
token: 'secret'
|
||
|
};
|
||
|
|
||
|
const response = await instance('https://httpbin.org/headers', {context});
|
||
|
|
||
|
// Let's see the headers
|
||
|
console.log(response.body);
|
||
|
```
|
||
|
*/
|
||
|
get context() {
|
||
|
return this._internals.context;
|
||
|
}
|
||
|
set context(value) {
|
||
|
assert$1.object(value);
|
||
|
if (this._merging) {
|
||
|
Object.assign(this._internals.context, value);
|
||
|
}
|
||
|
else {
|
||
|
this._internals.context = { ...value };
|
||
|
}
|
||
|
}
|
||
|
/**
|
||
|
Hooks allow modifications during the request lifecycle.
|
||
|
Hook functions may be async and are run serially.
|
||
|
*/
|
||
|
get hooks() {
|
||
|
return this._internals.hooks;
|
||
|
}
|
||
|
set hooks(value) {
|
||
|
assert$1.object(value);
|
||
|
// eslint-disable-next-line guard-for-in
|
||
|
for (const knownHookEvent in value) {
|
||
|
if (!(knownHookEvent in this._internals.hooks)) {
|
||
|
throw new Error(`Unexpected hook event: ${knownHookEvent}`);
|
||
|
}
|
||
|
const typedKnownHookEvent = knownHookEvent;
|
||
|
const hooks = value[typedKnownHookEvent];
|
||
|
assert$1.any([is.array, is.undefined], hooks);
|
||
|
if (hooks) {
|
||
|
for (const hook of hooks) {
|
||
|
assert$1.function_(hook);
|
||
|
}
|
||
|
}
|
||
|
if (this._merging) {
|
||
|
if (hooks) {
|
||
|
// @ts-expect-error FIXME
|
||
|
this._internals.hooks[typedKnownHookEvent].push(...hooks);
|
||
|
}
|
||
|
}
|
||
|
else {
|
||
|
if (!hooks) {
|
||
|
throw new Error(`Missing hook event: ${knownHookEvent}`);
|
||
|
}
|
||
|
// @ts-expect-error FIXME
|
||
|
this._internals.hooks[knownHookEvent] = [...hooks];
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
/**
|
||
|
Defines if redirect responses should be followed automatically.
|
||
|
|
||
|
Note that if a `303` is sent by the server in response to any request type (`POST`, `DELETE`, etc.), Got will automatically request the resource pointed to in the location header via `GET`.
|
||
|
This is in accordance with [the spec](https://tools.ietf.org/html/rfc7231#section-6.4.4). You can optionally turn on this behavior also for other redirect codes - see `methodRewriting`.
|
||
|
|
||
|
@default true
|
||
|
*/
|
||
|
get followRedirect() {
|
||
|
return this._internals.followRedirect;
|
||
|
}
|
||
|
set followRedirect(value) {
|
||
|
assert$1.boolean(value);
|
||
|
this._internals.followRedirect = value;
|
||
|
}
|
||
|
get followRedirects() {
|
||
|
throw new TypeError('The `followRedirects` option does not exist. Use `followRedirect` instead.');
|
||
|
}
|
||
|
set followRedirects(_value) {
|
||
|
throw new TypeError('The `followRedirects` option does not exist. Use `followRedirect` instead.');
|
||
|
}
|
||
|
/**
|
||
|
If exceeded, the request will be aborted and a `MaxRedirectsError` will be thrown.
|
||
|
|
||
|
@default 10
|
||
|
*/
|
||
|
get maxRedirects() {
|
||
|
return this._internals.maxRedirects;
|
||
|
}
|
||
|
set maxRedirects(value) {
|
||
|
assert$1.number(value);
|
||
|
this._internals.maxRedirects = value;
|
||
|
}
|
||
|
/**
|
||
|
A cache adapter instance for storing cached response data.
|
||
|
|
||
|
@default false
|
||
|
*/
|
||
|
get cache() {
|
||
|
return this._internals.cache;
|
||
|
}
|
||
|
set cache(value) {
|
||
|
assert$1.any([is.object, is.string, is.boolean, is.undefined], value);
|
||
|
if (value === true) {
|
||
|
this._internals.cache = globalCache;
|
||
|
}
|
||
|
else if (value === false) {
|
||
|
this._internals.cache = undefined;
|
||
|
}
|
||
|
else {
|
||
|
this._internals.cache = value;
|
||
|
}
|
||
|
}
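// Editor's note: a minimal sketch of the `cache` option handled above. Passing `true`
// uses the bundled global cache; any Keyv-compatible storage adapter (for example a
// plain `Map`) is also accepted, as the setter's assertions show. Illustrative only.
//
//     import got from 'got';
//
//     const map = new Map();
//     await got('https://example.com', {cache: map}); // first call hits the network
//     await got('https://example.com', {cache: map}); // served from the in-memory cache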
/**
|
||
|
Determines if a `got.HTTPError` is thrown for unsuccessful responses.
|
||
|
|
||
|
If this is disabled, requests that encounter an error status code will be resolved with the `response` instead of throwing.
|
||
|
This may be useful if you are checking for resource availability and are expecting error responses.
|
||
|
|
||
|
@default true
|
||
|
*/
|
||
|
get throwHttpErrors() {
|
||
|
return this._internals.throwHttpErrors;
|
||
|
}
|
||
|
set throwHttpErrors(value) {
|
||
|
assert$1.boolean(value);
|
||
|
this._internals.throwHttpErrors = value;
|
||
|
}
|
||
|
get username() {
|
||
|
const url = this._internals.url;
|
||
|
const value = url ? url.username : this._internals.username;
|
||
|
return decodeURIComponent(value);
|
||
|
}
|
||
|
set username(value) {
|
||
|
assert$1.string(value);
|
||
|
const url = this._internals.url;
|
||
|
const fixedValue = encodeURIComponent(value);
|
||
|
if (url) {
|
||
|
url.username = fixedValue;
|
||
|
}
|
||
|
else {
|
||
|
this._internals.username = fixedValue;
|
||
|
}
|
||
|
}
|
||
|
get password() {
|
||
|
const url = this._internals.url;
|
||
|
const value = url ? url.password : this._internals.password;
|
||
|
return decodeURIComponent(value);
|
||
|
}
|
||
|
set password(value) {
|
||
|
assert$1.string(value);
|
||
|
const url = this._internals.url;
|
||
|
const fixedValue = encodeURIComponent(value);
|
||
|
if (url) {
|
||
|
url.password = fixedValue;
|
||
|
}
|
||
|
else {
|
||
|
this._internals.password = fixedValue;
|
||
|
}
|
||
|
}
|
||
|
/**
|
||
|
If set to `true`, Got will additionally accept HTTP2 requests.
|
||
|
|
||
|
It will choose either HTTP/1.1 or HTTP/2 depending on the ALPN protocol.
|
||
|
|
||
|
__Note__: This option requires Node.js 15.10.0 or newer as HTTP/2 support on older Node.js versions is very buggy.
|
||
|
|
||
|
__Note__: Overriding `options.request` will disable HTTP2 support.
|
||
|
|
||
|
@default false
|
||
|
|
||
|
@example
|
||
|
```
|
||
|
import got from 'got';
|
||
|
|
||
|
const {headers} = await got('https://nghttp2.org/httpbin/anything', {http2: true});
|
||
|
|
||
|
console.log(headers.via);
|
||
|
//=> '2 nghttpx'
|
||
|
```
|
||
|
*/
|
||
|
get http2() {
|
||
|
return this._internals.http2;
|
||
|
}
|
||
|
set http2(value) {
|
||
|
assert$1.boolean(value);
|
||
|
this._internals.http2 = value;
|
||
|
}
|
||
|
/**
|
||
|
Set this to `true` to allow sending body for the `GET` method.
|
||
|
However, the [HTTP/2 specification](https://tools.ietf.org/html/rfc7540#section-8.1.3) says that `An HTTP GET request includes request header fields and no payload body`, therefore when using the HTTP/2 protocol this option will have no effect.
|
||
|
This option is only meant to interact with non-compliant servers when you have no other choice.
|
||
|
|
||
|
__Note__: The [RFC 7231](https://tools.ietf.org/html/rfc7231#section-4.3.1) doesn't specify any particular behavior for the GET method having a payload, therefore __it's considered an [anti-pattern](https://en.wikipedia.org/wiki/Anti-pattern)__.
|
||
|
|
||
|
@default false
|
||
|
*/
|
||
|
get allowGetBody() {
|
||
|
return this._internals.allowGetBody;
|
||
|
}
|
||
|
set allowGetBody(value) {
|
||
|
assert$1.boolean(value);
|
||
|
this._internals.allowGetBody = value;
|
||
|
}
|
||
|
/**
|
||
|
Request headers.
|
||
|
|
||
|
Existing headers will be overwritten. Headers set to `undefined` will be omitted.
|
||
|
|
||
|
@default {}
|
||
|
*/
|
||
|
get headers() {
|
||
|
return this._internals.headers;
|
||
|
}
|
||
|
set headers(value) {
|
||
|
assert$1.plainObject(value);
|
||
|
if (this._merging) {
|
||
|
Object.assign(this._internals.headers, lowercaseKeys(value));
|
||
|
}
|
||
|
else {
|
||
|
this._internals.headers = lowercaseKeys(value);
|
||
|
}
|
||
|
}
|
||
|
/**
|
||
|
Specifies if the HTTP request method should be [rewritten as `GET`](https://tools.ietf.org/html/rfc7231#section-6.4) on redirects.
|
||
|
|
||
|
As the [specification](https://tools.ietf.org/html/rfc7231#section-6.4) prefers to rewrite the HTTP method only on `303` responses, this is Got's default behavior.
|
||
|
Setting `methodRewriting` to `true` will also rewrite `301` and `302` responses, as allowed by the spec. This is the behavior followed by `curl` and browsers.
|
||
|
|
||
|
__Note__: Got never performs method rewriting on `307` and `308` responses, as this is [explicitly prohibited by the specification](https://www.rfc-editor.org/rfc/rfc7231#section-6.4.7).
|
||
|
|
||
|
@default false
|
||
|
*/
|
||
|
get methodRewriting() {
|
||
|
return this._internals.methodRewriting;
|
||
|
}
|
||
|
set methodRewriting(value) {
|
||
|
assert$1.boolean(value);
|
||
|
this._internals.methodRewriting = value;
|
||
|
}
|
||
|
/**
|
||
|
Indicates which DNS record family to use.
|
||
|
|
||
|
Values:
|
||
|
- `undefined`: IPv4 (if present) or IPv6
|
||
|
- `4`: Only IPv4
|
||
|
- `6`: Only IPv6
|
||
|
|
||
|
@default undefined
|
||
|
*/
|
||
|
get dnsLookupIpVersion() {
|
||
|
return this._internals.dnsLookupIpVersion;
|
||
|
}
|
||
|
set dnsLookupIpVersion(value) {
|
||
|
if (value !== undefined && value !== 4 && value !== 6) {
|
||
|
throw new TypeError(`Invalid DNS lookup IP version: ${value}`);
|
||
|
}
|
||
|
this._internals.dnsLookupIpVersion = value;
|
||
|
}
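// Editor's note: a short sketch for `dnsLookupIpVersion` as validated above; only
// `undefined`, `4`, or `6` are accepted. Illustrative only.
//
//     import got from 'got';
//
//     // Force IPv4 resolution for this request.
//     await got('https://example.com', {dnsLookupIpVersion: 4});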
/**
|
||
|
A function used to parse JSON responses.
|
||
|
|
||
|
@example
|
||
|
```
|
||
|
import got from 'got';
|
||
|
import Bourne from '@hapi/bourne';
|
||
|
|
||
|
const parsed = await got('https://example.com', {
|
||
|
parseJson: text => Bourne.parse(text)
|
||
|
}).json();
|
||
|
|
||
|
console.log(parsed);
|
||
|
```
|
||
|
*/
|
||
|
get parseJson() {
|
||
|
return this._internals.parseJson;
|
||
|
}
|
||
|
set parseJson(value) {
|
||
|
assert$1.function_(value);
|
||
|
this._internals.parseJson = value;
|
||
|
}
|
||
|
/**
|
||
|
A function used to stringify the body of JSON requests.
|
||
|
|
||
|
@example
|
||
|
```
|
||
|
import got from 'got';
|
||
|
|
||
|
await got.post('https://example.com', {
|
||
|
stringifyJson: object => JSON.stringify(object, (key, value) => {
|
||
|
if (key.startsWith('_')) {
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
return value;
|
||
|
}),
|
||
|
json: {
|
||
|
some: 'payload',
|
||
|
_ignoreMe: 1234
|
||
|
}
|
||
|
});
|
||
|
```
|
||
|
|
||
|
@example
|
||
|
```
|
||
|
import got from 'got';
|
||
|
|
||
|
await got.post('https://example.com', {
|
||
|
stringifyJson: object => JSON.stringify(object, (key, value) => {
|
||
|
if (typeof value === 'number') {
|
||
|
return value.toString();
|
||
|
}
|
||
|
|
||
|
return value;
|
||
|
}),
|
||
|
json: {
|
||
|
some: 'payload',
|
||
|
number: 1
|
||
|
}
|
||
|
});
|
||
|
```
|
||
|
*/
|
||
|
get stringifyJson() {
|
||
|
return this._internals.stringifyJson;
|
||
|
}
|
||
|
set stringifyJson(value) {
|
||
|
assert$1.function_(value);
|
||
|
this._internals.stringifyJson = value;
|
||
|
}
/**
An object representing `limit`, `calculateDelay`, `methods`, `statusCodes`, `maxRetryAfter` and `errorCodes` fields for maximum retry count, retry handler, allowed methods, allowed status codes, maximum [`Retry-After`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After) time and allowed error codes.

The delay between retries is computed with the function `1000 * Math.pow(2, retry) + Math.random() * 100`, where `retry` is the attempt number (starting from 1).

The `calculateDelay` property is a `function` that receives an object with `attemptCount`, `retryOptions`, `error` and `computedValue` properties for the current retry count, the retry options, the error and the default computed value.
The function must return a delay in milliseconds (or a Promise resolving with it); a return value of `0` cancels the retry.

By default, it retries *only* on the specified methods, status codes, and on these network errors:

- `ETIMEDOUT`: One of the [timeout](#timeout) limits was reached.
- `ECONNRESET`: Connection was forcibly closed by a peer.
- `EADDRINUSE`: Could not bind to any free port.
- `ECONNREFUSED`: Connection was refused by the server.
- `EPIPE`: The remote side of the stream being written has been closed.
- `ENOTFOUND`: Couldn't resolve the hostname to an IP address.
- `ENETUNREACH`: No internet connection.
- `EAI_AGAIN`: DNS lookup timed out.

__Note__: If `maxRetryAfter` is set to `undefined`, it will use `options.timeout`.
__Note__: If the [`Retry-After`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After) header is greater than `maxRetryAfter`, it will cancel the request.
*/
get retry() {
|
||
|
return this._internals.retry;
|
||
|
}
|
||
|
set retry(value) {
|
||
|
assert$1.plainObject(value);
|
||
|
assert$1.any([is.function_, is.undefined], value.calculateDelay);
|
||
|
assert$1.any([is.number, is.undefined], value.maxRetryAfter);
|
||
|
assert$1.any([is.number, is.undefined], value.limit);
|
||
|
assert$1.any([is.array, is.undefined], value.methods);
|
||
|
assert$1.any([is.array, is.undefined], value.statusCodes);
|
||
|
assert$1.any([is.array, is.undefined], value.errorCodes);
|
||
|
assert$1.any([is.number, is.undefined], value.noise);
|
||
|
if (value.noise && Math.abs(value.noise) > 100) {
|
||
|
throw new Error(`The maximum acceptable retry noise is +/- 100ms, got ${value.noise}`);
|
||
|
}
|
||
|
for (const key in value) {
|
||
|
if (!(key in this._internals.retry)) {
|
||
|
throw new Error(`Unexpected retry option: ${key}`);
|
||
|
}
|
||
|
}
|
||
|
if (this._merging) {
|
||
|
Object.assign(this._internals.retry, value);
|
||
|
}
|
||
|
else {
|
||
|
this._internals.retry = { ...value };
|
||
|
}
|
||
|
const { retry } = this._internals;
|
||
|
retry.methods = [...new Set(retry.methods.map(method => method.toUpperCase()))];
|
||
|
retry.statusCodes = [...new Set(retry.statusCodes)];
|
||
|
retry.errorCodes = [...new Set(retry.errorCodes)];
|
||
|
}
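// Editor's note: a hedged sketch of the retry options validated above. The default
// backoff follows `1000 * Math.pow(2, retry) + Math.random() * 100`; `calculateDelay`
// can override it, and returning `0` cancels the retry. Illustrative only.
//
//     import got from 'got';
//
//     await got('https://example.com', {
//         retry: {
//             limit: 3,
//             // Cap the computed exponential backoff at 5 seconds.
//             calculateDelay: ({computedValue}) => Math.min(computedValue, 5000),
//         },
//     });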
/**
|
||
|
From `http.RequestOptions`.
|
||
|
|
||
|
The IP address used to send the request from.
|
||
|
*/
|
||
|
get localAddress() {
|
||
|
return this._internals.localAddress;
|
||
|
}
|
||
|
set localAddress(value) {
|
||
|
assert$1.any([is.string, is.undefined], value);
|
||
|
this._internals.localAddress = value;
|
||
|
}
|
||
|
/**
|
||
|
The HTTP method used to make the request.
|
||
|
|
||
|
@default 'GET'
|
||
|
*/
|
||
|
get method() {
|
||
|
return this._internals.method;
|
||
|
}
|
||
|
set method(value) {
|
||
|
assert$1.string(value);
|
||
|
this._internals.method = value.toUpperCase();
|
||
|
}
|
||
|
get createConnection() {
|
||
|
return this._internals.createConnection;
|
||
|
}
|
||
|
set createConnection(value) {
|
||
|
assert$1.any([is.function_, is.undefined], value);
|
||
|
this._internals.createConnection = value;
|
||
|
}
|
||
|
/**
|
||
|
From `http-cache-semantics`
|
||
|
|
||
|
@default {}
|
||
|
*/
|
||
|
get cacheOptions() {
|
||
|
return this._internals.cacheOptions;
|
||
|
}
|
||
|
set cacheOptions(value) {
|
||
|
assert$1.plainObject(value);
|
||
|
assert$1.any([is.boolean, is.undefined], value.shared);
|
||
|
assert$1.any([is.number, is.undefined], value.cacheHeuristic);
|
||
|
assert$1.any([is.number, is.undefined], value.immutableMinTimeToLive);
|
||
|
assert$1.any([is.boolean, is.undefined], value.ignoreCargoCult);
|
||
|
for (const key in value) {
|
||
|
if (!(key in this._internals.cacheOptions)) {
|
||
|
throw new Error(`Cache option \`${key}\` does not exist`);
|
||
|
}
|
||
|
}
|
||
|
if (this._merging) {
|
||
|
Object.assign(this._internals.cacheOptions, value);
|
||
|
}
|
||
|
else {
|
||
|
this._internals.cacheOptions = { ...value };
|
||
|
}
|
||
|
}
|
||
|
/**
|
||
|
Options for the advanced HTTPS API.
|
||
|
*/
|
||
|
get https() {
|
||
|
return this._internals.https;
|
||
|
}
|
||
|
set https(value) {
|
||
|
assert$1.plainObject(value);
|
||
|
assert$1.any([is.boolean, is.undefined], value.rejectUnauthorized);
|
||
|
assert$1.any([is.function_, is.undefined], value.checkServerIdentity);
|
||
|
assert$1.any([is.string, is.object, is.array, is.undefined], value.certificateAuthority);
|
||
|
assert$1.any([is.string, is.object, is.array, is.undefined], value.key);
|
||
|
assert$1.any([is.string, is.object, is.array, is.undefined], value.certificate);
|
||
|
assert$1.any([is.string, is.undefined], value.passphrase);
|
||
|
assert$1.any([is.string, is.buffer, is.array, is.undefined], value.pfx);
|
||
|
assert$1.any([is.array, is.undefined], value.alpnProtocols);
|
||
|
assert$1.any([is.string, is.undefined], value.ciphers);
|
||
|
assert$1.any([is.string, is.buffer, is.undefined], value.dhparam);
|
||
|
assert$1.any([is.string, is.undefined], value.signatureAlgorithms);
|
||
|
assert$1.any([is.string, is.undefined], value.minVersion);
|
||
|
assert$1.any([is.string, is.undefined], value.maxVersion);
|
||
|
assert$1.any([is.boolean, is.undefined], value.honorCipherOrder);
|
||
|
assert$1.any([is.number, is.undefined], value.tlsSessionLifetime);
|
||
|
assert$1.any([is.string, is.undefined], value.ecdhCurve);
|
||
|
assert$1.any([is.string, is.buffer, is.array, is.undefined], value.certificateRevocationLists);
|
||
|
for (const key in value) {
|
||
|
if (!(key in this._internals.https)) {
|
||
|
throw new Error(`HTTPS option \`${key}\` does not exist`);
|
||
|
}
|
||
|
}
|
||
|
if (this._merging) {
|
||
|
Object.assign(this._internals.https, value);
|
||
|
}
|
||
|
else {
|
||
|
this._internals.https = { ...value };
|
||
|
}
|
||
|
}
|
||
|
/**
|
||
|
[Encoding](https://nodejs.org/api/buffer.html#buffer_buffers_and_character_encodings) to be used on `setEncoding` of the response data.
|
||
|
|
||
|
To get a [`Buffer`](https://nodejs.org/api/buffer.html), you need to set `responseType` to `buffer` instead.
|
||
|
Don't set this option to `null`.
|
||
|
|
||
|
__Note__: This doesn't affect streams! Instead, you need to do `got.stream(...).setEncoding(encoding)`.
|
||
|
|
||
|
@default 'utf-8'
|
||
|
*/
|
||
|
get encoding() {
|
||
|
return this._internals.encoding;
|
||
|
}
|
||
|
set encoding(value) {
|
||
|
if (value === null) {
|
||
|
throw new TypeError('To get a Buffer, set `options.responseType` to `buffer` instead');
|
||
|
}
|
||
|
assert$1.any([is.string, is.undefined], value);
|
||
|
this._internals.encoding = value;
|
||
|
}
|
||
|
/**
|
||
|
When set to `true` the promise will return the Response body instead of the Response object.
|
||
|
|
||
|
@default false
|
||
|
*/
|
||
|
get resolveBodyOnly() {
|
||
|
return this._internals.resolveBodyOnly;
|
||
|
}
|
||
|
set resolveBodyOnly(value) {
|
||
|
assert$1.boolean(value);
|
||
|
this._internals.resolveBodyOnly = value;
|
||
|
}
|
||
|
/**
|
||
|
Returns a `Stream` instead of a `Promise`.
|
||
|
This is equivalent to calling `got.stream(url, options?)`.
|
||
|
|
||
|
@default false
|
||
|
*/
|
||
|
get isStream() {
|
||
|
return this._internals.isStream;
|
||
|
}
|
||
|
set isStream(value) {
|
||
|
assert$1.boolean(value);
|
||
|
this._internals.isStream = value;
|
||
|
}
|
||
|
/**
|
||
|
The parsing method.
|
||
|
|
||
|
The promise also has `.text()`, `.json()` and `.buffer()` methods which return another Got promise for the parsed body.
|
||
|
|
||
|
It's like setting the options to `{responseType: 'json', resolveBodyOnly: true}` but without affecting the main Got promise.
|
||
|
|
||
|
__Note__: When using streams, this option is ignored.
|
||
|
|
||
|
@example
|
||
|
```
|
||
|
const responsePromise = got(url);
|
||
|
const bufferPromise = responsePromise.buffer();
|
||
|
const jsonPromise = responsePromise.json();
|
||
|
|
||
|
const [response, buffer, json] = await Promise.all([responsePromise, bufferPromise, jsonPromise]);
|
||
|
// `response` is an instance of Got Response
|
||
|
// `buffer` is an instance of Buffer
|
||
|
// `json` is an object
|
||
|
```
|
||
|
|
||
|
@example
|
||
|
```
|
||
|
// This
|
||
|
const body = await got(url).json();
|
||
|
|
||
|
// is semantically the same as this
|
||
|
const body = await got(url, {responseType: 'json', resolveBodyOnly: true});
|
||
|
```
|
||
|
*/
|
||
|
get responseType() {
|
||
|
return this._internals.responseType;
|
||
|
}
|
||
|
set responseType(value) {
|
||
|
if (value === undefined) {
|
||
|
this._internals.responseType = 'text';
|
||
|
return;
|
||
|
}
|
||
|
if (value !== 'text' && value !== 'buffer' && value !== 'json') {
|
||
|
throw new Error(`Invalid \`responseType\` option: ${value}`);
|
||
|
}
|
||
|
this._internals.responseType = value;
|
||
|
}
|
||
|
get pagination() {
|
||
|
return this._internals.pagination;
|
||
|
}
|
||
|
set pagination(value) {
|
||
|
assert$1.object(value);
|
||
|
if (this._merging) {
|
||
|
Object.assign(this._internals.pagination, value);
|
||
|
}
|
||
|
else {
|
||
|
this._internals.pagination = value;
|
||
|
}
|
||
|
}
|
||
|
get auth() {
|
||
|
throw new Error('Parameter `auth` is deprecated. Use `username` / `password` instead.');
|
||
|
}
|
||
|
set auth(_value) {
|
||
|
throw new Error('Parameter `auth` is deprecated. Use `username` / `password` instead.');
|
||
|
}
|
||
|
get setHost() {
|
||
|
return this._internals.setHost;
|
||
|
}
|
||
|
set setHost(value) {
|
||
|
assert$1.boolean(value);
|
||
|
this._internals.setHost = value;
|
||
|
}
|
||
|
get maxHeaderSize() {
|
||
|
return this._internals.maxHeaderSize;
|
||
|
}
|
||
|
set maxHeaderSize(value) {
|
||
|
assert$1.any([is.number, is.undefined], value);
|
||
|
this._internals.maxHeaderSize = value;
|
||
|
}
|
||
|
get enableUnixSockets() {
|
||
|
return this._internals.enableUnixSockets;
|
||
|
}
|
||
|
set enableUnixSockets(value) {
|
||
|
assert$1.boolean(value);
|
||
|
this._internals.enableUnixSockets = value;
|
||
|
}
|
||
|
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||
|
toJSON() {
|
||
|
return { ...this._internals };
|
||
|
}
|
||
|
[Symbol.for('nodejs.util.inspect.custom')](_depth, options) {
|
||
|
return inspect(this._internals, options);
|
||
|
}
|
||
|
createNativeRequestOptions() {
|
||
|
const internals = this._internals;
|
||
|
const url = internals.url;
|
||
|
let agent;
|
||
|
if (url.protocol === 'https:') {
|
||
|
agent = internals.http2 ? internals.agent : internals.agent.https;
|
||
|
}
|
||
|
else {
|
||
|
agent = internals.agent.http;
|
||
|
}
|
||
|
const { https } = internals;
|
||
|
let { pfx } = https;
|
||
|
if (is.array(pfx) && is.plainObject(pfx[0])) {
|
||
|
pfx = pfx.map(object => ({
|
||
|
buf: object.buffer,
|
||
|
passphrase: object.passphrase,
|
||
|
}));
|
||
|
}
|
||
|
return {
|
||
|
...internals.cacheOptions,
|
||
|
...this._unixOptions,
|
||
|
// HTTPS options
|
||
|
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||
|
ALPNProtocols: https.alpnProtocols,
|
||
|
ca: https.certificateAuthority,
|
||
|
cert: https.certificate,
|
||
|
key: https.key,
|
||
|
passphrase: https.passphrase,
|
||
|
pfx: https.pfx,
|
||
|
rejectUnauthorized: https.rejectUnauthorized,
|
||
|
checkServerIdentity: https.checkServerIdentity ?? checkServerIdentity,
|
||
|
ciphers: https.ciphers,
|
||
|
honorCipherOrder: https.honorCipherOrder,
|
||
|
minVersion: https.minVersion,
|
||
|
maxVersion: https.maxVersion,
|
||
|
sigalgs: https.signatureAlgorithms,
|
||
|
sessionTimeout: https.tlsSessionLifetime,
|
||
|
dhparam: https.dhparam,
|
||
|
ecdhCurve: https.ecdhCurve,
|
||
|
crl: https.certificateRevocationLists,
|
||
|
// HTTP options
|
||
|
lookup: internals.dnsLookup ?? internals.dnsCache?.lookup,
|
||
|
family: internals.dnsLookupIpVersion,
|
||
|
agent,
|
||
|
setHost: internals.setHost,
|
||
|
method: internals.method,
|
||
|
maxHeaderSize: internals.maxHeaderSize,
|
||
|
localAddress: internals.localAddress,
|
||
|
headers: internals.headers,
|
||
|
createConnection: internals.createConnection,
|
||
|
timeout: internals.http2 ? getHttp2TimeoutOption(internals) : undefined,
|
||
|
// HTTP/2 options
|
||
|
h2session: internals.h2session,
|
||
|
};
|
||
|
}
|
||
|
getRequestFunction() {
|
||
|
const url = this._internals.url;
|
||
|
const { request } = this._internals;
|
||
|
if (!request && url) {
|
||
|
return this.getFallbackRequestFunction();
|
||
|
}
|
||
|
return request;
|
||
|
}
|
||
|
getFallbackRequestFunction() {
|
||
|
const url = this._internals.url;
|
||
|
if (!url) {
|
||
|
return;
|
||
|
}
|
||
|
if (url.protocol === 'https:') {
|
||
|
if (this._internals.http2) {
|
||
|
if (major < 15 || (major === 15 && minor < 10)) {
|
||
|
const error = new Error('To use the `http2` option, install Node.js 15.10.0 or above');
|
||
|
error.code = 'EUNSUPPORTED';
|
||
|
throw error;
|
||
|
}
|
||
|
return http2wrapper.auto;
|
||
|
}
|
||
|
return https$4.request;
|
||
|
}
|
||
|
return http$4.request;
|
||
|
}
|
||
|
freeze() {
|
||
|
const options = this._internals;
|
||
|
Object.freeze(options);
|
||
|
Object.freeze(options.hooks);
|
||
|
Object.freeze(options.hooks.afterResponse);
|
||
|
Object.freeze(options.hooks.beforeError);
|
||
|
Object.freeze(options.hooks.beforeRedirect);
|
||
|
Object.freeze(options.hooks.beforeRequest);
|
||
|
Object.freeze(options.hooks.beforeRetry);
|
||
|
Object.freeze(options.hooks.init);
|
||
|
Object.freeze(options.https);
|
||
|
Object.freeze(options.cacheOptions);
|
||
|
Object.freeze(options.agent);
|
||
|
Object.freeze(options.headers);
|
||
|
Object.freeze(options.timeout);
|
||
|
Object.freeze(options.retry);
|
||
|
Object.freeze(options.retry.errorCodes);
|
||
|
Object.freeze(options.retry.methods);
|
||
|
Object.freeze(options.retry.statusCodes);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
const isResponseOk = (response) => {
|
||
|
const { statusCode } = response;
|
||
|
const limitStatusCode = response.request.options.followRedirect ? 299 : 399;
|
||
|
return (statusCode >= 200 && statusCode <= limitStatusCode) || statusCode === 304;
|
||
|
};
|
||
|
/**
An error thrown when the server response code is 2xx and parsing the body fails.
Includes a `response` property.
*/
|
||
|
class ParseError extends RequestError$1 {
|
||
|
constructor(error, response) {
|
||
|
const { options } = response.request;
|
||
|
super(`${error.message} in "${options.url.toString()}"`, error, response.request);
|
||
|
this.name = 'ParseError';
|
||
|
this.code = 'ERR_BODY_PARSE_FAILURE';
|
||
|
}
|
||
|
}
|
||
|
const parseBody = (response, responseType, parseJson, encoding) => {
|
||
|
const { rawBody } = response;
|
||
|
try {
|
||
|
if (responseType === 'text') {
|
||
|
return rawBody.toString(encoding);
|
||
|
}
|
||
|
if (responseType === 'json') {
|
||
|
return rawBody.length === 0 ? '' : parseJson(rawBody.toString(encoding));
|
||
|
}
|
||
|
if (responseType === 'buffer') {
|
||
|
return rawBody;
|
||
|
}
|
||
|
}
|
||
|
catch (error) {
|
||
|
throw new ParseError(error, response);
|
||
|
}
|
||
|
throw new ParseError({
|
||
|
message: `Unknown body type '${responseType}'`,
|
||
|
name: 'Error',
|
||
|
}, response);
|
||
|
};
|
||
|
|
||
|
function isClientRequest(clientRequest) {
|
||
|
return clientRequest.writable && !clientRequest.writableEnded;
|
||
|
}
|
||
|
|
||
|
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||
|
function isUnixSocketURL(url) {
|
||
|
return url.protocol === 'unix:' || url.hostname === 'unix';
|
||
|
}
|
||
|
|
||
|
const supportsBrotli = is.string(process$1.versions.brotli);
|
||
|
const methodsWithoutBody = new Set(['GET', 'HEAD']);
|
||
|
const cacheableStore = new WeakableMap();
|
||
|
const redirectCodes = new Set([300, 301, 302, 303, 304, 307, 308]);
|
||
|
const proxiedRequestEvents$1 = [
|
||
|
'socket',
|
||
|
'connect',
|
||
|
'continue',
|
||
|
'information',
|
||
|
'upgrade',
|
||
|
];
|
||
|
const noop = () => { };
|
||
|
class Request extends Duplex {
|
||
|
constructor(url, options, defaults) {
|
||
|
super({
|
||
|
// Don't destroy immediately, as the error may be emitted on unsuccessful retry
|
||
|
autoDestroy: false,
|
||
|
// It needs to be zero because we're just proxying the data to another stream
|
||
|
highWaterMark: 0,
|
||
|
});
|
||
|
// @ts-expect-error - Ignoring for now.
|
||
|
Object.defineProperty(this, 'constructor', {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_noPipe", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
// @ts-expect-error https://github.com/microsoft/TypeScript/issues/9568
|
||
|
Object.defineProperty(this, "options", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "response", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "requestUrl", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "redirectUrls", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "retryCount", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_stopRetry", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_downloadedSize", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_uploadedSize", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_stopReading", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_pipedServerResponses", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_request", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_responseSize", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_bodySize", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_unproxyEvents", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_isFromCache", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_cannotHaveBody", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_triggerRead", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_cancelTimeouts", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_removeListeners", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_nativeResponse", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_flushed", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
Object.defineProperty(this, "_aborted", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
// We need this because `this._request` is `undefined` when using cache
|
||
|
Object.defineProperty(this, "_requestInitialized", {
|
||
|
enumerable: true,
|
||
|
configurable: true,
|
||
|
writable: true,
|
||
|
value: void 0
|
||
|
});
|
||
|
this._downloadedSize = 0;
|
||
|
this._uploadedSize = 0;
|
||
|
this._stopReading = false;
|
||
|
this._pipedServerResponses = new Set();
|
||
|
this._cannotHaveBody = false;
|
||
|
this._unproxyEvents = noop;
|
||
|
this._triggerRead = false;
|
||
|
this._cancelTimeouts = noop;
|
||
|
this._removeListeners = noop;
|
||
|
this._jobs = [];
|
||
|
this._flushed = false;
|
||
|
this._requestInitialized = false;
|
||
|
this._aborted = false;
|
||
|
this.redirectUrls = [];
|
||
|
this.retryCount = 0;
|
||
|
this._stopRetry = noop;
|
||
|
this.on('pipe', source => {
|
||
|
if (source.headers) {
|
||
|
Object.assign(this.options.headers, source.headers);
|
||
|
}
|
||
|
});
|
||
|
this.on('newListener', event => {
|
||
|
if (event === 'retry' && this.listenerCount('retry') > 0) {
|
||
|
throw new Error('A retry listener has been attached already.');
|
||
|
}
|
||
|
});
|
||
|
try {
|
||
|
this.options = new Options(url, options, defaults);
|
||
|
if (!this.options.url) {
|
||
|
if (this.options.prefixUrl === '') {
|
||
|
throw new TypeError('Missing `url` property');
|
||
|
}
|
||
|
this.options.url = '';
|
||
|
}
|
||
|
this.requestUrl = this.options.url;
|
||
|
}
|
||
|
catch (error) {
|
||
|
const { options } = error;
|
||
|
if (options) {
|
||
|
this.options = options;
|
||
|
}
|
||
|
this.flush = async () => {
|
||
|
this.flush = async () => { };
|
||
|
this.destroy(error);
|
||
|
};
|
||
|
return;
|
||
|
}
|
||
|
// Important! If you replace `body` in a handler with another stream, make sure it's readable first.
|
||
|
// The below is run only once.
|
||
|
const { body } = this.options;
|
||
|
if (is.nodeStream(body)) {
|
||
|
body.once('error', error => {
|
||
|
if (this._flushed) {
|
||
|
this._beforeError(new UploadError(error, this));
|
||
|
}
|
||
|
else {
|
||
|
this.flush = async () => {
|
||
|
this.flush = async () => { };
|
||
|
this._beforeError(new UploadError(error, this));
|
||
|
};
|
||
|
}
|
||
|
});
|
||
|
}
|
||
|
if (this.options.signal) {
|
||
|
const abort = () => {
|
||
|
this.destroy(new AbortError(this));
|
||
|
};
|
||
|
if (this.options.signal.aborted) {
|
||
|
abort();
|
||
|
}
|
||
|
else {
|
||
|
this.options.signal.addEventListener('abort', abort);
|
||
|
this._removeListeners = () => {
|
||
|
this.options.signal.removeEventListener('abort', abort);
|
||
|
};
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
async flush() {
|
||
|
if (this._flushed) {
|
||
|
return;
|
||
|
}
|
||
|
this._flushed = true;
|
||
|
try {
|
||
|
await this._finalizeBody();
|
||
|
if (this.destroyed) {
|
||
|
return;
|
||
|
}
|
||
|
await this._makeRequest();
|
||
|
if (this.destroyed) {
|
||
|
this._request?.destroy();
|
||
|
return;
|
||
|
}
|
||
|
// Queued writes etc.
|
||
|
for (const job of this._jobs) {
|
||
|
job();
|
||
|
}
|
||
|
// Prevent memory leak
|
||
|
this._jobs.length = 0;
|
||
|
this._requestInitialized = true;
|
||
|
}
|
||
|
catch (error) {
|
||
|
this._beforeError(error);
|
||
|
}
|
||
|
}
|
||
|
_beforeError(error) {
|
||
|
if (this._stopReading) {
|
||
|
return;
|
||
|
}
|
||
|
const { response, options } = this;
|
||
|
const attemptCount = this.retryCount + (error.name === 'RetryError' ? 0 : 1);
|
||
|
this._stopReading = true;
|
||
|
if (!(error instanceof RequestError$1)) {
|
||
|
error = new RequestError$1(error.message, error, this);
|
||
|
}
|
||
|
const typedError = error;
|
||
|
void (async () => {
|
||
|
// Node.js parser is really weird.
|
||
|
// It emits post-request Parse Errors on the same instance as the previous request.
|
||
|
// Therefore we need to check if it has been destroyed as well.
|
||
|
//
|
||
|
// Furthermore, Node.js 16 `response.destroy()` doesn't immediately destroy the socket,
|
||
|
// but makes the response unreadable. So we additionally need to check `response.readable`.
|
||
|
if (response?.readable && !response.rawBody && !this._request?.socket?.destroyed) {
|
||
|
// @types/node has incorrect typings. `setEncoding` accepts `null` as well.
|
||
|
response.setEncoding(this.readableEncoding);
|
||
|
const success = await this._setRawBody(response);
|
||
|
if (success) {
|
||
|
response.body = response.rawBody.toString();
|
||
|
}
|
||
|
}
|
||
|
if (this.listenerCount('retry') !== 0) {
|
||
|
let backoff;
|
||
|
try {
|
||
|
let retryAfter;
|
||
|
if (response && 'retry-after' in response.headers) {
|
||
|
retryAfter = Number(response.headers['retry-after']);
|
||
|
if (Number.isNaN(retryAfter)) {
|
||
|
retryAfter = Date.parse(response.headers['retry-after']) - Date.now();
|
||
|
if (retryAfter <= 0) {
|
||
|
retryAfter = 1;
|
||
|
}
|
||
|
}
|
||
|
else {
|
||
|
retryAfter *= 1000;
|
||
|
}
|
||
|
}
|
||
|
const retryOptions = options.retry;
|
||
|
backoff = await retryOptions.calculateDelay({
|
||
|
attemptCount,
|
||
|
retryOptions,
|
||
|
error: typedError,
|
||
|
retryAfter,
|
||
|
computedValue: calculateRetryDelay$1({
|
||
|
attemptCount,
|
||
|
retryOptions,
|
||
|
error: typedError,
|
||
|
retryAfter,
|
||
|
computedValue: retryOptions.maxRetryAfter ?? options.timeout.request ?? Number.POSITIVE_INFINITY,
|
||
|
}),
|
||
|
});
|
||
|
}
|
||
|
catch (error_) {
|
||
|
void this._error(new RequestError$1(error_.message, error_, this));
|
||
|
return;
|
||
|
}
|
||
|
if (backoff) {
|
||
|
await new Promise(resolve => {
|
||
|
const timeout = setTimeout(resolve, backoff);
|
||
|
this._stopRetry = () => {
|
||
|
clearTimeout(timeout);
|
||
|
resolve();
|
||
|
};
|
||
|
});
|
||
|
// Something forced us to abort the retry
|
||
|
if (this.destroyed) {
|
||
|
return;
|
||
|
}
|
||
|
try {
|
||
|
for (const hook of this.options.hooks.beforeRetry) {
|
||
|
// eslint-disable-next-line no-await-in-loop
|
||
|
await hook(typedError, this.retryCount + 1);
|
||
|
}
|
||
|
}
|
||
|
catch (error_) {
|
||
|
void this._error(new RequestError$1(error_.message, error, this));
|
||
|
return;
|
||
|
}
|
||
|
// Something forced us to abort the retry
|
||
|
if (this.destroyed) {
|
||
|
return;
|
||
|
}
|
||
|
this.destroy();
|
||
|
this.emit('retry', this.retryCount + 1, error, (updatedOptions) => {
|
||
|
const request = new Request(options.url, updatedOptions, options);
|
||
|
request.retryCount = this.retryCount + 1;
|
||
|
process$1.nextTick(() => {
|
||
|
void request.flush();
|
||
|
});
|
||
|
return request;
|
||
|
});
|
||
|
return;
|
||
|
}
|
||
|
}
|
||
|
void this._error(typedError);
|
||
|
})();
|
||
|
}
|
||
|
_read() {
|
||
|
this._triggerRead = true;
|
||
|
const { response } = this;
|
||
|
if (response && !this._stopReading) {
|
||
|
// We cannot put this in the `if` above
|
||
|
// because `.read()` also triggers the `end` event
|
||
|
if (response.readableLength) {
|
||
|
this._triggerRead = false;
|
||
|
}
|
||
|
let data;
|
||
|
while ((data = response.read()) !== null) {
|
||
|
this._downloadedSize += data.length; // eslint-disable-line @typescript-eslint/restrict-plus-operands
|
||
|
const progress = this.downloadProgress;
|
||
|
if (progress.percent < 1) {
|
||
|
this.emit('downloadProgress', progress);
|
||
|
}
|
||
|
this.push(data);
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
_write(chunk, encoding, callback) {
|
||
|
const write = () => {
|
||
|
this._writeRequest(chunk, encoding, callback);
|
||
|
};
|
||
|
if (this._requestInitialized) {
|
||
|
write();
|
||
|
}
|
||
|
else {
|
||
|
this._jobs.push(write);
|
||
|
}
|
||
|
}
|
||
|
_final(callback) {
|
||
|
const endRequest = () => {
|
||
|
// We need to check if `this._request` is present,
|
||
|
// because it isn't when we use cache.
|
||
|
if (!this._request || this._request.destroyed) {
|
||
|
callback();
|
||
|
return;
|
||
|
}
|
||
|
this._request.end((error) => {
|
||
|
// The request has been destroyed before `_final` finished.
|
||
|
// See https://github.com/nodejs/node/issues/39356
|
||
|
if (this._request._writableState?.errored) {
|
||
|
return;
|
||
|
}
|
||
|
if (!error) {
|
||
|
this._bodySize = this._uploadedSize;
|
||
|
this.emit('uploadProgress', this.uploadProgress);
|
||
|
this._request.emit('upload-complete');
|
||
|
}
|
||
|
callback(error);
|
||
|
});
|
||
|
};
|
||
|
if (this._requestInitialized) {
|
||
|
endRequest();
|
||
|
}
|
||
|
else {
|
||
|
this._jobs.push(endRequest);
|
||
|
}
|
||
|
}
|
||
|
_destroy(error, callback) {
|
||
|
this._stopReading = true;
|
||
|
this.flush = async () => { };
|
||
|
// Prevent further retries
|
||
|
this._stopRetry();
|
||
|
this._cancelTimeouts();
|
||
|
this._removeListeners();
|
||
|
if (this.options) {
|
||
|
const { body } = this.options;
|
||
|
if (is.nodeStream(body)) {
|
||
|
body.destroy();
|
||
|
}
|
||
|
}
|
||
|
if (this._request) {
|
||
|
this._request.destroy();
|
||
|
}
|
||
|
if (error !== null && !is.undefined(error) && !(error instanceof RequestError$1)) {
|
||
|
error = new RequestError$1(error.message, error, this);
|
||
|
}
|
||
|
callback(error);
|
||
|
}
|
||
|
pipe(destination, options) {
|
||
|
if (destination instanceof ServerResponse) {
|
||
|
this._pipedServerResponses.add(destination);
|
||
|
}
|
||
|
return super.pipe(destination, options);
|
||
|
}
|
||
|
unpipe(destination) {
|
||
|
if (destination instanceof ServerResponse) {
|
||
|
this._pipedServerResponses.delete(destination);
|
||
|
}
|
||
|
super.unpipe(destination);
|
||
|
return this;
|
||
|
}
|
||
|
async _finalizeBody() {
|
||
|
const { options } = this;
|
||
|
const { headers } = options;
|
||
|
const isForm = !is.undefined(options.form);
|
||
|
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||
|
const isJSON = !is.undefined(options.json);
|
||
|
const isBody = !is.undefined(options.body);
|
||
|
const cannotHaveBody = methodsWithoutBody.has(options.method) && !(options.method === 'GET' && options.allowGetBody);
|
||
|
this._cannotHaveBody = cannotHaveBody;
|
||
|
if (isForm || isJSON || isBody) {
|
||
|
if (cannotHaveBody) {
|
||
|
throw new TypeError(`The \`${options.method}\` method cannot be used with a body`);
|
||
|
}
|
||
|
// Serialize body
|
||
|
const noContentType = !is.string(headers['content-type']);
|
||
|
if (isBody) {
|
||
|
// Body is spec-compliant FormData
|
||
|
if (isFormData$1(options.body)) {
|
||
|
const encoder = new FormDataEncoder(options.body);
|
||
|
if (noContentType) {
|
||
|
headers['content-type'] = encoder.headers['Content-Type'];
|
||
|
}
|
||
|
if ('Content-Length' in encoder.headers) {
|
||
|
headers['content-length'] = encoder.headers['Content-Length'];
|
||
|
}
|
||
|
options.body = encoder.encode();
|
||
|
}
|
||
|
// Special case for https://github.com/form-data/form-data
|
||
|
if (isFormData(options.body) && noContentType) {
|
||
|
headers['content-type'] = `multipart/form-data; boundary=${options.body.getBoundary()}`;
|
||
|
}
|
||
|
}
|
||
|
else if (isForm) {
|
||
|
if (noContentType) {
|
||
|
headers['content-type'] = 'application/x-www-form-urlencoded';
|
||
|
}
|
||
|
const { form } = options;
|
||
|
options.form = undefined;
|
||
|
options.body = (new URLSearchParams(form)).toString();
|
||
|
}
|
||
|
else {
|
||
|
if (noContentType) {
|
||
|
headers['content-type'] = 'application/json';
|
||
|
}
|
||
|
const { json } = options;
|
||
|
options.json = undefined;
|
||
|
options.body = options.stringifyJson(json);
|
||
|
}
|
||
|
const uploadBodySize = await getBodySize(options.body, options.headers);
|
||
|
// See https://tools.ietf.org/html/rfc7230#section-3.3.2
|
||
|
// A user agent SHOULD send a Content-Length in a request message when
|
||
|
// no Transfer-Encoding is sent and the request method defines a meaning
|
||
|
// for an enclosed payload body. For example, a Content-Length header
|
||
|
// field is normally sent in a POST request even when the value is 0
|
||
|
// (indicating an empty payload body). A user agent SHOULD NOT send a
|
||
|
// Content-Length header field when the request message does not contain
|
||
|
// a payload body and the method semantics do not anticipate such a
|
||
|
// body.
|
||
|
if (is.undefined(headers['content-length']) && is.undefined(headers['transfer-encoding']) && !cannotHaveBody && !is.undefined(uploadBodySize)) {
|
||
|
headers['content-length'] = String(uploadBodySize);
|
||
|
}
|
||
|
}
|
||
|
if (options.responseType === 'json' && !('accept' in options.headers)) {
|
||
|
options.headers.accept = 'application/json';
|
||
|
}
|
||
|
this._bodySize = Number(headers['content-length']) || undefined;
|
||
|
}
|
||
|
async _onResponseBase(response) {
|
||
|
// This will be called e.g. when using cache so we need to check if this request has been aborted.
|
||
|
if (this.isAborted) {
|
||
|
return;
|
||
|
}
|
||
|
const { options } = this;
|
||
|
const { url } = options;
|
||
|
this._nativeResponse = response;
|
||
|
if (options.decompress) {
|
||
|
response = decompressResponse$1(response);
|
||
|
}
|
||
|
const statusCode = response.statusCode;
|
||
|
const typedResponse = response;
|
||
|
typedResponse.statusMessage = typedResponse.statusMessage ?? http$4.STATUS_CODES[statusCode];
|
||
|
typedResponse.url = options.url.toString();
|
||
|
typedResponse.requestUrl = this.requestUrl;
|
||
|
typedResponse.redirectUrls = this.redirectUrls;
|
||
|
typedResponse.request = this;
|
||
|
typedResponse.isFromCache = this._nativeResponse.fromCache ?? false;
|
||
|
typedResponse.ip = this.ip;
|
||
|
typedResponse.retryCount = this.retryCount;
|
||
|
typedResponse.ok = isResponseOk(typedResponse);
|
||
|
this._isFromCache = typedResponse.isFromCache;
|
||
|
this._responseSize = Number(response.headers['content-length']) || undefined;
|
||
|
this.response = typedResponse;
|
||
|
response.once('end', () => {
|
||
|
this._responseSize = this._downloadedSize;
|
||
|
this.emit('downloadProgress', this.downloadProgress);
|
||
|
});
|
||
|
response.once('error', (error) => {
|
||
|
this._aborted = true;
|
||
|
// Force clean-up, because some packages don't do this.
|
||
|
// TODO: Fix decompress-response
|
||
|
response.destroy();
|
||
|
this._beforeError(new ReadError(error, this));
|
||
|
});
|
||
|
response.once('aborted', () => {
|
||
|
this._aborted = true;
|
||
|
this._beforeError(new ReadError({
|
||
|
name: 'Error',
|
||
|
message: 'The server aborted pending request',
|
||
|
code: 'ECONNRESET',
|
||
|
}, this));
|
||
|
});
|
||
|
this.emit('downloadProgress', this.downloadProgress);
|
||
|
const rawCookies = response.headers['set-cookie'];
|
||
|
if (is.object(options.cookieJar) && rawCookies) {
|
||
|
let promises = rawCookies.map(async (rawCookie) => options.cookieJar.setCookie(rawCookie, url.toString()));
|
||
|
if (options.ignoreInvalidCookies) {
|
||
|
promises = promises.map(async (promise) => {
|
||
|
try {
|
||
|
await promise;
|
||
|
}
|
||
|
catch { }
|
||
|
});
|
||
|
}
|
||
|
try {
|
||
|
await Promise.all(promises);
|
||
|
}
|
||
|
catch (error) {
|
||
|
this._beforeError(error);
|
||
|
return;
|
||
|
}
|
||
|
}
|
||
|
// The above is running a promise, therefore we need to check if this request has been aborted yet again.
|
||
|
if (this.isAborted) {
|
||
|
return;
|
||
|
}
|
||
|
if (options.followRedirect && response.headers.location && redirectCodes.has(statusCode)) {
|
||
|
// We're being redirected, we don't care about the response.
|
||
|
// It'd be best to abort the request, but we can't because
|
||
|
// we would have to sacrifice the TCP connection. We don't want that.
|
||
|
response.resume();
|
||
|
this._cancelTimeouts();
|
||
|
this._unproxyEvents();
|
||
|
if (this.redirectUrls.length >= options.maxRedirects) {
|
||
|
this._beforeError(new MaxRedirectsError(this));
|
||
|
return;
|
||
|
}
|
||
|
this._request = undefined;
|
||
|
const updatedOptions = new Options(undefined, undefined, this.options);
|
||
|
const serverRequestedGet = statusCode === 303 && updatedOptions.method !== 'GET' && updatedOptions.method !== 'HEAD';
|
||
|
const canRewrite = statusCode !== 307 && statusCode !== 308;
|
||
|
const userRequestedGet = updatedOptions.methodRewriting && canRewrite;
|
||
|
if (serverRequestedGet || userRequestedGet) {
|
||
|
updatedOptions.method = 'GET';
|
||
|
updatedOptions.body = undefined;
|
||
|
updatedOptions.json = undefined;
|
||
|
updatedOptions.form = undefined;
|
||
|
delete updatedOptions.headers['content-length'];
|
||
|
}
|
||
|
try {
|
||
|
// We need this in order to support UTF-8
|
||
|
const redirectBuffer = Buffer$1.from(response.headers.location, 'binary').toString();
|
||
|
const redirectUrl = new URL$6(redirectBuffer, url);
|
||
|
if (!isUnixSocketURL(url) && isUnixSocketURL(redirectUrl)) {
|
||
|
this._beforeError(new RequestError$1('Cannot redirect to UNIX socket', {}, this));
|
||
|
return;
|
||
|
}
|
||
|
// Redirecting to a different site, clear sensitive data.
|
||
|
if (redirectUrl.hostname !== url.hostname || redirectUrl.port !== url.port) {
|
||
|
if ('host' in updatedOptions.headers) {
|
||
|
delete updatedOptions.headers.host;
|
||
|
}
|
||
|
if ('cookie' in updatedOptions.headers) {
|
||
|
delete updatedOptions.headers.cookie;
|
||
|
}
|
||
|
if ('authorization' in updatedOptions.headers) {
|
||
|
delete updatedOptions.headers.authorization;
|
||
|
}
|
||
|
if (updatedOptions.username || updatedOptions.password) {
|
||
|
updatedOptions.username = '';
|
||
|
updatedOptions.password = '';
|
||
|
}
|
||
|
}
|
||
|
else {
|
||
|
redirectUrl.username = updatedOptions.username;
|
||
|
redirectUrl.password = updatedOptions.password;
|
||
|
}
|
||
|
this.redirectUrls.push(redirectUrl);
|
||
|
updatedOptions.prefixUrl = '';
|
||
|
updatedOptions.url = redirectUrl;
|
||
|
for (const hook of updatedOptions.hooks.beforeRedirect) {
|
||
|
// eslint-disable-next-line no-await-in-loop
|
||
|
await hook(updatedOptions, typedResponse);
|
||
|
}
|
||
|
this.emit('redirect', updatedOptions, typedResponse);
|
||
|
this.options = updatedOptions;
|
||
|
await this._makeRequest();
|
||
|
}
|
||
|
catch (error) {
|
||
|
this._beforeError(error);
|
||
|
return;
|
||
|
}
|
||
|
return;
|
||
|
}
|
||
|
// `HTTPError`s always have `error.response.body` defined.
|
||
|
// Therefore we cannot retry if `options.throwHttpErrors` is false.
|
||
|
// On the last retry, if `options.throwHttpErrors` is false, we would need to return the body,
|
||
|
// but that wouldn't be possible since the body would be already read in `error.response.body`.
|
||
|
if (options.isStream && options.throwHttpErrors && !isResponseOk(typedResponse)) {
|
||
|
this._beforeError(new HTTPError(typedResponse));
|
||
|
return;
|
||
|
}
|
||
|
response.on('readable', () => {
|
||
|
if (this._triggerRead) {
|
||
|
this._read();
|
||
|
}
|
||
|
});
|
||
|
this.on('resume', () => {
|
||
|
response.resume();
|
||
|
});
|
||
|
this.on('pause', () => {
|
||
|
response.pause();
|
||
|
});
|
||
|
response.once('end', () => {
|
||
|
this.push(null);
|
||
|
});
|
||
|
if (this._noPipe) {
|
||
|
const success = await this._setRawBody();
|
||
|
if (success) {
|
||
|
this.emit('response', response);
|
||
|
}
|
||
|
return;
|
||
|
}
|
||
|
this.emit('response', response);
|
||
|
for (const destination of this._pipedServerResponses) {
|
||
|
if (destination.headersSent) {
|
||
|
continue;
|
||
|
}
|
||
|
// eslint-disable-next-line guard-for-in
|
||
|
for (const key in response.headers) {
|
||
|
const isAllowed = options.decompress ? key !== 'content-encoding' : true;
|
||
|
const value = response.headers[key];
|
||
|
if (isAllowed) {
|
||
|
destination.setHeader(key, value);
|
||
|
}
|
||
|
}
|
||
|
destination.statusCode = statusCode;
|
||
|
}
|
||
|
}
|
||
|
async _setRawBody(from = this) {
|
||
|
if (from.readableEnded) {
|
||
|
return false;
|
||
|
}
|
||
|
try {
|
||
|
// Errors are emitted via the `error` event
|
||
|
const rawBody = await buffer(from);
|
||
|
// On retry Request is destroyed with no error, therefore the above will successfully resolve.
|
||
|
// So in order to check if this was really successful, we need to check if it has been properly ended.
|
||
|
if (!this.isAborted) {
|
||
|
this.response.rawBody = rawBody;
|
||
|
return true;
|
||
|
}
|
||
|
}
|
||
|
catch { }
|
||
|
return false;
|
||
|
}
|
||
|
async _onResponse(response) {
|
||
|
try {
|
||
|
await this._onResponseBase(response);
|
||
|
}
|
||
|
catch (error) {
|
||
|
/* istanbul ignore next: better safe than sorry */
|
||
|
this._beforeError(error);
|
||
|
}
|
||
|
}
|
||
|
_onRequest(request) {
|
||
|
const { options } = this;
|
||
|
const { timeout, url } = options;
|
||
|
timer$1(request);
|
||
|
if (this.options.http2) {
|
||
|
// Unset stream timeout, as the `timeout` option was used only for connection timeout.
|
||
|
request.setTimeout(0);
|
||
|
}
|
||
|
this._cancelTimeouts = timedOut(request, timeout, url);
|
||
|
const responseEventName = options.cache ? 'cacheableResponse' : 'response';
|
||
|
request.once(responseEventName, (response) => {
|
||
|
void this._onResponse(response);
|
||
|
});
|
||
|
request.once('error', (error) => {
|
||
|
this._aborted = true;
|
||
|
// Force clean-up, because some packages (e.g. nock) don't do this.
|
||
|
request.destroy();
|
||
|
error = error instanceof TimeoutError ? new TimeoutError$1(error, this.timings, this) : new RequestError$1(error.message, error, this);
|
||
|
this._beforeError(error);
|
||
|
});
|
||
|
this._unproxyEvents = proxyEvents$2(request, this, proxiedRequestEvents$1);
|
||
|
this._request = request;
|
||
|
this.emit('uploadProgress', this.uploadProgress);
|
||
|
this._sendBody();
|
||
|
this.emit('request', request);
|
||
|
}
|
||
|
async _asyncWrite(chunk) {
|
||
|
return new Promise((resolve, reject) => {
|
||
|
super.write(chunk, error => {
|
||
|
if (error) {
|
||
|
reject(error);
|
||
|
return;
|
||
|
}
|
||
|
resolve();
|
||
|
});
|
||
|
});
|
||
|
}
|
||
|
_sendBody() {
|
||
|
// Send body
|
||
|
const { body } = this.options;
|
||
|
const currentRequest = this.redirectUrls.length === 0 ? this : this._request ?? this;
|
||
|
if (is.nodeStream(body)) {
|
||
|
body.pipe(currentRequest);
|
||
|
}
|
||
|
else if (is.generator(body) || is.asyncGenerator(body)) {
|
||
|
(async () => {
|
||
|
try {
|
||
|
for await (const chunk of body) {
|
||
|
await this._asyncWrite(chunk);
|
||
|
}
|
||
|
super.end();
|
||
|
}
|
||
|
catch (error) {
|
||
|
this._beforeError(error);
|
||
|
}
|
||
|
})();
|
||
|
}
|
||
|
else if (!is.undefined(body)) {
|
||
|
this._writeRequest(body, undefined, () => { });
|
||
|
currentRequest.end();
|
||
|
}
|
||
|
else if (this._cannotHaveBody || this._noPipe) {
|
||
|
currentRequest.end();
|
||
|
}
|
||
|
}
|
||
|
_prepareCache(cache) {
|
||
|
if (!cacheableStore.has(cache)) {
|
||
|
const cacheableRequest = new CacheableRequest$1(((requestOptions, handler) => {
|
||
|
const result = requestOptions._request(requestOptions, handler);
|
||
|
// TODO: remove this when `cacheable-request` supports async request functions.
|
||
|
if (is.promise(result)) {
|
||
|
// We only need to implement the error handler in order to support HTTP2 caching.
|
||
|
// The result will be a promise anyway.
|
||
|
// @ts-expect-error ignore
|
||
|
// eslint-disable-next-line @typescript-eslint/promise-function-async
|
||
|
result.once = (event, handler) => {
|
||
|
if (event === 'error') {
|
||
|
(async () => {
|
||
|
try {
|
||
|
await result;
|
||
|
}
|
||
|
catch (error) {
|
||
|
handler(error);
|
||
|
}
|
||
|
})();
|
||
|
}
|
||
|
else if (event === 'abort') {
|
||
|
// The empty catch is needed here in case
// it rejects before it's `await`ed in `_makeRequest`.
|
||
|
(async () => {
|
||
|
try {
|
||
|
const request = (await result);
|
||
|
request.once('abort', handler);
|
||
|
}
|
||
|
catch { }
|
||
|
})();
|
||
|
}
|
||
|
else {
|
||
|
                            /* istanbul ignore next: safety check */
                            throw new Error(`Unknown HTTP2 promise event: ${event}`);
                        }
                        return result;
                    };
                }
                return result;
            }), cache);
            cacheableStore.set(cache, cacheableRequest.request());
        }
    }
    async _createCacheableRequest(url, options) {
        return new Promise((resolve, reject) => {
            // TODO: Remove `utils/url-to-options.ts` when `cacheable-request` is fixed
            Object.assign(options, urlToOptions(url));
            let request;
            // TODO: Fix `cacheable-response`. This is ugly.
            const cacheRequest = cacheableStore.get(options.cache)(options, async (response) => {
                response._readableState.autoDestroy = false;
                if (request) {
                    const fix = () => {
                        if (response.req) {
                            response.complete = response.req.res.complete;
                        }
                    };
                    response.prependOnceListener('end', fix);
                    fix();
                    (await request).emit('cacheableResponse', response);
                }
                resolve(response);
            });
            cacheRequest.once('error', reject);
            cacheRequest.once('request', async (requestOrPromise) => {
                request = requestOrPromise;
                resolve(request);
            });
        });
    }
    async _makeRequest() {
        const { options } = this;
        const { headers, username, password } = options;
        const cookieJar = options.cookieJar;
        for (const key in headers) {
            if (is.undefined(headers[key])) {
                // eslint-disable-next-line @typescript-eslint/no-dynamic-delete
                delete headers[key];
            }
            else if (is.null_(headers[key])) {
                throw new TypeError(`Use \`undefined\` instead of \`null\` to delete the \`${key}\` header`);
            }
        }
        if (options.decompress && is.undefined(headers['accept-encoding'])) {
            headers['accept-encoding'] = supportsBrotli ? 'gzip, deflate, br' : 'gzip, deflate';
        }
        if (username || password) {
            const credentials = Buffer$1.from(`${username}:${password}`).toString('base64');
            headers.authorization = `Basic ${credentials}`;
        }
        // Set cookies
        if (cookieJar) {
            const cookieString = await cookieJar.getCookieString(options.url.toString());
            if (is.nonEmptyString(cookieString)) {
                headers.cookie = cookieString;
            }
        }
        // Reset `prefixUrl`
        options.prefixUrl = '';
        let request;
        for (const hook of options.hooks.beforeRequest) {
            // eslint-disable-next-line no-await-in-loop
            const result = await hook(options);
            if (!is.undefined(result)) {
                // @ts-expect-error Skip the type mismatch to support abstract responses
                request = () => result;
                break;
            }
        }
        if (!request) {
            request = options.getRequestFunction();
        }
        const url = options.url;
        this._requestOptions = options.createNativeRequestOptions();
        if (options.cache) {
            this._requestOptions._request = request;
            this._requestOptions.cache = options.cache;
            this._requestOptions.body = options.body;
            this._prepareCache(options.cache);
        }
        // Cache support
        const fn = options.cache ? this._createCacheableRequest : request;
        try {
            // We can't do `await fn(...)`,
            // because stream `error` event can be emitted before `Promise.resolve()`.
            let requestOrResponse = fn(url, this._requestOptions);
            if (is.promise(requestOrResponse)) {
                requestOrResponse = await requestOrResponse;
            }
            // Fallback
            if (is.undefined(requestOrResponse)) {
                requestOrResponse = options.getFallbackRequestFunction()(url, this._requestOptions);
                if (is.promise(requestOrResponse)) {
                    requestOrResponse = await requestOrResponse;
                }
            }
            if (isClientRequest(requestOrResponse)) {
                this._onRequest(requestOrResponse);
            }
            else if (this.writable) {
                this.once('finish', () => {
                    void this._onResponse(requestOrResponse);
                });
                this._sendBody();
            }
            else {
                void this._onResponse(requestOrResponse);
            }
        }
        catch (error) {
            if (error instanceof CacheError) {
                throw new CacheError$1(error, this);
            }
            throw error;
        }
    }
    async _error(error) {
        try {
            if (error instanceof HTTPError && !this.options.throwHttpErrors) {
                // This branch can be reached only when using the Promise API
                // Skip calling the hooks on purpose.
                // See https://github.com/sindresorhus/got/issues/2103
            }
            else {
                for (const hook of this.options.hooks.beforeError) {
                    // eslint-disable-next-line no-await-in-loop
                    error = await hook(error);
                }
            }
        }
        catch (error_) {
            error = new RequestError$1(error_.message, error_, this);
        }
        this.destroy(error);
    }
    _writeRequest(chunk, encoding, callback) {
        if (!this._request || this._request.destroyed) {
            // Probably the `ClientRequest` instance will throw
            return;
        }
        this._request.write(chunk, encoding, (error) => {
            // The `!destroyed` check is required to prevent `uploadProgress` being emitted after the stream was destroyed
            if (!error && !this._request.destroyed) {
                this._uploadedSize += Buffer$1.byteLength(chunk, encoding);
                const progress = this.uploadProgress;
                if (progress.percent < 1) {
                    this.emit('uploadProgress', progress);
                }
            }
            callback(error);
        });
    }
    /**
    The remote IP address.
    */
    get ip() {
        return this.socket?.remoteAddress;
    }
    /**
    Indicates whether the request has been aborted or not.
    */
    get isAborted() {
        return this._aborted;
    }
    get socket() {
        return this._request?.socket ?? undefined;
    }
    /**
    Progress event for downloading (receiving a response).
    */
    get downloadProgress() {
        let percent;
        if (this._responseSize) {
            percent = this._downloadedSize / this._responseSize;
        }
        else if (this._responseSize === this._downloadedSize) {
            percent = 1;
        }
        else {
            percent = 0;
        }
        return {
            percent,
            transferred: this._downloadedSize,
            total: this._responseSize,
        };
    }
    /**
    Progress event for uploading (sending a request).
    */
    get uploadProgress() {
        let percent;
        if (this._bodySize) {
            percent = this._uploadedSize / this._bodySize;
        }
        else if (this._bodySize === this._uploadedSize) {
            percent = 1;
        }
        else {
            percent = 0;
        }
        return {
            percent,
            transferred: this._uploadedSize,
            total: this._bodySize,
        };
    }
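    // Both progress getters above return `{ percent, transferred, total }`, where `total` is
    // `undefined` while the size is unknown. Callers would typically consume these through the
    // proxied 'uploadProgress'/'downloadProgress' events rather than by polling the getters,
    // e.g. (illustrative only): got.stream(someUrl).on('downloadProgress', p => log(p.percent)).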
    /**
    The object contains the following properties:

    - `start` - Time when the request started.
    - `socket` - Time when a socket was assigned to the request.
    - `lookup` - Time when the DNS lookup finished.
    - `connect` - Time when the socket successfully connected.
    - `secureConnect` - Time when the socket securely connected.
    - `upload` - Time when the request finished uploading.
    - `response` - Time when the request fired `response` event.
    - `end` - Time when the response fired `end` event.
    - `error` - Time when the request fired `error` event.
    - `abort` - Time when the request fired `abort` event.
    - `phases`
        - `wait` - `timings.socket - timings.start`
        - `dns` - `timings.lookup - timings.socket`
        - `tcp` - `timings.connect - timings.lookup`
        - `tls` - `timings.secureConnect - timings.connect`
        - `request` - `timings.upload - (timings.secureConnect || timings.connect)`
        - `firstByte` - `timings.response - timings.upload`
        - `download` - `timings.end - timings.response`
        - `total` - `(timings.end || timings.error || timings.abort) - timings.start`

    If something has not been measured yet, it will be `undefined`.

    __Note__: The time is a `number` representing the milliseconds elapsed since the UNIX epoch.
    */
    get timings() {
        return this._request?.timings;
    }
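    // The timings live on the underlying native request (`this._request.timings`), so this getter
    // returns `undefined` until a request has actually been issued; once the request ends,
    // `timings.phases.total` (per the formulas listed above) gives the overall elapsed time.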
    /**
    Whether the response was retrieved from the cache.
    */
    get isFromCache() {
        return this._isFromCache;
    }
    get reusedSocket() {
        return this._request?.reusedSocket;
    }
}

/**
An error to be thrown when the request is aborted with `.cancel()`.
*/
class CancelError extends RequestError$1 {
    constructor(request) {
        super('Promise was canceled', {}, request);
        this.name = 'CancelError';
        this.code = 'ERR_CANCELED';
    }
    /**
    Whether the promise is canceled.
    */
    get isCanceled() {
        return true;
    }
}
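// `CancelError` is what the cancelable promise built in `asPromise()` below rejects with when
// `.cancel()` is called; the always-true `isCanceled` getter and the 'ERR_CANCELED' code let
// callers tell cancellation apart from transport or HTTP errors.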

const proxiedRequestEvents = [
    'request',
    'response',
    'redirect',
    'uploadProgress',
    'downloadProgress',
];
function asPromise(firstRequest) {
    let globalRequest;
    let globalResponse;
    let normalizedOptions;
    const emitter = new EventEmitter$3();
    const promise = new PCancelable((resolve, reject, onCancel) => {
        onCancel(() => {
            globalRequest.destroy();
        });
        onCancel.shouldReject = false;
        onCancel(() => {
            reject(new CancelError(globalRequest));
        });
        const makeRequest = (retryCount) => {
            // Errors when a new request is made after the promise settles.
            // Used to detect a race condition.
            // See https://github.com/sindresorhus/got/issues/1489
            onCancel(() => { });
            const request = firstRequest ?? new Request(undefined, undefined, normalizedOptions);
            request.retryCount = retryCount;
            request._noPipe = true;
            globalRequest = request;
            request.once('response', async (response) => {
                // Parse body
                const contentEncoding = (response.headers['content-encoding'] ?? '').toLowerCase();
                const isCompressed = contentEncoding === 'gzip' || contentEncoding === 'deflate' || contentEncoding === 'br';
                const { options } = request;
                if (isCompressed && !options.decompress) {
                    response.body = response.rawBody;
                }
                else {
                    try {
                        response.body = parseBody(response, options.responseType, options.parseJson, options.encoding);
                    }
                    catch (error) {
                        // Fall back to `utf8`
                        response.body = response.rawBody.toString();
                        if (isResponseOk(response)) {
                            request._beforeError(error);
                            return;
                        }
                    }
                }
                try {
                    const hooks = options.hooks.afterResponse;
                    for (const [index, hook] of hooks.entries()) {
                        // @ts-expect-error TS doesn't notice that CancelableRequest is a Promise
                        // eslint-disable-next-line no-await-in-loop
                        response = await hook(response, async (updatedOptions) => {
                            options.merge(updatedOptions);
                            options.prefixUrl = '';
                            if (updatedOptions.url) {
                                options.url = updatedOptions.url;
                            }
                            // Remove any further hooks for that request, because we'll call them anyway.
                            // The loop continues. We don't want duplicates (asPromise recursion).
                            options.hooks.afterResponse = options.hooks.afterResponse.slice(0, index);
                            throw new RetryError(request);
                        });
                        if (!(is.object(response) && is.number(response.statusCode) && !is.nullOrUndefined(response.body))) {
                            throw new TypeError('The `afterResponse` hook returned an invalid value');
                        }
                    }
                }
                catch (error) {
                    request._beforeError(error);
                    return;
                }
                globalResponse = response;
                if (!isResponseOk(response)) {
                    request._beforeError(new HTTPError(response));
                    return;
                }
                request.destroy();
                resolve(request.options.resolveBodyOnly ? response.body : response);
            });
            const onError = (error) => {
                if (promise.isCanceled) {
                    return;
                }
                const { options } = request;
                if (error instanceof HTTPError && !options.throwHttpErrors) {
                    const { response } = error;
                    request.destroy();
                    resolve(request.options.resolveBodyOnly ? response.body : response);
                    return;
                }
                reject(error);
            };
            request.once('error', onError);
            const previousBody = request.options?.body;
            request.once('retry', (newRetryCount, error) => {
                firstRequest = undefined;
                const newBody = request.options.body;
                if (previousBody === newBody && is.nodeStream(newBody)) {
                    error.message = 'Cannot retry with consumed body stream';
                    onError(error);
                    return;
                }
                // This is needed! We need to reuse `request.options` because they can get modified!
                // For example, by calling `promise.json()`.
                normalizedOptions = request.options;
                makeRequest(newRetryCount);
            });
            proxyEvents$2(request, emitter, proxiedRequestEvents);
            if (is.undefined(firstRequest)) {
                void request.flush();
            }
        };
        makeRequest(0);
    });
    promise.on = (event, fn) => {
        emitter.on(event, fn);
        return promise;
    };
    promise.off = (event, fn) => {
        emitter.off(event, fn);
        return promise;
    };
    const shortcut = (responseType) => {
        const newPromise = (async () => {
            // Wait until downloading has ended
            await promise;
            const { options } = globalResponse.request;
            return parseBody(globalResponse, responseType, options.parseJson, options.encoding);
        })();
        // eslint-disable-next-line @typescript-eslint/no-floating-promises
        Object.defineProperties(newPromise, Object.getOwnPropertyDescriptors(promise));
        return newPromise;
    };
    promise.json = () => {
        if (globalRequest.options) {
            const { headers } = globalRequest.options;
            if (!globalRequest.writableFinished && !('accept' in headers)) {
                headers.accept = 'application/json';
            }
        }
        return shortcut('json');
    };
    promise.buffer = () => shortcut('buffer');
    promise.text = () => shortcut('text');
    return promise;
}
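// Illustrative use of the promise produced by `asPromise()` (not executed here): awaiting
// `asPromise(request).json()` waits for the request to settle and then re-parses the raw body
// via `parseBody` with the 'json' response type; `.buffer()` and `.text()` work the same way
// with their respective types, and `.json()` additionally sets an `accept: application/json`
// header when the request has not finished writing yet.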

// The `delay` package weighs 10KB (!)
const delay = async (ms) => new Promise(resolve => {
    setTimeout(resolve, ms);
});
const isGotInstance = (value) => is.function_(value);
const aliases = [
    'get',
    'post',
    'put',
    'patch',
    'head',
    'delete',
];
const create = (defaults) => {
    defaults = {
        options: new Options(undefined, undefined, defaults.options),
        handlers: [...defaults.handlers],
        mutableDefaults: defaults.mutableDefaults,
    };
    Object.defineProperty(defaults, 'mutableDefaults', {
        enumerable: true,
        configurable: false,
        writable: false,
    });
    // Got interface
    const got = ((url, options, defaultOptions = defaults.options) => {
        const request = new Request(url, options, defaultOptions);
        let promise;
        const lastHandler = (normalized) => {
            // Note: `options` is `undefined` when `new Options(...)` fails
            request.options = normalized;
            request._noPipe = !normalized.isStream;
            void request.flush();
            if (normalized.isStream) {
                return request;
            }
            if (!promise) {
                promise = asPromise(request);
            }
            return promise;
        };
        let iteration = 0;
        const iterateHandlers = (newOptions) => {
            const handler = defaults.handlers[iteration++] ?? lastHandler;
            const result = handler(newOptions, iterateHandlers);
            if (is.promise(result) && !request.options.isStream) {
                if (!promise) {
                    promise = asPromise(request);
                }
                if (result !== promise) {
                    const descriptors = Object.getOwnPropertyDescriptors(promise);
                    for (const key in descriptors) {
                        if (key in result) {
                            // eslint-disable-next-line @typescript-eslint/no-dynamic-delete
                            delete descriptors[key];
                        }
                    }
                    // eslint-disable-next-line @typescript-eslint/no-floating-promises
                    Object.defineProperties(result, descriptors);
                    result.cancel = promise.cancel;
                }
            }
            return result;
        };
        return iterateHandlers(request.options);
    });
    got.extend = (...instancesOrOptions) => {
        const options = new Options(undefined, undefined, defaults.options);
        const handlers = [...defaults.handlers];
        let mutableDefaults;
        for (const value of instancesOrOptions) {
            if (isGotInstance(value)) {
                options.merge(value.defaults.options);
                handlers.push(...value.defaults.handlers);
                mutableDefaults = value.defaults.mutableDefaults;
            }
            else {
                options.merge(value);
                if (value.handlers) {
                    handlers.push(...value.handlers);
                }
                mutableDefaults = value.mutableDefaults;
            }
        }
        return create({
            options,
            handlers,
            mutableDefaults: Boolean(mutableDefaults),
        });
    };
    // Pagination
    const paginateEach = (async function* (url, options) {
        let normalizedOptions = new Options(url, options, defaults.options);
        normalizedOptions.resolveBodyOnly = false;
        const { pagination } = normalizedOptions;
        assert$1.function_(pagination.transform);
        assert$1.function_(pagination.shouldContinue);
        assert$1.function_(pagination.filter);
        assert$1.function_(pagination.paginate);
        assert$1.number(pagination.countLimit);
        assert$1.number(pagination.requestLimit);
        assert$1.number(pagination.backoff);
        const allItems = [];
        let { countLimit } = pagination;
        let numberOfRequests = 0;
        while (numberOfRequests < pagination.requestLimit) {
            if (numberOfRequests !== 0) {
                // eslint-disable-next-line no-await-in-loop
                await delay(pagination.backoff);
            }
            // eslint-disable-next-line no-await-in-loop
            const response = (await got(undefined, undefined, normalizedOptions));
            // eslint-disable-next-line no-await-in-loop
            const parsed = await pagination.transform(response);
            const currentItems = [];
            assert$1.array(parsed);
            for (const item of parsed) {
                if (pagination.filter({ item, currentItems, allItems })) {
                    if (!pagination.shouldContinue({ item, currentItems, allItems })) {
                        return;
                    }
                    yield item;
                    if (pagination.stackAllItems) {
                        allItems.push(item);
                    }
                    currentItems.push(item);
                    if (--countLimit <= 0) {
                        return;
                    }
                }
            }
            const optionsToMerge = pagination.paginate({
                response,
                currentItems,
                allItems,
            });
            if (optionsToMerge === false) {
                return;
            }
            if (optionsToMerge === response.request.options) {
                normalizedOptions = response.request.options;
            }
            else {
                normalizedOptions.merge(optionsToMerge);
                assert$1.any([is.urlInstance, is.undefined], optionsToMerge.url);
                if (optionsToMerge.url !== undefined) {
                    normalizedOptions.prefixUrl = '';
                    normalizedOptions.url = optionsToMerge.url;
                }
            }
            numberOfRequests++;
        }
    });
    got.paginate = paginateEach;
    got.paginate.all = (async (url, options) => {
        const results = [];
        for await (const item of paginateEach(url, options)) {
            results.push(item);
        }
        return results;
    });
    // For those who like very descriptive names
    got.paginate.each = paginateEach;
    // Stream API
    got.stream = ((url, options) => got(url, { ...options, isStream: true }));
    // Shortcuts
    for (const method of aliases) {
        got[method] = ((url, options) => got(url, { ...options, method }));
        got.stream[method] = ((url, options) => got(url, { ...options, method, isStream: true }));
    }
    if (!defaults.mutableDefaults) {
        Object.freeze(defaults.handlers);
        defaults.options.freeze();
    }
    Object.defineProperty(got, 'defaults', {
        value: defaults,
        writable: false,
        configurable: false,
        enumerable: true,
    });
    return got;
};
var create$1 = create;

const defaults = {
    options: new Options(),
    handlers: [],
    mutableDefaults: false,
};
const got = create$1(defaults);
var got$1 = got;

// Main
const ENV_CACHE_DAEMONDIR = 'MAGIC_NIX_CACHE_DAEMONDIR';
function getCacherUrl() {
    const runnerArch = process.env.RUNNER_ARCH;
    const runnerOs = process.env.RUNNER_OS;
    const binarySuffix = `${runnerArch}-${runnerOs}`;
    const urlPrefix = `https://install.determinate.systems/magic-nix-cache`;
    if (coreExports.getInput('source-url')) {
        return coreExports.getInput('source-url');
    }
    if (coreExports.getInput('source-tag')) {
        return `${urlPrefix}/tag/${coreExports.getInput('source-tag')}/${binarySuffix}`;
    }
    if (coreExports.getInput('source-pr')) {
        return `${urlPrefix}/pr/${coreExports.getInput('source-pr')}/${binarySuffix}`;
    }
    if (coreExports.getInput('source-branch')) {
        return `${urlPrefix}/branch/${coreExports.getInput('source-branch')}/${binarySuffix}`;
    }
    if (coreExports.getInput('source-revision')) {
        return `${urlPrefix}/rev/${coreExports.getInput('source-revision')}/${binarySuffix}`;
    }
    return `${urlPrefix}/latest/${binarySuffix}`;
}
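// Example of the URL shape produced above (assuming a GitHub-hosted x86_64 Linux runner, where
// RUNNER_ARCH is "X64" and RUNNER_OS is "Linux") when no source-* input is set:
//   https://install.determinate.systems/magic-nix-cache/latest/X64-Linux
// and with a hypothetical `source-tag: v0.1.0` input:
//   https://install.determinate.systems/magic-nix-cache/tag/v0.1.0/X64-Linux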
async function fetchAutoCacher(destination) {
    const stream = createWriteStream(destination, {
        encoding: "binary",
        mode: 0o755,
    });
    const binary_url = getCacherUrl();
    coreExports.debug(`Fetching the Magic Nix Cache from ${binary_url}`);
    return pipeline(got$1.stream(binary_url), stream);
}
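// `fetchAutoCacher` streams the binary straight to `destination` with mode 0o755 via
// `pipeline(got.stream(...), createWriteStream(...))`, so a failure on either the download side
// or the write side rejects the returned promise.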
async function setUpAutoCache() {
    const tmpdir = process.env['RUNNER_TEMP'] || os$2.tmpdir();
    const required_env = ['ACTIONS_CACHE_URL', 'ACTIONS_RUNTIME_URL', 'ACTIONS_RUNTIME_TOKEN'];
    var anyMissing = false;
    for (const n of required_env) {
        if (!process.env.hasOwnProperty(n)) {
            anyMissing = true;
            coreExports.warning(`Disabling automatic caching since required environment ${n} isn't available`);
        }
    }
    if (anyMissing) {
        return;
    }
    coreExports.debug(`GitHub Action Cache URL: ${process.env['ACTIONS_CACHE_URL']}`);
    const daemonDir = await fs$2.mkdtemp(path$1.join(tmpdir, 'magic-nix-cache-'));
    var daemonBin;
    if (coreExports.getInput('source-binary')) {
        daemonBin = coreExports.getInput('source-binary');
    }
    else {
        daemonBin = `${daemonDir}/magic-nix-cache`;
        await fetchAutoCacher(daemonBin);
    }
    var runEnv;
    if (coreExports.isDebug()) {
        runEnv = {
            RUST_LOG: "trace,nix_actions_cache=debug,gha_cache=debug",
            RUST_BACKTRACE: "full",
            ...process.env
        };
    }
    else {
        runEnv = process.env;
    }
    const output = openSync(`${daemonDir}/parent.log`, 'a');
    const launch = spawn(daemonBin, [
        '--daemon-dir', daemonDir,
        '--listen', coreExports.getInput('listen'),
        '--upstream', coreExports.getInput('upstream-cache'),
        '--diagnostic-endpoint', coreExports.getInput('diagnostic-endpoint')
    ], {
        stdio: ['ignore', output, output],
        env: runEnv
    });
    await new Promise((resolve, reject) => {
        launch.on('exit', (code, signal) => {
            if (signal) {
                reject(new Error(`Daemon was killed by signal ${signal}`));
            }
            else if (code) {
                reject(new Error(`Daemon exited with code ${code}`));
            }
            else {
                resolve();
            }
        });
    });
    await fs$2.mkdir(`${process.env["HOME"]}/.config/nix`, { recursive: true });
    const nixConf = openSync(`${process.env["HOME"]}/.config/nix/nix.conf`, 'a');
    writeSync(nixConf, `${"\n"}extra-substituters = http://${coreExports.getInput('listen')}/?trusted=1&compression=zstd&parallel-compression=true${"\n"}`);
    close(nixConf);
    coreExports.debug('Launched Magic Nix Cache');
    coreExports.exportVariable(ENV_CACHE_DAEMONDIR, daemonDir);
}
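// Summary of `setUpAutoCache`: it bails out (with a warning) if the Actions cache/runtime
// environment variables are missing; otherwise it obtains the magic-nix-cache binary (from the
// `source-binary` input or by downloading it), spawns it as a daemon logging to
// `<daemonDir>/parent.log`, waits for the launcher process to exit cleanly, appends an
// `extra-substituters` line pointing at the local listener to ~/.config/nix/nix.conf, and
// exports MAGIC_NIX_CACHE_DAEMONDIR so the post step can find the daemon directory.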
async function notifyAutoCache() {
    const daemonDir = process.env[ENV_CACHE_DAEMONDIR];
    if (!daemonDir) {
        return;
    }
    const res = await got$1.post(`http://${coreExports.getInput('listen')}/api/workflow-start`).json();
    coreExports.debug(res);
}
async function tearDownAutoCache() {
    const daemonDir = process.env[ENV_CACHE_DAEMONDIR];
    if (!daemonDir) {
        coreExports.debug('magic-nix-cache not started - Skipping');
        return;
    }
    const pidFile = path$1.join(daemonDir, 'daemon.pid');
    const pid = parseInt(await fs$2.readFile(pidFile, { encoding: 'ascii' }));
    coreExports.debug(`found daemon pid: ${pid}`);
    if (!pid) {
        throw new Error("magic-nix-cache did not start successfully");
    }
    const log = new Tail_1(path$1.join(daemonDir, 'daemon.log'));
    coreExports.debug(`tailing daemon.log...`);
    log.on('line', (line) => {
        coreExports.debug(`got a log line`);
        coreExports.info(line);
    });
    try {
        coreExports.debug(`about to post to localhost`);
        const res = await got$1.post(`http://${coreExports.getInput('listen')}/api/workflow-finish`).json();
        coreExports.debug(`back from post`);
        coreExports.debug(res);
    }
    finally {
        await setTimeout$1(5000);
        coreExports.debug(`unwatching the daemon log`);
        log.unwatch();
    }
    coreExports.debug(`killing`);
    try {
        process.kill(pid, 'SIGTERM');
    }
    catch (e) {
        if (e.code !== 'ESRCH') {
            throw e;
        }
    }
}
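// Summary of `tearDownAutoCache`: it reads the daemon PID from `<daemonDir>/daemon.pid`, tails
// `daemon.log` into the Actions log, POSTs to the daemon's /api/workflow-finish endpoint, waits
// five seconds before unwatching the log, and finally sends SIGTERM to the daemon (ignoring
// ESRCH if it has already exited).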
const isPost = !!process.env['STATE_isPost'];
try {
    if (!isPost) {
        coreExports.saveState('isPost', 'true');
        await setUpAutoCache();
        await notifyAutoCache();
    }
    else {
        await tearDownAutoCache();
    }
}
catch (e) {
    coreExports.info(`got an exception:`);
    coreExports.info(e);
    if (!isPost) {
        coreExports.setFailed(e.message);
        throw e;
    }
    else {
        coreExports.info("not considering this a failure: finishing the upload is optional, anyway.");
        process.exit();
    }
}
coreExports.debug(`rip`);