From a287a5b2eebb095b3767857364f4420bf86c7745 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 16 Feb 2024 18:01:50 +0000 Subject: [PATCH 01/11] Bump undici from 5.26.4 to 5.28.3 Bumps [undici](https://github.com/nodejs/undici) from 5.26.4 to 5.28.3. - [Release notes](https://github.com/nodejs/undici/releases) - [Commits](https://github.com/nodejs/undici/compare/v5.26.4...v5.28.3) --- updated-dependencies: - dependency-name: undici dependency-type: indirect ... Signed-off-by: dependabot[bot] --- package-lock.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/package-lock.json b/package-lock.json index 2472af9..15bb0aa 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1364,9 +1364,9 @@ } }, "node_modules/undici": { - "version": "5.26.4", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.26.4.tgz", - "integrity": "sha512-OG+QOf0fTLtazL9P9X7yqWxQ+Z0395Wk6DSkyTxtaq3wQEjIroVe7Y4asCX/vcCxYpNGMnwz8F0qbRYUoaQVMw==", + "version": "5.28.3", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.3.tgz", + "integrity": "sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA==", "dependencies": { "@fastify/busboy": "^2.0.0" }, @@ -2439,9 +2439,9 @@ "dev": true }, "undici": { - "version": "5.26.4", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.26.4.tgz", - "integrity": "sha512-OG+QOf0fTLtazL9P9X7yqWxQ+Z0395Wk6DSkyTxtaq3wQEjIroVe7Y4asCX/vcCxYpNGMnwz8F0qbRYUoaQVMw==", + "version": "5.28.3", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.3.tgz", + "integrity": "sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA==", "requires": { "@fastify/busboy": "^2.0.0" } From b7039708981f4b6bd444c54c94b79e4f48dc6eaf Mon Sep 17 00:00:00 2001 From: Dharmesh Patel Date: Fri, 8 Mar 2024 19:46:19 +0530 Subject: [PATCH 02/11] Update dist file. --- dist/index.js | 1124 ++++++++++++++++++++++++++++++++++++------------- 1 file changed, 823 insertions(+), 301 deletions(-) diff --git a/dist/index.js b/dist/index.js index a4eaa7e..568c8de 100644 --- a/dist/index.js +++ b/dist/index.js @@ -7733,6 +7733,7 @@ const MockAgent = __nccwpck_require__(6771) const MockPool = __nccwpck_require__(6193) const mockErrors = __nccwpck_require__(888) const ProxyAgent = __nccwpck_require__(7858) +const RetryHandler = __nccwpck_require__(2286) const { getGlobalDispatcher, setGlobalDispatcher } = __nccwpck_require__(1892) const DecoratorHandler = __nccwpck_require__(6930) const RedirectHandler = __nccwpck_require__(2860) @@ -7754,6 +7755,7 @@ module.exports.Pool = Pool module.exports.BalancedPool = BalancedPool module.exports.Agent = Agent module.exports.ProxyAgent = ProxyAgent +module.exports.RetryHandler = RetryHandler module.exports.DecoratorHandler = DecoratorHandler module.exports.RedirectHandler = RedirectHandler @@ -8654,6 +8656,7 @@ function request (opts, callback) { } module.exports = request +module.exports.RequestHandler = RequestHandler /***/ }), @@ -9036,6 +9039,8 @@ const kBody = Symbol('kBody') const kAbort = Symbol('abort') const kContentType = Symbol('kContentType') +const noop = () => {} + module.exports = class BodyReadable extends Readable { constructor ({ resume, @@ -9169,37 +9174,50 @@ module.exports = class BodyReadable extends Readable { return this[kBody] } - async dump (opts) { + dump (opts) { let limit = opts && Number.isFinite(opts.limit) ? 
opts.limit : 262144 const signal = opts && opts.signal - const abortFn = () => { - this.destroy() - } - let signalListenerCleanup + if (signal) { - if (typeof signal !== 'object' || !('aborted' in signal)) { - throw new InvalidArgumentError('signal must be an AbortSignal') - } - util.throwIfAborted(signal) - signalListenerCleanup = util.addAbortListener(signal, abortFn) - } - try { - for await (const chunk of this) { - util.throwIfAborted(signal) - limit -= Buffer.byteLength(chunk) - if (limit < 0) { - return + try { + if (typeof signal !== 'object' || !('aborted' in signal)) { + throw new InvalidArgumentError('signal must be an AbortSignal') } + util.throwIfAborted(signal) + } catch (err) { + return Promise.reject(err) } - } catch { - util.throwIfAborted(signal) - } finally { - if (typeof signalListenerCleanup === 'function') { - signalListenerCleanup() - } else if (signalListenerCleanup) { - signalListenerCleanup[Symbol.dispose]() - } } + + if (this.closed) { + return Promise.resolve(null) + } + + return new Promise((resolve, reject) => { + const signalListenerCleanup = signal + ? util.addAbortListener(signal, () => { + this.destroy() + }) + : noop + + this + .on('close', function () { + signalListenerCleanup() + if (signal && signal.aborted) { + reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' })) + } else { + resolve(null) + } + }) + .on('error', noop) + .on('data', function (chunk) { + limit -= chunk.length + if (limit <= 0) { + this.destroy() + } + }) + .resume() + }) } } @@ -10579,13 +10597,13 @@ module.exports = { /***/ }), /***/ 9174: -/***/ ((module) => { +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; module.exports = { - kConstruct: Symbol('constructable') + kConstruct: (__nccwpck_require__(2785).kConstruct) } @@ -11571,11 +11589,9 @@ class Parser { socket[kReset] = true } - let pause - try { - pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false - } catch (err) { - util.destroy(socket, err) + const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false + + if (request.aborted) { return -1 } @@ -11622,13 +11638,8 @@ class Parser { this.bytesRead += buf.length - try { - if (request.onData(buf) === false) { - return constants.ERROR.PAUSED - } - } catch (err) { - util.destroy(socket, err) - return -1 + if (request.onData(buf) === false) { + return constants.ERROR.PAUSED } } @@ -11669,11 +11680,7 @@ class Parser { return -1 } - try { - request.onComplete(headers) - } catch (err) { - errorRequest(client, request, err) - } + request.onComplete(headers) client[kQueue][client[kRunningIdx]++] = null @@ -11837,7 +11844,7 @@ async function connect (client) { const idx = hostname.indexOf(']') assert(idx !== -1) - const ip = hostname.substr(1, idx - 1) + const ip = hostname.substring(1, idx) assert(net.isIP(ip)) hostname = ip @@ -12116,23 +12123,7 @@ function _resume (client, sync) { return } - if (util.isStream(request.body) && util.bodyLength(request.body) === 0) { - request.body - .on('data', /* istanbul ignore next */ function () { - /* istanbul ignore next */ - assert(false) - }) - .on('error', function (err) { - errorRequest(client, request, err) - }) - .on('end', function () { - util.destroy(this) - }) - - request.body = null - } - - if (client[kRunning] > 0 && + if (client[kRunning] > 0 && util.bodyLength(request.body) !== 0 && (util.isStream(request.body) || util.isAsyncIterable(request.body))) { // Request with stream or iterator body can 
error while other requests // are inflight and indirectly error those as well. @@ -12153,6 +12144,11 @@ function _resume (client, sync) { } } +// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2 +function shouldSendContentLength (method) { + return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT' +} + function write (client, request) { if (client[kHTTPConnVersion] === 'h2') { writeH2(client, client[kHTTP2Session], request) @@ -12181,7 +12177,9 @@ function write (client, request) { body.read(0) } - let contentLength = util.bodyLength(body) + const bodyLength = util.bodyLength(body) + + let contentLength = bodyLength if (contentLength === null) { contentLength = request.contentLength @@ -12196,7 +12194,9 @@ function write (client, request) { contentLength = null } - if (request.contentLength !== null && request.contentLength !== contentLength) { + // https://github.com/nodejs/undici/issues/2046 + // A user agent may send a Content-Length header with 0 value, this should be allowed. + if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength !== null && request.contentLength !== contentLength) { if (client[kStrictContentLength]) { errorRequest(client, request, new RequestContentLengthMismatchError()) return false @@ -12277,7 +12277,7 @@ function write (client, request) { } /* istanbul ignore else: assertion */ - if (!body) { + if (!body || bodyLength === 0) { if (contentLength === 0) { socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1') } else { @@ -12343,6 +12343,7 @@ function writeH2 (client, session, request) { return false } + /** @type {import('node:http2').ClientHttp2Stream} */ let stream const h2State = client[kHTTP2SessionState] @@ -12417,7 +12418,9 @@ function writeH2 (client, session, request) { contentLength = null } - if (request.contentLength != null && request.contentLength !== contentLength) { + // https://github.com/nodejs/undici/issues/2046 + // A user agent may send a Content-Length header with 0 value, this should be allowed. 
+ if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) { if (client[kStrictContentLength]) { errorRequest(client, request, new RequestContentLengthMismatchError()) return false @@ -12436,14 +12439,10 @@ function writeH2 (client, session, request) { const shouldEndStream = method === 'GET' || method === 'HEAD' if (expectContinue) { headers[HTTP2_HEADER_EXPECT] = '100-continue' - /** - * @type {import('node:http2').ClientHttp2Stream} - */ stream = session.request(headers, { endStream: shouldEndStream, signal }) stream.once('continue', writeBodyH2) } else { - /** @type {import('node:http2').ClientHttp2Stream} */ stream = session.request(headers, { endStream: shouldEndStream, signal @@ -12455,7 +12454,9 @@ function writeH2 (client, session, request) { ++h2State.openStreams stream.once('response', headers => { - if (request.onHeaders(Number(headers[HTTP2_HEADER_STATUS]), headers, stream.resume.bind(stream), '') === false) { + const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers + + if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) { stream.pause() } }) @@ -12465,13 +12466,17 @@ function writeH2 (client, session, request) { }) stream.on('data', (chunk) => { - if (request.onData(chunk) === false) stream.pause() + if (request.onData(chunk) === false) { + stream.pause() + } }) stream.once('close', () => { h2State.openStreams -= 1 // TODO(HTTP/2): unref only if current streams count is 0 - if (h2State.openStreams === 0) session.unref() + if (h2State.openStreams === 0) { + session.unref() + } }) stream.once('error', function (err) { @@ -12631,7 +12636,11 @@ function writeStream ({ h2stream, body, client, request, socket, contentLength, } } const onAbort = function () { - onFinished(new RequestAbortedError()) + if (finished) { + return + } + const err = new RequestAbortedError() + queueMicrotask(() => onFinished(err)) } const onFinished = function (err) { if (finished) { @@ -14236,6 +14245,19 @@ class ResponseExceededMaxSizeError extends UndiciError { } } +class RequestRetryError extends UndiciError { + constructor (message, code, { headers, data }) { + super(message) + Error.captureStackTrace(this, RequestRetryError) + this.name = 'RequestRetryError' + this.message = message || 'Request retry error' + this.code = 'UND_ERR_REQ_RETRY' + this.statusCode = code + this.data = data + this.headers = headers + } +} + module.exports = { HTTPParserError, UndiciError, @@ -14255,7 +14277,8 @@ module.exports = { NotSupportedError, ResponseContentLengthMismatchError, BalancedPoolMissingUpstreamError, - ResponseExceededMaxSizeError + ResponseExceededMaxSizeError, + RequestRetryError } @@ -14379,10 +14402,29 @@ class Request { this.method = method + this.abort = null + if (body == null) { this.body = null } else if (util.isStream(body)) { this.body = body + + const rState = this.body._readableState + if (!rState || !rState.autoDestroy) { + this.endHandler = function autoDestroy () { + util.destroy(this) + } + this.body.on('end', this.endHandler) + } + + this.errorHandler = err => { + if (this.abort) { + this.abort(err) + } else { + this.error = err + } + } + this.body.on('error', this.errorHandler) } else if (util.isBuffer(body)) { this.body = body.byteLength ? 
body : null } else if (ArrayBuffer.isView(body)) { @@ -14478,9 +14520,9 @@ class Request { onBodySent (chunk) { if (this[kHandler].onBodySent) { try { - this[kHandler].onBodySent(chunk) + return this[kHandler].onBodySent(chunk) } catch (err) { - this.onError(err) + this.abort(err) } } } @@ -14489,13 +14531,26 @@ class Request { if (channels.bodySent.hasSubscribers) { channels.bodySent.publish({ request: this }) } + + if (this[kHandler].onRequestSent) { + try { + return this[kHandler].onRequestSent() + } catch (err) { + this.abort(err) + } + } } onConnect (abort) { assert(!this.aborted) assert(!this.completed) - return this[kHandler].onConnect(abort) + if (this.error) { + abort(this.error) + } else { + this.abort = abort + return this[kHandler].onConnect(abort) + } } onHeaders (statusCode, headers, resume, statusText) { @@ -14506,14 +14561,23 @@ class Request { channels.headers.publish({ request: this, response: { statusCode, headers, statusText } }) } - return this[kHandler].onHeaders(statusCode, headers, resume, statusText) + try { + return this[kHandler].onHeaders(statusCode, headers, resume, statusText) + } catch (err) { + this.abort(err) + } } onData (chunk) { assert(!this.aborted) assert(!this.completed) - return this[kHandler].onData(chunk) + try { + return this[kHandler].onData(chunk) + } catch (err) { + this.abort(err) + return false + } } onUpgrade (statusCode, headers, socket) { @@ -14524,16 +14588,26 @@ class Request { } onComplete (trailers) { + this.onFinally() + assert(!this.aborted) this.completed = true if (channels.trailers.hasSubscribers) { channels.trailers.publish({ request: this, trailers }) } - return this[kHandler].onComplete(trailers) + + try { + return this[kHandler].onComplete(trailers) + } catch (err) { + // TODO (fix): This might be a bad idea? 
+ this.onError(err) + } } onError (error) { + this.onFinally() + if (channels.error.hasSubscribers) { channels.error.publish({ request: this, error }) } @@ -14542,9 +14616,22 @@ class Request { return } this.aborted = true + return this[kHandler].onError(error) } + onFinally () { + if (this.errorHandler) { + this.body.off('error', this.errorHandler) + this.errorHandler = null + } + + if (this.endHandler) { + this.body.off('end', this.endHandler) + this.endHandler = null + } + } + // TODO: adjust to support H2 addHeader (key, value) { processHeader(this, key, value) @@ -14766,7 +14853,9 @@ module.exports = { kHTTP2BuildRequest: Symbol('http2 build request'), kHTTP1BuildRequest: Symbol('http1 build request'), kHTTP2CopyHeaders: Symbol('http2 copy headers'), - kHTTPConnVersion: Symbol('http connection version') + kHTTPConnVersion: Symbol('http connection version'), + kRetryHandlerDefaultRetry: Symbol('retry agent default retry'), + kConstruct: Symbol('constructable') } @@ -14903,13 +14992,13 @@ function getHostname (host) { const idx = host.indexOf(']') assert(idx !== -1) - return host.substr(1, idx - 1) + return host.substring(1, idx) } const idx = host.indexOf(':') if (idx === -1) return host - return host.substr(0, idx) + return host.substring(0, idx) } // IP addresses are not valid server names per RFC6066 @@ -14968,7 +15057,7 @@ function isReadableAborted (stream) { } function destroy (stream, err) { - if (!isStream(stream) || isDestroyed(stream)) { + if (stream == null || !isStream(stream) || isDestroyed(stream)) { return } @@ -15006,7 +15095,7 @@ function parseHeaders (headers, obj = {}) { if (!val) { if (Array.isArray(headers[i + 1])) { - obj[key] = headers[i + 1] + obj[key] = headers[i + 1].map(x => x.toString('utf8')) } else { obj[key] = headers[i + 1].toString('utf8') } @@ -15209,16 +15298,7 @@ function throwIfAborted (signal) { } } -let events function addAbortListener (signal, listener) { - if (typeof Symbol.dispose === 'symbol') { - if (!events) { - events = __nccwpck_require__(2361) - } - if (typeof events.addAbortListener === 'function' && 'aborted' in signal) { - return events.addAbortListener(signal, listener) - } - } if ('addEventListener' in signal) { signal.addEventListener('abort', listener, { once: true }) return () => signal.removeEventListener('abort', listener) @@ -15242,6 +15322,21 @@ function toUSVString (val) { return `${val}` } +// Parsed accordingly to RFC 9110 +// https://www.rfc-editor.org/rfc/rfc9110#field.content-range +function parseRangeHeader (range) { + if (range == null || range === '') return { start: 0, end: null, size: null } + + const m = range ? range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null + return m + ? { + start: parseInt(m[1]), + end: m[2] ? parseInt(m[2]) : null, + size: m[3] ? parseInt(m[3]) : null + } + : null +} + const kEnumerableProperty = Object.create(null) kEnumerableProperty.enumerable = true @@ -15275,9 +15370,11 @@ module.exports = { buildURL, throwIfAborted, addAbortListener, + parseRangeHeader, nodeMajor, nodeMinor, - nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13) + nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13), + safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE'] } @@ -15542,6 +15639,8 @@ let ReadableStream = globalThis.ReadableStream /** @type {globalThis['File']} */ const File = NativeFile ?? 
UndiciFile +const textEncoder = new TextEncoder() +const textDecoder = new TextDecoder() // https://fetch.spec.whatwg.org/#concept-bodyinit-extract function extractBody (object, keepalive = false) { @@ -15565,7 +15664,7 @@ function extractBody (object, keepalive = false) { stream = new ReadableStream({ async pull (controller) { controller.enqueue( - typeof source === 'string' ? new TextEncoder().encode(source) : source + typeof source === 'string' ? textEncoder.encode(source) : source ) queueMicrotask(() => readableStreamClose(controller)) }, @@ -15635,7 +15734,6 @@ function extractBody (object, keepalive = false) { // - That the content-length is calculated in advance. // - And that all parts are pre-encoded and ready to be sent. - const enc = new TextEncoder() const blobParts = [] const rn = new Uint8Array([13, 10]) // '\r\n' length = 0 @@ -15643,13 +15741,13 @@ function extractBody (object, keepalive = false) { for (const [name, value] of object) { if (typeof value === 'string') { - const chunk = enc.encode(prefix + + const chunk = textEncoder.encode(prefix + `; name="${escape(normalizeLinefeeds(name))}"` + `\r\n\r\n${normalizeLinefeeds(value)}\r\n`) blobParts.push(chunk) length += chunk.byteLength } else { - const chunk = enc.encode(`${prefix}; name="${escape(normalizeLinefeeds(name))}"` + + const chunk = textEncoder.encode(`${prefix}; name="${escape(normalizeLinefeeds(name))}"` + (value.name ? `; filename="${escape(value.name)}"` : '') + '\r\n' + `Content-Type: ${ value.type || 'application/octet-stream' @@ -15663,7 +15761,7 @@ function extractBody (object, keepalive = false) { } } - const chunk = enc.encode(`--${boundary}--`) + const chunk = textEncoder.encode(`--${boundary}--`) blobParts.push(chunk) length += chunk.byteLength if (hasUnknownSizeValue) { @@ -15959,14 +16057,16 @@ function bodyMixinMethods (instance) { let text = '' // application/x-www-form-urlencoded parser will keep the BOM. // https://url.spec.whatwg.org/#concept-urlencoded-parser - const textDecoder = new TextDecoder('utf-8', { ignoreBOM: true }) + // Note that streaming decoder is stateful and cannot be reused + const streamingDecoder = new TextDecoder('utf-8', { ignoreBOM: true }) + for await (const chunk of consumeBody(this[kState].body)) { if (!isUint8Array(chunk)) { throw new TypeError('Expected Uint8Array chunk') } - text += textDecoder.decode(chunk, { stream: true }) + text += streamingDecoder.decode(chunk, { stream: true }) } - text += textDecoder.decode() + text += streamingDecoder.decode() entries = new URLSearchParams(text) } catch (err) { // istanbul ignore next: Unclear when new URLSearchParams can fail on a string. @@ -16081,7 +16181,7 @@ function utf8DecodeBytes (buffer) { // 3. Process a queue with an instance of UTF-8’s // decoder, ioQueue, output, and "replacement". - const output = new TextDecoder().decode(buffer) + const output = textDecoder.decode(buffer) // 4. Return output. 
return output @@ -16129,10 +16229,12 @@ module.exports = { const { MessageChannel, receiveMessageOnPort } = __nccwpck_require__(1267) const corsSafeListedMethods = ['GET', 'HEAD', 'POST'] +const corsSafeListedMethodsSet = new Set(corsSafeListedMethods) const nullBodyStatus = [101, 204, 205, 304] const redirectStatus = [301, 302, 303, 307, 308] +const redirectStatusSet = new Set(redirectStatus) // https://fetch.spec.whatwg.org/#block-bad-port const badPorts = [ @@ -16144,6 +16246,8 @@ const badPorts = [ '10080' ] +const badPortsSet = new Set(badPorts) + // https://w3c.github.io/webappsec-referrer-policy/#referrer-policies const referrerPolicy = [ '', @@ -16156,10 +16260,12 @@ const referrerPolicy = [ 'strict-origin-when-cross-origin', 'unsafe-url' ] +const referrerPolicySet = new Set(referrerPolicy) const requestRedirect = ['follow', 'manual', 'error'] const safeMethods = ['GET', 'HEAD', 'OPTIONS', 'TRACE'] +const safeMethodsSet = new Set(safeMethods) const requestMode = ['navigate', 'same-origin', 'no-cors', 'cors'] @@ -16194,6 +16300,7 @@ const requestDuplex = [ // http://fetch.spec.whatwg.org/#forbidden-method const forbiddenMethods = ['CONNECT', 'TRACE', 'TRACK'] +const forbiddenMethodsSet = new Set(forbiddenMethods) const subresource = [ 'audio', @@ -16209,6 +16316,7 @@ const subresource = [ 'xslt', '' ] +const subresourceSet = new Set(subresource) /** @type {globalThis['DOMException']} */ const DOMException = globalThis.DOMException ?? (() => { @@ -16258,7 +16366,14 @@ module.exports = { nullBodyStatus, safeMethods, badPorts, - requestDuplex + requestDuplex, + subresourceSet, + badPortsSet, + redirectStatusSet, + corsSafeListedMethodsSet, + safeMethodsSet, + forbiddenMethodsSet, + referrerPolicySet } @@ -16388,17 +16503,14 @@ function dataURLProcessor (dataURL) { * @param {boolean} excludeFragment */ function URLSerializer (url, excludeFragment = false) { - const href = url.href - if (!excludeFragment) { - return href + return url.href } - const hash = href.lastIndexOf('#') - if (hash === -1) { - return href - } - return href.slice(0, hash) + const href = url.href + const hashLength = url.hash.length + + return hashLength === 0 ? href : href.substring(0, href.length - hashLength) } // https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points @@ -16914,6 +17026,7 @@ const { isBlobLike } = __nccwpck_require__(2538) const { webidl } = __nccwpck_require__(1744) const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) const { kEnumerableProperty } = __nccwpck_require__(3983) +const encoder = new TextEncoder() class File extends Blob { constructor (fileBits, fileName, options = {}) { @@ -17187,7 +17300,7 @@ function processBlobParts (parts, options) { } // 3. Append the result of UTF-8 encoding s to bytes. 
- bytes.push(new TextEncoder().encode(s)) + bytes.push(encoder.encode(s)) } else if ( types.isAnyArrayBuffer(element) || types.isTypedArray(element) @@ -17581,7 +17694,7 @@ module.exports = { -const { kHeadersList } = __nccwpck_require__(2785) +const { kHeadersList, kConstruct } = __nccwpck_require__(2785) const { kGuard } = __nccwpck_require__(5861) const { kEnumerableProperty } = __nccwpck_require__(3983) const { @@ -17595,6 +17708,13 @@ const assert = __nccwpck_require__(9491) const kHeadersMap = Symbol('headers map') const kHeadersSortedMap = Symbol('headers map sorted') +/** + * @param {number} code + */ +function isHTTPWhiteSpaceCharCode (code) { + return code === 0x00a || code === 0x00d || code === 0x009 || code === 0x020 +} + /** * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize * @param {string} potentialValue @@ -17603,12 +17723,12 @@ function headerValueNormalize (potentialValue) { // To normalize a byte sequence potentialValue, remove // any leading and trailing HTTP whitespace bytes from // potentialValue. + let i = 0; let j = potentialValue.length + + while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j + while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i))) ++i - // Trimming the end with `.replace()` and a RegExp is typically subject to - // ReDoS. This is safer and faster. - let i = potentialValue.length - while (/[\r\n\t ]/.test(potentialValue.charAt(--i))); - return potentialValue.slice(0, i + 1).replace(/^[\r\n\t ]+/, '') + return i === 0 && j === potentialValue.length ? potentialValue : potentialValue.substring(i, j) } function fill (headers, object) { @@ -17617,7 +17737,8 @@ function fill (headers, object) { // 1. If object is a sequence, then for each header in object: // Note: webidl conversion to array has already been done. if (Array.isArray(object)) { - for (const header of object) { + for (let i = 0; i < object.length; ++i) { + const header = object[i] // 1. If header does not contain exactly two items, then throw a TypeError. if (header.length !== 2) { throw webidl.errors.exception({ @@ -17627,15 +17748,16 @@ function fill (headers, object) { } // 2. Append (header’s first item, header’s second item) to headers. - headers.append(header[0], header[1]) + appendHeader(headers, header[0], header[1]) } } else if (typeof object === 'object' && object !== null) { // Note: null should throw // 2. Otherwise, object is a record, then for each key → value in object, // append (key, value) to headers - for (const [key, value] of Object.entries(object)) { - headers.append(key, value) + const keys = Object.keys(object) + for (let i = 0; i < keys.length; ++i) { + appendHeader(headers, keys[i], object[keys[i]]) } } else { throw webidl.errors.conversionFailed({ @@ -17646,6 +17768,50 @@ function fill (headers, object) { } } +/** + * @see https://fetch.spec.whatwg.org/#concept-headers-append + */ +function appendHeader (headers, name, value) { + // 1. Normalize value. + value = headerValueNormalize(value) + + // 2. If name is not a header name or value is not a + // header value, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.append', + value: name, + type: 'header name' + }) + } else if (!isValidHeaderValue(value)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.append', + value, + type: 'header value' + }) + } + + // 3. If headers’s guard is "immutable", then throw a TypeError. + // 4. 
Otherwise, if headers’s guard is "request" and name is a + // forbidden header name, return. + // Note: undici does not implement forbidden header names + if (headers[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (headers[kGuard] === 'request-no-cors') { + // 5. Otherwise, if headers’s guard is "request-no-cors": + // TODO + } + + // 6. Otherwise, if headers’s guard is "response" and name is a + // forbidden response-header name, return. + + // 7. Append (name, value) to headers’s header list. + return headers[kHeadersList].append(name, value) + + // 8. If headers’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from headers +} + class HeadersList { /** @type {[string, string][]|null} */ cookies = null @@ -17654,7 +17820,7 @@ class HeadersList { if (init instanceof HeadersList) { this[kHeadersMap] = new Map(init[kHeadersMap]) this[kHeadersSortedMap] = init[kHeadersSortedMap] - this.cookies = init.cookies + this.cookies = init.cookies === null ? null : [...init.cookies] } else { this[kHeadersMap] = new Map(init) this[kHeadersSortedMap] = null @@ -17716,7 +17882,7 @@ class HeadersList { // the first such header to value and remove the // others. // 2. Otherwise, append header (name, value) to list. - return this[kHeadersMap].set(lowercaseName, { name, value }) + this[kHeadersMap].set(lowercaseName, { name, value }) } // https://fetch.spec.whatwg.org/#concept-header-list-delete @@ -17729,20 +17895,18 @@ class HeadersList { this.cookies = null } - return this[kHeadersMap].delete(name) + this[kHeadersMap].delete(name) } // https://fetch.spec.whatwg.org/#concept-header-list-get get (name) { - // 1. If list does not contain name, then return null. - if (!this.contains(name)) { - return null - } + const value = this[kHeadersMap].get(name.toLowerCase()) + // 1. If list does not contain name, then return null. // 2. Return the values of all headers in list whose name // is a byte-case-insensitive match for name, // separated from each other by 0x2C 0x20, in order. - return this[kHeadersMap].get(name.toLowerCase())?.value ?? null + return value === undefined ? null : value.value } * [Symbol.iterator] () { @@ -17768,6 +17932,9 @@ class HeadersList { // https://fetch.spec.whatwg.org/#headers-class class Headers { constructor (init = undefined) { + if (init === kConstruct) { + return + } this[kHeadersList] = new HeadersList() // The new Headers(init) constructor steps are: @@ -17791,43 +17958,7 @@ class Headers { name = webidl.converters.ByteString(name) value = webidl.converters.ByteString(value) - // 1. Normalize value. - value = headerValueNormalize(value) - - // 2. If name is not a header name or value is not a - // header value, then throw a TypeError. - if (!isValidHeaderName(name)) { - throw webidl.errors.invalidArgument({ - prefix: 'Headers.append', - value: name, - type: 'header name' - }) - } else if (!isValidHeaderValue(value)) { - throw webidl.errors.invalidArgument({ - prefix: 'Headers.append', - value, - type: 'header value' - }) - } - - // 3. If headers’s guard is "immutable", then throw a TypeError. - // 4. Otherwise, if headers’s guard is "request" and name is a - // forbidden header name, return. - // Note: undici does not implement forbidden header names - if (this[kGuard] === 'immutable') { - throw new TypeError('immutable') - } else if (this[kGuard] === 'request-no-cors') { - // 5. Otherwise, if headers’s guard is "request-no-cors": - // TODO - } - - // 6. 
Otherwise, if headers’s guard is "response" and name is a - // forbidden response-header name, return. - - // 7. Append (name, value) to headers’s header list. - // 8. If headers’s guard is "request-no-cors", then remove - // privileged no-CORS request headers from headers - return this[kHeadersList].append(name, value) + return appendHeader(this, name, value) } // https://fetch.spec.whatwg.org/#dom-headers-delete @@ -17872,7 +18003,7 @@ class Headers { // 7. Delete name from this’s header list. // 8. If this’s guard is "request-no-cors", then remove // privileged no-CORS request headers from this. - return this[kHeadersList].delete(name) + this[kHeadersList].delete(name) } // https://fetch.spec.whatwg.org/#dom-headers-get @@ -17965,7 +18096,7 @@ class Headers { // 7. Set (name, value) in this’s header list. // 8. If this’s guard is "request-no-cors", then remove // privileged no-CORS request headers from this - return this[kHeadersList].set(name, value) + this[kHeadersList].set(name, value) } // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie @@ -18001,7 +18132,8 @@ class Headers { const cookies = this[kHeadersList].cookies // 3. For each name of names: - for (const [name, value] of names) { + for (let i = 0; i < names.length; ++i) { + const [name, value] = names[i] // 1. If name is `set-cookie`, then: if (name === 'set-cookie') { // 1. Let values be a list of all values of headers in list whose name @@ -18009,8 +18141,8 @@ class Headers { // 2. For each value of values: // 1. Append (name, value) to headers. - for (const value of cookies) { - headers.push([name, value]) + for (let j = 0; j < cookies.length; ++j) { + headers.push([name, cookies[j]]) } } else { // 2. Otherwise: @@ -18034,6 +18166,12 @@ class Headers { keys () { webidl.brandCheck(this, Headers) + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'key') + } + return makeIterator( () => [...this[kHeadersSortedMap].values()], 'Headers', @@ -18044,6 +18182,12 @@ class Headers { values () { webidl.brandCheck(this, Headers) + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'value') + } + return makeIterator( () => [...this[kHeadersSortedMap].values()], 'Headers', @@ -18054,6 +18198,12 @@ class Headers { entries () { webidl.brandCheck(this, Headers) + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'key+value') + } + return makeIterator( () => [...this[kHeadersSortedMap].values()], 'Headers', @@ -18185,11 +18335,11 @@ const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(5861) const assert = __nccwpck_require__(9491) const { safelyExtractBody } = __nccwpck_require__(1472) const { - redirectStatus, + redirectStatusSet, nullBodyStatus, - safeMethods, + safeMethodsSet, requestBodyHeader, - subresource, + subresourceSet, DOMException } = __nccwpck_require__(1037) const { kHeadersList } = __nccwpck_require__(2785) @@ -18201,6 +18351,7 @@ const { TransformStream } = __nccwpck_require__(5356) const { getGlobalDispatcher } = __nccwpck_require__(1892) const { webidl } = __nccwpck_require__(1744) const { STATUS_CODES } = __nccwpck_require__(3685) +const GET_OR_HEAD = ['GET', 'HEAD'] /** @type {import('buffer').resolveObjectURL} */ let resolveObjectURL @@ -18260,7 +18411,7 @@ class Fetch extends EE { } // https://fetch.spec.whatwg.org/#fetch-method -async function fetch (input, init = {}) { +function 
fetch (input, init = {}) { webidl.argumentLengthCheck(arguments, 1, { header: 'globalThis.fetch' }) // 1. Let p be a new promise. @@ -18343,7 +18494,7 @@ async function fetch (input, init = {}) { const processResponse = (response) => { // 1. If locallyAborted is true, terminate these substeps. if (locallyAborted) { - return + return Promise.resolve() } // 2. If response’s aborted flag is set, then: @@ -18356,7 +18507,7 @@ async function fetch (input, init = {}) { // deserializedError. abortFetch(p, request, responseObject, controller.serializedAbortReason) - return + return Promise.resolve() } // 3. If response is a network error, then reject p with a TypeError @@ -18365,7 +18516,7 @@ async function fetch (input, init = {}) { p.reject( Object.assign(new TypeError('fetch failed'), { cause: response.error }) ) - return + return Promise.resolve() } // 4. Set responseObject to the result of creating a Response object, @@ -18424,7 +18575,7 @@ function finalizeAndReportTiming (response, initiatorType = 'other') { } // 8. If response’s timing allow passed flag is not set, then: - if (!timingInfo.timingAllowPassed) { + if (!response.timingAllowPassed) { // 1. Set timingInfo to a the result of creating an opaque timing info for timingInfo. timingInfo = createOpaqueTimingInfo({ startTime: timingInfo.startTime @@ -18648,7 +18799,7 @@ function fetching ({ } // 15. If request is a subresource request, then: - if (subresource.includes(request.destination)) { + if (subresourceSet.has(request.destination)) { // TODO } @@ -18915,13 +19066,13 @@ async function mainFetch (fetchParams, recursive = false) { // https://fetch.spec.whatwg.org/#concept-scheme-fetch // given a fetch params fetchParams -async function schemeFetch (fetchParams) { +function schemeFetch (fetchParams) { // Note: since the connection is destroyed on redirect, which sets fetchParams to a // cancelled state, we do not want this condition to trigger *unless* there have been // no redirects. See https://github.com/nodejs/undici/issues/1776 // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams. if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) { - return makeAppropriateNetworkError(fetchParams) + return Promise.resolve(makeAppropriateNetworkError(fetchParams)) } // 2. Let request be fetchParams’s request. @@ -18937,7 +19088,7 @@ async function schemeFetch (fetchParams) { // and body is the empty byte sequence as a body. // Otherwise, return a network error. - return makeNetworkError('about scheme is not supported') + return Promise.resolve(makeNetworkError('about scheme is not supported')) } case 'blob:': { if (!resolveObjectURL) { @@ -18950,7 +19101,7 @@ async function schemeFetch (fetchParams) { // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L52-L56 // Buffer.resolveObjectURL does not ignore URL queries. if (blobURLEntry.search.length !== 0) { - return makeNetworkError('NetworkError when attempting to fetch resource.') + return Promise.resolve(makeNetworkError('NetworkError when attempting to fetch resource.')) } const blobURLEntryObject = resolveObjectURL(blobURLEntry.toString()) @@ -18958,7 +19109,7 @@ async function schemeFetch (fetchParams) { // 2. If request’s method is not `GET`, blobURLEntry is null, or blobURLEntry’s // object is not a Blob object, then return a network error. 
if (request.method !== 'GET' || !isBlobLike(blobURLEntryObject)) { - return makeNetworkError('invalid method') + return Promise.resolve(makeNetworkError('invalid method')) } // 3. Let bodyWithType be the result of safely extracting blobURLEntry’s object. @@ -18985,7 +19136,7 @@ async function schemeFetch (fetchParams) { response.body = body - return response + return Promise.resolve(response) } case 'data:': { // 1. Let dataURLStruct be the result of running the @@ -18996,7 +19147,7 @@ async function schemeFetch (fetchParams) { // 2. If dataURLStruct is failure, then return a // network error. if (dataURLStruct === 'failure') { - return makeNetworkError('failed to fetch the data URL') + return Promise.resolve(makeNetworkError('failed to fetch the data URL')) } // 3. Let mimeType be dataURLStruct’s MIME type, serialized. @@ -19005,28 +19156,28 @@ async function schemeFetch (fetchParams) { // 4. Return a response whose status message is `OK`, // header list is « (`Content-Type`, mimeType) », // and body is dataURLStruct’s body as a body. - return makeResponse({ + return Promise.resolve(makeResponse({ statusText: 'OK', headersList: [ ['content-type', { name: 'Content-Type', value: mimeType }] ], body: safelyExtractBody(dataURLStruct.body)[0] - }) + })) } case 'file:': { // For now, unfortunate as it is, file URLs are left as an exercise for the reader. // When in doubt, return a network error. - return makeNetworkError('not implemented... yet...') + return Promise.resolve(makeNetworkError('not implemented... yet...')) } case 'http:': case 'https:': { // Return the result of running HTTP fetch given fetchParams. - return await httpFetch(fetchParams) + return httpFetch(fetchParams) .catch((err) => makeNetworkError(err)) } default: { - return makeNetworkError('unknown scheme') + return Promise.resolve(makeNetworkError('unknown scheme')) } } } @@ -19045,7 +19196,7 @@ function finalizeResponse (fetchParams, response) { } // https://fetch.spec.whatwg.org/#fetch-finale -async function fetchFinale (fetchParams, response) { +function fetchFinale (fetchParams, response) { // 1. If response is a network error, then: if (response.type === 'error') { // 1. Set response’s URL list to « fetchParams’s request’s URL list[0] ». @@ -19129,8 +19280,9 @@ async function fetchFinale (fetchParams, response) { } else { // 4. Otherwise, fully read response’s body given processBody, processBodyError, // and fetchParams’s task destination. - await fullyReadBody(response.body, processBody, processBodyError) + return fullyReadBody(response.body, processBody, processBodyError) } + return Promise.resolve() } } @@ -19201,7 +19353,7 @@ async function httpFetch (fetchParams) { } // 8. If actualResponse’s status is a redirect status, then: - if (redirectStatus.includes(actualResponse.status)) { + if (redirectStatusSet.has(actualResponse.status)) { // 1. If actualResponse’s status is not 303, request’s body is not null, // and the connection uses HTTP/2, then user agents may, and are even // encouraged to, transmit an RST_STREAM frame. @@ -19238,7 +19390,7 @@ async function httpFetch (fetchParams) { } // https://fetch.spec.whatwg.org/#http-redirect-fetch -async function httpRedirectFetch (fetchParams, response) { +function httpRedirectFetch (fetchParams, response) { // 1. Let request be fetchParams’s request. const request = fetchParams.request @@ -19264,18 +19416,18 @@ async function httpRedirectFetch (fetchParams, response) { } } catch (err) { // 5. If locationURL is failure, then return a network error. 
- return makeNetworkError(err) + return Promise.resolve(makeNetworkError(err)) } // 6. If locationURL’s scheme is not an HTTP(S) scheme, then return a network // error. if (!urlIsHttpHttpsScheme(locationURL)) { - return makeNetworkError('URL scheme must be a HTTP(S) scheme') + return Promise.resolve(makeNetworkError('URL scheme must be a HTTP(S) scheme')) } // 7. If request’s redirect count is 20, then return a network error. if (request.redirectCount === 20) { - return makeNetworkError('redirect count exceeded') + return Promise.resolve(makeNetworkError('redirect count exceeded')) } // 8. Increase request’s redirect count by 1. @@ -19289,7 +19441,7 @@ async function httpRedirectFetch (fetchParams, response) { (locationURL.username || locationURL.password) && !sameOrigin(request, locationURL) ) { - return makeNetworkError('cross origin not allowed for request mode "cors"') + return Promise.resolve(makeNetworkError('cross origin not allowed for request mode "cors"')) } // 10. If request’s response tainting is "cors" and locationURL includes @@ -19298,9 +19450,9 @@ async function httpRedirectFetch (fetchParams, response) { request.responseTainting === 'cors' && (locationURL.username || locationURL.password) ) { - return makeNetworkError( + return Promise.resolve(makeNetworkError( 'URL cannot contain credentials for request mode "cors"' - ) + )) } // 11. If actualResponse’s status is not 303, request’s body is non-null, @@ -19310,7 +19462,7 @@ async function httpRedirectFetch (fetchParams, response) { request.body != null && request.body.source == null ) { - return makeNetworkError() + return Promise.resolve(makeNetworkError()) } // 12. If one of the following is true @@ -19319,7 +19471,7 @@ async function httpRedirectFetch (fetchParams, response) { if ( ([301, 302].includes(actualResponse.status) && request.method === 'POST') || (actualResponse.status === 303 && - !['GET', 'HEAD'].includes(request.method)) + !GET_OR_HEAD.includes(request.method)) ) { // then: // 1. Set request’s method to `GET` and request’s body to null. @@ -19340,6 +19492,9 @@ async function httpRedirectFetch (fetchParams, response) { // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name request.headersList.delete('authorization') + // https://fetch.spec.whatwg.org/#authentication-entries + request.headersList.delete('proxy-authorization', true) + // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement. request.headersList.delete('cookie') request.headersList.delete('host') @@ -19603,7 +19758,7 @@ async function httpNetworkOrCacheFetch ( // responses in httpCache, as per the "Invalidation" chapter of HTTP // Caching, and set storedResponse to null. [HTTP-CACHING] if ( - !safeMethods.includes(httpRequest.method) && + !safeMethodsSet.has(httpRequest.method) && forwardResponse.status >= 200 && forwardResponse.status <= 399 ) { @@ -20094,7 +20249,7 @@ async function httpNetworkFetch ( path: url.pathname + url.search, origin: url.origin, method: request.method, - body: fetchParams.controller.dispatcher.isMockActive ? request.body && request.body.source : body, + body: fetchParams.controller.dispatcher.isMockActive ? request.body && (request.body.source || request.body.stream) : body, headers: request.headersList.entries, maxRedirections: 0, upgrade: request.mode === 'websocket' ? 
'websocket' : undefined @@ -20139,7 +20294,7 @@ async function httpNetworkFetch ( location = val } - headers.append(key, val) + headers[kHeadersList].append(key, val) } } else { const keys = Object.keys(headersList) @@ -20153,7 +20308,7 @@ async function httpNetworkFetch ( location = val } - headers.append(key, val) + headers[kHeadersList].append(key, val) } } @@ -20163,7 +20318,7 @@ async function httpNetworkFetch ( const willFollow = request.redirect === 'follow' && location && - redirectStatus.includes(status) + redirectStatusSet.has(status) // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) { @@ -20257,7 +20412,7 @@ async function httpNetworkFetch ( const key = headersList[n + 0].toString('latin1') const val = headersList[n + 1].toString('latin1') - headers.append(key, val) + headers[kHeadersList].append(key, val) } resolve({ @@ -20300,11 +20455,12 @@ const { isValidHTTPToken, sameOrigin, normalizeMethod, - makePolicyContainer + makePolicyContainer, + normalizeMethodRecord } = __nccwpck_require__(2538) const { - forbiddenMethods, - corsSafeListedMethods, + forbiddenMethodsSet, + corsSafeListedMethodsSet, referrerPolicy, requestRedirect, requestMode, @@ -20317,13 +20473,12 @@ const { kHeaders, kSignal, kState, kGuard, kRealm } = __nccwpck_require__(5861) const { webidl } = __nccwpck_require__(1744) const { getGlobalOrigin } = __nccwpck_require__(1246) const { URLSerializer } = __nccwpck_require__(685) -const { kHeadersList } = __nccwpck_require__(2785) +const { kHeadersList, kConstruct } = __nccwpck_require__(2785) const assert = __nccwpck_require__(9491) const { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = __nccwpck_require__(2361) let TransformStream = globalThis.TransformStream -const kInit = Symbol('init') const kAbortController = Symbol('abortController') const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => { @@ -20334,7 +20489,7 @@ const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => { class Request { // https://fetch.spec.whatwg.org/#dom-request constructor (input, init = {}) { - if (input === kInit) { + if (input === kConstruct) { return } @@ -20473,8 +20628,10 @@ class Request { urlList: [...request.urlList] }) + const initHasKey = Object.keys(init).length !== 0 + // 13. If init is not empty, then: - if (Object.keys(init).length > 0) { + if (initHasKey) { // 1. If request’s mode is "navigate", then set it to "same-origin". if (request.mode === 'navigate') { request.mode = 'same-origin' @@ -20589,7 +20746,7 @@ class Request { } // 23. If init["integrity"] exists, then set request’s integrity metadata to it. - if (init.integrity !== undefined && init.integrity != null) { + if (init.integrity != null) { request.integrity = String(init.integrity) } @@ -20605,16 +20762,16 @@ class Request { // 2. If method is not a method or method is a forbidden method, then // throw a TypeError. - if (!isValidHTTPToken(init.method)) { - throw TypeError(`'${init.method}' is not a valid HTTP method.`) + if (!isValidHTTPToken(method)) { + throw new TypeError(`'${method}' is not a valid HTTP method.`) } - if (forbiddenMethods.indexOf(method.toUpperCase()) !== -1) { - throw TypeError(`'${init.method}' HTTP method is unsupported.`) + if (forbiddenMethodsSet.has(method.toUpperCase())) { + throw new TypeError(`'${method}' HTTP method is unsupported.`) } // 3. Normalize method. 
- method = normalizeMethod(init.method) + method = normalizeMethodRecord[method] ?? normalizeMethod(method) // 4. Set request’s method to method. request.method = method @@ -20685,7 +20842,7 @@ class Request { // 30. Set this’s headers to a new Headers object with this’s relevant // Realm, whose header list is request’s header list and guard is // "request". - this[kHeaders] = new Headers() + this[kHeaders] = new Headers(kConstruct) this[kHeaders][kHeadersList] = request.headersList this[kHeaders][kGuard] = 'request' this[kHeaders][kRealm] = this[kRealm] @@ -20694,7 +20851,7 @@ class Request { if (mode === 'no-cors') { // 1. If this’s request’s method is not a CORS-safelisted method, // then throw a TypeError. - if (!corsSafeListedMethods.includes(request.method)) { + if (!corsSafeListedMethodsSet.has(request.method)) { throw new TypeError( `'${request.method} is unsupported in no-cors mode.` ) @@ -20705,25 +20862,25 @@ class Request { } // 32. If init is not empty, then: - if (Object.keys(init).length !== 0) { + if (initHasKey) { + /** @type {HeadersList} */ + const headersList = this[kHeaders][kHeadersList] // 1. Let headers be a copy of this’s headers and its associated header // list. - let headers = new Headers(this[kHeaders]) - // 2. If init["headers"] exists, then set headers to init["headers"]. - if (init.headers !== undefined) { - headers = init.headers - } + const headers = init.headers !== undefined ? init.headers : new HeadersList(headersList) // 3. Empty this’s headers’s header list. - this[kHeaders][kHeadersList].clear() + headersList.clear() // 4. If headers is a Headers object, then for each header in its header // list, append header’s name/header’s value to this’s headers. - if (headers.constructor.name === 'Headers') { + if (headers instanceof HeadersList) { for (const [key, val] of headers) { - this[kHeaders].append(key, val) + headersList.append(key, val) } + // Note: Copy the `set-cookie` meta-data. + headersList.cookies = headers.cookies } else { // 5. Otherwise, fill this’s headers with headers. fillHeaders(this[kHeaders], headers) @@ -21012,10 +21169,10 @@ class Request { // 3. Let clonedRequestObject be the result of creating a Request object, // given clonedRequest, this’s headers’s guard, and this’s relevant Realm. 
- const clonedRequestObject = new Request(kInit) + const clonedRequestObject = new Request(kConstruct) clonedRequestObject[kState] = clonedRequest clonedRequestObject[kRealm] = this[kRealm] - clonedRequestObject[kHeaders] = new Headers() + clonedRequestObject[kHeaders] = new Headers(kConstruct) clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard] clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm] @@ -21256,7 +21413,7 @@ const { isomorphicEncode } = __nccwpck_require__(2538) const { - redirectStatus, + redirectStatusSet, nullBodyStatus, DOMException } = __nccwpck_require__(1037) @@ -21265,11 +21422,12 @@ const { webidl } = __nccwpck_require__(1744) const { FormData } = __nccwpck_require__(2015) const { getGlobalOrigin } = __nccwpck_require__(1246) const { URLSerializer } = __nccwpck_require__(685) -const { kHeadersList } = __nccwpck_require__(2785) +const { kHeadersList, kConstruct } = __nccwpck_require__(2785) const assert = __nccwpck_require__(9491) const { types } = __nccwpck_require__(3837) const ReadableStream = globalThis.ReadableStream || (__nccwpck_require__(5356).ReadableStream) +const textEncoder = new TextEncoder('utf-8') // https://fetch.spec.whatwg.org/#response-class class Response { @@ -21299,7 +21457,7 @@ class Response { } // 1. Let bytes the result of running serialize a JavaScript value to JSON bytes on data. - const bytes = new TextEncoder('utf-8').encode( + const bytes = textEncoder.encode( serializeJavascriptValueToJSONString(data) ) @@ -21344,7 +21502,7 @@ class Response { } // 3. If status is not a redirect status, then throw a RangeError. - if (!redirectStatus.includes(status)) { + if (!redirectStatusSet.has(status)) { throw new RangeError('Invalid status code ' + status) } @@ -21385,7 +21543,7 @@ class Response { // 2. Set this’s headers to a new Headers object with this’s relevant // Realm, whose header list is this’s response’s header list and guard // is "response". - this[kHeaders] = new Headers() + this[kHeaders] = new Headers(kConstruct) this[kHeaders][kGuard] = 'response' this[kHeaders][kHeadersList] = this[kState].headersList this[kHeaders][kRealm] = this[kRealm] @@ -21755,11 +21913,7 @@ webidl.converters.XMLHttpRequestBodyInit = function (V) { return webidl.converters.Blob(V, { strict: false }) } - if ( - types.isAnyArrayBuffer(V) || - types.isTypedArray(V) || - types.isDataView(V) - ) { + if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) { return webidl.converters.BufferSource(V) } @@ -21842,7 +21996,7 @@ module.exports = { "use strict"; -const { redirectStatus, badPorts, referrerPolicy: referrerPolicyTokens } = __nccwpck_require__(1037) +const { redirectStatusSet, referrerPolicySet: referrerPolicyTokens, badPortsSet } = __nccwpck_require__(1037) const { getGlobalOrigin } = __nccwpck_require__(1246) const { performance } = __nccwpck_require__(4074) const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(3983) @@ -21871,7 +22025,7 @@ function responseURL (response) { // https://fetch.spec.whatwg.org/#concept-response-location-url function responseLocationURL (response, requestFragment) { // 1. If response’s status is not a redirect status, then return null. - if (!redirectStatus.includes(response.status)) { + if (!redirectStatusSet.has(response.status)) { return null } @@ -21906,7 +22060,7 @@ function requestBadPort (request) { // 2. 
If url’s scheme is an HTTP(S) scheme and url’s port is a bad port, // then return blocked. - if (urlIsHttpHttpsScheme(url) && badPorts.includes(url.port)) { + if (urlIsHttpHttpsScheme(url) && badPortsSet.has(url.port)) { return 'blocked' } @@ -21945,52 +22099,57 @@ function isValidReasonPhrase (statusText) { return true } -function isTokenChar (c) { - return !( - c >= 0x7f || - c <= 0x20 || - c === '(' || - c === ')' || - c === '<' || - c === '>' || - c === '@' || - c === ',' || - c === ';' || - c === ':' || - c === '\\' || - c === '"' || - c === '/' || - c === '[' || - c === ']' || - c === '?' || - c === '=' || - c === '{' || - c === '}' - ) +/** + * @see https://tools.ietf.org/html/rfc7230#section-3.2.6 + * @param {number} c + */ +function isTokenCharCode (c) { + switch (c) { + case 0x22: + case 0x28: + case 0x29: + case 0x2c: + case 0x2f: + case 0x3a: + case 0x3b: + case 0x3c: + case 0x3d: + case 0x3e: + case 0x3f: + case 0x40: + case 0x5b: + case 0x5c: + case 0x5d: + case 0x7b: + case 0x7d: + // DQUOTE and "(),/:;<=>?@[\]{}" + return false + default: + // VCHAR %x21-7E + return c >= 0x21 && c <= 0x7e + } } -// See RFC 7230, Section 3.2.6. -// https://github.com/chromium/chromium/blob/d7da0240cae77824d1eda25745c4022757499131/third_party/blink/renderer/platform/network/http_parsers.cc#L321 +/** + * @param {string} characters + */ function isValidHTTPToken (characters) { - if (!characters || typeof characters !== 'string') { + if (characters.length === 0) { return false } for (let i = 0; i < characters.length; ++i) { - const c = characters.charCodeAt(i) - if (c > 0x7f || !isTokenChar(c)) { + if (!isTokenCharCode(characters.charCodeAt(i))) { return false } } return true } -// https://fetch.spec.whatwg.org/#header-name -// https://github.com/chromium/chromium/blob/b3d37e6f94f87d59e44662d6078f6a12de845d17/net/http/http_util.cc#L342 +/** + * @see https://fetch.spec.whatwg.org/#header-name + * @param {string} potentialValue + */ function isValidHeaderName (potentialValue) { - if (potentialValue.length === 0) { - return false - } - return isValidHTTPToken(potentialValue) } @@ -22048,7 +22207,7 @@ function setRequestReferrerPolicyOnRedirect (request, actualResponse) { // The left-most policy is the fallback. for (let i = policyHeader.length; i !== 0; i--) { const token = policyHeader[i - 1].trim() - if (referrerPolicyTokens.includes(token)) { + if (referrerPolicyTokens.has(token)) { policy = token break } @@ -22535,11 +22694,30 @@ function isCancelled (fetchParams) { fetchParams.controller.state === 'terminated' } -// https://fetch.spec.whatwg.org/#concept-method-normalize +const normalizeMethodRecord = { + delete: 'DELETE', + DELETE: 'DELETE', + get: 'GET', + GET: 'GET', + head: 'HEAD', + HEAD: 'HEAD', + options: 'OPTIONS', + OPTIONS: 'OPTIONS', + post: 'POST', + POST: 'POST', + put: 'PUT', + PUT: 'PUT' +} + +// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. +Object.setPrototypeOf(normalizeMethodRecord, null) + +/** + * @see https://fetch.spec.whatwg.org/#concept-method-normalize + * @param {string} method + */ function normalizeMethod (method) { - return /^(DELETE|GET|HEAD|OPTIONS|POST|PUT)$/i.test(method) - ? method.toUpperCase() - : method + return normalizeMethodRecord[method.toLowerCase()] ?? 
method } // https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string @@ -22884,7 +23062,8 @@ module.exports = { urlIsLocal, urlHasHttpsScheme, urlIsHttpHttpsScheme, - readAllBytes + readAllBytes, + normalizeMethodRecord } @@ -23323,12 +23502,10 @@ webidl.converters.ByteString = function (V) { // 2. If the value of any element of x is greater than // 255, then throw a TypeError. for (let index = 0; index < x.length; index++) { - const charCode = x.charCodeAt(index) - - if (charCode > 255) { + if (x.charCodeAt(index) > 255) { throw new TypeError( 'Cannot convert argument to a ByteString because the character at ' + - `index ${index} has a value of ${charCode} which is greater than 255.` + `index ${index} has a value of ${x.charCodeAt(index)} which is greater than 255.` ) } } @@ -25005,6 +25182,349 @@ function cleanRequestHeaders (headers, removeContent, unknownOrigin) { module.exports = RedirectHandler +/***/ }), + +/***/ 2286: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const assert = __nccwpck_require__(9491) + +const { kRetryHandlerDefaultRetry } = __nccwpck_require__(2785) +const { RequestRetryError } = __nccwpck_require__(8045) +const { isDisturbed, parseHeaders, parseRangeHeader } = __nccwpck_require__(3983) + +function calculateRetryAfterHeader (retryAfter) { + const current = Date.now() + const diff = new Date(retryAfter).getTime() - current + + return diff +} + +class RetryHandler { + constructor (opts, handlers) { + const { retryOptions, ...dispatchOpts } = opts + const { + // Retry scoped + retry: retryFn, + maxRetries, + maxTimeout, + minTimeout, + timeoutFactor, + // Response scoped + methods, + errorCodes, + retryAfter, + statusCodes + } = retryOptions ?? {} + + this.dispatch = handlers.dispatch + this.handler = handlers.handler + this.opts = dispatchOpts + this.abort = null + this.aborted = false + this.retryOpts = { + retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry], + retryAfter: retryAfter ?? true, + maxTimeout: maxTimeout ?? 30 * 1000, // 30s, + timeout: minTimeout ?? 500, // .5s + timeoutFactor: timeoutFactor ?? 2, + maxRetries: maxRetries ?? 5, + // What errors we should retry + methods: methods ?? ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'], + // Indicates which errors to retry + statusCodes: statusCodes ?? [500, 502, 503, 504, 429], + // List of errors to retry + errorCodes: errorCodes ?? 
[ + 'ECONNRESET', + 'ECONNREFUSED', + 'ENOTFOUND', + 'ENETDOWN', + 'ENETUNREACH', + 'EHOSTDOWN', + 'EHOSTUNREACH', + 'EPIPE' + ] + } + + this.retryCount = 0 + this.start = 0 + this.end = null + this.etag = null + this.resume = null + + // Handle possible onConnect duplication + this.handler.onConnect(reason => { + this.aborted = true + if (this.abort) { + this.abort(reason) + } else { + this.reason = reason + } + }) + } + + onRequestSent () { + if (this.handler.onRequestSent) { + this.handler.onRequestSent() + } + } + + onUpgrade (statusCode, headers, socket) { + if (this.handler.onUpgrade) { + this.handler.onUpgrade(statusCode, headers, socket) + } + } + + onConnect (abort) { + if (this.aborted) { + abort(this.reason) + } else { + this.abort = abort + } + } + + onBodySent (chunk) { + if (this.handler.onBodySent) return this.handler.onBodySent(chunk) + } + + static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) { + const { statusCode, code, headers } = err + const { method, retryOptions } = opts + const { + maxRetries, + timeout, + maxTimeout, + timeoutFactor, + statusCodes, + errorCodes, + methods + } = retryOptions + let { counter, currentTimeout } = state + + currentTimeout = + currentTimeout != null && currentTimeout > 0 ? currentTimeout : timeout + + // Any code that is not a Undici's originated and allowed to retry + if ( + code && + code !== 'UND_ERR_REQ_RETRY' && + code !== 'UND_ERR_SOCKET' && + !errorCodes.includes(code) + ) { + cb(err) + return + } + + // If a set of method are provided and the current method is not in the list + if (Array.isArray(methods) && !methods.includes(method)) { + cb(err) + return + } + + // If a set of status code are provided and the current status code is not in the list + if ( + statusCode != null && + Array.isArray(statusCodes) && + !statusCodes.includes(statusCode) + ) { + cb(err) + return + } + + // If we reached the max number of retries + if (counter > maxRetries) { + cb(err) + return + } + + let retryAfterHeader = headers != null && headers['retry-after'] + if (retryAfterHeader) { + retryAfterHeader = Number(retryAfterHeader) + retryAfterHeader = isNaN(retryAfterHeader) + ? calculateRetryAfterHeader(retryAfterHeader) + : retryAfterHeader * 1e3 // Retry-After is in seconds + } + + const retryTimeout = + retryAfterHeader > 0 + ? 
Math.min(retryAfterHeader, maxTimeout) + : Math.min(currentTimeout * timeoutFactor ** counter, maxTimeout) + + state.currentTimeout = retryTimeout + + setTimeout(() => cb(null), retryTimeout) + } + + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const headers = parseHeaders(rawHeaders) + + this.retryCount += 1 + + if (statusCode >= 300) { + this.abort( + new RequestRetryError('Request failed', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } + + // Checkpoint for resume from where we left it + if (this.resume != null) { + this.resume = null + + if (statusCode !== 206) { + return true + } + + const contentRange = parseRangeHeader(headers['content-range']) + // If no content range + if (!contentRange) { + this.abort( + new RequestRetryError('Content-Range mismatch', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } + + // Let's start with a weak etag check + if (this.etag != null && this.etag !== headers.etag) { + this.abort( + new RequestRetryError('ETag mismatch', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } + + const { start, size, end = size } = contentRange + + assert(this.start === start, 'content-range mismatch') + assert(this.end == null || this.end === end, 'content-range mismatch') + + this.resume = resume + return true + } + + if (this.end == null) { + if (statusCode === 206) { + // First time we receive 206 + const range = parseRangeHeader(headers['content-range']) + + if (range == null) { + return this.handler.onHeaders( + statusCode, + rawHeaders, + resume, + statusMessage + ) + } + + const { start, size, end = size } = range + + assert( + start != null && Number.isFinite(start) && this.start !== start, + 'content-range mismatch' + ) + assert(Number.isFinite(start)) + assert( + end != null && Number.isFinite(end) && this.end !== end, + 'invalid content-length' + ) + + this.start = start + this.end = end + } + + // We make our best to checkpoint the body for further range headers + if (this.end == null) { + const contentLength = headers['content-length'] + this.end = contentLength != null ? Number(contentLength) : null + } + + assert(Number.isFinite(this.start)) + assert( + this.end == null || Number.isFinite(this.end), + 'invalid content-length' + ) + + this.resume = resume + this.etag = headers.etag != null ? headers.etag : null + + return this.handler.onHeaders( + statusCode, + rawHeaders, + resume, + statusMessage + ) + } + + const err = new RequestRetryError('Request failed', statusCode, { + headers, + count: this.retryCount + }) + + this.abort(err) + + return false + } + + onData (chunk) { + this.start += chunk.length + + return this.handler.onData(chunk) + } + + onComplete (rawTrailers) { + this.retryCount = 0 + return this.handler.onComplete(rawTrailers) + } + + onError (err) { + if (this.aborted || isDisturbed(this.opts.body)) { + return this.handler.onError(err) + } + + this.retryOpts.retry( + err, + { + state: { counter: this.retryCount++, currentTimeout: this.retryAfter }, + opts: { retryOptions: this.retryOpts, ...this.opts } + }, + onRetry.bind(this) + ) + + function onRetry (err) { + if (err != null || this.aborted || isDisturbed(this.opts.body)) { + return this.handler.onError(err) + } + + if (this.start !== 0) { + this.opts = { + ...this.opts, + headers: { + ...this.opts.headers, + range: `bytes=${this.start}-${this.end ?? 
''}` + } + } + } + + try { + this.dispatch(this.opts, this) + } catch (err) { + this.handler.onError(err) + } + } + } +} + +module.exports = RetryHandler + + /***/ }), /***/ 8861: @@ -26817,7 +27337,7 @@ class Pool extends PoolBase { maxCachedSessions, allowH2, socketPath, - timeout: connectTimeout == null ? 10e3 : connectTimeout, + timeout: connectTimeout, ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined), ...connect }) @@ -26927,6 +27447,9 @@ class ProxyAgent extends DispatcherBase { this[kProxyTls] = opts.proxyTls this[kProxyHeaders] = opts.headers || {} + const resolvedUrl = new URL(opts.uri) + const { origin, port, host, username, password } = resolvedUrl + if (opts.auth && opts.token) { throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token') } else if (opts.auth) { @@ -26934,11 +27457,10 @@ class ProxyAgent extends DispatcherBase { this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}` } else if (opts.token) { this[kProxyHeaders]['proxy-authorization'] = opts.token + } else if (username && password) { + this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}` } - const resolvedUrl = new URL(opts.uri) - const { origin, port, host } = resolvedUrl - const connect = buildConnector({ ...opts.proxyTls }) this[kConnectEndpoint] = buildConnector({ ...opts.requestTls }) this[kClient] = clientFactory(resolvedUrl, { connect }) @@ -26962,7 +27484,7 @@ class ProxyAgent extends DispatcherBase { }) if (statusCode !== 200) { socket.on('error', () => {}).destroy() - callback(new RequestAbortedError('Proxy response !== 200 when HTTP Tunneling')) + callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`)) } if (opts.protocol !== 'https:') { callback(null, socket) From 9efbf6e311aecc518551099c599bc7d56a0776f3 Mon Sep 17 00:00:00 2001 From: Dharmesh Patel Date: Fri, 8 Mar 2024 22:06:44 +0530 Subject: [PATCH 03/11] Skip the release branch from the PR validations. --- dist/index.js | 6 ++++++ src/pull-request.js | 6 ++++++ 2 files changed, 12 insertions(+) diff --git a/dist/index.js b/dist/index.js index 568c8de..7adedcb 100644 --- a/dist/index.js +++ b/dist/index.js @@ -32202,6 +32202,12 @@ async function pull_request_run() { pull_request_core.debug(`Pull Request: ${JSON.stringify(pullRequest)}`); pull_request_core.debug(`Is Draft: ${JSON.stringify(isDraft)}`); + // Ignore release PRs. + if ( pullRequest?.head.ref && pullRequest?.head.ref.startsWith("release/") ) { + pull_request_core.info("Skipping release PR"); + return; + } + // Handle Bot User if ("Bot" === author?.type) { // Skip validation against bot user. diff --git a/src/pull-request.js b/src/pull-request.js index 6b86e93..8fd7cd5 100644 --- a/src/pull-request.js +++ b/src/pull-request.js @@ -47,6 +47,12 @@ export async function run() { core.debug(`Pull Request: ${JSON.stringify(pullRequest)}`); core.debug(`Is Draft: ${JSON.stringify(isDraft)}`); + // Ignore release PRs. + if ( pullRequest?.head.ref && pullRequest?.head.ref.startsWith("release/") ) { + core.info("Skipping release PR"); + return; + } + // Handle Bot User if ("Bot" === author?.type) { // Skip validation against bot user. 
From 7a405d9220c8bdeb81bd7b76c0500b3f35e698ce Mon Sep 17 00:00:00 2001 From: Dharmesh Patel Date: Fri, 8 Mar 2024 23:07:26 +0530 Subject: [PATCH 04/11] Update src/pull-request.js Co-authored-by: Darin Kotter --- src/pull-request.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pull-request.js b/src/pull-request.js index 8fd7cd5..cb44c53 100644 --- a/src/pull-request.js +++ b/src/pull-request.js @@ -48,7 +48,7 @@ export async function run() { core.debug(`Is Draft: ${JSON.stringify(isDraft)}`); // Ignore release PRs. - if ( pullRequest?.head.ref && pullRequest?.head.ref.startsWith("release/") ) { + if (pullRequest?.head.ref && pullRequest?.head.ref.startsWith("release/")) { core.info("Skipping release PR"); return; } From f01095dbf2a63cec117f6e1b7d231275e4d1def3 Mon Sep 17 00:00:00 2001 From: Dharmesh Patel Date: Fri, 8 Mar 2024 23:09:10 +0530 Subject: [PATCH 05/11] regenerate dist file. --- dist/index.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dist/index.js b/dist/index.js index 7adedcb..08fb613 100644 --- a/dist/index.js +++ b/dist/index.js @@ -32203,7 +32203,7 @@ async function pull_request_run() { pull_request_core.debug(`Is Draft: ${JSON.stringify(isDraft)}`); // Ignore release PRs. - if ( pullRequest?.head.ref && pullRequest?.head.ref.startsWith("release/") ) { + if (pullRequest?.head.ref && pullRequest?.head.ref.startsWith("release/")) { pull_request_core.info("Skipping release PR"); return; } From 5d06eea7dbd9971714115ba689dc8446b9e9dfe1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 4 Apr 2024 17:34:42 +0000 Subject: [PATCH 06/11] Bump undici from 5.28.3 to 5.28.4 in the npm_and_yarn group Bumps the npm_and_yarn group with 1 update: [undici](https://github.com/nodejs/undici). Updates `undici` from 5.28.3 to 5.28.4 - [Release notes](https://github.com/nodejs/undici/releases) - [Commits](https://github.com/nodejs/undici/compare/v5.28.3...v5.28.4) --- updated-dependencies: - dependency-name: undici dependency-type: indirect dependency-group: npm_and_yarn ... 
Signed-off-by: dependabot[bot] --- package-lock.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/package-lock.json b/package-lock.json index 6072396..eff8451 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1364,9 +1364,9 @@ } }, "node_modules/undici": { - "version": "5.28.3", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.3.tgz", - "integrity": "sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA==", + "version": "5.28.4", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz", + "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==", "dependencies": { "@fastify/busboy": "^2.0.0" }, @@ -2439,9 +2439,9 @@ "dev": true }, "undici": { - "version": "5.28.3", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.3.tgz", - "integrity": "sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA==", + "version": "5.28.4", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz", + "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==", "requires": { "@fastify/busboy": "^2.0.0" } From 14eef754d4eb998aaf675e351e7456e4be55193e Mon Sep 17 00:00:00 2001 From: Dharmesh Patel Date: Fri, 5 Apr 2024 22:59:33 +0530 Subject: [PATCH 07/11] Update dist file. --- dist/index.js | 297 +++++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 256 insertions(+), 41 deletions(-) diff --git a/dist/index.js b/dist/index.js index 08fb613..007c756 100644 --- a/dist/index.js +++ b/dist/index.js @@ -14044,6 +14044,132 @@ function onConnectTimeout (socket) { module.exports = buildConnector +/***/ }), + +/***/ 4462: +/***/ ((module) => { + +"use strict"; + + +/** @type {Record} */ +const headerNameLowerCasedRecord = {} + +// https://developer.mozilla.org/docs/Web/HTTP/Headers +const wellknownHeaderNames = [ + 'Accept', + 'Accept-Encoding', + 'Accept-Language', + 'Accept-Ranges', + 'Access-Control-Allow-Credentials', + 'Access-Control-Allow-Headers', + 'Access-Control-Allow-Methods', + 'Access-Control-Allow-Origin', + 'Access-Control-Expose-Headers', + 'Access-Control-Max-Age', + 'Access-Control-Request-Headers', + 'Access-Control-Request-Method', + 'Age', + 'Allow', + 'Alt-Svc', + 'Alt-Used', + 'Authorization', + 'Cache-Control', + 'Clear-Site-Data', + 'Connection', + 'Content-Disposition', + 'Content-Encoding', + 'Content-Language', + 'Content-Length', + 'Content-Location', + 'Content-Range', + 'Content-Security-Policy', + 'Content-Security-Policy-Report-Only', + 'Content-Type', + 'Cookie', + 'Cross-Origin-Embedder-Policy', + 'Cross-Origin-Opener-Policy', + 'Cross-Origin-Resource-Policy', + 'Date', + 'Device-Memory', + 'Downlink', + 'ECT', + 'ETag', + 'Expect', + 'Expect-CT', + 'Expires', + 'Forwarded', + 'From', + 'Host', + 'If-Match', + 'If-Modified-Since', + 'If-None-Match', + 'If-Range', + 'If-Unmodified-Since', + 'Keep-Alive', + 'Last-Modified', + 'Link', + 'Location', + 'Max-Forwards', + 'Origin', + 'Permissions-Policy', + 'Pragma', + 'Proxy-Authenticate', + 'Proxy-Authorization', + 'RTT', + 'Range', + 'Referer', + 'Referrer-Policy', + 'Refresh', + 'Retry-After', + 'Sec-WebSocket-Accept', + 'Sec-WebSocket-Extensions', + 'Sec-WebSocket-Key', + 'Sec-WebSocket-Protocol', + 'Sec-WebSocket-Version', + 'Server', + 'Server-Timing', + 'Service-Worker-Allowed', + 'Service-Worker-Navigation-Preload', + 'Set-Cookie', + 'SourceMap', + 
'Strict-Transport-Security', + 'Supports-Loading-Mode', + 'TE', + 'Timing-Allow-Origin', + 'Trailer', + 'Transfer-Encoding', + 'Upgrade', + 'Upgrade-Insecure-Requests', + 'User-Agent', + 'Vary', + 'Via', + 'WWW-Authenticate', + 'X-Content-Type-Options', + 'X-DNS-Prefetch-Control', + 'X-Frame-Options', + 'X-Permitted-Cross-Domain-Policies', + 'X-Powered-By', + 'X-Requested-With', + 'X-XSS-Protection' +] + +for (let i = 0; i < wellknownHeaderNames.length; ++i) { + const key = wellknownHeaderNames[i] + const lowerCasedKey = key.toLowerCase() + headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] = + lowerCasedKey +} + +// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. +Object.setPrototypeOf(headerNameLowerCasedRecord, null) + +module.exports = { + wellknownHeaderNames, + headerNameLowerCasedRecord +} + + /***/ }), /***/ 8045: @@ -14876,6 +15002,7 @@ const { InvalidArgumentError } = __nccwpck_require__(8045) const { Blob } = __nccwpck_require__(4300) const nodeUtil = __nccwpck_require__(3837) const { stringify } = __nccwpck_require__(3477) +const { headerNameLowerCasedRecord } = __nccwpck_require__(4462) const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) @@ -15085,6 +15212,15 @@ function parseKeepAliveTimeout (val) { return m ? parseInt(m[1], 10) * 1000 : null } +/** + * Retrieves a header name and returns its lowercase value. + * @param {string | Buffer} value Header name + * @returns {string} + */ +function headerNameToString (value) { + return headerNameLowerCasedRecord[value] || value.toLowerCase() +} + function parseHeaders (headers, obj = {}) { // For H2 support if (!Array.isArray(headers)) return headers @@ -15356,6 +15492,7 @@ module.exports = { isIterable, isAsyncIterable, isDestroyed, + headerNameToString, parseRawHeaders, parseHeaders, parseKeepAliveTimeout, @@ -22003,14 +22140,18 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(3983 const assert = __nccwpck_require__(9491) const { isUint8Array } = __nccwpck_require__(9830) +let supportedHashes = [] + // https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable /** @type {import('crypto')|undefined} */ let crypto try { crypto = __nccwpck_require__(6113) + const possibleRelevantHashes = ['sha256', 'sha384', 'sha512'] + supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)) +/* c8 ignore next 3 */ } catch { - } function responseURL (response) { @@ -22538,66 +22679,56 @@ function bytesMatch (bytes, metadataList) { return true } - // 3. If parsedMetadata is the empty set, return true. + // 3. If response is not eligible for integrity validation, return false. + // TODO + + // 4. If parsedMetadata is the empty set, return true. if (parsedMetadata.length === 0) { return true } - // 4. Let metadata be the result of getting the strongest + // 5. Let metadata be the result of getting the strongest // metadata from parsedMetadata. - const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo)) - // get the strongest algorithm - const strongest = list[0].algo - // get all entries that use the strongest algorithm; ignore weaker - const metadata = list.filter((item) => item.algo === strongest) + const strongest = getStrongestMetadata(parsedMetadata) + const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest) - // 5. For each item in metadata: + // 6. For each item in metadata: for (const item of metadata) { // 1. 
Let algorithm be the alg component of item.
     const algorithm = item.algo
 
     // 2. Let expectedValue be the val component of item.
-    let expectedValue = item.hash
+    const expectedValue = item.hash
 
     // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
     // "be liberal with padding". This is annoying, and it's not even in the spec.
 
-    if (expectedValue.endsWith('==')) {
-      expectedValue = expectedValue.slice(0, -2)
-    }
-
     // 3. Let actualValue be the result of applying algorithm to bytes.
     let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
 
-    if (actualValue.endsWith('==')) {
-      actualValue = actualValue.slice(0, -2)
+    if (actualValue[actualValue.length - 1] === '=') {
+      if (actualValue[actualValue.length - 2] === '=') {
+        actualValue = actualValue.slice(0, -2)
+      } else {
+        actualValue = actualValue.slice(0, -1)
+      }
     }
 
     // 4. If actualValue is a case-sensitive match for expectedValue,
     //    return true.
-    if (actualValue === expectedValue) {
-      return true
-    }
-
-    let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')
-
-    if (actualBase64URL.endsWith('==')) {
-      actualBase64URL = actualBase64URL.slice(0, -2)
-    }
-
-    if (actualBase64URL === expectedValue) {
+    if (compareBase64Mixed(actualValue, expectedValue)) {
       return true
     }
   }
 
-  // 6. Return false.
+  // 7. Return false.
   return false
 }
 
 // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
 // https://www.w3.org/TR/CSP2/#source-list-syntax
 // https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
-const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i
+const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i
 
 /**
  * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
@@ -22611,8 +22742,6 @@ function parseMetadata (metadata) {
   // 2. Let empty be equal to true.
   let empty = true
 
-  const supportedHashes = crypto.getHashes()
-
   // 3. For each token returned by splitting metadata on spaces:
   for (const token of metadata.split(' ')) {
     // 1. Set empty to false.
@@ -22622,7 +22751,11 @@ function parseMetadata (metadata) {
     const parsedToken = parseHashWithOptions.exec(token)
 
     // 3. If token does not parse, continue to the next token.
-    if (parsedToken === null || parsedToken.groups === undefined) {
+    if (
+      parsedToken === null ||
+      parsedToken.groups === undefined ||
+      parsedToken.groups.algo === undefined
+    ) {
       // Note: Chromium blocks the request at this point, but Firefox
       // gives a warning that an invalid integrity was given. The
       // correct behavior is to ignore these, and subsequently not
@@ -22631,11 +22764,11 @@
     }
 
     // 4. Let algorithm be the hash-algo component of token.
-    const algorithm = parsedToken.groups.algo
+    const algorithm = parsedToken.groups.algo.toLowerCase()
 
     // 5. If algorithm is a hash function recognized by the user
     //    agent, add the parsed token to result.
-    if (supportedHashes.includes(algorithm.toLowerCase())) {
+    if (supportedHashes.includes(algorithm)) {
       result.push(parsedToken.groups)
     }
   }
@@ -22648,6 +22781,82 @@ function parseMetadata (metadata) {
   return result
 }
 
+/**
+ * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList
+ */
+function getStrongestMetadata (metadataList) {
+  // Let algorithm be the algo component of the first item in metadataList.
+ // Can be sha256 + let algorithm = metadataList[0].algo + // If the algorithm is sha512, then it is the strongest + // and we can return immediately + if (algorithm[3] === '5') { + return algorithm + } + + for (let i = 1; i < metadataList.length; ++i) { + const metadata = metadataList[i] + // If the algorithm is sha512, then it is the strongest + // and we can break the loop immediately + if (metadata.algo[3] === '5') { + algorithm = 'sha512' + break + // If the algorithm is sha384, then a potential sha256 or sha384 is ignored + } else if (algorithm[3] === '3') { + continue + // algorithm is sha256, check if algorithm is sha384 and if so, set it as + // the strongest + } else if (metadata.algo[3] === '3') { + algorithm = 'sha384' + } + } + return algorithm +} + +function filterMetadataListByAlgorithm (metadataList, algorithm) { + if (metadataList.length === 1) { + return metadataList + } + + let pos = 0 + for (let i = 0; i < metadataList.length; ++i) { + if (metadataList[i].algo === algorithm) { + metadataList[pos++] = metadataList[i] + } + } + + metadataList.length = pos + + return metadataList +} + +/** + * Compares two base64 strings, allowing for base64url + * in the second string. + * +* @param {string} actualValue always base64 + * @param {string} expectedValue base64 or base64url + * @returns {boolean} + */ +function compareBase64Mixed (actualValue, expectedValue) { + if (actualValue.length !== expectedValue.length) { + return false + } + for (let i = 0; i < actualValue.length; ++i) { + if (actualValue[i] !== expectedValue[i]) { + if ( + (actualValue[i] === '+' && expectedValue[i] === '-') || + (actualValue[i] === '/' && expectedValue[i] === '_') + ) { + continue + } + return false + } + } + + return true +} + // https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) { // TODO @@ -23063,7 +23272,8 @@ module.exports = { urlHasHttpsScheme, urlIsHttpHttpsScheme, readAllBytes, - normalizeMethodRecord + normalizeMethodRecord, + parseMetadata } @@ -25150,12 +25360,17 @@ function parseLocation (statusCode, headers) { // https://tools.ietf.org/html/rfc7231#section-6.4.4 function shouldRemoveHeader (header, removeContent, unknownOrigin) { - return ( - (header.length === 4 && header.toString().toLowerCase() === 'host') || - (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) || - (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') || - (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie') - ) + if (header.length === 4) { + return util.headerNameToString(header) === 'host' + } + if (removeContent && util.headerNameToString(header).startsWith('content-')) { + return true + } + if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) { + const name = util.headerNameToString(header) + return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization' + } + return false } // https://tools.ietf.org/html/rfc7231#section-6.4 From 7b4797b8bf090ce38ff9842053ebd3bdc3f0f867 Mon Sep 17 00:00:00 2001 From: Dharmesh Patel Date: Mon, 22 Apr 2024 12:54:48 +0530 Subject: [PATCH 08/11] Bumped `actions/upload-artifact` from v3 to v4 --- .github/workflows/check-dist.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/check-dist.yml b/.github/workflows/check-dist.yml index 3cdd4f3..01a0c5f 100644 --- a/.github/workflows/check-dist.yml +++ 
b/.github/workflows/check-dist.yml @@ -44,7 +44,7 @@ jobs: id: diff # If index.js was different than expected, upload the expected version as an artifact - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: ${{ failure() && steps.diff.conclusion == 'failure' }} with: name: dist From e8eb8f98b080b08419fa1b4ac5ae60ded164d548 Mon Sep 17 00:00:00 2001 From: Dharmesh Patel Date: Fri, 3 May 2024 17:08:40 +0530 Subject: [PATCH 09/11] Bump version to 2.1.2 --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index eff8451..378e13f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "10up/action-repo-automator", - "version": "2.1.1", + "version": "2.1.2", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "10up/action-repo-automator", - "version": "2.1.1", + "version": "2.1.2", "license": "MIT", "dependencies": { "@actions/core": "^1.10.1", diff --git a/package.json b/package.json index c7c2cea..bdc82b3 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "10up/action-repo-automator", - "version": "2.1.1", + "version": "2.1.2", "description": "GitHub Action to automate some common repository operations and validate PR description, changelog and credits.", "keywords": [ "GitHub", From 54b5ff4199bdeb3f1a483883498eec0d4b332753 Mon Sep 17 00:00:00 2001 From: Dharmesh Patel Date: Fri, 3 May 2024 17:56:54 +0530 Subject: [PATCH 10/11] Changelog updates. --- CHANGELOG.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 67fca99..2f3e67f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,14 @@ All notable changes to this project will be documented in this file, per [the Ke ## [Unreleased] - TBD +## [2.1.2] - 2024-05-XX +### Changed +- Exclude the release pull request from the PR validations. 
(props [@iamdharmesh](https://github.com/iamdharmesh), [@dkotter](https://github.com/dkotter) via [#119](https://github.com/10up/action-repo-automator/pull/119)) +- Bump `actions/upload-artifact` from v3 to v4 (props [@iamdharmesh](https://github.com/iamdharmesh), [@jeffpaul](https://github.com/jeffpaul) via [#121](https://github.com/10up/action-repo-automator/pull/121)) + +### Security +- Bump `undici` from 5.26.4 to 5.26.4 (props [@dependabot[bot]](https://github.com/apps/dependabot), [@iamdharmesh](https://github.com/iamdharmesh) via [#115](https://github.com/10up/action-repo-automator/pull/115), [#120](https://github.com/10up/action-repo-automator/pull/120)) + ## [2.1.1] - 2024-02-29 ### Changed - Disabled dependabot version updates for GitHub Actions and npm dependencies (props [@iamdharmesh](https://github.com/iamdharmesh), [@jeffpaul](https://github.com/jeffpaul) via [#113](https://github.com/10up/action-repo-automator/pull/113)) @@ -65,6 +73,7 @@ All notable changes to this project will be documented in this file, per [the Ke - Initial project release 🎉 [Unreleased]: https://github.com/10up/action-repo-automator/compare/trunk...develop +[2.1.2]: https://github.com/10up/action-repo-automator/compare/2.1.1..2.1.2 [2.1.1]: https://github.com/10up/action-repo-automator/compare/2.1.0..2.1.1 [2.1.0]: https://github.com/10up/action-repo-automator/compare/2.0.0..2.1.0 [2.0.0]: https://github.com/10up/action-repo-automator/compare/1.0.1..2.0.0 From 8107fd599433a658900a3df9b71eb426949f7e6f Mon Sep 17 00:00:00 2001 From: Jeffrey Paul Date: Fri, 3 May 2024 08:17:38 -0500 Subject: [PATCH 11/11] Update CHANGELOG.md --- CHANGELOG.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2f3e67f..c88c4cd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,13 +4,13 @@ All notable changes to this project will be documented in this file, per [the Ke ## [Unreleased] - TBD -## [2.1.2] - 2024-05-XX +## [2.1.2] - 2024-05-06 ### Changed -- Exclude the release pull request from the PR validations. (props [@iamdharmesh](https://github.com/iamdharmesh), [@dkotter](https://github.com/dkotter) via [#119](https://github.com/10up/action-repo-automator/pull/119)) -- Bump `actions/upload-artifact` from v3 to v4 (props [@iamdharmesh](https://github.com/iamdharmesh), [@jeffpaul](https://github.com/jeffpaul) via [#121](https://github.com/10up/action-repo-automator/pull/121)) +- Exclude the release pull request from the PR validations (props [@iamdharmesh](https://github.com/iamdharmesh), [@dkotter](https://github.com/dkotter) via [#119](https://github.com/10up/action-repo-automator/pull/119)). +- Bump `actions/upload-artifact` from v3 to v4 (props [@iamdharmesh](https://github.com/iamdharmesh), [@jeffpaul](https://github.com/jeffpaul) via [#121](https://github.com/10up/action-repo-automator/pull/121)). ### Security -- Bump `undici` from 5.26.4 to 5.26.4 (props [@dependabot[bot]](https://github.com/apps/dependabot), [@iamdharmesh](https://github.com/iamdharmesh) via [#115](https://github.com/10up/action-repo-automator/pull/115), [#120](https://github.com/10up/action-repo-automator/pull/120)) +- Bump `undici` from 5.26.4 to 5.28.4 (props [@dependabot[bot]](https://github.com/apps/dependabot), [@iamdharmesh](https://github.com/iamdharmesh) via [#115](https://github.com/10up/action-repo-automator/pull/115), [#120](https://github.com/10up/action-repo-automator/pull/120)). ## [2.1.1] - 2024-02-29 ### Changed