diff --git a/deps/undici/src/lib/compat/dispatcher-weakref.js b/deps/undici/src/lib/compat/dispatcher-weakref.js
index dbca8580404ebf..db97e0f6e1eaad 100644
--- a/deps/undici/src/lib/compat/dispatcher-weakref.js
+++ b/deps/undici/src/lib/compat/dispatcher-weakref.js
@@ -31,6 +31,14 @@ class CompatFinalizer {
}
module.exports = function () {
+ // FIXME: remove workaround when the Node bug is fixed
+ // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308
+ if (process.env.NODE_V8_COVERAGE) {
+ return {
+ WeakRef: CompatWeakRef,
+ FinalizationRegistry: CompatFinalizer
+ }
+ }
return {
WeakRef: global.WeakRef || CompatWeakRef,
FinalizationRegistry: global.FinalizationRegistry || CompatFinalizer
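For orientation, a hedged sketch of how this factory is consumed (the relative require path and the stand-in dispatcher object are assumptions for illustration); with `NODE_V8_COVERAGE` set, the compat classes are returned even when native `WeakRef`/`FinalizationRegistry` exist:

```js
// Hedged sketch, not undici's actual caller: obtain the pair from the
// factory patched above (path assumed relative to deps/undici/src).
const { WeakRef, FinalizationRegistry } = require('./lib/compat/dispatcher-weakref')()

// Under NODE_V8_COVERAGE these are CompatWeakRef/CompatFinalizer, which hold
// strong references and avoid GC-driven finalization entirely.
const registry = new FinalizationRegistry((key) => {
  console.log(`dispatcher for ${key} is gone`)
})

const dispatcher = { destroy () {} } // stand-in, not a real undici Dispatcher
registry.register(dispatcher, 'https://example.com')
console.log(new WeakRef(dispatcher).deref() === dispatcher) // true
```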
diff --git a/deps/undici/src/lib/core/connect.js b/deps/undici/src/lib/core/connect.js
index bb71085a1565fc..33091173fa8dbc 100644
--- a/deps/undici/src/lib/core/connect.js
+++ b/deps/undici/src/lib/core/connect.js
@@ -13,7 +13,9 @@ let tls // include tls conditionally since it is not always available
// re-use is enabled.
let SessionCache
-if (global.FinalizationRegistry) {
+// FIXME: remove workaround when the Node bug is fixed
+// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308
+if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) {
SessionCache = class WeakSessionCache {
constructor (maxCachedSessions) {
this._maxCachedSessions = maxCachedSessions
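The guarded branch builds a weak TLS session cache; the rest of the class sits outside the hunk, but a minimal sketch of the pattern (illustrative names, not necessarily undici's exact code) looks like this:

```js
// Hedged sketch: cache TLS sessions behind WeakRefs and let a
// FinalizationRegistry drop entries once the session is collected.
class WeakSessionCache {
  constructor (maxCachedSessions) {
    this._maxCachedSessions = maxCachedSessions
    this._sessionCache = new Map()
    this._sessionRegistry = new FinalizationRegistry((key) => {
      const ref = this._sessionCache.get(key)
      // Only drop the entry if the WeakRef really is dead; the key may have
      // been overwritten with a fresh session in the meantime.
      if (ref !== undefined && ref.deref() === undefined) {
        this._sessionCache.delete(key)
      }
    })
  }

  get (sessionKey) {
    const ref = this._sessionCache.get(sessionKey)
    return ref ? ref.deref() : null
  }

  set (sessionKey, session) {
    if (this._maxCachedSessions === 0) return
    this._sessionCache.set(sessionKey, new WeakRef(session))
    this._sessionRegistry.register(session, sessionKey)
  }
}
```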
diff --git a/deps/undici/src/lib/core/util.js b/deps/undici/src/lib/core/util.js
index 259ba7b38a64e9..769811f57f7b8c 100644
--- a/deps/undici/src/lib/core/util.js
+++ b/deps/undici/src/lib/core/util.js
@@ -58,31 +58,31 @@ function parseURL (url) {
throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.')
}
- if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) {
- throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.')
+ if (!/^https?:/.test(url.origin || url.protocol)) {
+ throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
}
- if (url.path != null && typeof url.path !== 'string') {
- throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.')
- }
+ if (!(url instanceof URL)) {
+ if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) {
+ throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.')
+ }
- if (url.pathname != null && typeof url.pathname !== 'string') {
- throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.')
- }
+ if (url.path != null && typeof url.path !== 'string') {
+ throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.')
+ }
- if (url.hostname != null && typeof url.hostname !== 'string') {
- throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.')
- }
+ if (url.pathname != null && typeof url.pathname !== 'string') {
+ throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.')
+ }
- if (url.origin != null && typeof url.origin !== 'string') {
- throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.')
- }
+ if (url.hostname != null && typeof url.hostname !== 'string') {
+ throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.')
+ }
- if (!/^https?:/.test(url.origin || url.protocol)) {
- throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
- }
+ if (url.origin != null && typeof url.origin !== 'string') {
+ throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.')
+ }
- if (!(url instanceof URL)) {
const port = url.port != null
? url.port
: (url.protocol === 'https:' ? 443 : 80)
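In effect, the protocol check now applies to every input, while the per-field type checks only run for plain objects. A hedged sketch (`parseURL` is an internal helper; the require path is an assumption):

```js
const { parseURL } = require('./lib/core/util') // internal, path assumed

// WHATWG URL instances skip the per-field checks and are returned as-is.
parseURL(new URL('https://example.com:8080/api'))

// Plain objects still get port/path/pathname/hostname/origin validated,
// then get normalized into a URL.
parseURL({ protocol: 'https:', hostname: 'example.com', port: '8080', pathname: '/api' })

// The protocol check runs first for both shapes.
try {
  parseURL({ protocol: 'ftp:', hostname: 'example.com' })
} catch (err) {
  console.log(err.message) // Invalid URL protocol: the URL must start with `http:` or `https:`.
}
```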
diff --git a/deps/undici/src/lib/fetch/body.js b/deps/undici/src/lib/fetch/body.js
index 105eb553157b06..1d9f17d7e330c6 100644
--- a/deps/undici/src/lib/fetch/body.js
+++ b/deps/undici/src/lib/fetch/body.js
@@ -1,6 +1,6 @@
'use strict'
-const Busboy = require('busboy')
+const Busboy = require('@fastify/busboy')
const util = require('../core/util')
const {
ReadableStreamFrom,
@@ -385,10 +385,9 @@ function bodyMixinMethods (instance) {
let busboy
try {
- busboy = Busboy({
+ busboy = new Busboy({
headers,
- preservePath: true,
- defParamCharset: 'utf8'
+ preservePath: true
})
} catch (err) {
throw new DOMException(`${err}`, 'AbortError')
@@ -397,8 +396,7 @@ function bodyMixinMethods (instance) {
busboy.on('field', (name, value) => {
responseFormData.append(name, value)
})
- busboy.on('file', (name, value, info) => {
- const { filename, encoding, mimeType } = info
+ busboy.on('file', (name, value, filename, encoding, mimeType) => {
const chunks = []
if (encoding === 'base64' || encoding.toLowerCase() === 'base64') {
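The switch changes both how the parser is constructed and the shape of the 'file' callback; a hedged side-by-side (the headers object is invented for illustration):

```js
const Busboy = require('@fastify/busboy')

const headers = { 'content-type': 'multipart/form-data; boundary=----example' }
const busboy = new Busboy({ headers, preservePath: true })

// Previously vendored busboy passed an info object:
// busboy.on('file', (name, stream, info) => {
//   const { filename, encoding, mimeType } = info
// })

// @fastify/busboy keeps the classic positional arguments, which is what the
// updated handler above receives directly:
busboy.on('file', (name, stream, filename, encoding, mimeType) => {
  stream.resume() // always drain the stream, even if its contents are discarded
})
```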
diff --git a/deps/undici/src/lib/fetch/global.js b/deps/undici/src/lib/fetch/global.js
index 42282acdfe2572..1df6f1227bc265 100644
--- a/deps/undici/src/lib/fetch/global.js
+++ b/deps/undici/src/lib/fetch/global.js
@@ -9,14 +9,6 @@ function getGlobalOrigin () {
}
function setGlobalOrigin (newOrigin) {
- if (
- newOrigin !== undefined &&
- typeof newOrigin !== 'string' &&
- !(newOrigin instanceof URL)
- ) {
- throw new Error('Invalid base url')
- }
-
if (newOrigin === undefined) {
Object.defineProperty(globalThis, globalOrigin, {
value: undefined,
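`setGlobalOrigin`/`getGlobalOrigin` are part of undici's public surface; a hedged usage sketch (the port and path are invented):

```js
const { fetch, setGlobalOrigin, getGlobalOrigin } = require('undici')

setGlobalOrigin('http://localhost:3000')
console.log(getGlobalOrigin().href) // http://localhost:3000/

// Relative request URLs resolve against the configured global origin.
fetch('/api/ping')
  .then((res) => console.log(res.status))
  .catch(console.error)

// With the explicit type check removed, a value that is neither a string nor
// a URL is expected to fail later, when it is parsed as a URL.
```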
diff --git a/deps/undici/src/node_modules/busboy/LICENSE b/deps/undici/src/node_modules/@fastify/busboy/LICENSE
similarity index 100%
rename from deps/undici/src/node_modules/busboy/LICENSE
rename to deps/undici/src/node_modules/@fastify/busboy/LICENSE
diff --git a/deps/undici/src/node_modules/@fastify/busboy/README.md b/deps/undici/src/node_modules/@fastify/busboy/README.md
new file mode 100644
index 00000000000000..c74e618b6d6044
--- /dev/null
+++ b/deps/undici/src/node_modules/@fastify/busboy/README.md
@@ -0,0 +1,271 @@
+# busboy
+
+
+
+[](https://github.com/fastify/busboy/actions)
+[](https://coveralls.io/r/fastify/busboy?branch=master)
+[](https://standardjs.com/)
+[](https://github.com/nodejs/security-wg/blob/HEAD/processes/responsible_disclosure_template.md)
+
+
+
+
+
+[NPM version](https://www.npmjs.com/package/@fastify/busboy)
+[NPM downloads](https://www.npmjs.com/package/@fastify/busboy)
+
+
+
+Description
+===========
+
+A Node.js module for parsing incoming HTML form data.
+
+This is an officially supported fork by [fastify](https://github.com/fastify/) organization of the amazing library [originally created](https://github.com/mscdex/busboy) by Brian White,
+aimed at addressing long-standing issues with it.
+
+Benchmark (Mean time for 500 Kb payload, 2000 cycles, 1000 cycle warmup):
+
+| Library | Version | Mean time in nanoseconds (less is better) |
+|-----------------------|---------|-------------------------------------------|
+| busboy | 0.3.1 | `340114` |
+| @fastify/busboy | 1.0.0 | `270984` |
+
+[Changelog](https://github.com/fastify/busboy/blob/master/CHANGELOG.md) since busboy 0.31.
+
+Requirements
+============
+
+* [Node.js](http://nodejs.org/) 10+
+
+
+Install
+=======
+
+ npm i @fastify/busboy
+
+
+Examples
+========
+
+* Parsing (multipart) with default options:
+
+```javascript
+const http = require('node:http');
+const { inspect } = require('node:util');
+const Busboy = require('busboy');
+
+http.createServer((req, res) => {
+ if (req.method === 'POST') {
+ const busboy = new Busboy({ headers: req.headers });
+ busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
+ console.log(`File [${fieldname}]: filename: ${filename}, encoding: ${encoding}, mimetype: ${mimetype}`);
+ file.on('data', data => {
+ console.log(`File [${fieldname}] got ${data.length} bytes`);
+ });
+ file.on('end', () => {
+ console.log(`File [${fieldname}] Finished`);
+ });
+ });
+ busboy.on('field', (fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => {
+ console.log(`Field [${fieldname}]: value: ${inspect(val)}`);
+ });
+ busboy.on('finish', () => {
+ console.log('Done parsing form!');
+ res.writeHead(303, { Connection: 'close', Location: '/' });
+ res.end();
+ });
+ req.pipe(busboy);
+ } else if (req.method === 'GET') {
+ res.writeHead(200, { Connection: 'close' });
+ res.end(`
+
+ `);
+ }
+}).listen(8000, () => {
+ console.log('Listening for requests');
+});
+
+// Example output, using http://nodejs.org/images/ryan-speaker.jpg as the file:
+//
+// Listening for requests
+// File [filefield]: filename: ryan-speaker.jpg, encoding: binary
+// File [filefield] got 11971 bytes
+// Field [textfield]: value: 'testing! :-)'
+// File [filefield] Finished
+// Done parsing form!
+```
+
+* Save all incoming files to disk:
+
+```javascript
+const http = require('node:http');
+const path = require('node:path');
+const os = require('node:os');
+const fs = require('node:fs');
+
+const Busboy = require('busboy');
+
+http.createServer(function(req, res) {
+ if (req.method === 'POST') {
+ const busboy = new Busboy({ headers: req.headers });
+ busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
+ var saveTo = path.join(os.tmpdir(), path.basename(fieldname));
+ file.pipe(fs.createWriteStream(saveTo));
+ });
+ busboy.on('finish', function() {
+ res.writeHead(200, { 'Connection': 'close' });
+ res.end("That's all folks!");
+ });
+ return req.pipe(busboy);
+ }
+ res.writeHead(404);
+ res.end();
+}).listen(8000, function() {
+ console.log('Listening for requests');
+});
+```
+
+* Parsing (urlencoded) with default options:
+
+```javascript
+const http = require('node:http');
+const { inspect } = require('node:util');
+
+const Busboy = require('busboy');
+
+http.createServer(function(req, res) {
+ if (req.method === 'POST') {
+ const busboy = new Busboy({ headers: req.headers });
+ busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
+ console.log('File [' + fieldname + ']: filename: ' + filename);
+ file.on('data', function(data) {
+ console.log('File [' + fieldname + '] got ' + data.length + ' bytes');
+ });
+ file.on('end', function() {
+ console.log('File [' + fieldname + '] Finished');
+ });
+ });
+ busboy.on('field', function(fieldname, val, fieldnameTruncated, valTruncated) {
+ console.log('Field [' + fieldname + ']: value: ' + inspect(val));
+ });
+ busboy.on('finish', function() {
+ console.log('Done parsing form!');
+ res.writeHead(303, { Connection: 'close', Location: '/' });
+ res.end();
+ });
+ req.pipe(busboy);
+ } else if (req.method === 'GET') {
+ res.writeHead(200, { Connection: 'close' });
+ res.end('\
+ \
+ ');
+ }
+}).listen(8000, function() {
+ console.log('Listening for requests');
+});
+
+// Example output:
+//
+// Listening for requests
+// Field [textfield]: value: 'testing! :-)'
+// Field [selectfield]: value: '9001'
+// Field [checkfield]: value: 'on'
+// Done parsing form!
+```
+
+
+API
+===
+
+_Busboy_ is a _Writable_ stream
+
+Busboy (special) events
+-----------------------
+
+* **file**(< _string_ >fieldname, < _ReadableStream_ >stream, < _string_ >filename, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new file form field found. `transferEncoding` contains the 'Content-Transfer-Encoding' value for the file stream. `mimeType` contains the 'Content-Type' value for the file stream.
+ * Note: if you listen for this event, you should always handle the `stream` no matter if you care about the file contents or not (e.g. you can simply just do `stream.resume();` if you want to discard the contents), otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any** incoming files, you can simply not listen for the 'file' event at all and any/all files will be automatically and safely discarded (these discarded files do still count towards `files` and `parts` limits).
+ * If a configured file size limit was reached, `stream` will both have a boolean property `truncated` (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens.
+ * The property `bytesRead` informs about the number of bytes that have been read so far.
+
+* **field**(< _string_ >fieldname, < _string_ >value, < _boolean_ >fieldnameTruncated, < _boolean_ >valueTruncated, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new non-file field found.
+
+* **partsLimit**() - Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted.
+
+* **filesLimit**() - Emitted when specified `files` limit has been reached. No more 'file' events will be emitted.
+
+* **fieldsLimit**() - Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted.
+
+
+Busboy methods
+--------------
+
+* **(constructor)**(< _object_ >config) - Creates and returns a new Busboy instance.
+
+ * The constructor takes the following valid `config` settings:
+
+ * **headers** - _object_ - These are the HTTP headers of the incoming request, which are used by individual parsers.
+
+ * **autoDestroy** - _boolean_ - Whether this stream should automatically call .destroy() on itself after ending. (Default: false).
+
+ * **highWaterMark** - _integer_ - highWaterMark to use for this Busboy instance (Default: WritableStream default).
+
+ * **fileHwm** - _integer_ - highWaterMark to use for file streams (Default: ReadableStream default).
+
+ * **defCharset** - _string_ - Default character set to use when one isn't defined (Default: 'utf8').
+
+ * **preservePath** - _boolean_ - If paths in the multipart 'filename' field shall be preserved. (Default: false).
+
+ * **isPartAFile** - __function__ - Use this function to override the default file detection functionality. It has following parameters:
+
+ * fieldName - __string__ The name of the field.
+
+ * contentType - __string__ The content-type of the part, e.g. `text/plain`, `image/jpeg`, `application/octet-stream`
+
+ * fileName - __string__ The name of a file supplied by the part.
+
+ (Default: `(fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)`)
+
+ * **limits** - _object_ - Various limits on incoming data. Valid properties are:
+
+ * **fieldNameSize** - _integer_ - Max field name size (in bytes) (Default: 100 bytes).
+
+ * **fieldSize** - _integer_ - Max field value size (in bytes) (Default: 1 MiB, which is 1024 x 1024 bytes).
+
+ * **fields** - _integer_ - Max number of non-file fields (Default: Infinity).
+
+ * **fileSize** - _integer_ - For multipart forms, the max file size (in bytes) (Default: Infinity).
+
+ * **files** - _integer_ - For multipart forms, the max number of file fields (Default: Infinity).
+
+ * **parts** - _integer_ - For multipart forms, the max number of parts (fields + files) (Default: Infinity).
+
+ * **headerPairs** - _integer_ - For multipart forms, the max number of header key=>value pairs to parse **Default:** 2000
+
+ * **headerSize** - _integer_ - For multipart forms, the max size of a multipart header **Default:** 81920.
+
+ * The constructor can throw errors:
+
+ * **Busboy expected an options-Object.** - Busboy expected an Object as first parameters.
+
+ * **Busboy expected an options-Object with headers-attribute.** - The first parameter is lacking of a headers-attribute.
+
+ * **Limit $limit is not a valid number** - Busboy expected the desired limit to be of type number. Busboy throws this Error to prevent a potential security issue by falling silently back to the Busboy-defaults. Potential source for this Error can be the direct use of environment variables without transforming them to the type number.
+
+ * **Unsupported Content-Type.** - The `Content-Type` isn't one Busboy can parse.
+
+ * **Missing Content-Type-header.** - The provided headers don't include `Content-Type` at all.
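A hedged example tying the constructor options and limit events above together (boundary, field names, and limit values are invented for illustration):

```js
const Busboy = require('@fastify/busboy')

const busboy = new Busboy({
  headers: { 'content-type': 'multipart/form-data; boundary=----demo' },
  preservePath: false,
  limits: {
    fileSize: 1024 * 1024, // 1 MiB per file; file streams emit 'limit' past this
    files: 2,
    fields: 20,
    headerPairs: 2000
  }
})

busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
  file.on('limit', () => console.log(`File [${fieldname}] hit the fileSize limit`))
  file.resume() // always consume the stream, even when discarding its contents
})
busboy.on('filesLimit', () => console.log('files limit reached; no more file events'))
busboy.on('finish', () => console.log('Done parsing form!'))
```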
diff --git a/deps/undici/src/node_modules/streamsearch/LICENSE b/deps/undici/src/node_modules/@fastify/busboy/deps/dicer/LICENSE
similarity index 98%
rename from deps/undici/src/node_modules/streamsearch/LICENSE
rename to deps/undici/src/node_modules/@fastify/busboy/deps/dicer/LICENSE
index 9ea90e03922d5e..290762e94f4e2f 100644
--- a/deps/undici/src/node_modules/streamsearch/LICENSE
+++ b/deps/undici/src/node_modules/@fastify/busboy/deps/dicer/LICENSE
@@ -1,19 +1,19 @@
-Copyright Brian White. All rights reserved.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to
-deal in the Software without restriction, including without limitation the
-rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-sell copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+Copyright Brian White. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
\ No newline at end of file
diff --git a/deps/undici/src/node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js b/deps/undici/src/node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js
new file mode 100644
index 00000000000000..79da160c34238f
--- /dev/null
+++ b/deps/undici/src/node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js
@@ -0,0 +1,207 @@
+'use strict'
+
+const WritableStream = require('node:stream').Writable
+const inherits = require('node:util').inherits
+
+const StreamSearch = require('../../streamsearch/sbmh')
+
+const PartStream = require('./PartStream')
+const HeaderParser = require('./HeaderParser')
+
+const DASH = 45
+const B_ONEDASH = Buffer.from('-')
+const B_CRLF = Buffer.from('\r\n')
+const EMPTY_FN = function () {}
+
+function Dicer (cfg) {
+ if (!(this instanceof Dicer)) { return new Dicer(cfg) }
+ WritableStream.call(this, cfg)
+
+ if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') }
+
+ if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined }
+
+ this._headerFirst = cfg.headerFirst
+
+ this._dashes = 0
+ this._parts = 0
+ this._finished = false
+ this._realFinish = false
+ this._isPreamble = true
+ this._justMatched = false
+ this._firstWrite = true
+ this._inHeader = true
+ this._part = undefined
+ this._cb = undefined
+ this._ignoreData = false
+ this._partOpts = { highWaterMark: cfg.partHwm }
+ this._pause = false
+
+ const self = this
+ this._hparser = new HeaderParser(cfg)
+ this._hparser.on('header', function (header) {
+ self._inHeader = false
+ self._part.emit('header', header)
+ })
+}
+inherits(Dicer, WritableStream)
+
+Dicer.prototype.emit = function (ev) {
+ if (ev === 'finish' && !this._realFinish) {
+ if (!this._finished) {
+ const self = this
+ process.nextTick(function () {
+ self.emit('error', new Error('Unexpected end of multipart data'))
+ if (self._part && !self._ignoreData) {
+ const type = (self._isPreamble ? 'Preamble' : 'Part')
+ self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data'))
+ self._part.push(null)
+ process.nextTick(function () {
+ self._realFinish = true
+ self.emit('finish')
+ self._realFinish = false
+ })
+ return
+ }
+ self._realFinish = true
+ self.emit('finish')
+ self._realFinish = false
+ })
+ }
+ } else { WritableStream.prototype.emit.apply(this, arguments) }
+}
+
+Dicer.prototype._write = function (data, encoding, cb) {
+ // ignore unexpected data (e.g. extra trailer data after finished)
+ if (!this._hparser && !this._bparser) { return cb() }
+
+ if (this._headerFirst && this._isPreamble) {
+ if (!this._part) {
+ this._part = new PartStream(this._partOpts)
+ if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() }
+ }
+ const r = this._hparser.push(data)
+ if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() }
+ }
+
+ // allows for "easier" testing
+ if (this._firstWrite) {
+ this._bparser.push(B_CRLF)
+ this._firstWrite = false
+ }
+
+ this._bparser.push(data)
+
+ if (this._pause) { this._cb = cb } else { cb() }
+}
+
+Dicer.prototype.reset = function () {
+ this._part = undefined
+ this._bparser = undefined
+ this._hparser = undefined
+}
+
+Dicer.prototype.setBoundary = function (boundary) {
+ const self = this
+ this._bparser = new StreamSearch('\r\n--' + boundary)
+ this._bparser.on('info', function (isMatch, data, start, end) {
+ self._oninfo(isMatch, data, start, end)
+ })
+}
+
+Dicer.prototype._ignore = function () {
+ if (this._part && !this._ignoreData) {
+ this._ignoreData = true
+ this._part.on('error', EMPTY_FN)
+ // we must perform some kind of read on the stream even though we are
+ // ignoring the data, otherwise node's Readable stream will not emit 'end'
+ // after pushing null to the stream
+ this._part.resume()
+ }
+}
+
+Dicer.prototype._oninfo = function (isMatch, data, start, end) {
+ let buf; const self = this; let i = 0; let r; let shouldWriteMore = true
+
+ if (!this._part && this._justMatched && data) {
+ while (this._dashes < 2 && (start + i) < end) {
+ if (data[start + i] === DASH) {
+ ++i
+ ++this._dashes
+ } else {
+ if (this._dashes) { buf = B_ONEDASH }
+ this._dashes = 0
+ break
+ }
+ }
+ if (this._dashes === 2) {
+ if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) }
+ this.reset()
+ this._finished = true
+ // no more parts will be added
+ if (self._parts === 0) {
+ self._realFinish = true
+ self.emit('finish')
+ self._realFinish = false
+ }
+ }
+ if (this._dashes) { return }
+ }
+ if (this._justMatched) { this._justMatched = false }
+ if (!this._part) {
+ this._part = new PartStream(this._partOpts)
+ this._part._read = function (n) {
+ self._unpause()
+ }
+ if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() }
+ if (!this._isPreamble) { this._inHeader = true }
+ }
+ if (data && start < end && !this._ignoreData) {
+ if (this._isPreamble || !this._inHeader) {
+ if (buf) { shouldWriteMore = this._part.push(buf) }
+ shouldWriteMore = this._part.push(data.slice(start, end))
+ if (!shouldWriteMore) { this._pause = true }
+ } else if (!this._isPreamble && this._inHeader) {
+ if (buf) { this._hparser.push(buf) }
+ r = this._hparser.push(data.slice(start, end))
+ if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) }
+ }
+ }
+ if (isMatch) {
+ this._hparser.reset()
+ if (this._isPreamble) { this._isPreamble = false } else {
+ if (start !== end) {
+ ++this._parts
+ this._part.on('end', function () {
+ if (--self._parts === 0) {
+ if (self._finished) {
+ self._realFinish = true
+ self.emit('finish')
+ self._realFinish = false
+ } else {
+ self._unpause()
+ }
+ }
+ })
+ }
+ }
+ this._part.push(null)
+ this._part = undefined
+ this._ignoreData = false
+ this._justMatched = true
+ this._dashes = 0
+ }
+}
+
+Dicer.prototype._unpause = function () {
+ if (!this._pause) { return }
+
+ this._pause = false
+ if (this._cb) {
+ const cb = this._cb
+ this._cb = undefined
+ cb()
+ }
+}
+
+module.exports = Dicer
diff --git a/deps/undici/src/node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js b/deps/undici/src/node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js
new file mode 100644
index 00000000000000..65f667b59be8e7
--- /dev/null
+++ b/deps/undici/src/node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js
@@ -0,0 +1,100 @@
+'use strict'
+
+const EventEmitter = require('node:events').EventEmitter
+const inherits = require('node:util').inherits
+const getLimit = require('../../../lib/utils/getLimit')
+
+const StreamSearch = require('../../streamsearch/sbmh')
+
+const B_DCRLF = Buffer.from('\r\n\r\n')
+const RE_CRLF = /\r\n/g
+const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex
+
+function HeaderParser (cfg) {
+ EventEmitter.call(this)
+
+ cfg = cfg || {}
+ const self = this
+ this.nread = 0
+ this.maxed = false
+ this.npairs = 0
+ this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000)
+ this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024)
+ this.buffer = ''
+ this.header = {}
+ this.finished = false
+ this.ss = new StreamSearch(B_DCRLF)
+ this.ss.on('info', function (isMatch, data, start, end) {
+ if (data && !self.maxed) {
+ if (self.nread + end - start >= self.maxHeaderSize) {
+ end = self.maxHeaderSize - self.nread + start
+ self.nread = self.maxHeaderSize
+ self.maxed = true
+ } else { self.nread += (end - start) }
+
+ self.buffer += data.toString('binary', start, end)
+ }
+ if (isMatch) { self._finish() }
+ })
+}
+inherits(HeaderParser, EventEmitter)
+
+HeaderParser.prototype.push = function (data) {
+ const r = this.ss.push(data)
+ if (this.finished) { return r }
+}
+
+HeaderParser.prototype.reset = function () {
+ this.finished = false
+ this.buffer = ''
+ this.header = {}
+ this.ss.reset()
+}
+
+HeaderParser.prototype._finish = function () {
+ if (this.buffer) { this._parseHeader() }
+ this.ss.matches = this.ss.maxMatches
+ const header = this.header
+ this.header = {}
+ this.buffer = ''
+ this.finished = true
+ this.nread = this.npairs = 0
+ this.maxed = false
+ this.emit('header', header)
+}
+
+HeaderParser.prototype._parseHeader = function () {
+ if (this.npairs === this.maxHeaderPairs) { return }
+
+ const lines = this.buffer.split(RE_CRLF)
+ const len = lines.length
+ let m, h
+
+ for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
+ if (lines[i].length === 0) { continue }
+ if (lines[i][0] === '\t' || lines[i][0] === ' ') {
+ // folded header content
+ // RFC2822 says to just remove the CRLF and not the whitespace following
+ // it, so we follow the RFC and include the leading whitespace ...
+ if (h) {
+ this.header[h][this.header[h].length - 1] += lines[i]
+ continue
+ }
+ }
+
+ const posColon = lines[i].indexOf(':')
+ if (
+ posColon === -1 ||
+ posColon === 0
+ ) {
+ return
+ }
+ m = RE_HDR.exec(lines[i])
+ h = m[1].toLowerCase()
+ this.header[h] = this.header[h] || []
+ this.header[h].push((m[2] || ''))
+ if (++this.npairs === this.maxHeaderPairs) { break }
+ }
+}
+
+module.exports = HeaderParser
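A hedged sketch of the parser in isolation (the require path mirrors the package layout in this diff and is an assumption): a raw header block terminated by CRLFCRLF yields one 'header' event with lowercased names mapped to arrays of values, with folded continuation lines merged per RFC 2822.

```js
const HeaderParser = require('./deps/dicer/lib/HeaderParser') // path assumed

const parser = new HeaderParser({})
parser.on('header', (header) => {
  console.log(header['content-disposition']) // [ 'form-data; name="avatar"; filename="me.png"' ]
  console.log(header['content-type'])        // [ 'image/png' ]
})

parser.push(Buffer.from(
  'Content-Disposition: form-data; name="avatar"; filename="me.png"\r\n' +
  'Content-Type: image/png\r\n' +
  '\r\n'
))
```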
diff --git a/deps/undici/src/node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js b/deps/undici/src/node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js
new file mode 100644
index 00000000000000..c91da1c4132129
--- /dev/null
+++ b/deps/undici/src/node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js
@@ -0,0 +1,13 @@
+'use strict'
+
+const inherits = require('node:util').inherits
+const ReadableStream = require('node:stream').Readable
+
+function PartStream (opts) {
+ ReadableStream.call(this, opts)
+}
+inherits(PartStream, ReadableStream)
+
+PartStream.prototype._read = function (n) {}
+
+module.exports = PartStream
diff --git a/deps/undici/src/node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts b/deps/undici/src/node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts
new file mode 100644
index 00000000000000..3c5b8962d99b33
--- /dev/null
+++ b/deps/undici/src/node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts
@@ -0,0 +1,164 @@
+// Type definitions for dicer 0.2
+// Project: https://github.com/mscdex/dicer
+// Definitions by: BendingBender
+// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
+// TypeScript Version: 2.2
+/// <reference types="node" />
+
+import stream = require("stream");
+
+// tslint:disable:unified-signatures
+
+/**
+ * A very fast streaming multipart parser for node.js.
+ * Dicer is a WritableStream
+ *
+ * Dicer (special) events:
+ * - on('finish', ()) - Emitted when all parts have been parsed and the Dicer instance has been ended.
+ * - on('part', (stream: PartStream)) - Emitted when a new part has been found.
+ * - on('preamble', (stream: PartStream)) - Emitted for preamble if you should happen to need it (can usually be ignored).
+ * - on('trailer', (data: Buffer)) - Emitted when trailing data was found after the terminating boundary (as with the preamble, this can usually be ignored too).
+ */
+export class Dicer extends stream.Writable {
+ /**
+ * Creates and returns a new Dicer instance with the following valid config settings:
+ *
+ * @param config The configuration to use
+ */
+ constructor(config: Dicer.Config);
+ /**
+ * Sets the boundary to use for parsing and performs some initialization needed for parsing.
+ * You should only need to use this if you set headerFirst to true in the constructor and are parsing the boundary from the preamble header.
+ *
+ * @param boundary The boundary to use
+ */
+ setBoundary(boundary: string): void;
+ addListener(event: "finish", listener: () => void): this;
+ addListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
+ addListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
+ addListener(event: "trailer", listener: (data: Buffer) => void): this;
+ addListener(event: "close", listener: () => void): this;
+ addListener(event: "drain", listener: () => void): this;
+ addListener(event: "error", listener: (err: Error) => void): this;
+ addListener(event: "pipe", listener: (src: stream.Readable) => void): this;
+ addListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
+ addListener(event: string, listener: (...args: any[]) => void): this;
+ on(event: "finish", listener: () => void): this;
+ on(event: "part", listener: (stream: Dicer.PartStream) => void): this;
+ on(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
+ on(event: "trailer", listener: (data: Buffer) => void): this;
+ on(event: "close", listener: () => void): this;
+ on(event: "drain", listener: () => void): this;
+ on(event: "error", listener: (err: Error) => void): this;
+ on(event: "pipe", listener: (src: stream.Readable) => void): this;
+ on(event: "unpipe", listener: (src: stream.Readable) => void): this;
+ on(event: string, listener: (...args: any[]) => void): this;
+ once(event: "finish", listener: () => void): this;
+ once(event: "part", listener: (stream: Dicer.PartStream) => void): this;
+ once(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
+ once(event: "trailer", listener: (data: Buffer) => void): this;
+ once(event: "close", listener: () => void): this;
+ once(event: "drain", listener: () => void): this;
+ once(event: "error", listener: (err: Error) => void): this;
+ once(event: "pipe", listener: (src: stream.Readable) => void): this;
+ once(event: "unpipe", listener: (src: stream.Readable) => void): this;
+ once(event: string, listener: (...args: any[]) => void): this;
+ prependListener(event: "finish", listener: () => void): this;
+ prependListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
+ prependListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
+ prependListener(event: "trailer", listener: (data: Buffer) => void): this;
+ prependListener(event: "close", listener: () => void): this;
+ prependListener(event: "drain", listener: () => void): this;
+ prependListener(event: "error", listener: (err: Error) => void): this;
+ prependListener(event: "pipe", listener: (src: stream.Readable) => void): this;
+ prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
+ prependListener(event: string, listener: (...args: any[]) => void): this;
+ prependOnceListener(event: "finish", listener: () => void): this;
+ prependOnceListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
+ prependOnceListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
+ prependOnceListener(event: "trailer", listener: (data: Buffer) => void): this;
+ prependOnceListener(event: "close", listener: () => void): this;
+ prependOnceListener(event: "drain", listener: () => void): this;
+ prependOnceListener(event: "error", listener: (err: Error) => void): this;
+ prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this;
+ prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
+ prependOnceListener(event: string, listener: (...args: any[]) => void): this;
+ removeListener(event: "finish", listener: () => void): this;
+ removeListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
+ removeListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
+ removeListener(event: "trailer", listener: (data: Buffer) => void): this;
+ removeListener(event: "close", listener: () => void): this;
+ removeListener(event: "drain", listener: () => void): this;
+ removeListener(event: "error", listener: (err: Error) => void): this;
+ removeListener(event: "pipe", listener: (src: stream.Readable) => void): this;
+ removeListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
+ removeListener(event: string, listener: (...args: any[]) => void): this;
+}
+
+declare namespace Dicer {
+ interface Config {
+ /**
+ * This is the boundary used to detect the beginning of a new part.
+ */
+ boundary?: string | undefined;
+ /**
+ * If true, preamble header parsing will be performed first.
+ */
+ headerFirst?: boolean | undefined;
+ /**
+ * The maximum number of header key=>value pairs to parse Default: 2000 (same as node's http).
+ */
+ maxHeaderPairs?: number | undefined;
+ }
+
+ /**
+ * PartStream is a _ReadableStream_
+ *
+ * PartStream (special) events:
+ * - on('header', (header: object)) - An object containing the header for this particular part. Each property value is an array of one or more string values.
+ */
+ interface PartStream extends stream.Readable {
+ addListener(event: "header", listener: (header: object) => void): this;
+ addListener(event: "close", listener: () => void): this;
+ addListener(event: "data", listener: (chunk: Buffer | string) => void): this;
+ addListener(event: "end", listener: () => void): this;
+ addListener(event: "readable", listener: () => void): this;
+ addListener(event: "error", listener: (err: Error) => void): this;
+ addListener(event: string, listener: (...args: any[]) => void): this;
+ on(event: "header", listener: (header: object) => void): this;
+ on(event: "close", listener: () => void): this;
+ on(event: "data", listener: (chunk: Buffer | string) => void): this;
+ on(event: "end", listener: () => void): this;
+ on(event: "readable", listener: () => void): this;
+ on(event: "error", listener: (err: Error) => void): this;
+ on(event: string, listener: (...args: any[]) => void): this;
+ once(event: "header", listener: (header: object) => void): this;
+ once(event: "close", listener: () => void): this;
+ once(event: "data", listener: (chunk: Buffer | string) => void): this;
+ once(event: "end", listener: () => void): this;
+ once(event: "readable", listener: () => void): this;
+ once(event: "error", listener: (err: Error) => void): this;
+ once(event: string, listener: (...args: any[]) => void): this;
+ prependListener(event: "header", listener: (header: object) => void): this;
+ prependListener(event: "close", listener: () => void): this;
+ prependListener(event: "data", listener: (chunk: Buffer | string) => void): this;
+ prependListener(event: "end", listener: () => void): this;
+ prependListener(event: "readable", listener: () => void): this;
+ prependListener(event: "error", listener: (err: Error) => void): this;
+ prependListener(event: string, listener: (...args: any[]) => void): this;
+ prependOnceListener(event: "header", listener: (header: object) => void): this;
+ prependOnceListener(event: "close", listener: () => void): this;
+ prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this;
+ prependOnceListener(event: "end", listener: () => void): this;
+ prependOnceListener(event: "readable", listener: () => void): this;
+ prependOnceListener(event: "error", listener: (err: Error) => void): this;
+ prependOnceListener(event: string, listener: (...args: any[]) => void): this;
+ removeListener(event: "header", listener: (header: object) => void): this;
+ removeListener(event: "close", listener: () => void): this;
+ removeListener(event: "data", listener: (chunk: Buffer | string) => void): this;
+ removeListener(event: "end", listener: () => void): this;
+ removeListener(event: "readable", listener: () => void): this;
+ removeListener(event: "error", listener: (err: Error) => void): this;
+ removeListener(event: string, listener: (...args: any[]) => void): this;
+ }
+}
\ No newline at end of file
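The typings above describe Dicer's event surface; here is a hedged, self-contained sketch of driving it directly (boundary and payload invented; `Dicer` is re-exported by @fastify/busboy's main.js later in this diff):

```js
const { Dicer } = require('@fastify/busboy')

const boundary = 'ExampleBoundary'
const dicer = new Dicer({ boundary })

dicer.on('part', (part) => {
  part.on('header', (header) => console.log('headers:', header))
  part.on('data', (chunk) => console.log('data:', chunk.toString()))
  part.on('end', () => console.log('part finished'))
})
dicer.on('finish', () => console.log('all parts parsed'))

dicer.write(
  `--${boundary}\r\n` +
  'Content-Disposition: form-data; name="greeting"\r\n' +
  '\r\n' +
  'hello\r\n' +
  `--${boundary}--\r\n`
)
dicer.end()
```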
diff --git a/deps/undici/src/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js b/deps/undici/src/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js
new file mode 100644
index 00000000000000..b90c0e862b4062
--- /dev/null
+++ b/deps/undici/src/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js
@@ -0,0 +1,228 @@
+'use strict'
+
+/**
+ * Copyright Brian White. All rights reserved.
+ *
+ * @see https://github.com/mscdex/streamsearch
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+ * IN THE SOFTWARE.
+ *
+ * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation
+ * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool
+ */
+const EventEmitter = require('node:events').EventEmitter
+const inherits = require('node:util').inherits
+
+function SBMH (needle) {
+ if (typeof needle === 'string') {
+ needle = Buffer.from(needle)
+ }
+
+ if (!Buffer.isBuffer(needle)) {
+ throw new TypeError('The needle has to be a String or a Buffer.')
+ }
+
+ const needleLength = needle.length
+
+ if (needleLength === 0) {
+ throw new Error('The needle cannot be an empty String/Buffer.')
+ }
+
+ if (needleLength > 256) {
+ throw new Error('The needle cannot have a length bigger than 256.')
+ }
+
+ this.maxMatches = Infinity
+ this.matches = 0
+
+ this._occ = new Array(256)
+ .fill(needleLength) // Initialize occurrence table.
+ this._lookbehind_size = 0
+ this._needle = needle
+ this._bufpos = 0
+
+ this._lookbehind = Buffer.alloc(needleLength)
+
+ // Populate occurrence table with analysis of the needle,
+ // ignoring last letter.
+ for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var
+ this._occ[needle[i]] = needleLength - 1 - i
+ }
+}
+inherits(SBMH, EventEmitter)
+
+SBMH.prototype.reset = function () {
+ this._lookbehind_size = 0
+ this.matches = 0
+ this._bufpos = 0
+}
+
+SBMH.prototype.push = function (chunk, pos) {
+ if (!Buffer.isBuffer(chunk)) {
+ chunk = Buffer.from(chunk, 'binary')
+ }
+ const chlen = chunk.length
+ this._bufpos = pos || 0
+ let r
+ while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) }
+ return r
+}
+
+SBMH.prototype._sbmh_feed = function (data) {
+ const len = data.length
+ const needle = this._needle
+ const needleLength = needle.length
+ const lastNeedleChar = needle[needleLength - 1]
+
+ // Positive: points to a position in `data`
+ // pos == 3 points to data[3]
+ // Negative: points to a position in the lookbehind buffer
+ // pos == -2 points to lookbehind[lookbehind_size - 2]
+ let pos = -this._lookbehind_size
+ let ch
+
+ if (pos < 0) {
+ // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool
+ // search with character lookup code that considers both the
+ // lookbehind buffer and the current round's haystack data.
+ //
+ // Loop until
+ // there is a match.
+ // or until
+ // we've moved past the position that requires the
+ // lookbehind buffer. In this case we switch to the
+ // optimized loop.
+ // or until
+ // the character to look at lies outside the haystack.
+ while (pos < 0 && pos <= len - needleLength) {
+ ch = this._sbmh_lookup_char(data, pos + needleLength - 1)
+
+ if (
+ ch === lastNeedleChar &&
+ this._sbmh_memcmp(data, pos, needleLength - 1)
+ ) {
+ this._lookbehind_size = 0
+ ++this.matches
+ this.emit('info', true)
+
+ return (this._bufpos = pos + needleLength)
+ }
+ pos += this._occ[ch]
+ }
+
+ // No match.
+
+ if (pos < 0) {
+ // There's too few data for Boyer-Moore-Horspool to run,
+ // so let's use a different algorithm to skip as much as
+ // we can.
+ // Forward pos until
+ // the trailing part of lookbehind + data
+ // looks like the beginning of the needle
+ // or until
+ // pos == 0
+ while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos }
+ }
+
+ if (pos >= 0) {
+ // Discard lookbehind buffer.
+ this.emit('info', false, this._lookbehind, 0, this._lookbehind_size)
+ this._lookbehind_size = 0
+ } else {
+ // Cut off part of the lookbehind buffer that has
+ // been processed and append the entire haystack
+ // into it.
+ const bytesToCutOff = this._lookbehind_size + pos
+ if (bytesToCutOff > 0) {
+ // The cut off data is guaranteed not to contain the needle.
+ this.emit('info', false, this._lookbehind, 0, bytesToCutOff)
+ }
+
+ this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff,
+ this._lookbehind_size - bytesToCutOff)
+ this._lookbehind_size -= bytesToCutOff
+
+ data.copy(this._lookbehind, this._lookbehind_size)
+ this._lookbehind_size += len
+
+ this._bufpos = len
+ return len
+ }
+ }
+
+ pos += (pos >= 0) * this._bufpos
+
+ // Lookbehind buffer is now empty. We only need to check if the
+ // needle is in the haystack.
+ if (data.indexOf(needle, pos) !== -1) {
+ pos = data.indexOf(needle, pos)
+ ++this.matches
+ if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) }
+
+ return (this._bufpos = pos + needleLength)
+ } else {
+ pos = len - needleLength
+ }
+
+ // There was no match. If there's trailing haystack data that we cannot
+ // match yet using the Boyer-Moore-Horspool algorithm (because the trailing
+ // data is less than the needle size) then match using a modified
+ // algorithm that starts matching from the beginning instead of the end.
+ // Whatever trailing data is left after running this algorithm is added to
+ // the lookbehind buffer.
+ while (
+ pos < len &&
+ (
+ data[pos] !== needle[0] ||
+ (
+ (Buffer.compare(
+ data.subarray(pos, pos + len - pos),
+ needle.subarray(0, len - pos)
+ ) !== 0)
+ )
+ )
+ ) {
+ ++pos
+ }
+ if (pos < len) {
+ data.copy(this._lookbehind, 0, pos, pos + (len - pos))
+ this._lookbehind_size = len - pos
+ }
+
+ // Everything until pos is guaranteed not to contain needle data.
+ if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) }
+
+ this._bufpos = len
+ return len
+}
+
+SBMH.prototype._sbmh_lookup_char = function (data, pos) {
+ return (pos < 0)
+ ? this._lookbehind[this._lookbehind_size + pos]
+ : data[pos]
+}
+
+SBMH.prototype._sbmh_memcmp = function (data, pos, len) {
+ for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
+ if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false }
+ }
+ return true
+}
+
+module.exports = SBMH
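A hedged sketch of the matcher on its own (require path assumed from the package layout): 'info' events report needle-free byte ranges and matches, and the lookbehind buffer lets a needle split across chunks still be found.

```js
const StreamSearch = require('./deps/streamsearch/sbmh') // path assumed

const search = new StreamSearch('\r\n--boundary')
search.on('info', (isMatch, data, start, end) => {
  if (data) console.log('needle-free bytes:', data.toString('latin1', start, end))
  if (isMatch) console.log('needle matched')
})

// The needle is split across the two pushes; the lookbehind buffer bridges it.
search.push(Buffer.from('part one\r\n--bou'))
search.push(Buffer.from('ndarypart two'))
```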
diff --git a/deps/undici/src/node_modules/@fastify/busboy/lib/main.d.ts b/deps/undici/src/node_modules/@fastify/busboy/lib/main.d.ts
new file mode 100644
index 00000000000000..91b6448363a712
--- /dev/null
+++ b/deps/undici/src/node_modules/@fastify/busboy/lib/main.d.ts
@@ -0,0 +1,196 @@
+// Definitions by: Jacob Baskin
+// BendingBender
+// Igor Savin
+
+/// <reference types="node" />
+
+import * as http from 'http';
+import { Readable, Writable } from 'stream';
+export { Dicer } from "../deps/dicer/lib/dicer";
+
+export const Busboy: BusboyConstructor;
+export default Busboy;
+
+export interface BusboyConfig {
+ /**
+ * These are the HTTP headers of the incoming request, which are used by individual parsers.
+ */
+ headers: BusboyHeaders;
+ /**
+ * `highWaterMark` to use for this Busboy instance.
+ * @default WritableStream default.
+ */
+ highWaterMark?: number | undefined;
+ /**
+ * highWaterMark to use for file streams.
+ * @default ReadableStream default.
+ */
+ fileHwm?: number | undefined;
+ /**
+ * Default character set to use when one isn't defined.
+ * @default 'utf8'
+ */
+ defCharset?: string | undefined;
+ /**
+ * Detect if a Part is a file.
+ *
+ * By default a file is detected if contentType
+ * is application/octet-stream or fileName is not
+ * undefined.
+ *
+ * Modify this to handle e.g. Blobs.
+ */
+ isPartAFile?: (fieldName: string | undefined, contentType: string | undefined, fileName: string | undefined) => boolean;
+ /**
+ * If paths in the multipart 'filename' field shall be preserved.
+ * @default false
+ */
+ preservePath?: boolean | undefined;
+ /**
+ * Various limits on incoming data.
+ */
+ limits?:
+ | {
+ /**
+ * Max field name size (in bytes)
+ * @default 100 bytes
+ */
+ fieldNameSize?: number | undefined;
+ /**
+ * Max field value size (in bytes)
+ * @default 1MB
+ */
+ fieldSize?: number | undefined;
+ /**
+ * Max number of non-file fields
+ * @default Infinity
+ */
+ fields?: number | undefined;
+ /**
+ * For multipart forms, the max file size (in bytes)
+ * @default Infinity
+ */
+ fileSize?: number | undefined;
+ /**
+ * For multipart forms, the max number of file fields
+ * @default Infinity
+ */
+ files?: number | undefined;
+ /**
+ * For multipart forms, the max number of parts (fields + files)
+ * @default Infinity
+ */
+ parts?: number | undefined;
+ /**
+ * For multipart forms, the max number of header key=>value pairs to parse
+ * @default 2000
+ */
+ headerPairs?: number | undefined;
+
+ /**
+ * For multipart forms, the max size of a header part
+ * @default 81920
+ */
+ headerSize?: number | undefined;
+ }
+ | undefined;
+}
+
+export type BusboyHeaders = { 'content-type': string } & http.IncomingHttpHeaders;
+
+export interface BusboyFileStream extends
+ Readable {
+
+ truncated: boolean;
+
+ /**
+ * The number of bytes that have been read so far.
+ */
+ bytesRead: number;
+}
+
+export interface Busboy extends Writable {
+ addListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
+
+ addListener(event: string | symbol, listener: (...args: any[]) => void): this;
+
+ on<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
+
+ on(event: string | symbol, listener: (...args: any[]) => void): this;
+
+ once<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
+
+ once(event: string | symbol, listener: (...args: any[]) => void): this;
+
+ removeListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
+
+ removeListener(event: string | symbol, listener: (...args: any[]) => void): this;
+
+ off<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
+
+ off(event: string | symbol, listener: (...args: any[]) => void): this;
+
+ prependListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
+
+ prependListener(event: string | symbol, listener: (...args: any[]) => void): this;
+
+ prependOnceListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
+
+ prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this;
+}
+
+export interface BusboyEvents {
+ /**
+ * Emitted for each new file form field found.
+ *
+ * * Note: if you listen for this event, you should always handle the `stream` no matter if you care about the
+ * file contents or not (e.g. you can simply just do `stream.resume();` if you want to discard the contents),
+ * otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any**
+ * incoming files, you can simply not listen for the 'file' event at all and any/all files will be automatically
+ * and safely discarded (these discarded files do still count towards `files` and `parts` limits).
+ * * If a configured file size limit was reached, `stream` will both have a boolean property `truncated`
+ * (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens.
+ *
+ * @param listener.transferEncoding Contains the 'Content-Transfer-Encoding' value for the file stream.
+ * @param listener.mimeType Contains the 'Content-Type' value for the file stream.
+ */
+ file: (
+ fieldname: string,
+ stream: BusboyFileStream,
+ filename: string,
+ transferEncoding: string,
+ mimeType: string,
+ ) => void;
+ /**
+ * Emitted for each new non-file field found.
+ */
+ field: (
+ fieldname: string,
+ value: string,
+ fieldnameTruncated: boolean,
+ valueTruncated: boolean,
+ transferEncoding: string,
+ mimeType: string,
+ ) => void;
+ finish: () => void;
+ /**
+ * Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted.
+ */
+ partsLimit: () => void;
+ /**
+ * Emitted when specified `files` limit has been reached. No more 'file' events will be emitted.
+ */
+ filesLimit: () => void;
+ /**
+ * Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted.
+ */
+ fieldsLimit: () => void;
+ error: (error: unknown) => void;
+}
+
+export interface BusboyConstructor {
+ (options: BusboyConfig): Busboy;
+
+ new(options: BusboyConfig): Busboy;
+}
+
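As a hedged illustration of the `isPartAFile` hook documented above (boundary invented): treat a part as a file only when a filename is present, so `application/octet-stream` parts without filenames surface as 'field' events instead.

```js
const Busboy = require('@fastify/busboy')

const busboy = new Busboy({
  headers: { 'content-type': 'multipart/form-data; boundary=----demo' },
  isPartAFile: (fieldName, contentType, fileName) => fileName !== undefined
})

busboy.on('file', (name, stream, filename) => stream.resume())
busboy.on('field', (name, value) => console.log(`${name} = ${value}`))
```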
diff --git a/deps/undici/src/node_modules/@fastify/busboy/lib/main.js b/deps/undici/src/node_modules/@fastify/busboy/lib/main.js
new file mode 100644
index 00000000000000..8794bebf2296b5
--- /dev/null
+++ b/deps/undici/src/node_modules/@fastify/busboy/lib/main.js
@@ -0,0 +1,85 @@
+'use strict'
+
+const WritableStream = require('node:stream').Writable
+const { inherits } = require('node:util')
+const Dicer = require('../deps/dicer/lib/Dicer')
+
+const MultipartParser = require('./types/multipart')
+const UrlencodedParser = require('./types/urlencoded')
+const parseParams = require('./utils/parseParams')
+
+function Busboy (opts) {
+ if (!(this instanceof Busboy)) { return new Busboy(opts) }
+
+ if (typeof opts !== 'object') {
+ throw new TypeError('Busboy expected an options-Object.')
+ }
+ if (typeof opts.headers !== 'object') {
+ throw new TypeError('Busboy expected an options-Object with headers-attribute.')
+ }
+ if (typeof opts.headers['content-type'] !== 'string') {
+ throw new TypeError('Missing Content-Type-header.')
+ }
+
+ const {
+ headers,
+ ...streamOptions
+ } = opts
+
+ this.opts = {
+ autoDestroy: false,
+ ...streamOptions
+ }
+ WritableStream.call(this, this.opts)
+
+ this._done = false
+ this._parser = this.getParserByHeaders(headers)
+ this._finished = false
+}
+inherits(Busboy, WritableStream)
+
+Busboy.prototype.emit = function (ev) {
+ if (ev === 'finish') {
+ if (!this._done) {
+ this._parser?.end()
+ return
+ } else if (this._finished) {
+ return
+ }
+ this._finished = true
+ }
+ WritableStream.prototype.emit.apply(this, arguments)
+}
+
+Busboy.prototype.getParserByHeaders = function (headers) {
+ const parsed = parseParams(headers['content-type'])
+
+ const cfg = {
+ defCharset: this.opts.defCharset,
+ fileHwm: this.opts.fileHwm,
+ headers,
+ highWaterMark: this.opts.highWaterMark,
+ isPartAFile: this.opts.isPartAFile,
+ limits: this.opts.limits,
+ parsedConType: parsed,
+ preservePath: this.opts.preservePath
+ }
+
+ if (MultipartParser.detect.test(parsed[0])) {
+ return new MultipartParser(this, cfg)
+ }
+ if (UrlencodedParser.detect.test(parsed[0])) {
+ return new UrlencodedParser(this, cfg)
+ }
+ throw new Error('Unsupported Content-Type.')
+}
+
+Busboy.prototype._write = function (chunk, encoding, cb) {
+ this._parser.write(chunk, cb)
+}
+
+module.exports = Busboy
+module.exports.default = Busboy
+module.exports.Busboy = Busboy
+
+module.exports.Dicer = Dicer
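A hedged sketch of the constructor checks and parser selection above (the form body and content types are invented):

```js
const Busboy = require('@fastify/busboy')

// application/x-www-form-urlencoded is routed to the UrlencodedParser.
const form = new Busboy({
  headers: { 'content-type': 'application/x-www-form-urlencoded' }
})
form.on('field', (name, value) => console.log(`${name} = ${value}`))
form.on('finish', () => console.log('done'))
form.end('greeting=hello&count=2')

// Anything getParserByHeaders cannot match fails fast.
try {
  new Busboy({ headers: { 'content-type': 'text/plain' } }) // eslint-disable-line no-new
} catch (err) {
  console.log(err.message) // Unsupported Content-Type.
}
```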
diff --git a/deps/undici/src/node_modules/@fastify/busboy/lib/types/multipart.js b/deps/undici/src/node_modules/@fastify/busboy/lib/types/multipart.js
new file mode 100644
index 00000000000000..ad242db27a78e3
--- /dev/null
+++ b/deps/undici/src/node_modules/@fastify/busboy/lib/types/multipart.js
@@ -0,0 +1,306 @@
+'use strict'
+
+// TODO:
+// * support 1 nested multipart level
+// (see second multipart example here:
+// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data)
+// * support limits.fieldNameSize
+// -- this will require modifications to utils.parseParams
+
+const { Readable } = require('node:stream')
+const { inherits } = require('node:util')
+
+const Dicer = require('../../deps/dicer/lib/Dicer')
+
+const parseParams = require('../utils/parseParams')
+const decodeText = require('../utils/decodeText')
+const basename = require('../utils/basename')
+const getLimit = require('../utils/getLimit')
+
+const RE_BOUNDARY = /^boundary$/i
+const RE_FIELD = /^form-data$/i
+const RE_CHARSET = /^charset$/i
+const RE_FILENAME = /^filename$/i
+const RE_NAME = /^name$/i
+
+Multipart.detect = /^multipart\/form-data/i
+function Multipart (boy, cfg) {
+ let i
+ let len
+ const self = this
+ let boundary
+ const limits = cfg.limits
+ const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined))
+ const parsedConType = cfg.parsedConType || []
+ const defCharset = cfg.defCharset || 'utf8'
+ const preservePath = cfg.preservePath
+ const fileOpts = { highWaterMark: cfg.fileHwm }
+
+ for (i = 0, len = parsedConType.length; i < len; ++i) {
+ if (Array.isArray(parsedConType[i]) &&
+ RE_BOUNDARY.test(parsedConType[i][0])) {
+ boundary = parsedConType[i][1]
+ break
+ }
+ }
+
+ function checkFinished () {
+ if (nends === 0 && finished && !boy._done) {
+ finished = false
+ self.end()
+ }
+ }
+
+ if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') }
+
+ const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
+ const fileSizeLimit = getLimit(limits, 'fileSize', Infinity)
+ const filesLimit = getLimit(limits, 'files', Infinity)
+ const fieldsLimit = getLimit(limits, 'fields', Infinity)
+ const partsLimit = getLimit(limits, 'parts', Infinity)
+ const headerPairsLimit = getLimit(limits, 'headerPairs', 2000)
+ const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024)
+
+ let nfiles = 0
+ let nfields = 0
+ let nends = 0
+ let curFile
+ let curField
+ let finished = false
+
+ this._needDrain = false
+ this._pause = false
+ this._cb = undefined
+ this._nparts = 0
+ this._boy = boy
+
+ const parserCfg = {
+ boundary,
+ maxHeaderPairs: headerPairsLimit,
+ maxHeaderSize: headerSizeLimit,
+ partHwm: fileOpts.highWaterMark,
+ highWaterMark: cfg.highWaterMark
+ }
+
+ this.parser = new Dicer(parserCfg)
+ this.parser.on('drain', function () {
+ self._needDrain = false
+ if (self._cb && !self._pause) {
+ const cb = self._cb
+ self._cb = undefined
+ cb()
+ }
+ }).on('part', function onPart (part) {
+ if (++self._nparts > partsLimit) {
+ self.parser.removeListener('part', onPart)
+ self.parser.on('part', skipPart)
+ boy.hitPartsLimit = true
+ boy.emit('partsLimit')
+ return skipPart(part)
+ }
+
+    // Hack: streams2 never emits 'end' until nextTick, so emit 'end' early
+    // here, since we know the previous part has ended once we are already
+    // seeing the next part
+ if (curField) {
+ const field = curField
+ field.emit('end')
+ field.removeAllListeners('end')
+ }
+
+ part.on('header', function (header) {
+ let contype
+ let fieldname
+ let parsed
+ let charset
+ let encoding
+ let filename
+ let nsize = 0
+
+ if (header['content-type']) {
+ parsed = parseParams(header['content-type'][0])
+ if (parsed[0]) {
+ contype = parsed[0].toLowerCase()
+ for (i = 0, len = parsed.length; i < len; ++i) {
+ if (RE_CHARSET.test(parsed[i][0])) {
+ charset = parsed[i][1].toLowerCase()
+ break
+ }
+ }
+ }
+ }
+
+ if (contype === undefined) { contype = 'text/plain' }
+ if (charset === undefined) { charset = defCharset }
+
+ if (header['content-disposition']) {
+ parsed = parseParams(header['content-disposition'][0])
+ if (!RE_FIELD.test(parsed[0])) { return skipPart(part) }
+ for (i = 0, len = parsed.length; i < len; ++i) {
+ if (RE_NAME.test(parsed[i][0])) {
+ fieldname = parsed[i][1]
+ } else if (RE_FILENAME.test(parsed[i][0])) {
+ filename = parsed[i][1]
+ if (!preservePath) { filename = basename(filename) }
+ }
+ }
+ } else { return skipPart(part) }
+
+ if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' }
+
+ let onData,
+ onEnd
+
+ if (isPartAFile(fieldname, contype, filename)) {
+ // file/binary field
+ if (nfiles === filesLimit) {
+ if (!boy.hitFilesLimit) {
+ boy.hitFilesLimit = true
+ boy.emit('filesLimit')
+ }
+ return skipPart(part)
+ }
+
+ ++nfiles
+
+ if (!boy._events.file) {
+ self.parser._ignore()
+ return
+ }
+
+ ++nends
+ const file = new FileStream(fileOpts)
+ curFile = file
+ file.on('end', function () {
+ --nends
+ self._pause = false
+ checkFinished()
+ if (self._cb && !self._needDrain) {
+ const cb = self._cb
+ self._cb = undefined
+ cb()
+ }
+ })
+ file._read = function (n) {
+ if (!self._pause) { return }
+ self._pause = false
+ if (self._cb && !self._needDrain) {
+ const cb = self._cb
+ self._cb = undefined
+ cb()
+ }
+ }
+ boy.emit('file', fieldname, file, filename, encoding, contype)
+
+ onData = function (data) {
+ if ((nsize += data.length) > fileSizeLimit) {
+ const extralen = fileSizeLimit - nsize + data.length
+ if (extralen > 0) { file.push(data.slice(0, extralen)) }
+ file.truncated = true
+ file.bytesRead = fileSizeLimit
+ part.removeAllListeners('data')
+ file.emit('limit')
+ return
+ } else if (!file.push(data)) { self._pause = true }
+
+ file.bytesRead = nsize
+ }
+
+ onEnd = function () {
+ curFile = undefined
+ file.push(null)
+ }
+ } else {
+ // non-file field
+ if (nfields === fieldsLimit) {
+ if (!boy.hitFieldsLimit) {
+ boy.hitFieldsLimit = true
+ boy.emit('fieldsLimit')
+ }
+ return skipPart(part)
+ }
+
+ ++nfields
+ ++nends
+ let buffer = ''
+ let truncated = false
+ curField = part
+
+ onData = function (data) {
+ if ((nsize += data.length) > fieldSizeLimit) {
+ const extralen = (fieldSizeLimit - (nsize - data.length))
+ buffer += data.toString('binary', 0, extralen)
+ truncated = true
+ part.removeAllListeners('data')
+ } else { buffer += data.toString('binary') }
+ }
+
+ onEnd = function () {
+ curField = undefined
+ if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) }
+ boy.emit('field', fieldname, buffer, false, truncated, encoding, contype)
+ --nends
+ checkFinished()
+ }
+ }
+
+ /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become
+ broken. Streams2/streams3 is a huge black box of confusion, but
+ somehow overriding the sync state seems to fix things again (and still
+ seems to work for previous node versions).
+ */
+ part._readableState.sync = false
+
+ part.on('data', onData)
+ part.on('end', onEnd)
+ }).on('error', function (err) {
+ if (curFile) { curFile.emit('error', err) }
+ })
+ }).on('error', function (err) {
+ boy.emit('error', err)
+ }).on('finish', function () {
+ finished = true
+ checkFinished()
+ })
+}
+
+Multipart.prototype.write = function (chunk, cb) {
+ const r = this.parser.write(chunk)
+ if (r && !this._pause) {
+ cb()
+ } else {
+ this._needDrain = !r
+ this._cb = cb
+ }
+}
+
+Multipart.prototype.end = function () {
+ const self = this
+
+ if (self.parser.writable) {
+ self.parser.end()
+ } else if (!self._boy._done) {
+ process.nextTick(function () {
+ self._boy._done = true
+ self._boy.emit('finish')
+ })
+ }
+}
+
+function skipPart (part) {
+ part.resume()
+}
+
+function FileStream (opts) {
+ Readable.call(this, opts)
+
+ this.bytesRead = 0
+
+ this.truncated = false
+}
+
+inherits(FileStream, Readable)
+
+FileStream.prototype._read = function (n) {}
+
+module.exports = Multipart
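As a rough sketch (the boundary and body values are made up, not taken from the patch), the multipart path above can be driven end to end like this:

```js
// Hand-built multipart body with a single non-file field; illustrative only.
const Busboy = require('@fastify/busboy')

const boundary = 'xyz'
const body =
  `--${boundary}\r\n` +
  'content-disposition: form-data; name="greeting"\r\n' +
  '\r\n' +
  'hello\r\n' +
  `--${boundary}--\r\n`

const bb = new Busboy({
  headers: { 'content-type': `multipart/form-data; boundary=${boundary}` }
})
bb.on('field', (name, value) => console.log(`${name}=${value}`)) // greeting=hello
bb.on('finish', () => console.log('parsed'))
bb.end(Buffer.from(body))
```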
diff --git a/deps/undici/src/node_modules/@fastify/busboy/lib/types/urlencoded.js b/deps/undici/src/node_modules/@fastify/busboy/lib/types/urlencoded.js
new file mode 100644
index 00000000000000..6f5f7846d8b14f
--- /dev/null
+++ b/deps/undici/src/node_modules/@fastify/busboy/lib/types/urlencoded.js
@@ -0,0 +1,190 @@
+'use strict'
+
+const Decoder = require('../utils/Decoder')
+const decodeText = require('../utils/decodeText')
+const getLimit = require('../utils/getLimit')
+
+const RE_CHARSET = /^charset$/i
+
+UrlEncoded.detect = /^application\/x-www-form-urlencoded/i
+function UrlEncoded (boy, cfg) {
+ const limits = cfg.limits
+ const parsedConType = cfg.parsedConType
+ this.boy = boy
+
+ this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
+ this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100)
+ this.fieldsLimit = getLimit(limits, 'fields', Infinity)
+
+ let charset
+ for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var
+ if (Array.isArray(parsedConType[i]) &&
+ RE_CHARSET.test(parsedConType[i][0])) {
+ charset = parsedConType[i][1].toLowerCase()
+ break
+ }
+ }
+
+ if (charset === undefined) { charset = cfg.defCharset || 'utf8' }
+
+ this.decoder = new Decoder()
+ this.charset = charset
+ this._fields = 0
+ this._state = 'key'
+ this._checkingBytes = true
+ this._bytesKey = 0
+ this._bytesVal = 0
+ this._key = ''
+ this._val = ''
+ this._keyTrunc = false
+ this._valTrunc = false
+ this._hitLimit = false
+}
+
+UrlEncoded.prototype.write = function (data, cb) {
+ if (this._fields === this.fieldsLimit) {
+ if (!this.boy.hitFieldsLimit) {
+ this.boy.hitFieldsLimit = true
+ this.boy.emit('fieldsLimit')
+ }
+ return cb()
+ }
+
+ let idxeq; let idxamp; let i; let p = 0; const len = data.length
+
+ while (p < len) {
+ if (this._state === 'key') {
+ idxeq = idxamp = undefined
+ for (i = p; i < len; ++i) {
+ if (!this._checkingBytes) { ++p }
+ if (data[i] === 0x3D/* = */) {
+ idxeq = i
+ break
+ } else if (data[i] === 0x26/* & */) {
+ idxamp = i
+ break
+ }
+ if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) {
+ this._hitLimit = true
+ break
+ } else if (this._checkingBytes) { ++this._bytesKey }
+ }
+
+ if (idxeq !== undefined) {
+ // key with assignment
+ if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) }
+ this._state = 'val'
+
+ this._hitLimit = false
+ this._checkingBytes = true
+ this._val = ''
+ this._bytesVal = 0
+ this._valTrunc = false
+ this.decoder.reset()
+
+ p = idxeq + 1
+ } else if (idxamp !== undefined) {
+ // key with no assignment
+ ++this._fields
+ let key; const keyTrunc = this._keyTrunc
+ if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key }
+
+ this._hitLimit = false
+ this._checkingBytes = true
+ this._key = ''
+ this._bytesKey = 0
+ this._keyTrunc = false
+ this.decoder.reset()
+
+ if (key.length) {
+ this.boy.emit('field', decodeText(key, 'binary', this.charset),
+ '',
+ keyTrunc,
+ false)
+ }
+
+ p = idxamp + 1
+ if (this._fields === this.fieldsLimit) { return cb() }
+ } else if (this._hitLimit) {
+ // we may not have hit the actual limit if there are encoded bytes...
+ if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) }
+ p = i
+ if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) {
+ // yep, we actually did hit the limit
+ this._checkingBytes = false
+ this._keyTrunc = true
+ }
+ } else {
+ if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) }
+ p = len
+ }
+ } else {
+ idxamp = undefined
+ for (i = p; i < len; ++i) {
+ if (!this._checkingBytes) { ++p }
+ if (data[i] === 0x26/* & */) {
+ idxamp = i
+ break
+ }
+ if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) {
+ this._hitLimit = true
+ break
+ } else if (this._checkingBytes) { ++this._bytesVal }
+ }
+
+ if (idxamp !== undefined) {
+ ++this._fields
+ if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) }
+ this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
+ decodeText(this._val, 'binary', this.charset),
+ this._keyTrunc,
+ this._valTrunc)
+ this._state = 'key'
+
+ this._hitLimit = false
+ this._checkingBytes = true
+ this._key = ''
+ this._bytesKey = 0
+ this._keyTrunc = false
+ this.decoder.reset()
+
+ p = idxamp + 1
+ if (this._fields === this.fieldsLimit) { return cb() }
+ } else if (this._hitLimit) {
+ // we may not have hit the actual limit if there are encoded bytes...
+ if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) }
+ p = i
+ if ((this._val === '' && this.fieldSizeLimit === 0) ||
+ (this._bytesVal = this._val.length) === this.fieldSizeLimit) {
+ // yep, we actually did hit the limit
+ this._checkingBytes = false
+ this._valTrunc = true
+ }
+ } else {
+ if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) }
+ p = len
+ }
+ }
+ }
+ cb()
+}
+
+UrlEncoded.prototype.end = function () {
+ if (this.boy._done) { return }
+
+ if (this._state === 'key' && this._key.length > 0) {
+ this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
+ '',
+ this._keyTrunc,
+ false)
+ } else if (this._state === 'val') {
+ this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
+ decodeText(this._val, 'binary', this.charset),
+ this._keyTrunc,
+ this._valTrunc)
+ }
+ this.boy._done = true
+ this.boy.emit('finish')
+}
+
+module.exports = UrlEncoded
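A small sketch (illustrative values) of how the `fieldNameSize` limit handled above surfaces to callers: the field name is cut at the limit and the truncation flag is set on the `field` event.

```js
// fieldNameSize: 3 truncates 'toolongname' to 'too' and flags it; illustrative only.
const Busboy = require('@fastify/busboy')

const bb = new Busboy({
  headers: { 'content-type': 'application/x-www-form-urlencoded' },
  limits: { fieldNameSize: 3 }
})
bb.on('field', (name, value, nameTruncated) =>
  console.log(name, value, { nameTruncated }))
// too 1 { nameTruncated: true }
// b 2 { nameTruncated: false }
// c 3 { nameTruncated: false }
bb.end(Buffer.from('toolongname=1&b=2&c=3'))
```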
diff --git a/deps/undici/src/node_modules/@fastify/busboy/lib/utils/Decoder.js b/deps/undici/src/node_modules/@fastify/busboy/lib/utils/Decoder.js
new file mode 100644
index 00000000000000..7917678c746538
--- /dev/null
+++ b/deps/undici/src/node_modules/@fastify/busboy/lib/utils/Decoder.js
@@ -0,0 +1,54 @@
+'use strict'
+
+const RE_PLUS = /\+/g
+
+const HEX = [
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
+ 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+]
+
+function Decoder () {
+ this.buffer = undefined
+}
+Decoder.prototype.write = function (str) {
+ // Replace '+' with ' ' before decoding
+ str = str.replace(RE_PLUS, ' ')
+ let res = ''
+ let i = 0; let p = 0; const len = str.length
+ for (; i < len; ++i) {
+ if (this.buffer !== undefined) {
+ if (!HEX[str.charCodeAt(i)]) {
+ res += '%' + this.buffer
+ this.buffer = undefined
+ --i // retry character
+ } else {
+ this.buffer += str[i]
+ ++p
+ if (this.buffer.length === 2) {
+ res += String.fromCharCode(parseInt(this.buffer, 16))
+ this.buffer = undefined
+ }
+ }
+ } else if (str[i] === '%') {
+ if (i > p) {
+ res += str.substring(p, i)
+ p = i
+ }
+ this.buffer = ''
+ ++p
+ }
+ }
+ if (p < len && this.buffer === undefined) { res += str.substring(p) }
+ return res
+}
+Decoder.prototype.reset = function () {
+ this.buffer = undefined
+}
+
+module.exports = Decoder
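The `buffer` field above exists so that a percent escape split across two `write()` calls still decodes. A quick sketch (the deep require path is an assumption about how the vendored file is reachable):

```js
// Stateful percent-decoding across chunk boundaries; illustrative only.
const Decoder = require('@fastify/busboy/lib/utils/Decoder')

const d = new Decoder()
console.log(d.write('caf%C')) // 'caf'  ('%C' is buffered, waiting for the second hex digit)
console.log(d.write('3'))     // '\u00C3' (the raw 0xC3 byte; decodeText re-decodes it later)
console.log(d.write('a+b'))   // 'a b'  ('+' is rewritten to a space before decoding)
```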
diff --git a/deps/undici/src/node_modules/@fastify/busboy/lib/utils/basename.js b/deps/undici/src/node_modules/@fastify/busboy/lib/utils/basename.js
new file mode 100644
index 00000000000000..db588199db051d
--- /dev/null
+++ b/deps/undici/src/node_modules/@fastify/busboy/lib/utils/basename.js
@@ -0,0 +1,14 @@
+'use strict'
+
+module.exports = function basename (path) {
+ if (typeof path !== 'string') { return '' }
+ for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var
+ switch (path.charCodeAt(i)) {
+ case 0x2F: // '/'
+ case 0x5C: // '\'
+ path = path.slice(i + 1)
+ return (path === '..' || path === '.' ? '' : path)
+ }
+ }
+ return (path === '..' || path === '.' ? '' : path)
+}
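A sketch of the sanitizing behaviour of the helper above (inputs are illustrative; the deep require path is an assumption):

```js
// Strips everything up to the last '/' or '\' and rejects '.', '..' and non-strings.
const basename = require('@fastify/busboy/lib/utils/basename')

basename('C:\\tmp\\avatar.png') // 'avatar.png'
basename('../../etc/passwd')    // 'passwd'
basename('..')                  // ''  (dot segments are dropped)
basename(42)                    // ''  (non-strings are rejected)
```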
diff --git a/deps/undici/src/node_modules/@fastify/busboy/lib/utils/decodeText.js b/deps/undici/src/node_modules/@fastify/busboy/lib/utils/decodeText.js
new file mode 100644
index 00000000000000..ee376062c1843f
--- /dev/null
+++ b/deps/undici/src/node_modules/@fastify/busboy/lib/utils/decodeText.js
@@ -0,0 +1,26 @@
+'use strict'
+
+// Node always ships with a utf-8 TextDecoder
+const utf8Decoder = new TextDecoder('utf-8')
+const textDecoders = new Map([
+ ['utf-8', utf8Decoder],
+ ['utf8', utf8Decoder]
+])
+
+function decodeText (text, textEncoding, destEncoding) {
+ if (text) {
+ if (textDecoders.has(destEncoding)) {
+ try {
+ return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding))
+ } catch (e) { }
+ } else {
+ try {
+ textDecoders.set(destEncoding, new TextDecoder(destEncoding))
+ return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding))
+ } catch (e) { }
+ }
+ }
+ return text
+}
+
+module.exports = decodeText
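A sketch of the charset conversion above (illustrative inputs; the deep require path is an assumption). An unknown charset makes the `TextDecoder` constructor throw, and the text is returned untouched.

```js
const decodeText = require('@fastify/busboy/lib/utils/decodeText')

// 'café' that was accumulated as a latin1/'binary' string:
const raw = Buffer.from('café', 'utf8').toString('binary')
console.log(decodeText(raw, 'binary', 'utf-8'))   // 'café'
console.log(decodeText(raw, 'binary', 'unknown')) // raw string returned unchanged
```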
diff --git a/deps/undici/src/node_modules/@fastify/busboy/lib/utils/getLimit.js b/deps/undici/src/node_modules/@fastify/busboy/lib/utils/getLimit.js
new file mode 100644
index 00000000000000..cb64fd6721aacc
--- /dev/null
+++ b/deps/undici/src/node_modules/@fastify/busboy/lib/utils/getLimit.js
@@ -0,0 +1,16 @@
+'use strict'
+
+module.exports = function getLimit (limits, name, defaultLimit) {
+ if (
+ !limits ||
+ limits[name] === undefined ||
+ limits[name] === null
+ ) { return defaultLimit }
+
+ if (
+ typeof limits[name] !== 'number' ||
+ isNaN(limits[name])
+ ) { throw new TypeError('Limit ' + name + ' is not a valid number') }
+
+ return limits[name]
+}
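How the helper above resolves limits, sketched with illustrative values (the deep require path is an assumption):

```js
const getLimit = require('@fastify/busboy/lib/utils/getLimit')

getLimit(undefined, 'fileSize', Infinity)   // Infinity (no limits object given)
getLimit({ files: 3 }, 'files', Infinity)   // 3
getLimit({ files: 0 }, 'files', Infinity)   // 0 (an explicit zero is honoured)
getLimit({ files: '3' }, 'files', Infinity) // throws TypeError: Limit files is not a valid number
```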
diff --git a/deps/undici/src/node_modules/@fastify/busboy/lib/utils/parseParams.js b/deps/undici/src/node_modules/@fastify/busboy/lib/utils/parseParams.js
new file mode 100644
index 00000000000000..f9214180fa79d5
--- /dev/null
+++ b/deps/undici/src/node_modules/@fastify/busboy/lib/utils/parseParams.js
@@ -0,0 +1,87 @@
+'use strict'
+
+const decodeText = require('./decodeText')
+
+const RE_ENCODED = /%([a-fA-F0-9]{2})/g
+
+function encodedReplacer (match, byte) {
+ return String.fromCharCode(parseInt(byte, 16))
+}
+
+function parseParams (str) {
+ const res = []
+ let state = 'key'
+ let charset = ''
+ let inquote = false
+ let escaping = false
+ let p = 0
+ let tmp = ''
+
+ for (var i = 0, len = str.length; i < len; ++i) { // eslint-disable-line no-var
+ const char = str[i]
+ if (char === '\\' && inquote) {
+ if (escaping) { escaping = false } else {
+ escaping = true
+ continue
+ }
+ } else if (char === '"') {
+ if (!escaping) {
+ if (inquote) {
+ inquote = false
+ state = 'key'
+ } else { inquote = true }
+ continue
+ } else { escaping = false }
+ } else {
+ if (escaping && inquote) { tmp += '\\' }
+ escaping = false
+ if ((state === 'charset' || state === 'lang') && char === "'") {
+ if (state === 'charset') {
+ state = 'lang'
+ charset = tmp.substring(1)
+ } else { state = 'value' }
+ tmp = ''
+ continue
+ } else if (state === 'key' &&
+ (char === '*' || char === '=') &&
+ res.length) {
+ if (char === '*') { state = 'charset' } else { state = 'value' }
+ res[p] = [tmp, undefined]
+ tmp = ''
+ continue
+ } else if (!inquote && char === ';') {
+ state = 'key'
+ if (charset) {
+ if (tmp.length) {
+ tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
+ 'binary',
+ charset)
+ }
+ charset = ''
+ } else if (tmp.length) {
+ tmp = decodeText(tmp, 'binary', 'utf8')
+ }
+ if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp }
+ tmp = ''
+ ++p
+ continue
+ } else if (!inquote && (char === ' ' || char === '\t')) { continue }
+ }
+ tmp += char
+ }
+ if (charset && tmp.length) {
+ tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
+ 'binary',
+ charset)
+ } else if (tmp) {
+ tmp = decodeText(tmp, 'binary', 'utf8')
+ }
+
+ if (res[p] === undefined) {
+ if (tmp) { res[p] = tmp }
+ } else { res[p][1] = tmp }
+
+ return res
+}
+
+module.exports = parseParams
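The shape of the value returned by `parseParams` drives both parsers above: index 0 is the first bare token (media type or disposition type), and the remaining entries are `[name, value]` pairs. A sketch with illustrative header values (the deep require path is an assumption):

```js
const parseParams = require('@fastify/busboy/lib/utils/parseParams')

parseParams('multipart/form-data; boundary=----abc; charset=utf-8')
// [ 'multipart/form-data', [ 'boundary', '----abc' ], [ 'charset', 'utf-8' ] ]

// RFC 2231 extended parameters carry their own charset:
parseParams("text/plain; title*=utf-8''%C2%A3%20rates")
// [ 'text/plain', [ 'title', '£ rates' ] ]
```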
diff --git a/deps/undici/src/node_modules/@fastify/busboy/package.json b/deps/undici/src/node_modules/@fastify/busboy/package.json
new file mode 100644
index 00000000000000..3288ee06872612
--- /dev/null
+++ b/deps/undici/src/node_modules/@fastify/busboy/package.json
@@ -0,0 +1,89 @@
+{
+ "name": "@fastify/busboy",
+ "version": "2.0.0",
+ "private": false,
+ "author": "Brian White ",
+ "contributors": [
+ {
+ "name": "Igor Savin",
+ "email": "kibertoad@gmail.com",
+ "url": "https://github.com/kibertoad"
+ },
+ {
+ "name": "Aras Abbasi",
+ "email": "aras.abbasi@gmail.com",
+ "url": "https://github.com/uzlopak"
+ }
+ ],
+ "description": "A streaming parser for HTML form data for node.js",
+ "main": "lib/main",
+ "types": "lib/main.d.ts",
+ "scripts": {
+ "bench:busboy": "cd benchmarks && npm install && npm run benchmark-fastify",
+ "bench:dicer": "node bench/dicer/dicer-bench-multipart-parser.js",
+ "coveralls": "nyc report --reporter=lcov",
+ "lint": "npm run lint:standard",
+ "lint:everything": "npm run lint && npm run test:types",
+ "lint:fix": "standard --fix",
+ "lint:standard": "standard --verbose | snazzy",
+ "test:mocha": "mocha test",
+ "test:types": "tsd",
+ "test:coverage": "nyc npm run test",
+ "test": "npm run test:mocha"
+ },
+ "engines": {
+ "node": ">=14"
+ },
+ "devDependencies": {
+ "@types/node": "^20.1.0",
+ "busboy": "^1.0.0",
+ "chai": "^4.3.6",
+ "eslint": "^8.23.0",
+ "eslint-config-standard": "^17.0.0",
+ "eslint-plugin-n": "^16.0.0",
+ "mocha": "^10.0.0",
+ "nyc": "^15.1.0",
+ "photofinish": "^1.8.0",
+ "snazzy": "^9.0.0",
+ "standard": "^17.0.0",
+ "tsd": "^0.29.0",
+ "typescript": "^5.0.2"
+ },
+ "keywords": [
+ "uploads",
+ "forms",
+ "multipart",
+ "form-data"
+ ],
+ "license": "MIT",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/fastify/busboy.git"
+ },
+ "tsd": {
+ "directory": "test/types",
+ "compilerOptions": {
+ "esModuleInterop": false,
+ "module": "commonjs",
+ "target": "ES2017"
+ }
+ },
+ "standard": {
+ "globals": [
+ "describe",
+ "it"
+ ],
+ "ignore": [
+ "bench"
+ ]
+ },
+ "files": [
+ "README.md",
+ "LICENSE",
+ "lib/*",
+ "deps/encoding/*",
+ "deps/dicer/lib",
+ "deps/streamsearch/",
+ "deps/dicer/LICENSE"
+ ]
+}
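Since the manifest above declares `"main": "lib/main"` and no `exports` map, both the bare specifier and a deep path resolve to the same module (a sketch, assuming the entry ships as `lib/main.js`):

```js
// Both specifiers hit the same file, so the module cache returns one instance.
const a = require('@fastify/busboy')
const b = require('@fastify/busboy/lib/main')
console.log(a === b) // true
```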
diff --git a/deps/undici/src/node_modules/busboy/.eslintrc.js b/deps/undici/src/node_modules/busboy/.eslintrc.js
deleted file mode 100644
index be9311d02655a2..00000000000000
--- a/deps/undici/src/node_modules/busboy/.eslintrc.js
+++ /dev/null
@@ -1,5 +0,0 @@
-'use strict';
-
-module.exports = {
- extends: '@mscdex/eslint-config',
-};
diff --git a/deps/undici/src/node_modules/busboy/.github/workflows/ci.yml b/deps/undici/src/node_modules/busboy/.github/workflows/ci.yml
deleted file mode 100644
index 799bae04adb62a..00000000000000
--- a/deps/undici/src/node_modules/busboy/.github/workflows/ci.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-name: CI
-
-on:
- pull_request:
- push:
- branches: [ master ]
-
-jobs:
- tests-linux:
- runs-on: ubuntu-latest
- strategy:
- fail-fast: false
- matrix:
- node-version: [10.16.0, 10.x, 12.x, 14.x, 16.x]
- steps:
- - uses: actions/checkout@v2
- - name: Use Node.js ${{ matrix.node-version }}
- uses: actions/setup-node@v1
- with:
- node-version: ${{ matrix.node-version }}
- - name: Install module
- run: npm install
- - name: Run tests
- run: npm test
diff --git a/deps/undici/src/node_modules/busboy/.github/workflows/lint.yml b/deps/undici/src/node_modules/busboy/.github/workflows/lint.yml
deleted file mode 100644
index 9f9e1f589a30be..00000000000000
--- a/deps/undici/src/node_modules/busboy/.github/workflows/lint.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-name: lint
-
-on:
- pull_request:
- push:
- branches: [ master ]
-
-env:
- NODE_VERSION: 16.x
-
-jobs:
- lint-js:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - name: Use Node.js ${{ env.NODE_VERSION }}
- uses: actions/setup-node@v1
- with:
- node-version: ${{ env.NODE_VERSION }}
- - name: Install ESLint + ESLint configs/plugins
- run: npm install --only=dev
- - name: Lint files
- run: npm run lint
diff --git a/deps/undici/src/node_modules/busboy/README.md b/deps/undici/src/node_modules/busboy/README.md
deleted file mode 100644
index 654af30455614e..00000000000000
--- a/deps/undici/src/node_modules/busboy/README.md
+++ /dev/null
@@ -1,191 +0,0 @@
-# Description
-
-A node.js module for parsing incoming HTML form data.
-
-Changes (breaking or otherwise) in v1.0.0 can be found [here](https://github.com/mscdex/busboy/issues/266).
-
-# Requirements
-
-* [node.js](http://nodejs.org/) -- v10.16.0 or newer
-
-
-# Install
-
- npm install busboy
-
-
-# Examples
-
-* Parsing (multipart) with default options:
-
-```js
-const http = require('http');
-
-const busboy = require('busboy');
-
-http.createServer((req, res) => {
- if (req.method === 'POST') {
- console.log('POST request');
- const bb = busboy({ headers: req.headers });
- bb.on('file', (name, file, info) => {
- const { filename, encoding, mimeType } = info;
- console.log(
- `File [${name}]: filename: %j, encoding: %j, mimeType: %j`,
- filename,
- encoding,
- mimeType
- );
- file.on('data', (data) => {
- console.log(`File [${name}] got ${data.length} bytes`);
- }).on('close', () => {
- console.log(`File [${name}] done`);
- });
- });
- bb.on('field', (name, val, info) => {
- console.log(`Field [${name}]: value: %j`, val);
- });
- bb.on('close', () => {
- console.log('Done parsing form!');
- res.writeHead(303, { Connection: 'close', Location: '/' });
- res.end();
- });
- req.pipe(bb);
- } else if (req.method === 'GET') {
- res.writeHead(200, { Connection: 'close' });
- res.end(`
-
-
-
-
-
-
- `);
- }
-}).listen(8000, () => {
- console.log('Listening for requests');
-});
-
-// Example output:
-//
-// Listening for requests
-// < ... form submitted ... >
-// POST request
-// File [filefield]: filename: "logo.jpg", encoding: "binary", mime: "image/jpeg"
-// File [filefield] got 11912 bytes
-// Field [textfield]: value: "testing! :-)"
-// File [filefield] done
-// Done parsing form!
-```
-
-* Save all incoming files to disk:
-
-```js
-const { randomFillSync } = require('crypto');
-const fs = require('fs');
-const http = require('http');
-const os = require('os');
-const path = require('path');
-
-const busboy = require('busboy');
-
-const random = (() => {
- const buf = Buffer.alloc(16);
- return () => randomFillSync(buf).toString('hex');
-})();
-
-http.createServer((req, res) => {
- if (req.method === 'POST') {
- const bb = busboy({ headers: req.headers });
- bb.on('file', (name, file, info) => {
- const saveTo = path.join(os.tmpdir(), `busboy-upload-${random()}`);
- file.pipe(fs.createWriteStream(saveTo));
- });
- bb.on('close', () => {
- res.writeHead(200, { 'Connection': 'close' });
- res.end(`That's all folks!`);
- });
- req.pipe(bb);
- return;
- }
- res.writeHead(404);
- res.end();
-}).listen(8000, () => {
- console.log('Listening for requests');
-});
-```
-
-
-# API
-
-## Exports
-
-`busboy` exports a single function:
-
-**( _function_ )**(< _object_ >config) - Creates and returns a new _Writable_ form parser stream.
-
-* Valid `config` properties:
-
- * **headers** - _object_ - These are the HTTP headers of the incoming request, which are used by individual parsers.
-
- * **highWaterMark** - _integer_ - highWaterMark to use for the parser stream. **Default:** node's _stream.Writable_ default.
-
- * **fileHwm** - _integer_ - highWaterMark to use for individual file streams. **Default:** node's _stream.Readable_ default.
-
- * **defCharset** - _string_ - Default character set to use when one isn't defined. **Default:** `'utf8'`.
-
- * **defParamCharset** - _string_ - For multipart forms, the default character set to use for values of part header parameters (e.g. filename) that are not extended parameters (that contain an explicit charset). **Default:** `'latin1'`.
-
- * **preservePath** - _boolean_ - If paths in filenames from file parts in a `'multipart/form-data'` request shall be preserved. **Default:** `false`.
-
- * **limits** - _object_ - Various limits on incoming data. Valid properties are:
-
- * **fieldNameSize** - _integer_ - Max field name size (in bytes). **Default:** `100`.
-
- * **fieldSize** - _integer_ - Max field value size (in bytes). **Default:** `1048576` (1MB).
-
- * **fields** - _integer_ - Max number of non-file fields. **Default:** `Infinity`.
-
- * **fileSize** - _integer_ - For multipart forms, the max file size (in bytes). **Default:** `Infinity`.
-
- * **files** - _integer_ - For multipart forms, the max number of file fields. **Default:** `Infinity`.
-
- * **parts** - _integer_ - For multipart forms, the max number of parts (fields + files). **Default:** `Infinity`.
-
- * **headerPairs** - _integer_ - For multipart forms, the max number of header key-value pairs to parse. **Default:** `2000` (same as node's http module).
-
-This function can throw exceptions if there is something wrong with the values in `config`. For example, if the Content-Type in `headers` is missing entirely, is not a supported type, or is missing the boundary for `'multipart/form-data'` requests.
-
-## (Special) Parser stream events
-
-* **file**(< _string_ >name, < _Readable_ >stream, < _object_ >info) - Emitted for each new file found. `name` contains the form field name. `stream` is a _Readable_ stream containing the file's data. No transformations/conversions (e.g. base64 to raw binary) are done on the file's data. `info` contains the following properties:
-
- * `filename` - _string_ - If supplied, this contains the file's filename. **WARNING:** You should almost _never_ use this value as-is (especially if you are using `preservePath: true` in your `config`) as it could contain malicious input. You are better off generating your own (safe) filenames, or at the very least using a hash of the filename.
-
- * `encoding` - _string_ - The file's `'Content-Transfer-Encoding'` value.
-
- * `mimeType` - _string_ - The file's `'Content-Type'` value.
-
- **Note:** If you listen for this event, you should always consume the `stream` whether you care about its contents or not (you can simply do `stream.resume();` if you want to discard/skip the contents), otherwise the `'finish'`/`'close'` event will never fire on the busboy parser stream.
- However, if you aren't accepting files, you can either simply not listen for the `'file'` event at all or set `limits.files` to `0`, and any/all files will be automatically skipped (these skipped files will still count towards any configured `limits.files` and `limits.parts` limits though).
-
- **Note:** If a configured `limits.fileSize` limit was reached for a file, `stream` will both have a boolean property `truncated` set to `true` (best checked at the end of the stream) and emit a `'limit'` event to notify you when this happens.
-
-* **field**(< _string_ >name, < _string_ >value, < _object_ >info) - Emitted for each new non-file field found. `name` contains the form field name. `value` contains the string value of the field. `info` contains the following properties:
-
- * `nameTruncated` - _boolean_ - Whether `name` was truncated or not (due to a configured `limits.fieldNameSize` limit)
-
- * `valueTruncated` - _boolean_ - Whether `value` was truncated or not (due to a configured `limits.fieldSize` limit)
-
- * `encoding` - _string_ - The field's `'Content-Transfer-Encoding'` value.
-
- * `mimeType` - _string_ - The field's `'Content-Type'` value.
-
-* **partsLimit**() - Emitted when the configured `limits.parts` limit has been reached. No more `'file'` or `'field'` events will be emitted.
-
-* **filesLimit**() - Emitted when the configured `limits.files` limit has been reached. No more `'file'` events will be emitted.
-
-* **fieldsLimit**() - Emitted when the configured `limits.fields` limit has been reached. No more `'field'` events will be emitted.
diff --git a/deps/undici/src/node_modules/busboy/bench/bench-multipart-fields-100mb-big.js b/deps/undici/src/node_modules/busboy/bench/bench-multipart-fields-100mb-big.js
deleted file mode 100644
index ef15729ea65c38..00000000000000
--- a/deps/undici/src/node_modules/busboy/bench/bench-multipart-fields-100mb-big.js
+++ /dev/null
@@ -1,149 +0,0 @@
-'use strict';
-
-function createMultipartBuffers(boundary, sizes) {
- const bufs = [];
- for (let i = 0; i < sizes.length; ++i) {
- const mb = sizes[i] * 1024 * 1024;
- bufs.push(Buffer.from([
- `--${boundary}`,
- `content-disposition: form-data; name="field${i + 1}"`,
- '',
- '0'.repeat(mb),
- '',
- ].join('\r\n')));
- }
- bufs.push(Buffer.from([
- `--${boundary}--`,
- '',
- ].join('\r\n')));
- return bufs;
-}
-
-const boundary = '-----------------------------168072824752491622650073';
-const buffers = createMultipartBuffers(boundary, [
- 10,
- 10,
- 10,
- 20,
- 50,
-]);
-const calls = {
- partBegin: 0,
- headerField: 0,
- headerValue: 0,
- headerEnd: 0,
- headersEnd: 0,
- partData: 0,
- partEnd: 0,
- end: 0,
-};
-
-const moduleName = process.argv[2];
-switch (moduleName) {
- case 'busboy': {
- const busboy = require('busboy');
-
- const parser = busboy({
- limits: {
- fieldSizeLimit: Infinity,
- },
- headers: {
- 'content-type': `multipart/form-data; boundary=${boundary}`,
- },
- });
- parser.on('field', (name, val, info) => {
- ++calls.partBegin;
- ++calls.partData;
- ++calls.partEnd;
- }).on('close', () => {
- ++calls.end;
- console.timeEnd(moduleName);
- });
-
- console.time(moduleName);
- for (const buf of buffers)
- parser.write(buf);
- break;
- }
-
- case 'formidable': {
- const { MultipartParser } = require('formidable');
-
- const parser = new MultipartParser();
- parser.initWithBoundary(boundary);
- parser.on('data', ({ name }) => {
- ++calls[name];
- if (name === 'end')
- console.timeEnd(moduleName);
- });
-
- console.time(moduleName);
- for (const buf of buffers)
- parser.write(buf);
-
- break;
- }
-
- case 'multiparty': {
- const { Readable } = require('stream');
-
- const { Form } = require('multiparty');
-
- const form = new Form({
- maxFieldsSize: Infinity,
- maxFields: Infinity,
- maxFilesSize: Infinity,
- autoFields: false,
- autoFiles: false,
- });
-
- const req = new Readable({ read: () => {} });
- req.headers = {
- 'content-type': `multipart/form-data; boundary=${boundary}`,
- };
-
- function hijack(name, fn) {
- const oldFn = form[name];
- form[name] = function() {
- fn();
- return oldFn.apply(this, arguments);
- };
- }
-
- hijack('onParseHeaderField', () => {
- ++calls.headerField;
- });
- hijack('onParseHeaderValue', () => {
- ++calls.headerValue;
- });
- hijack('onParsePartBegin', () => {
- ++calls.partBegin;
- });
- hijack('onParsePartData', () => {
- ++calls.partData;
- });
- hijack('onParsePartEnd', () => {
- ++calls.partEnd;
- });
-
- form.on('close', () => {
- ++calls.end;
- console.timeEnd(moduleName);
- }).on('part', (p) => p.resume());
-
- console.time(moduleName);
- form.parse(req);
- for (const buf of buffers)
- req.push(buf);
- req.push(null);
-
- break;
- }
-
- default:
- if (moduleName === undefined)
- console.error('Missing parser module name');
- else
- console.error(`Invalid parser module name: ${moduleName}`);
- process.exit(1);
-}
diff --git a/deps/undici/src/node_modules/busboy/bench/bench-multipart-fields-100mb-small.js b/deps/undici/src/node_modules/busboy/bench/bench-multipart-fields-100mb-small.js
deleted file mode 100644
index f32d421c735d32..00000000000000
--- a/deps/undici/src/node_modules/busboy/bench/bench-multipart-fields-100mb-small.js
+++ /dev/null
@@ -1,143 +0,0 @@
-'use strict';
-
-function createMultipartBuffers(boundary, sizes) {
- const bufs = [];
- for (let i = 0; i < sizes.length; ++i) {
- const mb = sizes[i] * 1024 * 1024;
- bufs.push(Buffer.from([
- `--${boundary}`,
- `content-disposition: form-data; name="field${i + 1}"`,
- '',
- '0'.repeat(mb),
- '',
- ].join('\r\n')));
- }
- bufs.push(Buffer.from([
- `--${boundary}--`,
- '',
- ].join('\r\n')));
- return bufs;
-}
-
-const boundary = '-----------------------------168072824752491622650073';
-const buffers = createMultipartBuffers(boundary, (new Array(100)).fill(1));
-const calls = {
- partBegin: 0,
- headerField: 0,
- headerValue: 0,
- headerEnd: 0,
- headersEnd: 0,
- partData: 0,
- partEnd: 0,
- end: 0,
-};
-
-const moduleName = process.argv[2];
-switch (moduleName) {
- case 'busboy': {
- const busboy = require('busboy');
-
- const parser = busboy({
- limits: {
- fieldSizeLimit: Infinity,
- },
- headers: {
- 'content-type': `multipart/form-data; boundary=${boundary}`,
- },
- });
- parser.on('field', (name, val, info) => {
- ++calls.partBegin;
- ++calls.partData;
- ++calls.partEnd;
- }).on('close', () => {
- ++calls.end;
- console.timeEnd(moduleName);
- });
-
- console.time(moduleName);
- for (const buf of buffers)
- parser.write(buf);
- break;
- }
-
- case 'formidable': {
- const { MultipartParser } = require('formidable');
-
- const parser = new MultipartParser();
- parser.initWithBoundary(boundary);
- parser.on('data', ({ name }) => {
- ++calls[name];
- if (name === 'end')
- console.timeEnd(moduleName);
- });
-
- console.time(moduleName);
- for (const buf of buffers)
- parser.write(buf);
-
- break;
- }
-
- case 'multiparty': {
- const { Readable } = require('stream');
-
- const { Form } = require('multiparty');
-
- const form = new Form({
- maxFieldsSize: Infinity,
- maxFields: Infinity,
- maxFilesSize: Infinity,
- autoFields: false,
- autoFiles: false,
- });
-
- const req = new Readable({ read: () => {} });
- req.headers = {
- 'content-type': `multipart/form-data; boundary=${boundary}`,
- };
-
- function hijack(name, fn) {
- const oldFn = form[name];
- form[name] = function() {
- fn();
- return oldFn.apply(this, arguments);
- };
- }
-
- hijack('onParseHeaderField', () => {
- ++calls.headerField;
- });
- hijack('onParseHeaderValue', () => {
- ++calls.headerValue;
- });
- hijack('onParsePartBegin', () => {
- ++calls.partBegin;
- });
- hijack('onParsePartData', () => {
- ++calls.partData;
- });
- hijack('onParsePartEnd', () => {
- ++calls.partEnd;
- });
-
- form.on('close', () => {
- ++calls.end;
- console.timeEnd(moduleName);
- }).on('part', (p) => p.resume());
-
- console.time(moduleName);
- form.parse(req);
- for (const buf of buffers)
- req.push(buf);
- req.push(null);
-
- break;
- }
-
- default:
- if (moduleName === undefined)
- console.error('Missing parser module name');
- else
- console.error(`Invalid parser module name: ${moduleName}`);
- process.exit(1);
-}
diff --git a/deps/undici/src/node_modules/busboy/bench/bench-multipart-files-100mb-big.js b/deps/undici/src/node_modules/busboy/bench/bench-multipart-files-100mb-big.js
deleted file mode 100644
index b46bdee02cdded..00000000000000
--- a/deps/undici/src/node_modules/busboy/bench/bench-multipart-files-100mb-big.js
+++ /dev/null
@@ -1,154 +0,0 @@
-'use strict';
-
-function createMultipartBuffers(boundary, sizes) {
- const bufs = [];
- for (let i = 0; i < sizes.length; ++i) {
- const mb = sizes[i] * 1024 * 1024;
- bufs.push(Buffer.from([
- `--${boundary}`,
- `content-disposition: form-data; name="file${i + 1}"; `
- + `filename="random${i + 1}.bin"`,
- 'content-type: application/octet-stream',
- '',
- '0'.repeat(mb),
- '',
- ].join('\r\n')));
- }
- bufs.push(Buffer.from([
- `--${boundary}--`,
- '',
- ].join('\r\n')));
- return bufs;
-}
-
-const boundary = '-----------------------------168072824752491622650073';
-const buffers = createMultipartBuffers(boundary, [
- 10,
- 10,
- 10,
- 20,
- 50,
-]);
-const calls = {
- partBegin: 0,
- headerField: 0,
- headerValue: 0,
- headerEnd: 0,
- headersEnd: 0,
- partData: 0,
- partEnd: 0,
- end: 0,
-};
-
-const moduleName = process.argv[2];
-switch (moduleName) {
- case 'busboy': {
- const busboy = require('busboy');
-
- const parser = busboy({
- limits: {
- fieldSizeLimit: Infinity,
- },
- headers: {
- 'content-type': `multipart/form-data; boundary=${boundary}`,
- },
- });
- parser.on('file', (name, stream, info) => {
- ++calls.partBegin;
- stream.on('data', (chunk) => {
- ++calls.partData;
- }).on('end', () => {
- ++calls.partEnd;
- });
- }).on('close', () => {
- ++calls.end;
- console.timeEnd(moduleName);
- });
-
- console.time(moduleName);
- for (const buf of buffers)
- parser.write(buf);
- break;
- }
-
- case 'formidable': {
- const { MultipartParser } = require('formidable');
-
- const parser = new MultipartParser();
- parser.initWithBoundary(boundary);
- parser.on('data', ({ name }) => {
- ++calls[name];
- if (name === 'end')
- console.timeEnd(moduleName);
- });
-
- console.time(moduleName);
- for (const buf of buffers)
- parser.write(buf);
-
- break;
- }
-
- case 'multiparty': {
- const { Readable } = require('stream');
-
- const { Form } = require('multiparty');
-
- const form = new Form({
- maxFieldsSize: Infinity,
- maxFields: Infinity,
- maxFilesSize: Infinity,
- autoFields: false,
- autoFiles: false,
- });
-
- const req = new Readable({ read: () => {} });
- req.headers = {
- 'content-type': `multipart/form-data; boundary=${boundary}`,
- };
-
- function hijack(name, fn) {
- const oldFn = form[name];
- form[name] = function() {
- fn();
- return oldFn.apply(this, arguments);
- };
- }
-
- hijack('onParseHeaderField', () => {
- ++calls.headerField;
- });
- hijack('onParseHeaderValue', () => {
- ++calls.headerValue;
- });
- hijack('onParsePartBegin', () => {
- ++calls.partBegin;
- });
- hijack('onParsePartData', () => {
- ++calls.partData;
- });
- hijack('onParsePartEnd', () => {
- ++calls.partEnd;
- });
-
- form.on('close', () => {
- ++calls.end;
- console.timeEnd(moduleName);
- }).on('part', (p) => p.resume());
-
- console.time(moduleName);
- form.parse(req);
- for (const buf of buffers)
- req.push(buf);
- req.push(null);
-
- break;
- }
-
- default:
- if (moduleName === undefined)
- console.error('Missing parser module name');
- else
- console.error(`Invalid parser module name: ${moduleName}`);
- process.exit(1);
-}
diff --git a/deps/undici/src/node_modules/busboy/bench/bench-multipart-files-100mb-small.js b/deps/undici/src/node_modules/busboy/bench/bench-multipart-files-100mb-small.js
deleted file mode 100644
index 46b5dffb0c74d8..00000000000000
--- a/deps/undici/src/node_modules/busboy/bench/bench-multipart-files-100mb-small.js
+++ /dev/null
@@ -1,148 +0,0 @@
-'use strict';
-
-function createMultipartBuffers(boundary, sizes) {
- const bufs = [];
- for (let i = 0; i < sizes.length; ++i) {
- const mb = sizes[i] * 1024 * 1024;
- bufs.push(Buffer.from([
- `--${boundary}`,
- `content-disposition: form-data; name="file${i + 1}"; `
- + `filename="random${i + 1}.bin"`,
- 'content-type: application/octet-stream',
- '',
- '0'.repeat(mb),
- '',
- ].join('\r\n')));
- }
- bufs.push(Buffer.from([
- `--${boundary}--`,
- '',
- ].join('\r\n')));
- return bufs;
-}
-
-const boundary = '-----------------------------168072824752491622650073';
-const buffers = createMultipartBuffers(boundary, (new Array(100)).fill(1));
-const calls = {
- partBegin: 0,
- headerField: 0,
- headerValue: 0,
- headerEnd: 0,
- headersEnd: 0,
- partData: 0,
- partEnd: 0,
- end: 0,
-};
-
-const moduleName = process.argv[2];
-switch (moduleName) {
- case 'busboy': {
- const busboy = require('busboy');
-
- const parser = busboy({
- limits: {
- fieldSizeLimit: Infinity,
- },
- headers: {
- 'content-type': `multipart/form-data; boundary=${boundary}`,
- },
- });
- parser.on('file', (name, stream, info) => {
- ++calls.partBegin;
- stream.on('data', (chunk) => {
- ++calls.partData;
- }).on('end', () => {
- ++calls.partEnd;
- });
- }).on('close', () => {
- ++calls.end;
- console.timeEnd(moduleName);
- });
-
- console.time(moduleName);
- for (const buf of buffers)
- parser.write(buf);
- break;
- }
-
- case 'formidable': {
- const { MultipartParser } = require('formidable');
-
- const parser = new MultipartParser();
- parser.initWithBoundary(boundary);
- parser.on('data', ({ name }) => {
- ++calls[name];
- if (name === 'end')
- console.timeEnd(moduleName);
- });
-
- console.time(moduleName);
- for (const buf of buffers)
- parser.write(buf);
-
- break;
- }
-
- case 'multiparty': {
- const { Readable } = require('stream');
-
- const { Form } = require('multiparty');
-
- const form = new Form({
- maxFieldsSize: Infinity,
- maxFields: Infinity,
- maxFilesSize: Infinity,
- autoFields: false,
- autoFiles: false,
- });
-
- const req = new Readable({ read: () => {} });
- req.headers = {
- 'content-type': `multipart/form-data; boundary=${boundary}`,
- };
-
- function hijack(name, fn) {
- const oldFn = form[name];
- form[name] = function() {
- fn();
- return oldFn.apply(this, arguments);
- };
- }
-
- hijack('onParseHeaderField', () => {
- ++calls.headerField;
- });
- hijack('onParseHeaderValue', () => {
- ++calls.headerValue;
- });
- hijack('onParsePartBegin', () => {
- ++calls.partBegin;
- });
- hijack('onParsePartData', () => {
- ++calls.partData;
- });
- hijack('onParsePartEnd', () => {
- ++calls.partEnd;
- });
-
- form.on('close', () => {
- ++calls.end;
- console.timeEnd(moduleName);
- }).on('part', (p) => p.resume());
-
- console.time(moduleName);
- form.parse(req);
- for (const buf of buffers)
- req.push(buf);
- req.push(null);
-
- break;
- }
-
- default:
- if (moduleName === undefined)
- console.error('Missing parser module name');
- else
- console.error(`Invalid parser module name: ${moduleName}`);
- process.exit(1);
-}
diff --git a/deps/undici/src/node_modules/busboy/bench/bench-urlencoded-fields-100pairs-small.js b/deps/undici/src/node_modules/busboy/bench/bench-urlencoded-fields-100pairs-small.js
deleted file mode 100644
index 5c337df2ef951f..00000000000000
--- a/deps/undici/src/node_modules/busboy/bench/bench-urlencoded-fields-100pairs-small.js
+++ /dev/null
@@ -1,101 +0,0 @@
-'use strict';
-
-const buffers = [
- Buffer.from(
- (new Array(100)).fill('').map((_, i) => `key${i}=value${i}`).join('&')
- ),
-];
-const calls = {
- field: 0,
- end: 0,
-};
-
-let n = 3e3;
-
-const moduleName = process.argv[2];
-switch (moduleName) {
- case 'busboy': {
- const busboy = require('busboy');
-
- console.time(moduleName);
- (function next() {
- const parser = busboy({
- limits: {
- fieldSizeLimit: Infinity,
- },
- headers: {
- 'content-type': 'application/x-www-form-urlencoded; charset=utf-8',
- },
- });
- parser.on('field', (name, val, info) => {
- ++calls.field;
- }).on('close', () => {
- ++calls.end;
- if (--n === 0)
- console.timeEnd(moduleName);
- else
- process.nextTick(next);
- });
-
- for (const buf of buffers)
- parser.write(buf);
- parser.end();
- })();
- break;
- }
-
- case 'formidable': {
- const QuerystringParser =
- require('formidable/src/parsers/Querystring.js');
-
- console.time(moduleName);
- (function next() {
- const parser = new QuerystringParser();
- parser.on('data', (obj) => {
- ++calls.field;
- }).on('end', () => {
- ++calls.end;
- if (--n === 0)
- console.timeEnd(moduleName);
- else
- process.nextTick(next);
- });
-
- for (const buf of buffers)
- parser.write(buf);
- parser.end();
- })();
- break;
- }
-
- case 'formidable-streaming': {
- const QuerystringParser =
- require('formidable/src/parsers/StreamingQuerystring.js');
-
- console.time(moduleName);
- (function next() {
- const parser = new QuerystringParser();
- parser.on('data', (obj) => {
- ++calls.field;
- }).on('end', () => {
- ++calls.end;
- if (--n === 0)
- console.timeEnd(moduleName);
- else
- process.nextTick(next);
- });
-
- for (const buf of buffers)
- parser.write(buf);
- parser.end();
- })();
- break;
- }
-
- default:
- if (moduleName === undefined)
- console.error('Missing parser module name');
- else
- console.error(`Invalid parser module name: ${moduleName}`);
- process.exit(1);
-}
diff --git a/deps/undici/src/node_modules/busboy/bench/bench-urlencoded-fields-900pairs-small-alt.js b/deps/undici/src/node_modules/busboy/bench/bench-urlencoded-fields-900pairs-small-alt.js
deleted file mode 100644
index 1f5645cb8cc43f..00000000000000
--- a/deps/undici/src/node_modules/busboy/bench/bench-urlencoded-fields-900pairs-small-alt.js
+++ /dev/null
@@ -1,84 +0,0 @@
-'use strict';
-
-const buffers = [
- Buffer.from(
- (new Array(900)).fill('').map((_, i) => `key${i}=value${i}`).join('&')
- ),
-];
-const calls = {
- field: 0,
- end: 0,
-};
-
-const moduleName = process.argv[2];
-switch (moduleName) {
- case 'busboy': {
- const busboy = require('busboy');
-
- console.time(moduleName);
- const parser = busboy({
- limits: {
- fieldSizeLimit: Infinity,
- },
- headers: {
- 'content-type': 'application/x-www-form-urlencoded; charset=utf-8',
- },
- });
- parser.on('field', (name, val, info) => {
- ++calls.field;
- }).on('close', () => {
- ++calls.end;
- console.timeEnd(moduleName);
- });
-
- for (const buf of buffers)
- parser.write(buf);
- parser.end();
- break;
- }
-
- case 'formidable': {
- const QuerystringParser =
- require('formidable/src/parsers/Querystring.js');
-
- console.time(moduleName);
- const parser = new QuerystringParser();
- parser.on('data', (obj) => {
- ++calls.field;
- }).on('end', () => {
- ++calls.end;
- console.timeEnd(moduleName);
- });
-
- for (const buf of buffers)
- parser.write(buf);
- parser.end();
- break;
- }
-
- case 'formidable-streaming': {
- const QuerystringParser =
- require('formidable/src/parsers/StreamingQuerystring.js');
-
- console.time(moduleName);
- const parser = new QuerystringParser();
- parser.on('data', (obj) => {
- ++calls.field;
- }).on('end', () => {
- ++calls.end;
- console.timeEnd(moduleName);
- });
-
- for (const buf of buffers)
- parser.write(buf);
- parser.end();
- break;
- }
-
- default:
- if (moduleName === undefined)
- console.error('Missing parser module name');
- else
- console.error(`Invalid parser module name: ${moduleName}`);
- process.exit(1);
-}
diff --git a/deps/undici/src/node_modules/busboy/lib/index.js b/deps/undici/src/node_modules/busboy/lib/index.js
deleted file mode 100644
index 873272d93cf34c..00000000000000
--- a/deps/undici/src/node_modules/busboy/lib/index.js
+++ /dev/null
@@ -1,57 +0,0 @@
-'use strict';
-
-const { parseContentType } = require('./utils.js');
-
-function getInstance(cfg) {
- const headers = cfg.headers;
- const conType = parseContentType(headers['content-type']);
- if (!conType)
- throw new Error('Malformed content type');
-
- for (const type of TYPES) {
- const matched = type.detect(conType);
- if (!matched)
- continue;
-
- const instanceCfg = {
- limits: cfg.limits,
- headers,
- conType,
- highWaterMark: undefined,
- fileHwm: undefined,
- defCharset: undefined,
- defParamCharset: undefined,
- preservePath: false,
- };
- if (cfg.highWaterMark)
- instanceCfg.highWaterMark = cfg.highWaterMark;
- if (cfg.fileHwm)
- instanceCfg.fileHwm = cfg.fileHwm;
- instanceCfg.defCharset = cfg.defCharset;
- instanceCfg.defParamCharset = cfg.defParamCharset;
- instanceCfg.preservePath = cfg.preservePath;
- return new type(instanceCfg);
- }
-
- throw new Error(`Unsupported content type: ${headers['content-type']}`);
-}
-
-// Note: types are explicitly listed here for easier bundling
-// See: https://github.com/mscdex/busboy/issues/121
-const TYPES = [
- require('./types/multipart'),
- require('./types/urlencoded'),
-].filter(function(typemod) { return typeof typemod.detect === 'function'; });
-
-module.exports = (cfg) => {
- if (typeof cfg !== 'object' || cfg === null)
- cfg = {};
-
- if (typeof cfg.headers !== 'object'
- || cfg.headers === null
- || typeof cfg.headers['content-type'] !== 'string') {
- throw new Error('Missing Content-Type');
- }
-
- return getInstance(cfg);
-};
diff --git a/deps/undici/src/node_modules/busboy/lib/types/multipart.js b/deps/undici/src/node_modules/busboy/lib/types/multipart.js
deleted file mode 100644
index cc0d7bb6638a12..00000000000000
--- a/deps/undici/src/node_modules/busboy/lib/types/multipart.js
+++ /dev/null
@@ -1,653 +0,0 @@
-'use strict';
-
-const { Readable, Writable } = require('stream');
-
-const StreamSearch = require('streamsearch');
-
-const {
- basename,
- convertToUTF8,
- getDecoder,
- parseContentType,
- parseDisposition,
-} = require('../utils.js');
-
-const BUF_CRLF = Buffer.from('\r\n');
-const BUF_CR = Buffer.from('\r');
-const BUF_DASH = Buffer.from('-');
-
-function noop() {}
-
-const MAX_HEADER_PAIRS = 2000; // From node
-const MAX_HEADER_SIZE = 16 * 1024; // From node (its default value)
-
-const HPARSER_NAME = 0;
-const HPARSER_PRE_OWS = 1;
-const HPARSER_VALUE = 2;
-class HeaderParser {
- constructor(cb) {
- this.header = Object.create(null);
- this.pairCount = 0;
- this.byteCount = 0;
- this.state = HPARSER_NAME;
- this.name = '';
- this.value = '';
- this.crlf = 0;
- this.cb = cb;
- }
-
- reset() {
- this.header = Object.create(null);
- this.pairCount = 0;
- this.byteCount = 0;
- this.state = HPARSER_NAME;
- this.name = '';
- this.value = '';
- this.crlf = 0;
- }
-
- push(chunk, pos, end) {
- let start = pos;
- while (pos < end) {
- switch (this.state) {
- case HPARSER_NAME: {
- let done = false;
- for (; pos < end; ++pos) {
- if (this.byteCount === MAX_HEADER_SIZE)
- return -1;
- ++this.byteCount;
- const code = chunk[pos];
- if (TOKEN[code] !== 1) {
- if (code !== 58/* ':' */)
- return -1;
- this.name += chunk.latin1Slice(start, pos);
- if (this.name.length === 0)
- return -1;
- ++pos;
- done = true;
- this.state = HPARSER_PRE_OWS;
- break;
- }
- }
- if (!done) {
- this.name += chunk.latin1Slice(start, pos);
- break;
- }
- // FALLTHROUGH
- }
- case HPARSER_PRE_OWS: {
- // Skip optional whitespace
- let done = false;
- for (; pos < end; ++pos) {
- if (this.byteCount === MAX_HEADER_SIZE)
- return -1;
- ++this.byteCount;
- const code = chunk[pos];
- if (code !== 32/* ' ' */ && code !== 9/* '\t' */) {
- start = pos;
- done = true;
- this.state = HPARSER_VALUE;
- break;
- }
- }
- if (!done)
- break;
- // FALLTHROUGH
- }
- case HPARSER_VALUE:
- switch (this.crlf) {
- case 0: // Nothing yet
- for (; pos < end; ++pos) {
- if (this.byteCount === MAX_HEADER_SIZE)
- return -1;
- ++this.byteCount;
- const code = chunk[pos];
- if (FIELD_VCHAR[code] !== 1) {
- if (code !== 13/* '\r' */)
- return -1;
- ++this.crlf;
- break;
- }
- }
- this.value += chunk.latin1Slice(start, pos++);
- break;
- case 1: // Received CR
- if (this.byteCount === MAX_HEADER_SIZE)
- return -1;
- ++this.byteCount;
- if (chunk[pos++] !== 10/* '\n' */)
- return -1;
- ++this.crlf;
- break;
- case 2: { // Received CR LF
- if (this.byteCount === MAX_HEADER_SIZE)
- return -1;
- ++this.byteCount;
- const code = chunk[pos];
- if (code === 32/* ' ' */ || code === 9/* '\t' */) {
- // Folded value
- start = pos;
- this.crlf = 0;
- } else {
- if (++this.pairCount < MAX_HEADER_PAIRS) {
- this.name = this.name.toLowerCase();
- if (this.header[this.name] === undefined)
- this.header[this.name] = [this.value];
- else
- this.header[this.name].push(this.value);
- }
- if (code === 13/* '\r' */) {
- ++this.crlf;
- ++pos;
- } else {
- // Assume start of next header field name
- start = pos;
- this.crlf = 0;
- this.state = HPARSER_NAME;
- this.name = '';
- this.value = '';
- }
- }
- break;
- }
- case 3: { // Received CR LF CR
- if (this.byteCount === MAX_HEADER_SIZE)
- return -1;
- ++this.byteCount;
- if (chunk[pos++] !== 10/* '\n' */)
- return -1;
- // End of header
- const header = this.header;
- this.reset();
- this.cb(header);
- return pos;
- }
- }
- break;
- }
- }
-
- return pos;
- }
-}
-
-class FileStream extends Readable {
- constructor(opts, owner) {
- super(opts);
- this.truncated = false;
- this._readcb = null;
- this.once('end', () => {
- // We need to make sure that we call any outstanding _writecb() that is
- // associated with this file so that processing of the rest of the form
- // can continue. This may not happen if the file stream ends right after
- // backpressure kicks in, so we force it here.
- this._read();
- if (--owner._fileEndsLeft === 0 && owner._finalcb) {
- const cb = owner._finalcb;
- owner._finalcb = null;
- // Make sure other 'end' event handlers get a chance to be executed
- // before busboy's 'finish' event is emitted
- process.nextTick(cb);
- }
- });
- }
- _read(n) {
- const cb = this._readcb;
- if (cb) {
- this._readcb = null;
- cb();
- }
- }
-}
-
-const ignoreData = {
- push: (chunk, pos) => {},
- destroy: () => {},
-};
-
-function callAndUnsetCb(self, err) {
- const cb = self._writecb;
- self._writecb = null;
- if (err)
- self.destroy(err);
- else if (cb)
- cb();
-}
-
-function nullDecoder(val, hint) {
- return val;
-}
-
-class Multipart extends Writable {
- constructor(cfg) {
- const streamOpts = {
- autoDestroy: true,
- emitClose: true,
- highWaterMark: (typeof cfg.highWaterMark === 'number'
- ? cfg.highWaterMark
- : undefined),
- };
- super(streamOpts);
-
- if (!cfg.conType.params || typeof cfg.conType.params.boundary !== 'string')
- throw new Error('Multipart: Boundary not found');
-
- const boundary = cfg.conType.params.boundary;
- const paramDecoder = (typeof cfg.defParamCharset === 'string'
- && cfg.defParamCharset
- ? getDecoder(cfg.defParamCharset)
- : nullDecoder);
- const defCharset = (cfg.defCharset || 'utf8');
- const preservePath = cfg.preservePath;
- const fileOpts = {
- autoDestroy: true,
- emitClose: true,
- highWaterMark: (typeof cfg.fileHwm === 'number'
- ? cfg.fileHwm
- : undefined),
- };
-
- const limits = cfg.limits;
- const fieldSizeLimit = (limits && typeof limits.fieldSize === 'number'
- ? limits.fieldSize
- : 1 * 1024 * 1024);
- const fileSizeLimit = (limits && typeof limits.fileSize === 'number'
- ? limits.fileSize
- : Infinity);
- const filesLimit = (limits && typeof limits.files === 'number'
- ? limits.files
- : Infinity);
- const fieldsLimit = (limits && typeof limits.fields === 'number'
- ? limits.fields
- : Infinity);
- const partsLimit = (limits && typeof limits.parts === 'number'
- ? limits.parts
- : Infinity);
-
- let parts = -1; // Account for initial boundary
- let fields = 0;
- let files = 0;
- let skipPart = false;
-
- this._fileEndsLeft = 0;
- this._fileStream = undefined;
- this._complete = false;
- let fileSize = 0;
-
- let field;
- let fieldSize = 0;
- let partCharset;
- let partEncoding;
- let partType;
- let partName;
- let partTruncated = false;
-
- let hitFilesLimit = false;
- let hitFieldsLimit = false;
-
- this._hparser = null;
- const hparser = new HeaderParser((header) => {
- this._hparser = null;
- skipPart = false;
-
- partType = 'text/plain';
- partCharset = defCharset;
- partEncoding = '7bit';
- partName = undefined;
- partTruncated = false;
-
- let filename;
- if (!header['content-disposition']) {
- skipPart = true;
- return;
- }
-
- const disp = parseDisposition(header['content-disposition'][0],
- paramDecoder);
- if (!disp || disp.type !== 'form-data') {
- skipPart = true;
- return;
- }
-
- if (disp.params) {
- if (disp.params.name)
- partName = disp.params.name;
-
- if (disp.params['filename*'])
- filename = disp.params['filename*'];
- else if (disp.params.filename)
- filename = disp.params.filename;
-
- if (filename !== undefined && !preservePath)
- filename = basename(filename);
- }
-
- if (header['content-type']) {
- const conType = parseContentType(header['content-type'][0]);
- if (conType) {
- partType = `${conType.type}/${conType.subtype}`;
- if (conType.params && typeof conType.params.charset === 'string')
- partCharset = conType.params.charset.toLowerCase();
- }
- }
-
- if (header['content-transfer-encoding'])
- partEncoding = header['content-transfer-encoding'][0].toLowerCase();
-
- if (partType === 'application/octet-stream' || filename !== undefined) {
- // File
-
- if (files === filesLimit) {
- if (!hitFilesLimit) {
- hitFilesLimit = true;
- this.emit('filesLimit');
- }
- skipPart = true;
- return;
- }
- ++files;
-
- if (this.listenerCount('file') === 0) {
- skipPart = true;
- return;
- }
-
- fileSize = 0;
- this._fileStream = new FileStream(fileOpts, this);
- ++this._fileEndsLeft;
- this.emit(
- 'file',
- partName,
- this._fileStream,
- { filename,
- encoding: partEncoding,
- mimeType: partType }
- );
- } else {
- // Non-file
-
- if (fields === fieldsLimit) {
- if (!hitFieldsLimit) {
- hitFieldsLimit = true;
- this.emit('fieldsLimit');
- }
- skipPart = true;
- return;
- }
- ++fields;
-
- if (this.listenerCount('field') === 0) {
- skipPart = true;
- return;
- }
-
- field = [];
- fieldSize = 0;
- }
- });
-
- let matchPostBoundary = 0;
- const ssCb = (isMatch, data, start, end, isDataSafe) => {
-retrydata:
- while (data) {
- if (this._hparser !== null) {
- const ret = this._hparser.push(data, start, end);
- if (ret === -1) {
- this._hparser = null;
- hparser.reset();
- this.emit('error', new Error('Malformed part header'));
- break;
- }
- start = ret;
- }
-
- if (start === end)
- break;
-
- if (matchPostBoundary !== 0) {
- if (matchPostBoundary === 1) {
- switch (data[start]) {
- case 45: // '-'
- // Try matching '--' after boundary
- matchPostBoundary = 2;
- ++start;
- break;
- case 13: // '\r'
- // Try matching CR LF before header
- matchPostBoundary = 3;
- ++start;
- break;
- default:
- matchPostBoundary = 0;
- }
- if (start === end)
- return;
- }
-
- if (matchPostBoundary === 2) {
- matchPostBoundary = 0;
- if (data[start] === 45/* '-' */) {
- // End of multipart data
- this._complete = true;
- this._bparser = ignoreData;
- return;
- }
- // We saw something other than '-', so put the dash we consumed
- // "back"
- const writecb = this._writecb;
- this._writecb = noop;
- ssCb(false, BUF_DASH, 0, 1, false);
- this._writecb = writecb;
- } else if (matchPostBoundary === 3) {
- matchPostBoundary = 0;
- if (data[start] === 10/* '\n' */) {
- ++start;
- if (parts >= partsLimit)
- break;
- // Prepare the header parser
- this._hparser = hparser;
- if (start === end)
- break;
- // Process the remaining data as a header
- continue retrydata;
- } else {
- // We saw something other than LF, so put the CR we consumed
- // "back"
- const writecb = this._writecb;
- this._writecb = noop;
- ssCb(false, BUF_CR, 0, 1, false);
- this._writecb = writecb;
- }
- }
- }
-
- if (!skipPart) {
- if (this._fileStream) {
- let chunk;
- const actualLen = Math.min(end - start, fileSizeLimit - fileSize);
- if (!isDataSafe) {
- chunk = Buffer.allocUnsafe(actualLen);
- data.copy(chunk, 0, start, start + actualLen);
- } else {
- chunk = data.slice(start, start + actualLen);
- }
-
- fileSize += chunk.length;
- if (fileSize === fileSizeLimit) {
- if (chunk.length > 0)
- this._fileStream.push(chunk);
- this._fileStream.emit('limit');
- this._fileStream.truncated = true;
- skipPart = true;
- } else if (!this._fileStream.push(chunk)) {
- if (this._writecb)
- this._fileStream._readcb = this._writecb;
- this._writecb = null;
- }
- } else if (field !== undefined) {
- let chunk;
- const actualLen = Math.min(
- end - start,
- fieldSizeLimit - fieldSize
- );
- if (!isDataSafe) {
- chunk = Buffer.allocUnsafe(actualLen);
- data.copy(chunk, 0, start, start + actualLen);
- } else {
- chunk = data.slice(start, start + actualLen);
- }
-
- fieldSize += actualLen;
- field.push(chunk);
- if (fieldSize === fieldSizeLimit) {
- skipPart = true;
- partTruncated = true;
- }
- }
- }
-
- break;
- }
-
- if (isMatch) {
- matchPostBoundary = 1;
-
- if (this._fileStream) {
- // End the active file stream if the previous part was a file
- this._fileStream.push(null);
- this._fileStream = null;
- } else if (field !== undefined) {
- let data;
- switch (field.length) {
- case 0:
- data = '';
- break;
- case 1:
- data = convertToUTF8(field[0], partCharset, 0);
- break;
- default:
- data = convertToUTF8(
- Buffer.concat(field, fieldSize),
- partCharset,
- 0
- );
- }
- field = undefined;
- fieldSize = 0;
- this.emit(
- 'field',
- partName,
- data,
- { nameTruncated: false,
- valueTruncated: partTruncated,
- encoding: partEncoding,
- mimeType: partType }
- );
- }
-
- if (++parts === partsLimit)
- this.emit('partsLimit');
- }
- };
- this._bparser = new StreamSearch(`\r\n--${boundary}`, ssCb);
-
- this._writecb = null;
- this._finalcb = null;
-
- // Just in case there is no preamble
- this.write(BUF_CRLF);
- }
-
- static detect(conType) {
- return (conType.type === 'multipart' && conType.subtype === 'form-data');
- }
-
- _write(chunk, enc, cb) {
- this._writecb = cb;
- this._bparser.push(chunk, 0);
- if (this._writecb)
- callAndUnsetCb(this);
- }
-
- _destroy(err, cb) {
- this._hparser = null;
- this._bparser = ignoreData;
- if (!err)
- err = checkEndState(this);
- const fileStream = this._fileStream;
- if (fileStream) {
- this._fileStream = null;
- fileStream.destroy(err);
- }
- cb(err);
- }
-
- _final(cb) {
- this._bparser.destroy();
- if (!this._complete)
- return cb(new Error('Unexpected end of form'));
- if (this._fileEndsLeft)
- this._finalcb = finalcb.bind(null, this, cb);
- else
- finalcb(this, cb);
- }
-}
-
-function finalcb(self, cb, err) {
- if (err)
- return cb(err);
- err = checkEndState(self);
- cb(err);
-}
-
-function checkEndState(self) {
- if (self._hparser)
- return new Error('Malformed part header');
- const fileStream = self._fileStream;
- if (fileStream) {
- self._fileStream = null;
- fileStream.destroy(new Error('Unexpected end of file'));
- }
- if (!self._complete)
- return new Error('Unexpected end of form');
-}
-
-const TOKEN = [
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
- 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-];
-
-const FIELD_VCHAR = [
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
-];
-
-module.exports = Multipart;
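Reviewer note: the Multipart writable removed above is busboy's streaming multipart/form-data parser, reached through the package's top-level busboy() factory rather than used directly. A minimal consumer sketch, mirroring how the deleted tests further below drive it (the boundary and payload here are illustrative, not taken from this diff):

'use strict';

const busboy = require('busboy'); // the vendored package being removed in this diff

const boundary = 'XBOUNDARYX'; // illustrative boundary
const body = Buffer.from([
  `--${boundary}`,
  'Content-Disposition: form-data; name="greeting"',
  '',
  'hello',
  `--${boundary}--`
].join('\r\n'));

const bb = busboy({
  headers: { 'content-type': `multipart/form-data; boundary=${boundary}` }
});

bb.on('field', (name, value, info) => {
  // info carries { nameTruncated, valueTruncated, encoding, mimeType }
  console.log(`field ${name} = ${value}`);
});

bb.on('file', (name, stream, info) => {
  // File parts arrive as readable streams and must be consumed (or resumed)
  stream.resume();
});

bb.on('close', () => console.log('done parsing'));

bb.end(body);

Note that 'field' and 'file' listeners must be attached before data is written; as the constructor above shows, parts without a matching listener are skipped.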
diff --git a/deps/undici/src/node_modules/busboy/lib/types/urlencoded.js b/deps/undici/src/node_modules/busboy/lib/types/urlencoded.js
deleted file mode 100644
index 5c463a25899a72..00000000000000
--- a/deps/undici/src/node_modules/busboy/lib/types/urlencoded.js
+++ /dev/null
@@ -1,350 +0,0 @@
-'use strict';
-
-const { Writable } = require('stream');
-
-const { getDecoder } = require('../utils.js');
-
-class URLEncoded extends Writable {
- constructor(cfg) {
- const streamOpts = {
- autoDestroy: true,
- emitClose: true,
- highWaterMark: (typeof cfg.highWaterMark === 'number'
- ? cfg.highWaterMark
- : undefined),
- };
- super(streamOpts);
-
- let charset = (cfg.defCharset || 'utf8');
- if (cfg.conType.params && typeof cfg.conType.params.charset === 'string')
- charset = cfg.conType.params.charset;
-
- this.charset = charset;
-
- const limits = cfg.limits;
- this.fieldSizeLimit = (limits && typeof limits.fieldSize === 'number'
- ? limits.fieldSize
- : 1 * 1024 * 1024);
- this.fieldsLimit = (limits && typeof limits.fields === 'number'
- ? limits.fields
- : Infinity);
- this.fieldNameSizeLimit = (
- limits && typeof limits.fieldNameSize === 'number'
- ? limits.fieldNameSize
- : 100
- );
-
- this._inKey = true;
- this._keyTrunc = false;
- this._valTrunc = false;
- this._bytesKey = 0;
- this._bytesVal = 0;
- this._fields = 0;
- this._key = '';
- this._val = '';
- this._byte = -2;
- this._lastPos = 0;
- this._encode = 0;
- this._decoder = getDecoder(charset);
- }
-
- static detect(conType) {
- return (conType.type === 'application'
- && conType.subtype === 'x-www-form-urlencoded');
- }
-
- _write(chunk, enc, cb) {
- if (this._fields >= this.fieldsLimit)
- return cb();
-
- let i = 0;
- const len = chunk.length;
- this._lastPos = 0;
-
- // Check if we last ended mid-percent-encoded byte
- if (this._byte !== -2) {
- i = readPctEnc(this, chunk, i, len);
- if (i === -1)
- return cb(new Error('Malformed urlencoded form'));
- if (i >= len)
- return cb();
- if (this._inKey)
- ++this._bytesKey;
- else
- ++this._bytesVal;
- }
-
-main:
- while (i < len) {
- if (this._inKey) {
- // Parsing key
-
- i = skipKeyBytes(this, chunk, i, len);
-
- while (i < len) {
- switch (chunk[i]) {
- case 61: // '='
- if (this._lastPos < i)
- this._key += chunk.latin1Slice(this._lastPos, i);
- this._lastPos = ++i;
- this._key = this._decoder(this._key, this._encode);
- this._encode = 0;
- this._inKey = false;
- continue main;
- case 38: // '&'
- if (this._lastPos < i)
- this._key += chunk.latin1Slice(this._lastPos, i);
- this._lastPos = ++i;
- this._key = this._decoder(this._key, this._encode);
- this._encode = 0;
- if (this._bytesKey > 0) {
- this.emit(
- 'field',
- this._key,
- '',
- { nameTruncated: this._keyTrunc,
- valueTruncated: false,
- encoding: this.charset,
- mimeType: 'text/plain' }
- );
- }
- this._key = '';
- this._val = '';
- this._keyTrunc = false;
- this._valTrunc = false;
- this._bytesKey = 0;
- this._bytesVal = 0;
- if (++this._fields >= this.fieldsLimit) {
- this.emit('fieldsLimit');
- return cb();
- }
- continue;
- case 43: // '+'
- if (this._lastPos < i)
- this._key += chunk.latin1Slice(this._lastPos, i);
- this._key += ' ';
- this._lastPos = i + 1;
- break;
- case 37: // '%'
- if (this._encode === 0)
- this._encode = 1;
- if (this._lastPos < i)
- this._key += chunk.latin1Slice(this._lastPos, i);
- this._lastPos = i + 1;
- this._byte = -1;
- i = readPctEnc(this, chunk, i + 1, len);
- if (i === -1)
- return cb(new Error('Malformed urlencoded form'));
- if (i >= len)
- return cb();
- ++this._bytesKey;
- i = skipKeyBytes(this, chunk, i, len);
- continue;
- }
- ++i;
- ++this._bytesKey;
- i = skipKeyBytes(this, chunk, i, len);
- }
- if (this._lastPos < i)
- this._key += chunk.latin1Slice(this._lastPos, i);
- } else {
- // Parsing value
-
- i = skipValBytes(this, chunk, i, len);
-
- while (i < len) {
- switch (chunk[i]) {
- case 38: // '&'
- if (this._lastPos < i)
- this._val += chunk.latin1Slice(this._lastPos, i);
- this._lastPos = ++i;
- this._inKey = true;
- this._val = this._decoder(this._val, this._encode);
- this._encode = 0;
- if (this._bytesKey > 0 || this._bytesVal > 0) {
- this.emit(
- 'field',
- this._key,
- this._val,
- { nameTruncated: this._keyTrunc,
- valueTruncated: this._valTrunc,
- encoding: this.charset,
- mimeType: 'text/plain' }
- );
- }
- this._key = '';
- this._val = '';
- this._keyTrunc = false;
- this._valTrunc = false;
- this._bytesKey = 0;
- this._bytesVal = 0;
- if (++this._fields >= this.fieldsLimit) {
- this.emit('fieldsLimit');
- return cb();
- }
- continue main;
- case 43: // '+'
- if (this._lastPos < i)
- this._val += chunk.latin1Slice(this._lastPos, i);
- this._val += ' ';
- this._lastPos = i + 1;
- break;
- case 37: // '%'
- if (this._encode === 0)
- this._encode = 1;
- if (this._lastPos < i)
- this._val += chunk.latin1Slice(this._lastPos, i);
- this._lastPos = i + 1;
- this._byte = -1;
- i = readPctEnc(this, chunk, i + 1, len);
- if (i === -1)
- return cb(new Error('Malformed urlencoded form'));
- if (i >= len)
- return cb();
- ++this._bytesVal;
- i = skipValBytes(this, chunk, i, len);
- continue;
- }
- ++i;
- ++this._bytesVal;
- i = skipValBytes(this, chunk, i, len);
- }
- if (this._lastPos < i)
- this._val += chunk.latin1Slice(this._lastPos, i);
- }
- }
-
- cb();
- }
-
- _final(cb) {
- if (this._byte !== -2)
- return cb(new Error('Malformed urlencoded form'));
- if (!this._inKey || this._bytesKey > 0 || this._bytesVal > 0) {
- if (this._inKey)
- this._key = this._decoder(this._key, this._encode);
- else
- this._val = this._decoder(this._val, this._encode);
- this.emit(
- 'field',
- this._key,
- this._val,
- { nameTruncated: this._keyTrunc,
- valueTruncated: this._valTrunc,
- encoding: this.charset,
- mimeType: 'text/plain' }
- );
- }
- cb();
- }
-}
-
-function readPctEnc(self, chunk, pos, len) {
- if (pos >= len)
- return len;
-
- if (self._byte === -1) {
- // We saw a '%' but no hex characters yet
- const hexUpper = HEX_VALUES[chunk[pos++]];
- if (hexUpper === -1)
- return -1;
-
- if (hexUpper >= 8)
- self._encode = 2; // Indicate high bits detected
-
- if (pos < len) {
- // Both hex characters are in this chunk
- const hexLower = HEX_VALUES[chunk[pos++]];
- if (hexLower === -1)
- return -1;
-
- if (self._inKey)
- self._key += String.fromCharCode((hexUpper << 4) + hexLower);
- else
- self._val += String.fromCharCode((hexUpper << 4) + hexLower);
-
- self._byte = -2;
- self._lastPos = pos;
- } else {
- // Only one hex character was available in this chunk
- self._byte = hexUpper;
- }
- } else {
- // We saw only one hex character so far
- const hexLower = HEX_VALUES[chunk[pos++]];
- if (hexLower === -1)
- return -1;
-
- if (self._inKey)
- self._key += String.fromCharCode((self._byte << 4) + hexLower);
- else
- self._val += String.fromCharCode((self._byte << 4) + hexLower);
-
- self._byte = -2;
- self._lastPos = pos;
- }
-
- return pos;
-}
-
-function skipKeyBytes(self, chunk, pos, len) {
- // Skip bytes if we've truncated
- if (self._bytesKey > self.fieldNameSizeLimit) {
- if (!self._keyTrunc) {
- if (self._lastPos < pos)
- self._key += chunk.latin1Slice(self._lastPos, pos - 1);
- }
- self._keyTrunc = true;
- for (; pos < len; ++pos) {
- const code = chunk[pos];
- if (code === 61/* '=' */ || code === 38/* '&' */)
- break;
- ++self._bytesKey;
- }
- self._lastPos = pos;
- }
-
- return pos;
-}
-
-function skipValBytes(self, chunk, pos, len) {
- // Skip bytes if we've truncated
- if (self._bytesVal > self.fieldSizeLimit) {
- if (!self._valTrunc) {
- if (self._lastPos < pos)
- self._val += chunk.latin1Slice(self._lastPos, pos - 1);
- }
- self._valTrunc = true;
- for (; pos < len; ++pos) {
- if (chunk[pos] === 38/* '&' */)
- break;
- ++self._bytesVal;
- }
- self._lastPos = pos;
- }
-
- return pos;
-}
-
-/* eslint-disable no-multi-spaces */
-const HEX_VALUES = [
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, -1, -1, -1, -1,
- -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-];
-/* eslint-enable no-multi-spaces */
-
-module.exports = URLEncoded;
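Reviewer note: the URLEncoded writable removed above handles application/x-www-form-urlencoded bodies through the same busboy() entry point. A minimal sketch of that path with an illustrative payload; the '+'-for-space and %XX percent-decoding it exercises are exactly the cases readPctEnc and the '+'/'%' switch arms above handle:

'use strict';

const busboy = require('busboy'); // vendored package being removed in this diff

const bb = busboy({
  headers: { 'content-type': 'application/x-www-form-urlencoded; charset=utf-8' }
});

bb.on('field', (name, value, info) => {
  // Expected output: "greeting = hello world" and "emoji = ✓"
  console.log(`${name} = ${value}`);
});

bb.on('close', () => console.log('done'));

// '+' decodes to a space; %E2%9C%93 is a UTF-8 percent-encoded check mark
bb.end(Buffer.from('greeting=hello+world&emoji=%E2%9C%93'));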
diff --git a/deps/undici/src/node_modules/busboy/lib/utils.js b/deps/undici/src/node_modules/busboy/lib/utils.js
deleted file mode 100644
index 8274f6c3aef47a..00000000000000
--- a/deps/undici/src/node_modules/busboy/lib/utils.js
+++ /dev/null
@@ -1,596 +0,0 @@
-'use strict';
-
-function parseContentType(str) {
- if (str.length === 0)
- return;
-
- const params = Object.create(null);
- let i = 0;
-
- // Parse type
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (TOKEN[code] !== 1) {
- if (code !== 47/* '/' */ || i === 0)
- return;
- break;
- }
- }
- // Check for type without subtype
- if (i === str.length)
- return;
-
- const type = str.slice(0, i).toLowerCase();
-
- // Parse subtype
- const subtypeStart = ++i;
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (TOKEN[code] !== 1) {
- // Make sure we have a subtype
- if (i === subtypeStart)
- return;
-
- if (parseContentTypeParams(str, i, params) === undefined)
- return;
- break;
- }
- }
- // Make sure we have a subtype
- if (i === subtypeStart)
- return;
-
- const subtype = str.slice(subtypeStart, i).toLowerCase();
-
- return { type, subtype, params };
-}
-
-function parseContentTypeParams(str, i, params) {
- while (i < str.length) {
- // Consume whitespace
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (code !== 32/* ' ' */ && code !== 9/* '\t' */)
- break;
- }
-
- // Ended on whitespace
- if (i === str.length)
- break;
-
- // Check for malformed parameter
- if (str.charCodeAt(i++) !== 59/* ';' */)
- return;
-
- // Consume whitespace
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (code !== 32/* ' ' */ && code !== 9/* '\t' */)
- break;
- }
-
- // Ended on whitespace (malformed)
- if (i === str.length)
- return;
-
- let name;
- const nameStart = i;
- // Parse parameter name
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (TOKEN[code] !== 1) {
- if (code !== 61/* '=' */)
- return;
- break;
- }
- }
-
- // No value (malformed)
- if (i === str.length)
- return;
-
- name = str.slice(nameStart, i);
- ++i; // Skip over '='
-
- // No value (malformed)
- if (i === str.length)
- return;
-
- let value = '';
- let valueStart;
- if (str.charCodeAt(i) === 34/* '"' */) {
- valueStart = ++i;
- let escaping = false;
- // Parse quoted value
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (code === 92/* '\\' */) {
- if (escaping) {
- valueStart = i;
- escaping = false;
- } else {
- value += str.slice(valueStart, i);
- escaping = true;
- }
- continue;
- }
- if (code === 34/* '"' */) {
- if (escaping) {
- valueStart = i;
- escaping = false;
- continue;
- }
- value += str.slice(valueStart, i);
- break;
- }
- if (escaping) {
- valueStart = i - 1;
- escaping = false;
- }
- // Invalid unescaped quoted character (malformed)
- if (QDTEXT[code] !== 1)
- return;
- }
-
- // No end quote (malformed)
- if (i === str.length)
- return;
-
- ++i; // Skip over double quote
- } else {
- valueStart = i;
- // Parse unquoted value
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (TOKEN[code] !== 1) {
- // No value (malformed)
- if (i === valueStart)
- return;
- break;
- }
- }
- value = str.slice(valueStart, i);
- }
-
- name = name.toLowerCase();
- if (params[name] === undefined)
- params[name] = value;
- }
-
- return params;
-}
-
-function parseDisposition(str, defDecoder) {
- if (str.length === 0)
- return;
-
- const params = Object.create(null);
- let i = 0;
-
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (TOKEN[code] !== 1) {
- if (parseDispositionParams(str, i, params, defDecoder) === undefined)
- return;
- break;
- }
- }
-
- const type = str.slice(0, i).toLowerCase();
-
- return { type, params };
-}
-
-function parseDispositionParams(str, i, params, defDecoder) {
- while (i < str.length) {
- // Consume whitespace
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (code !== 32/* ' ' */ && code !== 9/* '\t' */)
- break;
- }
-
- // Ended on whitespace
- if (i === str.length)
- break;
-
- // Check for malformed parameter
- if (str.charCodeAt(i++) !== 59/* ';' */)
- return;
-
- // Consume whitespace
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (code !== 32/* ' ' */ && code !== 9/* '\t' */)
- break;
- }
-
- // Ended on whitespace (malformed)
- if (i === str.length)
- return;
-
- let name;
- const nameStart = i;
- // Parse parameter name
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (TOKEN[code] !== 1) {
- if (code === 61/* '=' */)
- break;
- return;
- }
- }
-
- // No value (malformed)
- if (i === str.length)
- return;
-
- let value = '';
- let valueStart;
- let charset;
- //~ let lang;
- name = str.slice(nameStart, i);
- if (name.charCodeAt(name.length - 1) === 42/* '*' */) {
- // Extended value
-
- const charsetStart = ++i;
- // Parse charset name
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (CHARSET[code] !== 1) {
- if (code !== 39/* '\'' */)
- return;
- break;
- }
- }
-
- // Incomplete charset (malformed)
- if (i === str.length)
- return;
-
- charset = str.slice(charsetStart, i);
- ++i; // Skip over the '\''
-
- //~ const langStart = ++i;
- // Parse language name
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (code === 39/* '\'' */)
- break;
- }
-
- // Incomplete language (malformed)
- if (i === str.length)
- return;
-
- //~ lang = str.slice(langStart, i);
- ++i; // Skip over the '\''
-
- // No value (malformed)
- if (i === str.length)
- return;
-
- valueStart = i;
-
- let encode = 0;
- // Parse value
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (EXTENDED_VALUE[code] !== 1) {
- if (code === 37/* '%' */) {
- let hexUpper;
- let hexLower;
- if (i + 2 < str.length
- && (hexUpper = HEX_VALUES[str.charCodeAt(i + 1)]) !== -1
- && (hexLower = HEX_VALUES[str.charCodeAt(i + 2)]) !== -1) {
- const byteVal = (hexUpper << 4) + hexLower;
- value += str.slice(valueStart, i);
- value += String.fromCharCode(byteVal);
- i += 2;
- valueStart = i + 1;
- if (byteVal >= 128)
- encode = 2;
- else if (encode === 0)
- encode = 1;
- continue;
- }
- // '%' disallowed in non-percent encoded contexts (malformed)
- return;
- }
- break;
- }
- }
-
- value += str.slice(valueStart, i);
- value = convertToUTF8(value, charset, encode);
- if (value === undefined)
- return;
- } else {
- // Non-extended value
-
- ++i; // Skip over '='
-
- // No value (malformed)
- if (i === str.length)
- return;
-
- if (str.charCodeAt(i) === 34/* '"' */) {
- valueStart = ++i;
- let escaping = false;
- // Parse quoted value
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (code === 92/* '\\' */) {
- if (escaping) {
- valueStart = i;
- escaping = false;
- } else {
- value += str.slice(valueStart, i);
- escaping = true;
- }
- continue;
- }
- if (code === 34/* '"' */) {
- if (escaping) {
- valueStart = i;
- escaping = false;
- continue;
- }
- value += str.slice(valueStart, i);
- break;
- }
- if (escaping) {
- valueStart = i - 1;
- escaping = false;
- }
- // Invalid unescaped quoted character (malformed)
- if (QDTEXT[code] !== 1)
- return;
- }
-
- // No end quote (malformed)
- if (i === str.length)
- return;
-
- ++i; // Skip over double quote
- } else {
- valueStart = i;
- // Parse unquoted value
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (TOKEN[code] !== 1) {
- // No value (malformed)
- if (i === valueStart)
- return;
- break;
- }
- }
- value = str.slice(valueStart, i);
- }
-
- value = defDecoder(value, 2);
- if (value === undefined)
- return;
- }
-
- name = name.toLowerCase();
- if (params[name] === undefined)
- params[name] = value;
- }
-
- return params;
-}
-
-function getDecoder(charset) {
- let lc;
- while (true) {
- switch (charset) {
- case 'utf-8':
- case 'utf8':
- return decoders.utf8;
- case 'latin1':
- case 'ascii': // TODO: Make these a separate, strict decoder?
- case 'us-ascii':
- case 'iso-8859-1':
- case 'iso8859-1':
- case 'iso88591':
- case 'iso_8859-1':
- case 'windows-1252':
- case 'iso_8859-1:1987':
- case 'cp1252':
- case 'x-cp1252':
- return decoders.latin1;
- case 'utf16le':
- case 'utf-16le':
- case 'ucs2':
- case 'ucs-2':
- return decoders.utf16le;
- case 'base64':
- return decoders.base64;
- default:
- if (lc === undefined) {
- lc = true;
- charset = charset.toLowerCase();
- continue;
- }
- return decoders.other.bind(charset);
- }
- }
-}
-
-const decoders = {
- utf8: (data, hint) => {
- if (data.length === 0)
- return '';
- if (typeof data === 'string') {
- // If `data` never had any percent-encoded bytes or never had any that
- // were outside of the ASCII range, then we can safely just return the
- // input since UTF-8 is ASCII compatible
- if (hint < 2)
- return data;
-
- data = Buffer.from(data, 'latin1');
- }
- return data.utf8Slice(0, data.length);
- },
-
- latin1: (data, hint) => {
- if (data.length === 0)
- return '';
- if (typeof data === 'string')
- return data;
- return data.latin1Slice(0, data.length);
- },
-
- utf16le: (data, hint) => {
- if (data.length === 0)
- return '';
- if (typeof data === 'string')
- data = Buffer.from(data, 'latin1');
- return data.ucs2Slice(0, data.length);
- },
-
- base64: (data, hint) => {
- if (data.length === 0)
- return '';
- if (typeof data === 'string')
- data = Buffer.from(data, 'latin1');
- return data.base64Slice(0, data.length);
- },
-
- other: (data, hint) => {
- if (data.length === 0)
- return '';
- if (typeof data === 'string')
- data = Buffer.from(data, 'latin1');
- try {
- const decoder = new TextDecoder(this);
- return decoder.decode(data);
- } catch {}
- },
-};
-
-function convertToUTF8(data, charset, hint) {
- const decode = getDecoder(charset);
- if (decode)
- return decode(data, hint);
-}
-
-function basename(path) {
- if (typeof path !== 'string')
- return '';
- for (let i = path.length - 1; i >= 0; --i) {
- switch (path.charCodeAt(i)) {
- case 0x2F: // '/'
- case 0x5C: // '\'
- path = path.slice(i + 1);
- return (path === '..' || path === '.' ? '' : path);
- }
- }
- return (path === '..' || path === '.' ? '' : path);
-}
-
-const TOKEN = [
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
- 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-];
-
-const QDTEXT = [
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
-];
-
-const CHARSET = [
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
- 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-];
-
-const EXTENDED_VALUE = [
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
- 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-];
-
-/* eslint-disable no-multi-spaces */
-const HEX_VALUES = [
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, -1, -1, -1, -1,
- -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-];
-/* eslint-enable no-multi-spaces */
-
-module.exports = {
- basename,
- convertToUTF8,
- getDecoder,
- parseContentType,
- parseDisposition,
-};
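Reviewer note: parseDisposition above also understands RFC 2231/5987 extended parameters (filename*=charset''percent-encoded-value), which is how the deleted charset test below turns filename*=utf-8''n%C3%A4me.txt into 'näme.txt'. A simplified standalone sketch of that decoding step — a hypothetical helper, far less strict than the table-driven parser above, which validates every byte against the CHARSET/EXTENDED_VALUE tables and routes charsets through getDecoder():

'use strict';

// Hypothetical, simplified decoder for an RFC 2231/5987 extended value such as
// "utf-8''n%C3%A4me.txt". Not the module's implementation, just the idea.
function decodeExtendedValue(extValue) {
  const firstQuote = extValue.indexOf("'");
  const secondQuote = extValue.indexOf("'", firstQuote + 1);
  if (firstQuote === -1 || secondQuote === -1)
    return undefined; // malformed

  const charset = extValue.slice(0, firstQuote).toLowerCase();
  // The language tag between the quotes is ignored, as in parseDispositionParams
  const encoded = extValue.slice(secondQuote + 1);

  // Collect percent-decoded bytes, then decode them in the declared charset
  const bytes = [];
  for (let i = 0; i < encoded.length; ++i) {
    if (encoded[i] === '%') {
      bytes.push(parseInt(encoded.slice(i + 1, i + 3), 16));
      i += 2;
    } else {
      bytes.push(encoded.charCodeAt(i));
    }
  }
  return new TextDecoder(charset).decode(Uint8Array.from(bytes));
}

console.log(decodeExtendedValue("utf-8''n%C3%A4me.txt")); // => 'näme.txt'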
diff --git a/deps/undici/src/node_modules/busboy/package.json b/deps/undici/src/node_modules/busboy/package.json
deleted file mode 100644
index ac2577fe2c5873..00000000000000
--- a/deps/undici/src/node_modules/busboy/package.json
+++ /dev/null
@@ -1,22 +0,0 @@
-{ "name": "busboy",
- "version": "1.6.0",
- "author": "Brian White ",
- "description": "A streaming parser for HTML form data for node.js",
- "main": "./lib/index.js",
- "dependencies": {
- "streamsearch": "^1.1.0"
- },
- "devDependencies": {
- "@mscdex/eslint-config": "^1.1.0",
- "eslint": "^7.32.0"
- },
- "scripts": {
- "test": "node test/test.js",
- "lint": "eslint --cache --report-unused-disable-directives --ext=.js .eslintrc.js lib test bench",
- "lint:fix": "npm run lint -- --fix"
- },
- "engines": { "node": ">=10.16.0" },
- "keywords": [ "uploads", "forms", "multipart", "form-data" ],
- "licenses": [ { "type": "MIT", "url": "http://github.com/mscdex/busboy/raw/master/LICENSE" } ],
- "repository": { "type": "git", "url": "http://github.com/mscdex/busboy.git" }
-}
diff --git a/deps/undici/src/node_modules/busboy/test/common.js b/deps/undici/src/node_modules/busboy/test/common.js
deleted file mode 100644
index fb82ad81b1b9ef..00000000000000
--- a/deps/undici/src/node_modules/busboy/test/common.js
+++ /dev/null
@@ -1,109 +0,0 @@
-'use strict';
-
-const assert = require('assert');
-const { inspect } = require('util');
-
-const mustCallChecks = [];
-
-function noop() {}
-
-function runCallChecks(exitCode) {
- if (exitCode !== 0) return;
-
- const failed = mustCallChecks.filter((context) => {
- if ('minimum' in context) {
- context.messageSegment = `at least ${context.minimum}`;
- return context.actual < context.minimum;
- }
- context.messageSegment = `exactly ${context.exact}`;
- return context.actual !== context.exact;
- });
-
- failed.forEach((context) => {
- console.error('Mismatched %s function calls. Expected %s, actual %d.',
- context.name,
- context.messageSegment,
- context.actual);
- console.error(context.stack.split('\n').slice(2).join('\n'));
- });
-
- if (failed.length)
- process.exit(1);
-}
-
-function mustCall(fn, exact) {
- return _mustCallInner(fn, exact, 'exact');
-}
-
-function mustCallAtLeast(fn, minimum) {
- return _mustCallInner(fn, minimum, 'minimum');
-}
-
-function _mustCallInner(fn, criteria = 1, field) {
- if (process._exiting)
- throw new Error('Cannot use common.mustCall*() in process exit handler');
-
- if (typeof fn === 'number') {
- criteria = fn;
- fn = noop;
- } else if (fn === undefined) {
- fn = noop;
- }
-
- if (typeof criteria !== 'number')
- throw new TypeError(`Invalid ${field} value: ${criteria}`);
-
- const context = {
- [field]: criteria,
- actual: 0,
- stack: inspect(new Error()),
- name: fn.name || ''
- };
-
- // Add the exit listener only once to avoid listener leak warnings
- if (mustCallChecks.length === 0)
- process.on('exit', runCallChecks);
-
- mustCallChecks.push(context);
-
- function wrapped(...args) {
- ++context.actual;
- return fn.call(this, ...args);
- }
- // TODO: remove origFn?
- wrapped.origFn = fn;
-
- return wrapped;
-}
-
-function getCallSite(top) {
- const originalStackFormatter = Error.prepareStackTrace;
- Error.prepareStackTrace = (err, stack) =>
- `${stack[0].getFileName()}:${stack[0].getLineNumber()}`;
- const err = new Error();
- Error.captureStackTrace(err, top);
- // With the V8 Error API, the stack is not formatted until it is accessed
- // eslint-disable-next-line no-unused-expressions
- err.stack;
- Error.prepareStackTrace = originalStackFormatter;
- return err.stack;
-}
-
-function mustNotCall(msg) {
- const callSite = getCallSite(mustNotCall);
- return function mustNotCall(...args) {
- args = args.map(inspect).join(', ');
- const argsInfo = (args.length > 0
- ? `\ncalled with arguments: ${args}`
- : '');
- assert.fail(
- `${msg || 'function should not have been called'} at ${callSite}`
- + argsInfo);
- };
-}
-
-module.exports = {
- mustCall,
- mustCallAtLeast,
- mustNotCall,
-};
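Reviewer note: these helpers mirror Node's own test/common mustCall utilities; the process 'exit' handler tallies actual calls against the expected count and fails the run on a mismatch. A small usage sketch, assuming the script sits in busboy's test directory next to the deleted common.js (the emitter is illustrative):

'use strict';

const { EventEmitter } = require('events');
const { mustCall, mustNotCall } = require('./common.js');

const emitter = new EventEmitter();

// Passes only if 'done' fires exactly twice and 'error' never fires
emitter.on('done', mustCall(() => {}, 2));
emitter.on('error', mustNotCall('unexpected error event'));

emitter.emit('done');
emitter.emit('done');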
diff --git a/deps/undici/src/node_modules/busboy/test/test-types-multipart-charsets.js b/deps/undici/src/node_modules/busboy/test/test-types-multipart-charsets.js
deleted file mode 100644
index ed9c38aeb6c1f3..00000000000000
--- a/deps/undici/src/node_modules/busboy/test/test-types-multipart-charsets.js
+++ /dev/null
@@ -1,94 +0,0 @@
-'use strict';
-
-const assert = require('assert');
-const { inspect } = require('util');
-
-const { mustCall } = require(`${__dirname}/common.js`);
-
-const busboy = require('..');
-
-const input = Buffer.from([
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_0"; filename="テスト.dat"',
- 'Content-Type: application/octet-stream',
- '',
- 'A'.repeat(1023),
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
-].join('\r\n'));
-const boundary = '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k';
-const expected = [
- { type: 'file',
- name: 'upload_file_0',
- data: Buffer.from('A'.repeat(1023)),
- info: {
- filename: 'テスト.dat',
- encoding: '7bit',
- mimeType: 'application/octet-stream',
- },
- limited: false,
- },
-];
-const bb = busboy({
- defParamCharset: 'utf8',
- headers: {
- 'content-type': `multipart/form-data; boundary=${boundary}`,
- }
-});
-const results = [];
-
-bb.on('field', (name, val, info) => {
- results.push({ type: 'field', name, val, info });
-});
-
-bb.on('file', (name, stream, info) => {
- const data = [];
- let nb = 0;
- const file = {
- type: 'file',
- name,
- data: null,
- info,
- limited: false,
- };
- results.push(file);
- stream.on('data', (d) => {
- data.push(d);
- nb += d.length;
- }).on('limit', () => {
- file.limited = true;
- }).on('close', () => {
- file.data = Buffer.concat(data, nb);
- assert.strictEqual(stream.truncated, file.limited);
- }).once('error', (err) => {
- file.err = err.message;
- });
-});
-
-bb.on('error', (err) => {
- results.push({ error: err.message });
-});
-
-bb.on('partsLimit', () => {
- results.push('partsLimit');
-});
-
-bb.on('filesLimit', () => {
- results.push('filesLimit');
-});
-
-bb.on('fieldsLimit', () => {
- results.push('fieldsLimit');
-});
-
-bb.on('close', mustCall(() => {
- assert.deepStrictEqual(
- results,
- expected,
- 'Results mismatch.\n'
- + `Parsed: ${inspect(results)}\n`
- + `Expected: ${inspect(expected)}`
- );
-}));
-
-bb.end(input);
diff --git a/deps/undici/src/node_modules/busboy/test/test-types-multipart-stream-pause.js b/deps/undici/src/node_modules/busboy/test/test-types-multipart-stream-pause.js
deleted file mode 100644
index df7268a4b17f73..00000000000000
--- a/deps/undici/src/node_modules/busboy/test/test-types-multipart-stream-pause.js
+++ /dev/null
@@ -1,102 +0,0 @@
-'use strict';
-
-const assert = require('assert');
-const { randomFillSync } = require('crypto');
-const { inspect } = require('util');
-
-const busboy = require('..');
-
-const { mustCall } = require('./common.js');
-
-const BOUNDARY = 'u2KxIV5yF1y+xUspOQCCZopaVgeV6Jxihv35XQJmuTx8X3sh';
-
-function formDataSection(key, value) {
- return Buffer.from(
- `\r\n--${BOUNDARY}`
- + `\r\nContent-Disposition: form-data; name="${key}"`
- + `\r\n\r\n${value}`
- );
-}
-
-function formDataFile(key, filename, contentType) {
- const buf = Buffer.allocUnsafe(100000);
- return Buffer.concat([
- Buffer.from(`\r\n--${BOUNDARY}\r\n`),
- Buffer.from(`Content-Disposition: form-data; name="${key}"`
- + `; filename="${filename}"\r\n`),
- Buffer.from(`Content-Type: ${contentType}\r\n\r\n`),
- randomFillSync(buf)
- ]);
-}
-
-const reqChunks = [
- Buffer.concat([
- formDataFile('file', 'file.bin', 'application/octet-stream'),
- formDataSection('foo', 'foo value'),
- ]),
- formDataSection('bar', 'bar value'),
- Buffer.from(`\r\n--${BOUNDARY}--\r\n`)
-];
-const bb = busboy({
- headers: {
- 'content-type': `multipart/form-data; boundary=${BOUNDARY}`
- }
-});
-const expected = [
- { type: 'file',
- name: 'file',
- info: {
- filename: 'file.bin',
- encoding: '7bit',
- mimeType: 'application/octet-stream',
- },
- },
- { type: 'field',
- name: 'foo',
- val: 'foo value',
- info: {
- nameTruncated: false,
- valueTruncated: false,
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- },
- { type: 'field',
- name: 'bar',
- val: 'bar value',
- info: {
- nameTruncated: false,
- valueTruncated: false,
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- },
-];
-const results = [];
-
-bb.on('field', (name, val, info) => {
- results.push({ type: 'field', name, val, info });
-});
-
-bb.on('file', (name, stream, info) => {
- results.push({ type: 'file', name, info });
- // Simulate a pipe where the destination is pausing (perhaps due to waiting
- // for file system write to finish)
- setTimeout(() => {
- stream.resume();
- }, 10);
-});
-
-bb.on('close', mustCall(() => {
- assert.deepStrictEqual(
- results,
- expected,
- 'Results mismatch.\n'
- + `Parsed: ${inspect(results)}\n`
- + `Expected: ${inspect(expected)}`
- );
-}));
-
-for (const chunk of reqChunks)
- bb.write(chunk);
-bb.end();
diff --git a/deps/undici/src/node_modules/busboy/test/test-types-multipart.js b/deps/undici/src/node_modules/busboy/test/test-types-multipart.js
deleted file mode 100644
index 9755642ad9060c..00000000000000
--- a/deps/undici/src/node_modules/busboy/test/test-types-multipart.js
+++ /dev/null
@@ -1,1053 +0,0 @@
-'use strict';
-
-const assert = require('assert');
-const { inspect } = require('util');
-
-const busboy = require('..');
-
-const active = new Map();
-
-const tests = [
- { source: [
- ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; name="file_name_0"',
- '',
- 'super alpha file',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; name="file_name_1"',
- '',
- 'super beta file',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_0"; filename="1k_a.dat"',
- 'Content-Type: application/octet-stream',
- '',
- 'A'.repeat(1023),
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_1"; filename="1k_b.dat"',
- 'Content-Type: application/octet-stream',
- '',
- 'B'.repeat(1023),
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
- ].join('\r\n')
- ],
- boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- expected: [
- { type: 'field',
- name: 'file_name_0',
- val: 'super alpha file',
- info: {
- nameTruncated: false,
- valueTruncated: false,
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- },
- { type: 'field',
- name: 'file_name_1',
- val: 'super beta file',
- info: {
- nameTruncated: false,
- valueTruncated: false,
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- },
- { type: 'file',
- name: 'upload_file_0',
- data: Buffer.from('A'.repeat(1023)),
- info: {
- filename: '1k_a.dat',
- encoding: '7bit',
- mimeType: 'application/octet-stream',
- },
- limited: false,
- },
- { type: 'file',
- name: 'upload_file_1',
- data: Buffer.from('B'.repeat(1023)),
- info: {
- filename: '1k_b.dat',
- encoding: '7bit',
- mimeType: 'application/octet-stream',
- },
- limited: false,
- },
- ],
- what: 'Fields and files'
- },
- { source: [
- ['------WebKitFormBoundaryTB2MiQ36fnSJlrhY',
- 'Content-Disposition: form-data; name="cont"',
- '',
- 'some random content',
- '------WebKitFormBoundaryTB2MiQ36fnSJlrhY',
- 'Content-Disposition: form-data; name="pass"',
- '',
- 'some random pass',
- '------WebKitFormBoundaryTB2MiQ36fnSJlrhY',
- 'Content-Disposition: form-data; name=bit',
- '',
- '2',
- '------WebKitFormBoundaryTB2MiQ36fnSJlrhY--'
- ].join('\r\n')
- ],
- boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY',
- expected: [
- { type: 'field',
- name: 'cont',
- val: 'some random content',
- info: {
- nameTruncated: false,
- valueTruncated: false,
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- },
- { type: 'field',
- name: 'pass',
- val: 'some random pass',
- info: {
- nameTruncated: false,
- valueTruncated: false,
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- },
- { type: 'field',
- name: 'bit',
- val: '2',
- info: {
- nameTruncated: false,
- valueTruncated: false,
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- },
- ],
- what: 'Fields only'
- },
- { source: [
- ''
- ],
- boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY',
- expected: [
- { error: 'Unexpected end of form' },
- ],
- what: 'No fields and no files'
- },
- { source: [
- ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; name="file_name_0"',
- '',
- 'super alpha file',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_0"; filename="1k_a.dat"',
- 'Content-Type: application/octet-stream',
- '',
- 'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
- ].join('\r\n')
- ],
- boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- limits: {
- fileSize: 13,
- fieldSize: 5
- },
- expected: [
- { type: 'field',
- name: 'file_name_0',
- val: 'super',
- info: {
- nameTruncated: false,
- valueTruncated: true,
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- },
- { type: 'file',
- name: 'upload_file_0',
- data: Buffer.from('ABCDEFGHIJKLM'),
- info: {
- filename: '1k_a.dat',
- encoding: '7bit',
- mimeType: 'application/octet-stream',
- },
- limited: true,
- },
- ],
- what: 'Fields and files (limits)'
- },
- { source: [
- ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; name="file_name_0"',
- '',
- 'super alpha file',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_0"; filename="1k_a.dat"',
- 'Content-Type: application/octet-stream',
- '',
- 'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
- ].join('\r\n')
- ],
- boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- limits: {
- files: 0
- },
- expected: [
- { type: 'field',
- name: 'file_name_0',
- val: 'super alpha file',
- info: {
- nameTruncated: false,
- valueTruncated: false,
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- },
- 'filesLimit',
- ],
- what: 'Fields and files (limits: 0 files)'
- },
- { source: [
- ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; name="file_name_0"',
- '',
- 'super alpha file',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; name="file_name_1"',
- '',
- 'super beta file',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_0"; filename="1k_a.dat"',
- 'Content-Type: application/octet-stream',
- '',
- 'A'.repeat(1023),
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_1"; filename="1k_b.dat"',
- 'Content-Type: application/octet-stream',
- '',
- 'B'.repeat(1023),
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
- ].join('\r\n')
- ],
- boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- expected: [
- { type: 'field',
- name: 'file_name_0',
- val: 'super alpha file',
- info: {
- nameTruncated: false,
- valueTruncated: false,
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- },
- { type: 'field',
- name: 'file_name_1',
- val: 'super beta file',
- info: {
- nameTruncated: false,
- valueTruncated: false,
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- },
- ],
- events: ['field'],
- what: 'Fields and (ignored) files'
- },
- { source: [
- ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_0"; filename="/tmp/1k_a.dat"',
- 'Content-Type: application/octet-stream',
- '',
- 'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_1"; filename="C:\\files\\1k_b.dat"',
- 'Content-Type: application/octet-stream',
- '',
- 'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_2"; filename="relative/1k_c.dat"',
- 'Content-Type: application/octet-stream',
- '',
- 'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
- ].join('\r\n')
- ],
- boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- expected: [
- { type: 'file',
- name: 'upload_file_0',
- data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'),
- info: {
- filename: '1k_a.dat',
- encoding: '7bit',
- mimeType: 'application/octet-stream',
- },
- limited: false,
- },
- { type: 'file',
- name: 'upload_file_1',
- data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'),
- info: {
- filename: '1k_b.dat',
- encoding: '7bit',
- mimeType: 'application/octet-stream',
- },
- limited: false,
- },
- { type: 'file',
- name: 'upload_file_2',
- data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'),
- info: {
- filename: '1k_c.dat',
- encoding: '7bit',
- mimeType: 'application/octet-stream',
- },
- limited: false,
- },
- ],
- what: 'Files with filenames containing paths'
- },
- { source: [
- ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_0"; filename="/absolute/1k_a.dat"',
- 'Content-Type: application/octet-stream',
- '',
- 'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_1"; filename="C:\\absolute\\1k_b.dat"',
- 'Content-Type: application/octet-stream',
- '',
- 'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_2"; filename="relative/1k_c.dat"',
- 'Content-Type: application/octet-stream',
- '',
- 'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
- ].join('\r\n')
- ],
- boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- preservePath: true,
- expected: [
- { type: 'file',
- name: 'upload_file_0',
- data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'),
- info: {
- filename: '/absolute/1k_a.dat',
- encoding: '7bit',
- mimeType: 'application/octet-stream',
- },
- limited: false,
- },
- { type: 'file',
- name: 'upload_file_1',
- data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'),
- info: {
- filename: 'C:\\absolute\\1k_b.dat',
- encoding: '7bit',
- mimeType: 'application/octet-stream',
- },
- limited: false,
- },
- { type: 'file',
- name: 'upload_file_2',
- data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'),
- info: {
- filename: 'relative/1k_c.dat',
- encoding: '7bit',
- mimeType: 'application/octet-stream',
- },
- limited: false,
- },
- ],
- what: 'Paths to be preserved through the preservePath option'
- },
- { source: [
- ['------WebKitFormBoundaryTB2MiQ36fnSJlrhY',
- 'Content-Disposition: form-data; name="cont"',
- 'Content-Type: ',
- '',
- 'some random content',
- '------WebKitFormBoundaryTB2MiQ36fnSJlrhY',
- 'Content-Disposition: ',
- '',
- 'some random pass',
- '------WebKitFormBoundaryTB2MiQ36fnSJlrhY--'
- ].join('\r\n')
- ],
- boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY',
- expected: [
- { type: 'field',
- name: 'cont',
- val: 'some random content',
- info: {
- nameTruncated: false,
- valueTruncated: false,
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- },
- ],
- what: 'Empty content-type and empty content-disposition'
- },
- { source: [
- ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="file"; filename*=utf-8\'\'n%C3%A4me.txt',
- 'Content-Type: application/octet-stream',
- '',
- 'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
- ].join('\r\n')
- ],
- boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- expected: [
- { type: 'file',
- name: 'file',
- data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'),
- info: {
- filename: 'näme.txt',
- encoding: '7bit',
- mimeType: 'application/octet-stream',
- },
- limited: false,
- },
- ],
- what: 'Unicode filenames'
- },
- { source: [
- ['--asdasdasdasd\r\n',
- 'Content-Type: text/plain\r\n',
- 'Content-Disposition: form-data; name="foo"\r\n',
- '\r\n',
- 'asd\r\n',
- '--asdasdasdasd--'
- ].join(':)')
- ],
- boundary: 'asdasdasdasd',
- expected: [
- { error: 'Malformed part header' },
- { error: 'Unexpected end of form' },
- ],
- what: 'Stopped mid-header'
- },
- { source: [
- ['------WebKitFormBoundaryTB2MiQ36fnSJlrhY',
- 'Content-Disposition: form-data; name="cont"',
- 'Content-Type: application/json',
- '',
- '{}',
- '------WebKitFormBoundaryTB2MiQ36fnSJlrhY--',
- ].join('\r\n')
- ],
- boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY',
- expected: [
- { type: 'field',
- name: 'cont',
- val: '{}',
- info: {
- nameTruncated: false,
- valueTruncated: false,
- encoding: '7bit',
- mimeType: 'application/json',
- },
- },
- ],
- what: 'content-type for fields'
- },
- { source: [
- '------WebKitFormBoundaryTB2MiQ36fnSJlrhY--',
- ],
- boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY',
- expected: [],
- what: 'empty form'
- },
- { source: [
- ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name=upload_file_0; filename="1k_a.dat"',
- 'Content-Type: application/octet-stream',
- 'Content-Transfer-Encoding: binary',
- '',
- '',
- ].join('\r\n')
- ],
- boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- expected: [
- { type: 'file',
- name: 'upload_file_0',
- data: Buffer.alloc(0),
- info: {
- filename: '1k_a.dat',
- encoding: 'binary',
- mimeType: 'application/octet-stream',
- },
- limited: false,
- err: 'Unexpected end of form',
- },
- { error: 'Unexpected end of form' },
- ],
- what: 'Stopped mid-file #1'
- },
- { source: [
- ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name=upload_file_0; filename="1k_a.dat"',
- 'Content-Type: application/octet-stream',
- '',
- 'a',
- ].join('\r\n')
- ],
- boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- expected: [
- { type: 'file',
- name: 'upload_file_0',
- data: Buffer.from('a'),
- info: {
- filename: '1k_a.dat',
- encoding: '7bit',
- mimeType: 'application/octet-stream',
- },
- limited: false,
- err: 'Unexpected end of form',
- },
- { error: 'Unexpected end of form' },
- ],
- what: 'Stopped mid-file #2'
- },
- { source: [
- ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_0"; filename="notes.txt"',
- 'Content-Type: text/plain; charset=utf8',
- '',
- 'a',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--',
- ].join('\r\n')
- ],
- boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- expected: [
- { type: 'file',
- name: 'upload_file_0',
- data: Buffer.from('a'),
- info: {
- filename: 'notes.txt',
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- limited: false,
- },
- ],
- what: 'Text file with charset'
- },
- { source: [
- ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_0"; filename="notes.txt"',
- 'Content-Type: ',
- ' text/plain; charset=utf8',
- '',
- 'a',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--',
- ].join('\r\n')
- ],
- boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- expected: [
- { type: 'file',
- name: 'upload_file_0',
- data: Buffer.from('a'),
- info: {
- filename: 'notes.txt',
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- limited: false,
- },
- ],
- what: 'Folded header value'
- },
- { source: [
- ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Type: text/plain; charset=utf8',
- '',
- 'a',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--',
- ].join('\r\n')
- ],
- boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- expected: [],
- what: 'No Content-Disposition'
- },
- { source: [
- ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; name="file_name_0"',
- '',
- 'a'.repeat(64 * 1024),
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_0"; filename="notes.txt"',
- 'Content-Type: ',
- ' text/plain; charset=utf8',
- '',
- 'bc',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--',
- ].join('\r\n')
- ],
- boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- limits: {
- fieldSize: Infinity,
- },
- expected: [
- { type: 'file',
- name: 'upload_file_0',
- data: Buffer.from('bc'),
- info: {
- filename: 'notes.txt',
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- limited: false,
- },
- ],
- events: [ 'file' ],
- what: 'Skip field parts if no listener'
- },
- { source: [
- ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; name="file_name_0"',
- '',
- 'a',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_0"; filename="notes.txt"',
- 'Content-Type: ',
- ' text/plain; charset=utf8',
- '',
- 'bc',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--',
- ].join('\r\n')
- ],
- boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- limits: {
- parts: 1,
- },
- expected: [
- { type: 'field',
- name: 'file_name_0',
- val: 'a',
- info: {
- nameTruncated: false,
- valueTruncated: false,
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- },
- 'partsLimit',
- ],
- what: 'Parts limit'
- },
- { source: [
- ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; name="file_name_0"',
- '',
- 'a',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; name="file_name_1"',
- '',
- 'b',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--',
- ].join('\r\n')
- ],
- boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- limits: {
- fields: 1,
- },
- expected: [
- { type: 'field',
- name: 'file_name_0',
- val: 'a',
- info: {
- nameTruncated: false,
- valueTruncated: false,
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- },
- 'fieldsLimit',
- ],
- what: 'Fields limit'
- },
- { source: [
- ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_0"; filename="notes.txt"',
- 'Content-Type: text/plain; charset=utf8',
- '',
- 'ab',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_1"; filename="notes2.txt"',
- 'Content-Type: text/plain; charset=utf8',
- '',
- 'cd',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--',
- ].join('\r\n')
- ],
- boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- limits: {
- files: 1,
- },
- expected: [
- { type: 'file',
- name: 'upload_file_0',
- data: Buffer.from('ab'),
- info: {
- filename: 'notes.txt',
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- limited: false,
- },
- 'filesLimit',
- ],
- what: 'Files limit'
- },
- { source: [
- ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + `name="upload_file_0"; filename="${'a'.repeat(64 * 1024)}.txt"`,
- 'Content-Type: text/plain; charset=utf8',
- '',
- 'ab',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_1"; filename="notes2.txt"',
- 'Content-Type: text/plain; charset=utf8',
- '',
- 'cd',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--',
- ].join('\r\n')
- ],
- boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- expected: [
- { error: 'Malformed part header' },
- { type: 'file',
- name: 'upload_file_1',
- data: Buffer.from('cd'),
- info: {
- filename: 'notes2.txt',
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- limited: false,
- },
- ],
- what: 'Oversized part header'
- },
- { source: [
- ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + 'name="upload_file_0"; filename="notes.txt"',
- 'Content-Type: text/plain; charset=utf8',
- '',
- 'a'.repeat(31) + '\r',
- ].join('\r\n'),
- 'b'.repeat(40),
- '\r\n-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--',
- ],
- boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- fileHwm: 32,
- expected: [
- { type: 'file',
- name: 'upload_file_0',
- data: Buffer.from('a'.repeat(31) + '\r' + 'b'.repeat(40)),
- info: {
- filename: 'notes.txt',
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- limited: false,
- },
- ],
- what: 'Lookbehind data should not stall file streams'
- },
- { source: [
- ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + `name="upload_file_0"; filename="${'a'.repeat(8 * 1024)}.txt"`,
- 'Content-Type: text/plain; charset=utf8',
- '',
- 'ab',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + `name="upload_file_1"; filename="${'b'.repeat(8 * 1024)}.txt"`,
- 'Content-Type: text/plain; charset=utf8',
- '',
- 'cd',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- 'Content-Disposition: form-data; '
- + `name="upload_file_2"; filename="${'c'.repeat(8 * 1024)}.txt"`,
- 'Content-Type: text/plain; charset=utf8',
- '',
- 'ef',
- '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--',
- ].join('\r\n')
- ],
- boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
- expected: [
- { type: 'file',
- name: 'upload_file_0',
- data: Buffer.from('ab'),
- info: {
- filename: `${'a'.repeat(8 * 1024)}.txt`,
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- limited: false,
- },
- { type: 'file',
- name: 'upload_file_1',
- data: Buffer.from('cd'),
- info: {
- filename: `${'b'.repeat(8 * 1024)}.txt`,
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- limited: false,
- },
- { type: 'file',
- name: 'upload_file_2',
- data: Buffer.from('ef'),
- info: {
- filename: `${'c'.repeat(8 * 1024)}.txt`,
- encoding: '7bit',
- mimeType: 'text/plain',
- },
- limited: false,
- },
- ],
- what: 'Header size limit should be per part'
- },
- { source: [
- '\r\n--d1bf46b3-aa33-4061-b28d-6c5ced8b08ee\r\n',
- 'Content-Type: application/gzip\r\n'
- + 'Content-Encoding: gzip\r\n'
- + 'Content-Disposition: form-data; name=batch-1; filename=batch-1'
- + '\r\n\r\n',
- '\r\n--d1bf46b3-aa33-4061-b28d-6c5ced8b08ee--',
- ],
- boundary: 'd1bf46b3-aa33-4061-b28d-6c5ced8b08ee',
- expected: [
- { type: 'file',
- name: 'batch-1',
- data: Buffer.alloc(0),
- info: {
- filename: 'batch-1',
- encoding: '7bit',
- mimeType: 'application/gzip',
- },
- limited: false,
- },
- ],
- what: 'Empty part'
- },
-];
-
-for (const test of tests) {
- active.set(test, 1);
-
- const { what, boundary, events, limits, preservePath, fileHwm } = test;
- const bb = busboy({
- fileHwm,
- limits,
- preservePath,
- headers: {
- 'content-type': `multipart/form-data; boundary=${boundary}`,
- }
- });
- const results = [];
-
- if (events === undefined || events.includes('field')) {
- bb.on('field', (name, val, info) => {
- results.push({ type: 'field', name, val, info });
- });
- }
-
- if (events === undefined || events.includes('file')) {
- bb.on('file', (name, stream, info) => {
- const data = [];
- let nb = 0;
- const file = {
- type: 'file',
- name,
- data: null,
- info,
- limited: false,
- };
- results.push(file);
- stream.on('data', (d) => {
- data.push(d);
- nb += d.length;
- }).on('limit', () => {
- file.limited = true;
- }).on('close', () => {
- file.data = Buffer.concat(data, nb);
- assert.strictEqual(stream.truncated, file.limited);
- }).once('error', (err) => {
- file.err = err.message;
- });
- });
- }
-
- bb.on('error', (err) => {
- results.push({ error: err.message });
- });
-
- bb.on('partsLimit', () => {
- results.push('partsLimit');
- });
-
- bb.on('filesLimit', () => {
- results.push('filesLimit');
- });
-
- bb.on('fieldsLimit', () => {
- results.push('fieldsLimit');
- });
-
- bb.on('close', () => {
- active.delete(test);
-
- assert.deepStrictEqual(
- results,
- test.expected,
- `[${what}] Results mismatch.\n`
- + `Parsed: ${inspect(results)}\n`
- + `Expected: ${inspect(test.expected)}`
- );
- });
-
- for (const src of test.source) {
- const buf = (typeof src === 'string' ? Buffer.from(src, 'utf8') : src);
- bb.write(buf);
- }
- bb.end();
-}
-
-// Byte-by-byte versions
-for (let test of tests) {
- test = { ...test };
- test.what += ' (byte-by-byte)';
- active.set(test, 1);
-
- const { what, boundary, events, limits, preservePath, fileHwm } = test;
- const bb = busboy({
- fileHwm,
- limits,
- preservePath,
- headers: {
- 'content-type': `multipart/form-data; boundary=${boundary}`,
- }
- });
- const results = [];
-
- if (events === undefined || events.includes('field')) {
- bb.on('field', (name, val, info) => {
- results.push({ type: 'field', name, val, info });
- });
- }
-
- if (events === undefined || events.includes('file')) {
- bb.on('file', (name, stream, info) => {
- const data = [];
- let nb = 0;
- const file = {
- type: 'file',
- name,
- data: null,
- info,
- limited: false,
- };
- results.push(file);
- stream.on('data', (d) => {
- data.push(d);
- nb += d.length;
- }).on('limit', () => {
- file.limited = true;
- }).on('close', () => {
- file.data = Buffer.concat(data, nb);
- assert.strictEqual(stream.truncated, file.limited);
- }).once('error', (err) => {
- file.err = err.message;
- });
- });
- }
-
- bb.on('error', (err) => {
- results.push({ error: err.message });
- });
-
- bb.on('partsLimit', () => {
- results.push('partsLimit');
- });
-
- bb.on('filesLimit', () => {
- results.push('filesLimit');
- });
-
- bb.on('fieldsLimit', () => {
- results.push('fieldsLimit');
- });
-
- bb.on('close', () => {
- active.delete(test);
-
- assert.deepStrictEqual(
- results,
- test.expected,
- `[${what}] Results mismatch.\n`
- + `Parsed: ${inspect(results)}\n`
- + `Expected: ${inspect(test.expected)}`
- );
- });
-
- for (const src of test.source) {
- const buf = (typeof src === 'string' ? Buffer.from(src, 'utf8') : src);
- for (let i = 0; i < buf.length; ++i)
- bb.write(buf.slice(i, i + 1));
- }
- bb.end();
-}
-
-{
- let exception = false;
- process.once('uncaughtException', (ex) => {
- exception = true;
- throw ex;
- });
- process.on('exit', () => {
- if (exception || active.size === 0)
- return;
- process.exitCode = 1;
- console.error('==========================');
- console.error(`${active.size} test(s) did not finish:`);
- console.error('==========================');
- console.error(Array.from(active.keys()).map((v) => v.what).join('\n'));
- });
-}
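
The multipart tests deleted above all drive one harness pattern: build a busboy parser from a `content-type` header carrying the boundary, write the raw body, and collect `field`/`file` events until `close`. A minimal, self-contained sketch of that pattern against the standalone `busboy` package (the boundary and part values below are illustrative, not taken from the patch):

```js
'use strict';

// Sketch only: the event-driven flow the removed tests exercised.
const busboy = require('busboy');

const boundary = 'xyz'; // illustrative boundary
const bb = busboy({
  headers: { 'content-type': `multipart/form-data; boundary=${boundary}` },
});

bb.on('field', (name, val, info) => {
  console.log('field:', name, val, info.mimeType);
});
bb.on('file', (name, stream, info) => {
  const chunks = [];
  stream.on('data', (d) => chunks.push(d));
  stream.on('close', () => console.log('file:', info.filename, Buffer.concat(chunks)));
});
bb.on('close', () => console.log('done'));

bb.write(Buffer.from([
  `--${boundary}`,
  'Content-Disposition: form-data; name="greeting"',
  '',
  'hello',
  `--${boundary}`,
  'Content-Disposition: form-data; name="upload"; filename="notes.txt"',
  'Content-Type: text/plain',
  '',
  'abc',
  `--${boundary}--`,
].join('\r\n')));
bb.end();
```
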
diff --git a/deps/undici/src/node_modules/busboy/test/test-types-urlencoded.js b/deps/undici/src/node_modules/busboy/test/test-types-urlencoded.js
deleted file mode 100644
index c35962b973f29a..00000000000000
--- a/deps/undici/src/node_modules/busboy/test/test-types-urlencoded.js
+++ /dev/null
@@ -1,488 +0,0 @@
-'use strict';
-
-const assert = require('assert');
-const { transcode } = require('buffer');
-const { inspect } = require('util');
-
-const busboy = require('..');
-
-const active = new Map();
-
-const tests = [
- { source: ['foo'],
- expected: [
- ['foo',
- '',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ],
- what: 'Unassigned value'
- },
- { source: ['foo=bar'],
- expected: [
- ['foo',
- 'bar',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ],
- what: 'Assigned value'
- },
- { source: ['foo&bar=baz'],
- expected: [
- ['foo',
- '',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ['bar',
- 'baz',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ],
- what: 'Unassigned and assigned value'
- },
- { source: ['foo=bar&baz'],
- expected: [
- ['foo',
- 'bar',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ['baz',
- '',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ],
- what: 'Assigned and unassigned value'
- },
- { source: ['foo=bar&baz=bla'],
- expected: [
- ['foo',
- 'bar',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ['baz',
- 'bla',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ],
- what: 'Two assigned values'
- },
- { source: ['foo&bar'],
- expected: [
- ['foo',
- '',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ['bar',
- '',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ],
- what: 'Two unassigned values'
- },
- { source: ['foo&bar&'],
- expected: [
- ['foo',
- '',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ['bar',
- '',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ],
- what: 'Two unassigned values and ampersand'
- },
- { source: ['foo+1=bar+baz%2Bquux'],
- expected: [
- ['foo 1',
- 'bar baz+quux',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ],
- what: 'Assigned key and value with (plus) space'
- },
- { source: ['foo=bar%20baz%21'],
- expected: [
- ['foo',
- 'bar baz!',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ],
- what: 'Assigned value with encoded bytes'
- },
- { source: ['foo%20bar=baz%20bla%21'],
- expected: [
- ['foo bar',
- 'baz bla!',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ],
- what: 'Assigned value with encoded bytes #2'
- },
- { source: ['foo=bar%20baz%21&num=1000'],
- expected: [
- ['foo',
- 'bar baz!',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ['num',
- '1000',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ],
- what: 'Two assigned values, one with encoded bytes'
- },
- { source: [
- Array.from(transcode(Buffer.from('foo'), 'utf8', 'utf16le')).map(
- (n) => `%${n.toString(16).padStart(2, '0')}`
- ).join(''),
- '=',
- Array.from(transcode(Buffer.from('😀!'), 'utf8', 'utf16le')).map(
- (n) => `%${n.toString(16).padStart(2, '0')}`
- ).join(''),
- ],
- expected: [
- ['foo',
- '😀!',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'UTF-16LE',
- mimeType: 'text/plain' },
- ],
- ],
- charset: 'UTF-16LE',
- what: 'Encoded value with multi-byte charset'
- },
- { source: [
- 'foo=<',
- Array.from(transcode(Buffer.from('©:^þ'), 'utf8', 'latin1')).map(
- (n) => `%${n.toString(16).padStart(2, '0')}`
- ).join(''),
- ],
- expected: [
- ['foo',
- '<©:^þ',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'ISO-8859-1',
- mimeType: 'text/plain' },
- ],
- ],
- charset: 'ISO-8859-1',
- what: 'Encoded value with single-byte, ASCII-compatible, non-UTF8 charset'
- },
- { source: ['foo=bar&baz=bla'],
- expected: [],
- what: 'Limits: zero fields',
- limits: { fields: 0 }
- },
- { source: ['foo=bar&baz=bla'],
- expected: [
- ['foo',
- 'bar',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ],
- what: 'Limits: one field',
- limits: { fields: 1 }
- },
- { source: ['foo=bar&baz=bla'],
- expected: [
- ['foo',
- 'bar',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ['baz',
- 'bla',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ],
- what: 'Limits: field part lengths match limits',
- limits: { fieldNameSize: 3, fieldSize: 3 }
- },
- { source: ['foo=bar&baz=bla'],
- expected: [
- ['fo',
- 'bar',
- { nameTruncated: true,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ['ba',
- 'bla',
- { nameTruncated: true,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ],
- what: 'Limits: truncated field name',
- limits: { fieldNameSize: 2 }
- },
- { source: ['foo=bar&baz=bla'],
- expected: [
- ['foo',
- 'ba',
- { nameTruncated: false,
- valueTruncated: true,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ['baz',
- 'bl',
- { nameTruncated: false,
- valueTruncated: true,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ],
- what: 'Limits: truncated field value',
- limits: { fieldSize: 2 }
- },
- { source: ['foo=bar&baz=bla'],
- expected: [
- ['fo',
- 'ba',
- { nameTruncated: true,
- valueTruncated: true,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ['ba',
- 'bl',
- { nameTruncated: true,
- valueTruncated: true,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ],
- what: 'Limits: truncated field name and value',
- limits: { fieldNameSize: 2, fieldSize: 2 }
- },
- { source: ['foo=bar&baz=bla'],
- expected: [
- ['fo',
- '',
- { nameTruncated: true,
- valueTruncated: true,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ['ba',
- '',
- { nameTruncated: true,
- valueTruncated: true,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ],
- what: 'Limits: truncated field name and zero value limit',
- limits: { fieldNameSize: 2, fieldSize: 0 }
- },
- { source: ['foo=bar&baz=bla'],
- expected: [
- ['',
- '',
- { nameTruncated: true,
- valueTruncated: true,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ['',
- '',
- { nameTruncated: true,
- valueTruncated: true,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ],
- what: 'Limits: truncated zero field name and zero value limit',
- limits: { fieldNameSize: 0, fieldSize: 0 }
- },
- { source: ['&'],
- expected: [],
- what: 'Ampersand'
- },
- { source: ['&&&&&'],
- expected: [],
- what: 'Many ampersands'
- },
- { source: ['='],
- expected: [
- ['',
- '',
- { nameTruncated: false,
- valueTruncated: false,
- encoding: 'utf-8',
- mimeType: 'text/plain' },
- ],
- ],
- what: 'Assigned value, empty name and value'
- },
- { source: [''],
- expected: [],
- what: 'Nothing'
- },
-];
-
-for (const test of tests) {
- active.set(test, 1);
-
- const { what } = test;
- const charset = test.charset || 'utf-8';
- const bb = busboy({
- limits: test.limits,
- headers: {
- 'content-type': `application/x-www-form-urlencoded; charset=${charset}`,
- },
- });
- const results = [];
-
- bb.on('field', (key, val, info) => {
- results.push([key, val, info]);
- });
-
- bb.on('file', () => {
- throw new Error(`[${what}] Unexpected file`);
- });
-
- bb.on('close', () => {
- active.delete(test);
-
- assert.deepStrictEqual(
- results,
- test.expected,
- `[${what}] Results mismatch.\n`
- + `Parsed: ${inspect(results)}\n`
- + `Expected: ${inspect(test.expected)}`
- );
- });
-
- for (const src of test.source) {
- const buf = (typeof src === 'string' ? Buffer.from(src, 'utf8') : src);
- bb.write(buf);
- }
- bb.end();
-}
-
-// Byte-by-byte versions
-for (let test of tests) {
- test = { ...test };
- test.what += ' (byte-by-byte)';
- active.set(test, 1);
-
- const { what } = test;
- const charset = test.charset || 'utf-8';
- const bb = busboy({
- limits: test.limits,
- headers: {
- 'content-type': `application/x-www-form-urlencoded; charset="${charset}"`,
- },
- });
- const results = [];
-
- bb.on('field', (key, val, info) => {
- results.push([key, val, info]);
- });
-
- bb.on('file', () => {
- throw new Error(`[${what}] Unexpected file`);
- });
-
- bb.on('close', () => {
- active.delete(test);
-
- assert.deepStrictEqual(
- results,
- test.expected,
- `[${what}] Results mismatch.\n`
- + `Parsed: ${inspect(results)}\n`
- + `Expected: ${inspect(test.expected)}`
- );
- });
-
- for (const src of test.source) {
- const buf = (typeof src === 'string' ? Buffer.from(src, 'utf8') : src);
- for (let i = 0; i < buf.length; ++i)
- bb.write(buf.slice(i, i + 1));
- }
- bb.end();
-}
-
-{
- let exception = false;
- process.once('uncaughtException', (ex) => {
- exception = true;
- throw ex;
- });
- process.on('exit', () => {
- if (exception || active.size === 0)
- return;
- process.exitCode = 1;
- console.error('==========================');
- console.error(`${active.size} test(s) did not finish:`);
- console.error('==========================');
- console.error(Array.from(active.keys()).map((v) => v.what).join('\n'));
- });
-}
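
The urlencoded tests deleted above centre on the `fieldNameSize`/`fieldSize` limits and the `nameTruncated`/`valueTruncated` flags they set. A short sketch of that behaviour with the standalone `busboy` package (input and limits are illustrative):

```js
'use strict';

// Sketch only: name/value truncation as exercised by the removed tests.
const busboy = require('busboy');

const bb = busboy({
  limits: { fieldNameSize: 2, fieldSize: 3 },
  headers: { 'content-type': 'application/x-www-form-urlencoded; charset=utf-8' },
});

bb.on('field', (name, val, info) => {
  // With the limits above, 'foo=bar' arrives as name 'fo' (truncated)
  // and value 'bar' (exactly at the limit, so not truncated).
  console.log(name, val, info.nameTruncated, info.valueTruncated);
});
bb.on('close', () => console.log('done'));

bb.write(Buffer.from('foo=bar&baz=bla'));
bb.end();
```
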
diff --git a/deps/undici/src/node_modules/busboy/test/test.js b/deps/undici/src/node_modules/busboy/test/test.js
deleted file mode 100644
index d0380f29de7842..00000000000000
--- a/deps/undici/src/node_modules/busboy/test/test.js
+++ /dev/null
@@ -1,20 +0,0 @@
-'use strict';
-
-const { spawnSync } = require('child_process');
-const { readdirSync } = require('fs');
-const { join } = require('path');
-
-const files = readdirSync(__dirname).sort();
-for (const filename of files) {
- if (filename.startsWith('test-')) {
- const path = join(__dirname, filename);
- console.log(`> Running ${filename} ...`);
- const result = spawnSync(`${process.argv0} ${path}`, {
- shell: true,
- stdio: 'inherit',
- windowsHide: true
- });
- if (result.status !== 0)
- process.exitCode = 1;
- }
-}
diff --git a/deps/undici/src/node_modules/streamsearch/.eslintrc.js b/deps/undici/src/node_modules/streamsearch/.eslintrc.js
deleted file mode 100644
index be9311d02655a2..00000000000000
--- a/deps/undici/src/node_modules/streamsearch/.eslintrc.js
+++ /dev/null
@@ -1,5 +0,0 @@
-'use strict';
-
-module.exports = {
- extends: '@mscdex/eslint-config',
-};
diff --git a/deps/undici/src/node_modules/streamsearch/.github/workflows/ci.yml b/deps/undici/src/node_modules/streamsearch/.github/workflows/ci.yml
deleted file mode 100644
index 29d51782c77a93..00000000000000
--- a/deps/undici/src/node_modules/streamsearch/.github/workflows/ci.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-name: CI
-
-on:
- pull_request:
- push:
- branches: [ master ]
-
-jobs:
- tests-linux:
- runs-on: ubuntu-latest
- strategy:
- fail-fast: false
- matrix:
- node-version: [10.x, 12.x, 14.x, 16.x]
- steps:
- - uses: actions/checkout@v2
- - name: Use Node.js ${{ matrix.node-version }}
- uses: actions/setup-node@v1
- with:
- node-version: ${{ matrix.node-version }}
- - name: Install module
- run: npm install
- - name: Run tests
- run: npm test
diff --git a/deps/undici/src/node_modules/streamsearch/.github/workflows/lint.yml b/deps/undici/src/node_modules/streamsearch/.github/workflows/lint.yml
deleted file mode 100644
index 9f9e1f589a30be..00000000000000
--- a/deps/undici/src/node_modules/streamsearch/.github/workflows/lint.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-name: lint
-
-on:
- pull_request:
- push:
- branches: [ master ]
-
-env:
- NODE_VERSION: 16.x
-
-jobs:
- lint-js:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - name: Use Node.js ${{ env.NODE_VERSION }}
- uses: actions/setup-node@v1
- with:
- node-version: ${{ env.NODE_VERSION }}
- - name: Install ESLint + ESLint configs/plugins
- run: npm install --only=dev
- - name: Lint files
- run: npm run lint
diff --git a/deps/undici/src/node_modules/streamsearch/README.md b/deps/undici/src/node_modules/streamsearch/README.md
deleted file mode 100644
index c3934d1c7d5711..00000000000000
--- a/deps/undici/src/node_modules/streamsearch/README.md
+++ /dev/null
@@ -1,95 +0,0 @@
-Description
-===========
-
-streamsearch is a module for [node.js](http://nodejs.org/) that allows searching a stream using the Boyer-Moore-Horspool algorithm.
-
-This module is based heavily on the Streaming Boyer-Moore-Horspool C++ implementation by Hongli Lai [here](https://github.com/FooBarWidget/boyer-moore-horspool).
-
-
-Requirements
-============
-
-* [node.js](http://nodejs.org/) -- v10.0.0 or newer
-
-
-Installation
-============
-
- npm install streamsearch
-
-Example
-=======
-
-```js
- const { inspect } = require('util');
-
- const StreamSearch = require('streamsearch');
-
- const needle = Buffer.from('\r\n');
- const ss = new StreamSearch(needle, (isMatch, data, start, end) => {
- if (data)
- console.log('data: ' + inspect(data.toString('latin1', start, end)));
- if (isMatch)
- console.log('match!');
- });
-
- const chunks = [
- 'foo',
- ' bar',
- '\r',
- '\n',
- 'baz, hello\r',
- '\n world.',
- '\r\n Node.JS rules!!\r\n\r\n',
- ];
- for (const chunk of chunks)
- ss.push(Buffer.from(chunk));
-
- // output:
- //
- // data: 'foo'
- // data: ' bar'
- // match!
- // data: 'baz, hello'
- // match!
- // data: ' world.'
- // match!
- // data: ' Node.JS rules!!'
- // match!
- // data: ''
- // match!
-```
-
-
-API
-===
-
-Properties
-----------
-
-* **maxMatches** - < _integer_ > - The maximum number of matches. Defaults to `Infinity`.
-
-* **matches** - < _integer_ > - The current match count.
-
-
-Functions
----------
-
-* **(constructor)**(< _mixed_ >needle, < _function_ >callback) - Creates and returns a new instance for searching for a _Buffer_ or _string_ `needle`. `callback` is called any time there is non-matching data and/or there is a needle match. `callback` will be called with the following arguments:
-
- 1. `isMatch` - _boolean_ - Indicates whether a match has been found
-
- 2. `data` - _mixed_ - If set, this contains data that did not match the needle.
-
- 3. `start` - _integer_ - The index in `data` where the non-matching data begins (inclusive).
-
- 4. `end` - _integer_ - The index in `data` where the non-matching data ends (exclusive).
-
- 5. `isSafeData` - _boolean_ - Indicates if it is safe to store a reference to `data` (e.g. as-is or via `data.slice()`) or not, as in some cases `data` may point to a Buffer whose contents change over time.
-
-* **destroy**() - _(void)_ - Emits any last remaining unmatched data that may still be buffered and then resets internal state.
-
-* **push**(< _Buffer_ >chunk) - _integer_ - Processes `chunk`, searching for a match. The return value is the last processed index in `chunk` + 1.
-
-* **reset**() - _(void)_ - Resets internal state. Useful for when you wish to start searching a new/different stream for example.
-
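
The README removed above documents the callback contract — `(isMatch, data, start, end, isSafeData)` — and busboy applies it by using the multipart delimiter as the needle, so everything delivered between matches is one part's bytes. A small sketch under that reading (needle and input are illustrative):

```js
'use strict';

// Sketch only: splitting a stream on a delimiter with the callback API
// documented in the README above.
const StreamSearch = require('streamsearch');

const parts = [];
let current = [];

const ss = new StreamSearch(Buffer.from('\r\n--xyz'), (isMatch, data, start, end, isSafeData) => {
  if (data) {
    // Copy when isSafeData is false; the buffer may be reused by the caller.
    const piece = data.slice(start, end);
    current.push(isSafeData ? piece : Buffer.from(piece));
  }
  if (isMatch) {
    parts.push(Buffer.concat(current).toString());
    current = [];
  }
});

ss.push(Buffer.from('preamble\r\n--xyzpart one\r\n--xyzpart two'));
ss.destroy(); // flush any bytes still held in the lookbehind buffer
if (current.length) parts.push(Buffer.concat(current).toString()); // trailing data
console.log(parts); // [ 'preamble', 'part one', 'part two' ]
```
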
diff --git a/deps/undici/src/node_modules/streamsearch/lib/sbmh.js b/deps/undici/src/node_modules/streamsearch/lib/sbmh.js
deleted file mode 100644
index 510cae26e67a58..00000000000000
--- a/deps/undici/src/node_modules/streamsearch/lib/sbmh.js
+++ /dev/null
@@ -1,267 +0,0 @@
-'use strict';
-/*
- Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation
- by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool
-*/
-function memcmp(buf1, pos1, buf2, pos2, num) {
- for (let i = 0; i < num; ++i) {
- if (buf1[pos1 + i] !== buf2[pos2 + i])
- return false;
- }
- return true;
-}
-
-class SBMH {
- constructor(needle, cb) {
- if (typeof cb !== 'function')
- throw new Error('Missing match callback');
-
- if (typeof needle === 'string')
- needle = Buffer.from(needle);
- else if (!Buffer.isBuffer(needle))
- throw new Error(`Expected Buffer for needle, got ${typeof needle}`);
-
- const needleLen = needle.length;
-
- this.maxMatches = Infinity;
- this.matches = 0;
-
- this._cb = cb;
- this._lookbehindSize = 0;
- this._needle = needle;
- this._bufPos = 0;
-
- this._lookbehind = Buffer.allocUnsafe(needleLen);
-
- // Initialize occurrence table.
- this._occ = [
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen, needleLen, needleLen,
- needleLen, needleLen, needleLen, needleLen
- ];
-
- // Populate occurrence table with analysis of the needle, ignoring the last
- // letter.
- if (needleLen > 1) {
- for (let i = 0; i < needleLen - 1; ++i)
- this._occ[needle[i]] = needleLen - 1 - i;
- }
- }
-
- reset() {
- this.matches = 0;
- this._lookbehindSize = 0;
- this._bufPos = 0;
- }
-
- push(chunk, pos) {
- let result;
- if (!Buffer.isBuffer(chunk))
- chunk = Buffer.from(chunk, 'latin1');
- const chunkLen = chunk.length;
- this._bufPos = pos || 0;
- while (result !== chunkLen && this.matches < this.maxMatches)
- result = feed(this, chunk);
- return result;
- }
-
- destroy() {
- const lbSize = this._lookbehindSize;
- if (lbSize)
- this._cb(false, this._lookbehind, 0, lbSize, false);
- this.reset();
- }
-}
-
-function feed(self, data) {
- const len = data.length;
- const needle = self._needle;
- const needleLen = needle.length;
-
- // Positive: points to a position in `data`
- // pos == 3 points to data[3]
- // Negative: points to a position in the lookbehind buffer
- // pos == -2 points to lookbehind[lookbehindSize - 2]
- let pos = -self._lookbehindSize;
- const lastNeedleCharPos = needleLen - 1;
- const lastNeedleChar = needle[lastNeedleCharPos];
- const end = len - needleLen;
- const occ = self._occ;
- const lookbehind = self._lookbehind;
-
- if (pos < 0) {
- // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool
- // search with character lookup code that considers both the
- // lookbehind buffer and the current round's haystack data.
- //
- // Loop until
- // there is a match.
- // or until
- // we've moved past the position that requires the
- // lookbehind buffer. In this case we switch to the
- // optimized loop.
- // or until
- // the character to look at lies outside the haystack.
- while (pos < 0 && pos <= end) {
- const nextPos = pos + lastNeedleCharPos;
- const ch = (nextPos < 0
- ? lookbehind[self._lookbehindSize + nextPos]
- : data[nextPos]);
-
- if (ch === lastNeedleChar
- && matchNeedle(self, data, pos, lastNeedleCharPos)) {
- self._lookbehindSize = 0;
- ++self.matches;
- if (pos > -self._lookbehindSize)
- self._cb(true, lookbehind, 0, self._lookbehindSize + pos, false);
- else
- self._cb(true, undefined, 0, 0, true);
-
- return (self._bufPos = pos + needleLen);
- }
-
- pos += occ[ch];
- }
-
- // No match.
-
- // There's too few data for Boyer-Moore-Horspool to run,
- // so let's use a different algorithm to skip as much as
- // we can.
- // Forward pos until
- // the trailing part of lookbehind + data
- // looks like the beginning of the needle
- // or until
- // pos == 0
- while (pos < 0 && !matchNeedle(self, data, pos, len - pos))
- ++pos;
-
- if (pos < 0) {
- // Cut off part of the lookbehind buffer that has
- // been processed and append the entire haystack
- // into it.
- const bytesToCutOff = self._lookbehindSize + pos;
-
- if (bytesToCutOff > 0) {
- // The cut off data is guaranteed not to contain the needle.
- self._cb(false, lookbehind, 0, bytesToCutOff, false);
- }
-
- self._lookbehindSize -= bytesToCutOff;
- lookbehind.copy(lookbehind, 0, bytesToCutOff, self._lookbehindSize);
- lookbehind.set(data, self._lookbehindSize);
- self._lookbehindSize += len;
-
- self._bufPos = len;
- return len;
- }
-
- // Discard lookbehind buffer.
- self._cb(false, lookbehind, 0, self._lookbehindSize, false);
- self._lookbehindSize = 0;
- }
-
- pos += self._bufPos;
-
- const firstNeedleChar = needle[0];
-
- // Lookbehind buffer is now empty. Perform Boyer-Moore-Horspool
- // search with optimized character lookup code that only considers
- // the current round's haystack data.
- while (pos <= end) {
- const ch = data[pos + lastNeedleCharPos];
-
- if (ch === lastNeedleChar
- && data[pos] === firstNeedleChar
- && memcmp(needle, 0, data, pos, lastNeedleCharPos)) {
- ++self.matches;
- if (pos > 0)
- self._cb(true, data, self._bufPos, pos, true);
- else
- self._cb(true, undefined, 0, 0, true);
-
- return (self._bufPos = pos + needleLen);
- }
-
- pos += occ[ch];
- }
-
- // There was no match. If there's trailing haystack data that we cannot
- // match yet using the Boyer-Moore-Horspool algorithm (because the trailing
- // data is less than the needle size) then match using a modified
- // algorithm that starts matching from the beginning instead of the end.
- // Whatever trailing data is left after running this algorithm is added to
- // the lookbehind buffer.
- while (pos < len) {
- if (data[pos] !== firstNeedleChar
- || !memcmp(data, pos, needle, 0, len - pos)) {
- ++pos;
- continue;
- }
- data.copy(lookbehind, 0, pos, len);
- self._lookbehindSize = len - pos;
- break;
- }
-
- // Everything until `pos` is guaranteed not to contain needle data.
- if (pos > 0)
- self._cb(false, data, self._bufPos, pos < len ? pos : len, true);
-
- self._bufPos = len;
- return len;
-}
-
-function matchNeedle(self, data, pos, len) {
- const lb = self._lookbehind;
- const lbSize = self._lookbehindSize;
- const needle = self._needle;
-
- for (let i = 0; i < len; ++i, ++pos) {
- const ch = (pos < 0 ? lb[lbSize + pos] : data[pos]);
- if (ch !== needle[i])
- return false;
- }
- return true;
-}
-
-module.exports = SBMH;
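
The deleted `sbmh.js` writes its 256-entry occurrence table out by hand; the rule it encodes is compact: a byte absent from the needle lets the scan skip a whole needle length, while a byte that occurs in the needle (anywhere but its last position) shifts by its distance from the end. A standalone restatement, for illustration only:

```js
'use strict';

// Illustrative restatement of the Boyer-Moore-Horspool shift table built in
// the deleted sbmh.js constructor (same rule, compact form).
function buildOccTable(needle) {
  const occ = new Array(256).fill(needle.length);
  for (let i = 0; i < needle.length - 1; ++i) {
    occ[needle[i]] = needle.length - 1 - i;
  }
  return occ;
}

const needle = Buffer.from('\r\n--xyz'); // illustrative delimiter needle
const occ = buildOccTable(needle);

console.log(occ['q'.charCodeAt(0)]); // 7 -> 'q' never occurs, skip a full needle length
console.log(occ['x'.charCodeAt(0)]); // 2 -> 'x' sits two bytes before the needle's end
console.log(occ['-'.charCodeAt(0)]); // 3 -> for repeated bytes, the later occurrence wins
```
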
diff --git a/deps/undici/src/node_modules/streamsearch/package.json b/deps/undici/src/node_modules/streamsearch/package.json
deleted file mode 100644
index 51df8f9707cebd..00000000000000
--- a/deps/undici/src/node_modules/streamsearch/package.json
+++ /dev/null
@@ -1,34 +0,0 @@
-{
- "name": "streamsearch",
- "version": "1.1.0",
- "author": "Brian White ",
- "description": "Streaming Boyer-Moore-Horspool searching for node.js",
- "main": "./lib/sbmh.js",
- "engines": {
- "node": ">=10.0.0"
- },
- "devDependencies": {
- "@mscdex/eslint-config": "^1.1.0",
- "eslint": "^7.32.0"
- },
- "scripts": {
- "test": "node test/test.js",
- "lint": "eslint --cache --report-unused-disable-directives --ext=.js .eslintrc.js lib test",
- "lint:fix": "npm run lint -- --fix"
- },
- "keywords": [
- "stream",
- "horspool",
- "boyer-moore-horspool",
- "boyer-moore",
- "search"
- ],
- "licenses": [{
- "type": "MIT",
- "url": "http://github.com/mscdex/streamsearch/raw/master/LICENSE"
- }],
- "repository": {
- "type": "git",
- "url": "http://github.com/mscdex/streamsearch.git"
- }
-}
diff --git a/deps/undici/src/node_modules/streamsearch/test/test.js b/deps/undici/src/node_modules/streamsearch/test/test.js
deleted file mode 100644
index 39a04d7f834bea..00000000000000
--- a/deps/undici/src/node_modules/streamsearch/test/test.js
+++ /dev/null
@@ -1,70 +0,0 @@
-'use strict';
-
-const assert = require('assert');
-
-const StreamSearch = require('../lib/sbmh.js');
-
-[
- {
- needle: '\r\n',
- chunks: [
- 'foo',
- ' bar',
- '\r',
- '\n',
- 'baz, hello\r',
- '\n world.',
- '\r\n Node.JS rules!!\r\n\r\n',
- ],
- expect: [
- [false, 'foo'],
- [false, ' bar'],
- [ true, null],
- [false, 'baz, hello'],
- [ true, null],
- [false, ' world.'],
- [ true, null],
- [ true, ' Node.JS rules!!'],
- [ true, ''],
- ],
- },
- {
- needle: '---foobarbaz',
- chunks: [
- '---foobarbaz',
- 'asdf',
- '\r\n',
- '---foobarba',
- '---foobar',
- 'ba',
- '\r\n---foobarbaz--\r\n',
- ],
- expect: [
- [ true, null],
- [false, 'asdf'],
- [false, '\r\n'],
- [false, '---foobarba'],
- [false, '---foobarba'],
- [ true, '\r\n'],
- [false, '--\r\n'],
- ],
- },
-].forEach((test, i) => {
- console.log(`Running test #${i + 1}`);
- const { needle, chunks, expect } = test;
-
- const results = [];
- const ss = new StreamSearch(Buffer.from(needle),
- (isMatch, data, start, end) => {
- if (data)
- data = data.toString('latin1', start, end);
- else
- data = null;
- results.push([isMatch, data]);
- });
-
- for (const chunk of chunks)
- ss.push(Buffer.from(chunk));
-
- assert.deepStrictEqual(results, expect);
-});
diff --git a/deps/undici/src/package.json b/deps/undici/src/package.json
index 3846b9dc3988c5..1d2032652aeb4f 100644
--- a/deps/undici/src/package.json
+++ b/deps/undici/src/package.json
@@ -1,6 +1,6 @@
{
"name": "undici",
- "version": "5.25.2",
+ "version": "5.25.4",
"description": "An HTTP/1.1 client, written from scratch for Node.js",
"homepage": "https://undici.nodejs.org",
"bugs": {
@@ -93,7 +93,6 @@
"bench:run": "CONNECTIONS=1 node benchmarks/benchmark.js; CONNECTIONS=50 node benchmarks/benchmark.js",
"serve:website": "docsify serve .",
"prepare": "husky install",
- "postpublish": "node scripts/update-undici-types-version.js && cd types && npm publish",
"fuzz": "jsfuzz test/fuzzing/fuzz.js corpus"
},
"devDependencies": {
@@ -161,6 +160,6 @@
]
},
"dependencies": {
- "busboy": "^1.6.0"
+ "@fastify/busboy": "^2.0.0"
}
}
diff --git a/deps/undici/src/types/package.json b/deps/undici/src/types/package.json
deleted file mode 100644
index 16bf97c4ddf83c..00000000000000
--- a/deps/undici/src/types/package.json
+++ /dev/null
@@ -1,55 +0,0 @@
-{
- "name": "undici-types",
- "version": "5.25.1",
- "description": "A stand-alone types package for Undici",
- "homepage": "https://undici.nodejs.org",
- "bugs": {
- "url": "https://github.com/nodejs/undici/issues"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/nodejs/undici.git"
- },
- "license": "MIT",
- "types": "index.d.ts",
- "files": [
- "*.d.ts"
- ],
- "contributors": [
- {
- "name": "Daniele Belardi",
- "url": "https://github.com/dnlup",
- "author": true
- },
- {
- "name": "Ethan Arrowood",
- "url": "https://github.com/ethan-arrowood",
- "author": true
- },
- {
- "name": "Matteo Collina",
- "url": "https://github.com/mcollina",
- "author": true
- },
- {
- "name": "Matthew Aitken",
- "url": "https://github.com/KhafraDev",
- "author": true
- },
- {
- "name": "Robert Nagy",
- "url": "https://github.com/ronag",
- "author": true
- },
- {
- "name": "Szymon Marczak",
- "url": "https://github.com/szmarczak",
- "author": true
- },
- {
- "name": "Tomas Della Vedova",
- "url": "https://github.com/delvedor",
- "author": true
- }
- ]
-}
\ No newline at end of file
diff --git a/deps/undici/undici.js b/deps/undici/undici.js
index cd6308f9f3cc2d..db87da4137a247 100644
--- a/deps/undici/undici.js
+++ b/deps/undici/undici.js
@@ -331,25 +331,25 @@ var require_util = __commonJS({
if (!url || typeof url !== "object") {
throw new InvalidArgumentError("Invalid URL: The URL argument must be a non-null object.");
}
- if (url.port != null && url.port !== "" && !Number.isFinite(parseInt(url.port))) {
- throw new InvalidArgumentError("Invalid URL: port must be a valid integer or a string representation of an integer.");
- }
- if (url.path != null && typeof url.path !== "string") {
- throw new InvalidArgumentError("Invalid URL path: the path must be a string or null/undefined.");
- }
- if (url.pathname != null && typeof url.pathname !== "string") {
- throw new InvalidArgumentError("Invalid URL pathname: the pathname must be a string or null/undefined.");
- }
- if (url.hostname != null && typeof url.hostname !== "string") {
- throw new InvalidArgumentError("Invalid URL hostname: the hostname must be a string or null/undefined.");
- }
- if (url.origin != null && typeof url.origin !== "string") {
- throw new InvalidArgumentError("Invalid URL origin: the origin must be a string or null/undefined.");
- }
if (!/^https?:/.test(url.origin || url.protocol)) {
throw new InvalidArgumentError("Invalid URL protocol: the URL must start with `http:` or `https:`.");
}
if (!(url instanceof URL)) {
+ if (url.port != null && url.port !== "" && !Number.isFinite(parseInt(url.port))) {
+ throw new InvalidArgumentError("Invalid URL: port must be a valid integer or a string representation of an integer.");
+ }
+ if (url.path != null && typeof url.path !== "string") {
+ throw new InvalidArgumentError("Invalid URL path: the path must be a string or null/undefined.");
+ }
+ if (url.pathname != null && typeof url.pathname !== "string") {
+ throw new InvalidArgumentError("Invalid URL pathname: the pathname must be a string or null/undefined.");
+ }
+ if (url.hostname != null && typeof url.hostname !== "string") {
+ throw new InvalidArgumentError("Invalid URL hostname: the hostname must be a string or null/undefined.");
+ }
+ if (url.origin != null && typeof url.origin !== "string") {
+ throw new InvalidArgumentError("Invalid URL origin: the origin must be a string or null/undefined.");
+ }
const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80;
let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`;
let path = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
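
The reordered `parseURL` hunk above has one behavioural intent: the `http:`/`https:` protocol check now runs for every input, while the per-field shape checks apply only to plain option objects, since a WHATWG `URL` instance already guarantees those invariants. A condensed, standalone mirror of that ordering (illustrative only, not undici's exported API):

```js
'use strict';

// Illustrative mirror of the validation order shown in the hunk above.
function validate(url) {
  if (!/^https?:/.test(url.origin || url.protocol)) {
    throw new Error('Invalid URL protocol: the URL must start with `http:` or `https:`.');
  }
  if (!(url instanceof URL)) {
    if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) {
      throw new Error('Invalid URL: port must be a valid integer or a string representation of an integer.');
    }
    if (url.hostname != null && typeof url.hostname !== 'string') {
      throw new Error('Invalid URL hostname: the hostname must be a string or null/undefined.');
    }
  }
  return url;
}

validate(new URL('https://example.com:8080/path'));        // URL instance: field checks skipped
validate({ protocol: 'https:', hostname: 'example.com' }); // plain object: field checks still apply
// validate({ protocol: 'ftp:', hostname: 'x' });          // throws: protocol check runs either way
```
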
@@ -848,9 +848,6 @@ var require_global = __commonJS({
return globalThis[globalOrigin];
}
function setGlobalOrigin(newOrigin) {
- if (newOrigin !== void 0 && typeof newOrigin !== "string" && !(newOrigin instanceof URL)) {
- throw new Error("Invalid base url");
- }
if (newOrigin === void 0) {
Object.defineProperty(globalThis, globalOrigin, {
value: void 0,
@@ -2083,382 +2080,649 @@ var require_headers = __commonJS({
}
});
-// node_modules/busboy/lib/utils.js
-var require_utils = __commonJS({
- "node_modules/busboy/lib/utils.js"(exports2, module2) {
+// node_modules/@fastify/busboy/deps/streamsearch/sbmh.js
+var require_sbmh = __commonJS({
+ "node_modules/@fastify/busboy/deps/streamsearch/sbmh.js"(exports2, module2) {
"use strict";
- function parseContentType(str) {
- if (str.length === 0)
- return;
- const params = /* @__PURE__ */ Object.create(null);
- let i = 0;
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (TOKEN[code] !== 1) {
- if (code !== 47 || i === 0)
- return;
- break;
- }
+ var EventEmitter = require("node:events").EventEmitter;
+ var inherits = require("node:util").inherits;
+ function SBMH(needle) {
+ if (typeof needle === "string") {
+ needle = Buffer.from(needle);
+ }
+ if (!Buffer.isBuffer(needle)) {
+ throw new TypeError("The needle has to be a String or a Buffer.");
+ }
+ const needleLength = needle.length;
+ if (needleLength === 0) {
+ throw new Error("The needle cannot be an empty String/Buffer.");
+ }
+ if (needleLength > 256) {
+ throw new Error("The needle cannot have a length bigger than 256.");
+ }
+ this.maxMatches = Infinity;
+ this.matches = 0;
+ this._occ = new Array(256).fill(needleLength);
+ this._lookbehind_size = 0;
+ this._needle = needle;
+ this._bufpos = 0;
+ this._lookbehind = Buffer.alloc(needleLength);
+ for (var i = 0; i < needleLength - 1; ++i) {
+ this._occ[needle[i]] = needleLength - 1 - i;
+ }
+ }
+ inherits(SBMH, EventEmitter);
+ SBMH.prototype.reset = function() {
+ this._lookbehind_size = 0;
+ this.matches = 0;
+ this._bufpos = 0;
+ };
+ SBMH.prototype.push = function(chunk, pos) {
+ if (!Buffer.isBuffer(chunk)) {
+ chunk = Buffer.from(chunk, "binary");
}
- if (i === str.length)
- return;
- const type = str.slice(0, i).toLowerCase();
- const subtypeStart = ++i;
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (TOKEN[code] !== 1) {
- if (i === subtypeStart)
- return;
- if (parseContentTypeParams(str, i, params) === void 0)
- return;
- break;
- }
+ const chlen = chunk.length;
+ this._bufpos = pos || 0;
+ let r;
+ while (r !== chlen && this.matches < this.maxMatches) {
+ r = this._sbmh_feed(chunk);
}
- if (i === subtypeStart)
- return;
- const subtype = str.slice(subtypeStart, i).toLowerCase();
- return { type, subtype, params };
- }
- function parseContentTypeParams(str, i, params) {
- while (i < str.length) {
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (code !== 32 && code !== 9)
- break;
- }
- if (i === str.length)
- break;
- if (str.charCodeAt(i++) !== 59)
- return;
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (code !== 32 && code !== 9)
- break;
- }
- if (i === str.length)
- return;
- let name;
- const nameStart = i;
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (TOKEN[code] !== 1) {
- if (code !== 61)
- return;
- break;
+ return r;
+ };
+ SBMH.prototype._sbmh_feed = function(data) {
+ const len = data.length;
+ const needle = this._needle;
+ const needleLength = needle.length;
+ const lastNeedleChar = needle[needleLength - 1];
+ let pos = -this._lookbehind_size;
+ let ch;
+ if (pos < 0) {
+ while (pos < 0 && pos <= len - needleLength) {
+ ch = this._sbmh_lookup_char(data, pos + needleLength - 1);
+ if (ch === lastNeedleChar && this._sbmh_memcmp(data, pos, needleLength - 1)) {
+ this._lookbehind_size = 0;
+ ++this.matches;
+ this.emit("info", true);
+ return this._bufpos = pos + needleLength;
}
+ pos += this._occ[ch];
}
- if (i === str.length)
- return;
- name = str.slice(nameStart, i);
- ++i;
- if (i === str.length)
- return;
- let value = "";
- let valueStart;
- if (str.charCodeAt(i) === 34) {
- valueStart = ++i;
- let escaping = false;
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (code === 92) {
- if (escaping) {
- valueStart = i;
- escaping = false;
- } else {
- value += str.slice(valueStart, i);
- escaping = true;
- }
- continue;
- }
- if (code === 34) {
- if (escaping) {
- valueStart = i;
- escaping = false;
- continue;
- }
- value += str.slice(valueStart, i);
- break;
- }
- if (escaping) {
- valueStart = i - 1;
- escaping = false;
- }
- if (QDTEXT[code] !== 1)
- return;
+ if (pos < 0) {
+ while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) {
+ ++pos;
}
- if (i === str.length)
- return;
- ++i;
+ }
+ if (pos >= 0) {
+ this.emit("info", false, this._lookbehind, 0, this._lookbehind_size);
+ this._lookbehind_size = 0;
} else {
- valueStart = i;
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (TOKEN[code] !== 1) {
- if (i === valueStart)
- return;
- break;
- }
+ const bytesToCutOff = this._lookbehind_size + pos;
+ if (bytesToCutOff > 0) {
+ this.emit("info", false, this._lookbehind, 0, bytesToCutOff);
}
- value = str.slice(valueStart, i);
+ this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff, this._lookbehind_size - bytesToCutOff);
+ this._lookbehind_size -= bytesToCutOff;
+ data.copy(this._lookbehind, this._lookbehind_size);
+ this._lookbehind_size += len;
+ this._bufpos = len;
+ return len;
}
- name = name.toLowerCase();
- if (params[name] === void 0)
- params[name] = value;
}
- return params;
+ pos += (pos >= 0) * this._bufpos;
+ if (data.indexOf(needle, pos) !== -1) {
+ pos = data.indexOf(needle, pos);
+ ++this.matches;
+ if (pos > 0) {
+ this.emit("info", true, data, this._bufpos, pos);
+ } else {
+ this.emit("info", true);
+ }
+ return this._bufpos = pos + needleLength;
+ } else {
+ pos = len - needleLength;
+ }
+ while (pos < len && (data[pos] !== needle[0] || Buffer.compare(data.subarray(pos, pos + len - pos), needle.subarray(0, len - pos)) !== 0)) {
+ ++pos;
+ }
+ if (pos < len) {
+ data.copy(this._lookbehind, 0, pos, pos + (len - pos));
+ this._lookbehind_size = len - pos;
+ }
+ if (pos > 0) {
+ this.emit("info", false, data, this._bufpos, pos < len ? pos : len);
+ }
+ this._bufpos = len;
+ return len;
+ };
+ SBMH.prototype._sbmh_lookup_char = function(data, pos) {
+ return pos < 0 ? this._lookbehind[this._lookbehind_size + pos] : data[pos];
+ };
+ SBMH.prototype._sbmh_memcmp = function(data, pos, len) {
+ for (var i = 0; i < len; ++i) {
+ if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) {
+ return false;
+ }
+ }
+ return true;
+ };
+ module2.exports = SBMH;
+ }
+});
+
+// node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js
+var require_PartStream = __commonJS({
+ "node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js"(exports2, module2) {
+ "use strict";
+ var inherits = require("node:util").inherits;
+ var ReadableStream = require("node:stream").Readable;
+ function PartStream(opts) {
+ ReadableStream.call(this, opts);
+ }
+ inherits(PartStream, ReadableStream);
+ PartStream.prototype._read = function(n) {
+ };
+ module2.exports = PartStream;
+ }
+});
+
+// node_modules/@fastify/busboy/lib/utils/getLimit.js
+var require_getLimit = __commonJS({
+ "node_modules/@fastify/busboy/lib/utils/getLimit.js"(exports2, module2) {
+ "use strict";
+ module2.exports = function getLimit(limits, name, defaultLimit) {
+ if (!limits || limits[name] === void 0 || limits[name] === null) {
+ return defaultLimit;
+ }
+ if (typeof limits[name] !== "number" || isNaN(limits[name])) {
+ throw new TypeError("Limit " + name + " is not a valid number");
+ }
+ return limits[name];
+ };
+ }
+});
+
+// node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js
+var require_HeaderParser = __commonJS({
+ "node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js"(exports2, module2) {
+ "use strict";
+ var EventEmitter = require("node:events").EventEmitter;
+ var inherits = require("node:util").inherits;
+ var getLimit = require_getLimit();
+ var StreamSearch = require_sbmh();
+ var B_DCRLF = Buffer.from("\r\n\r\n");
+ var RE_CRLF = /\r\n/g;
+ var RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/;
+ function HeaderParser(cfg) {
+ EventEmitter.call(this);
+ cfg = cfg || {};
+ const self = this;
+ this.nread = 0;
+ this.maxed = false;
+ this.npairs = 0;
+ this.maxHeaderPairs = getLimit(cfg, "maxHeaderPairs", 2e3);
+ this.maxHeaderSize = getLimit(cfg, "maxHeaderSize", 80 * 1024);
+ this.buffer = "";
+ this.header = {};
+ this.finished = false;
+ this.ss = new StreamSearch(B_DCRLF);
+ this.ss.on("info", function(isMatch, data, start, end) {
+ if (data && !self.maxed) {
+ if (self.nread + end - start >= self.maxHeaderSize) {
+ end = self.maxHeaderSize - self.nread + start;
+ self.nread = self.maxHeaderSize;
+ self.maxed = true;
+ } else {
+ self.nread += end - start;
+ }
+ self.buffer += data.toString("binary", start, end);
+ }
+ if (isMatch) {
+ self._finish();
+ }
+ });
}
- function parseDisposition(str, defDecoder) {
- if (str.length === 0)
+ inherits(HeaderParser, EventEmitter);
+ HeaderParser.prototype.push = function(data) {
+ const r = this.ss.push(data);
+ if (this.finished) {
+ return r;
+ }
+ };
+ HeaderParser.prototype.reset = function() {
+ this.finished = false;
+ this.buffer = "";
+ this.header = {};
+ this.ss.reset();
+ };
+ HeaderParser.prototype._finish = function() {
+ if (this.buffer) {
+ this._parseHeader();
+ }
+ this.ss.matches = this.ss.maxMatches;
+ const header = this.header;
+ this.header = {};
+ this.buffer = "";
+ this.finished = true;
+ this.nread = this.npairs = 0;
+ this.maxed = false;
+ this.emit("header", header);
+ };
+ HeaderParser.prototype._parseHeader = function() {
+ if (this.npairs === this.maxHeaderPairs) {
return;
- const params = /* @__PURE__ */ Object.create(null);
- let i = 0;
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (TOKEN[code] !== 1) {
- if (parseDispositionParams(str, i, params, defDecoder) === void 0)
- return;
+ }
+ const lines = this.buffer.split(RE_CRLF);
+ const len = lines.length;
+ let m, h;
+ for (var i = 0; i < len; ++i) {
+ if (lines[i].length === 0) {
+ continue;
+ }
+ if (lines[i][0] === " " || lines[i][0] === "\t") {
+ if (h) {
+ this.header[h][this.header[h].length - 1] += lines[i];
+ continue;
+ }
+ }
+ const posColon = lines[i].indexOf(":");
+ if (posColon === -1 || posColon === 0) {
+ return;
+ }
+ m = RE_HDR.exec(lines[i]);
+ h = m[1].toLowerCase();
+ this.header[h] = this.header[h] || [];
+ this.header[h].push(m[2] || "");
+ if (++this.npairs === this.maxHeaderPairs) {
break;
}
}
- const type = str.slice(0, i).toLowerCase();
- return { type, params };
+ };
+ module2.exports = HeaderParser;
+ }
+});
+
+// node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js
+var require_Dicer = __commonJS({
+ "node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js"(exports2, module2) {
+ "use strict";
+ var WritableStream = require("node:stream").Writable;
+ var inherits = require("node:util").inherits;
+ var StreamSearch = require_sbmh();
+ var PartStream = require_PartStream();
+ var HeaderParser = require_HeaderParser();
+ var DASH = 45;
+ var B_ONEDASH = Buffer.from("-");
+ var B_CRLF = Buffer.from("\r\n");
+ var EMPTY_FN = function() {
+ };
+ function Dicer(cfg) {
+ if (!(this instanceof Dicer)) {
+ return new Dicer(cfg);
+ }
+ WritableStream.call(this, cfg);
+ if (!cfg || !cfg.headerFirst && typeof cfg.boundary !== "string") {
+ throw new TypeError("Boundary required");
+ }
+ if (typeof cfg.boundary === "string") {
+ this.setBoundary(cfg.boundary);
+ } else {
+ this._bparser = void 0;
+ }
+ this._headerFirst = cfg.headerFirst;
+ this._dashes = 0;
+ this._parts = 0;
+ this._finished = false;
+ this._realFinish = false;
+ this._isPreamble = true;
+ this._justMatched = false;
+ this._firstWrite = true;
+ this._inHeader = true;
+ this._part = void 0;
+ this._cb = void 0;
+ this._ignoreData = false;
+ this._partOpts = { highWaterMark: cfg.partHwm };
+ this._pause = false;
+ const self = this;
+ this._hparser = new HeaderParser(cfg);
+ this._hparser.on("header", function(header) {
+ self._inHeader = false;
+ self._part.emit("header", header);
+ });
}
- function parseDispositionParams(str, i, params, defDecoder) {
- while (i < str.length) {
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (code !== 32 && code !== 9)
- break;
+ inherits(Dicer, WritableStream);
+ Dicer.prototype.emit = function(ev) {
+ if (ev === "finish" && !this._realFinish) {
+ if (!this._finished) {
+ const self = this;
+ process.nextTick(function() {
+ self.emit("error", new Error("Unexpected end of multipart data"));
+ if (self._part && !self._ignoreData) {
+ const type = self._isPreamble ? "Preamble" : "Part";
+ self._part.emit("error", new Error(type + " terminated early due to unexpected end of multipart data"));
+ self._part.push(null);
+ process.nextTick(function() {
+ self._realFinish = true;
+ self.emit("finish");
+ self._realFinish = false;
+ });
+ return;
+ }
+ self._realFinish = true;
+ self.emit("finish");
+ self._realFinish = false;
+ });
}
- if (i === str.length)
- break;
- if (str.charCodeAt(i++) !== 59)
- return;
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (code !== 32 && code !== 9)
+ } else {
+ WritableStream.prototype.emit.apply(this, arguments);
+ }
+ };
+ Dicer.prototype._write = function(data, encoding, cb) {
+ if (!this._hparser && !this._bparser) {
+ return cb();
+ }
+ if (this._headerFirst && this._isPreamble) {
+ if (!this._part) {
+ this._part = new PartStream(this._partOpts);
+ if (this._events.preamble) {
+ this.emit("preamble", this._part);
+ } else {
+ this._ignore();
+ }
+ }
+ const r = this._hparser.push(data);
+ if (!this._inHeader && r !== void 0 && r < data.length) {
+ data = data.slice(r);
+ } else {
+ return cb();
+ }
+ }
+ if (this._firstWrite) {
+ this._bparser.push(B_CRLF);
+ this._firstWrite = false;
+ }
+ this._bparser.push(data);
+ if (this._pause) {
+ this._cb = cb;
+ } else {
+ cb();
+ }
+ };
+ Dicer.prototype.reset = function() {
+ this._part = void 0;
+ this._bparser = void 0;
+ this._hparser = void 0;
+ };
+ Dicer.prototype.setBoundary = function(boundary) {
+ const self = this;
+ this._bparser = new StreamSearch("\r\n--" + boundary);
+ this._bparser.on("info", function(isMatch, data, start, end) {
+ self._oninfo(isMatch, data, start, end);
+ });
+ };
+ Dicer.prototype._ignore = function() {
+ if (this._part && !this._ignoreData) {
+ this._ignoreData = true;
+ this._part.on("error", EMPTY_FN);
+ this._part.resume();
+ }
+ };
+ Dicer.prototype._oninfo = function(isMatch, data, start, end) {
+ let buf;
+ const self = this;
+ let i = 0;
+ let r;
+ let shouldWriteMore = true;
+ if (!this._part && this._justMatched && data) {
+ while (this._dashes < 2 && start + i < end) {
+ if (data[start + i] === DASH) {
+ ++i;
+ ++this._dashes;
+ } else {
+ if (this._dashes) {
+ buf = B_ONEDASH;
+ }
+ this._dashes = 0;
break;
+ }
}
- if (i === str.length)
- return;
- let name;
- const nameStart = i;
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (TOKEN[code] !== 1) {
- if (code === 61)
- break;
- return;
+ if (this._dashes === 2) {
+ if (start + i < end && this._events.trailer) {
+ this.emit("trailer", data.slice(start + i, end));
+ }
+ this.reset();
+ this._finished = true;
+ if (self._parts === 0) {
+ self._realFinish = true;
+ self.emit("finish");
+ self._realFinish = false;
}
}
- if (i === str.length)
+ if (this._dashes) {
return;
- let value = "";
- let valueStart;
- let charset;
- name = str.slice(nameStart, i);
- if (name.charCodeAt(name.length - 1) === 42) {
- const charsetStart = ++i;
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (CHARSET[code] !== 1) {
- if (code !== 39)
- return;
- break;
- }
+ }
+ }
+ if (this._justMatched) {
+ this._justMatched = false;
+ }
+ if (!this._part) {
+ this._part = new PartStream(this._partOpts);
+ this._part._read = function(n) {
+ self._unpause();
+ };
+ if (this._isPreamble && this._events.preamble) {
+ this.emit("preamble", this._part);
+ } else if (this._isPreamble !== true && this._events.part) {
+ this.emit("part", this._part);
+ } else {
+ this._ignore();
+ }
+ if (!this._isPreamble) {
+ this._inHeader = true;
+ }
+ }
+ if (data && start < end && !this._ignoreData) {
+ if (this._isPreamble || !this._inHeader) {
+ if (buf) {
+ shouldWriteMore = this._part.push(buf);
}
- if (i === str.length)
- return;
- charset = str.slice(charsetStart, i);
- ++i;
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (code === 39)
- break;
+ shouldWriteMore = this._part.push(data.slice(start, end));
+ if (!shouldWriteMore) {
+ this._pause = true;
}
- if (i === str.length)
- return;
- ++i;
- if (i === str.length)
- return;
- valueStart = i;
- let encode = 0;
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (EXTENDED_VALUE[code] !== 1) {
- if (code === 37) {
- let hexUpper;
- let hexLower;
- if (i + 2 < str.length && (hexUpper = HEX_VALUES[str.charCodeAt(i + 1)]) !== -1 && (hexLower = HEX_VALUES[str.charCodeAt(i + 2)]) !== -1) {
- const byteVal = (hexUpper << 4) + hexLower;
- value += str.slice(valueStart, i);
- value += String.fromCharCode(byteVal);
- i += 2;
- valueStart = i + 1;
- if (byteVal >= 128)
- encode = 2;
- else if (encode === 0)
- encode = 1;
- continue;
- }
- return;
- }
- break;
- }
+ } else if (!this._isPreamble && this._inHeader) {
+ if (buf) {
+ this._hparser.push(buf);
}
- value += str.slice(valueStart, i);
- value = convertToUTF8(value, charset, encode);
- if (value === void 0)
- return;
+ r = this._hparser.push(data.slice(start, end));
+ if (!this._inHeader && r !== void 0 && r < end) {
+ this._oninfo(false, data, start + r, end);
+ }
+ }
+ }
+ if (isMatch) {
+ this._hparser.reset();
+ if (this._isPreamble) {
+ this._isPreamble = false;
} else {
- ++i;
- if (i === str.length)
- return;
- if (str.charCodeAt(i) === 34) {
- valueStart = ++i;
- let escaping = false;
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (code === 92) {
- if (escaping) {
- valueStart = i;
- escaping = false;
+ if (start !== end) {
+ ++this._parts;
+ this._part.on("end", function() {
+ if (--self._parts === 0) {
+ if (self._finished) {
+ self._realFinish = true;
+ self.emit("finish");
+ self._realFinish = false;
} else {
- value += str.slice(valueStart, i);
- escaping = true;
+ self._unpause();
}
- continue;
- }
- if (code === 34) {
- if (escaping) {
- valueStart = i;
- escaping = false;
- continue;
- }
- value += str.slice(valueStart, i);
- break;
- }
- if (escaping) {
- valueStart = i - 1;
- escaping = false;
}
- if (QDTEXT[code] !== 1)
- return;
+ });
+ }
+ }
+ this._part.push(null);
+ this._part = void 0;
+ this._ignoreData = false;
+ this._justMatched = true;
+ this._dashes = 0;
+ }
+ };
+ Dicer.prototype._unpause = function() {
+ if (!this._pause) {
+ return;
+ }
+ this._pause = false;
+ if (this._cb) {
+ const cb = this._cb;
+ this._cb = void 0;
+ cb();
+ }
+ };
+ module2.exports = Dicer;
+ }
+});
+
+// node_modules/@fastify/busboy/lib/utils/decodeText.js
+var require_decodeText = __commonJS({
+ "node_modules/@fastify/busboy/lib/utils/decodeText.js"(exports2, module2) {
+ "use strict";
+ var utf8Decoder = new TextDecoder("utf-8");
+ var textDecoders = /* @__PURE__ */ new Map([
+ ["utf-8", utf8Decoder],
+ ["utf8", utf8Decoder]
+ ]);
+ function decodeText(text, textEncoding, destEncoding) {
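+ // Decode `text` (bytes in `textEncoding`) into `destEncoding`, caching one TextDecoder
+ // per destination encoding; if decoding fails the input is returned unchanged.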
+ if (text) {
+ if (textDecoders.has(destEncoding)) {
+ try {
+ return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding));
+ } catch (e) {
+ }
+ } else {
+ try {
+ textDecoders.set(destEncoding, new TextDecoder(destEncoding));
+ return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding));
+ } catch (e) {
+ }
+ }
+ }
+ return text;
+ }
+ module2.exports = decodeText;
+ }
+});
+
+// node_modules/@fastify/busboy/lib/utils/parseParams.js
+var require_parseParams = __commonJS({
+ "node_modules/@fastify/busboy/lib/utils/parseParams.js"(exports2, module2) {
+ "use strict";
+ var decodeText = require_decodeText();
+ var RE_ENCODED = /%([a-fA-F0-9]{2})/g;
+ function encodedReplacer(match, byte) {
+ return String.fromCharCode(parseInt(byte, 16));
+ }
+ function parseParams(str) {
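+ // State machine over a header parameter list (e.g. Content-Disposition): supports quoted
+ // strings, backslash escapes and RFC 2231 extended parameters (name*=charset'lang'value).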
+ const res = [];
+ let state = "key";
+ let charset = "";
+ let inquote = false;
+ let escaping = false;
+ let p = 0;
+ let tmp = "";
+ for (var i = 0, len = str.length; i < len; ++i) {
+ const char = str[i];
+ if (char === "\\" && inquote) {
+ if (escaping) {
+ escaping = false;
+ } else {
+ escaping = true;
+ continue;
+ }
+ } else if (char === '"') {
+ if (!escaping) {
+ if (inquote) {
+ inquote = false;
+ state = "key";
+ } else {
+ inquote = true;
}
- if (i === str.length)
- return;
- ++i;
+ continue;
} else {
- valueStart = i;
- for (; i < str.length; ++i) {
- const code = str.charCodeAt(i);
- if (TOKEN[code] !== 1) {
- if (i === valueStart)
- return;
- break;
+ escaping = false;
+ }
+ } else {
+ if (escaping && inquote) {
+ tmp += "\\";
+ }
+ escaping = false;
+ if ((state === "charset" || state === "lang") && char === "'") {
+ if (state === "charset") {
+ state = "lang";
+ charset = tmp.substring(1);
+ } else {
+ state = "value";
+ }
+ tmp = "";
+ continue;
+ } else if (state === "key" && (char === "*" || char === "=") && res.length) {
+ if (char === "*") {
+ state = "charset";
+ } else {
+ state = "value";
+ }
+ res[p] = [tmp, void 0];
+ tmp = "";
+ continue;
+ } else if (!inquote && char === ";") {
+ state = "key";
+ if (charset) {
+ if (tmp.length) {
+ tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), "binary", charset);
}
+ charset = "";
+ } else if (tmp.length) {
+ tmp = decodeText(tmp, "binary", "utf8");
+ }
+ if (res[p] === void 0) {
+ res[p] = tmp;
+ } else {
+ res[p][1] = tmp;
}
- value = str.slice(valueStart, i);
+ tmp = "";
+ ++p;
+ continue;
+ } else if (!inquote && (char === " " || char === "\t")) {
+ continue;
}
- value = defDecoder(value, 2);
- if (value === void 0)
- return;
}
- name = name.toLowerCase();
- if (params[name] === void 0)
- params[name] = value;
+ tmp += char;
}
- return params;
- }
- function getDecoder(charset) {
- let lc;
- while (true) {
- switch (charset) {
- case "utf-8":
- case "utf8":
- return decoders.utf8;
- case "latin1":
- case "ascii":
- case "us-ascii":
- case "iso-8859-1":
- case "iso8859-1":
- case "iso88591":
- case "iso_8859-1":
- case "windows-1252":
- case "iso_8859-1:1987":
- case "cp1252":
- case "x-cp1252":
- return decoders.latin1;
- case "utf16le":
- case "utf-16le":
- case "ucs2":
- case "ucs-2":
- return decoders.utf16le;
- case "base64":
- return decoders.base64;
- default:
- if (lc === void 0) {
- lc = true;
- charset = charset.toLowerCase();
- continue;
- }
- return decoders.other.bind(charset);
+ if (charset && tmp.length) {
+ tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), "binary", charset);
+ } else if (tmp) {
+ tmp = decodeText(tmp, "binary", "utf8");
+ }
+ if (res[p] === void 0) {
+ if (tmp) {
+ res[p] = tmp;
}
+ } else {
+ res[p][1] = tmp;
}
+ return res;
}
- var decoders = {
- utf8: (data, hint) => {
- if (data.length === 0)
- return "";
- if (typeof data === "string") {
- if (hint < 2)
- return data;
- data = Buffer.from(data, "latin1");
- }
- return data.utf8Slice(0, data.length);
- },
- latin1: (data, hint) => {
- if (data.length === 0)
- return "";
- if (typeof data === "string")
- return data;
- return data.latin1Slice(0, data.length);
- },
- utf16le: (data, hint) => {
- if (data.length === 0)
- return "";
- if (typeof data === "string")
- data = Buffer.from(data, "latin1");
- return data.ucs2Slice(0, data.length);
- },
- base64: (data, hint) => {
- if (data.length === 0)
- return "";
- if (typeof data === "string")
- data = Buffer.from(data, "latin1");
- return data.base64Slice(0, data.length);
- },
- other: (data, hint) => {
- if (data.length === 0)
- return "";
- if (typeof data === "string")
- data = Buffer.from(data, "latin1");
- try {
- const decoder = new TextDecoder(exports2);
- return decoder.decode(data);
- } catch {
- }
- }
- };
- function convertToUTF8(data, charset, hint) {
- const decode = getDecoder(charset);
- if (decode)
- return decode(data, hint);
- }
- function basename(path) {
- if (typeof path !== "string")
+ module2.exports = parseParams;
+ }
+});
+
+// node_modules/@fastify/busboy/lib/utils/basename.js
+var require_basename = __commonJS({
+ "node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) {
+ "use strict";
+ module2.exports = function basename(path) {
+ if (typeof path !== "string") {
return "";
- for (let i = path.length - 1; i >= 0; --i) {
+ }
+ for (var i = path.length - 1; i >= 0; --i) {
switch (path.charCodeAt(i)) {
case 47:
case 92:
@@ -2467,177 +2731,297 @@ var require_utils = __commonJS({
}
}
return path === ".." || path === "." ? "" : path;
+ };
+ }
+});
+
+// node_modules/@fastify/busboy/lib/types/multipart.js
+var require_multipart = __commonJS({
+ "node_modules/@fastify/busboy/lib/types/multipart.js"(exports2, module2) {
+ "use strict";
+ var { Readable } = require("node:stream");
+ var { inherits } = require("node:util");
+ var Dicer = require_Dicer();
+ var parseParams = require_parseParams();
+ var decodeText = require_decodeText();
+ var basename = require_basename();
+ var getLimit = require_getLimit();
+ var RE_BOUNDARY = /^boundary$/i;
+ var RE_FIELD = /^form-data$/i;
+ var RE_CHARSET = /^charset$/i;
+ var RE_FILENAME = /^filename$/i;
+ var RE_NAME = /^name$/i;
+ Multipart.detect = /^multipart\/form-data/i;
+ function Multipart(boy, cfg) {
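+ // Bridges a Dicer parser to the busboy stream (`boy`): each part's headers decide whether
+ // it is surfaced as a "file" or "field" event, with the configured limits enforced.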
+ let i;
+ let len;
+ const self = this;
+ let boundary;
+ const limits = cfg.limits;
+ const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => contentType === "application/octet-stream" || fileName !== void 0);
+ const parsedConType = cfg.parsedConType || [];
+ const defCharset = cfg.defCharset || "utf8";
+ const preservePath = cfg.preservePath;
+ const fileOpts = { highWaterMark: cfg.fileHwm };
+ for (i = 0, len = parsedConType.length; i < len; ++i) {
+ if (Array.isArray(parsedConType[i]) && RE_BOUNDARY.test(parsedConType[i][0])) {
+ boundary = parsedConType[i][1];
+ break;
+ }
+ }
+ function checkFinished() {
+ if (nends === 0 && finished && !boy._done) {
+ finished = false;
+ self.end();
+ }
+ }
+ if (typeof boundary !== "string") {
+ throw new Error("Multipart: Boundary not found");
+ }
+ const fieldSizeLimit = getLimit(limits, "fieldSize", 1 * 1024 * 1024);
+ const fileSizeLimit = getLimit(limits, "fileSize", Infinity);
+ const filesLimit = getLimit(limits, "files", Infinity);
+ const fieldsLimit = getLimit(limits, "fields", Infinity);
+ const partsLimit = getLimit(limits, "parts", Infinity);
+ const headerPairsLimit = getLimit(limits, "headerPairs", 2e3);
+ const headerSizeLimit = getLimit(limits, "headerSize", 80 * 1024);
+ let nfiles = 0;
+ let nfields = 0;
+ let nends = 0;
+ let curFile;
+ let curField;
+ let finished = false;
+ this._needDrain = false;
+ this._pause = false;
+ this._cb = void 0;
+ this._nparts = 0;
+ this._boy = boy;
+ const parserCfg = {
+ boundary,
+ maxHeaderPairs: headerPairsLimit,
+ maxHeaderSize: headerSizeLimit,
+ partHwm: fileOpts.highWaterMark,
+ highWaterMark: cfg.highWaterMark
+ };
+ this.parser = new Dicer(parserCfg);
+ this.parser.on("drain", function() {
+ self._needDrain = false;
+ if (self._cb && !self._pause) {
+ const cb = self._cb;
+ self._cb = void 0;
+ cb();
+ }
+ }).on("part", function onPart(part) {
+ if (++self._nparts > partsLimit) {
+ self.parser.removeListener("part", onPart);
+ self.parser.on("part", skipPart);
+ boy.hitPartsLimit = true;
+ boy.emit("partsLimit");
+ return skipPart(part);
+ }
+ if (curField) {
+ const field = curField;
+ field.emit("end");
+ field.removeAllListeners("end");
+ }
+ part.on("header", function(header) {
+ let contype;
+ let fieldname;
+ let parsed;
+ let charset;
+ let encoding;
+ let filename;
+ let nsize = 0;
+ if (header["content-type"]) {
+ parsed = parseParams(header["content-type"][0]);
+ if (parsed[0]) {
+ contype = parsed[0].toLowerCase();
+ for (i = 0, len = parsed.length; i < len; ++i) {
+ if (RE_CHARSET.test(parsed[i][0])) {
+ charset = parsed[i][1].toLowerCase();
+ break;
+ }
+ }
+ }
+ }
+ if (contype === void 0) {
+ contype = "text/plain";
+ }
+ if (charset === void 0) {
+ charset = defCharset;
+ }
+ if (header["content-disposition"]) {
+ parsed = parseParams(header["content-disposition"][0]);
+ if (!RE_FIELD.test(parsed[0])) {
+ return skipPart(part);
+ }
+ for (i = 0, len = parsed.length; i < len; ++i) {
+ if (RE_NAME.test(parsed[i][0])) {
+ fieldname = parsed[i][1];
+ } else if (RE_FILENAME.test(parsed[i][0])) {
+ filename = parsed[i][1];
+ if (!preservePath) {
+ filename = basename(filename);
+ }
+ }
+ }
+ } else {
+ return skipPart(part);
+ }
+ if (header["content-transfer-encoding"]) {
+ encoding = header["content-transfer-encoding"][0].toLowerCase();
+ } else {
+ encoding = "7bit";
+ }
+ let onData, onEnd;
+ if (isPartAFile(fieldname, contype, filename)) {
+ if (nfiles === filesLimit) {
+ if (!boy.hitFilesLimit) {
+ boy.hitFilesLimit = true;
+ boy.emit("filesLimit");
+ }
+ return skipPart(part);
+ }
+ ++nfiles;
+ if (!boy._events.file) {
+ self.parser._ignore();
+ return;
+ }
+ ++nends;
+ const file = new FileStream(fileOpts);
+ curFile = file;
+ file.on("end", function() {
+ --nends;
+ self._pause = false;
+ checkFinished();
+ if (self._cb && !self._needDrain) {
+ const cb = self._cb;
+ self._cb = void 0;
+ cb();
+ }
+ });
+ file._read = function(n) {
+ if (!self._pause) {
+ return;
+ }
+ self._pause = false;
+ if (self._cb && !self._needDrain) {
+ const cb = self._cb;
+ self._cb = void 0;
+ cb();
+ }
+ };
+ boy.emit("file", fieldname, file, filename, encoding, contype);
+ onData = function(data) {
+ if ((nsize += data.length) > fileSizeLimit) {
+ const extralen = fileSizeLimit - nsize + data.length;
+ if (extralen > 0) {
+ file.push(data.slice(0, extralen));
+ }
+ file.truncated = true;
+ file.bytesRead = fileSizeLimit;
+ part.removeAllListeners("data");
+ file.emit("limit");
+ return;
+ } else if (!file.push(data)) {
+ self._pause = true;
+ }
+ file.bytesRead = nsize;
+ };
+ onEnd = function() {
+ curFile = void 0;
+ file.push(null);
+ };
+ } else {
+ if (nfields === fieldsLimit) {
+ if (!boy.hitFieldsLimit) {
+ boy.hitFieldsLimit = true;
+ boy.emit("fieldsLimit");
+ }
+ return skipPart(part);
+ }
+ ++nfields;
+ ++nends;
+ let buffer = "";
+ let truncated = false;
+ curField = part;
+ onData = function(data) {
+ if ((nsize += data.length) > fieldSizeLimit) {
+ const extralen = fieldSizeLimit - (nsize - data.length);
+ buffer += data.toString("binary", 0, extralen);
+ truncated = true;
+ part.removeAllListeners("data");
+ } else {
+ buffer += data.toString("binary");
+ }
+ };
+ onEnd = function() {
+ curField = void 0;
+ if (buffer.length) {
+ buffer = decodeText(buffer, "binary", charset);
+ }
+ boy.emit("field", fieldname, buffer, false, truncated, encoding, contype);
+ --nends;
+ checkFinished();
+ };
+ }
+ part._readableState.sync = false;
+ part.on("data", onData);
+ part.on("end", onEnd);
+ }).on("error", function(err) {
+ if (curFile) {
+ curFile.emit("error", err);
+ }
+ });
+ }).on("error", function(err) {
+ boy.emit("error", err);
+ }).on("finish", function() {
+ finished = true;
+ checkFinished();
+ });
}
- var TOKEN = [
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 1,
- 0,
- 1,
- 1,
- 1,
- 1,
- 1,
- 0,
- 0,
- 1,
- 1,
- 0,
- 1,
- 1,
- 0,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 0,
- 0,
- 0,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
+ Multipart.prototype.write = function(chunk, cb) {
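+ // Acknowledge the write immediately only if Dicer accepted the chunk and no part is
+ // paused; otherwise hold the callback until "drain" or a part read releases it.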
+ const r = this.parser.write(chunk);
+ if (r && !this._pause) {
+ cb();
+ } else {
+ this._needDrain = !r;
+ this._cb = cb;
+ }
+ };
+ Multipart.prototype.end = function() {
+ const self = this;
+ if (self.parser.writable) {
+ self.parser.end();
+ } else if (!self._boy._done) {
+ process.nextTick(function() {
+ self._boy._done = true;
+ self._boy.emit("finish");
+ });
+ }
+ };
+ function skipPart(part) {
+ part.resume();
+ }
+ function FileStream(opts) {
+ Readable.call(this, opts);
+ this.bytesRead = 0;
+ this.truncated = false;
+ }
+ inherits(FileStream, Readable);
+ FileStream.prototype._read = function(n) {
+ };
+ module2.exports = Multipart;
+ }
+});
+
+// node_modules/@fastify/busboy/lib/utils/Decoder.js
+var require_Decoder = __commonJS({
+ "node_modules/@fastify/busboy/lib/utils/Decoder.js"(exports2, module2) {
+ "use strict";
+ var RE_PLUS = /\+/g;
+ var HEX = [
+ 0,
0,
0,
0,
@@ -2685,2485 +3069,6 @@ var require_utils = __commonJS({
0,
0,
0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0
- ];
- var QDTEXT = [
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 1,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 1,
- 1,
- 0,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 0,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 0,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1
- ];
- var CHARSET = [
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 1,
- 0,
- 1,
- 1,
- 1,
- 1,
- 0,
- 0,
- 0,
- 0,
- 1,
- 0,
- 1,
- 0,
- 0,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 0,
- 0,
- 0,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 0,
- 1,
- 1,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0
- ];
- var EXTENDED_VALUE = [
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 1,
- 0,
- 1,
- 1,
- 0,
- 1,
- 0,
- 0,
- 0,
- 0,
- 1,
- 0,
- 1,
- 1,
- 0,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 0,
- 0,
- 0,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0
- ];
- var HEX_VALUES = [
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- 0,
- 1,
- 2,
- 3,
- 4,
- 5,
- 6,
- 7,
- 8,
- 9,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- 10,
- 11,
- 12,
- 13,
- 14,
- 15,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- 10,
- 11,
- 12,
- 13,
- 14,
- 15,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1
- ];
- module2.exports = {
- basename,
- convertToUTF8,
- getDecoder,
- parseContentType,
- parseDisposition
- };
- }
-});
-
-// node_modules/streamsearch/lib/sbmh.js
-var require_sbmh = __commonJS({
- "node_modules/streamsearch/lib/sbmh.js"(exports2, module2) {
- "use strict";
- function memcmp(buf1, pos1, buf2, pos2, num) {
- for (let i = 0; i < num; ++i) {
- if (buf1[pos1 + i] !== buf2[pos2 + i])
- return false;
- }
- return true;
- }
- var SBMH = class {
- constructor(needle, cb) {
- if (typeof cb !== "function")
- throw new Error("Missing match callback");
- if (typeof needle === "string")
- needle = Buffer.from(needle);
- else if (!Buffer.isBuffer(needle))
- throw new Error(`Expected Buffer for needle, got ${typeof needle}`);
- const needleLen = needle.length;
- this.maxMatches = Infinity;
- this.matches = 0;
- this._cb = cb;
- this._lookbehindSize = 0;
- this._needle = needle;
- this._bufPos = 0;
- this._lookbehind = Buffer.allocUnsafe(needleLen);
- this._occ = [
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen,
- needleLen
- ];
- if (needleLen > 1) {
- for (let i = 0; i < needleLen - 1; ++i)
- this._occ[needle[i]] = needleLen - 1 - i;
- }
- }
- reset() {
- this.matches = 0;
- this._lookbehindSize = 0;
- this._bufPos = 0;
- }
- push(chunk, pos) {
- let result;
- if (!Buffer.isBuffer(chunk))
- chunk = Buffer.from(chunk, "latin1");
- const chunkLen = chunk.length;
- this._bufPos = pos || 0;
- while (result !== chunkLen && this.matches < this.maxMatches)
- result = feed(this, chunk);
- return result;
- }
- destroy() {
- const lbSize = this._lookbehindSize;
- if (lbSize)
- this._cb(false, this._lookbehind, 0, lbSize, false);
- this.reset();
- }
- };
- function feed(self, data) {
- const len = data.length;
- const needle = self._needle;
- const needleLen = needle.length;
- let pos = -self._lookbehindSize;
- const lastNeedleCharPos = needleLen - 1;
- const lastNeedleChar = needle[lastNeedleCharPos];
- const end = len - needleLen;
- const occ = self._occ;
- const lookbehind = self._lookbehind;
- if (pos < 0) {
- while (pos < 0 && pos <= end) {
- const nextPos = pos + lastNeedleCharPos;
- const ch = nextPos < 0 ? lookbehind[self._lookbehindSize + nextPos] : data[nextPos];
- if (ch === lastNeedleChar && matchNeedle(self, data, pos, lastNeedleCharPos)) {
- self._lookbehindSize = 0;
- ++self.matches;
- if (pos > -self._lookbehindSize)
- self._cb(true, lookbehind, 0, self._lookbehindSize + pos, false);
- else
- self._cb(true, void 0, 0, 0, true);
- return self._bufPos = pos + needleLen;
- }
- pos += occ[ch];
- }
- while (pos < 0 && !matchNeedle(self, data, pos, len - pos))
- ++pos;
- if (pos < 0) {
- const bytesToCutOff = self._lookbehindSize + pos;
- if (bytesToCutOff > 0) {
- self._cb(false, lookbehind, 0, bytesToCutOff, false);
- }
- self._lookbehindSize -= bytesToCutOff;
- lookbehind.copy(lookbehind, 0, bytesToCutOff, self._lookbehindSize);
- lookbehind.set(data, self._lookbehindSize);
- self._lookbehindSize += len;
- self._bufPos = len;
- return len;
- }
- self._cb(false, lookbehind, 0, self._lookbehindSize, false);
- self._lookbehindSize = 0;
- }
- pos += self._bufPos;
- const firstNeedleChar = needle[0];
- while (pos <= end) {
- const ch = data[pos + lastNeedleCharPos];
- if (ch === lastNeedleChar && data[pos] === firstNeedleChar && memcmp(needle, 0, data, pos, lastNeedleCharPos)) {
- ++self.matches;
- if (pos > 0)
- self._cb(true, data, self._bufPos, pos, true);
- else
- self._cb(true, void 0, 0, 0, true);
- return self._bufPos = pos + needleLen;
- }
- pos += occ[ch];
- }
- while (pos < len) {
- if (data[pos] !== firstNeedleChar || !memcmp(data, pos, needle, 0, len - pos)) {
- ++pos;
- continue;
- }
- data.copy(lookbehind, 0, pos, len);
- self._lookbehindSize = len - pos;
- break;
- }
- if (pos > 0)
- self._cb(false, data, self._bufPos, pos < len ? pos : len, true);
- self._bufPos = len;
- return len;
- }
- function matchNeedle(self, data, pos, len) {
- const lb = self._lookbehind;
- const lbSize = self._lookbehindSize;
- const needle = self._needle;
- for (let i = 0; i < len; ++i, ++pos) {
- const ch = pos < 0 ? lb[lbSize + pos] : data[pos];
- if (ch !== needle[i])
- return false;
- }
- return true;
- }
- module2.exports = SBMH;
- }
-});
-
-// node_modules/busboy/lib/types/multipart.js
-var require_multipart = __commonJS({
- "node_modules/busboy/lib/types/multipart.js"(exports2, module2) {
- "use strict";
- var { Readable, Writable } = require("stream");
- var StreamSearch = require_sbmh();
- var {
- basename,
- convertToUTF8,
- getDecoder,
- parseContentType,
- parseDisposition
- } = require_utils();
- var BUF_CRLF = Buffer.from("\r\n");
- var BUF_CR = Buffer.from("\r");
- var BUF_DASH = Buffer.from("-");
- function noop() {
- }
- var MAX_HEADER_PAIRS = 2e3;
- var MAX_HEADER_SIZE = 16 * 1024;
- var HPARSER_NAME = 0;
- var HPARSER_PRE_OWS = 1;
- var HPARSER_VALUE = 2;
- var HeaderParser = class {
- constructor(cb) {
- this.header = /* @__PURE__ */ Object.create(null);
- this.pairCount = 0;
- this.byteCount = 0;
- this.state = HPARSER_NAME;
- this.name = "";
- this.value = "";
- this.crlf = 0;
- this.cb = cb;
- }
- reset() {
- this.header = /* @__PURE__ */ Object.create(null);
- this.pairCount = 0;
- this.byteCount = 0;
- this.state = HPARSER_NAME;
- this.name = "";
- this.value = "";
- this.crlf = 0;
- }
- push(chunk, pos, end) {
- let start = pos;
- while (pos < end) {
- switch (this.state) {
- case HPARSER_NAME: {
- let done = false;
- for (; pos < end; ++pos) {
- if (this.byteCount === MAX_HEADER_SIZE)
- return -1;
- ++this.byteCount;
- const code = chunk[pos];
- if (TOKEN[code] !== 1) {
- if (code !== 58)
- return -1;
- this.name += chunk.latin1Slice(start, pos);
- if (this.name.length === 0)
- return -1;
- ++pos;
- done = true;
- this.state = HPARSER_PRE_OWS;
- break;
- }
- }
- if (!done) {
- this.name += chunk.latin1Slice(start, pos);
- break;
- }
- }
- case HPARSER_PRE_OWS: {
- let done = false;
- for (; pos < end; ++pos) {
- if (this.byteCount === MAX_HEADER_SIZE)
- return -1;
- ++this.byteCount;
- const code = chunk[pos];
- if (code !== 32 && code !== 9) {
- start = pos;
- done = true;
- this.state = HPARSER_VALUE;
- break;
- }
- }
- if (!done)
- break;
- }
- case HPARSER_VALUE:
- switch (this.crlf) {
- case 0:
- for (; pos < end; ++pos) {
- if (this.byteCount === MAX_HEADER_SIZE)
- return -1;
- ++this.byteCount;
- const code = chunk[pos];
- if (FIELD_VCHAR[code] !== 1) {
- if (code !== 13)
- return -1;
- ++this.crlf;
- break;
- }
- }
- this.value += chunk.latin1Slice(start, pos++);
- break;
- case 1:
- if (this.byteCount === MAX_HEADER_SIZE)
- return -1;
- ++this.byteCount;
- if (chunk[pos++] !== 10)
- return -1;
- ++this.crlf;
- break;
- case 2: {
- if (this.byteCount === MAX_HEADER_SIZE)
- return -1;
- ++this.byteCount;
- const code = chunk[pos];
- if (code === 32 || code === 9) {
- start = pos;
- this.crlf = 0;
- } else {
- if (++this.pairCount < MAX_HEADER_PAIRS) {
- this.name = this.name.toLowerCase();
- if (this.header[this.name] === void 0)
- this.header[this.name] = [this.value];
- else
- this.header[this.name].push(this.value);
- }
- if (code === 13) {
- ++this.crlf;
- ++pos;
- } else {
- start = pos;
- this.crlf = 0;
- this.state = HPARSER_NAME;
- this.name = "";
- this.value = "";
- }
- }
- break;
- }
- case 3: {
- if (this.byteCount === MAX_HEADER_SIZE)
- return -1;
- ++this.byteCount;
- if (chunk[pos++] !== 10)
- return -1;
- const header = this.header;
- this.reset();
- this.cb(header);
- return pos;
- }
- }
- break;
- }
- }
- return pos;
- }
- };
- var FileStream = class extends Readable {
- constructor(opts, owner) {
- super(opts);
- this.truncated = false;
- this._readcb = null;
- this.once("end", () => {
- this._read();
- if (--owner._fileEndsLeft === 0 && owner._finalcb) {
- const cb = owner._finalcb;
- owner._finalcb = null;
- process.nextTick(cb);
- }
- });
- }
- _read(n) {
- const cb = this._readcb;
- if (cb) {
- this._readcb = null;
- cb();
- }
- }
- };
- var ignoreData = {
- push: (chunk, pos) => {
- },
- destroy: () => {
- }
- };
- function callAndUnsetCb(self, err) {
- const cb = self._writecb;
- self._writecb = null;
- if (err)
- self.destroy(err);
- else if (cb)
- cb();
- }
- function nullDecoder(val, hint) {
- return val;
- }
- var Multipart = class extends Writable {
- constructor(cfg) {
- const streamOpts = {
- autoDestroy: true,
- emitClose: true,
- highWaterMark: typeof cfg.highWaterMark === "number" ? cfg.highWaterMark : void 0
- };
- super(streamOpts);
- if (!cfg.conType.params || typeof cfg.conType.params.boundary !== "string")
- throw new Error("Multipart: Boundary not found");
- const boundary = cfg.conType.params.boundary;
- const paramDecoder = typeof cfg.defParamCharset === "string" && cfg.defParamCharset ? getDecoder(cfg.defParamCharset) : nullDecoder;
- const defCharset = cfg.defCharset || "utf8";
- const preservePath = cfg.preservePath;
- const fileOpts = {
- autoDestroy: true,
- emitClose: true,
- highWaterMark: typeof cfg.fileHwm === "number" ? cfg.fileHwm : void 0
- };
- const limits = cfg.limits;
- const fieldSizeLimit = limits && typeof limits.fieldSize === "number" ? limits.fieldSize : 1 * 1024 * 1024;
- const fileSizeLimit = limits && typeof limits.fileSize === "number" ? limits.fileSize : Infinity;
- const filesLimit = limits && typeof limits.files === "number" ? limits.files : Infinity;
- const fieldsLimit = limits && typeof limits.fields === "number" ? limits.fields : Infinity;
- const partsLimit = limits && typeof limits.parts === "number" ? limits.parts : Infinity;
- let parts = -1;
- let fields = 0;
- let files = 0;
- let skipPart = false;
- this._fileEndsLeft = 0;
- this._fileStream = void 0;
- this._complete = false;
- let fileSize = 0;
- let field;
- let fieldSize = 0;
- let partCharset;
- let partEncoding;
- let partType;
- let partName;
- let partTruncated = false;
- let hitFilesLimit = false;
- let hitFieldsLimit = false;
- this._hparser = null;
- const hparser = new HeaderParser((header) => {
- this._hparser = null;
- skipPart = false;
- partType = "text/plain";
- partCharset = defCharset;
- partEncoding = "7bit";
- partName = void 0;
- partTruncated = false;
- let filename;
- if (!header["content-disposition"]) {
- skipPart = true;
- return;
- }
- const disp = parseDisposition(header["content-disposition"][0], paramDecoder);
- if (!disp || disp.type !== "form-data") {
- skipPart = true;
- return;
- }
- if (disp.params) {
- if (disp.params.name)
- partName = disp.params.name;
- if (disp.params["filename*"])
- filename = disp.params["filename*"];
- else if (disp.params.filename)
- filename = disp.params.filename;
- if (filename !== void 0 && !preservePath)
- filename = basename(filename);
- }
- if (header["content-type"]) {
- const conType = parseContentType(header["content-type"][0]);
- if (conType) {
- partType = `${conType.type}/${conType.subtype}`;
- if (conType.params && typeof conType.params.charset === "string")
- partCharset = conType.params.charset.toLowerCase();
- }
- }
- if (header["content-transfer-encoding"])
- partEncoding = header["content-transfer-encoding"][0].toLowerCase();
- if (partType === "application/octet-stream" || filename !== void 0) {
- if (files === filesLimit) {
- if (!hitFilesLimit) {
- hitFilesLimit = true;
- this.emit("filesLimit");
- }
- skipPart = true;
- return;
- }
- ++files;
- if (this.listenerCount("file") === 0) {
- skipPart = true;
- return;
- }
- fileSize = 0;
- this._fileStream = new FileStream(fileOpts, this);
- ++this._fileEndsLeft;
- this.emit("file", partName, this._fileStream, {
- filename,
- encoding: partEncoding,
- mimeType: partType
- });
- } else {
- if (fields === fieldsLimit) {
- if (!hitFieldsLimit) {
- hitFieldsLimit = true;
- this.emit("fieldsLimit");
- }
- skipPart = true;
- return;
- }
- ++fields;
- if (this.listenerCount("field") === 0) {
- skipPart = true;
- return;
- }
- field = [];
- fieldSize = 0;
- }
- });
- let matchPostBoundary = 0;
- const ssCb = (isMatch, data, start, end, isDataSafe) => {
- retrydata:
- while (data) {
- if (this._hparser !== null) {
- const ret = this._hparser.push(data, start, end);
- if (ret === -1) {
- this._hparser = null;
- hparser.reset();
- this.emit("error", new Error("Malformed part header"));
- break;
- }
- start = ret;
- }
- if (start === end)
- break;
- if (matchPostBoundary !== 0) {
- if (matchPostBoundary === 1) {
- switch (data[start]) {
- case 45:
- matchPostBoundary = 2;
- ++start;
- break;
- case 13:
- matchPostBoundary = 3;
- ++start;
- break;
- default:
- matchPostBoundary = 0;
- }
- if (start === end)
- return;
- }
- if (matchPostBoundary === 2) {
- matchPostBoundary = 0;
- if (data[start] === 45) {
- this._complete = true;
- this._bparser = ignoreData;
- return;
- }
- const writecb = this._writecb;
- this._writecb = noop;
- ssCb(false, BUF_DASH, 0, 1, false);
- this._writecb = writecb;
- } else if (matchPostBoundary === 3) {
- matchPostBoundary = 0;
- if (data[start] === 10) {
- ++start;
- if (parts >= partsLimit)
- break;
- this._hparser = hparser;
- if (start === end)
- break;
- continue retrydata;
- } else {
- const writecb = this._writecb;
- this._writecb = noop;
- ssCb(false, BUF_CR, 0, 1, false);
- this._writecb = writecb;
- }
- }
- }
- if (!skipPart) {
- if (this._fileStream) {
- let chunk;
- const actualLen = Math.min(end - start, fileSizeLimit - fileSize);
- if (!isDataSafe) {
- chunk = Buffer.allocUnsafe(actualLen);
- data.copy(chunk, 0, start, start + actualLen);
- } else {
- chunk = data.slice(start, start + actualLen);
- }
- fileSize += chunk.length;
- if (fileSize === fileSizeLimit) {
- if (chunk.length > 0)
- this._fileStream.push(chunk);
- this._fileStream.emit("limit");
- this._fileStream.truncated = true;
- skipPart = true;
- } else if (!this._fileStream.push(chunk)) {
- if (this._writecb)
- this._fileStream._readcb = this._writecb;
- this._writecb = null;
- }
- } else if (field !== void 0) {
- let chunk;
- const actualLen = Math.min(end - start, fieldSizeLimit - fieldSize);
- if (!isDataSafe) {
- chunk = Buffer.allocUnsafe(actualLen);
- data.copy(chunk, 0, start, start + actualLen);
- } else {
- chunk = data.slice(start, start + actualLen);
- }
- fieldSize += actualLen;
- field.push(chunk);
- if (fieldSize === fieldSizeLimit) {
- skipPart = true;
- partTruncated = true;
- }
- }
- }
- break;
- }
- if (isMatch) {
- matchPostBoundary = 1;
- if (this._fileStream) {
- this._fileStream.push(null);
- this._fileStream = null;
- } else if (field !== void 0) {
- let data2;
- switch (field.length) {
- case 0:
- data2 = "";
- break;
- case 1:
- data2 = convertToUTF8(field[0], partCharset, 0);
- break;
- default:
- data2 = convertToUTF8(Buffer.concat(field, fieldSize), partCharset, 0);
- }
- field = void 0;
- fieldSize = 0;
- this.emit("field", partName, data2, {
- nameTruncated: false,
- valueTruncated: partTruncated,
- encoding: partEncoding,
- mimeType: partType
- });
- }
- if (++parts === partsLimit)
- this.emit("partsLimit");
- }
- };
- this._bparser = new StreamSearch(`\r
---${boundary}`, ssCb);
- this._writecb = null;
- this._finalcb = null;
- this.write(BUF_CRLF);
- }
- static detect(conType) {
- return conType.type === "multipart" && conType.subtype === "form-data";
- }
- _write(chunk, enc, cb) {
- this._writecb = cb;
- this._bparser.push(chunk, 0);
- if (this._writecb)
- callAndUnsetCb(this);
- }
- _destroy(err, cb) {
- this._hparser = null;
- this._bparser = ignoreData;
- if (!err)
- err = checkEndState(this);
- const fileStream = this._fileStream;
- if (fileStream) {
- this._fileStream = null;
- fileStream.destroy(err);
- }
- cb(err);
- }
- _final(cb) {
- this._bparser.destroy();
- if (!this._complete)
- return cb(new Error("Unexpected end of form"));
- if (this._fileEndsLeft)
- this._finalcb = finalcb.bind(null, this, cb);
- else
- finalcb(this, cb);
- }
- };
- function finalcb(self, cb, err) {
- if (err)
- return cb(err);
- err = checkEndState(self);
- cb(err);
- }
- function checkEndState(self) {
- if (self._hparser)
- return new Error("Malformed part header");
- const fileStream = self._fileStream;
- if (fileStream) {
- self._fileStream = null;
- fileStream.destroy(new Error("Unexpected end of file"));
- }
- if (!self._complete)
- return new Error("Unexpected end of form");
- }
- var TOKEN = [
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 1,
- 0,
- 1,
- 1,
- 1,
- 1,
- 1,
- 0,
- 0,
- 1,
- 1,
- 0,
- 1,
- 1,
- 0,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 0,
- 0,
- 0,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0
- ];
- var FIELD_VCHAR = [
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 1,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 0,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
1,
1,
1,
@@ -5174,605 +3079,388 @@ var require_multipart = __commonJS({
1,
1,
1,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
1,
1,
1,
1,
1,
1,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
1,
1,
1,
1,
1,
1,
- 1
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0
];
- module2.exports = Multipart;
+ function Decoder() {
+ this.buffer = void 0;
+ }
+ Decoder.prototype.write = function(str) {
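+ // Incremental percent-decoder: "+" becomes a space and a partial "%X" escape at a chunk
+ // boundary is buffered so the next write() can complete it.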
+ str = str.replace(RE_PLUS, " ");
+ let res = "";
+ let i = 0;
+ let p = 0;
+ const len = str.length;
+ for (; i < len; ++i) {
+ if (this.buffer !== void 0) {
+ if (!HEX[str.charCodeAt(i)]) {
+ res += "%" + this.buffer;
+ this.buffer = void 0;
+ --i;
+ } else {
+ this.buffer += str[i];
+ ++p;
+ if (this.buffer.length === 2) {
+ res += String.fromCharCode(parseInt(this.buffer, 16));
+ this.buffer = void 0;
+ }
+ }
+ } else if (str[i] === "%") {
+ if (i > p) {
+ res += str.substring(p, i);
+ p = i;
+ }
+ this.buffer = "";
+ ++p;
+ }
+ }
+ if (p < len && this.buffer === void 0) {
+ res += str.substring(p);
+ }
+ return res;
+ };
+ Decoder.prototype.reset = function() {
+ this.buffer = void 0;
+ };
+ module2.exports = Decoder;
}
});
-// node_modules/busboy/lib/types/urlencoded.js
+// node_modules/@fastify/busboy/lib/types/urlencoded.js
var require_urlencoded = __commonJS({
- "node_modules/busboy/lib/types/urlencoded.js"(exports2, module2) {
+ "node_modules/@fastify/busboy/lib/types/urlencoded.js"(exports2, module2) {
"use strict";
- var { Writable } = require("stream");
- var { getDecoder } = require_utils();
- var URLEncoded = class extends Writable {
- constructor(cfg) {
- const streamOpts = {
- autoDestroy: true,
- emitClose: true,
- highWaterMark: typeof cfg.highWaterMark === "number" ? cfg.highWaterMark : void 0
- };
- super(streamOpts);
- let charset = cfg.defCharset || "utf8";
- if (cfg.conType.params && typeof cfg.conType.params.charset === "string")
- charset = cfg.conType.params.charset;
- this.charset = charset;
- const limits = cfg.limits;
- this.fieldSizeLimit = limits && typeof limits.fieldSize === "number" ? limits.fieldSize : 1 * 1024 * 1024;
- this.fieldsLimit = limits && typeof limits.fields === "number" ? limits.fields : Infinity;
- this.fieldNameSizeLimit = limits && typeof limits.fieldNameSize === "number" ? limits.fieldNameSize : 100;
- this._inKey = true;
- this._keyTrunc = false;
- this._valTrunc = false;
- this._bytesKey = 0;
- this._bytesVal = 0;
- this._fields = 0;
- this._key = "";
- this._val = "";
- this._byte = -2;
- this._lastPos = 0;
- this._encode = 0;
- this._decoder = getDecoder(charset);
- }
- static detect(conType) {
- return conType.type === "application" && conType.subtype === "x-www-form-urlencoded";
- }
- _write(chunk, enc, cb) {
- if (this._fields >= this.fieldsLimit)
- return cb();
- let i = 0;
- const len = chunk.length;
- this._lastPos = 0;
- if (this._byte !== -2) {
- i = readPctEnc(this, chunk, i, len);
- if (i === -1)
- return cb(new Error("Malformed urlencoded form"));
- if (i >= len)
- return cb();
- if (this._inKey)
- ++this._bytesKey;
- else
- ++this._bytesVal;
- }
- main:
- while (i < len) {
- if (this._inKey) {
- i = skipKeyBytes(this, chunk, i, len);
- while (i < len) {
- switch (chunk[i]) {
- case 61:
- if (this._lastPos < i)
- this._key += chunk.latin1Slice(this._lastPos, i);
- this._lastPos = ++i;
- this._key = this._decoder(this._key, this._encode);
- this._encode = 0;
- this._inKey = false;
- continue main;
- case 38:
- if (this._lastPos < i)
- this._key += chunk.latin1Slice(this._lastPos, i);
- this._lastPos = ++i;
- this._key = this._decoder(this._key, this._encode);
- this._encode = 0;
- if (this._bytesKey > 0) {
- this.emit("field", this._key, "", {
- nameTruncated: this._keyTrunc,
- valueTruncated: false,
- encoding: this.charset,
- mimeType: "text/plain"
- });
- }
- this._key = "";
- this._val = "";
- this._keyTrunc = false;
- this._valTrunc = false;
- this._bytesKey = 0;
- this._bytesVal = 0;
- if (++this._fields >= this.fieldsLimit) {
- this.emit("fieldsLimit");
- return cb();
- }
- continue;
- case 43:
- if (this._lastPos < i)
- this._key += chunk.latin1Slice(this._lastPos, i);
- this._key += " ";
- this._lastPos = i + 1;
- break;
- case 37:
- if (this._encode === 0)
- this._encode = 1;
- if (this._lastPos < i)
- this._key += chunk.latin1Slice(this._lastPos, i);
- this._lastPos = i + 1;
- this._byte = -1;
- i = readPctEnc(this, chunk, i + 1, len);
- if (i === -1)
- return cb(new Error("Malformed urlencoded form"));
- if (i >= len)
- return cb();
- ++this._bytesKey;
- i = skipKeyBytes(this, chunk, i, len);
- continue;
- }
- ++i;
- ++this._bytesKey;
- i = skipKeyBytes(this, chunk, i, len);
- }
- if (this._lastPos < i)
- this._key += chunk.latin1Slice(this._lastPos, i);
+ var Decoder = require_Decoder();
+ var decodeText = require_decodeText();
+ var getLimit = require_getLimit();
+ var RE_CHARSET = /^charset$/i;
+ UrlEncoded.detect = /^application\/x-www-form-urlencoded/i;
+ function UrlEncoded(boy, cfg) {
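+ // Parser for application/x-www-form-urlencoded bodies: splits keys/values on "=" and "&",
+ // percent-decodes them via Decoder and honours the fieldSize/fieldNameSize/fields limits.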
+ const limits = cfg.limits;
+ const parsedConType = cfg.parsedConType;
+ this.boy = boy;
+ this.fieldSizeLimit = getLimit(limits, "fieldSize", 1 * 1024 * 1024);
+ this.fieldNameSizeLimit = getLimit(limits, "fieldNameSize", 100);
+ this.fieldsLimit = getLimit(limits, "fields", Infinity);
+ let charset;
+ for (var i = 0, len = parsedConType.length; i < len; ++i) {
+ if (Array.isArray(parsedConType[i]) && RE_CHARSET.test(parsedConType[i][0])) {
+ charset = parsedConType[i][1].toLowerCase();
+ break;
+ }
+ }
+ if (charset === void 0) {
+ charset = cfg.defCharset || "utf8";
+ }
+ this.decoder = new Decoder();
+ this.charset = charset;
+ this._fields = 0;
+ this._state = "key";
+ this._checkingBytes = true;
+ this._bytesKey = 0;
+ this._bytesVal = 0;
+ this._key = "";
+ this._val = "";
+ this._keyTrunc = false;
+ this._valTrunc = false;
+ this._hitLimit = false;
+ }
+ UrlEncoded.prototype.write = function(data, cb) {
+ if (this._fields === this.fieldsLimit) {
+ if (!this.boy.hitFieldsLimit) {
+ this.boy.hitFieldsLimit = true;
+ this.boy.emit("fieldsLimit");
+ }
+ return cb();
+ }
+ let idxeq;
+ let idxamp;
+ let i;
+ let p = 0;
+ const len = data.length;
+ while (p < len) {
+ if (this._state === "key") {
+ idxeq = idxamp = void 0;
+ for (i = p; i < len; ++i) {
+ if (!this._checkingBytes) {
+ ++p;
+ }
+ if (data[i] === 61) {
+ idxeq = i;
+ break;
+ } else if (data[i] === 38) {
+ idxamp = i;
+ break;
+ }
+ if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) {
+ this._hitLimit = true;
+ break;
+ } else if (this._checkingBytes) {
+ ++this._bytesKey;
+ }
+ }
+ if (idxeq !== void 0) {
+ if (idxeq > p) {
+ this._key += this.decoder.write(data.toString("binary", p, idxeq));
+ }
+ this._state = "val";
+ this._hitLimit = false;
+ this._checkingBytes = true;
+ this._val = "";
+ this._bytesVal = 0;
+ this._valTrunc = false;
+ this.decoder.reset();
+ p = idxeq + 1;
+ } else if (idxamp !== void 0) {
+ ++this._fields;
+ let key;
+ const keyTrunc = this._keyTrunc;
+ if (idxamp > p) {
+ key = this._key += this.decoder.write(data.toString("binary", p, idxamp));
} else {
- i = skipValBytes(this, chunk, i, len);
- while (i < len) {
- switch (chunk[i]) {
- case 38:
- if (this._lastPos < i)
- this._val += chunk.latin1Slice(this._lastPos, i);
- this._lastPos = ++i;
- this._inKey = true;
- this._val = this._decoder(this._val, this._encode);
- this._encode = 0;
- if (this._bytesKey > 0 || this._bytesVal > 0) {
- this.emit("field", this._key, this._val, {
- nameTruncated: this._keyTrunc,
- valueTruncated: this._valTrunc,
- encoding: this.charset,
- mimeType: "text/plain"
- });
- }
- this._key = "";
- this._val = "";
- this._keyTrunc = false;
- this._valTrunc = false;
- this._bytesKey = 0;
- this._bytesVal = 0;
- if (++this._fields >= this.fieldsLimit) {
- this.emit("fieldsLimit");
- return cb();
- }
- continue main;
- case 43:
- if (this._lastPos < i)
- this._val += chunk.latin1Slice(this._lastPos, i);
- this._val += " ";
- this._lastPos = i + 1;
- break;
- case 37:
- if (this._encode === 0)
- this._encode = 1;
- if (this._lastPos < i)
- this._val += chunk.latin1Slice(this._lastPos, i);
- this._lastPos = i + 1;
- this._byte = -1;
- i = readPctEnc(this, chunk, i + 1, len);
- if (i === -1)
- return cb(new Error("Malformed urlencoded form"));
- if (i >= len)
- return cb();
- ++this._bytesVal;
- i = skipValBytes(this, chunk, i, len);
- continue;
- }
- ++i;
- ++this._bytesVal;
- i = skipValBytes(this, chunk, i, len);
- }
- if (this._lastPos < i)
- this._val += chunk.latin1Slice(this._lastPos, i);
+ key = this._key;
+ }
+ this._hitLimit = false;
+ this._checkingBytes = true;
+ this._key = "";
+ this._bytesKey = 0;
+ this._keyTrunc = false;
+ this.decoder.reset();
+ if (key.length) {
+ this.boy.emit("field", decodeText(key, "binary", this.charset), "", keyTrunc, false);
+ }
+ p = idxamp + 1;
+ if (this._fields === this.fieldsLimit) {
+ return cb();
+ }
+ } else if (this._hitLimit) {
+ if (i > p) {
+ this._key += this.decoder.write(data.toString("binary", p, i));
}
+ p = i;
+ if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) {
+ this._checkingBytes = false;
+ this._keyTrunc = true;
+ }
+ } else {
+ if (p < len) {
+ this._key += this.decoder.write(data.toString("binary", p));
+ }
+ p = len;
+ }
+ } else {
+ idxamp = void 0;
+ for (i = p; i < len; ++i) {
+ if (!this._checkingBytes) {
+ ++p;
+ }
+ if (data[i] === 38) {
+ idxamp = i;
+ break;
+ }
+ if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) {
+ this._hitLimit = true;
+ break;
+ } else if (this._checkingBytes) {
+ ++this._bytesVal;
+ }
+ }
+ if (idxamp !== void 0) {
+ ++this._fields;
+ if (idxamp > p) {
+ this._val += this.decoder.write(data.toString("binary", p, idxamp));
+ }
+ this.boy.emit("field", decodeText(this._key, "binary", this.charset), decodeText(this._val, "binary", this.charset), this._keyTrunc, this._valTrunc);
+ this._state = "key";
+ this._hitLimit = false;
+ this._checkingBytes = true;
+ this._key = "";
+ this._bytesKey = 0;
+ this._keyTrunc = false;
+ this.decoder.reset();
+ p = idxamp + 1;
+ if (this._fields === this.fieldsLimit) {
+ return cb();
+ }
+ } else if (this._hitLimit) {
+ if (i > p) {
+ this._val += this.decoder.write(data.toString("binary", p, i));
+ }
+ p = i;
+ if (this._val === "" && this.fieldSizeLimit === 0 || (this._bytesVal = this._val.length) === this.fieldSizeLimit) {
+ this._checkingBytes = false;
+ this._valTrunc = true;
+ }
+ } else {
+ if (p < len) {
+ this._val += this.decoder.write(data.toString("binary", p));
+ }
+ p = len;
}
- cb();
- }
- _final(cb) {
- if (this._byte !== -2)
- return cb(new Error("Malformed urlencoded form"));
- if (!this._inKey || this._bytesKey > 0 || this._bytesVal > 0) {
- if (this._inKey)
- this._key = this._decoder(this._key, this._encode);
- else
- this._val = this._decoder(this._val, this._encode);
- this.emit("field", this._key, this._val, {
- nameTruncated: this._keyTrunc,
- valueTruncated: this._valTrunc,
- encoding: this.charset,
- mimeType: "text/plain"
- });
}
- cb();
}
+ cb();
};
- function readPctEnc(self, chunk, pos, len) {
- if (pos >= len)
- return len;
- if (self._byte === -1) {
- const hexUpper = HEX_VALUES[chunk[pos++]];
- if (hexUpper === -1)
- return -1;
- if (hexUpper >= 8)
- self._encode = 2;
- if (pos < len) {
- const hexLower = HEX_VALUES[chunk[pos++]];
- if (hexLower === -1)
- return -1;
- if (self._inKey)
- self._key += String.fromCharCode((hexUpper << 4) + hexLower);
- else
- self._val += String.fromCharCode((hexUpper << 4) + hexLower);
- self._byte = -2;
- self._lastPos = pos;
- } else {
- self._byte = hexUpper;
- }
- } else {
- const hexLower = HEX_VALUES[chunk[pos++]];
- if (hexLower === -1)
- return -1;
- if (self._inKey)
- self._key += String.fromCharCode((self._byte << 4) + hexLower);
- else
- self._val += String.fromCharCode((self._byte << 4) + hexLower);
- self._byte = -2;
- self._lastPos = pos;
- }
- return pos;
- }
- function skipKeyBytes(self, chunk, pos, len) {
- if (self._bytesKey > self.fieldNameSizeLimit) {
- if (!self._keyTrunc) {
- if (self._lastPos < pos)
- self._key += chunk.latin1Slice(self._lastPos, pos - 1);
- }
- self._keyTrunc = true;
- for (; pos < len; ++pos) {
- const code = chunk[pos];
- if (code === 61 || code === 38)
- break;
- ++self._bytesKey;
- }
- self._lastPos = pos;
+ UrlEncoded.prototype.end = function() {
+ if (this.boy._done) {
+ return;
}
- return pos;
- }
- function skipValBytes(self, chunk, pos, len) {
- if (self._bytesVal > self.fieldSizeLimit) {
- if (!self._valTrunc) {
- if (self._lastPos < pos)
- self._val += chunk.latin1Slice(self._lastPos, pos - 1);
- }
- self._valTrunc = true;
- for (; pos < len; ++pos) {
- if (chunk[pos] === 38)
- break;
- ++self._bytesVal;
- }
- self._lastPos = pos;
- }
- return pos;
- }
- var HEX_VALUES = [
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- 0,
- 1,
- 2,
- 3,
- 4,
- 5,
- 6,
- 7,
- 8,
- 9,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- 10,
- 11,
- 12,
- 13,
- 14,
- 15,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- 10,
- 11,
- 12,
- 13,
- 14,
- 15,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1,
- -1
- ];
- module2.exports = URLEncoded;
+ if (this._state === "key" && this._key.length > 0) {
+ this.boy.emit("field", decodeText(this._key, "binary", this.charset), "", this._keyTrunc, false);
+ } else if (this._state === "val") {
+ this.boy.emit("field", decodeText(this._key, "binary", this.charset), decodeText(this._val, "binary", this.charset), this._keyTrunc, this._valTrunc);
+ }
+ this.boy._done = true;
+ this.boy.emit("finish");
+ };
+ module2.exports = UrlEncoded;
}
});
-// node_modules/busboy/lib/index.js
-var require_lib = __commonJS({
- "node_modules/busboy/lib/index.js"(exports2, module2) {
+// node_modules/@fastify/busboy/lib/main.js
+var require_main = __commonJS({
+ "node_modules/@fastify/busboy/lib/main.js"(exports2, module2) {
"use strict";
- var { parseContentType } = require_utils();
- function getInstance(cfg) {
- const headers = cfg.headers;
- const conType = parseContentType(headers["content-type"]);
- if (!conType)
- throw new Error("Malformed content type");
- for (const type of TYPES) {
- const matched = type.detect(conType);
- if (!matched)
- continue;
- const instanceCfg = {
- limits: cfg.limits,
- headers,
- conType,
- highWaterMark: void 0,
- fileHwm: void 0,
- defCharset: void 0,
- defParamCharset: void 0,
- preservePath: false
- };
- if (cfg.highWaterMark)
- instanceCfg.highWaterMark = cfg.highWaterMark;
- if (cfg.fileHwm)
- instanceCfg.fileHwm = cfg.fileHwm;
- instanceCfg.defCharset = cfg.defCharset;
- instanceCfg.defParamCharset = cfg.defParamCharset;
- instanceCfg.preservePath = cfg.preservePath;
- return new type(instanceCfg);
- }
- throw new Error(`Unsupported content type: ${headers["content-type"]}`);
- }
- var TYPES = [
- require_multipart(),
- require_urlencoded()
- ].filter(function(typemod) {
- return typeof typemod.detect === "function";
- });
- module2.exports = (cfg) => {
- if (typeof cfg !== "object" || cfg === null)
- cfg = {};
- if (typeof cfg.headers !== "object" || cfg.headers === null || typeof cfg.headers["content-type"] !== "string") {
- throw new Error("Missing Content-Type");
+ var WritableStream = require("node:stream").Writable;
+ var { inherits } = require("node:util");
+ var Dicer = require_Dicer();
+ var MultipartParser = require_multipart();
+ var UrlencodedParser = require_urlencoded();
+ var parseParams = require_parseParams();
+ function Busboy(opts) {
+ if (!(this instanceof Busboy)) {
+ return new Busboy(opts);
+ }
+ if (typeof opts !== "object") {
+ throw new TypeError("Busboy expected an options-Object.");
+ }
+ if (typeof opts.headers !== "object") {
+ throw new TypeError("Busboy expected an options-Object with headers-attribute.");
+ }
+ if (typeof opts.headers["content-type"] !== "string") {
+ throw new TypeError("Missing Content-Type-header.");
+ }
+ const {
+ headers,
+ ...streamOptions
+ } = opts;
+ this.opts = {
+ autoDestroy: false,
+ ...streamOptions
+ };
+ WritableStream.call(this, this.opts);
+ this._done = false;
+ this._parser = this.getParserByHeaders(headers);
+ this._finished = false;
+ }
+ inherits(Busboy, WritableStream);
+ Busboy.prototype.emit = function(ev) {
+ if (ev === "finish") {
+ if (!this._done) {
+ this._parser?.end();
+ return;
+ } else if (this._finished) {
+ return;
+ }
+ this._finished = true;
+ }
+ WritableStream.prototype.emit.apply(this, arguments);
+ };
+ Busboy.prototype.getParserByHeaders = function(headers) {
+ const parsed = parseParams(headers["content-type"]);
+ const cfg = {
+ defCharset: this.opts.defCharset,
+ fileHwm: this.opts.fileHwm,
+ headers,
+ highWaterMark: this.opts.highWaterMark,
+ isPartAFile: this.opts.isPartAFile,
+ limits: this.opts.limits,
+ parsedConType: parsed,
+ preservePath: this.opts.preservePath
+ };
+ if (MultipartParser.detect.test(parsed[0])) {
+ return new MultipartParser(this, cfg);
+ }
+ if (UrlencodedParser.detect.test(parsed[0])) {
+ return new UrlencodedParser(this, cfg);
}
- return getInstance(cfg);
+ throw new Error("Unsupported Content-Type.");
};
+ Busboy.prototype._write = function(chunk, encoding, cb) {
+ this._parser.write(chunk, cb);
+ };
+ module2.exports = Busboy;
+ module2.exports.default = Busboy;
+ module2.exports.Busboy = Busboy;
+ module2.exports.Dicer = Dicer;
}
});
@@ -6352,7 +4040,7 @@ var require_formdata = __commonJS({
var require_body = __commonJS({
"lib/fetch/body.js"(exports2, module2) {
"use strict";
- var Busboy = require_lib();
+ var Busboy = require_main();
var util = require_util();
var {
ReadableStreamFrom,
@@ -6583,10 +4271,9 @@ Content-Type: ${value.type || "application/octet-stream"}\r
const responseFormData = new FormData();
let busboy;
try {
- busboy = Busboy({
+ busboy = new Busboy({
headers,
- preservePath: true,
- defParamCharset: "utf8"
+ preservePath: true
});
} catch (err) {
throw new DOMException(`${err}`, "AbortError");
@@ -6594,8 +4281,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r
busboy.on("field", (name, value) => {
responseFormData.append(name, value);
});
- busboy.on("file", (name, value, info) => {
- const { filename, encoding, mimeType } = info;
+ busboy.on("file", (name, value, filename, encoding, mimeType) => {
const chunks = [];
if (encoding === "base64" || encoding.toLowerCase() === "base64") {
let base64chunk = "";
@@ -7100,6 +4786,12 @@ var require_dispatcher_weakref = __commonJS({
}
};
module2.exports = function() {
+ if (process.env.NODE_V8_COVERAGE) {
+ return {
+ WeakRef: CompatWeakRef,
+ FinalizationRegistry: CompatFinalizer
+ };
+ }
return {
WeakRef: global.WeakRef || CompatWeakRef,
FinalizationRegistry: global.FinalizationRegistry || CompatFinalizer
@@ -8486,7 +6178,7 @@ var require_connect = __commonJS({
var { InvalidArgumentError, ConnectTimeoutError } = require_errors();
var tls;
var SessionCache;
- if (global.FinalizationRegistry) {
+ if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) {
SessionCache = class WeakSessionCache {
constructor(maxCachedSessions) {
this._maxCachedSessions = maxCachedSessions;
@@ -8629,7 +6321,7 @@ var require_connect = __commonJS({
});
// lib/llhttp/utils.js
-var require_utils2 = __commonJS({
+var require_utils = __commonJS({
"lib/llhttp/utils.js"(exports2) {
"use strict";
Object.defineProperty(exports2, "__esModule", { value: true });
@@ -8654,7 +6346,7 @@ var require_constants2 = __commonJS({
"use strict";
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.SPECIAL_HEADERS = exports2.HEADER_STATE = exports2.MINOR = exports2.MAJOR = exports2.CONNECTION_TOKEN_CHARS = exports2.HEADER_CHARS = exports2.TOKEN = exports2.STRICT_TOKEN = exports2.HEX = exports2.URL_CHAR = exports2.STRICT_URL_CHAR = exports2.USERINFO_CHARS = exports2.MARK = exports2.ALPHANUM = exports2.NUM = exports2.HEX_MAP = exports2.NUM_MAP = exports2.ALPHA = exports2.FINISH = exports2.H_METHOD_MAP = exports2.METHOD_MAP = exports2.METHODS_RTSP = exports2.METHODS_ICE = exports2.METHODS_HTTP = exports2.METHODS = exports2.LENIENT_FLAGS = exports2.FLAGS = exports2.TYPE = exports2.ERROR = void 0;
- var utils_1 = require_utils2();
+ var utils_1 = require_utils();
var ERROR;
(function(ERROR2) {
ERROR2[ERROR2["OK"] = 0] = "OK";
diff --git a/doc/contributing/maintaining/maintaining-dependencies.md b/doc/contributing/maintaining/maintaining-dependencies.md
index ff33469b03b901..0e421a240c3d1f 100644
--- a/doc/contributing/maintaining/maintaining-dependencies.md
+++ b/doc/contributing/maintaining/maintaining-dependencies.md
@@ -28,7 +28,7 @@ This is a list of all the dependencies:
* [openssl 3.0.8][]
* [postject 1.0.0-alpha.6][]
* [simdutf 3.2.17][]
-* [undici 5.25.2][]
+* [undici 5.25.4][]
* [uvwasi 0.0.19][]
* [V8 11.3.244.8][]
* [zlib 1.2.13.1-motley-f5fd0ad][]
@@ -291,7 +291,7 @@ The [postject](https://github.com/nodejs/postject) dependency is used for the
The [simdutf](https://github.com/simdutf/simdutf) dependency is
a C++ library for fast UTF-8 decoding and encoding.
-### undici 5.25.2
+### undici 5.25.4
The [undici](https://github.com/nodejs/undici) dependency is an HTTP/1.1 client,
written from scratch for Node.js.
@@ -345,7 +345,7 @@ performance improvements not currently available in standard zlib.
[openssl 3.0.8]: #openssl-308
[postject 1.0.0-alpha.6]: #postject-100-alpha6
[simdutf 3.2.17]: #simdutf-3217
-[undici 5.25.2]: #undici-5252
+[undici 5.25.4]: #undici-5254
[update-openssl-action]: ../../../.github/workflows/update-openssl.yml
[uvwasi 0.0.19]: #uvwasi-0019
[v8 11.3.244.8]: #v8-1132448
diff --git a/src/undici_version.h b/src/undici_version.h
index d47c6d538b7355..dc4530d2b86b57 100644
--- a/src/undici_version.h
+++ b/src/undici_version.h
@@ -2,5 +2,5 @@
// Refer to tools/update-undici.sh
#ifndef SRC_UNDICI_VERSION_H_
#define SRC_UNDICI_VERSION_H_
-#define UNDICI_VERSION "5.25.2"
+#define UNDICI_VERSION "5.25.4"
#endif // SRC_UNDICI_VERSION_H_