diff --git a/library/.oxlintrc.json b/library/.oxlintrc.json
index 48cc5dcda..a84555584 100644
--- a/library/.oxlintrc.json
+++ b/library/.oxlintrc.json
@@ -31,5 +31,5 @@
}
}
],
- "ignorePatterns": ["**/wasm/**"]
+ "ignorePatterns": ["**/wasm/**", "helpers/form-parsing/**"]
}
diff --git a/library/helpers/form-parsing/LICENSE b/library/helpers/form-parsing/LICENSE
new file mode 100644
index 000000000..e138e7181
--- /dev/null
+++ b/library/helpers/form-parsing/LICENSE
@@ -0,0 +1,24 @@
+MIT License
+
+Copyright Brian White. All rights reserved.
+Copyright (c) 2021-present The Fastify team
+
+The Fastify team members are listed at https://github.com/fastify/fastify#team.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/library/helpers/form-parsing/README.md b/library/helpers/form-parsing/README.md
new file mode 100644
index 000000000..612f8ff61
--- /dev/null
+++ b/library/helpers/form-parsing/README.md
@@ -0,0 +1,285 @@
+# busboy
+
+
+
+[](https://github.com/fastify/busboy/actions)
+[](https://standardjs.com/)
+[](https://github.com/fastify/.github/blob/main/SECURITY.md)
+
+
+
+
+
+[](https://www.npmjs.com/package/@fastify/busboy)
+[](https://www.npmjs.com/package/@fastify/busboy)
+
+
+
+# Description
+
+A Node.js module for parsing incoming HTML form data.
+
+This is an officially supported fork by [fastify](https://github.com/fastify/) organization of the amazing library [originally created](https://github.com/mscdex/busboy) by Brian White,
+aimed at addressing long-standing issues with it.
+
+Benchmark (Mean time for 500 Kb payload, 2000 cycles, 1000 cycle warmup):
+
+| Library | Version | Mean time in nanoseconds (less is better) |
+| --------------- | ------- | ----------------------------------------- |
+| busboy | 0.3.1 | `340114` |
+| @fastify/busboy | 1.0.0 | `270984` |
+
+[Changelog](https://github.com/fastify/busboy/blob/main/CHANGELOG.md) since busboy 0.3.1.
+
+# Requirements
+
+- [Node.js](http://nodejs.org/) 10+
+
+# Install
+
+ npm i @fastify/busboy
+
+# Examples
+
+- Parsing (multipart) with default options:
+
+```javascript
+const http = require("node:http");
+const { inspect } = require("node:util");
+const Busboy = require("@fastify/busboy");
+
+http
+ .createServer((req, res) => {
+ if (req.method === "POST") {
+ const busboy = new Busboy({ headers: req.headers });
+ busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
+ console.log(
+        `File [${fieldname}]: filename: ${filename}, encoding: ${encoding}, mimetype: ${mimetype}`
+ );
+ file.on("data", (data) => {
+ console.log(`File [${fieldname}] got ${data.length} bytes`);
+ });
+ file.on("end", () => {
+ console.log(`File [${fieldname}] Finished`);
+ });
+ });
+ busboy.on(
+ "field",
+ (
+ fieldname,
+ val,
+ fieldnameTruncated,
+ valTruncated,
+ encoding,
+ mimetype
+ ) => {
+ console.log(`Field [${fieldname}]: value: ${inspect(val)}`);
+ }
+ );
+ busboy.on("finish", () => {
+ console.log("Done parsing form!");
+ res.writeHead(303, { Connection: "close", Location: "/" });
+ res.end();
+ });
+ req.pipe(busboy);
+ } else if (req.method === "GET") {
+ res.writeHead(200, { Connection: "close" });
+ res.end(`
+
+ `);
+ }
+ })
+ .listen(8000, () => {
+ console.log("Listening for requests");
+ });
+
+// Example output, using http://nodejs.org/images/ryan-speaker.jpg as the file:
+//
+// Listening for requests
+// File [filefield]: filename: ryan-speaker.jpg, encoding: binary
+// File [filefield] got 11971 bytes
+// Field [textfield]: value: 'testing! :-)'
+// File [filefield] Finished
+// Done parsing form!
+```
+
+- Save all incoming files to disk:
+
+```javascript
+const http = require("node:http");
+const path = require("node:path");
+const os = require("node:os");
+const fs = require("node:fs");
+
+const Busboy = require("@fastify/busboy");
+
+http
+ .createServer(function (req, res) {
+ if (req.method === "POST") {
+ const busboy = new Busboy({ headers: req.headers });
+ busboy.on(
+ "file",
+ function (fieldname, file, filename, encoding, mimetype) {
+ var saveTo = path.join(os.tmpdir(), path.basename(fieldname));
+ file.pipe(fs.createWriteStream(saveTo));
+ }
+ );
+ busboy.on("finish", function () {
+ res.writeHead(200, { Connection: "close" });
+ res.end("That's all folks!");
+ });
+ return req.pipe(busboy);
+ }
+ res.writeHead(404);
+ res.end();
+ })
+ .listen(8000, function () {
+ console.log("Listening for requests");
+ });
+```
+
+- Parsing (urlencoded) with default options:
+
+```javascript
+const http = require("node:http");
+const { inspect } = require("node:util");
+
+const Busboy = require("@fastify/busboy");
+
+http
+ .createServer(function (req, res) {
+ if (req.method === "POST") {
+ const busboy = new Busboy({ headers: req.headers });
+ busboy.on(
+ "file",
+ function (fieldname, file, filename, encoding, mimetype) {
+ console.log("File [" + fieldname + "]: filename: " + filename);
+ file.on("data", function (data) {
+ console.log(
+ "File [" + fieldname + "] got " + data.length + " bytes"
+ );
+ });
+ file.on("end", function () {
+ console.log("File [" + fieldname + "] Finished");
+ });
+ }
+ );
+ busboy.on(
+ "field",
+ function (fieldname, val, fieldnameTruncated, valTruncated) {
+ console.log("Field [" + fieldname + "]: value: " + inspect(val));
+ }
+ );
+ busboy.on("finish", function () {
+ console.log("Done parsing form!");
+ res.writeHead(303, { Connection: "close", Location: "/" });
+ res.end();
+ });
+ req.pipe(busboy);
+ } else if (req.method === "GET") {
+ res.writeHead(200, { Connection: "close" });
+ res.end(
+ '\
+ \
+ '
+ );
+ }
+ })
+ .listen(8000, function () {
+ console.log("Listening for requests");
+ });
+
+// Example output:
+//
+// Listening for requests
+// Field [textfield]: value: 'testing! :-)'
+// Field [selectfield]: value: '9001'
+// Field [checkfield]: value: 'on'
+// Done parsing form!
+```
+
+# API
+
+_Busboy_ is a _Writable_ stream
+
+## Busboy (special) events
+
+- **file**(< _string_ >fieldname, < _ReadableStream_ >stream, < _string_ >filename, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new file form field found. `transferEncoding` contains the 'Content-Transfer-Encoding' value for the file stream. `mimeType` contains the 'Content-Type' value for the file stream.
+ - Note: if you listen for this event, you should always handle the `stream` no matter if you care about the file contents or not (e.g. you can simply just do `stream.resume();` if you want to discard the contents), otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any** incoming files, you can simply not listen for the 'file' event at all and any/all files will be automatically and safely discarded (these discarded files do still count towards `files` and `parts` limits).
+ - If a configured file size limit was reached, `stream` will both have a boolean property `truncated` (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens.
+ - The property `bytesRead` informs about the number of bytes that have been read so far.
+
+- **field**(< _string_ >fieldname, < _string_ >value, < _boolean_ >fieldnameTruncated, < _boolean_ >valueTruncated, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new non-file field found.
+
+- **partsLimit**() - Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted.
+
+- **filesLimit**() - Emitted when specified `files` limit has been reached. No more 'file' events will be emitted.
+
+- **fieldsLimit**() - Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted.
+
+## Busboy methods
+
+- **(constructor)**(< _object_ >config) - Creates and returns a new Busboy instance.
+ - The constructor takes the following valid `config` settings:
+ - **headers** - _object_ - These are the HTTP headers of the incoming request, which are used by individual parsers.
+
+ - **autoDestroy** - _boolean_ - Whether this stream should automatically call .destroy() on itself after ending. (Default: false).
+
+ - **highWaterMark** - _integer_ - highWaterMark to use for this Busboy instance (Default: WritableStream default).
+
+ - **fileHwm** - _integer_ - highWaterMark to use for file streams (Default: ReadableStream default).
+
+ - **defCharset** - _string_ - Default character set to use when one isn't defined (Default: 'utf8').
+
+ - **preservePath** - _boolean_ - If paths in the multipart 'filename' field shall be preserved. (Default: false).
+
+ - **isPartAFile** - **function** - Use this function to override the default file detection functionality. It has following parameters:
+ - fieldName - **string** The name of the field.
+
+ - contentType - **string** The content-type of the part, e.g. `text/plain`, `image/jpeg`, `application/octet-stream`
+
+ - fileName - **string** The name of a file supplied by the part.
+
+ (Default: `(fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)`)
+
+ - **limits** - _object_ - Various limits on incoming data. Valid properties are:
+ - **fieldNameSize** - _integer_ - Max field name size (in bytes) (Default: 100 bytes).
+
+ - **fieldSize** - _integer_ - Max field value size (in bytes) (Default: 1 MiB, which is 1024 x 1024 bytes).
+
+ - **fields** - _integer_ - Max number of non-file fields (Default: Infinity).
+
+ - **fileSize** - _integer_ - For multipart forms, the max file size (in bytes) (Default: Infinity).
+
+ - **files** - _integer_ - For multipart forms, the max number of file fields (Default: Infinity).
+
+ - **parts** - _integer_ - For multipart forms, the max number of parts (fields + files) (Default: Infinity).
+
+ - **headerPairs** - _integer_ - For multipart forms, the max number of header key=>value pairs to parse **Default:** 2000
+
+ - **headerSize** - _integer_ - For multipart forms, the max size of a multipart header **Default:** 81920.
+
+ - The constructor can throw errors:
+ - **Busboy expected an options-Object.** - Busboy expected an Object as first parameters.
+
+ - **Busboy expected an options-Object with headers-attribute.** - The first parameter is lacking of a headers-attribute.
+
+ - **Limit $limit is not a valid number** - Busboy expected the desired limit to be of type number. Busboy throws this Error to prevent a potential security issue by falling silently back to the Busboy-defaults. Potential source for this Error can be the direct use of environment variables without transforming them to the type number.
+
+ - **Unsupported Content-Type.** - The `Content-Type` isn't one Busboy can parse.
+
+ - **Missing Content-Type-header.** - The provided headers don't include `Content-Type` at all.
diff --git a/library/helpers/form-parsing/deps/dicer/LICENSE b/library/helpers/form-parsing/deps/dicer/LICENSE
new file mode 100644
index 000000000..290762e94
--- /dev/null
+++ b/library/helpers/form-parsing/deps/dicer/LICENSE
@@ -0,0 +1,19 @@
+Copyright Brian White. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
\ No newline at end of file
diff --git a/library/helpers/form-parsing/deps/dicer/lib/Dicer.d.ts b/library/helpers/form-parsing/deps/dicer/lib/Dicer.d.ts
new file mode 100644
index 000000000..c1c698bad
--- /dev/null
+++ b/library/helpers/form-parsing/deps/dicer/lib/Dicer.d.ts
@@ -0,0 +1,221 @@
+// Type definitions for dicer 0.2
+// Project: https://github.com/mscdex/dicer
+// Definitions by: BendingBender
+// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
+// TypeScript Version: 2.2
+/// <reference types="node" />
+
+import stream = require("stream");
+
+// tslint:disable:unified-signatures
+
+/**
+ * A very fast streaming multipart parser for node.js.
+ * Dicer is a WritableStream
+ *
+ * Dicer (special) events:
+ * - on('finish', ()) - Emitted when all parts have been parsed and the Dicer instance has been ended.
+ * - on('part', (stream: PartStream)) - Emitted when a new part has been found.
+ * - on('preamble', (stream: PartStream)) - Emitted for preamble if you should happen to need it (can usually be ignored).
+ * - on('trailer', (data: Buffer)) - Emitted when trailing data was found after the terminating boundary (as with the preamble, this can usually be ignored too).
+ */
+export class Dicer extends stream.Writable {
+ /**
+ * Creates and returns a new Dicer instance with the following valid config settings:
+ *
+ * @param config The configuration to use
+ */
+ constructor(config: Dicer.Config);
+ /**
+ * Sets the boundary to use for parsing and performs some initialization needed for parsing.
+ * You should only need to use this if you set headerFirst to true in the constructor and are parsing the boundary from the preamble header.
+ *
+ * @param boundary The boundary to use
+ */
+ setBoundary(boundary: string): void;
+ addListener(event: "finish", listener: () => void): this;
+ addListener(
+ event: "part",
+ listener: (stream: Dicer.PartStream) => void
+ ): this;
+ addListener(
+ event: "preamble",
+ listener: (stream: Dicer.PartStream) => void
+ ): this;
+ addListener(event: "trailer", listener: (data: Buffer) => void): this;
+ addListener(event: "close", listener: () => void): this;
+ addListener(event: "drain", listener: () => void): this;
+ addListener(event: "error", listener: (err: Error) => void): this;
+ addListener(event: "pipe", listener: (src: stream.Readable) => void): this;
+ addListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
+ addListener(event: string, listener: (...args: any[]) => void): this;
+ on(event: "finish", listener: () => void): this;
+ on(event: "part", listener: (stream: Dicer.PartStream) => void): this;
+ on(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
+ on(event: "trailer", listener: (data: Buffer) => void): this;
+ on(event: "close", listener: () => void): this;
+ on(event: "drain", listener: () => void): this;
+ on(event: "error", listener: (err: Error) => void): this;
+ on(event: "pipe", listener: (src: stream.Readable) => void): this;
+ on(event: "unpipe", listener: (src: stream.Readable) => void): this;
+ on(event: string, listener: (...args: any[]) => void): this;
+ once(event: "finish", listener: () => void): this;
+ once(event: "part", listener: (stream: Dicer.PartStream) => void): this;
+ once(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
+ once(event: "trailer", listener: (data: Buffer) => void): this;
+ once(event: "close", listener: () => void): this;
+ once(event: "drain", listener: () => void): this;
+ once(event: "error", listener: (err: Error) => void): this;
+ once(event: "pipe", listener: (src: stream.Readable) => void): this;
+ once(event: "unpipe", listener: (src: stream.Readable) => void): this;
+ once(event: string, listener: (...args: any[]) => void): this;
+ prependListener(event: "finish", listener: () => void): this;
+ prependListener(
+ event: "part",
+ listener: (stream: Dicer.PartStream) => void
+ ): this;
+ prependListener(
+ event: "preamble",
+ listener: (stream: Dicer.PartStream) => void
+ ): this;
+ prependListener(event: "trailer", listener: (data: Buffer) => void): this;
+ prependListener(event: "close", listener: () => void): this;
+ prependListener(event: "drain", listener: () => void): this;
+ prependListener(event: "error", listener: (err: Error) => void): this;
+ prependListener(
+ event: "pipe",
+ listener: (src: stream.Readable) => void
+ ): this;
+ prependListener(
+ event: "unpipe",
+ listener: (src: stream.Readable) => void
+ ): this;
+ prependListener(event: string, listener: (...args: any[]) => void): this;
+ prependOnceListener(event: "finish", listener: () => void): this;
+ prependOnceListener(
+ event: "part",
+ listener: (stream: Dicer.PartStream) => void
+ ): this;
+ prependOnceListener(
+ event: "preamble",
+ listener: (stream: Dicer.PartStream) => void
+ ): this;
+ prependOnceListener(event: "trailer", listener: (data: Buffer) => void): this;
+ prependOnceListener(event: "close", listener: () => void): this;
+ prependOnceListener(event: "drain", listener: () => void): this;
+ prependOnceListener(event: "error", listener: (err: Error) => void): this;
+ prependOnceListener(
+ event: "pipe",
+ listener: (src: stream.Readable) => void
+ ): this;
+ prependOnceListener(
+ event: "unpipe",
+ listener: (src: stream.Readable) => void
+ ): this;
+ prependOnceListener(event: string, listener: (...args: any[]) => void): this;
+ removeListener(event: "finish", listener: () => void): this;
+ removeListener(
+ event: "part",
+ listener: (stream: Dicer.PartStream) => void
+ ): this;
+ removeListener(
+ event: "preamble",
+ listener: (stream: Dicer.PartStream) => void
+ ): this;
+ removeListener(event: "trailer", listener: (data: Buffer) => void): this;
+ removeListener(event: "close", listener: () => void): this;
+ removeListener(event: "drain", listener: () => void): this;
+ removeListener(event: "error", listener: (err: Error) => void): this;
+ removeListener(event: "pipe", listener: (src: stream.Readable) => void): this;
+ removeListener(
+ event: "unpipe",
+ listener: (src: stream.Readable) => void
+ ): this;
+ removeListener(event: string, listener: (...args: any[]) => void): this;
+}
+
+declare namespace Dicer {
+ interface Config {
+ /**
+ * This is the boundary used to detect the beginning of a new part.
+ */
+ boundary?: string | undefined;
+ /**
+ * If true, preamble header parsing will be performed first.
+ */
+ headerFirst?: boolean | undefined;
+ /**
+ * The maximum number of header key=>value pairs to parse Default: 2000 (same as node's http).
+ */
+ maxHeaderPairs?: number | undefined;
+ }
+
+ /**
+ * PartStream is a _ReadableStream_
+ *
+ * PartStream (special) events:
+ * - on('header', (header: object)) - An object containing the header for this particular part. Each property value is an array of one or more string values.
+ */
+ interface PartStream extends stream.Readable {
+ addListener(event: "header", listener: (header: object) => void): this;
+ addListener(event: "close", listener: () => void): this;
+ addListener(
+ event: "data",
+ listener: (chunk: Buffer | string) => void
+ ): this;
+ addListener(event: "end", listener: () => void): this;
+ addListener(event: "readable", listener: () => void): this;
+ addListener(event: "error", listener: (err: Error) => void): this;
+ addListener(event: string, listener: (...args: any[]) => void): this;
+ on(event: "header", listener: (header: object) => void): this;
+ on(event: "close", listener: () => void): this;
+ on(event: "data", listener: (chunk: Buffer | string) => void): this;
+ on(event: "end", listener: () => void): this;
+ on(event: "readable", listener: () => void): this;
+ on(event: "error", listener: (err: Error) => void): this;
+ on(event: string, listener: (...args: any[]) => void): this;
+ once(event: "header", listener: (header: object) => void): this;
+ once(event: "close", listener: () => void): this;
+ once(event: "data", listener: (chunk: Buffer | string) => void): this;
+ once(event: "end", listener: () => void): this;
+ once(event: "readable", listener: () => void): this;
+ once(event: "error", listener: (err: Error) => void): this;
+ once(event: string, listener: (...args: any[]) => void): this;
+ prependListener(event: "header", listener: (header: object) => void): this;
+ prependListener(event: "close", listener: () => void): this;
+ prependListener(
+ event: "data",
+ listener: (chunk: Buffer | string) => void
+ ): this;
+ prependListener(event: "end", listener: () => void): this;
+ prependListener(event: "readable", listener: () => void): this;
+ prependListener(event: "error", listener: (err: Error) => void): this;
+ prependListener(event: string, listener: (...args: any[]) => void): this;
+ prependOnceListener(
+ event: "header",
+ listener: (header: object) => void
+ ): this;
+ prependOnceListener(event: "close", listener: () => void): this;
+ prependOnceListener(
+ event: "data",
+ listener: (chunk: Buffer | string) => void
+ ): this;
+ prependOnceListener(event: "end", listener: () => void): this;
+ prependOnceListener(event: "readable", listener: () => void): this;
+ prependOnceListener(event: "error", listener: (err: Error) => void): this;
+ prependOnceListener(
+ event: string,
+ listener: (...args: any[]) => void
+ ): this;
+ removeListener(event: "header", listener: (header: object) => void): this;
+ removeListener(event: "close", listener: () => void): this;
+ removeListener(
+ event: "data",
+ listener: (chunk: Buffer | string) => void
+ ): this;
+ removeListener(event: "end", listener: () => void): this;
+ removeListener(event: "readable", listener: () => void): this;
+ removeListener(event: "error", listener: (err: Error) => void): this;
+ removeListener(event: string, listener: (...args: any[]) => void): this;
+ }
+}
diff --git a/library/helpers/form-parsing/deps/dicer/lib/Dicer.js b/library/helpers/form-parsing/deps/dicer/lib/Dicer.js
new file mode 100644
index 000000000..7b4a7f585
--- /dev/null
+++ b/library/helpers/form-parsing/deps/dicer/lib/Dicer.js
@@ -0,0 +1,268 @@
+"use strict";
+
+const WritableStream = require("node:stream").Writable;
+const inherits = require("node:util").inherits;
+
+const StreamSearch = require("../../streamsearch/sbmh");
+
+const PartStream = require("./PartStream");
+const HeaderParser = require("./HeaderParser");
+
+const DASH = 45;
+const B_ONEDASH = Buffer.from("-");
+const B_CRLF = Buffer.from("\r\n");
+const EMPTY_FN = function () {};
+
+function Dicer(cfg) {
+ if (!(this instanceof Dicer)) {
+ return new Dicer(cfg);
+ }
+ WritableStream.call(this, cfg);
+
+ if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== "string")) {
+ throw new TypeError("Boundary required");
+ }
+
+ if (typeof cfg.boundary === "string") {
+ this.setBoundary(cfg.boundary);
+ } else {
+ this._bparser = undefined;
+ }
+
+ this._headerFirst = cfg.headerFirst;
+
+ this._dashes = 0;
+ this._parts = 0;
+ this._finished = false;
+ this._realFinish = false;
+ this._isPreamble = true;
+ this._justMatched = false;
+ this._firstWrite = true;
+ this._inHeader = true;
+ this._part = undefined;
+ this._cb = undefined;
+ this._ignoreData = false;
+ this._partOpts = { highWaterMark: cfg.partHwm };
+ this._pause = false;
+
+ const self = this;
+ this._hparser = new HeaderParser(cfg);
+ this._hparser.on("header", function (header) {
+ self._inHeader = false;
+ self._part.emit("header", header);
+ });
+}
+inherits(Dicer, WritableStream);
+
+Dicer.prototype.emit = function (ev) {
+ if (ev === "finish" && !this._realFinish) {
+ if (!this._finished) {
+ const self = this;
+ process.nextTick(function () {
+ self.emit("error", new Error("Unexpected end of multipart data"));
+ if (self._part && !self._ignoreData) {
+ const type = self._isPreamble ? "Preamble" : "Part";
+ self._part.emit(
+ "error",
+ new Error(
+ type + " terminated early due to unexpected end of multipart data"
+ )
+ );
+ self._part.push(null);
+ process.nextTick(function () {
+ self._realFinish = true;
+ self.emit("finish");
+ self._realFinish = false;
+ });
+ return;
+ }
+ self._realFinish = true;
+ self.emit("finish");
+ self._realFinish = false;
+ });
+ }
+ } else {
+ WritableStream.prototype.emit.apply(this, arguments);
+ }
+};
+
+Dicer.prototype._write = function (data, encoding, cb) {
+ // ignore unexpected data (e.g. extra trailer data after finished)
+ if (!this._hparser && !this._bparser) {
+ return cb();
+ }
+
+ if (this._headerFirst && this._isPreamble) {
+ if (!this._part) {
+ this._part = new PartStream(this._partOpts);
+ if (this.listenerCount("preamble") !== 0) {
+ this.emit("preamble", this._part);
+ } else {
+ this._ignore();
+ }
+ }
+ const r = this._hparser.push(data);
+ if (!this._inHeader && r !== undefined && r < data.length) {
+ data = data.slice(r);
+ } else {
+ return cb();
+ }
+ }
+
+ // allows for "easier" testing
+ if (this._firstWrite) {
+ this._bparser.push(B_CRLF);
+ this._firstWrite = false;
+ }
+
+ this._bparser.push(data);
+
+ if (this._pause) {
+ this._cb = cb;
+ } else {
+ cb();
+ }
+};
+
+Dicer.prototype.reset = function () {
+ this._part = undefined;
+ this._bparser = undefined;
+ this._hparser = undefined;
+};
+
+Dicer.prototype.setBoundary = function (boundary) {
+ const self = this;
+ this._bparser = new StreamSearch("\r\n--" + boundary);
+ this._bparser.on("info", function (isMatch, data, start, end) {
+ self._oninfo(isMatch, data, start, end);
+ });
+};
+
+Dicer.prototype._ignore = function () {
+ if (this._part && !this._ignoreData) {
+ this._ignoreData = true;
+ this._part.on("error", EMPTY_FN);
+ // we must perform some kind of read on the stream even though we are
+ // ignoring the data, otherwise node's Readable stream will not emit 'end'
+ // after pushing null to the stream
+ this._part.resume();
+ }
+};
+
+Dicer.prototype._oninfo = function (isMatch, data, start, end) {
+ let buf;
+ const self = this;
+ let i = 0;
+ let r;
+ let shouldWriteMore = true;
+
+ if (!this._part && this._justMatched && data) {
+ while (this._dashes < 2 && start + i < end) {
+ if (data[start + i] === DASH) {
+ ++i;
+ ++this._dashes;
+ } else {
+ if (this._dashes) {
+ buf = B_ONEDASH;
+ }
+ this._dashes = 0;
+ break;
+ }
+ }
+ if (this._dashes === 2) {
+ if (start + i < end && this.listenerCount("trailer") !== 0) {
+ this.emit("trailer", data.slice(start + i, end));
+ }
+ this.reset();
+ this._finished = true;
+ // no more parts will be added
+ if (self._parts === 0) {
+ self._realFinish = true;
+ self.emit("finish");
+ self._realFinish = false;
+ }
+ }
+ if (this._dashes) {
+ return;
+ }
+ }
+ if (this._justMatched) {
+ this._justMatched = false;
+ }
+ if (!this._part) {
+ this._part = new PartStream(this._partOpts);
+ this._part._read = function (n) {
+ self._unpause();
+ };
+ if (this._isPreamble && this.listenerCount("preamble") !== 0) {
+ this.emit("preamble", this._part);
+ } else if (this._isPreamble !== true && this.listenerCount("part") !== 0) {
+ this.emit("part", this._part);
+ } else {
+ this._ignore();
+ }
+ if (!this._isPreamble) {
+ this._inHeader = true;
+ }
+ }
+ if (data && start < end && !this._ignoreData) {
+ if (this._isPreamble || !this._inHeader) {
+ if (buf) {
+ shouldWriteMore = this._part.push(buf);
+ }
+ shouldWriteMore = this._part.push(data.slice(start, end));
+ if (!shouldWriteMore) {
+ this._pause = true;
+ }
+ } else if (!this._isPreamble && this._inHeader) {
+ if (buf) {
+ this._hparser.push(buf);
+ }
+ r = this._hparser.push(data.slice(start, end));
+ if (!this._inHeader && r !== undefined && r < end) {
+ this._oninfo(false, data, start + r, end);
+ }
+ }
+ }
+ if (isMatch) {
+ this._hparser.reset();
+ if (this._isPreamble) {
+ this._isPreamble = false;
+ } else {
+ if (start !== end) {
+ ++this._parts;
+ this._part.on("end", function () {
+ if (--self._parts === 0) {
+ if (self._finished) {
+ self._realFinish = true;
+ self.emit("finish");
+ self._realFinish = false;
+ } else {
+ self._unpause();
+ }
+ }
+ });
+ }
+ }
+ this._part.push(null);
+ this._part = undefined;
+ this._ignoreData = false;
+ this._justMatched = true;
+ this._dashes = 0;
+ }
+};
+
+Dicer.prototype._unpause = function () {
+ if (!this._pause) {
+ return;
+ }
+
+ this._pause = false;
+ if (this._cb) {
+ const cb = this._cb;
+ this._cb = undefined;
+ cb();
+ }
+};
+
+module.exports = Dicer;
diff --git a/library/helpers/form-parsing/deps/dicer/lib/HeaderParser.js b/library/helpers/form-parsing/deps/dicer/lib/HeaderParser.js
new file mode 100644
index 000000000..80867a7b8
--- /dev/null
+++ b/library/helpers/form-parsing/deps/dicer/lib/HeaderParser.js
@@ -0,0 +1,112 @@
+"use strict";
+
+const EventEmitter = require("node:events").EventEmitter;
+const inherits = require("node:util").inherits;
+const getLimit = require("../../../lib/utils/getLimit");
+
+const StreamSearch = require("../../streamsearch/sbmh");
+
+const B_DCRLF = Buffer.from("\r\n\r\n");
+const RE_CRLF = /\r\n/g;
+const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/; // eslint-disable-line no-control-regex
+
+function HeaderParser(cfg) {
+ EventEmitter.call(this);
+
+ cfg = cfg || {};
+ const self = this;
+ this.nread = 0;
+ this.maxed = false;
+ this.npairs = 0;
+ this.maxHeaderPairs = getLimit(cfg, "maxHeaderPairs", 2000);
+ this.maxHeaderSize = getLimit(cfg, "maxHeaderSize", 80 * 1024);
+ this.buffer = "";
+ this.header = {};
+ this.finished = false;
+ this.ss = new StreamSearch(B_DCRLF);
+ this.ss.on("info", function (isMatch, data, start, end) {
+ if (data && !self.maxed) {
+ if (self.nread + end - start >= self.maxHeaderSize) {
+ end = self.maxHeaderSize - self.nread + start;
+ self.nread = self.maxHeaderSize;
+ self.maxed = true;
+ } else {
+ self.nread += end - start;
+ }
+
+ self.buffer += data.toString("binary", start, end);
+ }
+ if (isMatch) {
+ self._finish();
+ }
+ });
+}
+inherits(HeaderParser, EventEmitter);
+
+HeaderParser.prototype.push = function (data) {
+ const r = this.ss.push(data);
+ if (this.finished) {
+ return r;
+ }
+};
+
+HeaderParser.prototype.reset = function () {
+ this.finished = false;
+ this.buffer = "";
+ this.header = {};
+ this.ss.reset();
+};
+
+HeaderParser.prototype._finish = function () {
+ if (this.buffer) {
+ this._parseHeader();
+ }
+ this.ss.matches = this.ss.maxMatches;
+ const header = this.header;
+ this.header = {};
+ this.buffer = "";
+ this.finished = true;
+ this.nread = this.npairs = 0;
+ this.maxed = false;
+ this.emit("header", header);
+};
+
+HeaderParser.prototype._parseHeader = function () {
+ if (this.npairs === this.maxHeaderPairs) {
+ return;
+ }
+
+ const lines = this.buffer.split(RE_CRLF);
+ const len = lines.length;
+ let m, h;
+
+  for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
+
+ if (lines[i].length === 0) {
+ continue;
+ }
+ if (lines[i][0] === "\t" || lines[i][0] === " ") {
+ // folded header content
+ // RFC2822 says to just remove the CRLF and not the whitespace following
+ // it, so we follow the RFC and include the leading whitespace ...
+ if (h) {
+ this.header[h][this.header[h].length - 1] += lines[i];
+ continue;
+ }
+ }
+
+ const posColon = lines[i].indexOf(":");
+ if (posColon === -1 || posColon === 0) {
+ return;
+ }
+ m = RE_HDR.exec(lines[i]);
+ h = m[1].toLowerCase();
+ this.header[h] = this.header[h] || [];
+ this.header[h].push(m[2] || "");
+ if (++this.npairs === this.maxHeaderPairs) {
+ break;
+ }
+ }
+};
+
+module.exports = HeaderParser;
diff --git a/library/helpers/form-parsing/deps/dicer/lib/PartStream.js b/library/helpers/form-parsing/deps/dicer/lib/PartStream.js
new file mode 100644
index 000000000..5e8c3dbdb
--- /dev/null
+++ b/library/helpers/form-parsing/deps/dicer/lib/PartStream.js
@@ -0,0 +1,13 @@
+"use strict";
+
+const inherits = require("node:util").inherits;
+const ReadableStream = require("node:stream").Readable;
+
+function PartStream(opts) {
+ ReadableStream.call(this, opts);
+}
+inherits(PartStream, ReadableStream);
+
+PartStream.prototype._read = function (n) {};
+
+module.exports = PartStream;
diff --git a/library/helpers/form-parsing/deps/streamsearch/sbmh.js b/library/helpers/form-parsing/deps/streamsearch/sbmh.js
new file mode 100644
index 000000000..a6d833f98
--- /dev/null
+++ b/library/helpers/form-parsing/deps/streamsearch/sbmh.js
@@ -0,0 +1,242 @@
+"use strict";
+
+/**
+ * Copyright Brian White. All rights reserved.
+ *
+ * @see https://github.com/mscdex/streamsearch
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+ * IN THE SOFTWARE.
+ *
+ * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation
+ * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool
+ */
+
+const { EventEmitter } = require("node:events");
+const { inherits } = require("node:util");
+
+function SBMH(needle) {
+ if (typeof needle === "string") {
+ needle = Buffer.from(needle);
+ }
+
+ if (!Buffer.isBuffer(needle)) {
+ throw new TypeError("The needle has to be a String or a Buffer.");
+ }
+
+ const needleLength = needle.length;
+ const needleLastCharIndex = needleLength - 1;
+
+ if (needleLength === 0) {
+ throw new Error("The needle cannot be an empty String/Buffer.");
+ }
+
+ if (needleLength > 256) {
+ throw new Error("The needle cannot have a length bigger than 256.");
+ }
+
+ this.maxMatches = Infinity;
+ this.matches = 0;
+
+ this._occ = new Uint8Array(256).fill(needleLength); // Initialize occurrence table.
+ this._lookbehind_size = 0;
+ this._needle = needle;
+ this._bufpos = 0;
+
+ this._lookbehind = Buffer.alloc(needleLastCharIndex);
+
+ // Populate occurrence table with analysis of the needle,
+ // ignoring last letter.
+  // eslint-disable-next-line no-var
+  for (var i = 0; i < needleLastCharIndex; ++i) {
+ this._occ[needle[i]] = needleLastCharIndex - i;
+ }
+}
+inherits(SBMH, EventEmitter);
+
+SBMH.prototype.reset = function () {
+ this._lookbehind_size = 0;
+ this.matches = 0;
+ this._bufpos = 0;
+};
+
+SBMH.prototype.push = function (chunk, pos) {
+ if (!Buffer.isBuffer(chunk)) {
+ chunk = Buffer.from(chunk, "binary");
+ }
+ const chlen = chunk.length;
+ this._bufpos = pos || 0;
+ let r;
+ while (r !== chlen && this.matches < this.maxMatches) {
+ r = this._sbmh_feed(chunk);
+ }
+ return r;
+};
+
+SBMH.prototype._sbmh_feed = function (data) {
+ const len = data.length;
+ const needle = this._needle;
+ const needleLength = needle.length;
+ const needleLastCharIndex = needleLength - 1;
+ const needleLastChar = needle[needleLastCharIndex];
+
+ // Positive: points to a position in `data`
+ // pos == 3 points to data[3]
+ // Negative: points to a position in the lookbehind buffer
+ // pos == -2 points to lookbehind[lookbehind_size - 2]
+ let pos = -this._lookbehind_size;
+ let ch;
+
+ if (pos < 0) {
+ // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool
+ // search with character lookup code that considers both the
+ // lookbehind buffer and the current round's haystack data.
+ //
+ // Loop until
+ // there is a match.
+ // or until
+ // we've moved past the position that requires the
+ // lookbehind buffer. In this case we switch to the
+ // optimized loop.
+ // or until
+ // the character to look at lies outside the haystack.
+ while (pos < 0 && pos <= len - needleLength) {
+ ch = data[pos + needleLastCharIndex];
+
+ if (
+ ch === needleLastChar &&
+ this._sbmh_memcmp(data, pos, needleLastCharIndex)
+ ) {
+ this._lookbehind_size = 0;
+ ++this.matches;
+ this.emit("info", true);
+ return (this._bufpos = pos + needleLength);
+ }
+
+ pos += this._occ[ch];
+ }
+
+ // No match.
+
+ while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) {
+ // There's too few data for Boyer-Moore-Horspool to run,
+ // so let's use a different algorithm to skip as much as
+ // we can.
+ // Forward pos until
+ // the trailing part of lookbehind + data
+ // looks like the beginning of the needle
+ // or until
+ // pos == 0
+ ++pos;
+ }
+
+ if (pos >= 0) {
+ // Discard lookbehind buffer.
+ this.emit("info", false, this._lookbehind, 0, this._lookbehind_size);
+ this._lookbehind_size = 0;
+ } else {
+ // Cut off part of the lookbehind buffer that has
+ // been processed and append the entire haystack
+ // into it.
+ const bytesToCutOff = this._lookbehind_size + pos;
+ if (bytesToCutOff > 0) {
+ // The cut off data is guaranteed not to contain the needle.
+ this.emit("info", false, this._lookbehind, 0, bytesToCutOff);
+ }
+
+ this._lookbehind_size -= bytesToCutOff;
+ this._lookbehind.copy(
+ this._lookbehind,
+ 0,
+ bytesToCutOff,
+ this._lookbehind_size
+ );
+
+ data.copy(this._lookbehind, this._lookbehind_size);
+ this._lookbehind_size += len;
+
+ this._bufpos = len;
+ return len;
+ }
+ }
+
+ // Lookbehind buffer is now empty. We only need to check if the
+ // needle is in the haystack.
+ pos = data.indexOf(needle, pos + this._bufpos);
+
+ if (pos !== -1) {
+ ++this.matches;
+ if (pos === 0) {
+ this.emit("info", true);
+ } else {
+ this.emit("info", true, data, this._bufpos, pos);
+ }
+ return (this._bufpos = pos + needleLength);
+ }
+
+ pos = len - needleLastCharIndex;
+ if (pos < 0) {
+ pos = 0;
+ }
+
+ // There was no match. If there's trailing haystack data that we cannot
+ // match yet using the Boyer-Moore-Horspool algorithm (because the trailing
+ // data is less than the needle size) then match using a modified
+ // algorithm that starts matching from the beginning instead of the end.
+ // Whatever trailing data is left after running this algorithm is added to
+ // the lookbehind buffer.
+ while (
+ pos !== len &&
+ (data[pos] !== needle[0] ||
+ Buffer.compare(
+ data.subarray(pos + 1, len),
+ needle.subarray(1, len - pos)
+ ) !== 0)
+ ) {
+ ++pos;
+ }
+
+ if (pos !== len) {
+ data.copy(this._lookbehind, 0, pos, len);
+ this._lookbehind_size = len - pos;
+ }
+
+ // Everything until pos is guaranteed not to contain needle data.
+ if (pos !== 0) {
+ this.emit("info", false, data, this._bufpos, pos);
+ }
+
+ this._bufpos = len;
+ return len;
+};
+
+SBMH.prototype._sbmh_lookup_char = function (data, pos) {
+ return pos < 0 ? this._lookbehind[this._lookbehind_size + pos] : data[pos];
+};
+
+SBMH.prototype._sbmh_memcmp = function (data, pos, len) {
+  // eslint-disable-next-line no-var
+  for (var i = 0; i < len; ++i) {
+ if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) {
+ return false;
+ }
+ }
+ return true;
+};
+
+module.exports = SBMH;
diff --git a/library/helpers/form-parsing/index.d.ts b/library/helpers/form-parsing/index.d.ts
new file mode 100644
index 000000000..e37281e12
--- /dev/null
+++ b/library/helpers/form-parsing/index.d.ts
@@ -0,0 +1,229 @@
+// Definitions by: Jacob Baskin
+// BendingBender
+// Igor Savin
+
+/// <reference types="node" />
+
+import * as http from "node:http";
+import { Readable, Writable } from "node:stream";
+export { Dicer } from "./deps/dicer/lib/Dicer";
+
+export const Busboy: BusboyConstructor;
+export default Busboy;
+
+export interface BusboyConfig {
+ /**
+ * These are the HTTP headers of the incoming request, which are used by individual parsers.
+ */
+ headers: BusboyHeaders;
+ /**
+ * `highWaterMark` to use for this Busboy instance.
+ * @default WritableStream default.
+ */
+ highWaterMark?: number | undefined;
+ /**
+ * highWaterMark to use for file streams.
+ * @default ReadableStream default.
+ */
+ fileHwm?: number | undefined;
+ /**
+ * Default character set to use when one isn't defined.
+ * @default 'utf8'
+ */
+ defCharset?: string | undefined;
+ /**
+ * Detect if a Part is a file.
+ *
+ * By default a file is detected if contentType
+ * is application/octet-stream or fileName is not
+ * undefined.
+ *
+ * Modify this to handle e.g. Blobs.
+ */
+ isPartAFile?: (
+ fieldName: string | undefined,
+ contentType: string | undefined,
+ fileName: string | undefined
+ ) => boolean;
+ /**
+ * If paths in the multipart 'filename' field shall be preserved.
+ * @default false
+ */
+ preservePath?: boolean | undefined;
+ /**
+ * Various limits on incoming data.
+ */
+ limits?:
+ | {
+ /**
+ * Max field name size (in bytes)
+ * @default 100 bytes
+ */
+ fieldNameSize?: number | undefined;
+ /**
+ * Max field value size (in bytes)
+ * @default 1MB
+ */
+ fieldSize?: number | undefined;
+ /**
+ * Max number of non-file fields
+ * @default Infinity
+ */
+ fields?: number | undefined;
+ /**
+ * For multipart forms, the max file size (in bytes)
+ * @default Infinity
+ */
+ fileSize?: number | undefined;
+ /**
+ * For multipart forms, the max number of file fields
+ * @default Infinity
+ */
+ files?: number | undefined;
+ /**
+ * For multipart forms, the max number of parts (fields + files)
+ * @default Infinity
+ */
+ parts?: number | undefined;
+ /**
+ * For multipart forms, the max number of header key=>value pairs to parse
+ * @default 2000
+ */
+ headerPairs?: number | undefined;
+
+ /**
+ * For multipart forms, the max size of a header part
+ * @default 81920
+ */
+ headerSize?: number | undefined;
+ }
+ | undefined;
+}
+
+export type BusboyHeaders = {
+ "content-type": string;
+} & http.IncomingHttpHeaders;
+
+export interface BusboyFileStream extends Readable {
+ truncated: boolean;
+
+ /**
+ * The number of bytes that have been read so far.
+ */
+ bytesRead: number;
+}
+
+export interface Busboy extends Writable {
+  addListener<Event extends keyof BusboyEvents>(
+    event: Event,
+    listener: BusboyEvents[Event]
+  ): this;
+
+ addListener(event: string | symbol, listener: (...args: any[]) => void): this;
+
+  on<Event extends keyof BusboyEvents>(
+    event: Event,
+    listener: BusboyEvents[Event]
+  ): this;
+
+ on(event: string | symbol, listener: (...args: any[]) => void): this;
+
+  once<Event extends keyof BusboyEvents>(
+    event: Event,
+    listener: BusboyEvents[Event]
+  ): this;
+
+ once(event: string | symbol, listener: (...args: any[]) => void): this;
+
+  removeListener<Event extends keyof BusboyEvents>(
+    event: Event,
+    listener: BusboyEvents[Event]
+  ): this;
+
+ removeListener(
+ event: string | symbol,
+ listener: (...args: any[]) => void
+ ): this;
+
+  off<Event extends keyof BusboyEvents>(
+    event: Event,
+    listener: BusboyEvents[Event]
+  ): this;
+
+ off(event: string | symbol, listener: (...args: any[]) => void): this;
+
+  prependListener<Event extends keyof BusboyEvents>(
+    event: Event,
+    listener: BusboyEvents[Event]
+  ): this;
+
+ prependListener(
+ event: string | symbol,
+ listener: (...args: any[]) => void
+ ): this;
+
+  prependOnceListener<Event extends keyof BusboyEvents>(
+    event: Event,
+    listener: BusboyEvents[Event]
+  ): this;
+
+ prependOnceListener(
+ event: string | symbol,
+ listener: (...args: any[]) => void
+ ): this;
+}
+
+export interface BusboyEvents {
+ /**
+ * Emitted for each new file form field found.
+ *
+ * * Note: if you listen for this event, you should always handle the `stream` no matter if you care about the
+ * file contents or not (e.g. you can simply just do `stream.resume();` if you want to discard the contents),
+ * otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any**
+ * incoming files, you can simply not listen for the 'file' event at all and any/all files will be automatically
+ * and safely discarded (these discarded files do still count towards `files` and `parts` limits).
+ * * If a configured file size limit was reached, `stream` will both have a boolean property `truncated`
+ * (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens.
+ *
+ * @param listener.transferEncoding Contains the 'Content-Transfer-Encoding' value for the file stream.
+ * @param listener.mimeType Contains the 'Content-Type' value for the file stream.
+ */
+ file: (
+ fieldname: string,
+ stream: BusboyFileStream,
+ filename: string,
+ transferEncoding: string,
+ mimeType: string
+ ) => void;
+ /**
+ * Emitted for each new non-file field found.
+ */
+ field: (
+ fieldname: string,
+ value: string,
+ fieldnameTruncated: boolean,
+ valueTruncated: boolean,
+ transferEncoding: string,
+ mimeType: string
+ ) => void;
+ finish: () => void;
+ /**
+ * Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted.
+ */
+ partsLimit: () => void;
+ /**
+ * Emitted when specified `files` limit has been reached. No more 'file' events will be emitted.
+ */
+ filesLimit: () => void;
+ /**
+ * Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted.
+ */
+ fieldsLimit: () => void;
+ error: (error: unknown) => void;
+}
+
+export interface BusboyConstructor {
+ (options: BusboyConfig): Busboy;
+
+ new (options: BusboyConfig): Busboy;
+}
diff --git a/library/helpers/form-parsing/index.js b/library/helpers/form-parsing/index.js
new file mode 100644
index 000000000..9989e45b4
--- /dev/null
+++ b/library/helpers/form-parsing/index.js
@@ -0,0 +1,86 @@
+"use strict";
+
+const WritableStream = require("node:stream").Writable;
+const { inherits } = require("node:util");
+const Dicer = require("./deps/dicer/lib/Dicer");
+
+const MultipartParser = require("./lib/types/multipart");
+const UrlencodedParser = require("./lib/types/urlencoded");
+const parseParams = require("./lib/utils/parseParams");
+
+function Busboy(opts) {
+ if (!(this instanceof Busboy)) {
+ return new Busboy(opts);
+ }
+
+ if (typeof opts !== "object") {
+ throw new TypeError("Busboy expected an options-Object.");
+ }
+ if (typeof opts.headers !== "object") {
+ throw new TypeError(
+ "Busboy expected an options-Object with headers-attribute."
+ );
+ }
+ if (typeof opts.headers["content-type"] !== "string") {
+ throw new TypeError("Missing Content-Type-header.");
+ }
+
+ const { headers, ...streamOptions } = opts;
+
+ this.opts = {
+ autoDestroy: false,
+ ...streamOptions,
+ };
+ WritableStream.call(this, this.opts);
+
+ this._done = false;
+ this._parser = this.getParserByHeaders(headers);
+ this._finished = false;
+}
+inherits(Busboy, WritableStream);
+
+Busboy.prototype.emit = function (ev) {
+ if (ev === "finish") {
+ if (!this._done) {
+ this._parser?.end();
+ return;
+ } else if (this._finished) {
+ return;
+ }
+ this._finished = true;
+ }
+ WritableStream.prototype.emit.apply(this, arguments);
+};
+
+Busboy.prototype.getParserByHeaders = function (headers) {
+ const parsed = parseParams(headers["content-type"]);
+
+ const cfg = {
+ defCharset: this.opts.defCharset,
+ fileHwm: this.opts.fileHwm,
+ headers,
+ highWaterMark: this.opts.highWaterMark,
+ isPartAFile: this.opts.isPartAFile,
+ limits: this.opts.limits,
+ parsedConType: parsed,
+ preservePath: this.opts.preservePath,
+ };
+
+ if (MultipartParser.detect.test(parsed[0])) {
+ return new MultipartParser(this, cfg);
+ }
+ if (UrlencodedParser.detect.test(parsed[0])) {
+ return new UrlencodedParser(this, cfg);
+ }
+ throw new Error("Unsupported Content-Type.");
+};
+
+Busboy.prototype._write = function (chunk, encoding, cb) {
+ this._parser.write(chunk, cb);
+};
+
+module.exports = Busboy;
+module.exports.default = Busboy;
+module.exports.Busboy = Busboy;
+
+module.exports.Dicer = Dicer;
diff --git a/library/helpers/form-parsing/lib/types/multipart.js b/library/helpers/form-parsing/lib/types/multipart.js
new file mode 100644
index 000000000..ae6566d5d
--- /dev/null
+++ b/library/helpers/form-parsing/lib/types/multipart.js
@@ -0,0 +1,352 @@
+"use strict";
+
+// TODO:
+// * support 1 nested multipart level
+// (see second multipart example here:
+// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data)
+// * support limits.fieldNameSize
+// -- this will require modifications to utils.parseParams
+
+const { Readable } = require("node:stream");
+const { inherits } = require("node:util");
+
+const Dicer = require("../../deps/dicer/lib/Dicer");
+
+const parseParams = require("../utils/parseParams");
+const decodeText = require("../utils/decodeText");
+const basename = require("../utils/basename");
+const getLimit = require("../utils/getLimit");
+
+const RE_BOUNDARY = /^boundary$/i;
+const RE_FIELD = /^form-data$/i;
+const RE_CHARSET = /^charset$/i;
+const RE_FILENAME = /^filename$/i;
+const RE_NAME = /^name$/i;
+
+Multipart.detect = /^multipart\/form-data/i;
+function Multipart(boy, cfg) {
+ let i;
+ let len;
+ const self = this;
+ let boundary;
+ const limits = cfg.limits;
+ const isPartAFile =
+ cfg.isPartAFile ||
+ ((fieldName, contentType, fileName) =>
+ contentType === "application/octet-stream" || fileName !== undefined);
+ const parsedConType = cfg.parsedConType || [];
+ const defCharset = cfg.defCharset || "utf8";
+ const preservePath = cfg.preservePath;
+ const fileOpts = { highWaterMark: cfg.fileHwm };
+
+ for (i = 0, len = parsedConType.length; i < len; ++i) {
+ if (
+ Array.isArray(parsedConType[i]) &&
+ RE_BOUNDARY.test(parsedConType[i][0])
+ ) {
+ boundary = parsedConType[i][1];
+ break;
+ }
+ }
+
+ function checkFinished() {
+ if (nends === 0 && finished && !boy._done) {
+ finished = false;
+ self.end();
+ }
+ }
+
+ if (typeof boundary !== "string") {
+ throw new Error("Multipart: Boundary not found");
+ }
+
+ const fieldSizeLimit = getLimit(limits, "fieldSize", 1 * 1024 * 1024);
+ const fileSizeLimit = getLimit(limits, "fileSize", Infinity);
+ const filesLimit = getLimit(limits, "files", Infinity);
+ const fieldsLimit = getLimit(limits, "fields", Infinity);
+ const partsLimit = getLimit(limits, "parts", Infinity);
+ const headerPairsLimit = getLimit(limits, "headerPairs", 2000);
+ const headerSizeLimit = getLimit(limits, "headerSize", 80 * 1024);
+
+ let nfiles = 0;
+ let nfields = 0;
+ let nends = 0;
+ let curFile;
+ let curField;
+ let finished = false;
+
+ this._needDrain = false;
+ this._pause = false;
+ this._cb = undefined;
+ this._nparts = 0;
+ this._boy = boy;
+
+ const parserCfg = {
+ boundary,
+ maxHeaderPairs: headerPairsLimit,
+ maxHeaderSize: headerSizeLimit,
+ partHwm: fileOpts.highWaterMark,
+ highWaterMark: cfg.highWaterMark,
+ };
+
+ this.parser = new Dicer(parserCfg);
+ this.parser
+ .on("drain", function () {
+ self._needDrain = false;
+ if (self._cb && !self._pause) {
+ const cb = self._cb;
+ self._cb = undefined;
+ cb();
+ }
+ })
+ .on("part", function onPart(part) {
+ if (++self._nparts > partsLimit) {
+ self.parser.removeListener("part", onPart);
+ self.parser.on("part", skipPart);
+ boy.hitPartsLimit = true;
+ boy.emit("partsLimit");
+ return skipPart(part);
+ }
+
+ // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let
+ // us emit 'end' early since we know the part has ended if we are already
+ // seeing the next part
+ if (curField) {
+ const field = curField;
+ field.emit("end");
+ field.removeAllListeners("end");
+ }
+
+ part
+ .on("header", function (header) {
+ let contype;
+ let fieldname;
+ let parsed;
+ let charset;
+ let encoding;
+ let filename;
+ let nsize = 0;
+
+ if (header["content-type"]) {
+ parsed = parseParams(header["content-type"][0]);
+ if (parsed[0]) {
+ contype = parsed[0].toLowerCase();
+ for (i = 0, len = parsed.length; i < len; ++i) {
+ if (RE_CHARSET.test(parsed[i][0])) {
+ charset = parsed[i][1].toLowerCase();
+ break;
+ }
+ }
+ }
+ }
+
+ if (contype === undefined) {
+ contype = "text/plain";
+ }
+ if (charset === undefined) {
+ charset = defCharset;
+ }
+
+ if (header["content-disposition"]) {
+ parsed = parseParams(header["content-disposition"][0]);
+ if (!RE_FIELD.test(parsed[0])) {
+ return skipPart(part);
+ }
+ for (i = 0, len = parsed.length; i < len; ++i) {
+ if (RE_NAME.test(parsed[i][0])) {
+ fieldname = parsed[i][1];
+ } else if (RE_FILENAME.test(parsed[i][0])) {
+ filename = parsed[i][1];
+ if (!preservePath) {
+ filename = basename(filename);
+ }
+ }
+ }
+ } else {
+ return skipPart(part);
+ }
+
+ if (header["content-transfer-encoding"]) {
+ encoding = header["content-transfer-encoding"][0].toLowerCase();
+ } else {
+ encoding = "7bit";
+ }
+
+ let onData, onEnd;
+
+ if (isPartAFile(fieldname, contype, filename)) {
+ // file/binary field
+ if (nfiles === filesLimit) {
+ if (!boy.hitFilesLimit) {
+ boy.hitFilesLimit = true;
+ boy.emit("filesLimit");
+ }
+ return skipPart(part);
+ }
+
+ ++nfiles;
+
+ if (boy.listenerCount("file") === 0) {
+ self.parser._ignore();
+ return;
+ }
+
+ ++nends;
+ const file = new FileStream(fileOpts);
+ curFile = file;
+ file.on("end", function () {
+ --nends;
+ self._pause = false;
+ checkFinished();
+ if (self._cb && !self._needDrain) {
+ const cb = self._cb;
+ self._cb = undefined;
+ cb();
+ }
+ });
+ file._read = function (n) {
+ if (!self._pause) {
+ return;
+ }
+ self._pause = false;
+ if (self._cb && !self._needDrain) {
+ const cb = self._cb;
+ self._cb = undefined;
+ cb();
+ }
+ };
+ boy.emit("file", fieldname, file, filename, encoding, contype);
+
+ onData = function (data) {
+ if ((nsize += data.length) > fileSizeLimit) {
+ const extralen = fileSizeLimit - nsize + data.length;
+ if (extralen > 0) {
+ file.push(data.slice(0, extralen));
+ }
+ file.truncated = true;
+ file.bytesRead = fileSizeLimit;
+ part.removeAllListeners("data");
+ file.emit("limit");
+ return;
+ } else if (!file.push(data)) {
+ self._pause = true;
+ }
+
+ file.bytesRead = nsize;
+ };
+
+ onEnd = function () {
+ curFile = undefined;
+ file.push(null);
+ };
+ } else {
+ // non-file field
+ if (nfields === fieldsLimit) {
+ if (!boy.hitFieldsLimit) {
+ boy.hitFieldsLimit = true;
+ boy.emit("fieldsLimit");
+ }
+ return skipPart(part);
+ }
+
+ ++nfields;
+ ++nends;
+ let buffer = "";
+ let truncated = false;
+ curField = part;
+
+ onData = function (data) {
+ if ((nsize += data.length) > fieldSizeLimit) {
+ const extralen = fieldSizeLimit - (nsize - data.length);
+ buffer += data.toString("binary", 0, extralen);
+ truncated = true;
+ part.removeAllListeners("data");
+ } else {
+ buffer += data.toString("binary");
+ }
+ };
+
+ onEnd = function () {
+ curField = undefined;
+ if (buffer.length) {
+ buffer = decodeText(buffer, "binary", charset);
+ }
+ boy.emit(
+ "field",
+ fieldname,
+ buffer,
+ false,
+ truncated,
+ encoding,
+ contype
+ );
+ --nends;
+ checkFinished();
+ };
+ }
+
+ /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become
+ broken. Streams2/streams3 is a huge black box of confusion, but
+ somehow overriding the sync state seems to fix things again (and still
+ seems to work for previous node versions).
+ */
+ part._readableState.sync = false;
+
+ part.on("data", onData);
+ part.on("end", onEnd);
+ })
+ .on("error", function (err) {
+ if (curFile) {
+ curFile.emit("error", err);
+ }
+ });
+ })
+ .on("error", function (err) {
+ boy.emit("error", err);
+ })
+ .on("finish", function () {
+ finished = true;
+ checkFinished();
+ });
+}
+
+Multipart.prototype.write = function (chunk, cb) {
+ const r = this.parser.write(chunk);
+ if (r && !this._pause) {
+ cb();
+ } else {
+ this._needDrain = !r;
+ this._cb = cb;
+ }
+};
+
+Multipart.prototype.end = function () {
+ const self = this;
+
+ if (self.parser.writable) {
+ self.parser.end();
+ } else if (!self._boy._done) {
+ process.nextTick(function () {
+ self._boy._done = true;
+ self._boy.emit("finish");
+ });
+ }
+};
+
+function skipPart(part) {
+ part.resume();
+}
+
+function FileStream(opts) {
+ Readable.call(this, opts);
+
+ this.bytesRead = 0;
+
+ this.truncated = false;
+}
+
+inherits(FileStream, Readable);
+
+FileStream.prototype._read = function (n) {};
+
+module.exports = Multipart;
diff --git a/library/helpers/form-parsing/lib/types/urlencoded.js b/library/helpers/form-parsing/lib/types/urlencoded.js
new file mode 100644
index 000000000..160ceba60
--- /dev/null
+++ b/library/helpers/form-parsing/lib/types/urlencoded.js
@@ -0,0 +1,246 @@
+"use strict";
+
+const Decoder = require("../utils/Decoder");
+const decodeText = require("../utils/decodeText");
+const getLimit = require("../utils/getLimit");
+
+const RE_CHARSET = /^charset$/i;
+
+UrlEncoded.detect = /^application\/x-www-form-urlencoded/i;
+function UrlEncoded(boy, cfg) {
+ const limits = cfg.limits;
+ const parsedConType = cfg.parsedConType;
+ this.boy = boy;
+
+ this.fieldSizeLimit = getLimit(limits, "fieldSize", 1 * 1024 * 1024);
+ this.fieldNameSizeLimit = getLimit(limits, "fieldNameSize", 100);
+ this.fieldsLimit = getLimit(limits, "fields", Infinity);
+
+ let charset;
+  // eslint-disable-next-line no-var
+  for (var i = 0, len = parsedConType.length; i < len; ++i) {
+ if (
+ Array.isArray(parsedConType[i]) &&
+ RE_CHARSET.test(parsedConType[i][0])
+ ) {
+ charset = parsedConType[i][1].toLowerCase();
+ break;
+ }
+ }
+
+ if (charset === undefined) {
+ charset = cfg.defCharset || "utf8";
+ }
+
+ this.decoder = new Decoder();
+ this.charset = charset;
+ this._fields = 0;
+ this._state = "key";
+ this._checkingBytes = true;
+ this._bytesKey = 0;
+ this._bytesVal = 0;
+ this._key = "";
+ this._val = "";
+ this._keyTrunc = false;
+ this._valTrunc = false;
+ this._hitLimit = false;
+}
+
+UrlEncoded.prototype.write = function (data, cb) {
+ if (this._fields === this.fieldsLimit) {
+ if (!this.boy.hitFieldsLimit) {
+ this.boy.hitFieldsLimit = true;
+ this.boy.emit("fieldsLimit");
+ }
+ return cb();
+ }
+
+ let idxeq;
+ let idxamp;
+ let i;
+ let p = 0;
+ const len = data.length;
+
+ while (p < len) {
+ if (this._state === "key") {
+ idxeq = idxamp = undefined;
+ for (i = p; i < len; ++i) {
+ if (!this._checkingBytes) {
+ ++p;
+ }
+ if (data[i] === 0x3d /* = */) {
+ idxeq = i;
+ break;
+ } else if (data[i] === 0x26 /* & */) {
+ idxamp = i;
+ break;
+ }
+ if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) {
+ this._hitLimit = true;
+ break;
+ } else if (this._checkingBytes) {
+ ++this._bytesKey;
+ }
+ }
+
+ if (idxeq !== undefined) {
+ // key with assignment
+ if (idxeq > p) {
+ this._key += this.decoder.write(data.toString("binary", p, idxeq));
+ }
+ this._state = "val";
+
+ this._hitLimit = false;
+ this._checkingBytes = true;
+ this._val = "";
+ this._bytesVal = 0;
+ this._valTrunc = false;
+ this.decoder.reset();
+
+ p = idxeq + 1;
+ } else if (idxamp !== undefined) {
+ // key with no assignment
+ ++this._fields;
+ let key;
+ const keyTrunc = this._keyTrunc;
+ if (idxamp > p) {
+ key = this._key += this.decoder.write(
+ data.toString("binary", p, idxamp)
+ );
+ } else {
+ key = this._key;
+ }
+
+ this._hitLimit = false;
+ this._checkingBytes = true;
+ this._key = "";
+ this._bytesKey = 0;
+ this._keyTrunc = false;
+ this.decoder.reset();
+
+ if (key.length) {
+ this.boy.emit(
+ "field",
+ decodeText(key, "binary", this.charset),
+ "",
+ keyTrunc,
+ false
+ );
+ }
+
+ p = idxamp + 1;
+ if (this._fields === this.fieldsLimit) {
+ return cb();
+ }
+ } else if (this._hitLimit) {
+ // we may not have hit the actual limit if there are encoded bytes...
+ if (i > p) {
+ this._key += this.decoder.write(data.toString("binary", p, i));
+ }
+ p = i;
+ if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) {
+ // yep, we actually did hit the limit
+ this._checkingBytes = false;
+ this._keyTrunc = true;
+ }
+ } else {
+ if (p < len) {
+ this._key += this.decoder.write(data.toString("binary", p));
+ }
+ p = len;
+ }
+ } else {
+ idxamp = undefined;
+ for (i = p; i < len; ++i) {
+ if (!this._checkingBytes) {
+ ++p;
+ }
+ if (data[i] === 0x26 /* & */) {
+ idxamp = i;
+ break;
+ }
+ if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) {
+ this._hitLimit = true;
+ break;
+ } else if (this._checkingBytes) {
+ ++this._bytesVal;
+ }
+ }
+
+ if (idxamp !== undefined) {
+ ++this._fields;
+ if (idxamp > p) {
+ this._val += this.decoder.write(data.toString("binary", p, idxamp));
+ }
+ this.boy.emit(
+ "field",
+ decodeText(this._key, "binary", this.charset),
+ decodeText(this._val, "binary", this.charset),
+ this._keyTrunc,
+ this._valTrunc
+ );
+ this._state = "key";
+
+ this._hitLimit = false;
+ this._checkingBytes = true;
+ this._key = "";
+ this._bytesKey = 0;
+ this._keyTrunc = false;
+ this.decoder.reset();
+
+ p = idxamp + 1;
+ if (this._fields === this.fieldsLimit) {
+ return cb();
+ }
+ } else if (this._hitLimit) {
+ // we may not have hit the actual limit if there are encoded bytes...
+ if (i > p) {
+ this._val += this.decoder.write(data.toString("binary", p, i));
+ }
+ p = i;
+ if (
+ (this._val === "" && this.fieldSizeLimit === 0) ||
+ (this._bytesVal = this._val.length) === this.fieldSizeLimit
+ ) {
+ // yep, we actually did hit the limit
+ this._checkingBytes = false;
+ this._valTrunc = true;
+ }
+ } else {
+ if (p < len) {
+ this._val += this.decoder.write(data.toString("binary", p));
+ }
+ p = len;
+ }
+ }
+ }
+ cb();
+};
+
+UrlEncoded.prototype.end = function () {
+ if (this.boy._done) {
+ return;
+ }
+
+ if (this._state === "key" && this._key.length > 0) {
+ this.boy.emit(
+ "field",
+ decodeText(this._key, "binary", this.charset),
+ "",
+ this._keyTrunc,
+ false
+ );
+ } else if (this._state === "val") {
+ this.boy.emit(
+ "field",
+ decodeText(this._key, "binary", this.charset),
+ decodeText(this._val, "binary", this.charset),
+ this._keyTrunc,
+ this._valTrunc
+ );
+ }
+ this.boy._done = true;
+ this.boy.emit("finish");
+};
+
+module.exports = UrlEncoded;
diff --git a/library/helpers/form-parsing/lib/utils/Decoder.js b/library/helpers/form-parsing/lib/utils/Decoder.js
new file mode 100644
index 000000000..bdfed5c30
--- /dev/null
+++ b/library/helpers/form-parsing/lib/utils/Decoder.js
@@ -0,0 +1,55 @@
+"use strict";
+
+const RE_PLUS = /\+/g;
+
+const HEX = [
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+];
+
+function Decoder() {
+ this.buffer = undefined;
+}
+Decoder.prototype.write = function (str) {
+ // Replace '+' with ' ' before decoding
+ str = str.replace(RE_PLUS, " ");
+ let res = "";
+ let i = 0;
+ let p = 0;
+ const len = str.length;
+ for (; i < len; ++i) {
+ if (this.buffer !== undefined) {
+ if (!HEX[str.charCodeAt(i)]) {
+ res += "%" + this.buffer;
+ this.buffer = undefined;
+ --i; // retry character
+ } else {
+ this.buffer += str[i];
+ ++p;
+ if (this.buffer.length === 2) {
+ res += String.fromCharCode(parseInt(this.buffer, 16));
+ this.buffer = undefined;
+ }
+ }
+ } else if (str[i] === "%") {
+ if (i > p) {
+ res += str.substring(p, i);
+ p = i;
+ }
+ this.buffer = "";
+ ++p;
+ }
+ }
+ if (p < len && this.buffer === undefined) {
+ res += str.substring(p);
+ }
+ return res;
+};
+Decoder.prototype.reset = function () {
+ this.buffer = undefined;
+};
+
+module.exports = Decoder;
diff --git a/library/helpers/form-parsing/lib/utils/basename.js b/library/helpers/form-parsing/lib/utils/basename.js
new file mode 100644
index 000000000..361b67a73
--- /dev/null
+++ b/library/helpers/form-parsing/lib/utils/basename.js
@@ -0,0 +1,17 @@
+"use strict";
+
+// Return the final path segment of `path`, treating both '/' and '\' as
+// separators. Returns "" for non-string input and for "." / ".." results
+// (defensive against path traversal in client-supplied filenames).
+// Vendored from @fastify/busboy (lib/utils/basename.js).
+module.exports = function basename(path) {
+  if (typeof path !== "string") {
+    return "";
+  }
+  // Scan backwards for the last separator.
+  for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var
+    switch (path.charCodeAt(i)) {
+      case 0x2f: // '/'
+      case 0x5c: // '\'
+        path = path.slice(i + 1);
+        return path === ".." || path === "." ? "" : path;
+    }
+  }
+  // No separator found: the whole string is the basename.
+  return path === ".." || path === "." ? "" : path;
+};
diff --git a/library/helpers/form-parsing/lib/utils/decodeText.js b/library/helpers/form-parsing/lib/utils/decodeText.js
new file mode 100644
index 000000000..ea0035b62
--- /dev/null
+++ b/library/helpers/form-parsing/lib/utils/decodeText.js
@@ -0,0 +1,112 @@
+"use strict";
+
+// Charset-aware decoding of header params and field text for busboy.
+// Vendored from @fastify/busboy (lib/utils/decodeText.js) — keep
+// byte-diffable against upstream.
+
+// Node has always utf-8
+const utf8Decoder = new TextDecoder("utf-8");
+const textDecoders = new Map([
+  ["utf-8", utf8Decoder],
+  ["utf8", utf8Decoder],
+]);
+
+// Resolve the decoder function for `charset`. Unrecognized names are
+// retried once lowercased, then fall through to `decoders.other`.
+function getDecoder(charset) {
+  let lc;
+  while (true) {
+    switch (charset) {
+      case "utf-8":
+      case "utf8":
+        return decoders.utf8;
+      case "latin1":
+      case "ascii": // TODO: Make these a separate, strict decoder?
+      case "us-ascii":
+      case "iso-8859-1":
+      case "iso8859-1":
+      case "iso88591":
+      case "iso_8859-1":
+      case "windows-1252":
+      case "iso_8859-1:1987":
+      case "cp1252":
+      case "x-cp1252":
+        return decoders.latin1;
+      case "utf16le":
+      case "utf-16le":
+      case "ucs2":
+      case "ucs-2":
+        return decoders.utf16le;
+      case "base64":
+        return decoders.base64;
+      default:
+        if (lc === undefined) {
+          // First miss: normalize to lowercase and try the switch again.
+          lc = true;
+          charset = charset.toLowerCase();
+          continue;
+        }
+        // NOTE(review): `decoders.other` is an arrow function, so
+        // `.bind(charset)` cannot rebind its `this`; inside `other`, `this`
+        // is the module context rather than the charset, so its textDecoders
+        // lookup never matches and unknown charsets always fall back to
+        // Buffer#toString() (utf-8). This mirrors upstream @fastify/busboy —
+        // confirm against upstream before changing.
+        return decoders.other.bind(charset);
+    }
+  }
+}
+
+// Decoder implementations: (data, sourceEncoding) -> string, where `data`
+// is a Buffer or a string (strings are first re-encoded per sourceEncoding).
+// The *Slice calls are Buffer's internal (undocumented) fast-path methods.
+const decoders = {
+  utf8: (data, sourceEncoding) => {
+    if (data.length === 0) {
+      return "";
+    }
+    if (typeof data === "string") {
+      data = Buffer.from(data, sourceEncoding);
+    }
+    return data.utf8Slice(0, data.length);
+  },
+
+  latin1: (data, sourceEncoding) => {
+    if (data.length === 0) {
+      return "";
+    }
+    if (typeof data === "string") {
+      // Strings are already latin1-compatible; no conversion needed.
+      return data;
+    }
+    return data.latin1Slice(0, data.length);
+  },
+
+  utf16le: (data, sourceEncoding) => {
+    if (data.length === 0) {
+      return "";
+    }
+    if (typeof data === "string") {
+      data = Buffer.from(data, sourceEncoding);
+    }
+    return data.ucs2Slice(0, data.length);
+  },
+
+  base64: (data, sourceEncoding) => {
+    if (data.length === 0) {
+      return "";
+    }
+    if (typeof data === "string") {
+      data = Buffer.from(data, sourceEncoding);
+    }
+    return data.base64Slice(0, data.length);
+  },
+
+  other: (data, sourceEncoding) => {
+    if (data.length === 0) {
+      return "";
+    }
+    if (typeof data === "string") {
+      data = Buffer.from(data, sourceEncoding);
+    }
+
+    // See NOTE(review) in getDecoder: `this` here is not the bound charset.
+    if (textDecoders.has(this.toString())) {
+      try {
+        return textDecoders.get(this).decode(data);
+      } catch {}
+    }
+    return typeof data === "string" ? data : data.toString();
+  },
+};
+
+// Decode `text` (a string in `sourceEncoding`, or a Buffer) into a string
+// interpreted as `destEncoding`. Falsy input is returned unchanged.
+function decodeText(text, sourceEncoding, destEncoding) {
+  if (text) {
+    return getDecoder(destEncoding)(text, sourceEncoding);
+  }
+  return text;
+}
+
+module.exports = decodeText;
diff --git a/library/helpers/form-parsing/lib/utils/getLimit.js b/library/helpers/form-parsing/lib/utils/getLimit.js
new file mode 100644
index 000000000..8d1d17885
--- /dev/null
+++ b/library/helpers/form-parsing/lib/utils/getLimit.js
@@ -0,0 +1,13 @@
+"use strict";
+
+// Read the numeric limit `name` from a busboy `limits` object, falling back
+// to `defaultLimit` when the option is absent (undefined or null).
+// Throws a TypeError when the configured value is not a usable number.
+// Vendored from @fastify/busboy (lib/utils/getLimit.js).
+module.exports = function getLimit(limits, name, defaultLimit) {
+  if (!limits || limits[name] === undefined || limits[name] === null) {
+    return defaultLimit;
+  }
+
+  if (typeof limits[name] !== "number" || isNaN(limits[name])) {
+    throw new TypeError("Limit " + name + " is not a valid number");
+  }
+
+  return limits[name];
+};
diff --git a/library/helpers/form-parsing/lib/utils/parseParams.js b/library/helpers/form-parsing/lib/utils/parseParams.js
new file mode 100644
index 000000000..1e62ef4c9
--- /dev/null
+++ b/library/helpers/form-parsing/lib/utils/parseParams.js
@@ -0,0 +1,613 @@
+/* eslint-disable object-property-newline */
+"use strict";
+
+const decodeText = require("./decodeText");
+
+// Matches one percent-encoded byte ("%hh"); each match is replaced via the
+// EncodedLookup table below when decoding RFC 2231/5987 extended parameter
+// values in parseParams.
+const RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g;
+
+const EncodedLookup = {
+ "%00": "\x00",
+ "%01": "\x01",
+ "%02": "\x02",
+ "%03": "\x03",
+ "%04": "\x04",
+ "%05": "\x05",
+ "%06": "\x06",
+ "%07": "\x07",
+ "%08": "\x08",
+ "%09": "\x09",
+ "%0a": "\x0a",
+ "%0A": "\x0a",
+ "%0b": "\x0b",
+ "%0B": "\x0b",
+ "%0c": "\x0c",
+ "%0C": "\x0c",
+ "%0d": "\x0d",
+ "%0D": "\x0d",
+ "%0e": "\x0e",
+ "%0E": "\x0e",
+ "%0f": "\x0f",
+ "%0F": "\x0f",
+ "%10": "\x10",
+ "%11": "\x11",
+ "%12": "\x12",
+ "%13": "\x13",
+ "%14": "\x14",
+ "%15": "\x15",
+ "%16": "\x16",
+ "%17": "\x17",
+ "%18": "\x18",
+ "%19": "\x19",
+ "%1a": "\x1a",
+ "%1A": "\x1a",
+ "%1b": "\x1b",
+ "%1B": "\x1b",
+ "%1c": "\x1c",
+ "%1C": "\x1c",
+ "%1d": "\x1d",
+ "%1D": "\x1d",
+ "%1e": "\x1e",
+ "%1E": "\x1e",
+ "%1f": "\x1f",
+ "%1F": "\x1f",
+ "%20": "\x20",
+ "%21": "\x21",
+ "%22": "\x22",
+ "%23": "\x23",
+ "%24": "\x24",
+ "%25": "\x25",
+ "%26": "\x26",
+ "%27": "\x27",
+ "%28": "\x28",
+ "%29": "\x29",
+ "%2a": "\x2a",
+ "%2A": "\x2a",
+ "%2b": "\x2b",
+ "%2B": "\x2b",
+ "%2c": "\x2c",
+ "%2C": "\x2c",
+ "%2d": "\x2d",
+ "%2D": "\x2d",
+ "%2e": "\x2e",
+ "%2E": "\x2e",
+ "%2f": "\x2f",
+ "%2F": "\x2f",
+ "%30": "\x30",
+ "%31": "\x31",
+ "%32": "\x32",
+ "%33": "\x33",
+ "%34": "\x34",
+ "%35": "\x35",
+ "%36": "\x36",
+ "%37": "\x37",
+ "%38": "\x38",
+ "%39": "\x39",
+ "%3a": "\x3a",
+ "%3A": "\x3a",
+ "%3b": "\x3b",
+ "%3B": "\x3b",
+ "%3c": "\x3c",
+ "%3C": "\x3c",
+ "%3d": "\x3d",
+ "%3D": "\x3d",
+ "%3e": "\x3e",
+ "%3E": "\x3e",
+ "%3f": "\x3f",
+ "%3F": "\x3f",
+ "%40": "\x40",
+ "%41": "\x41",
+ "%42": "\x42",
+ "%43": "\x43",
+ "%44": "\x44",
+ "%45": "\x45",
+ "%46": "\x46",
+ "%47": "\x47",
+ "%48": "\x48",
+ "%49": "\x49",
+ "%4a": "\x4a",
+ "%4A": "\x4a",
+ "%4b": "\x4b",
+ "%4B": "\x4b",
+ "%4c": "\x4c",
+ "%4C": "\x4c",
+ "%4d": "\x4d",
+ "%4D": "\x4d",
+ "%4e": "\x4e",
+ "%4E": "\x4e",
+ "%4f": "\x4f",
+ "%4F": "\x4f",
+ "%50": "\x50",
+ "%51": "\x51",
+ "%52": "\x52",
+ "%53": "\x53",
+ "%54": "\x54",
+ "%55": "\x55",
+ "%56": "\x56",
+ "%57": "\x57",
+ "%58": "\x58",
+ "%59": "\x59",
+ "%5a": "\x5a",
+ "%5A": "\x5a",
+ "%5b": "\x5b",
+ "%5B": "\x5b",
+ "%5c": "\x5c",
+ "%5C": "\x5c",
+ "%5d": "\x5d",
+ "%5D": "\x5d",
+ "%5e": "\x5e",
+ "%5E": "\x5e",
+ "%5f": "\x5f",
+ "%5F": "\x5f",
+ "%60": "\x60",
+ "%61": "\x61",
+ "%62": "\x62",
+ "%63": "\x63",
+ "%64": "\x64",
+ "%65": "\x65",
+ "%66": "\x66",
+ "%67": "\x67",
+ "%68": "\x68",
+ "%69": "\x69",
+ "%6a": "\x6a",
+ "%6A": "\x6a",
+ "%6b": "\x6b",
+ "%6B": "\x6b",
+ "%6c": "\x6c",
+ "%6C": "\x6c",
+ "%6d": "\x6d",
+ "%6D": "\x6d",
+ "%6e": "\x6e",
+ "%6E": "\x6e",
+ "%6f": "\x6f",
+ "%6F": "\x6f",
+ "%70": "\x70",
+ "%71": "\x71",
+ "%72": "\x72",
+ "%73": "\x73",
+ "%74": "\x74",
+ "%75": "\x75",
+ "%76": "\x76",
+ "%77": "\x77",
+ "%78": "\x78",
+ "%79": "\x79",
+ "%7a": "\x7a",
+ "%7A": "\x7a",
+ "%7b": "\x7b",
+ "%7B": "\x7b",
+ "%7c": "\x7c",
+ "%7C": "\x7c",
+ "%7d": "\x7d",
+ "%7D": "\x7d",
+ "%7e": "\x7e",
+ "%7E": "\x7e",
+ "%7f": "\x7f",
+ "%7F": "\x7f",
+ "%80": "\x80",
+ "%81": "\x81",
+ "%82": "\x82",
+ "%83": "\x83",
+ "%84": "\x84",
+ "%85": "\x85",
+ "%86": "\x86",
+ "%87": "\x87",
+ "%88": "\x88",
+ "%89": "\x89",
+ "%8a": "\x8a",
+ "%8A": "\x8a",
+ "%8b": "\x8b",
+ "%8B": "\x8b",
+ "%8c": "\x8c",
+ "%8C": "\x8c",
+ "%8d": "\x8d",
+ "%8D": "\x8d",
+ "%8e": "\x8e",
+ "%8E": "\x8e",
+ "%8f": "\x8f",
+ "%8F": "\x8f",
+ "%90": "\x90",
+ "%91": "\x91",
+ "%92": "\x92",
+ "%93": "\x93",
+ "%94": "\x94",
+ "%95": "\x95",
+ "%96": "\x96",
+ "%97": "\x97",
+ "%98": "\x98",
+ "%99": "\x99",
+ "%9a": "\x9a",
+ "%9A": "\x9a",
+ "%9b": "\x9b",
+ "%9B": "\x9b",
+ "%9c": "\x9c",
+ "%9C": "\x9c",
+ "%9d": "\x9d",
+ "%9D": "\x9d",
+ "%9e": "\x9e",
+ "%9E": "\x9e",
+ "%9f": "\x9f",
+ "%9F": "\x9f",
+ "%a0": "\xa0",
+ "%A0": "\xa0",
+ "%a1": "\xa1",
+ "%A1": "\xa1",
+ "%a2": "\xa2",
+ "%A2": "\xa2",
+ "%a3": "\xa3",
+ "%A3": "\xa3",
+ "%a4": "\xa4",
+ "%A4": "\xa4",
+ "%a5": "\xa5",
+ "%A5": "\xa5",
+ "%a6": "\xa6",
+ "%A6": "\xa6",
+ "%a7": "\xa7",
+ "%A7": "\xa7",
+ "%a8": "\xa8",
+ "%A8": "\xa8",
+ "%a9": "\xa9",
+ "%A9": "\xa9",
+ "%aa": "\xaa",
+ "%Aa": "\xaa",
+ "%aA": "\xaa",
+ "%AA": "\xaa",
+ "%ab": "\xab",
+ "%Ab": "\xab",
+ "%aB": "\xab",
+ "%AB": "\xab",
+ "%ac": "\xac",
+ "%Ac": "\xac",
+ "%aC": "\xac",
+ "%AC": "\xac",
+ "%ad": "\xad",
+ "%Ad": "\xad",
+ "%aD": "\xad",
+ "%AD": "\xad",
+ "%ae": "\xae",
+ "%Ae": "\xae",
+ "%aE": "\xae",
+ "%AE": "\xae",
+ "%af": "\xaf",
+ "%Af": "\xaf",
+ "%aF": "\xaf",
+ "%AF": "\xaf",
+ "%b0": "\xb0",
+ "%B0": "\xb0",
+ "%b1": "\xb1",
+ "%B1": "\xb1",
+ "%b2": "\xb2",
+ "%B2": "\xb2",
+ "%b3": "\xb3",
+ "%B3": "\xb3",
+ "%b4": "\xb4",
+ "%B4": "\xb4",
+ "%b5": "\xb5",
+ "%B5": "\xb5",
+ "%b6": "\xb6",
+ "%B6": "\xb6",
+ "%b7": "\xb7",
+ "%B7": "\xb7",
+ "%b8": "\xb8",
+ "%B8": "\xb8",
+ "%b9": "\xb9",
+ "%B9": "\xb9",
+ "%ba": "\xba",
+ "%Ba": "\xba",
+ "%bA": "\xba",
+ "%BA": "\xba",
+ "%bb": "\xbb",
+ "%Bb": "\xbb",
+ "%bB": "\xbb",
+ "%BB": "\xbb",
+ "%bc": "\xbc",
+ "%Bc": "\xbc",
+ "%bC": "\xbc",
+ "%BC": "\xbc",
+ "%bd": "\xbd",
+ "%Bd": "\xbd",
+ "%bD": "\xbd",
+ "%BD": "\xbd",
+ "%be": "\xbe",
+ "%Be": "\xbe",
+ "%bE": "\xbe",
+ "%BE": "\xbe",
+ "%bf": "\xbf",
+ "%Bf": "\xbf",
+ "%bF": "\xbf",
+ "%BF": "\xbf",
+ "%c0": "\xc0",
+ "%C0": "\xc0",
+ "%c1": "\xc1",
+ "%C1": "\xc1",
+ "%c2": "\xc2",
+ "%C2": "\xc2",
+ "%c3": "\xc3",
+ "%C3": "\xc3",
+ "%c4": "\xc4",
+ "%C4": "\xc4",
+ "%c5": "\xc5",
+ "%C5": "\xc5",
+ "%c6": "\xc6",
+ "%C6": "\xc6",
+ "%c7": "\xc7",
+ "%C7": "\xc7",
+ "%c8": "\xc8",
+ "%C8": "\xc8",
+ "%c9": "\xc9",
+ "%C9": "\xc9",
+ "%ca": "\xca",
+ "%Ca": "\xca",
+ "%cA": "\xca",
+ "%CA": "\xca",
+ "%cb": "\xcb",
+ "%Cb": "\xcb",
+ "%cB": "\xcb",
+ "%CB": "\xcb",
+ "%cc": "\xcc",
+ "%Cc": "\xcc",
+ "%cC": "\xcc",
+ "%CC": "\xcc",
+ "%cd": "\xcd",
+ "%Cd": "\xcd",
+ "%cD": "\xcd",
+ "%CD": "\xcd",
+ "%ce": "\xce",
+ "%Ce": "\xce",
+ "%cE": "\xce",
+ "%CE": "\xce",
+ "%cf": "\xcf",
+ "%Cf": "\xcf",
+ "%cF": "\xcf",
+ "%CF": "\xcf",
+ "%d0": "\xd0",
+ "%D0": "\xd0",
+ "%d1": "\xd1",
+ "%D1": "\xd1",
+ "%d2": "\xd2",
+ "%D2": "\xd2",
+ "%d3": "\xd3",
+ "%D3": "\xd3",
+ "%d4": "\xd4",
+ "%D4": "\xd4",
+ "%d5": "\xd5",
+ "%D5": "\xd5",
+ "%d6": "\xd6",
+ "%D6": "\xd6",
+ "%d7": "\xd7",
+ "%D7": "\xd7",
+ "%d8": "\xd8",
+ "%D8": "\xd8",
+ "%d9": "\xd9",
+ "%D9": "\xd9",
+ "%da": "\xda",
+ "%Da": "\xda",
+ "%dA": "\xda",
+ "%DA": "\xda",
+ "%db": "\xdb",
+ "%Db": "\xdb",
+ "%dB": "\xdb",
+ "%DB": "\xdb",
+ "%dc": "\xdc",
+ "%Dc": "\xdc",
+ "%dC": "\xdc",
+ "%DC": "\xdc",
+ "%dd": "\xdd",
+ "%Dd": "\xdd",
+ "%dD": "\xdd",
+ "%DD": "\xdd",
+ "%de": "\xde",
+ "%De": "\xde",
+ "%dE": "\xde",
+ "%DE": "\xde",
+ "%df": "\xdf",
+ "%Df": "\xdf",
+ "%dF": "\xdf",
+ "%DF": "\xdf",
+ "%e0": "\xe0",
+ "%E0": "\xe0",
+ "%e1": "\xe1",
+ "%E1": "\xe1",
+ "%e2": "\xe2",
+ "%E2": "\xe2",
+ "%e3": "\xe3",
+ "%E3": "\xe3",
+ "%e4": "\xe4",
+ "%E4": "\xe4",
+ "%e5": "\xe5",
+ "%E5": "\xe5",
+ "%e6": "\xe6",
+ "%E6": "\xe6",
+ "%e7": "\xe7",
+ "%E7": "\xe7",
+ "%e8": "\xe8",
+ "%E8": "\xe8",
+ "%e9": "\xe9",
+ "%E9": "\xe9",
+ "%ea": "\xea",
+ "%Ea": "\xea",
+ "%eA": "\xea",
+ "%EA": "\xea",
+ "%eb": "\xeb",
+ "%Eb": "\xeb",
+ "%eB": "\xeb",
+ "%EB": "\xeb",
+ "%ec": "\xec",
+ "%Ec": "\xec",
+ "%eC": "\xec",
+ "%EC": "\xec",
+ "%ed": "\xed",
+ "%Ed": "\xed",
+ "%eD": "\xed",
+ "%ED": "\xed",
+ "%ee": "\xee",
+ "%Ee": "\xee",
+ "%eE": "\xee",
+ "%EE": "\xee",
+ "%ef": "\xef",
+ "%Ef": "\xef",
+ "%eF": "\xef",
+ "%EF": "\xef",
+ "%f0": "\xf0",
+ "%F0": "\xf0",
+ "%f1": "\xf1",
+ "%F1": "\xf1",
+ "%f2": "\xf2",
+ "%F2": "\xf2",
+ "%f3": "\xf3",
+ "%F3": "\xf3",
+ "%f4": "\xf4",
+ "%F4": "\xf4",
+ "%f5": "\xf5",
+ "%F5": "\xf5",
+ "%f6": "\xf6",
+ "%F6": "\xf6",
+ "%f7": "\xf7",
+ "%F7": "\xf7",
+ "%f8": "\xf8",
+ "%F8": "\xf8",
+ "%f9": "\xf9",
+ "%F9": "\xf9",
+ "%fa": "\xfa",
+ "%Fa": "\xfa",
+ "%fA": "\xfa",
+ "%FA": "\xfa",
+ "%fb": "\xfb",
+ "%Fb": "\xfb",
+ "%fB": "\xfb",
+ "%FB": "\xfb",
+ "%fc": "\xfc",
+ "%Fc": "\xfc",
+ "%fC": "\xfc",
+ "%FC": "\xfc",
+ "%fd": "\xfd",
+ "%Fd": "\xfd",
+ "%fD": "\xfd",
+ "%FD": "\xfd",
+ "%fe": "\xfe",
+ "%Fe": "\xfe",
+ "%fE": "\xfe",
+ "%FE": "\xfe",
+ "%ff": "\xff",
+ "%Ff": "\xff",
+ "%fF": "\xff",
+ "%FF": "\xff",
+};
+
+// Replacer for RE_ENCODED matches: map a "%hh" escape to its decoded char.
+function encodedReplacer(match) {
+  return EncodedLookup[match];
+}
+
+// States of the parseParams scanner.
+const STATE_KEY = 0;
+const STATE_VALUE = 1;
+const STATE_CHARSET = 2;
+const STATE_LANG = 3;
+
+// Parse the parameter list of a header value (e.g. Content-Disposition or
+// Content-Type) into an array whose entries are either bare tokens (string)
+// or [key, value] pairs. Handles quoted strings with backslash escapes and
+// RFC 2231/5987 extended parameters (key*=charset'lang'%xx-encoded-value).
+function parseParams(str) {
+  const res = [];
+  let state = STATE_KEY;
+  let charset = "";
+  let inquote = false;
+  let escaping = false;
+  let p = 0; // index of the `res` slot currently being filled
+  let tmp = ""; // accumulator for the token being scanned
+  const len = str.length;
+
+  for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
+    const char = str[i];
+    if (char === "\\" && inquote) {
+      if (escaping) {
+        escaping = false;
+      } else {
+        escaping = true;
+        continue;
+      }
+    } else if (char === '"') {
+      if (!escaping) {
+        if (inquote) {
+          inquote = false;
+          state = STATE_KEY;
+          // Skip any remaining characters until we hit a semicolon or end of string
+          // This ensures we don't include characters after the closing quote
+          while (i + 1 < len && str[i + 1] !== ";") {
+            ++i;
+          }
+        } else {
+          inquote = true;
+        }
+        continue;
+      } else {
+        escaping = false;
+      }
+    } else {
+      if (escaping && inquote) {
+        // A backslash that escaped neither '\' nor '"': keep it literally.
+        tmp += "\\";
+      }
+      escaping = false;
+      if ((state === STATE_CHARSET || state === STATE_LANG) && char === "'") {
+        if (state === STATE_CHARSET) {
+          // End of the charset section of an extended parameter.
+          state = STATE_LANG;
+          charset = tmp.substring(1);
+        } else {
+          // The language tag is ignored; the value section follows.
+          state = STATE_VALUE;
+        }
+        tmp = "";
+        continue;
+      } else if (
+        state === STATE_KEY &&
+        (char === "*" || char === "=") &&
+        res.length
+      ) {
+        // "key=" starts a plain value, "key*" an extended (charset) value.
+        state = char === "*" ? STATE_CHARSET : STATE_VALUE;
+        res[p] = [tmp, undefined];
+        tmp = "";
+        continue;
+      } else if (!inquote && char === ";") {
+        // Parameter separator: decode and store the finished token.
+        state = STATE_KEY;
+        if (charset) {
+          if (tmp.length) {
+            tmp = decodeText(
+              tmp.replace(RE_ENCODED, encodedReplacer),
+              "binary",
+              charset
+            );
+          }
+          charset = "";
+        } else if (tmp.length) {
+          tmp = decodeText(tmp, "binary", "utf8");
+        }
+        if (res[p] === undefined) {
+          res[p] = tmp;
+        } else {
+          res[p][1] = tmp;
+        }
+        tmp = "";
+        ++p;
+        continue;
+      } else if (!inquote && (char === " " || char === "\t")) {
+        // Unquoted whitespace is not significant.
+        continue;
+      }
+    }
+    tmp += char;
+  }
+  // Flush the final token (same decode rules as the ';' branch above).
+  if (charset && tmp.length) {
+    tmp = decodeText(
+      tmp.replace(RE_ENCODED, encodedReplacer),
+      "binary",
+      charset
+    );
+  } else if (tmp) {
+    tmp = decodeText(tmp, "binary", "utf8");
+  }
+
+  if (res[p] === undefined) {
+    if (tmp) {
+      res[p] = tmp;
+    }
+  } else {
+    res[p][1] = tmp;
+  }
+
+  return res;
+}
+
+module.exports = parseParams;
diff --git a/library/helpers/form-parsing/package.json b/library/helpers/form-parsing/package.json
new file mode 100644
index 000000000..6c17931aa
--- /dev/null
+++ b/library/helpers/form-parsing/package.json
@@ -0,0 +1,81 @@
+{
+ "name": "@fastify/busboy",
+ "version": "3.2.0",
+ "private": false,
+  "author": "Brian White <mscdex@mscdex.net>",
+ "contributors": [
+ {
+ "name": "Igor Savin",
+ "email": "kibertoad@gmail.com",
+ "url": "https://github.com/kibertoad"
+ },
+ {
+ "name": "Aras Abbasi",
+ "email": "aras.abbasi@gmail.com",
+ "url": "https://github.com/uzlopak"
+ }
+ ],
+ "description": "A streaming parser for HTML form data for node.js",
+ "main": "index.js",
+ "type": "commonjs",
+ "types": "index.d.ts",
+ "scripts": {
+ "bench:busboy": "cd benchmarks && npm install && npm run benchmark-fastify",
+ "bench:dicer": "node bench/dicer/dicer-bench-multipart-parser.js",
+ "coveralls": "nyc report --reporter=lcov",
+ "lint": "npm run lint:standard",
+ "lint:fix": "standard --fix",
+ "lint:standard": "standard --verbose | snazzy",
+ "test:unit": "c8 --statements 98 --branches 97 --functions 96 --lines 98 node --test",
+ "test:types": "tsd",
+ "test": "npm run test:unit && npm run test:types"
+ },
+ "devDependencies": {
+ "@types/node": "^24.0.8",
+ "busboy": "^1.6.0",
+ "c8": "^10.1.2",
+ "photofinish": "^1.8.0",
+ "snazzy": "^9.0.0",
+ "standard": "^17.1.0",
+ "tinybench": "^4.0.1",
+ "tsd": "^0.32.0",
+ "tslib": "^2.8.1",
+ "typescript": "~5.9.2"
+ },
+ "keywords": [
+ "uploads",
+ "forms",
+ "multipart",
+ "form-data"
+ ],
+ "license": "MIT",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/fastify/busboy.git"
+ },
+ "bugs": {
+ "url": "https://github.com/fastify/busboy/issues"
+ },
+ "homepage": "https://github.com/fastify/busboy#readme",
+ "tsd": {
+ "directory": "test-types"
+ },
+ "standard": {
+ "globals": [
+ "describe",
+ "it"
+ ],
+ "ignore": [
+ "bench"
+ ]
+ },
+ "files": [
+ "README.md",
+ "LICENSE",
+ "lib/*",
+ "deps/encoding/*",
+ "deps/dicer/lib",
+ "deps/streamsearch/",
+ "deps/dicer/LICENSE"
+ ]
+}
diff --git a/library/helpers/tryParseJSON.test.ts b/library/helpers/tryParseJSON.test.ts
new file mode 100644
index 000000000..30e06df03
--- /dev/null
+++ b/library/helpers/tryParseJSON.test.ts
@@ -0,0 +1,28 @@
+import * as t from "tap";
+import { tryParseJSON } from "./tryParseJSON";
+
+// Unit tests for the shared JSON helper: valid JSON round-trips; malformed
+// JSON, empty strings, and non-string input all yield undefined.
+t.test("tryParseJSON", async (t) => {
+  t.test("valid JSON string", async (t) => {
+    const jsonString = '{"name":"John","age":30}';
+    const result = tryParseJSON(jsonString);
+    t.same(result, { name: "John", age: 30 });
+  });
+
+  t.test("invalid JSON string", async (t) => {
+    const jsonString = '{"name":"John","age":30'; // Missing closing brace
+    const result = tryParseJSON(jsonString);
+    t.equal(result, undefined);
+  });
+
+  t.test("empty string", async (t) => {
+    const jsonString = "";
+    const result = tryParseJSON(jsonString);
+    t.equal(result, undefined);
+  });
+
+  t.test("non-string input", async (t) => {
+    // JSON.parse coerces the Date to its string form, which is not valid
+    // JSON, so the helper returns undefined.
+    // @ts-expect-error Testing invalid input
+    const result = tryParseJSON(new Date());
+    t.equal(result, undefined);
+  });
+});
diff --git a/library/helpers/tryParseJSON.ts b/library/helpers/tryParseJSON.ts
new file mode 100644
index 000000000..483a06fe3
--- /dev/null
+++ b/library/helpers/tryParseJSON.ts
@@ -0,0 +1,7 @@
+/**
+ * Parse `jsonString` as JSON, returning `undefined` instead of throwing on
+ * invalid input.
+ *
+ * Note: `tryParseJSON("null")` returns `null` and `tryParseJSON("false")`
+ * returns `false`; callers distinguishing "parse failed" should compare
+ * against `undefined` rather than relying on truthiness.
+ */
+export function tryParseJSON(jsonString: string) {
+  try {
+    return JSON.parse(jsonString);
+  } catch {
+    return undefined;
+  }
+}
diff --git a/library/sources/HTTP2Server.test.ts b/library/sources/HTTP2Server.test.ts
index 264db54cd..2c0fc0493 100644
--- a/library/sources/HTTP2Server.test.ts
+++ b/library/sources/HTTP2Server.test.ts
@@ -733,3 +733,91 @@ t.test("it reports attack waves", async (t) => {
});
});
});
+
+// Each subtest callback accepts its own `t` (matching the style of the
+// sibling tests in HTTPServer.test.ts) so assertions are attributed to the
+// subtest rather than the file-level tap instance.
+t.test("it parses Multipart body", async (t) => {
+  // Enables body parsing
+  process.env.NEXT_DEPLOYMENT_ID = "";
+
+  const server = createMinimalTestServer();
+
+  await new Promise((resolve) => {
+    server.listen(3435, () => {
+      http2Request(
+        new URL("http://localhost:3435"),
+        "POST",
+        {
+          "Content-Type":
+            "multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW",
+        },
+        '------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name="field1"\r\n\r\nvalue1\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name="field2"\r\n\r\n{"abc": "test", "arr": ["c"]}\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW--'
+      ).then(({ body }) => {
+        const context = JSON.parse(body);
+        t.same(context.body, {
+          fields: [
+            { name: "field1", value: "value1" },
+            { name: "field2", value: { abc: "test", arr: ["c"] } },
+          ],
+        });
+        server.close();
+        resolve();
+      });
+    });
+  });
+});
+
+t.test("it ignores files in Multipart body", async (t) => {
+  // Enables body parsing
+  process.env.NEXT_DEPLOYMENT_ID = "";
+
+  const server = createMinimalTestServer();
+
+  await new Promise((resolve) => {
+    server.listen(3436, () => {
+      http2Request(
+        new URL("http://localhost:3436"),
+        "POST",
+        {
+          "Content-Type":
+            "multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW",
+        },
+        '------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name="field1"\r\n\r\nvalueabc\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name="file1"; filename="test.txt"\r\nContent-Type: text/plain\r\n\r\nThis is the content of the file.\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name="field2"\r\n\r\n{"abc": "test", "arr": ["c"]}\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW--'
+      ).then(({ body }) => {
+        const context = JSON.parse(body);
+        t.same(context.body, {
+          fields: [
+            { name: "field1", value: "valueabc" },
+            { name: "field2", value: { abc: "test", arr: ["c"] } },
+          ],
+        });
+        server.close();
+        resolve();
+      });
+    });
+  });
+});
+
+t.test("invalid Multipart body results in empty body", async (t) => {
+  // Enables body parsing
+  process.env.NEXT_DEPLOYMENT_ID = "";
+
+  const server = createMinimalTestServer();
+
+  await new Promise((resolve) => {
+    server.listen(3437, () => {
+      http2Request(
+        new URL("http://localhost:3437"),
+        "POST",
+        {
+          // Boundary deliberately differs from the one used in the payload,
+          // so the multipart parse fails and no body is recorded.
+          "Content-Type":
+            "multipart/form-data; boundary=----WebKitFormBoundaryABCDEFGHIJ",
+        },
+        '------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name="field1"\r\n\r\nvalueabc\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name="field2"\r\n\r\n{"abc": "test", "arr": ["c"]}\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW--'
+      ).then(({ body }) => {
+        const context = JSON.parse(body);
+        t.same(context.body, undefined);
+        server.close();
+        resolve();
+      });
+    });
+  });
+});
diff --git a/library/sources/HTTPServer.test.ts b/library/sources/HTTPServer.test.ts
index 5153afb1c..2f053481f 100644
--- a/library/sources/HTTPServer.test.ts
+++ b/library/sources/HTTPServer.test.ts
@@ -1160,3 +1160,104 @@ t.test("it reports attack waves", async (t) => {
});
});
});
+
+t.test("It decodes multipart form data and sets body in context", async (t) => {
+  // Enables body parsing
+  process.env.NEXT_DEPLOYMENT_ID = "";
+
+  const server = http.createServer((req, res) => {
+    // Echo the captured request context back so assertions can inspect it.
+    res.setHeader("Content-Type", "application/json");
+    res.end(JSON.stringify(getContext()));
+  });
+
+  await new Promise((resolve) => {
+    server.listen(3230, () => {
+      // NOTE(review): the .then() chain has no rejection handler; a failed
+      // request leaves this promise pending until tap's timeout instead of
+      // failing fast — consider adding a catch.
+      fetch({
+        url: new URL("http://localhost:3230"),
+        method: "POST",
+        headers: {
+          "Content-Type":
+            "multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW",
+        },
+        body: '------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name="field1"\r\n\r\nvalue1\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name="field2"\r\n\r\n{"abc": "test", "arr": ["c"]}\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW--',
+        timeoutInMS: 500,
+      }).then(({ body }) => {
+        const context = JSON.parse(body);
+        // JSON-looking field values are parsed into objects.
+        t.same(context.body, {
+          fields: [
+            { name: "field1", value: "value1" },
+            { name: "field2", value: { abc: "test", arr: ["c"] } },
+          ],
+        });
+        server.close();
+        resolve();
+      });
+    });
+  });
+});
+
+t.test("It ignores multipart form data files", async (t) => {
+  // Enables body parsing
+  process.env.NEXT_DEPLOYMENT_ID = "";
+
+  const server = http.createServer((req, res) => {
+    res.setHeader("Content-Type", "application/json");
+    res.end(JSON.stringify(getContext()));
+  });
+
+  await new Promise((resolve) => {
+    server.listen(3231, () => {
+      fetch({
+        url: new URL("http://localhost:3231"),
+        method: "POST",
+        headers: {
+          "Content-Type":
+            "multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW",
+        },
+        body: '------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name="field1"\r\n\r\nvalueabc\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name="file1"; filename="test.txt"\r\nContent-Type: text/plain\r\n\r\nThis is the content of the file.\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name="field2"\r\n\r\n{"abc": "test", "arr": ["c"]}\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW--',
+        timeoutInMS: 500,
+      }).then(({ body }) => {
+        const context = JSON.parse(body);
+        // Only non-file fields are captured; the file part is absent.
+        t.same(context.body, {
+          fields: [
+            { name: "field1", value: "valueabc" },
+            { name: "field2", value: { abc: "test", arr: ["c"] } },
+          ],
+        });
+        server.close();
+        resolve();
+      });
+    });
+  });
+});
+
+t.test("Invalid multipart form data is ignored", async (t) => {
+  // Enables body parsing
+  process.env.NEXT_DEPLOYMENT_ID = "";
+
+  const server = http.createServer((req, res) => {
+    res.setHeader("Content-Type", "application/json");
+    res.end(JSON.stringify(getContext()));
+  });
+
+  await new Promise((resolve) => {
+    server.listen(3232, () => {
+      fetch({
+        url: new URL("http://localhost:3232"),
+        method: "POST",
+        headers: {
+          // Boundary deliberately differs from the one in the payload, so
+          // the multipart parse fails and no body is recorded.
+          "Content-Type":
+            "multipart/form-data; boundary=----WebKitFormBoundaryABCDEFGHIJ",
+        },
+        body: '------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name="field1"\r\n\r\nvalueabc\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name="field2"\r\n\r\n{"abc": "test", "arr": ["c"]}\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW--',
+        timeoutInMS: 500,
+      }).then(({ statusCode, body }) => {
+        // The request itself still succeeds; only the parsed body is empty.
+        t.same(statusCode, 200);
+        const context = JSON.parse(body);
+        t.same(context.body, undefined);
+        server.close();
+        resolve();
+      });
+    });
+  });
+});
diff --git a/library/sources/PubSub.ts b/library/sources/PubSub.ts
index 561e1d189..48f7f2ae4 100644
--- a/library/sources/PubSub.ts
+++ b/library/sources/PubSub.ts
@@ -3,6 +3,7 @@ import { Hooks } from "../agent/hooks/Hooks";
import { wrapExport } from "../agent/hooks/wrapExport";
import { Wrapper } from "../agent/Wrapper";
import type { Message } from "@google-cloud/pubsub";
+import { tryParseJSON } from "../helpers/tryParseJSON";
export class PubSub implements Wrapper {
private wrapMessageHandler(args: unknown[]) {
@@ -86,11 +87,3 @@ function handleMessage(handler: Function) {
);
};
}
-
-function tryParseJSON(jsonString: string) {
- try {
- return JSON.parse(jsonString);
- } catch {
- return undefined;
- }
-}
diff --git a/library/sources/http-server/contextFromRequest.ts b/library/sources/http-server/contextFromRequest.ts
index 542b98d89..6b7087dc1 100644
--- a/library/sources/http-server/contextFromRequest.ts
+++ b/library/sources/http-server/contextFromRequest.ts
@@ -7,7 +7,7 @@ import { tryParseURLParams } from "../../helpers/tryParseURLParams";
export function contextFromRequest(
req: IncomingMessage,
- body: string | undefined,
+ body: unknown,
module: string
): Context {
const queryObject: Record = {};
@@ -18,15 +18,6 @@ export function contextFromRequest(
}
}
- let parsedBody: unknown = undefined;
- if (body) {
- try {
- parsedBody = JSON.parse(body);
- } catch {
- // Ignore
- }
- }
-
return {
url: req.url,
method: req.method,
@@ -36,7 +27,7 @@ export function contextFromRequest(
source: `${module}.createServer`,
routeParams: {},
cookies: req.headers?.cookie ? parse(req.headers.cookie) : {},
- body: parsedBody,
+ body: body ? body : undefined,
remoteAddress: getIPAddressFromRequest({
headers: req.headers,
remoteAddress: req.socket?.remoteAddress,
diff --git a/library/sources/http-server/createRequestListener.ts b/library/sources/http-server/createRequestListener.ts
index 89007f7e4..ceef2b31a 100644
--- a/library/sources/http-server/createRequestListener.ts
+++ b/library/sources/http-server/createRequestListener.ts
@@ -52,7 +52,7 @@ function callListenerWithContext(
res: ServerResponse,
module: string,
agent: Agent,
- body: string
+ body: unknown
) {
const context = contextFromRequest(req, body, module);
diff --git a/library/sources/http-server/readBodyStream.ts b/library/sources/http-server/readBodyStream.ts
index e59024dc3..ef8932c03 100644
--- a/library/sources/http-server/readBodyStream.ts
+++ b/library/sources/http-server/readBodyStream.ts
@@ -3,11 +3,16 @@ import { PassThrough } from "stream";
import { Agent } from "../../agent/Agent";
import { getMaxBodySize } from "../../helpers/getMaxBodySize";
import { replaceRequestBody } from "./replaceRequestBody";
+import { type BusboyHeaders, Busboy } from "../../helpers/form-parsing";
+
+import { getBodyDataType } from "../../agent/api-discovery/getBodyDataType";
+import { tryParseJSON } from "../../helpers/tryParseJSON";
+import { getInstance } from "../../agent/AgentSingleton";
type BodyReadResult =
| {
success: true;
- body: string;
+ body: unknown;
}
| {
success: false;
@@ -18,11 +23,44 @@ export async function readBodyStream(
res: ServerResponse,
agent: Agent
): Promise {
- let body = "";
+ let bodyText = "";
+ let bodyFields: { name: string; value: unknown }[] = [];
let bodySize = 0;
const maxBodySize = getMaxBodySize();
const stream = new PassThrough();
+ let busboy: Busboy | undefined = undefined;
+
+ if (req.headers["content-type"] !== undefined) {
+ const bodyType = getBodyDataType(req.headers);
+ if (bodyType === "form-data" || bodyType === "form-urlencoded") {
+ busboy = new Busboy({
+ headers: req.headers as BusboyHeaders,
+ });
+
+ busboy.on("error", (err) => {
+ getInstance()?.log(
+ `Error parsing form data body: ${err instanceof Error ? err.message : String(err)}`
+ );
+ });
+
+ busboy.on("field", (fieldname, val) => {
+ if (typeof val !== "string") {
+ return;
+ }
+
+ if (val.includes('"')) {
+ const decodedVal = tryParseJSON(val);
+ if (decodedVal !== undefined) {
+ bodyFields.push({ name: fieldname, value: decodedVal });
+ return;
+ }
+ }
+
+ bodyFields.push({ name: fieldname, value: val });
+ });
+ }
+ }
try {
for await (const chunk of req) {
if (bodySize + chunk.length > maxBodySize) {
@@ -41,8 +79,10 @@ export async function readBodyStream(
}
bodySize += chunk.length;
- body += chunk.toString();
+ bodyText += chunk.toString();
stream.push(chunk);
+
+ busboy?.write(chunk);
}
} catch {
res.statusCode = 500;
@@ -52,6 +92,7 @@ export async function readBodyStream(
req.destroy();
}
);
+ busboy?.end();
return {
success: false,
@@ -60,12 +101,30 @@ export async function readBodyStream(
// End the stream
stream.push(null);
+ busboy?.end();
// Ensure the body stream can be read again by the application
replaceRequestBody(req, stream);
+ if (bodyFields.length > 0) {
+ return {
+ success: true,
+ body: {
+ fields: bodyFields,
+ },
+ };
+ }
+
+ const parsedBodyText = tryParseJSON(bodyText);
+ if (parsedBodyText) {
+ return {
+ success: true,
+ body: parsedBodyText,
+ };
+ }
+
return {
success: true,
- body,
+ body: undefined,
};
}
diff --git a/library/tsconfig.build.json b/library/tsconfig.build.json
index 449b04a64..b78347bca 100644
--- a/library/tsconfig.build.json
+++ b/library/tsconfig.build.json
@@ -4,6 +4,6 @@
"outDir": "../build",
"noEmit": false
},
- "include": ["**/*.ts"],
+ "include": ["**/*.ts", "**/*.js"],
"exclude": ["**/*.test.ts", "**/*.tests.ts"]
}