// @adonisjs/bodyparser — BodyParser middleware for the AdonisJS HTTP server
// to read and parse the request body.
// (Compiled JavaScript bundle; original sources are TypeScript.)
import {
MultipartFile,
computeFileTypeFromName,
getFileType,
supportMagicFileTypes
} from "./chunk-DKKMQ6FW.js";
// src/bodyparser_middleware.ts
import { tmpdir } from "node:os";
import { Exception as Exception4 } from "@poppinss/utils";
import { join, isAbsolute } from "node:path";
import { createId } from "@paralleldrive/cuid2";
// src/debug.ts
import { debuglog } from "node:util";
// Namespaced debug logger; enable output with NODE_DEBUG=adonisjs:bodyparser
var debug_default = debuglog("adonisjs:bodyparser");
// src/parsers/form.ts
import raw from "raw-body";
import inflate from "inflation";
import qs from "qs";
/**
 * Parses a urlencoded ("application/x-www-form-urlencoded") request body
 * into a JavaScript object using "qs".
 *
 * Returns `{ parsed, raw }` where `raw` is the unparsed body string.
 */
async function parseForm(req, options) {
  // Layer user supplied config over parser defaults.
  const parserOptions = { encoding: "utf8", limit: "56kb", length: 0, ...options };

  // "qs" options live under the "queryString" key; dot notation in field
  // names is enabled unless the user explicitly opted out.
  const qsOptions = { ...parserOptions.queryString };
  if (qsOptions.allowDots === void 0) {
    qsOptions.allowDots = true;
  }

  // The content-length header only reflects the body size when no
  // compression is applied, so use it only for identity encoding.
  const reportedLength = req.headers["content-length"];
  const contentEncoding = req.headers["content-encoding"] || "identity";
  if (reportedLength && contentEncoding === "identity") {
    parserOptions.length = ~~reportedLength;
  }

  // Optionally normalize empty string values to null at decode time.
  if (parserOptions.convertEmptyStringsToNull) {
    qsOptions.decoder = function (str, defaultDecoder, charset, type) {
      const decoded = defaultDecoder(str, defaultDecoder, charset);
      if (type === "value" && decoded === "") {
        return null;
      }
      return decoded;
    };
  }

  const body = await raw(inflate(req), parserOptions);
  return { parsed: qs.parse(body, qsOptions), raw: body };
}
// src/parsers/json.ts
import raw2 from "raw-body";
import inflate2 from "inflation";
import json from "@poppinss/utils/json";
import { Exception } from "@poppinss/utils";
// Strict JSON bodies must begin with "{" or "[" after optional JSON
// whitespace (space, tab, LF, CR) — primitives are rejected in strict mode.
var strictJSONReg = /^[\x20\x09\x0a\x0d]*(\[|\{)/;
/**
 * JSON.parse reviver that converts empty string values to null.
 * The root reviver call uses an empty key, so that value is passed
 * through untouched.
 */
function convertEmptyStringsToNull(key, value) {
  if (key === "") {
    return value;
  }
  return value === "" ? null : value;
}
/**
 * Parses a JSON request body.
 *
 * Behavior:
 * - Strict mode (default, disable with `strict: false`) only accepts a
 *   top-level object or array and normalizes an empty body to `{}`.
 * - `convertEmptyStringsToNull` installs a reviver that maps "" to null.
 *
 * Returns `{ parsed, raw }`; throws a 422 Exception for non object/array
 * bodies in strict mode, and rethrows parse errors with `status = 400`
 * and the offending payload attached as `error.body`.
 */
async function parseJSON(req, options) {
  // User config layered over parser defaults (1mb limit, utf8).
  const normalizedOptions = Object.assign(
    {
      encoding: "utf8",
      limit: "1mb",
      length: 0
    },
    options
  );
  // Only trust content-length for identity encoding; for compressed
  // bodies the header reflects the compressed size, not the raw size.
  const contentLength = req.headers["content-length"];
  const encoding = req.headers["content-encoding"] || "identity";
  if (contentLength && encoding === "identity") {
    normalizedOptions.length = ~~contentLength;
  }
  const strict = normalizedOptions.strict !== false;
  const reviver = normalizedOptions.convertEmptyStringsToNull ? convertEmptyStringsToNull : void 0;
  const requestBody = await raw2(inflate2(req), normalizedOptions);
  if (!requestBody) {
    // Empty body: strict mode yields an empty object, otherwise the
    // (empty) body is returned as-is.
    return strict ? {
      parsed: {},
      raw: requestBody
    } : {
      parsed: requestBody,
      raw: requestBody
    };
  }
  if (strict && !strictJSONReg.test(requestBody)) {
    throw new Exception("Invalid JSON, only supports object and array", { status: 422 });
  }
  try {
    return {
      parsed: json.safeParse(requestBody, reviver),
      raw: requestBody
    };
  } catch (error) {
    // Attach HTTP status and the raw payload for upstream error handlers.
    error.status = 400;
    error.body = requestBody;
    throw error;
  }
}
// src/multipart/main.ts
import multiparty from "@poppinss/multiparty";
import bytes from "bytes";
import { Exception as Exception3 } from "@poppinss/utils";
// src/form_fields.ts
import lodash from "@poppinss/utils/lodash";
/**
 * FormFields collects key/value pairs coming off a multipart or
 * urlencoded stream and materializes them into a nested object,
 * honoring array-like expressions ("user[]", "user[0]") in field names.
 */
var FormFields = class {
  // Accumulated fields tree (nested via lodash.set path semantics)
  #fields = {};
  // Config; only `convertEmptyStringsToNull` is read here
  #config;
  constructor(config) {
    this.#config = config;
  }
  /**
   * Add a new key/value pair. The keys with array like
   * expressions are handled properly.
   *
   * @example
   * ```
   * formfields.add('username', 'virk')
   *
   * // array
   * formfields.add('username[]', 'virk')
   * formfields.add('username[]', 'nikk')
   *
   * // Indexed keys are ordered properly
   * formfields.add('username[1]', 'virk')
   * formfields.add('username[0]', 'nikk')
   * ```
   */
  add(key, value) {
    let isArray = false;
    if (this.#config.convertEmptyStringsToNull && value === "") {
      value = null;
    }
    // A trailing "[]" means "append"; strip it and remember the intent
    key = key.replace(/\[]$/, () => {
      isArray = true;
      return "";
    });
    const existingValue = lodash.get(this.#fields, key);
    // NOTE(review): an existing falsy value (e.g. null produced by
    // convertEmptyStringsToNull) is overwritten here rather than being
    // promoted to an array — confirm this is intentional.
    if (!existingValue) {
      lodash.set(this.#fields, key, isArray ? [value] : value);
      return;
    }
    if (Array.isArray(existingValue)) {
      existingValue.push(value);
      return;
    }
    // Second occurrence of a scalar key: promote it to an array
    lodash.set(this.#fields, key, [existingValue, value]);
  }
  /**
   * Returns the collected form fields. Note: this is the live internal
   * object, not a defensive copy.
   */
  get() {
    return this.#fields;
  }
};
// src/multipart/part_handler.ts
import { extname } from "node:path";
import { Exception as Exception2 } from "@poppinss/utils";
/**
 * PartHandler wraps a single multipart "part" (one uploaded file) and
 * drives the lifecycle of the associated MultipartFile: streaming
 * progress, file-type detection, validation, and terminal success/error
 * reporting. Consumers stream the part; this class observes.
 */
var PartHandler = class {
  // The multiparty part (a readable stream with filename/name/headers)
  #part;
  // Per-handler options (size limit, allowed extnames, deferValidations)
  #options;
  /**
   * The stream buffer reported by the stream consumer. We hold the buffer until are
   * able to detect the file extension and then buff memory is released
   */
  #buff;
  /**
   * A boolean to know, if we have emitted the error event after one or
   * more validation errors. We need this flag, since the race conditions
   * between `data` and `error` events will trigger multiple `error`
   * emit.
   */
  #emittedValidationError = false;
  /**
   * A boolean to know if we can use the magic number to detect the file type. This is how it
   * works.
   *
   * - We begin by extracting the file extension from the file name
   * - If the file has no extension, we try to inspect the buffer
   * - If the extension is something we support via magic numbers, then we ignore the extension
   *   and inspect the buffer
   * - Otherwise, we have no other option than to trust the extension
   *
   * Think of this as using the optimal way for validating the file type
   */
  get #canFileTypeBeDetected() {
    const fileExtension = extname(this.#part.filename).replace(/^\./, "");
    return fileExtension ? supportMagicFileTypes.has(fileExtension) : true;
  }
  /**
   * Creating a new file object for each part inside the multipart
   * form data
   */
  file;
  constructor(part, options) {
    this.#part = part;
    this.#options = options;
    this.file = new MultipartFile(
      {
        clientName: part.filename,
        fieldName: part.name,
        headers: part.headers
      },
      {
        size: options.size,
        extnames: options.extnames
      }
    );
  }
  /**
   * Detects the file type and extension and also validates it when validations
   * are not deferred. No-op until at least one chunk has been buffered.
   */
  async #detectFileTypeAndExtension() {
    if (!this.#buff) {
      return;
    }
    const fileType = this.#canFileTypeBeDetected ? await getFileType(this.#buff) : computeFileTypeFromName(this.file.clientName, this.file.headers);
    if (fileType) {
      this.file.extname = fileType.ext;
      this.file.type = fileType.type;
      this.file.subtype = fileType.subtype;
    }
  }
  /**
   * Skip the stream or end it forcefully. This is invoked when the
   * streaming consumer reports an error
   */
  #skipEndStream() {
    this.#part.emit("close");
  }
  /**
   * Finish the process of listening for any more events and mark the
   * file state as consumed. Validations run here unless deferred.
   */
  #finish() {
    this.file.state = "consumed";
    if (!this.#options.deferValidations) {
      this.file.validate();
    }
  }
  /**
   * Start the process the updating the file state
   * to streaming mode.
   */
  begin() {
    this.file.state = "streaming";
  }
  /**
   * Handles the file upload progress by validating the file size and
   * extension. Chunks are buffered only until the extension is known,
   * then the buffer is released to free memory.
   */
  async reportProgress(line, bufferLength) {
    if (this.file.state !== "streaming") {
      return;
    }
    if (this.file.extname === void 0) {
      this.#buff = this.#buff ? Buffer.concat([this.#buff, line]) : line;
      await this.#detectFileTypeAndExtension();
    } else {
      this.#buff = void 0;
    }
    this.file.size = this.file.size + bufferLength;
    if (this.#options.deferValidations) {
      return;
    }
    this.file.validate();
    // Emit a single "error" even though multiple chunks may fail
    // validation (guarded by #emittedValidationError).
    if (!this.file.isValid && !this.#emittedValidationError) {
      this.#emittedValidationError = true;
      this.#part.emit(
        "error",
        new Exception2("one or more validations failed", {
          code: "E_STREAM_VALIDATION_FAILURE",
          status: 400
        })
      );
    }
  }
  /**
   * Report errors encountered while processing the stream. These can be errors
   * apart from the one reported by this class. For example: The `s3` failure
   * due to some bad credentials. Our own validation failures are already
   * recorded on the file, so they are not duplicated here.
   */
  async reportError(error) {
    if (this.file.state !== "streaming") {
      return;
    }
    this.#skipEndStream();
    this.#finish();
    if (error.code === "E_STREAM_VALIDATION_FAILURE") {
      return;
    }
    this.file.errors.push({
      fieldName: this.file.fieldName,
      clientName: this.file.clientName,
      type: "fatal",
      message: error.message
    });
  }
  /**
   * Report success data about the file. `filePath`/`tmpPath` are lifted
   * onto the file; everything else becomes `file.meta`.
   */
  async reportSuccess(data) {
    if (this.file.state !== "streaming") {
      return;
    }
    // Last chance to detect the type (e.g. tiny files consumed in one go)
    if (this.file.extname === void 0) {
      await this.#detectFileTypeAndExtension();
    }
    if (data) {
      const { filePath, tmpPath, ...meta } = data;
      if (filePath) {
        this.file.filePath = filePath;
      }
      if (tmpPath) {
        this.file.tmpPath = tmpPath;
      }
      this.file.meta = meta || {};
    }
    this.#finish();
  }
};
// src/multipart/main.ts
/**
 * Multipart consumes a "multipart/form-data" request stream via
 * multiparty. File parts are only processed when a matching `onFile`
 * handler (exact field name or "*") is registered; unmatched parts are
 * drained. Fields are always collected and set as the request body.
 */
var Multipart = class {
  // HTTP context (request, logger, route)
  #ctx;
  // Merged multipart config (limit, fieldsLimit, maxFields, ...)
  #config;
  /**
   * The registered handlers to handle the file uploads
   */
  #handlers = {};
  /**
   * Collected fields from the multipart stream
   */
  #fields;
  /**
   * Collected files from the multipart stream. Files are only collected
   * when there is an attached listener for a given file.
   */
  #files;
  /**
   * We track the finishing of `this.onFile` async handlers
   * to make sure that `process` promise resolves for all
   * handlers to finish.
   */
  #pendingHandlers = 0;
  /**
   * The reference to underlying multiparty form
   */
  #form;
  /**
   * Total size limit of the multipart stream. If it goes beyond
   * the limit, then an exception will be raised.
   */
  #upperLimit;
  /**
   * Total size in bytes for all the fields (not the files)
   */
  #maxFieldsSize;
  /**
   * A track of total number of file bytes processed so far
   */
  #processedBytes = 0;
  /**
   * The current state of the multipart form handler
   * ("idle" | "processing" | "success" | "error")
   */
  state = "idle";
  constructor(ctx, config = {}) {
    this.#ctx = ctx;
    this.#config = config;
    this.#fields = new FormFields({
      convertEmptyStringsToNull: this.#config.convertEmptyStringsToNull === true
    });
    this.#files = new FormFields({
      convertEmptyStringsToNull: this.#config.convertEmptyStringsToNull === true
    });
  }
  /**
   * Returns a boolean telling whether all streams have been
   * consumed along with all handlers execution
   *
   * NOTE(review): "flushing" is an internal multiparty counter, not a
   * documented API — re-verify on dependency upgrades.
   */
  #isClosed() {
    return this.#form["flushing"] <= 0 && this.#pendingHandlers <= 0;
  }
  /**
   * Removes array like expression from the part name to
   * find the handler. Only the first "[...]" occurrence is
   * stripped (non-global regex).
   */
  #getHandlerName(name) {
    return name.replace(/\[\d*\]/, "");
  }
  /**
   * Validates and returns an error when upper limit is defined and
   * processed bytes is over the upper limit. Also accumulates the
   * processed byte count as a side effect.
   */
  #validateProcessedBytes(chunkLength) {
    if (!this.#upperLimit) {
      return;
    }
    this.#processedBytes += chunkLength;
    if (this.#processedBytes > this.#upperLimit) {
      return new Exception3("request entity too large", {
        code: "E_REQUEST_ENTITY_TOO_LARGE",
        status: 413
      });
    }
  }
  /**
   * Handles a given part by invoking it's handler or
   * by resuming the part, if there is no defined
   * handler
   */
  async #handlePart(part) {
    // Parts without a filename are plain fields (handled elsewhere);
    // parts without a name cannot be routed to a handler.
    if (!part.name || !part.filename) {
      part.resume();
      return;
    }
    const name = this.#getHandlerName(part.name);
    const handler = this.#handlers[name] || this.#handlers["*"];
    if (!handler) {
      debug_default('skipping multipart part as there are no handlers "%s"', name);
      part.resume();
      return;
    }
    debug_default('processing multipart part "%s"', name);
    this.#pendingHandlers++;
    const partHandler = new PartHandler(part, handler.options);
    partHandler.begin();
    this.#files.add(partHandler.file.fieldName, partHandler.file);
    part.file = partHandler.file;
    try {
      // The user handler consumes the stream; the reporter callback we
      // pass here feeds each chunk back for size/type validation.
      const response = await handler.handler(part, async (line) => {
        if (this.state !== "processing") {
          return;
        }
        const lineLength = line.length;
        const error = this.#validateProcessedBytes(lineLength);
        if (error) {
          part.emit("error", error);
          this.abort(error);
          return;
        }
        try {
          await partHandler.reportProgress(line, lineLength);
        } catch (err) {
          this.#ctx.logger.fatal(
            'Unhandled multipart stream error. Make sure to handle "error" events for all manually processed streams'
          );
        }
      });
      await partHandler.reportSuccess(response || {});
    } catch (error) {
      await partHandler.reportError(error);
    }
    this.#pendingHandlers--;
  }
  /**
   * Record the fields inside multipart contract
   */
  #handleField(key, value) {
    if (!key) {
      return;
    }
    this.#fields.add(key, value);
  }
  /**
   * Processes the user config and computes the `upperLimit` value from
   * it. String limits ("20mb") are converted to byte counts.
   */
  #processConfig(config) {
    this.#config = Object.assign(this.#config, config);
    this.#maxFieldsSize = typeof this.#config.fieldsLimit === "string" ? bytes(this.#config.fieldsLimit) : this.#config.fieldsLimit;
    this.#upperLimit = typeof this.#config.limit === "string" ? bytes(this.#config.limit) : this.#config.limit;
  }
  /**
   * Mark the process as finished and flush collected files/fields onto
   * the request. Only transitions from "idle"/"processing", so the
   * first terminal state wins.
   */
  #finish(newState) {
    if (this.state === "idle" || this.state === "processing") {
      this.state = newState;
      this.#ctx.request["__raw_files"] = this.#files.get();
      this.#ctx.request.setInitialBody(this.#fields.get());
    }
  }
  /**
   * Attach handler for a given file. To handle all files, you
   * can attach a wildcard handler.
   *
   * @example
   * ```ts
   * multipart.onFile('package', {}, async (stream) => {
   * })
   *
   * multipart.onFile('*', {}, async (stream) => {
   * })
   * ```
   */
  onFile(name, options, handler) {
    this.#handlers[name] = { handler, options };
    return this;
  }
  /**
   * Abort request by emitting error
   */
  abort(error) {
    this.#form.emit("error", error);
  }
  /**
   * Process the request by going all the file and field
   * streams. Resolves once every part and every async file handler
   * has finished; rejects on stream/limit errors.
   */
  process(config) {
    return new Promise((resolve, reject) => {
      // The stream can only be consumed once
      if (this.state !== "idle") {
        reject(
          new Exception3("multipart stream has already been consumed", {
            code: "E_RUNTIME_EXCEPTION"
          })
        );
        return;
      }
      this.state = "processing";
      this.#processConfig(config);
      this.#form = new multiparty.Form({
        maxFields: this.#config.maxFields,
        maxFieldsSize: this.#maxFieldsSize
      });
      debug_default("processing multipart body");
      // Translate well-known multiparty error messages into typed
      // AdonisJS exceptions; drain the request so the socket is freed.
      this.#form.on("error", (error) => {
        this.#finish("error");
        process.nextTick(() => {
          if (this.#ctx.request.request.readable) {
            this.#ctx.request.request.resume();
          }
          if (error.message.match(/stream ended unexpectedly/)) {
            reject(
              new Exception3("Invalid multipart request", {
                status: 400,
                code: "E_INVALID_MULTIPART_REQUEST"
              })
            );
          } else if (error.message.match(/maxFields [0-9]+ exceeded/)) {
            reject(
              new Exception3("Fields length limit exceeded", {
                status: 413,
                code: "E_REQUEST_ENTITY_TOO_LARGE"
              })
            );
          } else if (error.message.match(/maxFieldsSize [0-9]+ exceeded/)) {
            reject(
              new Exception3("Fields size in bytes exceeded", {
                status: 413,
                code: "E_REQUEST_ENTITY_TOO_LARGE"
              })
            );
          } else {
            reject(error);
          }
        });
      });
      // Each file part is awaited; resolve as soon as everything is
      // drained (the "close" event may fire before late handlers end).
      this.#form.on("part", async (part) => {
        await this.#handlePart(part);
        if (this.#isClosed()) {
          this.#finish("success");
          resolve();
        }
      });
      this.#form.on("field", (key, value) => {
        try {
          this.#handleField(key, value);
        } catch (error) {
          this.abort(error);
        }
      });
      this.#form.on("close", () => {
        if (this.#isClosed()) {
          this.#finish("success");
          resolve();
        }
      });
      this.#form.parse(this.#ctx.request.request);
    });
  }
};
// src/multipart/stream_file.ts
import { unlink } from "node:fs/promises";
import { createWriteStream } from "node:fs";
import { pipeline } from "node:stream/promises";
/**
 * Streams a readable stream to `location` on disk. An optional
 * `dataListener` observes every raw chunk (used by the bodyparser to
 * validate size/type while writing the temp file). On failure the
 * partially written file is removed (best effort) and the error is
 * rethrown.
 */
async function streamFile(readStream, location, dataListener) {
  const hasListener = typeof dataListener === "function";
  if (hasListener) {
    // Pause before attaching so no chunks are emitted until the
    // pipeline resumes the stream.
    readStream.pause();
    readStream.on("data", dataListener);
  }
  const destination = createWriteStream(location);
  try {
    await pipeline(readStream, destination);
  } catch (streamError) {
    // Best-effort cleanup; ignore unlink failures (file may not exist)
    unlink(destination.path).catch(() => {
    });
    throw streamError;
  }
}
// src/bindings/request.ts
import lodash2 from "@poppinss/utils/lodash";
import { Request } from "@adonisjs/http-server";
import { RuntimeException } from "@poppinss/utils";
/**
 * Copies validation options onto a file, but only for settings the file
 * does not already define — explicit per-file limits always win over
 * the options passed to `request.file()` / `request.files()`.
 */
function setFileOptions(file, options) {
  if (!options) {
    return;
  }
  if (file.sizeLimit === void 0 && options.size) {
    file.sizeLimit = options.size;
  }
  if (file.allowedExtensions === void 0 && options.extnames) {
    file.allowedExtensions = options.extnames;
  }
}
/**
 * Tells whether the value is a MultipartFile instance. Note: for falsy
 * input this returns the falsy value itself (not `false`); callers only
 * rely on truthiness (`if (!...)` and `Array.filter`).
 */
function isInstanceOfFile(file) {
  return file && file instanceof MultipartFile;
}
debug_default('extending request class with "file", "files" and "allFiles" macros');
// Include uploaded files when the request is serialized to JSON.
Request.macro("toJSON", function() {
  return {
    ...this.serialize(),
    files: this["__raw_files"] || {}
  };
});
// request.file(key, options?): returns the first uploaded file for the
// given key (unwrapping arrays), with options applied and validations
// executed, or null when no file exists at that key.
Request.macro(
  "file",
  function getFile(key, options) {
    let file = lodash2.get(this.allFiles(), key);
    file = Array.isArray(file) ? file[0] : file;
    if (!isInstanceOfFile(file)) {
      return null;
    }
    setFileOptions(file, options);
    file.validate();
    return file;
  }
);
// request.files(key, options?): always returns an array of validated
// files (possibly empty); single files are wrapped into an array.
Request.macro(
  "files",
  function getFiles(key, options) {
    let files = lodash2.get(this.allFiles(), key);
    files = Array.isArray(files) ? files : files ? [files] : [];
    return files.filter(isInstanceOfFile).map((file) => {
      setFileOptions(file, options);
      file.validate();
      return file;
    });
  }
);
// request.allFiles(): raw files map populated by the bodyparser
// middleware; throws when the middleware never ran for this request.
Request.macro("allFiles", function allFiles() {
  if (!this.__raw_files) {
    throw new RuntimeException(
      "Cannot read files. Make sure the bodyparser middleware is registered"
    );
  }
  return this["__raw_files"];
});
// src/parsers/text.ts
import raw3 from "raw-body";
import inflate3 from "inflation";
/**
 * Reads the raw request body as a string (no parsing). Returns the
 * promise produced by "raw-body".
 */
function parseText(req, options) {
  // Layer user config over parser defaults (1mb limit, utf8).
  const parserOptions = { encoding: "utf8", limit: "1mb", length: 0, ...options };

  // Trust content-length only when the body is not compressed.
  const reportedLength = req.headers["content-length"];
  const contentEncoding = req.headers["content-encoding"] || "identity";
  if (reportedLength && contentEncoding === "identity") {
    parserOptions.length = ~~reportedLength;
  }

  return raw3(inflate3(req), parserOptions);
}
// src/bodyparser_middleware.ts
/**
 * BodyParserMiddleware parses the incoming request body based upon its
 * content type, trying parsers in order: multipart, urlencoded form,
 * JSON, then raw text. Parsed data is set as the request's initial
 * body; uploaded files land in `request.__raw_files`.
 */
var BodyParserMiddleware = class {
  /**
   * Bodyparser config
   */
  #config;
  constructor(config) {
    this.#config = config;
    debug_default("using config %O", this.#config);
  }
  /**
   * Returns config for a given type ("multipart" | "form" | "json" | "raw")
   */
  #getConfigFor(type) {
    const config = this.#config[type];
    return config;
  }
  /**
   * Ensures that types exists and have length
   */
  #ensureTypes(types) {
    return !!(types && types.length);
  }
  /**
   * Returns a boolean telling if request `content-type` header
   * matches the expected types or not
   */
  #isType(request, types) {
    return !!(this.#ensureTypes(types) && request.is(types));
  }
  /**
   * Returns a proper Adonis style exception for popular error codes
   * returned by https://github.com/stream-utils/raw-body#readme.
   * Unknown error types are passed through unchanged.
   */
  #getExceptionFor(error) {
    switch (error.type) {
      case "encoding.unsupported":
        return new Exception4(error.message, {
          status: error.status,
          code: "E_ENCODING_UNSUPPORTED"
        });
      case "entity.too.large":
        return new Exception4(error.message, {
          status: error.status,
          code: "E_REQUEST_ENTITY_TOO_LARGE"
        });
      case "request.aborted":
        return new Exception4(error.message, { status: error.status, code: "E_REQUEST_ABORTED" });
      default:
        return error;
    }
  }
  /**
   * Returns the tmp path for storing the files temporarly. A custom
   * `tmpFileName` function may return an absolute path; relative names
   * are resolved inside the OS temp dir. Defaults to a cuid filename.
   */
  #getTmpPath(config) {
    if (typeof config.tmpFileName === "function") {
      const tmpPath = config.tmpFileName();
      return isAbsolute(tmpPath) ? tmpPath : join(tmpdir(), tmpPath);
    }
    return join(tmpdir(), createId());
  }
  /**
   * Handle HTTP request body by parsing it as per the user
   * config
   */
  async handle(ctx, next) {
    // Initialize so request.allFiles() never throws after this middleware
    ctx.request["__raw_files"] = {};
    const requestUrl = ctx.request.url();
    const requestMethod = ctx.request.method();
    // Only parse bodies for configured methods (e.g. POST/PUT/PATCH)
    if (!this.#config.allowedMethods.includes(requestMethod)) {
      debug_default('skipping HTTP request "%s:%s"', requestMethod, requestUrl);
      return next();
    }
    if (!ctx.request.hasBody()) {
      debug_default('skipping as request has no body "%s:%s"', requestMethod, requestUrl);
      return next();
    }
    const multipartConfig = this.#getConfigFor("multipart");
    if (this.#isType(ctx.request, multipartConfig.types)) {
      debug_default('detected multipart request "%s:%s"', requestMethod, requestUrl);
      // Always expose a Multipart instance, even when auto processing
      // is skipped, so routes can process the stream manually.
      ctx.request.multipart = new Multipart(ctx, {
        maxFields: multipartConfig.maxFields,
        limit: multipartConfig.limit,
        fieldsLimit: multipartConfig.fieldsLimit,
        convertEmptyStringsToNull: multipartConfig.convertEmptyStringsToNull
      });
      if (multipartConfig.autoProcess === false) {
        debug_default('skipping auto processing of multipart request "%s:%s"', requestMethod, requestUrl);
        return next();
      }
      // Routes listed in "processManually" opt out of auto processing
      if (ctx.route && multipartConfig.processManually.includes(ctx.route.pattern)) {
        debug_default('skipping auto processing of multipart request "%s:%s"', requestMethod, requestUrl);
        return next();
      }
      // When autoProcess is an allow-list of routes, skip routes not in it
      if (ctx.route && Array.isArray(multipartConfig.autoProcess) && !multipartConfig.autoProcess.includes(ctx.route.pattern)) {
        debug_default('skipping auto processing of multipart request "%s:%s"', requestMethod, requestUrl);
        return next();
      }
      debug_default('auto processing multipart request "%s:%s"', requestMethod, requestUrl);
      // Wildcard handler: stream every file to a temp path; validations
      // are deferred until the user accesses the file.
      ctx.request.multipart.onFile("*", { deferValidations: true }, async (part, reporter) => {
        try {
          const tmpPath = this.#getTmpPath(multipartConfig);
          await streamFile(part, tmpPath, reporter);
          return { tmpPath };
        } catch (error) {
          ctx.request.multipart.abort(error);
        }
      });
      // NOTE(review): this try/catch only rethrows — it adds nothing
      // and could be removed.
      try {
        await ctx.request.multipart.process();
        return next();
      } catch (error) {
        throw error;
      }
    }
    const formConfig = this.#getConfigFor("form");
    if (this.#isType(ctx.request, formConfig.types)) {
      debug_default('detected urlencoded request "%s:%s"', requestMethod, requestUrl);
      try {
        const { parsed, raw: raw4 } = await parseForm(ctx.request.request, formConfig);
        ctx.request.setInitialBody(parsed);
        ctx.request.updateRawBody(raw4);
        return next();
      } catch (error) {
        throw this.#getExceptionFor(error);
      }
    }
    const jsonConfig = this.#getConfigFor("json");
    if (this.#isType(ctx.request, jsonConfig.types)) {
      debug_default('detected JSON request "%s:%s"', requestMethod, requestUrl);
      try {
        const { parsed, raw: raw4 } = await parseJSON(ctx.request.request, jsonConfig);
        ctx.request.setInitialBody(parsed);
        ctx.request.updateRawBody(raw4);
        return next();
      } catch (error) {
        throw this.#getExceptionFor(error);
      }
    }
    const rawConfig = this.#getConfigFor("raw");
    if (this.#isType(ctx.request, rawConfig.types)) {
      debug_default('parsing raw body "%s:%s"', requestMethod, requestUrl);
      try {
        // Raw bodies keep an empty parsed body; only rawBody is set
        ctx.request.setInitialBody({});
        ctx.request.updateRawBody(await parseText(ctx.request.request, rawConfig));
        return next();
      } catch (error) {
        throw this.#getExceptionFor(error);
      }
    }
    // No parser matched the content type; continue untouched
    await next();
  }
};
export {
BodyParserMiddleware
};