@adonisjs/bodyparser
Version:
BodyParser middleware for AdonisJS http server to read and parse request body
415 lines (414 loc) • 13.6 kB
JavaScript
import { a as supportMagicFileTypes, i as getFileType, n as computeFileTypeFromName, r as formBodyNormalizers, t as MultipartFile } from "./file-CexcC_98.js";
import { extname } from "node:path";
import string from "@poppinss/utils/string";
import { Exception, RuntimeException } from "@poppinss/utils/exception";
import { unlink } from "node:fs/promises";
import { debuglog } from "node:util";
import multiparty from "@poppinss/multiparty";
import lodash from "@poppinss/utils/lodash";
import { createWriteStream } from "node:fs";
import { pipeline } from "node:stream/promises";
import inflate from "inflation";
import raw from "raw-body";
import { safeParse } from "@poppinss/utils/json";
import qs from "@poppinss/qs";
import { HttpRequest } from "@adonisjs/http-server";
/**
 * Namespaced debug logger for this package; enable output with
 * NODE_DEBUG=adonisjs:bodyparser. Declared `const` since the binding is
 * never reassigned (it is re-exported at the bottom of the file).
 */
const debug_default = debuglog("adonisjs:bodyparser");
/**
 * Accumulates form key/value pairs into a nested object. A key ending in
 * "[]" always produces an array; repeating a key collapses the values into
 * an array as well.
 */
var FormFields = class {
  #fields = Object.create(null);
  #normalizer;
  /**
   * @param {(value: string) => any} [normalizer] - optional transform run on
   *   every string value before it is stored.
   */
  constructor(normalizer) {
    this.#normalizer = normalizer;
  }
  /**
   * Adds a value under the given (possibly nested, lodash-path) key.
   */
  add(key, value) {
    if (typeof value === "string" && this.#normalizer) {
      value = this.#normalizer(value);
    }
    let wantsArray = false;
    key = key.replace(/\[]$/, () => {
      wantsArray = true;
      return "";
    });
    const current = lodash.get(this.#fields, key);
    if (!current) {
      lodash.set(this.#fields, key, wantsArray ? [value] : value);
    } else if (Array.isArray(current)) {
      // Already collapsed into an array: append in place.
      current.push(value);
    } else {
      // Second occurrence of a scalar key: promote to an array.
      lodash.set(this.#fields, key, [current, value]);
    }
  }
  /** Returns the accumulated fields tree. */
  get() {
    return this.#fields;
  }
};
/**
 * Tracks the lifecycle of a single uploaded file part: buffers leading
 * bytes to sniff the file type, accumulates the streamed size, and records
 * validation or fatal errors on the exposed MultipartFile instance.
 */
var PartHandler = class {
#part;
#options;
// Leading bytes, buffered only until the extension has been detected.
#buff;
// Guards against emitting the stream-validation error more than once.
#emittedValidationError = false;
// Magic-number detection is used when the file has no extension, or when
// the extension is one the magic-bytes detector supports.
get #canFileTypeBeDetected() {
const fileExtension = extname(this.#part.filename).replace(/^\./, "");
return fileExtension ? supportMagicFileTypes.has(fileExtension) : true;
}
// The MultipartFile instance, populated progressively while streaming.
file;
constructor(part, options) {
this.#part = part;
this.#options = options;
this.file = new MultipartFile({
clientName: part.filename,
fieldName: part.name,
headers: part.headers
}, {
size: options.size,
extnames: options.extnames
});
}
/**
 * Detects extname/type/subtype from the buffered bytes, falling back to the
 * client-supplied name and headers. Once 4100+ bytes are buffered without a
 * magic match, the name/header fallback is used as a last resort.
 */
async #detectFileTypeAndExtension() {
if (!this.#buff) return;
let fileType = this.#canFileTypeBeDetected ? await getFileType(this.#buff) : computeFileTypeFromName(this.file.clientName, this.file.headers);
if (!fileType && this.#buff.length >= 4100) fileType = computeFileTypeFromName(this.file.clientName, this.file.headers);
if (fileType) {
this.file.extname = fileType.ext;
this.file.type = fileType.type;
this.file.subtype = fileType.subtype;
}
}
// NOTE(review): emitting "close" directly appears to short-circuit the part
// stream so the parser can move on — confirm against @poppinss/multiparty.
#skipEndStream() {
this.#part.emit("close");
}
// Marks the file consumed and runs validations unless they are deferred.
#finish() {
this.file.state = "consumed";
if (!this.#options.deferValidations) this.file.validate();
}
// Flags the start of streaming; the report* methods are no-ops until then.
begin() {
this.file.state = "streaming";
}
/**
 * Called for every streamed chunk. Buffers bytes until the extension is
 * known, tracks total size, and (unless validations are deferred) validates
 * and emits a single E_STREAM_VALIDATION_FAILURE error on the part.
 */
async reportProgress(line, bufferLength) {
if (this.file.state !== "streaming") return;
if (this.file.extname === void 0) {
this.#buff = this.#buff ? Buffer.concat([this.#buff, line]) : line;
await this.#detectFileTypeAndExtension();
} else this.#buff = void 0;
this.file.size = this.file.size + bufferLength;
if (this.#options.deferValidations) return;
this.file.validate();
if (!this.file.isValid && !this.#emittedValidationError) {
this.#emittedValidationError = true;
this.#part.emit("error", new Exception("one or more validations failed", {
code: "E_STREAM_VALIDATION_FAILURE",
status: 400
}));
}
}
/**
 * Called when the part handler fails. Ends the part stream and records a
 * fatal error — except for validation failures, which the file already
 * carries from reportProgress.
 */
async reportError(error) {
if (this.file.state !== "streaming") return;
this.#skipEndStream();
this.#finish();
if (error.code === "E_STREAM_VALIDATION_FAILURE") return;
this.file.errors.push({
fieldName: this.file.fieldName,
clientName: this.file.clientName,
type: "fatal",
message: error.message
});
}
/**
 * Called when the part handler succeeds. Finalizes type detection, copies
 * filePath/tmpPath plus any extra metadata, then marks the file consumed.
 */
async reportSuccess(data) {
if (this.file.state !== "streaming") return;
if (this.file.extname === void 0) await this.#detectFileTypeAndExtension();
if (data) {
const { filePath, tmpPath, ...meta } = data;
if (filePath) this.file.filePath = filePath;
if (tmpPath) this.file.tmpPath = tmpPath;
this.file.meta = meta || {};
}
this.#finish();
}
};
/**
 * Consumes a multipart/form-data request stream. Plain fields are collected
 * into FormFields; file parts are routed to handlers registered via
 * onFile() (or the wildcard "*" handler) and wrapped in PartHandler.
 */
var Multipart = class {
#ctx;
#config;
// Registered file handlers keyed by normalized field name (or "*").
#handlers = {};
#fields;
#files;
// Part handlers still running; the stream counts as closed only at 0.
#pendingHandlers = 0;
#form;
// Optional byte limit for the entire request body.
#upperLimit;
#processedBytes = 0;
// idle -> processing -> success | error
state = "idle";
constructor(ctx, config = {}, _featureFlags = {}) {
this.#ctx = ctx;
this.#config = config;
this.#fields = new FormFields(config.normalizer);
this.#files = new FormFields(config.normalizer);
this.#upperLimit = config.limit;
}
// NOTE(review): "flushing" is a multiparty-internal counter of parts still
// being flushed — this reaches into a private field; confirm against
// @poppinss/multiparty before upgrading the dependency.
#isClosed() {
return this.#form["flushing"] <= 0 && this.#pendingHandlers <= 0;
}
// Strips the first "[index]" segment so e.g. "avatar[0]" and "avatar[1]"
// resolve to the same handler name.
#getHandlerName(name) {
return name.replace(/\[\d*\]/, "");
}
/**
 * Accumulates processed bytes and returns a 413 Exception once the
 * configured limit is exceeded; undefined while within limits or when no
 * limit is configured.
 */
#validateProcessedBytes(chunkLength) {
if (!this.#upperLimit) return;
this.#processedBytes += chunkLength;
if (this.#processedBytes > this.#upperLimit) return new Exception("request entity too large", {
code: "E_REQUEST_ENTITY_TOO_LARGE",
status: 413
});
}
/**
 * Dispatches one part to its registered handler. Parts without a filename
 * are plain fields (handled by the "field" event) and are drained here.
 */
async #handlePart(part) {
if (!part.name || !part.filename) {
part.resume();
return;
}
const name = this.#getHandlerName(part.name);
// Fall back to the wildcard handler when no named handler exists.
const handler = this.#handlers[name] || this.#handlers["*"];
if (!handler) {
debug_default("skipping multipart part as there are no handlers \"%s\"", name);
part.resume();
return;
}
debug_default("processing multipart part \"%s\"", name);
this.#pendingHandlers++;
const partHandler = new PartHandler(part, handler.options);
partHandler.begin();
// The file is exposed both under files and as a regular field value.
this.#files.add(partHandler.file.fieldName, partHandler.file);
this.#fields.add(partHandler.file.fieldName, partHandler.file);
part.file = partHandler.file;
try {
// The user handler drives the stream; the callback reports each chunk
// so size limits and validations stay up to date while streaming.
const response = await handler.handler(part, async (line) => {
if (this.state !== "processing") return;
const lineLength = line.length;
const error = this.#validateProcessedBytes(lineLength);
if (error) {
part.emit("error", error);
this.abort(error);
return;
}
try {
await partHandler.reportProgress(line, lineLength);
} catch (err) {
part.emit("error", err);
this.abort(err);
}
});
await partHandler.reportSuccess(response || {});
} catch (error) {
await partHandler.reportError(error);
}
this.#pendingHandlers--;
}
#handleField(key, value) {
if (!key) return;
this.#fields.add(key, value);
}
/**
 * Transitions to a terminal state (at most once) and copies the collected
 * files/fields onto the request so downstream macros can read them.
 */
#finish(newState) {
if (this.state === "idle" || this.state === "processing") {
this.state = newState;
this.#ctx.request["__raw_files"] = this.#files.get();
this.#ctx.request.setInitialBody(this.#fields.get());
}
}
/**
 * Registers a handler for a file field; use "*" for a wildcard handler
 * that receives every file part. Chainable.
 */
onFile(name, options, handler) {
this.#handlers[name] = {
handler,
options
};
return this;
}
// Emits on the form so the "error" listener rejects the process() promise.
abort(error) {
this.#form.emit("error", error);
}
/**
 * Parses the request. Resolves once every part has been consumed; rejects
 * on malformed bodies, exceeded limits, or a second invocation.
 */
process(config) {
return new Promise((resolve, reject) => {
if (this.state !== "idle") {
reject(new Exception("multipart stream has already been consumed", { code: "E_RUNTIME_EXCEPTION" }));
return;
}
this.state = "processing";
if (config && config.limit) this.#upperLimit = string.bytes.parse(config.limit);
this.#form = new multiparty.Form({
maxFields: config?.maxFields ?? this.#config.maxFields,
maxFieldsSize: this.#config.fieldsLimit
});
debug_default("processing multipart body");
this.#form.on("error", (error) => {
this.#finish("error");
// Drain the remaining request on the next tick so the client is not
// left hanging, then map known multiparty messages to HTTP errors.
process.nextTick(() => {
if (this.#ctx.request.request.readable) this.#ctx.request.request.resume();
if (error.message.match(/stream ended unexpectedly/)) reject(new Exception("Invalid multipart request", {
status: 400,
code: "E_INVALID_MULTIPART_REQUEST"
}));
else if (error.message.match(/maxFields [0-9]+ exceeded/)) reject(new Exception("Fields length limit exceeded", {
status: 413,
code: "E_REQUEST_ENTITY_TOO_LARGE"
}));
else if (error.message.match(/maxFieldsSize [0-9]+ exceeded/)) reject(new Exception("Fields size in bytes exceeded", {
status: 413,
code: "E_REQUEST_ENTITY_TOO_LARGE"
}));
else reject(error);
});
});
this.#form.on("part", async (part) => {
await this.#handlePart(part);
if (this.#isClosed()) {
this.#finish("success");
resolve();
}
});
this.#form.on("field", (key, value) => {
try {
this.#handleField(key, value);
} catch (error) {
this.abort(error);
}
});
// "close" may fire while part handlers are still pending; the "part"
// listener above repeats the same completion check afterwards.
this.#form.on("close", () => {
if (this.#isClosed()) {
this.#finish("success");
resolve();
}
});
this.#form.parse(this.#ctx.request.request);
});
}
};
/**
 * Streams a readable stream to `location` on disk.
 *
 * @param {import('stream').Readable} readStream - source stream.
 * @param {string} location - destination file path.
 * @param {(chunk: any) => void} [dataListener] - optional observer invoked
 *   for every chunk while the stream is piped.
 * @throws rethrows any pipeline error after removing the partial file.
 */
async function streamFile(readStream, location, dataListener) {
  const hasListener = typeof dataListener === "function";
  if (hasListener) {
    // Pause first so attaching the "data" listener does not start flowing
    // before the pipeline takes over.
    readStream.pause();
    readStream.on("data", dataListener);
  }
  const destination = createWriteStream(location);
  try {
    await pipeline(readStream, destination);
  } catch (error) {
    // Best-effort cleanup of the partially written file.
    unlink(destination.path).catch(() => {});
    throw error;
  }
}
/**
 * Normalizes text-parser options, filling in the defaults used across all
 * body parsers: utf8 encoding and a 56kb size limit.
 */
function prepareTextParserOptions(options) {
  const encoding = options.encoding ?? "utf8";
  const limit = options.limit ?? "56kb";
  return { encoding, limit };
}
/**
 * Reads the (possibly compressed) request body as text via raw-body.
 * When the body is uncompressed and a content-length header is present,
 * the length is forwarded so raw-body can enforce it exactly.
 */
function parseText(req, options) {
  const contentLength = req.headers["content-length"];
  const contentEncoding = req.headers["content-encoding"] || "identity";
  if (contentLength && contentEncoding === "identity") {
    // ~~ coerces the header string to an integer.
    options = { ...options, length: ~~contentLength };
  }
  return raw(inflate(req), options);
}
// Strict-mode JSON must start (after optional whitespace) with "{" or "[".
const strictJSONReg = /^[ \t\n\r]*(\[|\{)/;
/**
 * Builds JSON parser options on top of the text defaults. Strict mode is on
 * unless explicitly disabled, and a JSON.parse reviver is attached when a
 * string normalizer (trim / empty-string-to-null) is configured.
 */
function prepareJSONParserOptions(options) {
  const { convertEmptyStringsToNull, trimWhitespaces } = options;
  let normalizer;
  if (convertEmptyStringsToNull && trimWhitespaces) {
    normalizer = formBodyNormalizers.trimWhitespacesAndConvertToNull;
  } else if (convertEmptyStringsToNull) {
    normalizer = formBodyNormalizers.convertToNull;
  } else if (trimWhitespaces) {
    normalizer = formBodyNormalizers.trimWhitespaces;
  }
  const reviver = normalizer
    ? function JSONReviver(key, value) {
        // The root value arrives with an empty key; leave it untouched.
        if (key === "") return value;
        return typeof value === "string" ? normalizer(value) : value;
      }
    : void 0;
  return {
    ...prepareTextParserOptions(options),
    strict: options.strict !== false,
    reviver,
  };
}
/**
 * Parses the request body as JSON. Empty bodies resolve to {} in strict
 * mode (or the raw value otherwise); strict mode rejects bodies that are
 * not objects/arrays with a 422; parse failures are re-thrown with a 400
 * status and the offending body attached.
 */
async function parseJSON(req, options) {
  const body = await parseText(req, options);
  if (!body) {
    const parsed = options.strict ? {} : body;
    return { parsed, raw: body };
  }
  if (options.strict && !strictJSONReg.test(body)) {
    throw new Exception("Invalid JSON, only supports object and array", { status: 422 });
  }
  try {
    return { parsed: safeParse(body, options.reviver), raw: body };
  } catch (error) {
    error.status = 400;
    error.body = body;
    throw error;
  }
}
/**
 * Builds urlencoded-form parser options on top of the text defaults.
 * Dot-notation keys are enabled by default, and a qs decoder is installed
 * when a string normalizer (trim / empty-string-to-null) is configured.
 */
function prepareFormParserOptions(options) {
  const qsOptions = { ...options.queryString };
  if (qsOptions.allowDots === void 0) {
    qsOptions.allowDots = true;
  }
  const { convertEmptyStringsToNull, trimWhitespaces } = options;
  let normalizer;
  if (convertEmptyStringsToNull && trimWhitespaces) {
    normalizer = formBodyNormalizers.trimWhitespacesAndConvertToNull;
  } else if (convertEmptyStringsToNull) {
    normalizer = formBodyNormalizers.convertToNull;
  } else if (trimWhitespaces) {
    normalizer = formBodyNormalizers.trimWhitespaces;
  }
  if (normalizer) {
    // Only values are normalized; keys pass through the default decoder.
    qsOptions.decoder = function (str, defaultDecoder, charset, type) {
      const decoded = defaultDecoder(str, defaultDecoder, charset);
      return type === "value" ? normalizer(decoded) : decoded;
    };
  }
  return {
    ...prepareTextParserOptions(options),
    qs: qsOptions,
  };
}
/**
 * Parses an urlencoded form body: reads the raw text, then runs it through
 * qs.parse with the prepared query-string options.
 */
async function parseForm(req, options) {
  const body = await parseText(req, options);
  const parsed = qs.parse(body, options.qs);
  return { parsed, raw: body };
}
/**
 * Copies size/extension constraints from runtime options onto a file,
 * without overriding limits the file already carries.
 */
function setFileOptions(file, options) {
  if (file.sizeLimit === void 0 && options?.size) {
    file.sizeLimit = options.size;
  }
  if (file.allowedExtensions === void 0 && options?.extnames) {
    file.allowedExtensions = options.extnames;
  }
}
/**
 * Truthiness-style guard: falsy inputs are returned as-is (falsy), anything
 * else is checked against MultipartFile.
 */
function isInstanceOfFile(file) {
  if (!file) {
    return file;
  }
  return file instanceof MultipartFile;
}
debug_default("extending request class with \"file\", \"files\" and \"allFiles\" macros");
HttpRequest.macro("toJSON", function() {
return {
...this.serialize(),
files: this["__raw_files"] || {}
};
});
HttpRequest.macro("file", function getFile(key, options) {
let file = lodash.get(this.allFiles(), key);
file = Array.isArray(file) ? file[0] : file;
if (!isInstanceOfFile(file)) return null;
setFileOptions(file, options);
file.validate();
return file;
});
HttpRequest.macro("files", function getFiles(key, options) {
let files = lodash.get(this.allFiles(), key);
files = Array.isArray(files) ? files : files ? [files] : [];
return files.filter(isInstanceOfFile).map((file) => {
setFileOptions(file, options);
file.validate();
return file;
});
});
HttpRequest.macro("allFiles", function allFiles() {
if (!this.__raw_files) throw new RuntimeException("Cannot read files. Make sure the bodyparser middleware is registered");
return this["__raw_files"];
});
/**
 * Normalizes multipart config: byte-size strings are parsed to numbers
 * (left undefined when absent) and the string normalizer is resolved from
 * the trim/empty-string flags.
 */
function prepareMultipartConfig(config) {
  const { convertEmptyStringsToNull, trimWhitespaces } = config;
  let normalizer;
  if (convertEmptyStringsToNull && trimWhitespaces) {
    normalizer = formBodyNormalizers.trimWhitespacesAndConvertToNull;
  } else if (convertEmptyStringsToNull) {
    normalizer = formBodyNormalizers.convertToNull;
  } else if (trimWhitespaces) {
    normalizer = formBodyNormalizers.trimWhitespaces;
  }
  const toBytes = (value) => (value ? string.bytes.parse(value) : void 0);
  return {
    limit: toBytes(config.limit),
    fieldsLimit: toBytes(config.fieldsLimit),
    maxFields: config.maxFields,
    normalizer,
  };
}
export { prepareJSONParserOptions as a, streamFile as c, parseJSON as i, Multipart as l, parseForm as n, parseText as o, prepareFormParserOptions as r, prepareTextParserOptions as s, prepareMultipartConfig as t, debug_default as u };