graphql-upload-ts
A minimalistic, developer-friendly TypeScript middleware and Upload scalar that add support for GraphQL multipart requests (file uploads via queries and mutations) to various Node.js GraphQL servers.
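
The middleware is typically mounted in front of a GraphQL HTTP handler so multipart requests are parsed before resolvers run. Below is a minimal, hypothetical wiring sketch; it assumes the package exposes graphqlUploadExpress and GraphQLUpload like the upstream graphql-upload package it is derived from, and the local FileUpload interface simply mirrors the file object built by the processRequest.js listing further down.

TypeScript
import express from 'express';
import { graphqlHTTP } from 'express-graphql';
import { makeExecutableSchema } from '@graphql-tools/schema';
import { graphqlUploadExpress, GraphQLUpload } from 'graphql-upload-ts';
import { createWriteStream } from 'node:fs';
import type { Readable } from 'node:stream';

// Local shape of a resolved upload, mirroring the `file` object that
// processRequest builds (filename, mimetype, encoding, createReadStream).
interface FileUpload {
  filename: string;
  mimetype: string;
  encoding: string;
  createReadStream: () => Readable;
}

const schema = makeExecutableSchema({
  typeDefs: /* GraphQL */ `
    scalar Upload
    type Query { ping: String }
    type Mutation { singleUpload(file: Upload!): String! }
  `,
  resolvers: {
    Upload: GraphQLUpload,
    Mutation: {
      // The Upload scalar resolves to a promise of the parsed file.
      singleUpload: async (_: unknown, { file }: { file: Promise<FileUpload> }) => {
        const { filename, createReadStream } = await file;
        // Stream the upload to disk; any writable stream works here.
        await new Promise<void>((resolve, reject) =>
          createReadStream()
            .pipe(createWriteStream(`/tmp/${filename}`))
            .on('finish', () => resolve())
            .on('error', reject),
        );
        return filename;
      },
    },
  },
});

express()
  .use(
    '/graphql',
    // Parse multipart requests before the GraphQL handler; limits are optional.
    graphqlUploadExpress({ maxFileSize: 10_000_000, maxFiles: 10 }),
    graphqlHTTP({ schema }),
  )
  .listen(4000);

Any handler that reads a pre-parsed request body can sit behind the middleware; express-graphql above is only one option.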
processRequest.js (compiled JavaScript)
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.processRequest = exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL = void 0;
const busboy_1 = __importDefault(require("busboy"));
const fs_capacitor_1 = require("./fs-capacitor");
const http_errors_1 = __importDefault(require("http-errors"));
const object_path_1 = __importDefault(require("object-path"));
const ignoreStream_1 = require("./ignoreStream");
const Upload_1 = require("./Upload");
exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL = 'https://github.com/jaydenseric/graphql-multipart-request-spec';
async function processRequest(request, response, options) {
    const { maxFieldSize = 1000000, maxFileSize = Infinity, maxFiles = Infinity } = options || {};
    return new Promise((resolve, reject) => {
        let released;
        let exitError;
        let operations;
        let operationsPath;
        let map;
        const parser = (0, busboy_1.default)({
            headers: request.headers,
            defParamCharset: 'utf8',
            limits: {
                fieldSize: maxFieldSize,
                fields: 2, // Only operations and map.
                fileSize: maxFileSize,
                files: maxFiles,
            },
        });
        /**
         * Exits request processing with an error. Successive calls have no effect.
         * @param {Error} error Error instance.
         * @param {boolean} [isParserError] Is the error from the parser.
         */
        function exit(error, isParserError = false) {
            if (exitError)
                return;
            exitError = error;
            if (map)
                for (const upload of map.values())
                    if (!upload.file)
                        upload.reject(exitError);
            // If the error came from the parser, don’t cause it to be emitted again.
            isParserError ? parser.destroy() : parser.destroy(exitError);
            request.unpipe(parser);
            // With a sufficiently large request body, subsequent events in the same
            // event frame cause the stream to pause after the parser is destroyed. To
            // ensure that the request resumes, the call to .resume() is scheduled for
            // later in the event loop.
            setImmediate(() => {
                request.resume();
            });
            reject(exitError);
        }
        parser.on('field', (fieldName, value, { valueTruncated }) => {
            if (valueTruncated)
                return exit((0, http_errors_1.default)(413, `The ‘${fieldName}’ multipart field value exceeds the ${maxFieldSize} byte size limit.`));
            switch (fieldName) {
                case 'operations':
                    try {
                        operations = JSON.parse(value);
                    }
                    catch (error) {
                        return exit((0, http_errors_1.default)(400, `Invalid JSON in the ‘operations’ multipart field (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
                    }
                    // `operations` should be an object or an array. Note that arrays
                    // and `null` have an `object` type.
                    if (typeof operations !== 'object' || !operations)
                        return exit((0, http_errors_1.default)(400, `Invalid type for the ‘operations’ multipart field (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
                    operationsPath = (0, object_path_1.default)(operations);
                    break;
                case 'map': {
                    if (!operations)
                        return exit((0, http_errors_1.default)(400, `Disordered multipart fields; ‘map’ should follow ‘operations’ (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
                    let parsedMap;
                    try {
                        parsedMap = JSON.parse(value);
                    }
                    catch (error) {
                        return exit((0, http_errors_1.default)(400, `Invalid JSON in the ‘map’ multipart field (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
                    }
                    // `map` should be an object.
                    if (typeof parsedMap !== 'object' || !parsedMap || Array.isArray(parsedMap))
                        return exit((0, http_errors_1.default)(400, `Invalid type for the ‘map’ multipart field (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
                    const mapEntries = Object.entries(parsedMap);
                    // Check max files is not exceeded, even though the number of files
                    // to parse might not match the map provided by the client.
                    if (mapEntries.length > maxFiles)
                        return exit((0, http_errors_1.default)(413, `${maxFiles} max file uploads exceeded.`));
                    map = new Map();
                    for (const [fieldName, paths] of mapEntries) {
                        if (!Array.isArray(paths))
                            return exit((0, http_errors_1.default)(400, `Invalid type for the ‘map’ multipart field entry key ‘${fieldName}’ array (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
                        map.set(fieldName, new Upload_1.Upload());
                        for (const [index, path] of paths.entries()) {
                            if (typeof path !== 'string')
                                return exit((0, http_errors_1.default)(400, `Invalid type for the ‘map’ multipart field entry key ‘${fieldName}’ array index ‘${index}’ value (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
                            try {
                                operationsPath.set(path, map.get(fieldName));
                            }
                            catch (error) {
                                return exit((0, http_errors_1.default)(400, `Invalid object path for the ‘map’ multipart field entry key ‘${fieldName}’ array index ‘${index}’ value ‘${path}’ (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
                            }
                        }
                    }
                    resolve(operations);
                }
            }
        });
        parser.on('file', (fieldName, stream, { filename, encoding, mimeType: mimetype }) => {
            if (!map) {
                (0, ignoreStream_1.ignoreStream)(stream);
                return exit((0, http_errors_1.default)(400, `Disordered multipart fields; files should follow ‘map’ (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
            }
            const upload = map.get(fieldName);
            if (!upload) {
                // The file is extraneous. As the rest can still be processed, just
                // ignore it and don’t exit with an error.
                (0, ignoreStream_1.ignoreStream)(stream);
                return;
            }
            let fileError;
            const capacitor = new fs_capacitor_1.WriteStream();
            capacitor.on('error', () => {
                stream.unpipe();
                stream.resume();
            });
            stream.on('limit', () => {
                fileError = (0, http_errors_1.default)(413, `File truncated as it exceeds the ${maxFileSize} byte size limit.`);
                stream.unpipe();
                capacitor.destroy(fileError);
            });
            stream.on('error', (error) => {
                fileError = error;
                stream.unpipe();
                capacitor.destroy(fileError);
            });
            const file = {
                fieldName,
                filename,
                mimetype,
                encoding,
                // @ts-ignore
                createReadStream(options) {
                    const error = fileError || (released ? exitError : null);
                    if (error)
                        throw error;
                    return capacitor.createReadStream(options);
                },
                capacitor,
            };
            Object.defineProperty(file, 'capacitor', {
                enumerable: false,
                configurable: false,
                writable: false,
            });
            stream.pipe(capacitor);
            upload.resolve(file);
        });
        parser.once('filesLimit', () => exit((0, http_errors_1.default)(413, `${maxFiles} max file uploads exceeded.`)));
        parser.once('finish', () => {
            request.unpipe(parser);
            request.resume();
            if (!operations)
                return exit((0, http_errors_1.default)(400, `Missing multipart field ‘operations’ (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
            if (!map)
                return exit((0, http_errors_1.default)(400, `Missing multipart field ‘map’ (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
            for (const upload of map.values())
                if (!upload.file)
                    upload.reject((0, http_errors_1.default)(400, 'File missing in the request.'));
        });
        // Use the `on` method instead of `once` as in edge cases the same parser
        // could have multiple `error` events and all must be handled to prevent the
        // Node.js process exiting with an error. One edge case is if there is a
        // malformed part header as well as an unexpected end of the form.
        parser.on('error', (error) => {
            exit(error, true);
        });
        response.once('close', () => {
            released = true;
            if (map)
                for (const upload of map.values())
                    if (upload.file)
                        // Release resources and clean up temporary files.
                        upload.file.capacitor.release();
        });
        request.once('close', () => {
            if (!request.readableEnded)
                exit((0, http_errors_1.default)(499, 'Request disconnected during file upload stream parsing.'));
        });
        request.pipe(parser);
    });
}
exports.processRequest = processRequest;
//# sourceMappingURL=processRequest.js.map
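
processRequest can also be driven directly from a plain Node.js HTTP handler instead of the bundled middleware. The following is a rough sketch, assuming the package re-exports processRequest; the maxFieldSize, maxFileSize and maxFiles option names come straight from the defaults destructured at the top of the function above.

TypeScript
import { createServer } from 'node:http';
import { processRequest } from 'graphql-upload-ts';

createServer(async (req, res) => {
  if (req.method === 'POST' && req.headers['content-type']?.startsWith('multipart/form-data')) {
    try {
      // Resolves with the parsed `operations` value as soon as the `operations`
      // and `map` fields arrive; file parts keep streaming in the background.
      const operations = await processRequest(req, res, {
        maxFileSize: 10_000_000, // bytes
        maxFiles: 5,
      });
      // Each path listed in `map` now holds an Upload instance that later
      // resolves to { filename, mimetype, encoding, createReadStream }.
      res.setHeader('content-type', 'application/json');
      res.end(JSON.stringify({ batch: Array.isArray(operations) ? operations.length : 1 }));
    } catch (error) {
      // http-errors instances carry an HTTP status code.
      res.statusCode = (error as { status?: number }).status ?? 500;
      res.end(String(error));
    }
  } else {
    res.statusCode = 404;
    res.end();
  }
}).listen(4000);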
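
On the client side, the field order the parser enforces (operations, then map, then the file parts named in map) is the one defined by the GraphQL multipart request spec linked above. A sketch of a conforming request using the FormData, Blob and fetch globals available in Node.js 18+; the singleUpload mutation is a hypothetical example.

TypeScript
// Run as an ES module (top-level await).
const form = new FormData();
// 1. `operations`: the GraphQL request, with the file variable set to null.
form.append(
  'operations',
  JSON.stringify({
    query: 'mutation ($file: Upload!) { singleUpload(file: $file) }',
    variables: { file: null },
  }),
);
// 2. `map`: ties the multipart part named "0" to the variables.file path.
form.append('map', JSON.stringify({ 0: ['variables.file'] }));
// 3. The file part itself, named "0" to match the map entry.
form.append('0', new Blob(['hello']), 'hello.txt');

const response = await fetch('http://localhost:4000/graphql', {
  method: 'POST',
  body: form,
});
console.log(await response.text());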