UNPKG

elysia

Version:

Ergonomic Framework for Humans

272 lines (270 loc) 10.5 kB
"use strict";

// ---------------------------------------------------------------------------
// CommonJS interop boilerplate (emitted by the bundler): re-exports the
// ESM-style named bindings below through `module.exports`.
// ---------------------------------------------------------------------------
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if ((from && typeof from === "object") || typeof from === "function")
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, {
          get: () => from[key],
          enumerable:
            !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
        });
  return to;
};
var __toCommonJS = (mod) =>
  __copyProps(__defProp({}, "__esModule", { value: true }), mod);

var utils_exports = {};
__export(utils_exports, {
  createResponseHandler: () => createResponseHandler,
  createStreamHandler: () => createStreamHandler,
  handleFile: () => handleFile,
  handleSet: () => handleSet,
  parseSetCookies: () => parseSetCookies,
  responseToSetHeaders: () => responseToSetHeaders,
  streamResponse: () => streamResponse,
  tee: () => tee
});
module.exports = __toCommonJS(utils_exports);

var import_cookies = require('../cookies.js');
var import_utils = require('../utils.js');
var import_utils2 = require('../universal/utils.js');
var import_universal = require('../universal/index.js');

/**
 * Wrap a file-like body (anything with a `size`, e.g. Bun's file or a Blob)
 * in a `Response`, attaching `accept-ranges` / `content-range` defaults
 * unless the status makes those headers inapplicable (206/304/412/416).
 *
 * @param response - the file-like body; on non-Bun runtimes it may arrive
 *                   as a `Promise` and is resolved recursively first
 * @param set      - optional Elysia `set` context carrying status/headers
 * @returns a `Response` (or a `Promise<Response>` in the async path)
 */
const handleFile = (response, set) => {
  // Non-Bun runtimes may hand us a pending file read; unwrap and retry.
  if (!import_utils2.isBun && response instanceof Promise)
    return response.then((res) => handleFile(res, set));

  const size = response.size;
  const immutable =
    set &&
    (set.status === 206 ||
      set.status === 304 ||
      set.status === 412 ||
      set.status === 416);
  const defaultHeader = immutable
    ? {}
    : {
        "accept-ranges": "bytes",
        "content-range": size ? `bytes 0-${size - 1}/${size}` : void 0
      };

  if (!set && !size) return new Response(response);
  if (!set) return new Response(response, { headers: defaultHeader });

  if (set.headers instanceof Headers) {
    // NOTE(review): `key in set.headers` tests own/prototype properties of
    // the Headers object, not header entries (that would be `.has(key)`).
    // Preserved as-is from upstream — verify intent before changing.
    for (const key of Object.keys(defaultHeader))
      if (key in set.headers) set.headers.append(key, defaultHeader[key]);
    if (immutable) {
      set.headers.delete("content-length");
      set.headers.delete("accept-ranges");
    }
    return new Response(response, set);
  }

  if (import_utils.isNotEmpty(set.headers))
    return new Response(response, {
      status: set.status,
      // User-supplied headers win over the range defaults.
      headers: Object.assign(defaultHeader, set.headers)
    });
  return new Response(response, {
    status: set.status,
    headers: defaultHeader
  });
};

/**
 * Re-apply an array of raw `set-cookie` strings onto a `Headers` object,
 * one `append` per cookie (a single joined header would be invalid).
 *
 * @param headers   - target `Headers`; returned unchanged when falsy
 * @param setCookie - array of "name=value..." cookie strings
 * @returns the same `Headers` instance
 */
const parseSetCookies = (headers, setCookie) => {
  if (!headers) return headers;
  headers.delete("set-cookie");
  for (let i = 0; i < setCookie.length; i++) {
    const raw = setCookie[i];
    const eq = raw.indexOf("=");
    headers.append(
      "set-cookie",
      `${raw.slice(0, eq)}=${raw.slice(eq + 1) || ""}`
    );
  }
  return headers;
};

/**
 * Derive an Elysia `set`-shaped object ({ headers, status }) from a
 * `Response`, merging into an existing `set` when one is provided.
 * `content-encoding` is always stripped (the body is re-emitted decoded).
 *
 * @param response - source `Response` (may be undefined)
 * @param set      - existing `set` to merge into (may be undefined)
 * @returns a `set`-shaped plain object
 */
const responseToSetHeaders = (response, set) => {
  if (set?.headers) {
    if (response) {
      if (import_utils.hasHeaderShorthand)
        Object.assign(set.headers, response.headers.toJSON());
      else
        // NOTE(review): copies only keys ALREADY present on set.headers —
        // preserved from upstream; confirm this filtering is intentional.
        for (const [key, value] of response.headers.entries())
          if (key in set.headers) set.headers[key] = value;
    }
    // 200 is treated as "unset"; adopt the response's status instead.
    if (set.status === 200) set.status = response.status;
    if (set.headers["content-encoding"])
      delete set.headers["content-encoding"];
    return set;
  }

  if (!response) return { headers: {}, status: set?.status ?? 200 };

  if (import_utils.hasHeaderShorthand) {
    set = {
      headers: response.headers.toJSON(),
      status: set?.status ?? 200
    };
    if (set.headers["content-encoding"])
      delete set.headers["content-encoding"];
    return set;
  }

  set = { headers: {}, status: set?.status ?? 200 };
  // NOTE(review): `key in set.headers` on a freshly-created {} is only true
  // for Object.prototype keys — preserved byte-equivalent from upstream.
  for (const [key, value] of response.headers.entries())
    if (key !== "content-encoding" && key in set.headers)
      set.headers[key] = value;
  return set;
};

// Opt-in flag: skip the per-chunk macrotask yield for browser clients.
const allowRapidStream = import_universal.env.ELYSIA_RAPID_STREAM === "true";

/**
 * Build a handler that turns an (async) generator — or a generator whose
 * first yield is a `ReadableStream` — into a streaming `Response`,
 * formatting chunks as SSE when requested.
 *
 * @param mapResponse        - maps a plain value + set to a Response
 * @param mapCompactResponse - maps a plain value to a Response (no set)
 * @returns async (generator, set, request) => Response
 */
const createStreamHandler =
  ({ mapResponse, mapCompactResponse }) =>
  async (generator, set, request) => {
    let init = generator.next?.();
    if (set) handleSet(set);
    if (init instanceof Promise) init = await init;

    if (init?.value instanceof ReadableStream) {
      // The generator's first yield IS the stream; iterate it directly.
      generator = init.value;
    } else if (init && (typeof init?.done === "undefined" || init?.done)) {
      // Not a generator result at all, or already exhausted: respond once.
      return set
        ? mapResponse(init.value, set, request)
        : mapCompactResponse(init.value, request);
    }

    const isSSE =
      // @ts-ignore First SSE result is wrapped with sse()
      init?.value?.sse ??
      // @ts-ignore ReadableStream is wrapped with sse()
      generator?.sse ??
      // User explicitly set content-type to SSE
      set?.headers["content-type"]?.startsWith("text/event-stream");

    const format = isSSE ? (data) => `data: ${data}\n\n` : (data) => data;
    const contentType = isSSE
      ? "text/event-stream"
      : init?.value && typeof init?.value === "object"
        ? "application/json"
        : "text/plain";

    if (set?.headers) {
      if (!set.headers["transfer-encoding"])
        set.headers["transfer-encoding"] = "chunked";
      if (!set.headers["content-type"])
        set.headers["content-type"] = contentType;
      if (!set.headers["cache-control"])
        set.headers["cache-control"] = "no-cache";
    } else {
      set = {
        status: 200,
        headers: {
          "content-type": contentType,
          "transfer-encoding": "chunked",
          "cache-control": "no-cache",
          connection: "keep-alive"
        }
      };
    }

    // Heuristic: an Origin header implies a browser client.
    const isBrowser = request?.headers.has("Origin");

    return new Response(
      new ReadableStream({
        async start(controller) {
          let aborted = false;
          request?.signal?.addEventListener("abort", () => {
            aborted = true;
            try {
              controller.close();
            } catch {
              // stream already closed — nothing to do
            }
          });

          // Emit the first value we consumed above (unless it was the
          // stream itself, which the loop below will drain).
          if (init && !(init.value instanceof ReadableStream)) {
            const first = init.value;
            if (first !== void 0 && first !== null) {
              if (first.toSSE) controller.enqueue(first.toSSE());
              else if (typeof first === "object")
                try {
                  controller.enqueue(format(JSON.stringify(first)));
                } catch {
                  controller.enqueue(format(first.toString()));
                }
              else controller.enqueue(format(first.toString()));
            }
          }

          try {
            for await (const chunk of generator) {
              if (aborted) break;
              if (chunk == null) continue;
              if (chunk.toSSE) {
                controller.enqueue(chunk.toSSE());
                continue;
              }
              if (typeof chunk === "object")
                try {
                  controller.enqueue(format(JSON.stringify(chunk)));
                } catch {
                  controller.enqueue(format(chunk.toString()));
                }
              else controller.enqueue(format(chunk.toString()));
              // Yield a macrotask between chunks for non-SSE browser
              // clients so intermediate flushes are observable.
              if (!allowRapidStream && isBrowser && !isSSE)
                await new Promise((resolve) => setTimeout(() => resolve(), 0));
            }
          } catch (error) {
            // Best-effort stream: surface the error, then close cleanly.
            console.warn(error);
          }

          try {
            controller.close();
          } catch {
            // already closed (e.g. via abort) — ignore
          }
        }
      }),
      set
    );
  };

/**
 * Adapt a `Response` body into an async generator of chunks, decoding
 * binary chunks to text. The reader lock is always released.
 *
 * @param response - Response whose body (if any) is consumed
 */
async function* streamResponse(response) {
  const body = response.body;
  if (!body) return;
  const reader = body.getReader();
  const decoder = new TextDecoder();
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      yield typeof value === "string" ? value : decoder.decode(value);
    }
  } finally {
    reader.releaseLock();
  }
}

/**
 * Normalize a `set` context in place: resolve string status names to
 * numeric codes, serialize the cookie jar into `set-cookie`, and — when
 * `set-cookie` is an array — rebuild headers as a `Headers` instance so
 * each cookie becomes its own header line.
 *
 * @param set - mutated in place
 */
const handleSet = (set) => {
  if (typeof set.status === "string")
    set.status = import_utils.StatusMap[set.status];

  if (set.cookie && import_utils.isNotEmpty(set.cookie)) {
    const cookie = import_cookies.serializeCookie(set.cookie);
    if (cookie) set.headers["set-cookie"] = cookie;
  }

  if (set.headers["set-cookie"] && Array.isArray(set.headers["set-cookie"]))
    set.headers = parseSetCookies(
      new Headers(set.headers),
      set.headers["set-cookie"]
    );
};

/**
 * Build the terminal response handler: merge `set.headers` into an
 * existing `Response` (response-side headers win, cookies always append),
 * re-status it when `set.status` demands, and fall through to the stream
 * handler for chunked bodies without a content-length.
 *
 * @param handler - { mapResponse, mapCompactResponse } for the stream path
 * @returns (response, set, request) => Response | Promise<Response>
 */
const createResponseHandler = (handler) => {
  const handleStream = createStreamHandler(handler);

  return (response, set, request) => {
    let cookiesCopied = false;

    if (set.headers instanceof Headers) {
      for (const key of set.headers.keys()) {
        if (key === "set-cookie") {
          // getSetCookie() yields every cookie at once; copy only on the
          // first "set-cookie" key we encounter.
          if (cookiesCopied) continue;
          cookiesCopied = true;
          for (const cookie of set.headers.getSetCookie())
            response.headers.append("set-cookie", cookie);
        } else if (!response.headers.has(key)) {
          response.headers.set(key, set.headers?.get(key) ?? "");
        }
      }
    } else {
      for (const key in set.headers) {
        if (key === "set-cookie")
          response.headers.append(key, set.headers[key]);
        else if (!response.headers.has(key))
          response.headers.set(key, set.headers[key]);
      }
    }

    const status = set.status ?? 200;
    // Re-wrap only when set demands a non-200 status that differs from the
    // response's, and the response isn't already a 3xx redirect.
    if (
      response.status !== status &&
      status !== 200 &&
      (response.status <= 300 || response.status > 400)
    ) {
      const reStatused = new Response(response.body, {
        headers: response.headers,
        status: set.status
      });
      if (
        !reStatused.headers.has("content-length") &&
        reStatused.headers.get("transfer-encoding") === "chunked"
      )
        return handleStream(
          streamResponse(reStatused),
          responseToSetHeaders(reStatused, set),
          request
        );
      return reStatused;
    }

    if (
      !response.headers.has("content-length") &&
      response.headers.get("transfer-encoding") === "chunked"
    )
      return handleStream(
        streamResponse(response),
        responseToSetHeaders(response, set),
        request
      );
    return response;
  };
};

/**
 * Split one async iterable into `branches` independent async iterators.
 * A shared buffer retains every value so late readers replay from the
 * start; pending readers park a resolver in `waiting` until the producer
 * pushes the next value (or finishes).
 *
 * @param source   - async iterable to fan out (consumed eagerly)
 * @param branches - number of iterators to return (default 2)
 * @returns array of `branches` async generators
 */
async function tee(source, branches = 2) {
  const buffer = [];
  let done = false;
  let waiting = [];

  // Producer: drain the source into the shared buffer, waking readers.
  (async () => {
    for await (const value of source) {
      buffer.push(value);
      waiting.forEach((w) => w.resolve());
      waiting = [];
    }
    done = true;
    waiting.forEach((w) => w.resolve());
  })();

  async function* makeIterator() {
    let i = 0;
    while (true) {
      if (i < buffer.length) {
        yield buffer[i++];
      } else {
        if (done) return;
        await new Promise((resolve) => waiting.push({ resolve }));
      }
    }
  }

  return Array.from({ length: branches }, makeIterator);
}

// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  createResponseHandler,
  createStreamHandler,
  handleFile,
  handleSet,
  parseSetCookies,
  responseToSetHeaders,
  streamResponse,
  tee
});