UNPKG

elysia

Version:

Ergonomic Framework for Human

247 lines (245 loc) 9.14 kB
import { serializeCookie } from "../cookies.mjs"; import { hasHeaderShorthand, isNotEmpty, StatusMap } from "../utils.mjs"; import { isBun } from "../universal/utils.mjs"; import { env } from "../universal/index.mjs"; const handleFile = (response, set) => { if (!isBun && response instanceof Promise) return response.then((res) => handleFile(res, set)); const size = response.size, immutable = set && (set.status === 206 || set.status === 304 || set.status === 412 || set.status === 416), defaultHeader = immutable ? {} : { "accept-ranges": "bytes", "content-range": size ? `bytes 0-${size - 1}/${size}` : void 0 }; if (!set && !size) return new Response(response); if (!set) return new Response(response, { headers: defaultHeader }); if (set.headers instanceof Headers) { for (const key of Object.keys(defaultHeader)) key in set.headers && set.headers.append(key, defaultHeader[key]); return immutable && (set.headers.delete("content-length"), set.headers.delete("accept-ranges")), new Response(response, set); } return isNotEmpty(set.headers) ? new Response(response, { status: set.status, headers: Object.assign(defaultHeader, set.headers) }) : new Response(response, { status: set.status, headers: defaultHeader }); }, parseSetCookies = (headers, setCookie) => { if (!headers) return headers; headers.delete("set-cookie"); for (let i = 0; i < setCookie.length; i++) { const index = setCookie[i].indexOf("="); headers.append( "set-cookie", `${setCookie[i].slice(0, index)}=${setCookie[i].slice(index + 1) || ""}` ); } return headers; }, responseToSetHeaders = (response, set) => { if (set?.headers) { if (response) if (hasHeaderShorthand) Object.assign(set.headers, response.headers.toJSON()); else for (const [key, value] of response.headers.entries()) key in set.headers && (set.headers[key] = value); return set.status === 200 && (set.status = response.status), set.headers["content-encoding"] && delete set.headers["content-encoding"], set; } if (!response) return { headers: {}, status: set?.status ?? 200 }; if (hasHeaderShorthand) return set = { headers: response.headers.toJSON(), status: set?.status ?? 200 }, set.headers["content-encoding"] && delete set.headers["content-encoding"], set; set = { headers: {}, status: set?.status ?? 200 }; for (const [key, value] of response.headers.entries()) key !== "content-encoding" && key in set.headers && (set.headers[key] = value); return set; }, allowRapidStream = env.ELYSIA_RAPID_STREAM === "true", createStreamHandler = ({ mapResponse, mapCompactResponse }) => async (generator, set, request) => { let init = generator.next?.(); if (set && handleSet(set), init instanceof Promise && (init = await init), init?.value instanceof ReadableStream) generator = init.value; else if (init && (typeof init?.done > "u" || init?.done)) return set ? mapResponse(init.value, set, request) : mapCompactResponse(init.value, request); const isSSE = ( // @ts-ignore First SSE result is wrapped with sse() init?.value?.sse ?? // @ts-ignore ReadableStream is wrapped with sse() generator?.sse ?? // User explicitly set content-type to SSE set?.headers["content-type"]?.startsWith("text/event-stream") ), format = isSSE ? (data) => `data: ${data} ` : (data) => data, contentType = isSSE ? "text/event-stream" : init?.value && typeof init?.value == "object" ? "application/json" : "text/plain"; set?.headers ? (set.headers["transfer-encoding"] || (set.headers["transfer-encoding"] = "chunked"), set.headers["content-type"] || (set.headers["content-type"] = contentType), set.headers["cache-control"] || (set.headers["cache-control"] = "no-cache")) : set = { status: 200, headers: { "content-type": contentType, "transfer-encoding": "chunked", "cache-control": "no-cache", connection: "keep-alive" } }; const isBrowser = request?.headers.has("Origin"); return new Response( new ReadableStream({ async start(controller) { let end = !1; if (request?.signal?.addEventListener("abort", () => { end = !0; try { controller.close(); } catch { } }), !(!init || init.value instanceof ReadableStream)) { if (init.value !== void 0 && init.value !== null) if (init.value.toSSE) controller.enqueue(init.value.toSSE()); else if (typeof init.value == "object") try { controller.enqueue( format(JSON.stringify(init.value)) ); } catch { controller.enqueue( format(init.value.toString()) ); } else controller.enqueue(format(init.value.toString())); } try { for await (const chunk of generator) { if (end) break; if (chunk != null) if (chunk.toSSE) controller.enqueue(chunk.toSSE()); else { if (typeof chunk == "object") try { controller.enqueue( format(JSON.stringify(chunk)) ); } catch { controller.enqueue( format(chunk.toString()) ); } else controller.enqueue(format(chunk.toString())); !allowRapidStream && isBrowser && !isSSE && await new Promise( (resolve) => setTimeout(() => resolve(), 0) ); } } } catch (error) { console.warn(error); } try { controller.close(); } catch { } } }), set ); }; async function* streamResponse(response) { const body = response.body; if (!body) return; const reader = body.getReader(), decoder = new TextDecoder(); try { for (; ; ) { const { done, value } = await reader.read(); if (done) break; typeof value == "string" ? yield value : yield decoder.decode(value); } } finally { reader.releaseLock(); } } const handleSet = (set) => { if (typeof set.status == "string" && (set.status = StatusMap[set.status]), set.cookie && isNotEmpty(set.cookie)) { const cookie = serializeCookie(set.cookie); cookie && (set.headers["set-cookie"] = cookie); } set.headers["set-cookie"] && Array.isArray(set.headers["set-cookie"]) && (set.headers = parseSetCookies( new Headers(set.headers), set.headers["set-cookie"] )); }, createResponseHandler = (handler) => { const handleStream = createStreamHandler(handler); return (response, set, request) => { let isCookieSet = !1; if (set.headers instanceof Headers) for (const key of set.headers.keys()) if (key === "set-cookie") { if (isCookieSet) continue; isCookieSet = !0; for (const cookie of set.headers.getSetCookie()) response.headers.append("set-cookie", cookie); } else response.headers.has(key) || response.headers.set(key, set.headers?.get(key) ?? ""); else for (const key in set.headers) key === "set-cookie" ? response.headers.append( key, set.headers[key] ) : response.headers.has(key) || response.headers.set( key, set.headers[key] ); const status = set.status ?? 200; if (response.status !== status && status !== 200 && (response.status <= 300 || response.status > 400)) { const newResponse = new Response(response.body, { headers: response.headers, status: set.status }); return !newResponse.headers.has("content-length") && newResponse.headers.get("transfer-encoding") === "chunked" ? handleStream( streamResponse(newResponse), responseToSetHeaders(newResponse, set), request ) : newResponse; } return !response.headers.has("content-length") && response.headers.get("transfer-encoding") === "chunked" ? handleStream( streamResponse(response), responseToSetHeaders(response, set), request ) : response; }; }; async function tee(source, branches = 2) { const buffer = []; let done = !1, waiting = []; (async () => { for await (const value of source) buffer.push(value), waiting.forEach((w) => w.resolve()), waiting = []; done = !0, waiting.forEach((w) => w.resolve()); })(); async function* makeIterator() { let i = 0; for (; ; ) if (i < buffer.length) yield buffer[i++]; else { if (done) return; await new Promise((resolve) => waiting.push({ resolve })); } } return Array.from({ length: branches }, makeIterator); } export { createResponseHandler, createStreamHandler, handleFile, handleSet, parseSetCookies, responseToSetHeaders, streamResponse, tee };