UNPKG

@tgwf/co2

Version:
47 lines (46 loc) 1.91 kB
"use strict";

// hosting-json.node.js — Node-only helper for loading a hosting dataset
// from disk. Supports plain JSON files and gzip-compressed ".gz" files.
//
// Rewritten from the minified esbuild output: the ESM-interop shims
// (__toESM/__copyProps/__create) wrapped nothing but two core-module
// requires, so they are replaced with direct require calls; the public
// export (module.exports = { loadJSON }) is unchanged.

const fs = require("fs");
const zlib = require("zlib");
const { promisify } = require("util");

const readFile = promisify(fs.readFile);
const gunzip = promisify(zlib.gunzip);

/**
 * Read a gzip-compressed file and return its decompressed contents as a
 * UTF-8 string.
 *
 * Fix: the previous implementation piped a read stream into a manual
 * chunk collector (misleadingly named `streamToString` — it resolved a
 * Buffer, not a string) before gunzipping. Since the whole file was
 * buffered in memory anyway, `readFile` is equivalent, and it also
 * closes the fd on error — the old stream was never destroyed when its
 * "error" event fired, leaking the descriptor.
 *
 * @param {string} jsonPath - Path to a gzip-compressed file on disk.
 * @returns {Promise<string>} The decompressed contents, decoded as UTF-8.
 */
async function getGzippedFileAsJson(jsonPath) {
  const compressed = await readFile(jsonPath);
  const unzipped = await gunzip(compressed);
  return unzipped.toString();
}

/**
 * Load and parse a JSON file, transparently gunzipping paths that end
 * in ".gz".
 *
 * @param {string} jsonPath - Path to a ".json" or ".json.gz" file.
 * @returns {Promise<*>} The parsed JSON value.
 * @throws {SyntaxError} If the (decompressed) contents are not valid JSON.
 */
async function loadJSON(jsonPath) {
  // The plain-file branch resolves a Buffer; JSON.parse stringifies
  // Buffers as UTF-8, so both branches parse identically.
  const jsonBuffer = jsonPath.endsWith(".gz")
    ? await getGzippedFileAsJson(jsonPath)
    : await readFile(jsonPath);
  return JSON.parse(jsonBuffer);
}

module.exports = { loadJSON };
//# sourceMappingURL=hosting-json.node.js.map