UNPKG

@dbl0null/slow-json-stringify

Version:

The slow.. well actually fastest JSON stringifier in the galaxy.

1 lines 12.2 kB
{"version":3,"file":"slow-json-stringify.mjs","sources":["../../src/_makeChunks.mjs","../../src/_utils.mjs","../../src/_makeQueue.mjs","../../src/_prepare.mjs","../../src/_select.mjs","../../src/sjs.mjs"],"sourcesContent":["const _replaceString = type => type.indexOf('string') !== -1 ? '\"__par__\"' : '__par__'\n\n// 3 possibilities after arbitrary property:\n// - \", => non-last string property\n// - , => non-last non-string property\n// - \" => last string property\nconst _matchStartRe = /^(\",|,|\")/\n\nconst _chunkRegex = /\"\\w+__sjs\"/g\n\n/**\n * @param {string} str - prepared string already validated.\n * @param {array} queue - queue containing the property name to match\n * (used for building dynamic regex) needed for the preparation of\n * chunks used in different scenarios.\n */\nconst _makeChunks = (str, queue) => {\n const chunks = str\n // Matching prepared properties and replacing with target with or without\n // double quotes.\n // => Avoiding unnecessary concatenation of doublequotes during serialization.\n .replace(_chunkRegex, _replaceString)\n .split('__par__')\n const result = []\n\n let _i\n const length = chunks.length\n for (let i = 0; i < length; ++i) {\n const chunk = chunks[i]\n\n // Using dynamic regex to ensure that only the correct property\n // at the end of the string it's actually selected.\n // => e.g. ,\"a\":{\"a\": => ,\"a\":{\n const matchProp = `(\"${queue[i]?.name}\":(\"?))$`\n\n // Check if current chunk is the last one inside a nested property\n const isLast = (_i = i + 1) === length || (\n (_i = chunks[_i].indexOf('}')) &&\n (_i === 0 || _i === 1)\n )\n\n // If the chunk is the last one the `isUndef` case should match\n // the preceding comma too.\n const matchPropRe = new RegExp(isLast ? `(,?)${matchProp}` : matchProp)\n\n const withoutInitial = chunk.replace(_matchStartRe, '')\n\n result.push({\n // notify that the chunk preceding the current one has not\n // its corresponding property undefined.\n // => This is a V8 optimization as it's way faster writing\n // the value of a property than writing the entire property.\n flag: false,\n pure: chunk,\n // Without initial part\n prevUndef: withoutInitial,\n // Without property chars\n isUndef: chunk.replace(matchPropRe, ''),\n // Only remaining chars (can be zero chars)\n bothUndef: withoutInitial.replace(matchPropRe, '')\n })\n }\n\n return result\n}\n\nexport { _makeChunks }\n","/**\n * `_find` is a super fast deep property finder.\n * It dynamically build the function needed to reach the desired\n * property.\n *\n * e.g.\n * obj = {a: {b: {c: 1}}}\n * _find(['a','b','c']) => (obj) => (((obj || {}).a || {}).b || {}).c\n *\n * @param {array} path - path to reach object property.\n */\nconst _find = path => {\n let str = 'obj'\n\n for (let i = 0; i < path.length; ++i) {\n str = `(${str}||{}).${path[i]}`\n }\n\n return eval(`(obj => ${str})`)\n}\n\nconst __find = path => {\n let str = 'obj'\n\n for (let i = 0; i < path.length; ++i) {\n str += `?.['${path[i]}']`\n }\n\n return eval(`(obj => ${str})`)\n}\n\nfunction _arraySerializer (serializer, array) {\n // Stringifying more complex array using the provided sjs schema\n let acc = ''\n const len = array.length - 1\n for (let i = 0; i < len; ++i) {\n acc += `${serializer(array[i])},`\n }\n\n // Prevent slice for removing unnecessary comma.\n acc += serializer(array[len])\n return `[${acc}]`\n}\n\n/**\n * `_makeArraySerializer` is simply a wrapper of another `sjs` schema\n * used for the serialization of arrais.\n */\nconst _makeArraySerializer = serializer => {\n if (typeof serializer === 'function') return _arraySerializer.bind(this, serializer)\n return JSON.stringify\n}\n\nconst TYPES = ['number', 'string', 'boolean', 'array', 'null']\n\n/* #__PURE__ */\nfunction checkType (type) {\n if (typeof process !== 'undefined' && process.env.NODE_ENV !== 'production' && !TYPES.includes(type)) {\n throw new Error(\n `Expected one of: \"number\", \"string\", \"boolean\", \"array\", \"null\". received \"${type}\" instead`\n )\n }\n}\n\nconst fnUser = value => value\n\n/**\n * @param type number|string|boolean|array|null\n * @param serializer\n * @returns\n */\nconst attr = (type, serializer) => {\n /* #__PURE__ */\n checkType(type)\n\n const usedSerializer = serializer || fnUser\n\n return {\n isSJS: true,\n type,\n serializer:\n type === 'array' ? _makeArraySerializer(serializer) : usedSerializer\n }\n}\n\n// Little utility for escaping convenience.\n// => if no regex is provided, a default one will be used.\nconst _defaultRegex = /[\\t\\n\\r\"\\\\]/g\nconst _escapeCallback = char => '\\\\' + char\nconst escape = (regex = _defaultRegex) => str => str.replace(regex, _escapeCallback)\n\nexport { __find, _find, escape, attr }\n","import { __find } from './_utils.mjs'\n\n/**\n * @param {object} originalSchema\n * @param {array} queue\n * @param {string|object} obj\n * @param {array} acc\n */\nfunction _prepareQueue (originalSchema, queue, obj, acc = []) {\n // this looks weird for objects, but is actually exactly what we want: object.toString() === '[object Object]'. We only want actual strings.\n if (obj.toString().indexOf('__sjs') !== -1) {\n const find = __find(acc)\n const { serializer } = find(originalSchema)\n\n queue.push({\n serializer,\n find,\n name: acc[acc.length - 1]\n })\n return\n }\n\n // Recursively going deeper.\n // NOTE: While going deeper, the current prop is pushed into the accumulator\n // to keep track of the position inside of the object.\n const keys = Object.keys(obj)\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n _prepareQueue(originalSchema, queue, obj[key], [...acc, key])\n }\n}\n\n/**\n * @param {object} preparedSchema - schema already validated\n * with modified prop values to avoid clashes.\n * @param {object} originalSchema - User provided schema\n * => contains array stringification serializers that are lost during preparation.\n */\nconst _makeQueue = (preparedSchema, originalSchema) => {\n const queue = []\n _prepareQueue(originalSchema, queue, preparedSchema)\n return queue\n}\n\nexport { _makeQueue }\n","const _stringifyCallback = (_, value) => {\n if (!value.isSJS) return value\n return `${value.type}__sjs`\n}\n\n/**\n * `_prepare` - aims to normalize the schema provided by the user.\n * It will convert the schema in both a parseable string and an object\n * useable for making the chunks needed for the serialization part.\n * @param {object} schema - user provided schema\n */\nconst _prepare = schema => {\n const _preparedString = JSON.stringify(schema, _stringifyCallback)\n const _preparedSchema = JSON.parse(_preparedString)\n\n return {\n _preparedString,\n _preparedSchema\n }\n}\n\nexport { _prepare }\n","/**\n * `select` function takes all the possible chunks from the\n * current index and set the more appropriate one in relation\n * to the current `value` and the `flag` state.\n *\n * => This approach avoids the use of Regex during serialization.\n *\n * @param {any} chunks - value to serialize.\n */\nconst _select = chunks => (value, index) => {\n const chunk = chunks[index]\n\n if (value !== undefined) {\n if (chunk.flag) {\n return chunk.prevUndef + value\n }\n return chunk.pure + value\n }\n\n // If the current value is undefined set a flag on the next\n // chunk stating that the previous prop is undefined.\n chunks[index + 1].flag = true\n\n if (chunk.flag) {\n return chunk.bothUndef\n }\n return chunk.isUndef\n}\n\nexport { _select }\n","import { _makeChunks } from './_makeChunks.mjs'\nimport { _makeQueue } from './_makeQueue.mjs'\nimport { _prepare } from './_prepare.mjs'\nimport { _select } from './_select.mjs'\nimport { attr, escape } from './_utils.mjs'\n\n// Doing a lot of preparation work before returning the final function responsible for\n// the stringification.\nconst sjs = schema => {\n const { _preparedString, _preparedSchema } = _prepare(schema)\n\n // Providing preparedSchema for univocal correspondence between created queue and chunks.\n // Provided original schema to keep track of the original properties that gets destroied\n // during schema preparation => e.g. array stringification method.\n const queue = _makeQueue(_preparedSchema, schema)\n const chunks = _makeChunks(_preparedString, queue)\n const selectChunk = _select(chunks)\n\n // Exposed function\n return obj => {\n let temp = ''\n\n for (let i = 0; i < queue.length; ++i) {\n const { serializer, find } = queue[i]\n const raw = find(obj)\n\n temp += selectChunk(serializer(raw), i)\n }\n\n const { flag, pure, prevUndef } = chunks[chunks.length - 1]\n\n return temp + (flag ? prevUndef : pure)\n }\n}\n\nexport { sjs, attr, escape }\n"],"names":["_replaceString","type","indexOf","_matchStartRe","_chunkRegex","_makeChunks","str","queue","_i","chunks","replace","split","result","length","i","chunk","matchProp","_queue$i","name","isLast","matchPropRe","RegExp","withoutInitial","push","flag","pure","prevUndef","isUndef","bothUndef","__find","path","eval","_arraySerializer","serializer","array","acc","len","_makeArraySerializer","bind","_this","JSON","stringify","fnUser","value","attr","usedSerializer","isSJS","_defaultRegex","_escapeCallback","char","escape","regex","_prepareQueue","originalSchema","obj","toString","keys","Object","key","find","_makeQueue","preparedSchema","_stringifyCallback","_","_prepare","schema","_preparedString","_preparedSchema","parse","_select","index","undefined","sjs","selectChunk","temp","raw"],"mappings":"AAAA,IAAMA,eAAiB,SAAAC,UAAoC,IAA5BA,EAAKC,QAAQ,UAAmB,YAAc,WAMvEC,cAAgB,YAEhBC,YAAc,cAQdC,YAAc,SAACC,EAAKC,GAWxB,IAVA,IAQIC,EAREC,EAASH,EAIZI,QAAQN,YAAaJ,gBACrBW,MAAM,WACHC,EAAS,GAGTC,EAASJ,EAAOI,OACbC,EAAI,EAAGA,EAAID,IAAUC,EAAG,OACzBC,EAAQN,EAAOK,GAKfE,iBAAiBT,EAAMO,WAANG,EAAUC,iBAG3BC,GAAUX,EAAKM,EAAI,KAAOD,IAC7BL,EAAKC,EAAOD,GAAIN,QAAQ,QACjB,IAAPM,GAAmB,IAAPA,GAKTY,EAAc,IAAIC,OAAOF,SAAgBH,EAAcA,GAEvDM,EAAiBP,EAAML,QAAQP,cAAe,IAEpDS,EAAOW,KAAK,CAKVC,MAAM,EACNC,KAAMV,EAENW,UAAWJ,EAEXK,QAASZ,EAAML,QAAQU,EAAa,IAEpCQ,UAAWN,EAAeZ,QAAQU,EAAa,MAInD,OAAOR,gBC1CHiB,OAAS,SAATA,OAASC,MAGb,IAFA,IAAIxB,IAAM,MAEDQ,EAAI,EAAGA,EAAIgB,KAAKjB,SAAUC,EACjCR,YAAcwB,KAAKhB,QAGrB,OAAOiB,gBAAgBzB,UAGzB,SAAS0B,iBAAkBC,EAAYC,GAIrC,IAFA,IAAIC,EAAM,GACJC,EAAMF,EAAMrB,OAAS,EAClBC,EAAI,EAAGA,EAAIsB,IAAOtB,EACzBqB,GAAUF,EAAWC,EAAMpB,QAK7B,WADAqB,GAAOF,EAAWC,EAAME,SAQ1B,IAAMC,qBAAuB,SAAAJ,GAC3B,MAA0B,mBAAfA,EAAkCD,iBAAiBM,KAAKC,MAAMN,GAClEO,KAAKC,WAcRC,OAAS,SAAAC,UAASA,GAOlBC,KAAO,SAAC3C,EAAMgC,GAIlB,IAAMY,EAAiBZ,GAAcS,OAErC,MAAO,CACLI,OAAO,EACP7C,KAAAA,EACAgC,WACW,UAAThC,EAAmBoC,qBAAqBJ,GAAcY,IAMtDE,cAAgB,eAChBC,gBAAkB,SAAAC,SAAQ,KAAOA,GACjCC,OAAS,SAACC,mBAAAA,IAAAA,EAAQJ,wBAAkBzC,UAAOA,EAAII,QAAQyC,EAAOH,mBCjFpE,SAASI,cAAeC,EAAgB9C,EAAO+C,EAAKnB,GAElD,YAFkDA,IAAAA,EAAM,KAEf,IAArCmB,EAAIC,WAAWrD,QAAQ,SAgB3B,IADA,IAAMsD,EAAOC,OAAOD,KAAKF,GAChBxC,EAAI,EAAGA,EAAI0C,EAAK3C,SAAUC,EAAG,CACpC,IAAM4C,EAAMF,EAAK1C,GACjBsC,cAAcC,EAAgB9C,EAAO+C,EAAII,aAAUvB,GAAKuB,SAlB1D,CACE,IAAMC,EAAO9B,OAAOM,KACGwB,EAAKN,GAE5B9C,EAAMgB,KAAK,CACTU,aAHMA,WAIN0B,KAAAA,EACAzC,KAAMiB,EAAIA,EAAItB,OAAS,MAqB7B,IAAM+C,WAAa,SAACC,EAAgBR,GAClC,IAAM9C,EAAQ,GAEd,OADA6C,cAAcC,EAAgB9C,EAAOsD,GAC9BtD,GCzCHuD,mBAAqB,SAACC,EAAGpB,GAC7B,OAAKA,EAAMG,MACDH,EAAM1C,aADS0C,GAUrBqB,SAAW,SAAAC,GACf,IAAMC,EAAkB1B,KAAKC,UAAUwB,EAAQH,oBAG/C,MAAO,CACLI,gBAAAA,EACAC,gBAJsB3B,KAAK4B,MAAMF,KCJ/BG,QAAU,SAAA5D,mBAAWkC,EAAO2B,GAChC,IAAMvD,EAAQN,EAAO6D,GAErB,YAAcC,IAAV5B,EACE5B,EAAMS,KACDT,EAAMW,UAAYiB,EAEpB5B,EAAMU,KAAOkB,GAKtBlC,EAAO6D,EAAQ,GAAG9C,MAAO,EAErBT,EAAMS,KACDT,EAAMa,UAERb,EAAMY,WClBT6C,IAAM,SAAAP,GACV,MAA6CD,SAASC,GAA9CC,IAAAA,gBAKF3D,EAAQqD,aALWO,gBAKiBF,GACpCxD,EAASJ,YAAY6D,EAAiB3D,GACtCkE,EAAcJ,QAAQ5D,GAG5B,gBAAO6C,GAGL,IAFA,IAAIoB,EAAO,GAEF5D,EAAI,EAAGA,EAAIP,EAAMM,SAAUC,EAAG,CACrC,MAA6BP,EAAMO,GAA3BmB,IAAAA,WACF0C,GAAMhB,IADQA,MACHL,GAEjBoB,GAAQD,EAAYxC,EAAW0C,GAAM7D,GAGvC,MAAkCL,EAAOA,EAAOI,OAAS,GAEzD,OAAO6D,KAFClD,OAAYE,YAAND"}