UNPKG

@clynn-fe/akfe-editor-jsonc

Version:

Compresses JSON by mapping object keys to shorter codes and then applying gzip

143 lines (129 loc) 3.35 kB
import pako from 'pako' import JSONF from '@clynn-fe/akfe-editor-jsonf' import { biDimensionalArrayToObject, isObject, numberToKey, unique, transformKeys } from './utils' let _nCode = -1 /** * Returns the string using an array of ASCII values */ const getSpecialKey = (aKeys: number[]) => String.fromCharCode(...aKeys) /** * Traverse all the objects looking for keys and set an array with the new keys */ const getKeys = (json: any, aKeys: [string, string][]) => { if (Array.isArray(json)) { aKeys = aKeys.concat( ...json.map((item: any) => isObject(item) || Array.isArray(item) ? unique(getKeys(item, aKeys)) : [] ) ) } else { for (const sKey in json) { if (Object.prototype.hasOwnProperty.call(json, sKey)) { const oItem = json[sKey] if (isObject(oItem) || Array.isArray(oItem)) { aKeys = aKeys.concat(unique(getKeys(oItem, aKeys))) } if (aKeys.every(key => key[1] !== sKey)) { _nCode += 1 const aKey: string[] = [] aKey.push(getSpecialKey(numberToKey(_nCode)), sKey) aKeys.push(aKey as unknown as [string, string]) } } } } return aKeys } /** * Method to compress array objects */ const compressArray = (json: any[], aKeys?: [string, string][]) => json.map(item => compress(item, aKeys)) /** * Method to compress anything but array */ const compressOther = (json: any, aKeys: [string, string][]) => { aKeys = getKeys(json, aKeys) aKeys = unique(aKeys) const oKeys = biDimensionalArrayToObject(aKeys) const oppositeOKeys = Object.entries(oKeys).reduce( (prev: { [key: string]: string }, [key, value]) => Object.assign(prev, { [value]: key }), {} ) return Object.assign(transformKeys(json, oppositeOKeys), { _: oKeys }) } /** * Method to decompress array objects */ const decompressArray = (json: any[]) => json.map(item => decompress(item)) /** * Method to decompress anything but array */ const decompressOther = (jsonCopy: any) => { const oKeys = JSON.parse(JSON.stringify(jsonCopy._)) delete jsonCopy._ return transformKeys(jsonCopy, oKeys) } /** * Compress a RAW JSON */ const compress = (json: 
any, optKeys?: [string, string][]): string => { !optKeys && (_nCode = -1) const aKeys = optKeys || [] return JSONF.stringify( Array.isArray(json) ? compressArray(json, aKeys) : compressOther(json, aKeys) ) } /** * Decompress a compressed JSON */ const decompress = (json: string): any => { const data = JSONF.parse(json) return Array.isArray(data) ? decompressArray(data) : decompressOther(data) } /** * Use LZString to get the compressed string. * @param json * @param bCompress * @returns {String} */ const pack = (json: any, bCompress: boolean) => pako.gzip( globalThis.encodeURIComponent( bCompress ? compress(json) : JSONF.stringify(json) ), { level: 9, to: 'string' } ) /** * Returns the JSON object from the LZW string */ const unpack = (gzipped: string, bDecompress: boolean) => { const str = globalThis.decodeURIComponent( pako.ungzip(gzipped, { to: 'string' }) ) return bDecompress ? decompress(str) : JSONF.parse(str) } export default Object.freeze({ compress, decompress, pack, unpack })