UNPKG

@atlaskit/editor-wikimarkup-transformer

Version:

Wiki markup transformer for JIRA and Confluence

76 lines (73 loc) 4.11 kB
"use strict";

var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = exports.WikiMarkupTransformer = void 0;
var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime/helpers/classCallCheck"));
var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass"));
var _schemaDefault = require("@atlaskit/adf-schema/schema-default");
var _encoder = require("./encoder");
var _abstractTree = _interopRequireDefault(require("./parser/abstract-tree"));
var _issueKey = require("./parser/tokenize/issue-key");

// Babel helper: collects the own enumerable string keys of `e` and, when
// symbol support exists, its enumerable own symbols, so _objectSpread can
// copy all of them.
function ownKeys(e, r) { var t = Object.keys(e); if (Object.getOwnPropertySymbols) { var o = Object.getOwnPropertySymbols(e); r && (o = o.filter(function (r) { return Object.getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }

// Babel helper: emulates the object-spread operator — copies each argument's
// own enumerable properties onto `e`, later arguments winning.
function _objectSpread(e) { for (var r = 1; r < arguments.length; r++) { var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? ownKeys(Object(t), !0).forEach(function (r) { (0, _defineProperty2.default)(e, r, t[r]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) { Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r)); }); } return e; }

/**
 * Transforms between ProseMirror documents and wiki markup
 * (the Jira / Confluence text format).
 */
var WikiMarkupTransformer = exports.WikiMarkupTransformer = /*#__PURE__*/function () {
  /**
   * @param {Schema} [schema] ProseMirror schema used when parsing;
   *   defaults to the ADF default schema.
   */
  function WikiMarkupTransformer() {
    var schema = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : _schemaDefault.defaultSchema;
    (0, _classCallCheck2.default)(this, WikiMarkupTransformer);
    this.schema = schema;
  }
  return (0, _createClass2.default)(WikiMarkupTransformer, [{
    key: "sanitizeWikiMarkup",
    /**
     * Removes every NUL character from the markup.
     * [ADFS-725] Jira breaks if there are null chars; a /g regex is required
     * because String.replace with a plain string removes only the first match.
     * @param {string} wikiMarkup
     * @returns {string} markup with all \0 characters stripped
     */
    value: function sanitizeWikiMarkup(wikiMarkup) {
      // Ignored via go/ees005
      // eslint-disable-next-line require-unicode-regexp
      return wikiMarkup.replace(/\0/g, '');
    }
  }, {
    key: "encode",
    /**
     * Serializes a ProseMirror node to wiki markup.
     * @param {PMNode} node document (or fragment root) to encode
     * @param {Context} [context] optional conversion context
     * @returns {string} sanitized wiki markup
     */
    value: function encode(node, context) {
      var wikiMarkup = (0, _encoder.encode)(node, normalizeContextObject(context));
      // [ADFS-725] Jira breaks if there are null chars; strip them here.
      return this.sanitizeWikiMarkup(wikiMarkup);
    }
  }, {
    key: "parse",
    /**
     * Parses wiki markup into a ProseMirror document.
     * @param {string} wikiMarkup
     * @param {Context} [context] optional conversion context
     * @returns {PMNode} ProseMirror document built with this.schema
     */
    value: function parse(wikiMarkup, context) {
      // [ADFS-725] Jira breaks if there are null chars; strip them before parsing.
      var sanitizedWikiMarkup = this.sanitizeWikiMarkup(wikiMarkup);
      var tree = new _abstractTree.default(this.schema, sanitizedWikiMarkup);
      return tree.getProseMirrorModel(this.buildContext(normalizeContextObject(context)));
    }
  }, {
    key: "buildContext",
    /**
     * Augments the context with a precompiled issue-key regex derived from
     * the inline-card conversion map; returns {} when no context was given.
     * @param {Context} [context]
     * @returns {Object} context plus issueKeyRegex (undefined when there is
     *   no conversion map)
     */
    value: function buildContext(context) {
      return context ? _objectSpread(_objectSpread({}, context), {}, {
        issueKeyRegex: context.conversion ? (0, _issueKey.buildIssueKeyRegex)(context.conversion.inlineCardConversion) : undefined
      }) : {};
    }
  }]);
}();

/**
 * Lower-cases the keys of context.conversion.mentionConversion so mention
 * lookups are case-insensitive (the map is a pure lookup table, so key case
 * carries no meaning).
 *
 * Fix: previously this reassigned context.conversion.mentionConversion in
 * place, mutating the caller-owned context object. Both call sites consume
 * only the return value, so we now return a shallow-copied context instead
 * and leave the caller's object untouched.
 *
 * @param {Context} [context]
 * @returns {Context} the original context when there is nothing to
 *   normalize, otherwise a copy with a lower-cased mentionConversion map
 */
var normalizeContextObject = function normalizeContextObject(context) {
  if (!context || !context.conversion || !context.conversion.mentionConversion) {
    // nothing to normalize, return original object
    return context;
  }
  var source = context.conversion.mentionConversion;
  var mentionConversion = {};
  // Object.keys (own keys only) instead of for...in: no inherited keys,
  // no guard-for-in suppression needed.
  Object.keys(source).forEach(function (key) {
    mentionConversion[key.toLowerCase()] = source[key];
  });
  return _objectSpread(_objectSpread({}, context), {}, {
    conversion: _objectSpread(_objectSpread({}, context.conversion), {}, {
      mentionConversion: mentionConversion
    })
  });
};
var _default = exports.default = WikiMarkupTransformer;