@sanity/tsdoc
Generate API reference docs from TypeScript projects and store them in a Sanity-friendly JSON format. Render the docs as a static frontend or as React components.
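A minimal configuration sketch, assuming defineConfig is exported from the package root and that a tsdoc.config.ts file in the package directory is picked up by the config loader shown in the bundle below (the option body is a placeholder):

  // tsdoc.config.ts (hypothetical)
  import {defineConfig} from '@sanity/tsdoc'

  export default defineConfig({
    // ...project-specific options
  })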
JavaScript
"use strict";
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf, __hasOwnProp = Object.prototype.hasOwnProperty;
var __copyProps = (to, from, except, desc) => {
if (from && typeof from == "object" || typeof from == "function")
for (let key of __getOwnPropNames(from))
!__hasOwnProp.call(to, key) && key !== except && __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: !0 }) : target,
mod
));
Object.defineProperty(exports, "__esModule", { value: !0 });
var fs = require("fs"), chalk = require("chalk"), path = require("path"), node = require("esbuild-register/dist/node"), apiExtractor = require("@microsoft/api-extractor"), apiExtractorModel = require("@microsoft/api-extractor-model"), pkgUtils = require("@sanity/pkg-utils"), tmp = require("tmp"), tsdocConfig = require("@microsoft/tsdoc-config"), promises = require("fs/promises"), jsoncParser = require("jsonc-parser"), client = require("@sanity/client"), mkdirp = require("mkdirp"), util = require("util"), crypto = require("crypto"), slugify = require("slugify"), tsdoc = require("@microsoft/tsdoc");
function _interopDefaultCompat(e) {
return e && typeof e == "object" && "default" in e ? e : { default: e };
}
var fs__default = /* @__PURE__ */ _interopDefaultCompat(fs), chalk__default = /* @__PURE__ */ _interopDefaultCompat(chalk), path__default = /* @__PURE__ */ _interopDefaultCompat(path), tmp__default = /* @__PURE__ */ _interopDefaultCompat(tmp), mkdirp__default = /* @__PURE__ */ _interopDefaultCompat(mkdirp), util__default = /* @__PURE__ */ _interopDefaultCompat(util), crypto__default = /* @__PURE__ */ _interopDefaultCompat(crypto), slugify__default = /* @__PURE__ */ _interopDefaultCompat(slugify);
function _fileExists(file) {
try {
return fs.accessSync(file), !0;
} catch {
return !1;
}
}
function _printExtractMessages(cwd, messages) {
const warnings = messages.filter((msg) => msg.logLevel === "warning");
for (const msg of warnings) {
const sourceFilePath = msg.sourceFilePath && path__default.default.relative(cwd, msg.sourceFilePath);
    msg.messageId !== "TS6307" && (console.log(""), console.log([`${chalk__default.default.cyan(sourceFilePath || "?")}`, `:${chalk__default.default.yellow(msg.sourceFileLine)}:${chalk__default.default.yellow(msg.sourceFileColumn)}`, ` - ${chalk__default.default.yellow("warning")} ${chalk__default.default.gray(msg.messageId)}\n`, msg.text].join("")));
}
const errors = messages.filter((msg) => msg.logLevel === "error");
for (const msg of errors) {
const sourceFilePath = msg.sourceFilePath && path__default.default.relative(cwd, msg.sourceFilePath);
console.log(""), console.log([`${chalk__default.default.cyan(sourceFilePath || "?")}`, `:${chalk__default.default.yellow(msg.sourceFileLine)}:${chalk__default.default.yellow(msg.sourceFileColumn)}`, ` - ${chalk__default.default.red("error")} ${chalk__default.default.gray(msg.messageId)}
`, msg.text].join(""));
}
}
const CONFIG_FILE_NAMES = ["tsdoc.config.js", "tsdoc.config.jsx", "tsdoc.config.mjs", "tsdoc.config.cjs", "tsdoc.config.ts", "tsdoc.config.tsx"];
function _findConfigFile(options) {
const {
packagePath
} = options;
for (const f of CONFIG_FILE_NAMES) {
const file = path__default.default.resolve(packagePath, f);
if (_fileExists(file)) return file;
}
}
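// Loads the user's tsdoc config file, if one exists. TypeScript/JSX config files are transpiled
// on the fly via esbuild-register before being require()d; when `globalThis.__DEV__` is set,
// registration is skipped and the file is assumed to be loadable as-is. Returns the module's
// default export, falling back to the module itself.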
async function _loadConfig(options) {
const {
packagePath
} = options, configPath = _findConfigFile({
packagePath
});
if (!configPath)
return;
const esbuildOptions = {
jsx: "automatic",
jsxFactory: "createElement",
jsxFragment: "Fragment",
jsxImportSource: "react",
logLevel: "silent"
}, {
unregister
} = globalThis.__DEV__ ? {
unregister: () => {
}
} : node.register(esbuildOptions), config = require(configPath);
return unregister(), config?.default || config;
}
function defineConfig(config) {
return config;
}
const DEFAULT_MESSAGES_CONFIG = {
/**
* Configures handling of diagnostic messages reported by the TypeScript compiler engine while analyzing
* the input .d.ts files.
*
* TypeScript message identifiers start with "TS" followed by an integer. For example: "TS2551"
*
* DEFAULT VALUE: A single "default" entry with logLevel=warning.
*/
compilerMessageReporting: {
/**
* Configures the default routing for messages that don't match an explicit rule in this table.
*/
default: {
/**
       * Specifies whether the message should be written to the tool's output log. Note that
* the "addToApiReportFile" property may supersede this option.
*
* Possible values: "error", "warning", "none"
*
       * Errors cause the build to fail and return a nonzero exit code. Warnings cause a production build to fail
* and return a nonzero exit code. For a non-production build (e.g. when "api-extractor run" includes
* the "--local" option), the warning is displayed but the build will not fail.
*
* DEFAULT VALUE: "warning"
*/
logLevel: "warning",
/**
* When addToApiReportFile is true: If API Extractor is configured to write an API report file (.api.md),
* then the message will be written inside that file; otherwise, the message is instead logged according to
* the "logLevel" option.
*
* DEFAULT VALUE: false
*/
addToApiReportFile: !1
}
// "TS2551": {
// "logLevel": "warning",
// "addToApiReportFile": true
// },
//
// . . .
},
/**
* Configures handling of messages reported by API Extractor during its analysis.
*
* API Extractor message identifiers start with "ae-". For example: "ae-extra-release-tag"
*
* DEFAULT VALUE: See api-extractor-defaults.json for the complete table of extractorMessageReporting mappings
*/
extractorMessageReporting: {
default: {
logLevel: "warning",
addToApiReportFile: !1
},
// 'ae-extra-release-tag': {
// logLevel: 'warning' as ExtractorLogLevel,
// addToApiReportFile: false,
// },
// 'ae-forgotten-export': {
// logLevel: 'error' as ExtractorLogLevel,
// addToApiReportFile: false,
// },
"ae-incompatible-release-tags": {
logLevel: "none",
addToApiReportFile: !1
},
"ae-internal-missing-underscore": {
logLevel: "none",
addToApiReportFile: !1
},
"ae-missing-release-tag": {
logLevel: "none",
addToApiReportFile: !1
}
},
/**
* Configures handling of messages reported by the TSDoc parser when analyzing code comments.
*
* TSDoc message identifiers start with "tsdoc-". For example: "tsdoc-link-tag-unescaped-text"
*
* DEFAULT VALUE: A single "default" entry with logLevel=warning.
*/
tsdocMessageReporting: {
default: {
logLevel: "warning",
addToApiReportFile: !1
}
// 'tsdoc-link-tag-unescaped-text': {
// logLevel: 'warning' as ExtractorLogLevel,
// addToApiReportFile: false,
// },
// 'tsdoc-unsupported-tag': {
// logLevel: 'warning' as ExtractorLogLevel,
// addToApiReportFile: false,
// },
// 'tsdoc-undefined-tag': {
// logLevel: 'warning' as ExtractorLogLevel,
// addToApiReportFile: false,
// },
}
};
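// Builds the API Extractor configuration object for a single entry point. Only the doc model
// (api.json) output is enabled; the API report, .d.ts rollup and tsdoc-metadata outputs are
// turned off. The api.json file is written to the temporary directory and read back in
// _doExtract().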
function createApiExtractorConfig(opts) {
return {
/**
* Optionally specifies another JSON config file that this file extends from. This provides a way for
* standard settings to be shared across multiple projects.
*
* If the path starts with "./" or "../", the path is resolved relative to the folder of the file that contains
* the "extends" field. Otherwise, the first path segment is interpreted as an NPM package name, and will be
* resolved using NodeJS require().
*
* SUPPORTED TOKENS: none
* DEFAULT VALUE: ""
*/
// "extends": "./shared/api-extractor-base.json"
// "extends": "my-package/include/api-extractor-base.json"
/**
* Determines the "<projectFolder>" token that can be used with other config file settings. The project folder
* typically contains the tsconfig.json and package.json config files, but the path is user-defined.
*
* The path is resolved relative to the folder of the config file that contains the setting.
*
* The default value for "projectFolder" is the token "<lookup>", which means the folder is determined by traversing
* parent folders, starting from the folder containing api-extractor.json, and stopping at the first folder
* that contains a tsconfig.json file. If a tsconfig.json file cannot be found in this way, then an error
* will be reported.
*
* SUPPORTED TOKENS: <lookup>
* DEFAULT VALUE: "<lookup>"
*/
projectFolder: opts.packagePath,
/**
* (REQUIRED) Specifies the .d.ts file to be used as the starting point for analysis. API Extractor
* analyzes the symbols exported by this module.
*
* The file extension must be ".d.ts" and not ".ts".
*
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
* prepend a folder token such as "<projectFolder>".
*
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
*/
mainEntryPointFilePath: path__default.default.resolve(opts.packagePath, opts.mainEntryPointFilePath),
/**
* A list of NPM package names whose exports should be treated as part of this package.
*
* For example, suppose that Webpack is used to generate a distributed bundle for the project "library1",
* and another NPM package "library2" is embedded in this bundle. Some types from library2 may become part
* of the exported API for library1, but by default API Extractor would generate a .d.ts rollup that explicitly
* imports library2. To avoid this, we can specify:
*
* "bundledPackages": [ "library2" ],
*
* This would direct API Extractor to embed those types directly in the .d.ts rollup, as if they had been
* local files for library1.
*/
bundledPackages: opts.bundledPackages,
/**
* Determines how the TypeScript compiler engine will be invoked by API Extractor.
*/
compiler: {
/**
* Specifies the path to the tsconfig.json file to be used by API Extractor when analyzing the project.
*
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
* prepend a folder token such as "<projectFolder>".
*
* Note: This setting will be ignored if "overrideTsconfig" is used.
*
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
* DEFAULT VALUE: "<projectFolder>/tsconfig.json"
*/
// tsconfigFilePath: path.resolve(opts.packagePath, opts.tsconfigPath || 'tsconfig.json'),
tsconfigFilePath: `<projectFolder>/${opts.tsconfigPath}`
/**
* Provides a compiler configuration that will be used instead of reading the tsconfig.json file from disk.
* The object must conform to the TypeScript tsconfig schema:
*
* http://json.schemastore.org/tsconfig
*
* If omitted, then the tsconfig.json file will be read from the "projectFolder".
*
* DEFAULT VALUE: no overrideTsconfig section
*/
// overrideTsconfig: {
// // . . .
// },
/**
* This option causes the compiler to be invoked with the --skipLibCheck option. This option is not recommended
* and may cause API Extractor to produce incomplete or incorrect declarations, but it may be required when
* dependencies contain declarations that are incompatible with the TypeScript engine that API Extractor uses
* for its analysis. Where possible, the underlying issue should be fixed rather than relying on skipLibCheck.
*
* DEFAULT VALUE: false
*/
// "skipLibCheck": true,
},
/**
* Configures how the API report file (*.api.md) will be generated.
*/
apiReport: {
/**
* (REQUIRED) Whether to generate an API report.
*/
enabled: !1,
/**
* The filename for the API report files. It will be combined with "reportFolder" or "reportTempFolder" to produce
* a full file path.
*
* The file extension should be ".api.md", and the string should not contain a path separator such as "\" or "/".
*
* SUPPORTED TOKENS: <packageName>, <unscopedPackageName>
* DEFAULT VALUE: "<unscopedPackageName>.api.md"
*/
reportFileName: "<unscopedPackageName>.api.md"
/**
* Specifies the folder where the API report file is written. The file name portion is determined by
* the "reportFileName" setting.
*
* The API report file is normally tracked by Git. Changes to it can be used to trigger a branch policy,
* e.g. for an API review.
*
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
* prepend a folder token such as "<projectFolder>".
*
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
* DEFAULT VALUE: "<projectFolder>/etc/"
*/
// "reportFolder": "<projectFolder>/etc/",
/**
* Specifies the folder where the temporary report file is written. The file name portion is determined by
* the "reportFileName" setting.
*
* After the temporary file is written to disk, it is compared with the file in the "reportFolder".
* If they are different, a production build will fail.
*
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
* prepend a folder token such as "<projectFolder>".
*
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
* DEFAULT VALUE: "<projectFolder>/temp/"
*/
// "reportTempFolder": "<projectFolder>/temp/"
},
/**
* Configures how the doc model file (*.api.json) will be generated.
*/
docModel: {
/**
* (REQUIRED) Whether to generate a doc model file.
*/
enabled: !0,
/**
* The output path for the doc model file. The file extension should be ".api.json".
*
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
* prepend a folder token such as "<projectFolder>".
*
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
* DEFAULT VALUE: "<projectFolder>/temp/<unscopedPackageName>.api.json"
*/
apiJsonFilePath: path__default.default.resolve(opts.tempDirPath, "api.json")
},
/**
* Configures how the .d.ts rollup file will be generated.
*/
dtsRollup: {
/**
* (REQUIRED) Whether to generate the .d.ts rollup file.
*/
enabled: !1
/**
* Specifies the output path for a .d.ts rollup file to be generated without any trimming.
* This file will include all declarations that are exported by the main entry point.
*
* If the path is an empty string, then this file will not be written.
*
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
* prepend a folder token such as "<projectFolder>".
*
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
* DEFAULT VALUE: "<projectFolder>/dist/<unscopedPackageName>.d.ts"
*/
// untrimmedFilePath: '<projectFolder>/dist/es/<unscopedPackageName>.d.ts',
/**
* Specifies the output path for a .d.ts rollup file to be generated with trimming for a "beta" release.
* This file will include only declarations that are marked as "@public" or "@beta".
*
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
* prepend a folder token such as "<projectFolder>".
*
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
* DEFAULT VALUE: ""
*/
// betaTrimmedFilePath: '<projectFolder>/dist/es/<unscopedPackageName>-beta.d.ts',
/**
* Specifies the output path for a .d.ts rollup file to be generated with trimming for a "public" release.
* This file will include only declarations that are marked as "@public".
*
* If the path is an empty string, then this file will not be written.
*
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
* prepend a folder token such as "<projectFolder>".
*
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
* DEFAULT VALUE: ""
*/
// publicTrimmedFilePath: '<projectFolder>/dist/es/<unscopedPackageName>-public.d.ts',
/**
* When a declaration is trimmed, by default it will be replaced by a code comment such as
* "Excluded from this release type: exampleMember". Set "omitTrimmingComments" to true to remove the
* declaration completely.
*
* DEFAULT VALUE: false
*/
// "omitTrimmingComments": true
},
/**
* Configures how the tsdoc-metadata.json file will be generated.
*/
tsdocMetadata: {
/**
* Whether to generate the tsdoc-metadata.json file.
*
* DEFAULT VALUE: true
*/
enabled: !1
/**
* Specifies where the TSDoc metadata file should be written.
*
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
* prepend a folder token such as "<projectFolder>".
*
* The default value is "<lookup>", which causes the path to be automatically inferred from the "tsdocMetadata",
* "typings" or "main" fields of the project's package.json. If none of these fields are set, the lookup
* falls back to "tsdoc-metadata.json" in the package folder.
*
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
* DEFAULT VALUE: "<lookup>"
*/
// tsdocMetadataFilePath: '<projectFolder>/dist/es/tsdoc-metadata.json',
},
/**
* Specifies what type of newlines API Extractor should use when writing output files. By default, the output files
* will be written with Windows-style newlines. To use POSIX-style newlines, specify "lf" instead.
* To use the OS's default newline kind, specify "os".
*
* DEFAULT VALUE: "crlf"
*/
// "newlineKind": "crlf",
/**
* Configures how API Extractor reports error and warning messages produced during analysis.
*
* There are three sources of messages: compiler messages, API Extractor messages, and TSDoc messages.
*/
messages: opts.messagesConfig || DEFAULT_MESSAGES_CONFIG
};
}
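// Promise wrapper around tmp.dir(): resolves with the temporary directory path and the cleanup
// callback provided by tmp.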
function createTempDir() {
return new Promise((resolve, reject) => {
tmp__default.default.dir((err, dirPath, cleanupCallback) => {
if (err) {
reject(err);
return;
}
resolve({
path: dirPath,
cleanup: cleanupCallback
});
});
});
}
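// Merges any custom TSDoc tags into API Extractor's bundled tsdoc-base.json and returns a
// TSDocConfigFile for the extractor. Returns undefined when no custom tags are configured, in
// which case API Extractor falls back to its own TSDoc defaults.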
async function createTSDocConfig(opts) {
const {
customTags
} = opts;
if (customTags.length === 0)
return;
const tsDocBaseBuf = await promises.readFile(require.resolve("@microsoft/api-extractor/extends/tsdoc-base.json")), tsDocBaseConfig = jsoncParser.parse(tsDocBaseBuf.toString()), tagDefinitions = (tsDocBaseConfig.tagDefinitions || []).concat(customTags.map((t) => ({
tagName: `@${t.name}`,
syntaxKind: t.syntaxKind,
allowMultiple: t.allowMultiple
}))), supportForTags = {
...tsDocBaseConfig.supportForTags
};
for (const customTag of customTags)
supportForTags[`@${customTag.name}`] = !0;
const tsDocConfig = {
...tsDocBaseConfig,
noStandardTags: !1,
tagDefinitions,
supportForTags
};
return tsdocConfig.TSDocConfigFile.loadFromObject(tsDocConfig);
}
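// Runs API Extractor for every export entry of the package at `packagePath` (as resolved by
// @sanity/pkg-utils) and returns the parsed package manifest together with one result per export.
// A hypothetical call might look like this (the path and options are placeholders):
//
//   const {pkg, results} = await extract({packagePath: '/path/to/package', strict: false})
//
// The temporary directory used for the intermediate api.json files is cleaned up on both success
// and failure.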
async function extract(options) {
const {
customTags,
packagePath,
rules,
strict,
tsconfig: tsconfigPath = "tsconfig.json",
bundledPackages = []
} = options, tempDir = await createTempDir(), tempDirPath = tempDir.path, packageJsonFullPath = path__default.default.resolve(packagePath, "package.json"), cwd = packagePath, config = await pkgUtils.loadConfig({
cwd
}), strictOptions = pkgUtils.parseStrictOptions(config?.strictOptions ?? {}), logger = pkgUtils.createLogger(), pkg = await pkgUtils.loadPkgWithReporting({
cwd,
logger,
strict
});
logger.info("Using tsconfig: ", path__default.default.resolve(packagePath, tsconfigPath));
const exports2 = pkgUtils.parseExports({
cwd,
pkg,
strict,
logger,
strictOptions
});
try {
const results = [];
for (const exp of exports2) {
if (!exp.source || !exp.default)
continue;
const typesPath = exp.default.replace(/\.[mc]?js$/, ".d.ts"), result = await _doExtract({
customTags,
rules: rules ?? config?.extract?.rules,
mainEntryPointFilePath: typesPath,
packagePath,
tempDirPath,
tsconfigPath,
packageJsonFullPath,
bundledPackages
});
results.push({
exportPath: exp._path,
tempDirPath,
typesPath,
...result
});
}
return tempDir.cleanup(), {
pkg,
results
};
} catch (err) {
throw tempDir.cleanup(), err;
}
}
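// Invokes API Extractor for a single .d.ts entry point. Extractor messages are collected via the
// messageCallback (and marked as handled) instead of being printed directly, and the generated
// api.json doc model is loaded back from the temporary directory.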
async function _doExtract(options) {
const {
customTags,
rules,
mainEntryPointFilePath,
packagePath,
tempDirPath,
tsconfigPath,
packageJsonFullPath,
bundledPackages
} = options, tsdocConfigFile = await createTSDocConfig({
customTags: customTags || []
}), extractorConfig = apiExtractor.ExtractorConfig.prepare({
configObject: createApiExtractorConfig({
mainEntryPointFilePath,
messagesConfig: pkgUtils.getExtractMessagesConfig({
rules
}),
packagePath,
tempDirPath,
tsconfigPath,
bundledPackages
}),
configObjectFullPath: void 0,
packageJson: void 0,
packageJsonFullPath,
tsdocConfigFile
}), messages = [], extractorResult = apiExtractor.Extractor.invoke(extractorConfig, {
// Equivalent to the "--local" command-line parameter
localBuild: !0,
// Equivalent to the "--verbose" command-line parameter
showVerboseMessages: !0,
// handle messages
messageCallback(message) {
messages.push(message), message.handled = !0;
}
});
return {
apiPackage: apiExtractorModel.ApiPackage.loadFromJsonFile(path__default.default.resolve(tempDirPath, "api.json")),
messages,
succeeded: extractorResult.succeeded
};
}
const writeFile = util__default.default.promisify(fs__default.default.writeFile);
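// Writes the transformed documents to disk as pretty-printed JSON when `opts.fs` is given, and
// uploads them to a Sanity dataset when `opts.sanity` includes a token. A hypothetical call
// (the docs value and output path are placeholders):
//
//   await load(docs, {fs: {path: '/tmp/docs.json'}})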
async function load(transformed, opts) {
if (opts.fs) {
const dirPath = path__default.default.dirname(opts.fs.path);
    await mkdirp__default.default(dirPath);
    await writeFile(opts.fs.path, JSON.stringify(transformed, null, 2) + "\n");
}
opts.sanity && opts.sanity.token && await _loadToSanity(opts.sanity, transformed);
}
async function _loadToSanity(sanity, docs) {
let tx = client.createClient({
...sanity,
apiVersion: "2022-10-01",
token: sanity.token,
useCdn: !1
}).transaction();
for (const doc of docs)
tx = tx.createOrReplace(doc);
await tx.commit();
}
function _hash(key) {
return crypto__default.default.createHash("md5").update(key).digest("hex");
}
function _createExportMemberId(_ctx, key) {
return _hash(key);
}
function _isArray(val) {
return Array.isArray(val);
}
function _isRecord(val) {
return typeof val == "object" && !!val;
}
function _sanitizeName(str) {
return str === "Text_2" ? "Text" : str;
}
function _slugify(str) {
return slugify__default.default(str);
}
function _parsePackageName(nameStr) {
const p = nameStr.split("/"), packageScope = p.length > 1 ? p[0] : void 0, packageName = p.length > 1 ? p[1] : p[0];
if (!packageName)
throw new Error(`Invalid package name: ${nameStr}`);
return [packageScope, packageName];
}
function _classIsReactComponentType(node2) {
const extendedName = node2.extendsType?.excerpt.tokens?.[1]?.text || "";
return !!(extendedName.startsWith("React.Component") || extendedName.startsWith("React_2.Component") || extendedName.startsWith("Component") || extendedName.startsWith("React.PureComponent") || extendedName.startsWith("React_2.PureComponent") || extendedName.startsWith("PureComponent"));
}
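// Converts API Extractor excerpt tokens into an array of api.token objects. Plain content tokens
// keep only their text; reference tokens with a resolvable canonical reference also get a Sanity
// reference to the corresponding member document (see _getTokenId).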
function _transformTokens(ctx, tokens) {
if (!ctx.package)
throw new Error("transformTokens: missing package document");
return tokens.map((t, idx) => {
if (t.kind === "Content")
return {
_type: "api.token",
_key: `token${idx}`,
text: t.text
};
if (t.kind === "Reference")
return !t.canonicalReference || !t.canonicalReference.source ? {
_type: "api.token",
_key: `token${idx}`,
text: t.text
} : {
_type: "api.token",
_key: `token${idx}`,
text: t.text,
member: {
_type: "reference",
_ref: _getTokenId(t)
}
};
throw new Error(`tokens: unknown type: ${t.kind}`);
});
}
function _getTokenId(t) {
const _ref = t.canonicalReference?.toString();
return _ref === "!unknown" ? "$$unknown$$" : `tsdoc-${_hash(_ref?.replace("~", "") || "")}`;
}
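// Maps the numeric ReleaseTag enum from @microsoft/api-extractor-model (None, Internal, Alpha,
// Beta, Public) to the string values stored in the documents; None maps to undefined.
// RE_MARKDOWN_HEADER recognizes markdown-style "# Heading" lines inside doc comments so they can
// be turned into block styles.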
const RELEASE_TAGS = {
0: void 0,
1: "internal",
2: "alpha",
3: "beta",
4: "public"
}, RE_MARKDOWN_HEADER = /^([#]{1,6})\s(.*)$/;
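// Transforms a single TSDoc node into a Portable Text-like value: code spans, links, fenced code
// blocks, plain text and paragraphs each map to a span or block shape. Paragraphs also promote a
// leading markdown heading into the block style and hoist link annotations into markDefs.
// Unknown node kinds throw.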
function _transformDocNode(docNode) {
if (docNode.kind === "CodeSpan")
return {
_type: "span",
marks: ["code"],
text: docNode.code
};
if (docNode.kind === "ErrorText")
return {
_type: "span",
marks: [],
text: docNode.text
};
if (docNode.kind === "EscapedText")
return {
_type: "span",
marks: [],
text: docNode.decodedText
};
if (docNode.kind === "FencedCode") {
const node2 = docNode;
return {
_type: "code",
code: node2.code,
language: node2.language
};
}
if (docNode.kind === "LinkTag") {
const linkTag = docNode;
if (linkTag.urlDestination) {
const linkText = linkTag.linkText || linkTag.urlDestination;
return {
_type: "span",
_markDef: {
_type: "link",
href: linkTag.urlDestination
},
marks: [],
text: linkText
};
} else {
let identifier = "", fullReferenceURL = "";
if (linkTag.codeDestination) {
const memberReferences = linkTag.codeDestination.memberReferences;
if (memberReferences.length > 0) {
const memberIdentifier = memberReferences[memberReferences.length - 1]?.memberIdentifier;
fullReferenceURL = memberReferences.map((memberReference) => memberReference.memberIdentifier?.identifier).filter((identifier2) => !!identifier2).join("/"), memberIdentifier && (identifier = memberIdentifier.identifier);
}
}
const linkText = linkTag.linkText || identifier || "???";
return {
_type: "span",
_markDef: {
_type: "link",
href: fullReferenceURL
},
marks: [],
text: linkText
};
}
}
if (docNode.kind === "Paragraph") {
const transformedParagraph = tsdoc.DocNodeTransforms.trimSpacesInParagraph(docNode);
if (transformedParagraph.nodes.length === 1 && transformedParagraph.nodes[0]?.kind === "SoftBreak")
return;
const children = _transformDocCommentContent(transformedParagraph);
if (!children) return;
const markDefs = [];
let style = "normal";
if (children[0]?._type === "span") {
const headerMatch = RE_MARKDOWN_HEADER.exec(children[0].text);
if (headerMatch) {
const child = {
...children[0],
text: headerMatch[2]
};
style = `h${headerMatch[1]?.length}`, children[0] = child;
}
}
for (const child of children)
if (child._type === "span" && _isRecord(child._markDef)) {
const markDefKey = `${child._markDef._type}${markDefs.length}`;
child._markDef._key = markDefKey, _isArray(child.marks) && child.marks.push(markDefKey), markDefs.push({
_key: markDefKey,
...child._markDef
}), delete child._markDef;
}
return children.length === 0 && children.push({
_type: "span",
_key: "0",
marks: [],
text: ""
}), {
_type: "block",
style,
children,
markDefs
};
}
if (docNode.kind === "PlainText")
return {
_type: "span",
marks: [],
text: docNode.text
};
if (docNode.kind === "SoftBreak")
return {
_type: "span",
      text: "\n"
};
if (docNode.kind === "BlockTag")
return {
_type: "span",
marks: [],
text: docNode.tagName
};
throw new Error(`unknown doc node type: ${docNode.kind}`);
}
function _transformDocCommentContent(section) {
if (!section.nodes.length) return;
const nodes = section.nodes.map((node2, idx) => {
if (idx === 0 && node2.kind === "SoftBreak")
return;
const transformedNode = _transformDocNode(node2);
return transformedNode && {
_key: `node${idx}`,
...transformedNode
};
}).filter(Boolean);
return nodes.length ? nodes : void 0;
}
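// Converts a full TSDoc comment into a tsdoc.docComment object: summary, @param blocks, @returns,
// @remarks, @example blocks, other custom blocks, @see blocks, @deprecated and modifier tags.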
function _transformDocComment(docComment) {
const summary = _transformDocCommentContent(docComment.summarySection), parameters = docComment.params.blocks.length ? docComment.params.blocks.map((paramBlock, idx) => ({
_type: "tsdoc.paramBlock",
_key: `paramBlock${idx}`,
name: paramBlock.parameterName,
content: _transformDocCommentContent(paramBlock.content)
})) : void 0, returns = docComment.returnsBlock && {
_type: "tsdoc.returnsBlock",
content: _transformDocCommentContent(docComment.returnsBlock.content)
}, remarks = docComment.remarksBlock && {
_type: "tsdoc.remarksBlock",
content: _transformDocCommentContent(docComment.remarksBlock.content)
}, exampleBlocks = [], customBlocks = [];
for (let i = 0; i < docComment.customBlocks.length; i += 1) {
const customBlock = docComment.customBlocks[i];
customBlock.blockTag.tagNameWithUpperCase === tsdoc.StandardTags.example.tagNameWithUpperCase ? exampleBlocks.push({
_type: "tsdoc.exampleBlock",
_key: `exampleBlock${i}`,
content: _transformDocCommentContent(customBlock.content)
}) : customBlocks.push({
_type: "tsdoc.customBlock",
_key: `customBlock${i}`,
tag: customBlock.blockTag.tagName,
content: _transformDocCommentContent(customBlock.content)
});
}
const seeBlocks = docComment.seeBlocks.length ? docComment.seeBlocks.map((seeBlock, idx) => ({
_type: "tsdoc.seeBlock",
_key: `seeBlock${idx}`,
content: _transformDocCommentContent(seeBlock.content)
})) : void 0, deprecated = docComment.deprecatedBlock && {
_type: "tsdoc.deprecatedBlock",
content: _transformDocCommentContent(docComment.deprecatedBlock.content)
}, modifierTags = docComment.modifierTagSet.nodes.length ? docComment.modifierTagSet.nodes.map((modifierTag, idx) => ({
_type: "tsdoc.modifierTag",
_key: `modifierTag${idx}`,
name: modifierTag.tagName
})) : void 0;
return {
_type: "tsdoc.docComment",
customBlocks: customBlocks.length > 0 ? customBlocks : void 0,
deprecated,
exampleBlocks: exampleBlocks.length > 0 ? exampleBlocks : void 0,
modifierTags,
parameters,
remarks,
returns,
seeBlocks,
summary
};
}
function _transformParameter(ctx, node2, param, idx) {
const tsDocComment = param.tsdocParamBlock?.content;
return {
_type: "api.parameter",
_key: `param${idx}`,
comment: tsDocComment ? {
_type: "tsdoc.docComment",
summary: _transformDocCommentContent(tsDocComment)
} : void 0,
name: param.name,
isOptional: param.isOptional,
releaseTag: RELEASE_TAGS[node2.releaseTag],
type: _transformTokens(ctx, node2.excerptTokens.slice(param.parameterTypeExcerpt.tokenRange.startIndex, param.parameterTypeExcerpt.tokenRange.endIndex))
};
}
function _transformTypeParameter(ctx, node2, p, idx) {
return {
_type: "api.typeParameter",
_key: `typeParameter${idx}`,
name: p.name,
constraintType: _transformTokens(ctx, node2.excerptTokens.slice(p.constraintExcerpt.tokenRange.startIndex, p.constraintExcerpt.tokenRange.endIndex)),
defaultType: _transformTokens(ctx, node2.excerptTokens.slice(p.defaultTypeExcerpt.tokenRange.startIndex, p.defaultTypeExcerpt.tokenRange.endIndex))
};
}
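// Transforms an exported class into an api.class document, including its members, type
// parameters and whether it extends React.Component/PureComponent (isReactComponentType).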
function _transformClass(ctx, node2) {
if (!ctx.export)
throw new Error("transformClass: missing `export` document");
if (!ctx.package)
throw new Error("transformClass: missing `package` document");
if (!ctx.release)
throw new Error("transformClass: missing `release` document");
const docComment = node2.tsdocComment, name = _sanitizeName(node2.name), isReactComponentType = _classIsReactComponentType(node2);
return {
_type: "api.class",
comment: docComment ? _transformDocComment(docComment) : void 0,
export: {
_type: "reference",
_ref: ctx.export._id
},
isReactComponentType,
members: node2.members.map((m, idx) => ({
_key: `member${idx}`,
..._transformClassMember(ctx, m)
})),
name,
package: {
_type: "reference",
_ref: ctx.package._id
},
release: {
_type: "reference",
_ref: ctx.release._id
},
releaseTag: RELEASE_TAGS[node2.releaseTag],
slug: {
_type: "slug",
current: _slugify(name)
},
typeParameters: node2.typeParameters.map((p, idx) => _transformTypeParameter(ctx, node2, p, idx))
};
}
function _transformClassMember(ctx, m) {
if (m.kind === "Constructor") {
const mem = m, docComment = mem.tsdocComment;
return {
_type: "api.constructor",
releaseTag: RELEASE_TAGS[mem.releaseTag],
comment: docComment ? _transformDocComment(docComment) : void 0,
parameters: mem.parameters.map((p, idx) => _transformParameter(ctx, mem, p, idx))
};
}
if (m.kind === "Method") {
const mem = m, docComment = mem.tsdocComment;
return {
_type: "api.method",
comment: docComment ? _transformDocComment(docComment) : void 0,
name: mem.name,
isOptional: mem.isOptional,
isStatic: mem.isStatic,
parameters: mem.parameters.map((p, idx) => _transformParameter(ctx, mem, p, idx)),
releaseTag: RELEASE_TAGS[mem.releaseTag],
returnType: _transformTokens(ctx, mem.excerptTokens.slice(mem.returnTypeExcerpt.tokenRange.startIndex, mem.returnTypeExcerpt.tokenRange.endIndex)),
typeParameters: mem.typeParameters.map((p, idx) => _transformTypeParameter(ctx, mem, p, idx))
};
}
if (m.kind === "Property") {
const mem = m, docComment = mem.tsdocComment;
return {
_type: "api.property",
comment: docComment ? _transformDocComment(docComment) : void 0,
name: mem.name,
isEventProperty: mem.isEventProperty,
isOptional: mem.isOptional,
isStatic: mem.isStatic,
releaseTag: RELEASE_TAGS[mem.releaseTag],
type: _transformTokens(ctx, mem.excerptTokens.slice(mem.propertyTypeExcerpt.tokenRange.startIndex, mem.propertyTypeExcerpt.tokenRange.endIndex))
};
}
throw new Error(`Unknown class member kind: ${m.kind}`);
}
function _transformEnum(ctx, node2) {
if (!ctx.export)
throw new Error("transformEnum: missing `export` document");
if (!ctx.package)
throw new Error("transformEnum: missing `package` document");
if (!ctx.release)
throw new Error("transformEnum: missing `release` document");
const docComment = node2.tsdocComment;
return {
_type: "api.enum",
comment: docComment ? _transformDocComment(docComment) : void 0,
export: {
_type: "reference",
_ref: ctx.export._id
},
members: node2.members.map((m) => _transformEnumMember(ctx, m)),
name: node2.name,
package: {
_type: "reference",
_ref: ctx.package._id
},
release: {
_type: "reference",
_ref: ctx.release._id
},
releaseTag: RELEASE_TAGS[node2.releaseTag],
slug: {
_type: "slug",
current: _slugify(node2.name)
}
};
}
function _transformEnumMember(_ctx, m) {
if (m.kind === "EnumMember") {
const node2 = m;
return {
_type: "api.enumMember",
_key: _hash(node2.canonicalReference.toString()),
name: node2.name,
releaseTag: RELEASE_TAGS[node2.releaseTag]
};
}
throw new Error(`Unknown enum member kind: ${m.kind}`);
}
function _isUpperCase(char) {
return char === char.toUpperCase();
}
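// Heuristic for flagging a function as a React component type. Note that the result of the
// return-type comparison on the last line is discarded (comma operator), so any function whose
// name starts with an uppercase letter (and not "_" or "$") is currently treated as a component
// type here.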
function _functionIsReactComponentType(node2) {
const firstChar = node2.name.slice(0, 1);
if (!_isUpperCase(firstChar) || firstChar === "_" || firstChar === "$")
return !1;
const returnTypeCode = node2.excerptTokens.slice(node2.returnTypeExcerpt.tokenRange.startIndex, node2.returnTypeExcerpt.tokenRange.endIndex).map((t) => t.text).join("").trim();
return returnTypeCode === "React.ReactPortal" || returnTypeCode === "React_2.ReactPortal" || returnTypeCode === "ReactPortal" || returnTypeCode.startsWith("React.ReactPortal |") || returnTypeCode.startsWith("React_2.ReactPortal |") || returnTypeCode.startsWith("ReactPortal |"), !0;
}
const hookRegex = /^use[A-Z0-9].*$/;
function _functionIsReactHook(node2) {
return hookRegex.test(node2.name);
}
function _functionPropsType(ctx, node2) {
const propsParam = node2.parameters[0] && node2.parameters[0].name === "props" && node2.parameters[0];
if (propsParam) {
const propsTokens = _transformTokens(ctx, node2.excerptTokens.slice(propsParam.parameterTypeExcerpt.tokenRange.startIndex, propsParam.parameterTypeExcerpt.tokenRange.endIndex));
if (propsTokens.length)
return propsTokens[0]?.member;
}
}
function _transformFunction(ctx, node2) {
if (!ctx.export)
throw new Error("transformFunction: missing `export` document");
if (!ctx.package)
throw new Error("transformFunction: missing `package` document");
if (!ctx.release)
throw new Error("transformFunction: missing `release` document");
const docComment = node2.tsdocComment, name = _sanitizeName(node2.name), isReactComponentType = _functionIsReactComponentType(node2), isReactHook = _functionIsReactHook(node2), propsType = isReactComponentType ? _functionPropsType(ctx, node2) : void 0;
return {
_type: "api.function",
comment: docComment ? _transformDocComment(docComment) : void 0,
export: {
_type: "reference",
_ref: ctx.export._id
},
isReactComponentType,
isReactHook,
name,
package: {
_type: "reference",
_ref: ctx.package._id
},
parameters: node2.parameters.map((p, idx) => _transformParameter(ctx, node2, p, idx)),
propsType,
release: {
_type: "reference",
_ref: ctx.release._id
},
releaseTag: RELEASE_TAGS[node2.releaseTag],
slug: {
_type: "slug",
current: _slugify(name)
},
returnType: _transformTokens(ctx, node2.excerptTokens.slice(node2.returnTypeExcerpt.tokenRange.startIndex, node2.returnTypeExcerpt.tokenRange.endIndex)),
typeParameters: node2.typeParameters.map((p, idx) => _transformTypeParameter(ctx, node2, p, idx)),
isOverloading: node2.overloadIndex > 1
};
}
function _transformInterface(ctx, node2) {
  if (!ctx.export)
    throw new Error("transformInterface: missing `export` document");
  if (!ctx.package)
    throw new Error("transformInterface: missing `package` document");
  if (!ctx.release)
    throw new Error("transformInterface: missing `release` document");
const docComment = node2.tsdocComment, name = _sanitizeName(node2.name);
return {
_type: "api.interface",
comment: docComment ? _transformDocComment(docComment) : void 0,
export: {
_type: "reference",
_ref: ctx.export._id
},
extends: node2.extendsTypes.map((t, idx) => ({
_type: "api.extend",
_key: `extend${idx}`,
type: _transformTokens(ctx, t.excerpt.tokens.slice(t.excerpt.tokenRange.startIndex, t.excerpt.tokenRange.endIndex))
})),
members: node2.members.map((m, idx) => ({
_key: `member${idx}`,
..._transformInterfaceMember(ctx, m)
})),
name,
package: {
_type: "reference",
_ref: ctx.package._id
},
release: {
_type: "reference",
_ref: ctx.release._id
},
releaseTag: RELEASE_TAGS[node2.releaseTag],
slug: {
_type: "slug",
current: _slugify(name)
},
typeParameters: node2.typeParameters.map((p, idx) => _transformTypeParameter(ctx, node2, p, idx))
};
}
function _transformInterfaceMember(ctx, m) {
if (m.kind === "CallSignature") {
const mem = m, docComment = mem.tsdocComment;
return {
_type: "api.callSignature",
comment: docComment ? _transformDocComment(docComment) : void 0,
// members: mem.members.map((m) => _transformInterfaceMember(ctx, m)),
parameters: mem.parameters.map((p, idx) => _transformParameter(ctx, mem, p, idx)),
releaseTag: RELEASE_TAGS[mem.releaseTag],
returnType: _transformTokens(ctx, mem.excerptTokens.slice(mem.returnTypeExcerpt.tokenRange.startIndex, mem.returnTypeExcerpt.tokenRange.endIndex)),
typeParameters: mem.typeParameters.map((p, idx) => _transformTypeParameter(ctx, mem, p, idx))
};
}
if (m.kind === "ConstructSignature") {
const mem = m, docComment = mem.tsdocComment;
return {
_type: "api.constructSignature",
comment: docComment ? _transformDocComment(docComment) : void 0,
// members: mem.members.map((m) => _transformInterfaceMember(ctx, m)),
parameters: mem.parameters.map((p, idx) => _transformParameter(ctx, mem, p, idx)),
releaseTag: RELEASE_TAGS[mem.releaseTag],
returnType: _transformTokens(ctx, mem.excerptTokens.slice(mem.returnTypeExcerpt.tokenRange.startIndex, mem.returnTypeExcerpt.tokenRange.endIndex)),
typeParameters: mem.typeParameters.map((p, idx) => _transformTypeParameter(ctx, mem, p, idx))
};
}
if (m.kind === "MethodSignature") {
const mem = m, docComment = mem.tsdocComment;
return {
_type: "api.methodSignature",
comment: docComment ? _transformDocComment(docComment) : void 0,
isOptional: mem.isOptional,
// members: mem.members.map((m) => _transformInterfaceMember(ctx, m)),
name: mem.name,
parameters: mem.parameters.map((p, idx) => _transformParameter(ctx, mem, p, idx)),
releaseTag: RELEASE_TAGS[mem.releaseTag],
returnType: _transformTokens(ctx, mem.excerptTokens.slice(mem.returnTypeExcerpt.tokenRange.startIndex, mem.returnTypeExcerpt.tokenRange.endIndex)),
typeParameters: mem.typeParameters.map((p, idx) => _transformTypeParameter(ctx, mem, p, idx))
};
}
if (m.kind === "PropertySignature") {
const mem = m, docComment = mem.tsdocComment;
return {
_type: "api.propertySignature",
comment: docComment ? _transformDocComment(docComment) : void 0,
isOptional: mem.isOptional,
name: mem.name,
releaseTag: RELEASE_TAGS[mem.releaseTag],
type: _transformTokens(ctx, mem.excerptTokens.slice(mem.propertyTypeExcerpt.tokenRange.startIndex, mem.propertyTypeExcerpt.tokenRange.endIndex))
};
}
if (m.kind === "IndexSignature") {
const mem = m, docComment = mem.tsdocComment;
return {
_type: "api.indexSignature",
comment: docComment ? _transformDocComment(docComment) : void 0,
parameters: mem.parameters.map((p, idx) => _transformParameter(ctx, mem, p, idx)),
releaseTag: RELEASE_TAGS[mem.releaseTag],
returnType: _transformTokens(ctx, mem.excerptTokens.slice(mem.returnTypeExcerpt.tokenRange.startIndex, mem.returnTypeExcerpt.tokenRange.endIndex))
};
}
throw new Error(`Unknown interface member kind: ${m.kind}`);
}
function _transformTypeAlias(ctx, node2) {
if (!ctx.export)
throw new Error("transformTypeAlias: missing `export` document");
if (!ctx.package)
throw new Error("transformTypeAlias: missing `package` document");
if (!ctx.release)
throw new Error("transformTypeAlias: missing `release` document");
const docComment = node2.tsdocComment, name = _sanitizeName(node2.name);
return {
_type: "api.typeAlias",
comment: docComment ? _transformDocComment(docComment) : void 0,
export: {
_type: "reference",
_ref: ctx.export._id
},
name,
package: {
_type: "reference",
_ref: ctx.package._id
},
release: {
_type: "reference",
_ref: ctx.release._id
},
releaseTag: RELEASE_TAGS[node2.releaseTag],
slug: {
_type: "slug",
current: _slugify(name)
},
type: _transformTokens(ctx, node2.excerptTokens.slice(node2.typeExcerpt.tokenRange.startIndex, node2.typeExcerpt.tokenRange.endIndex)),
typeParameters: node2.typeParameters.map((param, idx) => _transformTypeParameter(ctx, node2, param, idx))
};
}
function _transformVariable(ctx, node2) {
if (!ctx.export)
throw new Error("transformVariable: missing `export` document");
if (!ctx.package)
throw new Error("transformVariable: missing `package` document");
if (!ctx.release)
throw new Error("transformVariable: missing `release` document");
const name = _sanitizeName(node2.name), docComment = node2.tsdocComment, comment = docComment ? _transformDocComment(docComment) : void 0, type = _transformTokens(ctx, node2.excerptTokens.slice(node2.variableTypeExcerpt.tokenRange.startIndex, node2.variableTypeExcerpt.tokenRange.endIndex)), isReactComponentType = _variableIsReactComponentType(node2), propsType = isReactComponentType ? _variablePropsType(ctx, node2) : void 0;
return {
_type: "api.variable",
comment,
export: {
_type: "reference",
_ref: ctx.export._id
},
isReactComponentType,
name,
package: {
_type: "reference",
_ref: ctx.package._id
},
propsType,
release: {
_type: "reference",
_ref: ctx.release._id
},
releaseTag: RELEASE_TAGS[node2.releaseTag],
slug: {
_type: "slug",
current: _slugify(name)
},
type
};
}
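// Flags a variable as a React component type when its type text starts with a known component
// wrapper (NamedExoticComponent, ForwardRefExoticComponent, MemoExoticComponent, StyledComponent)
// or when it is a function type returning a React element / JSX.Element.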
function _variableIsReactComponentType(node2) {
const typeCode = node2.excerptTokens.slice(node2.variableTypeExcerpt.tokenRange.startIndex, node2.variableTypeExcerpt.tokenRange.endIndex).map((t) => t.text).join("").trim(), isNamedExoticComponent = typeCode.startsWith("React.NamedExoticComponent<") || typeCode.startsWith("React_2.NamedExoticComponent<") || typeCode.startsWith("NamedExoticComponent<"), isForwardRefExoticComponent = typeCode.startsWith("React.ForwardRefExoticComponent<") || typeCode.startsWith("React_2.ForwardRefExoticComponent<") || typeCode.startsWith("ForwardRefExoticComponent<"), isMemoExoticComponent = typeCode.startsWith("React.MemoExoticComponent<") || typeCode.startsWith("React_2.MemoExoticComponent<") || typeCode.startsWith("MemoExoticComponent<"), isStyledComponent = typeCode.startsWith("StyledComponent<"), returnsReactElement = typeCode.endsWith("=> React.ReactElement") || typeCode.endsWith("=> React_2.ReactElement") || typeCode.endsWith("=> ReactElement") || typeCode.endsWith("=> JSX.Element");
return !!(isNamedExoticComponent || isForwardRefExoticComponent || isMemoExoticComponent || isStyledComponent || returnsReactElement);
}
function _variablePropsType(ctx, node2) {
const componentRef = node2.excerptTokens.slice(node2.variableTypeExcerpt.tokenRange.startIndex, node2.variableTypeExcerpt.tokenRange.endIndex).find((t) => t.kind === "Reference" && t.text.endsWith("Props"));
if (componentRef && componentRef.canonicalReference)
return {
_type: "reference",
_ref: _createExportMemberId(ctx, componentRef.canonicalReference.toString())
};
}
function _transformNamespace(ctx, node2) {
if (!ctx.export)
throw new Error("transformNamespace: missing `export` document");
if (!ctx.package)
throw new Error("transformNamespace: missing `package` document");
if (!ctx.release)
throw new Error("transformNamespace: missing `release` document");
const docComment = node2.tsdocComment;
return {
_type: "api.namespace",
comment: docComment ? _transformDocComment(docComment) : void 0,
export: {
_type: "reference",
_ref: ctx.export._id
},
members: node2.members.map((m, idx) => ({
_key: `member${idx}`,
..._transformNamespaceMember(ctx, m)
})),
name: node2.name,
package: {
_type: "reference",
_ref: ctx.package._id
},
release: {
_type: "reference",
_ref: ctx.release._id
},
releaseTag: RELEASE_TAGS[node2.releaseTag],
slug: {
_type: "slug",
current: _slugify(node2.name)
}
};
}
function _transformNamespaceMember(ctx, m) {
if (m.kind === "Class")
    return _transformClass(ctx, m);