UNPKG

@sanity/tsdoc

Generate API reference docs from TypeScript projects and store them in a Sanity-friendly JSON format. Render the docs as a static frontend, or as React components.
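The bundled sources in the source map below include a defineConfig helper (src/core/config/defineConfig.ts) and a loader that picks up a tsdoc.config.{js,jsx,mjs,cjs,ts,tsx} file from the package root (src/core/config/_findConfigFile.ts). A minimal sketch of such a config file, assuming defineConfig is re-exported from the @sanity/tsdoc package root and leaving the concrete SanityTSDocConfigOptions fields to the package's own type definitions:

    // tsdoc.config.ts — illustrative sketch, not taken from the package docs
    import {defineConfig} from '@sanity/tsdoc'

    // The loader uses the default export (config?.default || config), so a
    // default-exported defineConfig(...) call is picked up automatically.
    export default defineConfig({
      // Options are typed as SanityTSDocConfigOptions; consult the package's
      // type definitions for the available fields before relying on any here.
    })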

1 line · 116 kB
{"version":3,"file":"index.mjs","sources":["../../src/core/_lib/_fileExists.ts","../../src/core/_printExtractMessages.ts","../../src/core/config/_findConfigFile.ts","../../src/core/config/_loadConfig.ts","../../src/core/config/defineConfig.ts","../../src/core/extract/defaults.ts","../../src/core/extract/apiExtractorConfig.ts","../../src/core/extract/helpers.ts","../../src/core/extract/tsDocConfig.ts","../../src/core/extract/extract.ts","../../src/core/load/helpers.ts","../../src/core/load/load.ts","../../src/core/transform/helpers.ts","../../src/core/transform/_classIsReactComponentType.ts","../../src/core/transform/_transformTokens.ts","../../src/core/transform/constants.ts","../../src/core/transform/transformDocComment.ts","../../src/core/transform/_transformParameter.ts","../../src/core/transform/_transformTypeParameter.ts","../../src/core/transform/transformClass.ts","../../src/core/transform/transformEnum.ts","../../src/core/transform/_functionIsReactComponentType.ts","../../src/core/transform/_functionIsReactHook.ts","../../src/core/transform/_functionReactComponentPropsType.ts","../../src/core/transform/transformFunction.ts","../../src/core/transform/transformInterface.ts","../../src/core/transform/transformTypeAlias.ts","../../src/core/transform/transformVariable.ts","../../src/core/transform/transformNamespace.ts","../../src/core/transform/transformExportMember.ts","../../src/core/transform/transformPackage.ts","../../src/core/transform/transform.ts"],"sourcesContent":["import {accessSync} from 'fs'\n\n/** @internal */\nexport function _fileExists(file: string): boolean {\n try {\n accessSync(file)\n\n return true\n } catch (_) {\n return false\n }\n}\n","/* eslint-disable no-console */\n\nimport {ExtractorMessage} from '@microsoft/api-extractor'\nimport chalk from 'chalk'\nimport path from 'path'\n\n/** @internal */\nexport function _printExtractMessages(cwd: string, messages: ExtractorMessage[]): void {\n const warnings = messages.filter((msg) => msg.logLevel === 'warning')\n\n for (const msg of warnings) {\n const sourceFilePath = msg.sourceFilePath && path.relative(cwd, msg.sourceFilePath)\n\n if (msg.messageId === 'TS6307') {\n continue\n }\n\n console.log('')\n\n console.log(\n [\n `${chalk.cyan(sourceFilePath || '?')}`,\n `:${chalk.yellow(msg.sourceFileLine)}:${chalk.yellow(msg.sourceFileColumn)}`,\n ` - ${chalk.yellow('warning')} ${chalk.gray(msg.messageId)}\\n`,\n msg.text,\n ].join(''),\n )\n }\n\n const errors: ExtractorMessage[] = messages.filter((msg) => msg.logLevel === 'error')\n\n for (const msg of errors) {\n const sourceFilePath = msg.sourceFilePath && path.relative(cwd, msg.sourceFilePath)\n\n console.log('')\n\n console.log(\n [\n `${chalk.cyan(sourceFilePath || '?')}`,\n `:${chalk.yellow(msg.sourceFileLine)}:${chalk.yellow(msg.sourceFileColumn)}`,\n ` - ${chalk.red('error')} ${chalk.gray(msg.messageId)}\\n`,\n msg.text,\n ].join(''),\n )\n }\n}\n","import path from 'path'\n\nimport {_fileExists} from '../_lib/_fileExists'\n\nconst CONFIG_FILE_NAMES = [\n 'tsdoc.config.js',\n 'tsdoc.config.jsx',\n 'tsdoc.config.mjs',\n 'tsdoc.config.cjs',\n 'tsdoc.config.ts',\n 'tsdoc.config.tsx',\n]\n\n/** @internal */\nexport function _findConfigFile(options: {packagePath: string}): string | undefined {\n const {packagePath} = options\n\n for (const f of CONFIG_FILE_NAMES) {\n const file = path.resolve(packagePath, f)\n\n if (_fileExists(file)) return file\n }\n\n return undefined\n}\n","import {register} from 'esbuild-register/dist/node'\n\nimport {_findConfigFile} from 
'./_findConfigFile'\nimport type {SanityTSDocConfigOptions} from './types'\n\ntype RegisterOptions = Exclude<Parameters<typeof register>[0], undefined>\n\n/** @internal */\nexport async function _loadConfig(options: {\n packagePath: string\n}): Promise<SanityTSDocConfigOptions | undefined> {\n const {packagePath} = options\n\n const configPath = _findConfigFile({packagePath})\n\n if (!configPath) {\n return undefined\n }\n\n const esbuildOptions = {\n jsx: 'automatic',\n jsxFactory: 'createElement',\n jsxFragment: 'Fragment',\n jsxImportSource: 'react',\n logLevel: 'silent',\n } satisfies RegisterOptions\n\n const {unregister} = globalThis.__DEV__ ? {unregister: () => undefined} : register(esbuildOptions)\n\n // eslint-disable-next-line @typescript-eslint/no-var-requires\n const config = require(configPath)\n\n // Unregister the require hook as we don't need it anymore\n unregister()\n\n return config?.default || config\n}\n","import {SanityTSDocConfigOptions} from './types'\n\n/** @public */\nexport function defineConfig(config: SanityTSDocConfigOptions): SanityTSDocConfigOptions {\n return config\n}\n","import {ExtractorLogLevel, IExtractorMessagesConfig} from '@microsoft/api-extractor'\n\nexport const DEFAULT_MESSAGES_CONFIG: IExtractorMessagesConfig = {\n /**\n * Configures handling of diagnostic messages reported by the TypeScript compiler engine while analyzing\n * the input .d.ts files.\n *\n * TypeScript message identifiers start with \"TS\" followed by an integer. For example: \"TS2551\"\n *\n * DEFAULT VALUE: A single \"default\" entry with logLevel=warning.\n */\n compilerMessageReporting: {\n /**\n * Configures the default routing for messages that don't match an explicit rule in this table.\n */\n default: {\n /**\n * Specifies whether the message should be written to the the tool's output log. Note that\n * the \"addToApiReportFile\" property may supersede this option.\n *\n * Possible values: \"error\", \"warning\", \"none\"\n *\n * Errors cause the build to fail and return a nonzero exit code. Warnings cause a production build fail\n * and return a nonzero exit code. For a non-production build (e.g. when \"api-extractor run\" includes\n * the \"--local\" option), the warning is displayed but the build will not fail.\n *\n * DEFAULT VALUE: \"warning\"\n */\n logLevel: 'warning' as ExtractorLogLevel,\n\n /**\n * When addToApiReportFile is true: If API Extractor is configured to write an API report file (.api.md),\n * then the message will be written inside that file; otherwise, the message is instead logged according to\n * the \"logLevel\" option.\n *\n * DEFAULT VALUE: false\n */\n addToApiReportFile: false,\n },\n\n // \"TS2551\": {\n // \"logLevel\": \"warning\",\n // \"addToApiReportFile\": true\n // },\n //\n // . . .\n },\n\n /**\n * Configures handling of messages reported by API Extractor during its analysis.\n *\n * API Extractor message identifiers start with \"ae-\". 
For example: \"ae-extra-release-tag\"\n *\n * DEFAULT VALUE: See api-extractor-defaults.json for the complete table of extractorMessageReporting mappings\n */\n extractorMessageReporting: {\n default: {\n logLevel: 'warning' as ExtractorLogLevel,\n addToApiReportFile: false,\n },\n\n // 'ae-extra-release-tag': {\n // logLevel: 'warning' as ExtractorLogLevel,\n // addToApiReportFile: false,\n // },\n\n // 'ae-forgotten-export': {\n // logLevel: 'error' as ExtractorLogLevel,\n // addToApiReportFile: false,\n // },\n\n 'ae-incompatible-release-tags': {\n logLevel: 'none' as ExtractorLogLevel,\n addToApiReportFile: false,\n },\n\n 'ae-internal-missing-underscore': {\n logLevel: 'none' as ExtractorLogLevel,\n addToApiReportFile: false,\n },\n\n 'ae-missing-release-tag': {\n logLevel: 'none' as ExtractorLogLevel,\n addToApiReportFile: false,\n },\n },\n\n /**\n * Configures handling of messages reported by the TSDoc parser when analyzing code comments.\n *\n * TSDoc message identifiers start with \"tsdoc-\". For example: \"tsdoc-link-tag-unescaped-text\"\n *\n * DEFAULT VALUE: A single \"default\" entry with logLevel=warning.\n */\n tsdocMessageReporting: {\n default: {\n logLevel: 'warning' as ExtractorLogLevel,\n addToApiReportFile: false,\n },\n\n // 'tsdoc-link-tag-unescaped-text': {\n // logLevel: 'warning' as ExtractorLogLevel,\n // addToApiReportFile: false,\n // },\n\n // 'tsdoc-unsupported-tag': {\n // logLevel: 'warning' as ExtractorLogLevel,\n // addToApiReportFile: false,\n // },\n\n // 'tsdoc-undefined-tag': {\n // logLevel: 'warning' as ExtractorLogLevel,\n // addToApiReportFile: false,\n // },\n },\n}\n","import type {IConfigFile, IExtractorMessagesConfig} from '@microsoft/api-extractor'\nimport path from 'path'\n\nimport {DEFAULT_MESSAGES_CONFIG} from './defaults'\n\nexport function createApiExtractorConfig(opts: {\n mainEntryPointFilePath: string\n messagesConfig: IExtractorMessagesConfig\n packagePath: string\n tempDirPath: string\n tsconfigPath: string\n bundledPackages: string[]\n}): IConfigFile {\n /**\n * Config file for API Extractor. For more info, please visit: https://api-extractor.com\n */\n return {\n /**\n * Optionally specifies another JSON config file that this file extends from. This provides a way for\n * standard settings to be shared across multiple projects.\n *\n * If the path starts with \"./\" or \"../\", the path is resolved relative to the folder of the file that contains\n * the \"extends\" field. Otherwise, the first path segment is interpreted as an NPM package name, and will be\n * resolved using NodeJS require().\n *\n * SUPPORTED TOKENS: none\n * DEFAULT VALUE: \"\"\n */\n // \"extends\": \"./shared/api-extractor-base.json\"\n // \"extends\": \"my-package/include/api-extractor-base.json\"\n\n /**\n * Determines the \"<projectFolder>\" token that can be used with other config file settings. The project folder\n * typically contains the tsconfig.json and package.json config files, but the path is user-defined.\n *\n * The path is resolved relative to the folder of the config file that contains the setting.\n *\n * The default value for \"projectFolder\" is the token \"<lookup>\", which means the folder is determined by traversing\n * parent folders, starting from the folder containing api-extractor.json, and stopping at the first folder\n * that contains a tsconfig.json file. 
If a tsconfig.json file cannot be found in this way, then an error\n * will be reported.\n *\n * SUPPORTED TOKENS: <lookup>\n * DEFAULT VALUE: \"<lookup>\"\n */\n projectFolder: opts.packagePath,\n\n /**\n * (REQUIRED) Specifies the .d.ts file to be used as the starting point for analysis. API Extractor\n * analyzes the symbols exported by this module.\n *\n * The file extension must be \".d.ts\" and not \".ts\".\n *\n * The path is resolved relative to the folder of the config file that contains the setting; to change this,\n * prepend a folder token such as \"<projectFolder>\".\n *\n * SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>\n */\n mainEntryPointFilePath: path.resolve(opts.packagePath, opts.mainEntryPointFilePath),\n\n /**\n * A list of NPM package names whose exports should be treated as part of this package.\n *\n * For example, suppose that Webpack is used to generate a distributed bundle for the project \"library1\",\n * and another NPM package \"library2\" is embedded in this bundle. Some types from library2 may become part\n * of the exported API for library1, but by default API Extractor would generate a .d.ts rollup that explicitly\n * imports library2. To avoid this, we can specify:\n *\n * \"bundledPackages\": [ \"library2\" ],\n *\n * This would direct API Extractor to embed those types directly in the .d.ts rollup, as if they had been\n * local files for library1.\n */\n bundledPackages: opts.bundledPackages,\n\n /**\n * Determines how the TypeScript compiler engine will be invoked by API Extractor.\n */\n compiler: {\n /**\n * Specifies the path to the tsconfig.json file to be used by API Extractor when analyzing the project.\n *\n * The path is resolved relative to the folder of the config file that contains the setting; to change this,\n * prepend a folder token such as \"<projectFolder>\".\n *\n * Note: This setting will be ignored if \"overrideTsconfig\" is used.\n *\n * SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>\n * DEFAULT VALUE: \"<projectFolder>/tsconfig.json\"\n */\n // tsconfigFilePath: path.resolve(opts.packagePath, opts.tsconfigPath || 'tsconfig.json'),\n tsconfigFilePath: `<projectFolder>/${opts.tsconfigPath}`,\n\n /**\n * Provides a compiler configuration that will be used instead of reading the tsconfig.json file from disk.\n * The object must conform to the TypeScript tsconfig schema:\n *\n * http://json.schemastore.org/tsconfig\n *\n * If omitted, then the tsconfig.json file will be read from the \"projectFolder\".\n *\n * DEFAULT VALUE: no overrideTsconfig section\n */\n // overrideTsconfig: {\n // // . . .\n // },\n\n /**\n * This option causes the compiler to be invoked with the --skipLibCheck option. This option is not recommended\n * and may cause API Extractor to produce incomplete or incorrect declarations, but it may be required when\n * dependencies contain declarations that are incompatible with the TypeScript engine that API Extractor uses\n * for its analysis. Where possible, the underlying issue should be fixed rather than relying on skipLibCheck.\n *\n * DEFAULT VALUE: false\n */\n // \"skipLibCheck\": true,\n },\n\n /**\n * Configures how the API report file (*.api.md) will be generated.\n */\n apiReport: {\n /**\n * (REQUIRED) Whether to generate an API report.\n */\n enabled: false,\n\n /**\n * The filename for the API report files. 
It will be combined with \"reportFolder\" or \"reportTempFolder\" to produce\n * a full file path.\n *\n * The file extension should be \".api.md\", and the string should not contain a path separator such as \"\\\" or \"/\".\n *\n * SUPPORTED TOKENS: <packageName>, <unscopedPackageName>\n * DEFAULT VALUE: \"<unscopedPackageName>.api.md\"\n */\n reportFileName: '<unscopedPackageName>.api.md',\n\n /**\n * Specifies the folder where the API report file is written. The file name portion is determined by\n * the \"reportFileName\" setting.\n *\n * The API report file is normally tracked by Git. Changes to it can be used to trigger a branch policy,\n * e.g. for an API review.\n *\n * The path is resolved relative to the folder of the config file that contains the setting; to change this,\n * prepend a folder token such as \"<projectFolder>\".\n *\n * SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>\n * DEFAULT VALUE: \"<projectFolder>/etc/\"\n */\n // \"reportFolder\": \"<projectFolder>/etc/\",\n\n /**\n * Specifies the folder where the temporary report file is written. The file name portion is determined by\n * the \"reportFileName\" setting.\n *\n * After the temporary file is written to disk, it is compared with the file in the \"reportFolder\".\n * If they are different, a production build will fail.\n *\n * The path is resolved relative to the folder of the config file that contains the setting; to change this,\n * prepend a folder token such as \"<projectFolder>\".\n *\n * SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>\n * DEFAULT VALUE: \"<projectFolder>/temp/\"\n */\n // \"reportTempFolder\": \"<projectFolder>/temp/\"\n },\n\n /**\n * Configures how the doc model file (*.api.json) will be generated.\n */\n docModel: {\n /**\n * (REQUIRED) Whether to generate a doc model file.\n */\n enabled: true,\n\n /**\n * The output path for the doc model file. 
The file extension should be \".api.json\".\n *\n * The path is resolved relative to the folder of the config file that contains the setting; to change this,\n * prepend a folder token such as \"<projectFolder>\".\n *\n * SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>\n * DEFAULT VALUE: \"<projectFolder>/temp/<unscopedPackageName>.api.json\"\n */\n apiJsonFilePath: path.resolve(opts.tempDirPath, 'api.json'),\n },\n\n /**\n * Configures how the .d.ts rollup file will be generated.\n */\n dtsRollup: {\n /**\n * (REQUIRED) Whether to generate the .d.ts rollup file.\n */\n enabled: false,\n\n /**\n * Specifies the output path for a .d.ts rollup file to be generated without any trimming.\n * This file will include all declarations that are exported by the main entry point.\n *\n * If the path is an empty string, then this file will not be written.\n *\n * The path is resolved relative to the folder of the config file that contains the setting; to change this,\n * prepend a folder token such as \"<projectFolder>\".\n *\n * SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>\n * DEFAULT VALUE: \"<projectFolder>/dist/<unscopedPackageName>.d.ts\"\n */\n // untrimmedFilePath: '<projectFolder>/dist/es/<unscopedPackageName>.d.ts',\n\n /**\n * Specifies the output path for a .d.ts rollup file to be generated with trimming for a \"beta\" release.\n * This file will include only declarations that are marked as \"@public\" or \"@beta\".\n *\n * The path is resolved relative to the folder of the config file that contains the setting; to change this,\n * prepend a folder token such as \"<projectFolder>\".\n *\n * SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>\n * DEFAULT VALUE: \"\"\n */\n // betaTrimmedFilePath: '<projectFolder>/dist/es/<unscopedPackageName>-beta.d.ts',\n\n /**\n * Specifies the output path for a .d.ts rollup file to be generated with trimming for a \"public\" release.\n * This file will include only declarations that are marked as \"@public\".\n *\n * If the path is an empty string, then this file will not be written.\n *\n * The path is resolved relative to the folder of the config file that contains the setting; to change this,\n * prepend a folder token such as \"<projectFolder>\".\n *\n * SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>\n * DEFAULT VALUE: \"\"\n */\n // publicTrimmedFilePath: '<projectFolder>/dist/es/<unscopedPackageName>-public.d.ts',\n\n /**\n * When a declaration is trimmed, by default it will be replaced by a code comment such as\n * \"Excluded from this release type: exampleMember\". Set \"omitTrimmingComments\" to true to remove the\n * declaration completely.\n *\n * DEFAULT VALUE: false\n */\n // \"omitTrimmingComments\": true\n },\n\n /**\n * Configures how the tsdoc-metadata.json file will be generated.\n */\n tsdocMetadata: {\n /**\n * Whether to generate the tsdoc-metadata.json file.\n *\n * DEFAULT VALUE: true\n */\n enabled: false,\n /**\n * Specifies where the TSDoc metadata file should be written.\n *\n * The path is resolved relative to the folder of the config file that contains the setting; to change this,\n * prepend a folder token such as \"<projectFolder>\".\n *\n * The default value is \"<lookup>\", which causes the path to be automatically inferred from the \"tsdocMetadata\",\n * \"typings\" or \"main\" fields of the project's package.json. 
If none of these fields are set, the lookup\n * falls back to \"tsdoc-metadata.json\" in the package folder.\n *\n * SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>\n * DEFAULT VALUE: \"<lookup>\"\n */\n // tsdocMetadataFilePath: '<projectFolder>/dist/es/tsdoc-metadata.json',\n },\n\n /**\n * Specifies what type of newlines API Extractor should use when writing output files. By default, the output files\n * will be written with Windows-style newlines. To use POSIX-style newlines, specify \"lf\" instead.\n * To use the OS's default newline kind, specify \"os\".\n *\n * DEFAULT VALUE: \"crlf\"\n */\n // \"newlineKind\": \"crlf\",\n\n /**\n * Configures how API Extractor reports error and warning messages produced during analysis.\n *\n * There are three sources of messages: compiler messages, API Extractor messages, and TSDoc messages.\n */\n messages: opts.messagesConfig || DEFAULT_MESSAGES_CONFIG,\n }\n}\n","import tmp from 'tmp'\n\nexport function createTempDir(): Promise<{path: string; cleanup: () => void}> {\n return new Promise((resolve, reject) => {\n tmp.dir((err, dirPath, cleanupCallback) => {\n if (err) {\n reject(err)\n\n return\n }\n\n resolve({path: dirPath, cleanup: cleanupCallback})\n })\n })\n}\n","import {TSDocConfigFile} from '@microsoft/tsdoc-config'\nimport {readFile} from 'fs/promises'\nimport {parse} from 'jsonc-parser'\n\nimport {TSDocCustomTag} from './types'\n\nexport async function createTSDocConfig(opts: {\n customTags: TSDocCustomTag[]\n}): Promise<TSDocConfigFile | undefined> {\n const {customTags} = opts\n\n if (customTags.length === 0) {\n return undefined\n }\n\n const tsDocBaseBuf = await readFile(\n require.resolve('@microsoft/api-extractor/extends/tsdoc-base.json'),\n )\n\n // Include the definitions that are required for API Extractor\n // extends: ['@microsoft/api-extractor/extends/tsdoc-base.json'],\n const tsDocBaseConfig = parse(tsDocBaseBuf.toString())\n\n // Define custom tags and specify how they should be parsed\n const tagDefinitions = (tsDocBaseConfig.tagDefinitions || []).concat(\n customTags.map((t) => ({\n tagName: `@${t.name}`,\n syntaxKind: t.syntaxKind,\n allowMultiple: t.allowMultiple,\n })),\n )\n\n // Indicate that custom tags are supported by your tooling.\n // (Without this, warnings may be reported saying that a tag is unsupported.)\n const supportForTags = {...tsDocBaseConfig.supportForTags}\n\n for (const customTag of customTags) {\n supportForTags[`@${customTag.name}`] = true\n }\n\n const tsDocConfig = {\n ...tsDocBaseConfig,\n noStandardTags: false,\n tagDefinitions,\n supportForTags,\n }\n\n return TSDocConfigFile.loadFromObject(tsDocConfig)\n}\n","import {Extractor, ExtractorConfig, type ExtractorMessage} from '@microsoft/api-extractor'\nimport {ApiPackage} from '@microsoft/api-extractor-model'\nimport {\n createLogger,\n getExtractMessagesConfig,\n loadConfig,\n loadPkgWithReporting,\n type PackageJSON,\n parseExports,\n parseStrictOptions,\n type PkgConfigOptions,\n} from '@sanity/pkg-utils'\nimport path from 'path'\n\nimport {createApiExtractorConfig} from './apiExtractorConfig'\nimport {createTempDir} from './helpers'\nimport {createTSDocConfig} from './tsDocConfig'\nimport type {TSDocCustomTag} from './types'\n\n/**\n * @public\n */\nexport interface ExtractResult {\n apiPackage?: ApiPackage\n exportPath: string\n messages: ExtractorMessage[]\n succeeded: boolean\n tempDirPath: string\n typesPath: string\n}\n\n/**\n * Extract API information\n *\n * @public\n */\nexport async function 
extract(options: {\n customTags?: TSDocCustomTag[]\n packagePath: string\n rules?: NonNullable<PkgConfigOptions['extract']>['rules']\n strict: boolean\n tsconfig?: string\n bundledPackages?: string[]\n}): Promise<{pkg: PackageJSON; results: ExtractResult[]}> {\n const {\n customTags,\n packagePath,\n rules,\n strict,\n tsconfig: tsconfigPath = 'tsconfig.json',\n bundledPackages = [],\n } = options\n const tempDir = await createTempDir()\n const tempDirPath = tempDir.path\n const packageJsonFullPath = path.resolve(packagePath, 'package.json')\n\n // pkg utils\n const cwd = packagePath\n const config = await loadConfig({cwd})\n const strictOptions = parseStrictOptions(config?.strictOptions ?? {})\n const logger = createLogger()\n const pkg = await loadPkgWithReporting({cwd, logger, strict})\n\n logger.info('Using tsconfig: ', path.resolve(packagePath, tsconfigPath))\n\n // const exports = _resolveExports({pkg})\n const exports = parseExports({\n cwd,\n pkg,\n strict,\n logger,\n strictOptions,\n })\n\n try {\n const results: ExtractResult[] = []\n\n for (const exp of exports) {\n if (!exp.source || !exp.default) {\n continue\n }\n\n const typesPath = exp.default.replace(/\\.[mc]?js$/, '.d.ts')\n const result = await _doExtract({\n customTags,\n rules: rules ?? config?.extract?.rules,\n mainEntryPointFilePath: typesPath,\n packagePath,\n tempDirPath,\n tsconfigPath,\n packageJsonFullPath,\n bundledPackages,\n })\n\n results.push({\n exportPath: exp._path,\n tempDirPath,\n typesPath: typesPath,\n ...result,\n })\n }\n\n // Clean up temporary directory\n tempDir.cleanup()\n\n return {pkg, results}\n } catch (err) {\n // Clean up temporary directory\n tempDir.cleanup()\n\n throw err\n }\n}\n\nasync function _doExtract(options: {\n customTags: NonNullable<PkgConfigOptions['extract']>['customTags']\n rules: NonNullable<PkgConfigOptions['extract']>['rules']\n mainEntryPointFilePath: string\n packagePath: string\n tempDirPath: string\n tsconfigPath: string\n packageJsonFullPath: string\n bundledPackages: string[]\n}) {\n const {\n customTags,\n rules,\n mainEntryPointFilePath,\n packagePath,\n tempDirPath,\n tsconfigPath,\n packageJsonFullPath,\n bundledPackages,\n } = options\n\n const tsdocConfigFile = await createTSDocConfig({customTags: customTags || []})\n\n // Load the API Extractor configuration\n const extractorConfig: ExtractorConfig = ExtractorConfig.prepare({\n configObject: createApiExtractorConfig({\n mainEntryPointFilePath,\n messagesConfig: getExtractMessagesConfig({rules}),\n packagePath,\n tempDirPath,\n tsconfigPath,\n bundledPackages,\n }),\n configObjectFullPath: undefined,\n packageJson: undefined,\n packageJsonFullPath,\n tsdocConfigFile,\n })\n\n const messages: ExtractorMessage[] = []\n\n // Invoke API Extractor\n const extractorResult = Extractor.invoke(extractorConfig, {\n // Equivalent to the \"--local\" command-line parameter\n localBuild: true,\n // Equivalent to the \"--verbose\" command-line parameter\n showVerboseMessages: true,\n // handle messages\n messageCallback(message: ExtractorMessage) {\n messages.push(message)\n message.handled = true\n },\n })\n\n const apiPackage = ApiPackage.loadFromJsonFile(path.resolve(tempDirPath, 'api.json'))\n\n return {\n apiPackage,\n messages,\n succeeded: extractorResult.succeeded,\n }\n}\n","import fs from 'fs'\nimport util from 'util'\n\nexport const writeFile = util.promisify(fs.writeFile)\n","import {createClient, SanityDocument} from '@sanity/client'\nimport mkdirp from 'mkdirp'\nimport path from 'path'\n\nimport 
{APIDocument} from '../types'\nimport {writeFile} from './helpers'\n\n/**\n * @public\n */\nexport async function load(\n transformed: APIDocument[],\n opts: {\n cwd: string\n fs?: {path: string}\n sanity?: {projectId?: string; dataset?: string; token?: string}\n },\n): Promise<void> {\n // Write to file system\n if (opts.fs) {\n const dirPath = path.dirname(opts.fs.path)\n\n await mkdirp(dirPath)\n await writeFile(opts.fs.path, JSON.stringify(transformed, null, 2) + '\\n')\n }\n\n // Write to Sanity\n if (opts.sanity && opts.sanity.token) {\n await _loadToSanity(opts.sanity, transformed)\n }\n}\n\nasync function _loadToSanity(\n sanity: {projectId?: string; dataset?: string; token?: string},\n docs: APIDocument[],\n): Promise<void> {\n const client = createClient({\n ...sanity,\n apiVersion: '2022-10-01',\n token: sanity.token,\n useCdn: false,\n })\n\n let tx = client.transaction()\n\n for (const doc of docs) {\n tx = tx.createOrReplace(doc as SanityDocument)\n }\n\n await tx.commit()\n}\n","import crypto from 'crypto'\nimport slugify from 'slugify'\n\nimport {TransformContext} from './types'\n\nexport function _hash(key: string): string {\n return crypto.createHash('md5').update(key).digest('hex')\n}\n\nexport function _createExportMemberId(_ctx: TransformContext, key: string): string {\n return _hash(key)\n}\n\nexport function _isArray(val: unknown): val is unknown[] {\n return Array.isArray(val)\n}\n\nexport function _isRecord(val: unknown): val is Record<string, unknown> {\n return typeof val === 'object' && Boolean(val)\n}\n\nexport function _sanitizeName(str: string): string {\n // Since `Text` is part of the default browser scope, API extractor will append `_2` to other\n // implementations. So we need to replace it for the case of readable docs.\n if (str === 'Text_2') {\n return 'Text'\n }\n\n return str\n}\n\nexport function _slugify(str: string): string {\n return slugify(str)\n}\n\nexport function _parsePackageName(nameStr: string): [string | undefined, string] {\n const p = nameStr.split('/')\n\n const packageScope = p.length > 1 ? p[0] : undefined\n const packageName = p.length > 1 ? 
p[1] : p[0]\n\n if (!packageName) {\n throw new Error(`Invalid package name: ${nameStr}`)\n }\n\n return [packageScope, packageName]\n}\n","import {ApiClass} from '@microsoft/api-extractor-model'\n\nexport function _classIsReactComponentType(node: ApiClass): boolean {\n const extendsTypeTokens = node.extendsType?.excerpt.tokens\n\n const extendedName = extendsTypeTokens?.[1]?.text || ''\n\n if (\n extendedName.startsWith('React.Component') ||\n extendedName.startsWith('React_2.Component') ||\n extendedName.startsWith('Component') ||\n extendedName.startsWith('React.PureComponent') ||\n extendedName.startsWith('React_2.PureComponent') ||\n extendedName.startsWith('PureComponent')\n ) {\n return true\n }\n\n return false\n}\n","import {ExcerptToken} from '@microsoft/api-extractor-model'\n\nimport {SanityArrayItem} from '../_lib/sanity'\nimport {SerializedAPIToken} from '../types'\nimport {_hash} from './helpers'\nimport {TransformContext} from './types'\n\nexport function _transformTokens(\n ctx: TransformContext,\n tokens: ExcerptToken[],\n): SanityArrayItem<SerializedAPIToken>[] {\n const pkg = ctx.package\n\n if (!pkg) {\n throw new Error('transformTokens: missing package document')\n }\n\n return tokens.map((t, idx) => {\n if (t.kind === 'Content') {\n return {\n _type: 'api.token',\n _key: `token${idx}`,\n text: t.text,\n }\n }\n\n if (t.kind === 'Reference') {\n if (!t.canonicalReference || !t.canonicalReference.source) {\n return {\n _type: 'api.token',\n _key: `token${idx}`,\n text: t.text,\n }\n }\n\n return {\n _type: 'api.token',\n _key: `token${idx}`,\n text: t.text,\n member: {\n _type: 'reference',\n _ref: _getTokenId(t),\n },\n }\n }\n\n throw new Error(`tokens: unknown type: ${t.kind}`)\n })\n}\n\nfunction _getTokenId(t: ExcerptToken) {\n const _ref = t.canonicalReference?.toString()\n\n if (_ref === '!unknown') {\n return '$$unknown$$'\n }\n\n return `tsdoc-${_hash(_ref?.replace('~', '') || '')}`\n}\n","export const RELEASE_TAGS: Record<number, 'internal' | 'alpha' | 'beta' | 'public' | undefined> = {\n 0: undefined,\n 1: 'internal',\n 2: 'alpha',\n 3: 'beta',\n 4: 'public',\n}\n","import {\n DocBlockTag,\n DocCodeSpan,\n DocComment,\n DocErrorText,\n DocEscapedText,\n DocFencedCode,\n DocLinkTag,\n DocMemberIdentifier,\n DocMemberReference,\n DocNode,\n DocNodeTransforms,\n DocParagraph,\n DocPlainText,\n DocSection,\n StandardTags,\n} from '@microsoft/tsdoc'\n\nimport {PortableTextNode, PortableTextSpanNode} from '../_lib/portable-text'\nimport {SanityArrayItem} from '../_lib/sanity'\nimport {\n TSDocComment,\n TSDocCustomBlock,\n TSDocDeprecatedBlock,\n TSDocExampleBlock,\n TSDocModifierTag,\n TSDocParamBlock,\n TSDocRemarksBlock,\n TSDocReturnsBlock,\n TSDocSeeBlock,\n} from '../types'\nimport {_isArray, _isRecord} from './helpers'\n\n/** Regex pattern that matches Markdown headers: h1 - h6 */\nconst RE_MARKDOWN_HEADER = /^([#]{1,6})\\s(.*)$/\n\n/**\n * @internal\n */\nfunction _transformDocNode(docNode: DocNode): PortableTextNode | undefined {\n // Block = \"Block\",\n // BlockTag = \"BlockTag\",\n // Excerpt = \"Excerpt\",\n // FencedCode = \"FencedCode\",\n // CodeSpan = \"CodeSpan\",\n // Comment = \"Comment\",\n // DeclarationReference = \"DeclarationReference\",\n // ErrorText = \"ErrorText\",\n // EscapedText = \"EscapedText\",\n // HtmlAttribute = \"HtmlAttribute\",\n // HtmlEndTag = \"HtmlEndTag\",\n // HtmlStartTag = \"HtmlStartTag\",\n // InheritDocTag = \"InheritDocTag\",\n // InlineTag = \"InlineTag\",\n // LinkTag = \"LinkTag\",\n // MemberIdentifier = 
\"MemberIdentifier\",\n // MemberReference = \"MemberReference\",\n // MemberSelector = \"MemberSelector\",\n // MemberSymbol = \"MemberSymbol\",\n // Paragraph = \"Paragraph\",\n // ParamBlock = \"ParamBlock\",\n // ParamCollection = \"ParamCollection\",\n // PlainText = \"PlainText\",\n // Section = \"Section\",\n // SoftBreak = \"SoftBreak\"\n\n if (docNode.kind === 'CodeSpan') {\n return {\n _type: 'span',\n marks: ['code'],\n text: (docNode as DocCodeSpan).code,\n }\n }\n\n if (docNode.kind === 'ErrorText') {\n return {\n _type: 'span',\n marks: [],\n text: (docNode as DocErrorText).text,\n }\n }\n\n if (docNode.kind === 'EscapedText') {\n return {\n _type: 'span',\n marks: [],\n text: (docNode as DocEscapedText).decodedText,\n }\n }\n\n if (docNode.kind === 'FencedCode') {\n const node = docNode as DocFencedCode\n\n return {\n _type: 'code',\n code: node.code,\n language: node.language,\n }\n }\n\n if (docNode.kind === 'LinkTag') {\n const linkTag: DocLinkTag = docNode as DocLinkTag\n\n if (linkTag.urlDestination) {\n const linkText: string = linkTag.linkText || linkTag.urlDestination\n\n return {\n _type: 'span',\n _markDef: {\n _type: 'link',\n href: linkTag.urlDestination,\n },\n marks: [],\n text: linkText,\n }\n } else {\n let identifier = ''\n let fullReferenceURL = ''\n\n if (linkTag.codeDestination) {\n // @todo: the library should provide a default rendering for this\n const memberReferences: ReadonlyArray<DocMemberReference> =\n linkTag.codeDestination.memberReferences\n\n if (memberReferences.length > 0) {\n const memberIdentifier: DocMemberIdentifier | undefined =\n memberReferences[memberReferences.length - 1]?.memberIdentifier\n\n fullReferenceURL = memberReferences\n .map((memberReference) => memberReference.memberIdentifier?.identifier)\n .filter((identifier) => Boolean(identifier))\n .join('/')\n\n if (memberIdentifier) {\n identifier = memberIdentifier.identifier\n }\n }\n }\n\n const linkText: string = linkTag.linkText || identifier || '???'\n\n return {\n _type: 'span',\n _markDef: {\n _type: 'link',\n href: fullReferenceURL,\n },\n marks: [],\n text: linkText,\n }\n }\n }\n\n if (docNode.kind === 'Paragraph') {\n const transformedParagraph: DocParagraph = DocNodeTransforms.trimSpacesInParagraph(\n docNode as DocParagraph,\n )\n\n if (\n transformedParagraph.nodes.length === 1 &&\n transformedParagraph.nodes[0]?.kind === 'SoftBreak'\n ) {\n return undefined\n }\n\n const children = _transformDocCommentContent(transformedParagraph)\n\n if (!children) return undefined\n\n // Find mark defs\n const markDefs: SanityArrayItem<PortableTextSpanNode['_markDef']>[] = []\n\n let style = 'normal'\n\n // Check if the first span starts with a Markdown header prefix (#)\n if (children[0]?._type === 'span') {\n const headerMatch = RE_MARKDOWN_HEADER.exec(children[0].text)\n\n if (headerMatch) {\n const child: SanityArrayItem<PortableTextNode> = {\n ...children[0],\n text: headerMatch[2]!,\n }\n\n style = `h${headerMatch[1]?.length}`\n\n children[0] = child\n }\n }\n\n for (const child of children) {\n if (child._type === 'span' && _isRecord(child._markDef)) {\n const markDefKey = `${child._markDef._type}${markDefs.length}`\n\n child._markDef._key = markDefKey\n\n if (_isArray(child.marks)) {\n child.marks.push(markDefKey)\n }\n\n markDefs.push({_key: markDefKey, ...child._markDef})\n\n delete child._markDef\n }\n }\n\n if (children.length === 0) {\n children.push({\n _type: 'span',\n _key: '0',\n marks: [],\n text: '',\n })\n }\n\n return {\n _type: 'block',\n style,\n 
children,\n markDefs,\n }\n }\n\n if (docNode.kind === 'PlainText') {\n return {\n _type: 'span',\n marks: [],\n text: (docNode as DocPlainText).text,\n }\n }\n\n if (docNode.kind === 'SoftBreak') {\n return {\n _type: 'span',\n text: '\\n',\n }\n }\n\n if (docNode.kind === 'BlockTag') {\n const node = docNode as DocBlockTag\n\n return {\n _type: 'span',\n marks: [],\n text: node.tagName,\n }\n }\n\n throw new Error(`unknown doc node type: ${docNode.kind}`)\n}\n\nexport function _transformDocCommentContent(\n section: DocSection | DocParagraph,\n): SanityArrayItem<PortableTextNode>[] | undefined {\n if (!section.nodes.length) return undefined\n\n const nodes: SanityArrayItem<PortableTextNode>[] = section.nodes\n .map((node, idx) => {\n if (idx === 0 && node.kind === 'SoftBreak') {\n return undefined\n }\n\n const transformedNode = _transformDocNode(node)\n\n return transformedNode && {_key: `node${idx}`, ...transformedNode}\n })\n .filter(Boolean) as SanityArrayItem<PortableTextNode>[]\n\n return nodes.length ? nodes : undefined\n}\n\nexport function _transformDocComment(docComment: DocComment): TSDocComment {\n // Summary\n const summary = _transformDocCommentContent(docComment.summarySection)\n\n // Parameters\n const parameters: SanityArrayItem<TSDocParamBlock>[] | undefined = docComment.params.blocks.length\n ? docComment.params.blocks.map((paramBlock, idx) => ({\n _type: 'tsdoc.paramBlock',\n _key: `paramBlock${idx}`,\n name: paramBlock.parameterName,\n content: _transformDocCommentContent(paramBlock.content),\n }))\n : undefined\n\n // Returns\n const returns: TSDocReturnsBlock | undefined = docComment.returnsBlock && {\n _type: 'tsdoc.returnsBlock',\n content: _transformDocCommentContent(docComment.returnsBlock.content),\n }\n\n // `@remarks` block\n const remarks: TSDocRemarksBlock | undefined = docComment.remarksBlock && {\n _type: 'tsdoc.remarksBlock',\n content: _transformDocCommentContent(docComment.remarksBlock.content),\n }\n\n const exampleBlocks: SanityArrayItem<TSDocExampleBlock>[] = []\n const customBlocks: SanityArrayItem<TSDocCustomBlock>[] = []\n\n // Custom blocks\n for (let i = 0; i < docComment.customBlocks.length; i += 1) {\n const customBlock = docComment.customBlocks[i]!\n\n // This is a `@example` block\n if (customBlock.blockTag.tagNameWithUpperCase === StandardTags.example.tagNameWithUpperCase) {\n exampleBlocks.push({\n _type: 'tsdoc.exampleBlock',\n _key: `exampleBlock${i}`,\n content: _transformDocCommentContent(customBlock.content),\n })\n } else {\n customBlocks.push({\n _type: 'tsdoc.customBlock',\n _key: `customBlock${i}`,\n tag: customBlock.blockTag.tagName,\n content: _transformDocCommentContent(customBlock.content),\n })\n }\n }\n\n // `@see` blocks\n const seeBlocks: SanityArrayItem<TSDocSeeBlock>[] | undefined = docComment.seeBlocks.length\n ? docComment.seeBlocks.map((seeBlock, idx) => ({\n _type: 'tsdoc.seeBlock',\n _key: `seeBlock${idx}`,\n content: _transformDocCommentContent(seeBlock.content),\n }))\n : undefined\n\n // `@deprecated` block\n const deprecated: TSDocDeprecatedBlock | undefined = docComment.deprecatedBlock && {\n _type: 'tsdoc.deprecatedBlock',\n content: _transformDocCommentContent(docComment.deprecatedBlock.content),\n }\n\n // Modifiers\n const modifierTags: SanityArrayItem<TSDocModifierTag>[] | undefined = docComment.modifierTagSet\n .nodes.length\n ? 
docComment.modifierTagSet.nodes.map((modifierTag, idx) => ({\n _type: 'tsdoc.modifierTag',\n _key: `modifierTag${idx}`,\n name: modifierTag.tagName,\n }))\n : undefined\n\n return {\n _type: 'tsdoc.docComment',\n customBlocks: customBlocks.length > 0 ? customBlocks : undefined,\n deprecated,\n exampleBlocks: exampleBlocks.length > 0 ? exampleBlocks : undefined,\n modifierTags,\n parameters,\n remarks,\n returns,\n seeBlocks,\n summary,\n }\n}\n","import {\n ApiCallSignature,\n ApiConstructor,\n ApiFunction,\n ApiIndexSignature,\n Parameter,\n} from '@microsoft/api-extractor-model'\nimport {DocSection} from '@microsoft/tsdoc'\n\nimport {SanityArrayItem} from '../_lib/sanity'\nimport {SerializedAPIParameter} from '../types'\nimport {_transformTokens} from './_transformTokens'\nimport {RELEASE_TAGS} from './constants'\nimport {_transformDocCommentContent} from './transformDocComment'\nimport {TransformContext} from './types'\n\nexport function _transformParameter(\n ctx: TransformContext,\n node: ApiCallSignature | ApiConstructor | ApiFunction | ApiIndexSignature,\n param: Parameter,\n idx: number,\n): SanityArrayItem<SerializedAPIParameter> {\n const tsDocComment = param.tsdocParamBlock?.content\n\n return {\n _type: 'api.parameter',\n _key: `param${idx}`,\n comment: tsDocComment\n ? {\n _type: 'tsdoc.docComment',\n summary: _transformDocCommentContent(tsDocComment as unknown as DocSection),\n }\n : undefined,\n name: param.name,\n isOptional: param.isOptional,\n releaseTag: RELEASE_TAGS[node.releaseTag],\n type: _transformTokens(\n ctx,\n node.excerptTokens.slice(\n param.parameterTypeExcerpt.tokenRange.startIndex,\n param.parameterTypeExcerpt.tokenRange.endIndex,\n ),\n ),\n }\n}\n","import {\n ApiCallSignature,\n ApiClass,\n ApiFunction,\n ApiInterface,\n ApiTypeAlias,\n TypeParameter,\n} from '@microsoft/api-extractor-model'\n\nimport {SanityArrayItem} from '../_lib/sanity'\nimport {SerializedAPITypeParameter} from '../types'\nimport {_transformTokens} from './_transformTokens'\nimport {TransformContext} from './types'\n\nexport function _transformTypeParameter(\n ctx: TransformContext,\n node: ApiCallSignature | ApiClass | ApiFunction | ApiInterface | ApiTypeAlias,\n p: TypeParameter,\n idx: number,\n): SanityArrayItem<SerializedAPITypeParameter> {\n return {\n _type: 'api.typeParameter',\n _key: `typeParameter${idx}`,\n name: p.name,\n constraintType: _transformTokens(\n ctx,\n node.excerptTokens.slice(\n p.constraintExcerpt.tokenRange.startIndex,\n p.constraintExcerpt.tokenRange.endIndex,\n ),\n ),\n defaultType: _transformTokens(\n ctx,\n node.excerptTokens.slice(\n p.defaultTypeExcerpt.tokenRange.startIndex,\n p.defaultTypeExcerpt.tokenRange.endIndex,\n ),\n ),\n }\n}\n","import {\n ApiClass,\n ApiConstructor,\n ApiItem,\n ApiMethod,\n ApiProperty,\n} from '@microsoft/api-extractor-model'\n\nimport {SerializedAPIClass, SerializedAPIClassMember} from '../types'\nimport {_classIsReactComponentType} from './_classIsReactComponentType'\nimport {_transformParameter} from './_transformParameter'\nimport {_transformTokens} from './_transformTokens'\nimport {_transformTypeParameter} from './_transformTypeParameter'\nimport {RELEASE_TAGS} from './constants'\nimport {_sanitizeName, _slugify} from './helpers'\nimport {_transformDocComment} from './transformDocComment'\nimport {TransformContext} from './types'\n\n/**\n * @internal\n */\nexport function _transformClass(ctx: TransformContext, node: ApiClass): SerializedAPIClass {\n if (!ctx.export) {\n throw new Error('transformClass: missing 
`export` document')\n }\n\n if (!ctx.package) {\n throw new Error('transformClass: missing `package` document')\n }\n\n if (!ctx.release) {\n throw new Error('transformClass: missing `release` document')\n }\n\n const docComment = node.tsdocComment\n const name = _sanitizeName(node.name)\n const isReactComponentType = _classIsReactComponentType(node)\n\n return {\n _type: 'api.class',\n comment: docComment ? _transformDocComment(docComment) : undefined,\n export: {_type: 'reference', _ref: ctx.export._id},\n isReactComponentType,\n members: node.members.map((m, idx) => ({\n _key: `member${idx}`,\n ..._transformClassMember(ctx, m),\n })),\n name,\n package: {_type: 'reference', _ref: ctx.package._id},\n release: {_type: 'reference', _ref: ctx.release._id},\n releaseTag: RELEASE_TAGS[node.releaseTag],\n slug: {_type: 'slug', current: _slugify(name)},\n typeParameters: node.typeParameters.map((p, idx) => _transformTypeParameter(ctx, node, p, idx)),\n }\n}\n\nfunction _transformClassMember(ctx: TransformContext, m: ApiItem): SerializedAPIClassMember {\n if (m.kind === 'Constructor') {\n const mem = m as ApiConstructor\n const docComment = mem.tsdocComment\n\n return {\n _type: 'api.constructor',\n releaseTag: RELEASE_TAGS[mem.releaseTag],\n comment: docComment ? _transformDocComment(docComment) : undefined,\n parameters: mem.parameters.map((p, idx) => _transformParameter(ctx, mem, p, idx)),\n }\n }\n\n if (m.kind === 'Method') {\n const mem = m as ApiMethod\n const docComment = mem.tsdocComment\n\n return {\n _type: 'api.method',\n comment: docComment ? _transformDocComment(docComment) : undefined,\n name: mem.name,\n isOptional: mem.isOptional,\n isStatic: mem.isStatic,\n parameters: mem.parameters.map((p, idx) => _transformParameter(ctx, mem, p, idx)),\n releaseTag: RELEASE_TAGS[mem.releaseTag],\n returnType: _transformTokens(\n ctx,\n mem.excerptTokens.slice(\n mem.returnTypeExcerpt.tokenRange.startIndex,\n mem.returnTypeExcerpt.tokenRange.endIndex,\n ),\n ),\n typeParameters: mem.typeParameters.map((p, idx) => _transformTypeParameter(ctx, mem, p, idx)),\n }\n }\n\n if (m.kind === 'Property') {\n const mem = m as ApiProperty\n const docComment = mem.tsdocComment\n\n return {\n _type: 'api.property',\n comment: docComment ? 
_transformDocComment(docComment) : undefined,\n name: mem.name,\n isEventProperty: mem.isEventProperty,\n isOptional: mem.isOptional,\n isStatic: mem.isStatic,\n releaseTag: RELEASE_TAGS[mem.releaseTag],\n type: _transformTokens(\n ctx,\n mem.excerptTokens.slice(\n mem.propertyTypeExcerpt.tokenRange.startIndex,\n mem.propertyTypeExcerpt.tokenRange.endIndex,\n ),\n ),\n }\n }\n\n throw new Error(`Unknown class member kind: ${m.kind}`)\n}\n","import {ApiEnum, ApiEnumMember, ApiItem} from '@microsoft/api-extractor-model'\n\nimport {SanityArrayItem} from '../_lib/sanity'\nimport {APIEnumMember, SerializedAPIEnum} from '../types'\nimport {RELEASE_TAGS} from './constants'\nimport {_hash, _slugify} from './helpers'\nimport {_transformDocComment} from './transformDocComment'\nimport {TransformContext} from './types'\n\n/**\n * @internal\n */\nexport function _transformEnum(ctx: TransformContext, node: ApiEnum): SerializedAPIEnum {\n if (!ctx.export) {\n throw new Error('transformEnum: missing `export` document')\n }\n\n if (!ctx.package) {\n throw new Error('transformEnum: missing `package` document')\n }\n\n if (!ctx.release) {\n throw new Error('transformEnum: missing `release` document')\n }\n\n const docComment = node.tsdocComment\n\n return {\n _type: 'api.enum',\n comment: docComment ? _transformDocComment(docComment) : undefined,\n export: {_type: 'reference', _ref: ctx.export._id},\n members: node.members.map((m) => _transformEnumMember(ctx, m)),\n name: node.name,\n package: {_type: 'reference', _ref: ctx.package._id},\n release: {_type: 'reference', _ref: ctx.release._id},\n releaseTag: RELEASE_TAGS[node.releas