UNPKG

novel-segment

Version: (not captured in this extract)

Chinese word segmentation — 簡繁中文分词模块，以網路小說為樣本 (Simplified/Traditional Chinese word-segmentation module, trained on web-novel text samples)

39 lines, 2.82 kB
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });

// Barrel module: re-exports every tokenizer/optimizer submodule as a
// namespace export. (Compiled tsc output, reformatted for readability.)
//
// Pre-declare all named exports up front — the standard tsc emit pattern,
// which keeps every export slot defined even under circular requires.
exports.ZhuyinTokenizer = exports.ZhtSynonymOptimizer = exports.ZhRadicalTokenizer =
    exports.WildcardTokenizer = exports.URLTokenizer = exports.SingleTokenizer =
        exports.PunctuationTokenizer = exports.JpSimpleTokenizer = exports.ForeignTokenizer =
            exports.ForeignOptimizer = exports.EmailOptimizer = exports.DictTokenizer =
                exports.DictOptimizer = exports.DatetimeOptimizer = exports.ChsNameTokenizer =
                    exports.ChsNameOptimizer = exports.AdjectiveOptimizer = void 0;

const tslib_1 = require("tslib");

// Each submodule is loaded as a full namespace object (ESM/CJS-interop-safe
// via tslib.__importStar) and re-exported under its own name. The load order
// below matches the original emit exactly.
const AdjectiveOptimizer = tslib_1.__importStar(require("./submod/AdjectiveOptimizer"));
exports.AdjectiveOptimizer = AdjectiveOptimizer;

const ChsNameOptimizer = tslib_1.__importStar(require("./submod/ChsNameOptimizer"));
exports.ChsNameOptimizer = ChsNameOptimizer;

const ChsNameTokenizer = tslib_1.__importStar(require("./submod/ChsNameTokenizer"));
exports.ChsNameTokenizer = ChsNameTokenizer;

const DatetimeOptimizer = tslib_1.__importStar(require("./submod/DatetimeOptimizer"));
exports.DatetimeOptimizer = DatetimeOptimizer;

const DictOptimizer = tslib_1.__importStar(require("./submod/DictOptimizer"));
exports.DictOptimizer = DictOptimizer;

const DictTokenizer = tslib_1.__importStar(require("./submod/DictTokenizer"));
exports.DictTokenizer = DictTokenizer;

const EmailOptimizer = tslib_1.__importStar(require("./submod/EmailOptimizer"));
exports.EmailOptimizer = EmailOptimizer;

const ForeignOptimizer = tslib_1.__importStar(require("./submod/ForeignOptimizer"));
exports.ForeignOptimizer = ForeignOptimizer;

const ForeignTokenizer = tslib_1.__importStar(require("./submod/ForeignTokenizer"));
exports.ForeignTokenizer = ForeignTokenizer;

const JpSimpleTokenizer = tslib_1.__importStar(require("./submod/JpSimpleTokenizer"));
exports.JpSimpleTokenizer = JpSimpleTokenizer;

const PunctuationTokenizer = tslib_1.__importStar(require("./submod/PunctuationTokenizer"));
exports.PunctuationTokenizer = PunctuationTokenizer;

const SingleTokenizer = tslib_1.__importStar(require("./submod/SingleTokenizer"));
exports.SingleTokenizer = SingleTokenizer;

const URLTokenizer = tslib_1.__importStar(require("./submod/URLTokenizer"));
exports.URLTokenizer = URLTokenizer;

const WildcardTokenizer = tslib_1.__importStar(require("./submod/WildcardTokenizer"));
exports.WildcardTokenizer = WildcardTokenizer;

const ZhRadicalTokenizer = tslib_1.__importStar(require("./submod/ZhRadicalTokenizer"));
exports.ZhRadicalTokenizer = ZhRadicalTokenizer;

const ZhtSynonymOptimizer = tslib_1.__importStar(require("./submod/ZhtSynonymOptimizer"));
exports.ZhtSynonymOptimizer = ZhtSynonymOptimizer;

const ZhuyinTokenizer = tslib_1.__importStar(require("./submod/ZhuyinTokenizer"));
exports.ZhuyinTokenizer = ZhuyinTokenizer;
//# sourceMappingURL=submod.js.map