gpt-tokenizer
A pure JavaScript implementation of a BPE tokenizer (Encoder/Decoder) for GPT-2 / GPT-3 / GPT-4 and other OpenAI models
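For orientation, here is a minimal sketch of the basic encode/decode round trip. The bare 'gpt-tokenizer' entry point is assumed here; the package also publishes model-specific entry points, and the exact token IDs depend on the encoding in use.

// Minimal round trip: text -> token IDs -> text (sketch, entry point assumed).
const { encode, decode } = require('gpt-tokenizer');

const tokens = encode('Hello, world!'); // string -> array of numeric token IDs
const text = decode(tokens);            // token IDs -> original string

console.log(tokens);
console.log(text); // 'Hello, world!'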
specialTokens.js (JavaScript)
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.EndOfPrompt = exports.ImSep = exports.ImEnd = exports.ImStart = exports.FimSuffix = exports.FimMiddle = exports.FimPrefix = exports.EndOfText = void 0;
exports.EndOfText = '<|endoftext|>';
exports.FimPrefix = '<|fim_prefix|>';
exports.FimMiddle = '<|fim_middle|>';
exports.FimSuffix = '<|fim_suffix|>';
exports.ImStart = '<|im_start|>'; // 100264
exports.ImEnd = '<|im_end|>'; // 100265
exports.ImSep = '<|im_sep|>'; // 100266
exports.EndOfPrompt = '<|endofprompt|>';
//# sourceMappingURL=specialTokens.js.map
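The exported strings above can be composed into a ChatML-style prompt. A small sketch follows, assuming a relative require of this compiled file; the exact per-model chat layout (for example, whether <|im_sep|> or a newline separates role and content) varies, so this is illustrative only, not the library's canonical chat format.

// Sketch: wrapping one chat turn in the ChatML delimiters defined above.
// The IDs 100264 (ImStart), 100265 (ImEnd) and 100266 (ImSep) noted in the
// comments refer to the cl100k_base encoding.
const { ImStart, ImEnd, ImSep } = require('./specialTokens');

function chatTurn(role, content) {
  // One possible layout: <|im_start|>role<|im_sep|>content<|im_end|>
  return `${ImStart}${role}${ImSep}${content}${ImEnd}`;
}

console.log(chatTurn('user', 'Hello!'));
// -> <|im_start|>user<|im_sep|>Hello!<|im_end|>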