mastra-browser-rag
The Retrieval-Augmented Generation (RAG) module contains document processing and embedding utilities.
1,571 lines (1,564 loc) • 96.1 kB
JavaScript
import { parse } from 'node-html-better-parser';
import { encodingForModel, getEncoding } from 'js-tiktoken';
import { CohereRelevanceScorer, MastraAgentRelevanceScorer } from '@mastra/core/relevance';
import { createTool } from '@mastra/core/tools';
import { z } from 'zod';
import { embed } from 'ai';
// src/document/types.ts
var Language = /* @__PURE__ */ ((Language2) => {
Language2["CPP"] = "cpp";
Language2["GO"] = "go";
Language2["JAVA"] = "java";
Language2["KOTLIN"] = "kotlin";
Language2["JS"] = "js";
Language2["TS"] = "ts";
Language2["PHP"] = "php";
Language2["PROTO"] = "proto";
Language2["PYTHON"] = "python";
Language2["RST"] = "rst";
Language2["RUBY"] = "ruby";
Language2["RUST"] = "rust";
Language2["SCALA"] = "scala";
Language2["SWIFT"] = "swift";
Language2["MARKDOWN"] = "markdown";
Language2["LATEX"] = "latex";
Language2["HTML"] = "html";
Language2["SOL"] = "sol";
Language2["CSHARP"] = "csharp";
Language2["COBOL"] = "cobol";
Language2["C"] = "c";
Language2["LUA"] = "lua";
Language2["PERL"] = "perl";
Language2["HASKELL"] = "haskell";
Language2["ELIXIR"] = "elixir";
Language2["POWERSHELL"] = "powershell";
return Language2;
})(Language || {});
// src/document/transformers/text.ts
var TextTransformer = class {
size;
overlap;
lengthFunction;
keepSeparator;
addStartIndex;
stripWhitespace;
constructor({
size = 4e3,
overlap = 200,
lengthFunction = (text) => text.length,
keepSeparator = false,
addStartIndex = false,
stripWhitespace = true
}) {
if (overlap > size) {
throw new Error(`Got a larger chunk overlap (${overlap}) than chunk size (${size}), should be smaller.`);
}
this.size = size;
this.overlap = overlap;
this.lengthFunction = lengthFunction;
this.keepSeparator = keepSeparator;
this.addStartIndex = addStartIndex;
this.stripWhitespace = stripWhitespace;
}
setAddStartIndex(value) {
this.addStartIndex = value;
}
createDocuments(texts, metadatas) {
const _metadatas = metadatas || Array(texts.length).fill({});
const documents = [];
texts.forEach((text, i) => {
let index = 0;
let previousChunkLen = 0;
this.splitText({ text }).forEach((chunk) => {
const metadata = { ..._metadatas[i] };
if (this.addStartIndex) {
const offset = index + previousChunkLen - this.overlap;
index = text.indexOf(chunk, Math.max(0, offset));
metadata.startIndex = index;
previousChunkLen = chunk.length;
}
documents.push({
text: chunk,
metadata
});
});
});
return documents;
}
splitDocuments(documents) {
const texts = [];
const metadatas = [];
for (const doc of documents) {
texts.push(doc.text);
metadatas.push(doc.metadata);
}
return this.createDocuments(texts, metadatas);
}
transformDocuments(documents) {
const texts = [];
const metadatas = [];
for (const doc of documents) {
texts.push(doc.text);
metadatas.push(doc.metadata);
}
return this.createDocuments(texts, metadatas);
}
joinDocs(docs, separator) {
let text = docs.join(separator);
if (this.stripWhitespace) {
text = text.trim();
}
return text === "" ? null : text;
}
mergeSplits(splits, separator) {
const docs = [];
let currentDoc = [];
let total = 0;
for (const d of splits) {
const len = this.lengthFunction(d);
const separatorLen = separator ? this.lengthFunction(separator) : 0;
if (total + len + (currentDoc.length > 0 ? separatorLen : 0) > this.size) {
if (total > this.size) {
console.warn(`Created a chunk of size ${total}, which is longer than the specified ${this.size}`);
}
if (currentDoc.length > 0) {
const doc = this.joinDocs(currentDoc, separator);
if (doc !== null) {
docs.push(doc);
}
if (this.overlap > 0) {
let overlapContent = [];
let overlapSize = 0;
for (let i = currentDoc.length - 1; i >= 0; i--) {
const piece = currentDoc[i];
const pieceLen = this.lengthFunction(piece);
if (overlapSize + pieceLen > this.overlap) {
break;
}
overlapContent.unshift(piece);
overlapSize += pieceLen + (overlapContent.length > 1 ? separatorLen : 0);
}
currentDoc = overlapContent;
total = overlapSize;
} else {
currentDoc = [];
total = 0;
}
}
}
currentDoc.push(d);
total += len + (currentDoc.length > 1 ? separatorLen : 0);
}
if (currentDoc.length > 0) {
const doc = this.joinDocs(currentDoc, separator);
if (doc !== null) {
docs.push(doc);
}
}
return docs;
}
};
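// Worked trace of the overlap handling above (illustrative, assuming size = 7, overlap = 3,
// and the default character-length function): mergeSplits(["aaa", "bbb", "ccc"], " ") first
// accumulates "aaa" and "bbb" (7 characters including the separator), flushes them as
// "aaa bbb", carries "bbb" forward as overlap, and finally emits "bbb ccc" as the second chunk.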
// src/document/transformers/character.ts
function splitTextWithRegex(text, separator, keepSeparator) {
if (!separator) {
return text.split("");
}
if (!keepSeparator) {
return text.split(new RegExp(separator)).filter((s) => s !== "");
}
if (!text) {
return [];
}
const splits = text.split(new RegExp(`(${separator})`));
const result = [];
if (keepSeparator === "end") {
for (let i = 0; i < splits.length - 1; i += 2) {
if (i + 1 < splits.length) {
const chunk = splits[i] + (splits[i + 1] || "");
if (chunk) result.push(chunk);
}
}
if (splits.length % 2 === 1 && splits[splits.length - 1]) {
result.push(splits?.[splits.length - 1]);
}
} else {
if (splits[0]) result.push(splits[0]);
for (let i = 1; i < splits.length - 1; i += 2) {
const separator2 = splits[i];
const text2 = splits[i + 1];
if (separator2 && text2) {
result.push(separator2 + text2);
}
}
}
return result.filter((s) => s !== "");
}
var CharacterTransformer = class extends TextTransformer {
separator;
isSeparatorRegex;
constructor({
separator = "\n\n",
isSeparatorRegex = false,
options = {}
}) {
super(options);
this.separator = separator;
this.isSeparatorRegex = isSeparatorRegex;
}
splitText({ text }) {
const separator = this.isSeparatorRegex ? this.separator : this.separator.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const initialSplits = splitTextWithRegex(text, separator, this.keepSeparator);
const chunks = [];
for (const split of initialSplits) {
if (this.lengthFunction(split) <= this.size) {
chunks.push(split);
} else {
const subChunks = this.__splitChunk(split);
chunks.push(...subChunks);
}
}
return chunks;
}
__splitChunk(text) {
const chunks = [];
let currentPosition = 0;
while (currentPosition < text.length) {
let chunkEnd = currentPosition;
while (chunkEnd < text.length && this.lengthFunction(text.slice(currentPosition, chunkEnd + 1)) <= this.size) {
chunkEnd++;
}
const currentChunk = text.slice(currentPosition, chunkEnd);
const chunkLength = this.lengthFunction(currentChunk);
chunks.push(currentChunk);
if (chunkEnd >= text.length) break;
currentPosition += Math.max(1, chunkLength - this.overlap);
}
return chunks;
}
};
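// Usage sketch (assumed inputs; `longText` is a placeholder string): split on blank lines
// into roughly 500-character chunks with 50 characters of overlap.
//
//   const characterSplitter = new CharacterTransformer({
//     separator: "\n\n",
//     options: { size: 500, overlap: 50 },
//   });
//   const pieces = characterSplitter.splitText({ text: longText });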
var RecursiveCharacterTransformer = class _RecursiveCharacterTransformer extends TextTransformer {
separators;
isSeparatorRegex;
constructor({
separators,
isSeparatorRegex = false,
options = {}
}) {
super(options);
this.separators = separators || ["\n\n", "\n", " ", ""];
this.isSeparatorRegex = isSeparatorRegex;
}
_splitText(text, separators) {
const finalChunks = [];
let separator = separators?.[separators.length - 1];
let newSeparators = [];
for (let i = 0; i < separators.length; i++) {
const s = separators[i];
const _separator2 = this.isSeparatorRegex ? s : s?.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
if (s === "") {
separator = s;
break;
}
if (new RegExp(_separator2).test(text)) {
separator = s;
newSeparators = separators.slice(i + 1);
break;
}
}
const _separator = this.isSeparatorRegex ? separator : separator?.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const splits = splitTextWithRegex(text, _separator, this.keepSeparator);
const goodSplits = [];
const mergeSeparator = this.keepSeparator ? "" : separator;
for (const s of splits) {
if (this.lengthFunction(s) < this.size) {
goodSplits.push(s);
} else {
if (goodSplits.length > 0) {
const mergedText = this.mergeSplits(goodSplits, mergeSeparator);
finalChunks.push(...mergedText);
goodSplits.length = 0;
}
if (newSeparators.length === 0) {
finalChunks.push(s);
} else {
const otherInfo = this._splitText(s, newSeparators);
finalChunks.push(...otherInfo);
}
}
}
if (goodSplits.length > 0) {
const mergedText = this.mergeSplits(goodSplits, mergeSeparator);
finalChunks.push(...mergedText);
}
return finalChunks;
}
splitText({ text }) {
return this._splitText(text, this.separators);
}
static fromLanguage(language, options = {}) {
const separators = _RecursiveCharacterTransformer.getSeparatorsForLanguage(language);
return new _RecursiveCharacterTransformer({ separators, isSeparatorRegex: true, options });
}
static getSeparatorsForLanguage(language) {
switch (language) {
case "markdown" /* MARKDOWN */:
return [
// First, try to split along Markdown headings (levels 1 through 6)
"\n#{1,6} ",
// End of code block
"```\n",
// Horizontal lines
"\n\\*\\*\\*+\n",
"\n---+\n",
"\n___+\n",
// Note that the patterns above only match horizontal lines written as
// three or more consecutive *, -, or _ on their own line
"\n\n",
"\n",
" ",
""
];
case "cpp" /* CPP */:
case "c" /* C */:
return [
"\nclass ",
"\nvoid ",
"\nint ",
"\nfloat ",
"\ndouble ",
"\nif ",
"\nfor ",
"\nwhile ",
"\nswitch ",
"\ncase ",
"\n\n",
"\n",
" ",
""
];
case "ts" /* TS */:
return [
"\nenum ",
"\ninterface ",
"\nnamespace ",
"\ntype ",
"\nclass ",
"\nfunction ",
"\nconst ",
"\nlet ",
"\nvar ",
"\nif ",
"\nfor ",
"\nwhile ",
"\nswitch ",
"\ncase ",
"\ndefault ",
"\n\n",
"\n",
" ",
""
];
// ... (add other language cases following the same pattern)
default:
throw new Error(`Language ${language} is not supported! Please choose from ${Object.values(Language)}`);
}
}
};
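// Usage sketch: language-aware splitting via the static factory. Only the language cases
// shown above are spelled out in this excerpt (the rest are elided); unsupported languages
// throw. `sourceCode` is a placeholder string.
//
//   const tsSplitter = RecursiveCharacterTransformer.fromLanguage(Language.TS, {
//     size: 512,
//     overlap: 64,
//   });
//   const codeChunks = tsSplitter.splitText({ text: sourceCode });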
var HTMLHeaderTransformer = class {
headersToSplitOn;
returnEachElement;
constructor(headersToSplitOn, returnEachElement = false) {
this.returnEachElement = returnEachElement;
this.headersToSplitOn = [...headersToSplitOn].sort();
}
splitText({ text }) {
const root = parse(text);
const headerFilter = this.headersToSplitOn.map(([header]) => header);
const headerMapping = Object.fromEntries(this.headersToSplitOn);
const elements = [];
const headers = root.querySelectorAll(headerFilter.join(","));
headers.forEach((header) => {
let content = "";
const parentNode = header.parentNode;
if (parentNode && parentNode.childNodes) {
let foundHeader = false;
for (const node of parentNode.childNodes) {
if (node === header) {
foundHeader = true;
continue;
}
if (foundHeader && node.tagName && headerFilter.includes(node.tagName.toLowerCase())) {
break;
}
if (foundHeader) {
content += this.getTextContent(node) + " ";
}
}
}
elements.push({
url: text,
xpath: this.getXPath(header),
content: content.trim(),
metadata: {
[headerMapping?.[header.tagName.toLowerCase()]]: header.text || ""
}
});
});
return this.returnEachElement ? elements.map(
(el) => ({
text: el.content,
metadata: { ...el.metadata, xpath: el.xpath }
})
) : this.aggregateElementsToChunks(elements);
}
getXPath(element) {
if (!element) return "";
const parts = [];
let current = element;
while (current && current.tagName) {
let index = 1;
const parent = current.parentNode;
if (parent && parent.childNodes) {
for (const sibling of parent.childNodes) {
if (sibling === current) break;
if (sibling.tagName === current.tagName) {
index++;
}
}
}
parts.unshift(`${current.tagName.toLowerCase()}[${index}]`);
current = current.parentNode;
}
return "/" + parts.join("/");
}
getTextContent(element) {
if (!element) return "";
if (!element.tagName) {
return element.text || "";
}
let content = element.text || "";
if (element.childNodes) {
for (const child of element.childNodes) {
const childText = this.getTextContent(child);
if (childText) {
content += " " + childText;
}
}
}
return content.trim();
}
aggregateElementsToChunks(elements) {
const aggregatedChunks = [];
for (const element of elements) {
if (aggregatedChunks.length > 0 && JSON.stringify(aggregatedChunks[aggregatedChunks.length - 1].metadata) === JSON.stringify(element.metadata)) {
aggregatedChunks[aggregatedChunks.length - 1].content += " \n" + element.content;
} else {
aggregatedChunks.push({ ...element });
}
}
return aggregatedChunks.map(
(chunk) => ({
text: chunk.content,
metadata: { ...chunk.metadata, xpath: chunk.xpath }
})
);
}
createDocuments(texts, metadatas) {
const _metadatas = metadatas || Array(texts.length).fill({});
const documents = [];
for (let i = 0; i < texts.length; i++) {
const chunks = this.splitText({ text: texts[i] });
for (const chunk of chunks) {
const metadata = { ..._metadatas[i] || {} };
const chunkMetadata = chunk.metadata;
if (chunkMetadata) {
for (const [key, value] of Object.entries(chunkMetadata || {})) {
if (value === "#TITLE#") {
chunkMetadata[key] = metadata["Title"];
}
}
}
documents.push({
text: chunk.text,
metadata: { ...metadata, ...chunkMetadata }
});
}
}
return documents;
}
transformDocuments(documents) {
const texts = [];
const metadatas = [];
for (const doc of documents) {
texts.push(doc.text);
metadatas.push(doc.metadata);
}
return this.createDocuments(texts, metadatas);
}
};
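// Usage sketch: split on heading tags, recording each heading's text under the metadata key
// paired with its tag. `htmlString` is a placeholder.
//
//   const headerSplitter = new HTMLHeaderTransformer([
//     ["h1", "Header 1"],
//     ["h2", "Header 2"],
//   ]);
//   const headerChunks = headerSplitter.splitText({ text: htmlString });
//   // => [{ text, metadata: { "Header 1": "<heading text>", xpath: "<element path>" } }, ...]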
var HTMLSectionTransformer = class {
headersToSplitOn;
options;
constructor(headersToSplitOn, options = {}) {
this.headersToSplitOn = Object.fromEntries(headersToSplitOn.map(([tag, name]) => [tag.toLowerCase(), name]));
this.options = options;
}
splitText(text) {
const sections = this.splitHtmlByHeaders(text);
return sections.map(
(section) => ({
text: section.content,
metadata: {
[this.headersToSplitOn[section.tagName.toLowerCase()]]: section.header,
xpath: section.xpath
}
})
);
}
getXPath(element) {
const parts = [];
let current = element;
while (current && current.nodeType === 1) {
let index = 1;
let sibling = current.previousSibling;
while (sibling) {
if (sibling.nodeType === 1 && sibling.tagName === current.tagName) {
index++;
}
sibling = sibling.previousSibling;
}
if (current.tagName) {
parts.unshift(`${current.tagName.toLowerCase()}[${index}]`);
}
current = current.parentNode;
}
return "/" + parts.join("/");
}
splitHtmlByHeaders(htmlDoc) {
const sections = [];
const root = parse(htmlDoc);
const headers = Object.keys(this.headersToSplitOn);
const headerElements = root.querySelectorAll(headers.join(","));
headerElements.forEach((headerElement, index) => {
const header = headerElement.text?.trim() || "";
const tagName = headerElement.tagName;
const xpath = this.getXPath(headerElement);
let content = "";
let currentElement = headerElement.nextElementSibling;
const nextHeader = headerElements[index + 1];
while (currentElement && (!nextHeader || currentElement !== nextHeader)) {
if (currentElement.text) {
content += currentElement.text.trim() + " ";
}
currentElement = currentElement.nextElementSibling;
}
content = content.trim();
sections.push({
header,
content,
tagName,
xpath
});
});
return sections;
}
async splitDocuments(documents) {
const texts = [];
const metadatas = [];
for (const doc of documents) {
texts.push(doc.text);
metadatas.push(doc.metadata);
}
const results = await this.createDocuments(texts, metadatas);
const textSplitter = new RecursiveCharacterTransformer({ options: this.options });
return textSplitter.splitDocuments(results);
}
createDocuments(texts, metadatas) {
const _metadatas = metadatas || Array(texts.length).fill({});
const documents = [];
for (let i = 0; i < texts.length; i++) {
const chunks = this.splitText(texts[i]);
for (const chunk of chunks) {
const metadata = { ..._metadatas[i] || {} };
const chunkMetadata = chunk.metadata;
if (chunkMetadata) {
for (const [key, value] of Object.entries(chunkMetadata || {})) {
if (value === "#TITLE#") {
chunkMetadata[key] = metadata["Title"];
}
}
}
documents.push({
text: chunk.text,
metadata: { ...metadata, ...chunkMetadata }
});
}
}
return documents;
}
transformDocuments(documents) {
const texts = [];
const metadatas = [];
for (const doc of documents) {
texts.push(doc.text);
metadatas.push(doc.metadata);
}
return this.createDocuments(texts, metadatas);
}
};
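// Usage sketch: section-based splitting keyed by header tags. Note that splitText here takes
// the raw HTML string directly rather than a { text } object. `htmlString` is a placeholder.
//
//   const sectionSplitter = new HTMLSectionTransformer([["h2", "Section"]]);
//   const sections = sectionSplitter.splitText(htmlString);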
// src/document/transformers/json.ts
var RecursiveJsonTransformer = class _RecursiveJsonTransformer {
maxSize;
minSize;
constructor({ maxSize = 2e3, minSize }) {
this.maxSize = maxSize;
this.minSize = minSize ?? Math.max(maxSize - 200, 50);
}
static jsonSize(data) {
const seen = /* @__PURE__ */ new WeakSet();
function getStringifiableData(obj) {
if (obj === null || typeof obj !== "object") {
return obj;
}
if (seen.has(obj)) {
return "[Circular]";
}
seen.add(obj);
if (Array.isArray(obj)) {
const safeArray = [];
for (const item of obj) {
safeArray.push(getStringifiableData(item));
}
return safeArray;
}
const safeObj = {};
for (const key in obj) {
if (Object.prototype.hasOwnProperty.call(obj, key)) {
safeObj[key] = getStringifiableData(obj[key]);
}
}
return safeObj;
}
const stringifiable = getStringifiableData(data);
const jsonString = JSON.stringify(stringifiable);
return jsonString.length;
}
/**
* Transform JSON data while handling circular references
*/
transform(data) {
const size = _RecursiveJsonTransformer.jsonSize(data);
const seen = /* @__PURE__ */ new WeakSet();
function createSafeCopy(obj) {
if (obj === null || typeof obj !== "object") {
return obj;
}
if (seen.has(obj)) {
return "[Circular]";
}
seen.add(obj);
if (Array.isArray(obj)) {
return obj.map((item) => createSafeCopy(item));
}
const copy = {};
for (const key in obj) {
if (Object.prototype.hasOwnProperty.call(obj, key)) {
copy[key] = createSafeCopy(obj[key]);
}
}
return copy;
}
return {
size,
data: createSafeCopy(data)
};
}
/**
* Set a value in a nested dictionary based on the given path
*/
static setNestedDict(d, path, value) {
let current = d;
for (const key of path.slice(0, -1)) {
current[key] = current[key] || {};
current = current[key];
}
current[path[path.length - 1]] = value;
}
/**
* Convert lists in the JSON structure to dictionaries with index-based keys
*/
listToDictPreprocessing(data) {
if (data && typeof data === "object") {
if (Array.isArray(data)) {
return Object.fromEntries(data.map((item, index) => [String(index), this.listToDictPreprocessing(item)]));
}
return Object.fromEntries(Object.entries(data).map(([k, v]) => [k, this.listToDictPreprocessing(v)]));
}
return data;
}
/**
* Handles primitive values (strings, numbers, etc.) by either adding them to the current chunk
* or creating new chunks if they don't fit
*/
handlePrimitiveValue(value, key, currentChunk, chunks, fullPath) {
const testValue = { [key]: value };
if (_RecursiveJsonTransformer.jsonSize(testValue) <= this.maxSize) {
if (_RecursiveJsonTransformer.jsonSize({ ...currentChunk, ...testValue }) <= this.maxSize) {
return {
currentChunk: { ...currentChunk, ...testValue },
chunks
};
} else {
return {
currentChunk: testValue,
chunks: [...chunks, currentChunk]
};
}
} else if (typeof value === "string") {
const stringChunks = this.splitLongString(value);
const newChunks = stringChunks.map((chunk) => {
return this.createChunk(chunk, fullPath);
}).filter((chunk) => _RecursiveJsonTransformer.jsonSize(chunk) <= this.maxSize);
return {
currentChunk,
chunks: [...chunks, ...newChunks]
};
}
const newChunk = this.createChunk(value, fullPath);
return {
currentChunk,
chunks: _RecursiveJsonTransformer.jsonSize(newChunk) <= this.maxSize ? [...chunks, newChunk] : chunks
};
}
/**
* Creates a nested dictionary chunk from a value and path
* e.g., path ['a', 'b'], value 'c' becomes { a: { b: 'c' } }
*/
createChunk(value, path) {
const chunk = {};
_RecursiveJsonTransformer.setNestedDict(chunk, path, value);
return chunk.root ? chunk.root : chunk;
}
/**
* Checks if value is within size limits
*/
isWithinSizeLimit(value, currentSize = 0) {
const size = _RecursiveJsonTransformer.jsonSize(value);
return currentSize === 0 ? size <= this.maxSize : size + currentSize <= this.maxSize || currentSize < this.minSize;
}
/**
* Splits arrays into chunks based on size limits
* Handles nested objects by recursing into handleNestedObject
*/
handleArray(value, key, currentPath, depth, maxDepth) {
const path = currentPath.length ? [...currentPath, key] : ["root", key];
const chunk = this.createChunk(value, path);
if (this.isWithinSizeLimit(chunk)) {
return [chunk];
}
const chunks = [];
let currentGroup = [];
const saveCurrentGroup = () => {
if (currentGroup.length > 0) {
const groupChunk = this.createChunk(currentGroup, path);
if (_RecursiveJsonTransformer.jsonSize(groupChunk) >= this.minSize) {
chunks.push(groupChunk);
currentGroup = [];
}
}
};
for (const item of value) {
const testGroup = [...currentGroup, item];
const testChunk = this.createChunk(testGroup, path);
if (this.isWithinSizeLimit(testChunk)) {
currentGroup = testGroup;
continue;
}
saveCurrentGroup();
if (typeof item === "object" && item !== null) {
const singleItemArray = [item];
const singleItemChunk = this.createChunk(singleItemArray, path);
if (this.isWithinSizeLimit(singleItemChunk)) {
currentGroup = singleItemArray;
} else {
const itemPath = [...path, String(chunks.length)];
const nestedChunks = this.handleNestedObject(item, itemPath, depth + 1, maxDepth);
chunks.push(...nestedChunks);
}
} else {
currentGroup = [item];
}
}
saveCurrentGroup();
return chunks;
}
/**
* Splits objects into chunks based on size limits
* Handles nested arrays and objects by recursing into handleArray and handleNestedObject
*/
handleNestedObject(value, fullPath, depth, maxDepth) {
const path = fullPath.length ? fullPath : ["root"];
if (depth > maxDepth) {
console.warn(`Maximum depth of ${maxDepth} exceeded, flattening remaining structure`);
return [this.createChunk(value, path)];
}
const wholeChunk = this.createChunk(value, path);
if (this.isWithinSizeLimit(wholeChunk)) {
return [wholeChunk];
}
const chunks = [];
let currentChunk = {};
const saveCurrentChunk = () => {
if (Object.keys(currentChunk).length > 0) {
const objChunk = this.createChunk(currentChunk, path);
if (_RecursiveJsonTransformer.jsonSize(objChunk) >= this.minSize) {
chunks.push(objChunk);
currentChunk = {};
}
}
};
for (const [key, val] of Object.entries(value)) {
if (val === void 0) continue;
if (Array.isArray(val)) {
saveCurrentChunk();
const arrayChunks = this.handleArray(val, key, path, depth, maxDepth);
chunks.push(...arrayChunks);
continue;
}
const testChunk = this.createChunk({ ...currentChunk, [key]: val }, path);
if (this.isWithinSizeLimit(testChunk)) {
currentChunk[key] = val;
continue;
}
saveCurrentChunk();
if (typeof val === "object" && val !== null) {
const nestedChunks = this.handleNestedObject(val, [...path, key], depth + 1, maxDepth);
chunks.push(...nestedChunks);
} else {
currentChunk = { [key]: val };
}
}
saveCurrentChunk();
return chunks;
}
/**
* Splits long strings into smaller chunks at word boundaries
* Ensures each chunk is within maxSize limit
*/
splitLongString(value) {
const chunks = [];
let remaining = value;
while (remaining.length > 0) {
const overhead = 20;
const chunkSize = Math.floor(this.maxSize - overhead);
if (remaining.length <= chunkSize) {
chunks.push(remaining);
break;
}
const lastSpace = remaining.slice(0, chunkSize).lastIndexOf(" ");
const splitAt = lastSpace > 0 ? lastSpace + 1 : chunkSize;
chunks.push(remaining.slice(0, splitAt));
remaining = remaining.slice(splitAt);
}
return chunks;
}
/**
* Core chunking logic that processes JSON data recursively
* Handles arrays, objects, and primitive values while maintaining structure
*/
jsonSplit({
data,
currentPath = [],
chunks = [{}],
depth = 0,
maxDepth = 100
}) {
if (!data || typeof data !== "object") {
return chunks;
}
if (depth > maxDepth) {
console.warn(`Maximum depth of ${maxDepth} exceeded, flattening remaining structure`);
_RecursiveJsonTransformer.setNestedDict(chunks[chunks.length - 1] || {}, currentPath, data);
return chunks;
}
let currentChunk = {};
let accumulatedChunks = chunks;
for (const [key, value] of Object.entries(data)) {
const fullPath = [...currentPath, key];
if (Array.isArray(value)) {
const arrayChunks = this.handleArray(value, key, currentPath, depth, maxDepth);
accumulatedChunks = [...accumulatedChunks, ...arrayChunks];
} else if (typeof value === "object" && value !== null) {
const objectChunks = this.handleNestedObject(value, fullPath, depth, maxDepth);
accumulatedChunks = [...accumulatedChunks, ...objectChunks];
} else {
const { currentChunk: newCurrentChunk, chunks: newChunks } = this.handlePrimitiveValue(
value,
key,
currentChunk,
accumulatedChunks,
fullPath
);
currentChunk = newCurrentChunk;
accumulatedChunks = newChunks;
}
}
if (Object.keys(currentChunk).length > 0) {
accumulatedChunks = [...accumulatedChunks, currentChunk];
}
return accumulatedChunks.filter((chunk) => Object.keys(chunk).length > 0);
}
/**
* Splits JSON into a list of JSON chunks
*/
splitJson({
jsonData,
convertLists = false
}) {
const processedData = convertLists ? this.listToDictPreprocessing(jsonData) : jsonData;
const chunks = this.jsonSplit({ data: processedData });
if (Object.keys(chunks[chunks.length - 1] || {}).length === 0) {
chunks.pop();
}
return chunks;
}
/**
* Converts Unicode characters to their escaped ASCII representation
* e.g., 'café' becomes 'caf\u00e9'
*/
escapeNonAscii(obj) {
if (typeof obj === "string") {
return obj.replace(/[\u0080-\uffff]/g, (char) => {
return `\\u${char.charCodeAt(0).toString(16).padStart(4, "0")}`;
});
}
if (Array.isArray(obj)) {
return obj.map((item) => this.escapeNonAscii(item));
}
if (typeof obj === "object" && obj !== null) {
return Object.fromEntries(Object.entries(obj).map(([key, value]) => [key, this.escapeNonAscii(value)]));
}
return obj;
}
/**
* Splits JSON into a list of JSON formatted strings
*/
splitText({
jsonData,
convertLists = false,
ensureAscii = true
}) {
const chunks = this.splitJson({ jsonData, convertLists });
if (ensureAscii) {
const escapedChunks = chunks.map((chunk) => this.escapeNonAscii(chunk));
return escapedChunks.map((chunk) => JSON.stringify(chunk));
}
return chunks.map(
(chunk) => JSON.stringify(chunk, (key, value) => {
if (typeof value === "string") {
return value.replace(/\\u[\da-f]{4}/gi, (match) => String.fromCharCode(parseInt(match.slice(2), 16)));
}
return value;
})
);
}
/**
* Create documents from a list of json objects
*/
createDocuments({
texts,
convertLists = false,
ensureAscii = true,
metadatas
}) {
const _metadatas = metadatas || Array(texts.length).fill({});
const documents = [];
texts.forEach((text, i) => {
const chunks = this.splitText({ jsonData: JSON.parse(text), convertLists, ensureAscii });
chunks.forEach((chunk) => {
const metadata = { ..._metadatas[i] || {} };
documents.push({
text: chunk,
metadata
});
});
});
return documents;
}
transformDocuments({
ensureAscii,
documents,
convertLists
}) {
const texts = [];
const metadatas = [];
for (const doc of documents) {
texts.push(doc.text);
metadatas.push(doc.metadata);
}
return this.createDocuments({
texts,
metadatas,
ensureAscii,
convertLists
});
}
};
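// Usage sketch: chunk a JSON object into pieces whose serialized form stays within maxSize
// characters, preserving the surrounding key path in each chunk. The input object is illustrative.
//
//   const jsonSplitter = new RecursiveJsonTransformer({ maxSize: 256 });
//   const jsonChunks = jsonSplitter.splitText({
//     jsonData: { title: "Example", tags: ["a", "b"], body: "Some longer body text." },
//     ensureAscii: false,
//   });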
// src/document/transformers/latex.ts
var LatexTransformer = class extends RecursiveCharacterTransformer {
constructor(options = {}) {
const separators = RecursiveCharacterTransformer.getSeparatorsForLanguage("latex" /* LATEX */);
super({ separators, isSeparatorRegex: true, options });
}
};
// src/document/transformers/markdown.ts
var MarkdownTransformer = class extends RecursiveCharacterTransformer {
constructor(options = {}) {
const separators = RecursiveCharacterTransformer.getSeparatorsForLanguage("markdown" /* MARKDOWN */);
super({ separators, isSeparatorRegex: true, options });
}
};
var MarkdownHeaderTransformer = class {
headersToSplitOn;
returnEachLine;
stripHeaders;
constructor(headersToSplitOn, returnEachLine = false, stripHeaders = true) {
this.headersToSplitOn = [...headersToSplitOn].sort((a, b) => b[0].length - a[0].length);
this.returnEachLine = returnEachLine;
this.stripHeaders = stripHeaders;
}
aggregateLinesToChunks(lines) {
if (this.returnEachLine) {
return lines.flatMap((line) => {
const contentLines = line.content.split("\n");
return contentLines.filter((l) => l.trim() !== "" || this.headersToSplitOn.some(([sep]) => l.trim().startsWith(sep))).map(
(l) => ({
text: l.trim(),
metadata: line.metadata
})
);
});
}
const aggregatedChunks = [];
for (const line of lines) {
if (aggregatedChunks.length > 0 && JSON.stringify(aggregatedChunks?.[aggregatedChunks.length - 1].metadata) === JSON.stringify(line.metadata)) {
const aggChunk = aggregatedChunks[aggregatedChunks.length - 1];
aggChunk.content += " \n" + line.content;
} else if (aggregatedChunks.length > 0 && JSON.stringify(aggregatedChunks?.[aggregatedChunks.length - 1].metadata) !== JSON.stringify(line.metadata) && Object.keys(aggregatedChunks?.[aggregatedChunks.length - 1].metadata).length < Object.keys(line.metadata).length && aggregatedChunks?.[aggregatedChunks.length - 1]?.content?.split("\n")?.slice(-1)[0][0] === "#" && !this.stripHeaders) {
if (aggregatedChunks && aggregatedChunks?.[aggregatedChunks.length - 1]) {
const aggChunk = aggregatedChunks[aggregatedChunks.length - 1];
if (aggChunk) {
aggChunk.content += " \n" + line.content;
aggChunk.metadata = line.metadata;
}
}
} else {
aggregatedChunks.push(line);
}
}
return aggregatedChunks.map(
(chunk) => ({
text: chunk.content,
metadata: chunk.metadata
})
);
}
splitText({ text }) {
const lines = text.split("\n");
const linesWithMetadata = [];
let currentContent = [];
let currentMetadata = {};
const headerStack = [];
const initialMetadata = {};
let inCodeBlock = false;
let openingFence = "";
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
const strippedLine = line.trim();
if (!inCodeBlock) {
if (strippedLine.startsWith("```") && strippedLine.split("```").length === 2 || strippedLine.startsWith("~~~")) {
inCodeBlock = true;
openingFence = strippedLine.startsWith("```") ? "```" : "~~~";
}
} else {
if (strippedLine.startsWith(openingFence)) {
inCodeBlock = false;
openingFence = "";
}
}
if (inCodeBlock) {
currentContent.push(line);
continue;
}
let headerMatched = false;
for (const [sep, name] of this.headersToSplitOn) {
if (strippedLine.startsWith(sep) && (strippedLine.length === sep.length || strippedLine[sep.length] === " ")) {
headerMatched = true;
if (currentContent.length > 0) {
linesWithMetadata.push({
content: currentContent.join("\n"),
metadata: { ...currentMetadata }
});
currentContent = [];
}
if (name !== null) {
const currentHeaderLevel = (sep.match(/#/g) || []).length;
while (headerStack.length > 0 && headerStack?.[headerStack.length - 1].level >= currentHeaderLevel) {
const poppedHeader = headerStack.pop();
if (poppedHeader.name in initialMetadata) {
delete initialMetadata[poppedHeader.name];
}
}
const header = {
level: currentHeaderLevel,
name,
data: strippedLine.slice(sep.length).trim()
};
headerStack.push(header);
initialMetadata[name] = header.data;
}
linesWithMetadata.push({
content: line,
metadata: { ...currentMetadata, ...initialMetadata }
});
break;
}
}
if (!headerMatched) {
if (strippedLine || this.returnEachLine) {
currentContent.push(line);
if (this.returnEachLine) {
linesWithMetadata.push({
content: line,
metadata: { ...currentMetadata }
});
currentContent = [];
}
} else if (currentContent.length > 0) {
linesWithMetadata.push({
content: currentContent.join("\n"),
metadata: { ...currentMetadata }
});
currentContent = [];
}
}
currentMetadata = { ...initialMetadata };
}
if (currentContent.length > 0) {
linesWithMetadata.push({
content: currentContent.join("\n"),
metadata: currentMetadata
});
}
return this.aggregateLinesToChunks(linesWithMetadata);
}
createDocuments(texts, metadatas) {
const _metadatas = metadatas || Array(texts.length).fill({});
const documents = [];
texts.forEach((text, i) => {
this.splitText({ text }).forEach((chunk) => {
const metadata = { ..._metadatas[i], ...chunk.metadata };
documents.push({
text: chunk.text,
metadata
});
});
});
return documents;
}
transformDocuments(documents) {
const texts = [];
const metadatas = [];
for (const doc of documents) {
texts.push(doc.text);
metadatas.push(doc.metadata);
}
return this.createDocuments(texts, metadatas);
}
};
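// Usage sketch: header-aware Markdown splitting; each chunk carries the heading values it
// falls under as metadata. `markdownText` is a placeholder string.
//
//   const mdSplitter = new MarkdownHeaderTransformer([
//     ["#", "Header 1"],
//     ["##", "Header 2"],
//   ]);
//   const mdChunks = mdSplitter.splitText({ text: markdownText });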
function splitTextOnTokens({ text, tokenizer }) {
const splits = [];
const inputIds = tokenizer.encode(text);
let startIdx = 0;
let curIdx = Math.min(startIdx + tokenizer.tokensPerChunk, inputIds.length);
let chunkIds = inputIds.slice(startIdx, curIdx);
while (startIdx < inputIds.length) {
splits.push(tokenizer.decode(chunkIds));
if (curIdx === inputIds.length) {
break;
}
startIdx += tokenizer.tokensPerChunk - tokenizer.overlap;
curIdx = Math.min(startIdx + tokenizer.tokensPerChunk, inputIds.length);
chunkIds = inputIds.slice(startIdx, curIdx);
}
return splits;
}
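// Worked example of the sliding window above: with tokensPerChunk = 100 and overlap = 20,
// a 250-token input produces windows starting at tokens 0, 80, and 160, i.e. chunks covering
// tokens [0, 100), [80, 180), and [160, 250).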
var TokenTransformer = class _TokenTransformer extends TextTransformer {
tokenizer;
allowedSpecial;
disallowedSpecial;
constructor({
encodingName = "cl100k_base",
modelName,
allowedSpecial = /* @__PURE__ */ new Set(),
disallowedSpecial = "all",
options = {}
}) {
super(options);
try {
this.tokenizer = modelName ? encodingForModel(modelName) : getEncoding(encodingName);
} catch {
throw new Error("Could not load tiktoken encoding. Please install it with `npm install js-tiktoken`.");
}
this.allowedSpecial = allowedSpecial;
this.disallowedSpecial = disallowedSpecial;
}
splitText({ text }) {
const encode = (text2) => {
const allowed = this.allowedSpecial === "all" ? "all" : Array.from(this.allowedSpecial);
const disallowed = this.disallowedSpecial === "all" ? "all" : Array.from(this.disallowedSpecial);
const processedText = this.stripWhitespace ? text2.trim() : text2;
return Array.from(this.tokenizer.encode(processedText, allowed, disallowed));
};
const decode = (tokens) => {
const text2 = this.tokenizer.decode(tokens);
return this.stripWhitespace ? text2.trim() : text2;
};
const tokenizer = {
overlap: this.overlap,
tokensPerChunk: this.size,
decode,
encode
};
return splitTextOnTokens({ text, tokenizer });
}
static fromTikToken({
encodingName = "cl100k_base",
modelName,
options = {}
}) {
let tokenizer;
try {
if (modelName) {
tokenizer = encodingForModel(modelName);
} else {
tokenizer = getEncoding(encodingName);
}
} catch {
throw new Error("Could not load tiktoken encoding. Please install it with `npm install js-tiktoken`.");
}
const tikTokenEncoder = (text) => {
const allowed = options.allowedSpecial === "all" ? "all" : options.allowedSpecial ? Array.from(options.allowedSpecial) : [];
const disallowed = options.disallowedSpecial === "all" ? "all" : options.disallowedSpecial ? Array.from(options.disallowedSpecial) : [];
return tokenizer.encode(text, allowed, disallowed).length;
};
return new _TokenTransformer({
encodingName,
modelName,
allowedSpecial: options.allowedSpecial,
disallowedSpecial: options.disallowedSpecial,
options: {
size: options.size,
overlap: options.overlap,
lengthFunction: tikTokenEncoder
}
});
}
};
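// Usage sketch: token-based chunking with a tiktoken length function, so size and overlap are
// measured in tokens rather than characters. `longText` is a placeholder string.
//
//   const tokenSplitter = TokenTransformer.fromTikToken({
//     encodingName: "cl100k_base",
//     options: { size: 256, overlap: 32 },
//   });
//   const tokenChunks = tokenSplitter.splitText({ text: longText });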
// src/document/document.ts
var TitleExtractor = class {
options;
constructor(options = {}) {
this.options = options;
}
};
var SummaryExtractor = class {
options;
constructor(options = {}) {
this.options = options;
}
};
var QuestionsAnsweredExtractor = class {
options;
constructor(options = {}) {
this.options = options;
}
};
var KeywordExtractor = class {
options;
constructor(options = {}) {
this.options = options;
}
};
var IngestionPipeline = class {
transformations;
constructor({ transformations }) {
this.transformations = transformations;
}
async run({ documents }) {
return documents;
}
};
var MDocument = class _MDocument {
chunks;
type;
// e.g., 'text', 'html', 'markdown', 'json'
constructor({ docs, type }) {
this.chunks = docs.map((d) => {
return { text: d.text, metadata: d.metadata || {} };
});
this.type = type;
}
async extractMetadata({ title, summary, questions, keywords }) {
const transformations = [];
if (typeof summary !== "undefined") {
transformations.push(new SummaryExtractor(typeof summary === "boolean" ? {} : summary));
}
if (typeof questions !== "undefined") {
transformations.push(new QuestionsAnsweredExtractor(typeof questions === "boolean" ? {} : questions));
}
if (typeof keywords !== "undefined") {
transformations.push(new KeywordExtractor(typeof keywords === "boolean" ? {} : keywords));
}
if (typeof title !== "undefined") {
transformations.push(new TitleExtractor(typeof title === "boolean" ? {} : title));
}
const pipeline = new IngestionPipeline({
transformations
});
await pipeline.run({ documents: this.chunks });
this.chunks = this.chunks.map((doc) => {
const text = doc.text;
const newMetadata = { ...doc.metadata };
if (typeof title !== "undefined") {
const firstLine = text.split("\n")[0] ?? "";
const firstSentence = text.split(/[.!?]/)[0] ?? "";
newMetadata.title = firstLine.length < 100 ? firstLine : firstSentence.length < 100 ? firstSentence : text.substring(0, 100);
}
if (typeof summary !== "undefined") {
const firstParagraph = text.split("\n\n")[0] ?? "";
newMetadata.summary = firstParagraph.length < 200 ? firstParagraph : text.substring(0, 200) + "...";
}
if (typeof keywords !== "undefined") {
const stopwords = /* @__PURE__ */ new Set(["the", "a", "an", "in", "on", "at", "to", "for", "and", "or", "but", "is", "are", "was", "were"]);
const words = text.toLowerCase().match(/\b\w+\b/g) || [];
const wordCounts = {};
words.forEach((word) => {
if (!stopwords.has(word) && word.length > 3) {
wordCounts[word] = (wordCounts[word] || 0) + 1;
}
});
const sortedWords = Object.entries(wordCounts).sort((a, b) => b[1] - a[1]).slice(0, 5).map(([word]) => word);
newMetadata.keywords = sortedWords;
}
if (typeof questions !== "undefined") {
const questionSentences = text.match(/[^.!?]*\?/g) || [];
newMetadata.questions = questionSentences.slice(0, 3);
}
return {
text: doc.text,
metadata: newMetadata
};
});
return this;
}
static fromText(text, metadata) {
return new _MDocument({
docs: [
{
text,
metadata
}
],
type: "text"
});
}
static fromHTML(html, metadata) {
return new _MDocument({
docs: [
{
text: html,
metadata
}
],
type: "html"
});
}
static fromMarkdown(markdown, metadata) {
return new _MDocument({
docs: [
{
text: markdown,
metadata
}
],
type: "markdown"
});
}
static fromJSON(jsonString, metadata) {
return new _MDocument({
docs: [
{
text: jsonString,
metadata
}
],
type: "json"
});
}
defaultStrategy() {
switch (this.type) {
case "html":
return "html";
case "markdown":
return "markdown";
case "json":
return "json";
case "latex":
return "latex";
default:
return "recursive";
}
}
async chunkBy(strategy, options) {
switch (strategy) {
case "recursive":
await this.chunkRecursive(options);
break;
case "character":
await this.chunkCharacter(options);
break;
case "token":
await this.chunkToken(options);
break;
case "markdown":
await this.chunkMarkdown(options);
break;
case "html":
await this.chunkHTML(options);
break;
case "json":
await this.chunkJSON(options);
break;
case "latex":
await this.chunkLatex(options);
break;
default:
throw new Error(`Unknown strategy: ${strategy}`);
}
}
async chunkRecursive(options) {
if (options?.language) {
const rt2 = RecursiveCharacterTransformer.fromLanguage(options.language, options);
const textSplit2 = rt2.transformDocuments(this.chunks);
this.chunks = textSplit2;
return;
}
const rt = new RecursiveCharacterTransformer({
separators: options?.separators,
isSeparatorRegex: options?.isSeparatorRegex,
options
});
const textSplit = rt.transformDocuments(this.chunks);
this.chunks = textSplit;
}
async chunkCharacter(options) {
const rt = new CharacterTransformer({
separator: options?.separator,
isSeparatorRegex: options?.isSeparatorRegex,
options
});
const textSplit = rt.transformDocuments(this.chunks);
this.chunks = textSplit;
}
async chunkHTML(options) {
if (options?.headers?.length) {
const rt = new HTMLHeaderTransformer(options.headers, options?.returnEachLine);
const textSplit = rt.transformDocuments(this.chunks);
this.chunks = textSplit;
return;
}
if (options?.sections?.length) {
const rt = new HTMLSectionTransformer(options.sections);
const textSplit = rt.transformDocuments(this.chunks);
this.chunks = textSplit;
return;
}
throw new Error("HTML chunking requires either headers or sections to be specified");
}
async chunkJSON(options) {
if (!options?.maxSize) {
throw new Error("JSON chunking requires maxSize to be specified");
}
const rt = new RecursiveJsonTransformer({
maxSize: options?.maxSize,
minSize: options?.minSize
});
const textSplit = rt.transformDocuments({
documents: this.chunks,
ensureAscii: options?.ensureAscii,
convertLists: options?.convertLists
});
this.chunks = textSplit;
}
async chunkLatex(options) {
const rt = new LatexTransformer(options);
const textSplit = rt.transformDocuments(this.chunks);
this.chunks = textSplit;
}
async chunkToken(options) {
const rt = TokenTransformer.fromTikToken({
options,
encodingName: options?.encodingName,
modelName: options?.modelName
});
const textSplit = rt.transformDocuments(this.chunks);
this.chunks = textSplit;
}
async chunkMarkdown(options) {
if (options?.headers) {
const rt2 = new MarkdownHeaderTransformer(options.headers, options?.returnEachLine, options?.stripHeaders);
const textSplit2 = rt2.transformDocuments(this.chunks);
this.chunks = textSplit2;
return;
}
const rt = new MarkdownTransformer(options);
const textSplit = rt.transformDocuments(this.chunks);
this.chunks = textSplit;
}
async chunk(params) {
const { strategy: passedStrategy, extract, ...chunkOptions } = params || {};
const strategy = passedStrategy || this.defaultStrategy();
await this.chunkBy(strategy, chunkOptions);
if (extract) {
await this.extractMetadata(extract);
}
return this.chunks;
}
getDocs() {
return this.chunks;
}
getText() {
return this.chunks.map((doc) => doc.text);
}
getMetadata() {
return this.chunks.map((doc) => doc.metadata);
}
};
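// Usage sketch (inside an async context): MDocument is the typical entry point; it picks a
// default strategy from the document type and returns { text, metadata } chunks. The metadata
// argument and chunk options shown are illustrative.
//
//   const doc = MDocument.fromMarkdown("# Title\n\nSome content.", { source: "readme" });
//   const chunks = await doc.chunk({ strategy: "recursive", size: 512, overlap: 50 });
//   // chunks: [{ text: "...", metadata: { source: "readme" } }, ...]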
var DEFAULT_WEIGHTS = {
semantic: 0.4,
vector: 0.4,
position: 0.2
};
function calculatePositionScore(position, totalChunks) {
return 1 - position / totalChunks;
}
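// For example, with 10 chunks the first chunk scores 1 - 0/10 = 1.0 and the last scores
// 1 - 9/10 = 0.1, so earlier chunks are favored by the position component of the weights above.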
function analyzeQueryEmbedding(embedding) {
const magnitude = Math.sqrt(embedding.reduce((sum, val) => sum + val * val, 0));
const dominantFeatures = embedding.map((value, index) => ({ value: Math.abs(value), index })).sort((a, b) => b.value - a.value).slice(0, 5).map((item) => item.index);
return { magnitude, dominantFeatures };
}
function adjustScores(score, queryAnalysis) {
const magnitudeAdjustment = queryAnalysis.magnitude > 10 ? 1.1 : 1;
const featureStrengthAdjustment = queryAnalysis.magnitude > 5 ? 1.05 : 1;
return score * magnitudeAdjustment * featureStrengthAdjustment;
}