langbase

The AI SDK for building declarative and composable AI-powered LLM products.

1,594 lines (1,583 loc) 51.6 kB
"use strict"; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __hasOwnProp = Object.prototype.hasOwnProperty; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // src/index.ts var index_exports = {}; __export(index_exports, { Langbase: () => Langbase, Pipe: () => Pipe, Workflow: () => Workflow, fromReadableStream: () => fromReadableStream, getRunner: () => getRunner, getTextPart: () => getTextPart, getToolsFromRun: () => getToolsFromRun, getToolsFromRunStream: () => getToolsFromRunStream, getToolsFromStream: () => getToolsFromStream, handleResponseStream: () => handleResponseStream, printStreamToStdout: () => printStreamToStdout }); module.exports = __toCommonJS(index_exports); // src/lib/utils/doc-to-formdata.ts async function convertDocToFormData(options) { let formData = new FormData(); if (options.document instanceof Buffer) { const documentBlob = new Blob([options.document], { type: options.contentType }); formData.append("document", documentBlob, options.documentName); } else if (options.document instanceof File) { formData.append("document", options.document, options.documentName); } else if (options.document instanceof FormData) { formData = options.document; } else if (options.document instanceof ReadableStream) { const chunks = []; const reader = options.document.getReader(); while (true) { const { done, value } = await reader.read(); if (done) break; chunks.push(value); } const documentBlob = new Blob(chunks, { type: options.contentType }); formData.append("document", documentBlob, options.documentName); } formData.append("documentName", options.documentName); return formData; } // src/data/constants.ts var GENERATION_ENDPOINTS = [ "/v1/pipes/run", "/beta/chat", "/beta/generate", "/v1/agent/run" ]; // src/common/errors.ts var APIError = class _APIError extends Error { constructor(status, error, message, headers) { super(_APIError.makeMessage(status, error, message)); this.status = status; this.headers = headers; this.request_id = headers == null ? void 0 : headers["lb-request-id"]; const data = error; this.error = data; this.code = data == null ? void 0 : data["code"]; this.status = data == null ? void 0 : data["status"]; } static makeMessage(status, error, message) { const msg = (error == null ? void 0 : error.message) ? typeof error.message === "string" ? error.message : JSON.stringify(error.message) : error ? JSON.stringify(error) : message; if (status && msg) { return `${status} ${msg}`; } if (status) { return `${status} status code (no body)`; } if (msg) { return msg; } return "(no status code or body)"; } static generate(status, errorResponse, message, headers) { if (!status) { return new APIConnectionError({ cause: errorResponse instanceof Error ? errorResponse : void 0 }); } const error = errorResponse == null ? 
void 0 : errorResponse["error"]; switch (status) { case 400: return new BadRequestError(status, error, message, headers); case 401: return new AuthenticationError(status, error, message, headers); case 403: return new PermissionDeniedError( status, error, message, headers ); case 404: return new NotFoundError(status, error, message, headers); case 409: return new ConflictError(status, error, message, headers); case 422: return new UnprocessableEntityError( status, error, message, headers ); case 429: return new RateLimitError(status, error, message, headers); default: return status >= 500 ? new InternalServerError(status, error, message, headers) : new _APIError(status, error, message, headers); } } }; var APIConnectionError = class extends APIError { constructor({ message, cause }) { super(void 0, void 0, message || "Connection error.", void 0); this.status = void 0; if (cause) this.cause = cause; } }; var BadRequestError = class extends APIError { constructor() { super(...arguments); this.status = 400; } }; var AuthenticationError = class extends APIError { constructor() { super(...arguments); this.status = 401; } }; var PermissionDeniedError = class extends APIError { constructor() { super(...arguments); this.status = 403; } }; var NotFoundError = class extends APIError { constructor() { super(...arguments); this.status = 404; } }; var ConflictError = class extends APIError { constructor() { super(...arguments); this.status = 409; } }; var UnprocessableEntityError = class extends APIError { constructor() { super(...arguments); this.status = 422; } }; var RateLimitError = class extends APIError { constructor() { super(...arguments); this.status = 429; } }; var InternalServerError = class extends APIError { }; // src/common/stream.ts var Stream = class _Stream { constructor(iterator, controller) { this.iterator = iterator; this.controller = controller; } /** * Creates a stream of AsyncIterator from a Server-Sent Events (SSE) response. * * @template Item - The type of items in the stream. * @param {Response} response - The SSE response object. * @param {AbortController} controller - The abort controller used to cancel the ongoing request. * @returns {Stream<AsyncIterator<Item, any, undefined>>} - The stream created from the SSE response. * @throws {Error} - If the stream has already been consumed. */ static fromSSEResponse(response, controller) { let consumed = false; async function* iterator() { if (consumed) { throw new Error( "Cannot iterate over a consumed stream, use `.tee()` to split the stream." 
); } consumed = true; let done = false; try { for await (const sse of _iterSSEMessages( response, controller )) { if (done) continue; if (sse.data.startsWith("[DONE]")) { done = true; continue; } if (sse.event === null) { let data; try { data = JSON.parse(sse.data); } catch (e) { console.error( `Could not parse message into JSON:`, sse.data ); console.error(`From chunk:`, sse.raw); throw e; } if (data && data.error) { throw new Error(data.error); } yield data; } else { let data; try { data = JSON.parse(sse.data); } catch (e) { console.error( `Could not parse message into JSON:`, sse.data ); console.error(`From chunk:`, sse.raw); throw e; } if (sse.event == "error") { throw new Error(data.message); } yield { event: sse.event, data }; } } done = true; } catch (e) { if (e instanceof Error && e.name === "AbortError") return; throw e; } finally { if (!done) controller.abort(); } } return new _Stream(iterator, controller); } /** * Generates a Stream from a newline-separated ReadableStream * where each item is a JSON value. * * @template Item - The type of items in the stream. * @param {ReadableStream} readableStream - The readable stream to create the stream from. * @param {AbortController} controller - The abort controller to control the stream. * @returns {Stream<Item>} - The created stream. */ static fromReadableStream(readableStream, controller) { let consumed = false; async function* iterLines() { const lineDecoder = new LineDecoder(); const iter = readableStreamAsyncIterable(readableStream); for await (const chunk of iter) { for (const line of lineDecoder.decode(chunk)) { yield line; } } for (const line of lineDecoder.flush()) { yield line; } } async function* iterator() { if (consumed) { throw new Error( "Cannot iterate over a consumed stream, use `.tee()` to split the stream." ); } consumed = true; let done = false; try { for await (const line of iterLines()) { if (done) continue; if (line) yield JSON.parse(line); } done = true; } catch (e) { if (e instanceof Error && e.name === "AbortError") return; throw e; } finally { if (!done) controller.abort(); } } return new _Stream(iterator, controller); } [Symbol.asyncIterator]() { return this.iterator(); } /** * Splits the stream into two streams which can be * independently read from at different speeds. */ tee() { const left = []; const right = []; const iterator = this.iterator(); const teeIterator = (queue) => { return { next: () => { if (queue.length === 0) { const result = iterator.next(); left.push(result); right.push(result); } return queue.shift(); } }; }; return [ new _Stream(() => teeIterator(left), this.controller), new _Stream(() => teeIterator(right), this.controller) ]; } /** * Converts this stream to a newline-separated ReadableStream of * JSON stringified values in the stream which can be turned back into a Stream with `Stream.fromReadableStream()`. */ toReadableStream() { const self2 = this; let iter; const encoder = new TextEncoder(); return new ReadableStream({ async start() { iter = self2[Symbol.asyncIterator](); }, async pull(ctrl) { try { const { value, done } = await iter.next(); if (done) return ctrl.close(); const bytes = encoder.encode(JSON.stringify(value) + "\n"); ctrl.enqueue(bytes); } catch (err) { ctrl.error(err); } }, async cancel() { var _a; await ((_a = iter.return) == null ? 
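// Consumption sketch (illustrative): a Stream is an async iterable; tee()
// splits it into two independently readable streams, and toReadableStream()
// emits newline-delimited JSON that Stream.fromReadableStream() can turn back
// into a Stream.
//
//   const [left, right] = stream.tee();
//   for await (const chunk of left) {
//     // process chunk
//   }
//   const readable = right.toReadableStream(); // ReadableStream of JSON lines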
void 0 : _a.call(iter)); } }); } }; async function* _iterSSEMessages(response, controller) { if (!response.body) { controller.abort(); throw new Error(`Attempted to iterate over a response with no body`); } const sseDecoder = new SSEDecoder(); const lineDecoder = new LineDecoder(); const iter = readableStreamAsyncIterable(response.body); for await (const sseChunk of iterSSEChunks(iter)) { for (const line of lineDecoder.decode(sseChunk)) { const sse = sseDecoder.decode(line); if (sse) yield sse; } } for (const line of lineDecoder.flush()) { const sse = sseDecoder.decode(line); if (sse) yield sse; } } async function* iterSSEChunks(iterator) { let data = new Uint8Array(); for await (const chunk of iterator) { if (chunk == null) { continue; } const binaryChunk = chunk instanceof ArrayBuffer ? new Uint8Array(chunk) : typeof chunk === "string" ? new TextEncoder().encode(chunk) : chunk; let newData = new Uint8Array(data.length + binaryChunk.length); newData.set(data); newData.set(binaryChunk, data.length); data = newData; let patternIndex; while ((patternIndex = findDoubleNewlineIndex(data)) !== -1) { yield data.slice(0, patternIndex); data = data.slice(patternIndex); } } if (data.length > 0) { yield data; } } function findDoubleNewlineIndex(buffer) { const newline = 10; const carriage = 13; for (let i = 0; i < buffer.length - 2; i++) { if (buffer[i] === newline && buffer[i + 1] === newline) { return i + 2; } if (buffer[i] === carriage && buffer[i + 1] === carriage) { return i + 2; } if (buffer[i] === carriage && buffer[i + 1] === newline && i + 3 < buffer.length && buffer[i + 2] === carriage && buffer[i + 3] === newline) { return i + 4; } } return -1; } var SSEDecoder = class { constructor() { this.event = null; this.data = []; this.chunks = []; } /** * Decodes a line of text and returns a ServerSentEvent object if a complete event is found. * @param line - The line of text to decode. * @returns A ServerSentEvent object if a complete event is found, otherwise null. */ decode(line) { if (line.endsWith("\r")) { line = line.substring(0, line.length - 1); } if (!line) { if (!this.event && !this.data.length) return null; const sse = { event: this.event, data: this.data.join("\n"), raw: this.chunks }; this.event = null; this.data = []; this.chunks = []; return sse; } this.chunks.push(line); if (line.startsWith(":")) { return null; } let [fieldname, _, value] = partition(line, ":"); if (value.startsWith(" ")) { value = value.substring(1); } if (fieldname === "event") { this.event = value; } else if (fieldname === "data") { this.data.push(value); } return null; } }; var _LineDecoder = class _LineDecoder { // TextDecoder found in browsers; not typed to avoid pulling in either "dom" or "node" types. 
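// Decoding sketch (illustrative): SSEDecoder above buffers "event:" and "data:"
// fields and flushes a ServerSentEvent object when it reaches a blank line.
//
//   const decoder = new SSEDecoder();
//   decoder.decode('event: update');       // null (buffered)
//   decoder.decode('data: {"ok":true}');   // null (buffered)
//   decoder.decode('');                    // { event: 'update', data: '{"ok":true}', raw: [...] }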
constructor() { this.buffer = []; this.trailingCR = false; } decode(chunk) { let text = this.decodeText(chunk); if (this.trailingCR) { text = "\r" + text; this.trailingCR = false; } if (text.endsWith("\r")) { this.trailingCR = true; text = text.slice(0, -1); } if (!text) { return []; } const trailingNewline = _LineDecoder.NEWLINE_CHARS.has( text[text.length - 1] || "" ); let lines = text.split(_LineDecoder.NEWLINE_REGEXP); if (trailingNewline) { lines.pop(); } if (lines.length === 1 && !trailingNewline) { this.buffer.push(lines[0]); return []; } if (this.buffer.length > 0) { lines = [this.buffer.join("") + lines[0], ...lines.slice(1)]; this.buffer = []; } if (!trailingNewline) { this.buffer = [lines.pop() || ""]; } return lines; } decodeText(bytes) { var _a; if (bytes == null) return ""; if (typeof bytes === "string") return bytes; if (typeof Buffer !== "undefined") { if (bytes instanceof Buffer) { return bytes.toString(); } if (bytes instanceof Uint8Array) { return Buffer.from(bytes).toString(); } throw new Error( `Unexpected: received non-Uint8Array (${bytes.constructor.name}) stream chunk in an environment with a global "Buffer" defined, which this library assumes to be Node. Please report this error.` ); } if (typeof TextDecoder !== "undefined") { if (bytes instanceof Uint8Array || bytes instanceof ArrayBuffer) { (_a = this.textDecoder) != null ? _a : this.textDecoder = new TextDecoder("utf8"); return this.textDecoder.decode(bytes); } throw new Error( `Unexpected: received non-Uint8Array/ArrayBuffer (${bytes.constructor.name}) in a web platform. Please report this error.` ); } throw new Error( `Unexpected: neither Buffer nor TextDecoder are available as globals. Please report this error.` ); } flush() { if (!this.buffer.length && !this.trailingCR) { return []; } const lines = [this.buffer.join("")]; this.buffer = []; this.trailingCR = false; return lines; } }; // prettier-ignore _LineDecoder.NEWLINE_CHARS = /* @__PURE__ */ new Set(["\n", "\r"]); _LineDecoder.NEWLINE_REGEXP = /\r\n|[\n\r]/g; var LineDecoder = _LineDecoder; function partition(str, delimiter) { const index = str.indexOf(delimiter); if (index !== -1) { return [ str.substring(0, index), delimiter, str.substring(index + delimiter.length) ]; } return [str, "", ""]; } function readableStreamAsyncIterable(stream) { if (stream[Symbol.asyncIterator]) return stream; const reader = stream.getReader(); return { async next() { try { const result = await reader.read(); if (result == null ? void 0 : result.done) reader.releaseLock(); return result; } catch (e) { reader.releaseLock(); throw e; } }, async return() { const cancelPromise = reader.cancel(); reader.releaseLock(); await cancelPromise; return { done: true, value: void 0 }; }, [Symbol.asyncIterator]() { return this; } }; } // src/common/request.ts var Request = class { constructor(config) { this.config = config; } // Main send function async send({ endpoint, ...options }) { var _a, _b, _c, _d, _e, _f; const url = this.buildUrl({ endpoint }); const headers = this.buildHeaders({ headers: options.headers }); let response; try { response = await this.makeRequest({ url, options, headers }); } catch (error) { throw new APIConnectionError({ cause: error instanceof Error ? error : void 0 }); } if (!response.ok) { await this.handleErrorResponse({ response }); } const isLllmGenerationEndpoint = GENERATION_ENDPOINTS.includes(endpoint); if (!isLllmGenerationEndpoint && ((_a = options.body) == null ? 
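// Internal request sketch (illustrative, not part of the public API): every SDK
// method below funnels through this Request wrapper, which prefixes the
// configured baseUrl, adds the Bearer Authorization header, and maps failures
// to APIError via handleErrorResponse.
//
//   const request = new Request({
//     apiKey: process.env.LANGBASE_API_KEY,
//     baseUrl: 'https://api.langbase.com',
//   });
//   const pipes = await request.get({ endpoint: '/v1/pipes' });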
void 0 : _a.rawResponse)) { const responseData = await response.json(); if (Array.isArray(responseData)) { Object.defineProperty(responseData, "rawResponse", { value: { headers: Object.fromEntries(response.headers.entries()) }, enumerable: false, writable: true }); return responseData; } return { ...responseData, rawResponse: { headers: Object.fromEntries(response.headers.entries()) } }; } if (isLllmGenerationEndpoint) { const threadId = response.headers.get("lb-thread-id"); if (!options.body) { return this.handleRunResponse({ response, threadId: null, rawResponse: (_c = (_b = options.body) == null ? void 0 : _b.rawResponse) != null ? _c : false, endpoint }); } if (((_d = options.body) == null ? void 0 : _d.stream) && url.includes("run")) { return this.handleRunResponseStream({ response, rawResponse: options.body.rawResponse }); } if (options.body.stream) { return this.handleStreamResponse({ response }); } return this.handleRunResponse({ response, threadId, rawResponse: (_f = (_e = options.body) == null ? void 0 : _e.rawResponse) != null ? _f : false, endpoint }); } else { const res = response.json(); return res; } } buildUrl({ endpoint }) { return `${this.config.baseUrl}${endpoint}`; } buildHeaders({ headers }) { return { "Content-Type": "application/json", Authorization: `Bearer ${this.config.apiKey}`, ...headers }; } async makeRequest({ url, options, headers }) { return fetch(url, { method: options.method, headers, body: JSON.stringify(options.body) // signal: AbortSignal.timeout(this.config.timeout || 30000), }); } async handleErrorResponse({ response }) { let errorBody; try { errorBody = await response.json(); } catch (e) { errorBody = await response.text(); } throw APIError.generate( response.status, errorBody, response.statusText, response.headers ); } handleStreamResponse({ response }) { const controller = new AbortController(); const stream = Stream.fromSSEResponse(response, controller); return { stream, threadId: response.headers.get("lb-thread-id") }; } handleRunResponseStream({ response, rawResponse }) { const controller = new AbortController(); const streamSSE = Stream.fromSSEResponse(response, controller); const stream = streamSSE.toReadableStream(); const result = { stream, threadId: response.headers.get("lb-thread-id") }; if (rawResponse) { result.rawResponse = { headers: Object.fromEntries(response.headers.entries()) }; } return result; } async handleRunResponse({ response, threadId, rawResponse, endpoint }) { let isAgentRun = false; if (endpoint === "/v1/agent/run") isAgentRun = true; const generateResponse = await response.json(); const buildResponse = generateResponse.raw ? isAgentRun ? 
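// Result-shape sketch (illustrative; assumes OpenAI-style chat messages and an
// initialized Langbase client): when a generation endpoint is called with
// stream: true, send() resolves to the object built by handleRunResponseStream
// above.
//
//   const { stream, threadId, rawResponse } = await langbase.pipes.run({
//     name: 'summarizer',
//     messages: [{ role: 'user', content: 'Summarize this file.' }],
//     stream: true,
//     rawResponse: true,
//   });
//   // stream              -> newline-delimited JSON ReadableStream
//   // threadId            -> value of the "lb-thread-id" response header
//   // rawResponse.headers -> all response headers as a plain object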
{ output: generateResponse.output, ...generateResponse.raw } : { completion: generateResponse.completion, ...generateResponse.raw } : generateResponse; const result = { ...buildResponse }; if (threadId) { result.threadId = threadId; } if (rawResponse) { result.rawResponse = { headers: Object.fromEntries(response.headers.entries()) }; } return result; } async post(options) { return this.send({ ...options, method: "POST" }); } async get(options) { return this.send({ ...options, method: "GET" }); } async put(options) { return this.send({ ...options, method: "PUT" }); } async delete(options) { return this.send({ ...options, method: "DELETE" }); } }; // src/langbase/trace.ts var TraceManager = class { constructor() { this.traces = /* @__PURE__ */ new Map(); } createTrace(type, traceData = {}) { var _a; const traceId = crypto.randomUUID(); let trace; const createdAt = (/* @__PURE__ */ new Date()).toISOString(); const agentWorkflowId = typeof process !== "undefined" && ((_a = process.env) == null ? void 0 : _a.LANGBASE_AGENT_ID) ? process.env.LANGBASE_AGENT_ID : ""; if (type === "workflow") { trace = { workflow: { createdAt, id: traceId, agentWorkflowId, name: traceData.name || "", startTime: Date.now(), steps: [] }, entityAuthId: "" }; } else if (type === "agent") { trace = { agent: { ...traceData, createdAt, id: traceId } }; } else if (type === "chunk") { trace = { chunk: { ...traceData, createdAt, id: traceId } }; } else if (type === "memory") { trace = { memory: { ...traceData, createdAt, id: traceId } }; } else if (type === "parse") { trace = { parse: { ...traceData, createdAt, id: traceId } }; } else if (type === "embed") { trace = { embed: { ...traceData, createdAt, id: traceId } }; } else { throw new Error("Unknown trace type"); } this.traces.set(traceId, trace); return traceId; } addStep(traceId, step) { const trace = this.traces.get(traceId); if (trace && "workflow" in trace) { trace.workflow.steps.push(step); } } endTrace(traceId) { const trace = this.traces.get(traceId); if (trace && "workflow" in trace) { trace.workflow.endTime = Date.now(); trace.workflow.duration = trace.workflow.endTime - trace.workflow.startTime; } } getTrace(traceId) { return this.traces.get(traceId); } printTrace(traceId) { const trace = this.traces.get(traceId); if (!trace) return; if ("workflow" in trace) { const wf = trace.workflow; const duration = wf.endTime ? wf.endTime - wf.startTime : Date.now() - wf.startTime; console.log("\n\u{1F4CA} Workflow Trace:"); console.log(`Name: ${wf.name}`); console.log(`Duration: ${duration}ms`); console.log(`Start Time: ${new Date(wf.startTime).toISOString()}`); if (wf.endTime) { console.log(`End Time: ${new Date(wf.endTime).toISOString()}`); } console.log("\nSteps:"); wf.steps.forEach((step) => { console.log(` Step: ${step.name}`); console.log(` Duration: ${step.duration}ms`); if (step.traces && step.traces.length > 0) { console.log(` Traces:`, step.traces); } console.log(` Output:`, step.output); }); } else { console.log("\n\u{1F4CA} Primitive Trace:"); console.dir(trace, { depth: 4 }); } } }; // src/langbase/workflows.ts var _global = typeof global !== "undefined" ? global : typeof window !== "undefined" ? window : typeof self !== "undefined" ? self : typeof globalThis !== "undefined" ? 
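// Trace sketch (illustrative, internal API): TraceManager above keeps an
// in-memory map of workflow and primitive traces; Workflow uses it to record
// one step per .step() call and to compute durations.
//
//   const traces = new TraceManager();
//   const id = traces.createTrace('workflow', { name: 'demo' });
//   traces.addStep(id, {
//     name: 'step-1', output: 42, traces: null,
//     duration: 5, startTime: Date.now() - 5, endTime: Date.now(),
//   });
//   traces.endTrace(id);
//   traces.printTrace(id);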
globalThis : {}; _global._activeTraceCollector = _global._activeTraceCollector || null; _global._workflowDebugEnabled = _global._workflowDebugEnabled || false; var TimeoutError = class extends Error { constructor(stepId, timeout) { super(`Step "${stepId}" timed out after ${timeout}ms`); this.name = "TimeoutError"; } }; var Workflow = class { constructor(config) { // Optional this.originalMethods = /* @__PURE__ */ new Map(); var _a, _b; this.context = { outputs: {} }; this.debug = (_a = config == null ? void 0 : config.debug) != null ? _a : false; this.name = (_b = config == null ? void 0 : config.name) != null ? _b : "workflow"; this.langbase = config == null ? void 0 : config.langbase; if (this.langbase) { this.traceManager = new TraceManager(); this.traceId = this.traceManager.createTrace("workflow", { name: this.name }); _global._workflowDebugEnabled = this.debug; } this.step = this._step.bind(this); } /** * Replace a method in the Langbase instance with a traced version */ interceptMethod(obj, method, path = "") { if (!this.langbase) return; if (!obj || typeof obj[method] !== "function") return; const fullPath = path ? `${path}.${method}` : method; const originalMethod = obj[method]; if (!this.originalMethods.has(fullPath)) { this.originalMethods.set(fullPath, originalMethod); const debug = this.debug; obj[method] = async function(...args) { const lastArg = args[args.length - 1]; const newArgs = [...args]; if (lastArg && typeof lastArg === "object") { newArgs[newArgs.length - 1] = { ...lastArg, rawResponse: true }; } else { newArgs.push({ rawResponse: true }); } const result = await originalMethod.apply(this, newArgs); if (_global._activeTraceCollector) { let traceId; if (result && typeof result === "object") { if ("rawResponse" in result && result.rawResponse) { if (result.rawResponse.headers["lb-trace-id"]) { traceId = result.rawResponse.headers["lb-trace-id"]; } } if (traceId && _global._activeTraceCollector) { if (debug) { console.log( `\u{1F50D} Trace ID extracted: ${traceId}` ); } _global._activeTraceCollector(traceId); } } } return result; }; } } /** * Restore all original methods that were intercepted */ restoreOriginalMethods() { if (!this.langbase) return; this.originalMethods.forEach((originalMethod, path) => { const parts = path.split("."); const methodName = parts.pop(); let obj = this.langbase; for (const part of parts) { if (obj && typeof obj === "object" && part in obj) { obj = obj[part]; } else { return; } } if (obj && methodName in obj && typeof obj[methodName] === "function") { obj[methodName] = originalMethod; } }); this.originalMethods.clear(); } /** * Intercept all important methods in the Langbase instance */ setupMethodInterceptors() { if (!this.langbase) return; this.interceptMethod(this.langbase.agent, "run", "agent"); this.interceptMethod(this.langbase.pipes, "run", "pipes"); this.interceptMethod(this.langbase.pipe, "run", "pipe"); if (this.langbase.memories) { this.interceptMethod( this.langbase.memories, "retrieve", "memories" ); } if (this.langbase.memory) { this.interceptMethod(this.langbase.memory, "retrieve", "memory"); } if (this.langbase.tools) { this.interceptMethod(this.langbase.tools, "webSearch", "tools"); this.interceptMethod(this.langbase.tools, "crawl", "tools"); } if (this.langbase.tool) { this.interceptMethod(this.langbase.tool, "webSearch", "tool"); this.interceptMethod(this.langbase.tool, "crawl", "tool"); } this.interceptMethod(this.langbase, "embed"); this.interceptMethod(this.langbase, "chunk"); this.interceptMethod(this.langbase, 
"parse"); } async _step(config) { var _a; const stepStartTime = Date.now(); const stepTraces = []; const collectTrace = (traceId) => { if (this.debug) { console.log(`\u{1F4CB} Collected trace ID: ${traceId}`); } stepTraces.push(traceId); }; if (this.debug) { console.log(` \u{1F504} Starting step: ${config.id}`); console.time(`\u23F1\uFE0F Step ${config.id}`); if (config.timeout) console.log(`\u23F3 Timeout: ${config.timeout}ms`); if (config.retries) console.log(`\u{1F504} Retries: ${JSON.stringify(config.retries)}`); } let lastError = null; let attempt = 1; const maxAttempts = ((_a = config.retries) == null ? void 0 : _a.limit) ? config.retries.limit + 1 : 1; if (this.langbase) this.setupMethodInterceptors(); const previousTraceCollector = _global._activeTraceCollector; if (this.langbase) _global._activeTraceCollector = collectTrace; try { let stepPromise = config.run(); if (config.timeout) { stepPromise = this.withTimeout({ promise: stepPromise, timeout: config.timeout, stepId: config.id }); } const result = await stepPromise; this.context.outputs[config.id] = result; if (this.debug) { console.timeEnd(`\u23F1\uFE0F Step ${config.id}`); console.log(`\u{1F4E4} Output:`, result); if (stepTraces.length > 0) { console.log( `\u{1F4CB} Trace IDs (${stepTraces.length}):`, stepTraces ); } else { console.log(`\u{1F50D} No trace IDs captured for this step`); } } if (this.langbase && this.traceManager && this.traceId) { const stepEndTime = Date.now(); const stepTrace = { name: config.id, output: result, traces: stepTraces.length > 0 ? stepTraces : null, duration: stepEndTime - stepStartTime, startTime: stepStartTime, endTime: stepEndTime }; this.traceManager.addStep(this.traceId, stepTrace); } if (this.langbase) { this.restoreOriginalMethods(); _global._activeTraceCollector = previousTraceCollector; } return result; } catch (error) { if (this.langbase) { this.restoreOriginalMethods(); _global._activeTraceCollector = previousTraceCollector; } lastError = error; if (attempt < maxAttempts) { const delay = config.retries ? this.calculateDelay( config.retries.delay, attempt, config.retries.backoff ) : 0; if (this.debug) { console.log( `\u26A0\uFE0F Attempt ${attempt} failed, retrying in ${delay}ms...` ); console.error(error); } await this.sleep(delay); attempt++; return this._step(config); } else { if (this.debug) { console.timeEnd(`\u23F1\uFE0F Step ${config.id}`); console.error(`\u274C Failed step: ${config.id}`, error); } throw lastError; } } } async withTimeout({ promise, timeout, stepId }) { const timeoutPromise = new Promise((_, reject) => { setTimeout( () => reject(new TimeoutError(stepId, timeout)), timeout ); }); return Promise.race([promise, timeoutPromise]); } calculateDelay(baseDelay, attempt, backoff) { switch (backoff) { case "exponential": return baseDelay * Math.pow(2, attempt - 1); case "linear": return baseDelay * attempt; case "fixed": default: return baseDelay; } } async sleep(ms) { return new Promise((resolve) => setTimeout(resolve, ms)); } async end() { if (!this.langbase || !this.traceManager || !this.traceId) return; this.traceManager.endTrace(this.traceId); if (this.debug) { this.traceManager.printTrace(this.traceId); } const traceData = this.traceManager.getTrace(this.traceId); try { const res = await this.langbase.traces.create(traceData); if (!res || res.error) { console.error( `\u274C Trace upload failed: ${(res == null ? void 0 : res.status) || ""} ${(res == null ? 
void 0 : res.statusText) || ""}` ); } else if (this.debug) { console.log(`\u2705 Trace ${this.traceId} sent to collector`); } } catch (err) { console.error("\u274C Error while sending trace", err); } if (this.debug) { console.log("\n\u{1F50D} DEBUG: Final trace data:"); console.log(JSON.stringify(traceData, null, 2)); } } }; // src/langbase/langbase.ts var Langbase = class { constructor(options) { var _a, _b; this.baseUrl = (_a = options == null ? void 0 : options.baseUrl) != null ? _a : "https://api.langbase.com"; this.apiKey = (_b = options == null ? void 0 : options.apiKey) != null ? _b : ""; this.request = new Request({ apiKey: this.apiKey, baseUrl: this.baseUrl }); this.pipe = { list: this.listPipe.bind(this), create: this.createPipe.bind(this), update: this.updatePipe.bind(this), run: this.runPipe.bind(this) }; this.pipes = { list: this.listPipe.bind(this), create: this.createPipe.bind(this), update: this.updatePipe.bind(this), run: this.runPipe.bind(this) }; this.memory = { create: this.createMemory.bind(this), delete: this.deleteMemory.bind(this), retrieve: this.retrieveMemory.bind(this), list: this.listMemory.bind(this), documents: { list: this.listDocs.bind(this), delete: this.deleteDoc.bind(this), upload: this.uploadDocs.bind(this), embedding: { retry: this.retryDocEmbed.bind(this) } } }; this.memories = { create: this.createMemory.bind(this), delete: this.deleteMemory.bind(this), retrieve: this.retrieveMemory.bind(this), list: this.listMemory.bind(this), documents: { list: this.listDocs.bind(this), delete: this.deleteDoc.bind(this), upload: this.uploadDocs.bind(this), embeddings: { retry: this.retryDocEmbed.bind(this) } } }; this.tools = { crawl: this.webCrawl.bind(this), webSearch: this.webSearch.bind(this) }; this.tool = { crawl: this.webCrawl.bind(this), webSearch: this.webSearch.bind(this) }; this.embed = this.generateEmbeddings.bind(this); this.chunk = this.chunkDocument.bind(this); this.chunker = this.chunkDocument.bind(this); this.parse = this.parseDocument.bind(this); this.parser = this.parseDocument.bind(this); this.threads = { create: this.createThread.bind(this), update: this.updateThread.bind(this), get: this.getThread.bind(this), delete: this.deleteThread.bind(this), append: this.appendThreadMessages.bind(this), messages: { list: this.listThreadMessages.bind(this) } }; this.agent = { run: this.runAgent.bind(this) }; this.workflow = (config = {}) => new Workflow({ ...config, langbase: this }); this.traces = { create: this.createTrace.bind(this) }; } async runPipe(options) { var _a; if (!((_a = options.name) == null ? void 0 : _a.trim()) && !options.apiKey) { throw new Error( "Pipe name or Pipe API key is required to run the pipe." ); } if (typeof options.stream === "undefined") { delete options.stream; } if (options.apiKey) { this.request = new Request({ apiKey: options.apiKey, baseUrl: this.baseUrl }); } return this.request.post({ endpoint: "/v1/pipes/run", body: options, headers: { ...options.llmKey && { "LB-LLM-KEY": options.llmKey } } }); } /** * Creates a new pipe on Langbase. * * @param {PipeCreateOptions} options - The options for creating the pipe. * @returns {Promise<PipeCreateResponse>} A promise that resolves to the response of the pipe creation. */ async createPipe(options) { return this.request.post({ endpoint: "/v1/pipes", body: options }); } /** * Updates a pipe on Langbase. * * @param {PipeUpdateOptions} options - The options for updating the pipe. 
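* @example
* // Sketch: fields other than `name` are illustrative; the options object is
* // posted as-is to /v1/pipes/{name}.
* await langbase.pipes.update({
*   name: 'summarizer',
*   description: 'Updated description',
* });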
* @returns {Promise<PipeUpdateResponse>} A promise that resolves to the response of the update operation. */ async updatePipe(options) { return this.request.post({ endpoint: `/v1/pipes/${options.name}`, body: options }); } /** * Retrieves a list of pipes. * * @returns {Promise<PipeListResponse[]>} A promise that resolves to an array of PipeListResponse objects. */ async listPipe() { return this.request.get({ endpoint: "/v1/pipes" }); } /** * Creates a new memory on Langbase. * * @param {MemoryCreateOptions} options - The options to create the memory instance. * @param {string} options.name - The name of the memory. * @param {string} options.description - The description of the memory. * @returns {Promise<MemoryCreateResponse>} A promise that resolves to the response of the memory creation. */ async createMemory(options) { return this.request.post({ endpoint: "/v1/memory", body: options }); } /** * Retrieves a list of all memories on Langbase. * * @returns {Promise<MemoryListResponse[]>} A promise that resolves to an array of memory list responses. */ async listMemory() { return this.request.get({ endpoint: "/v1/memory" }); } /** * Deletes a memory on Langbase. * * @param {MemoryDeleteOptions} options - The options for deleting the memory resource. * @param {string} options.name - The name of the memory to delete. * @returns {Promise<MemoryDeleteResponse>} A promise that resolves to the response of the delete operation. */ async deleteMemory(options) { return this.request.delete({ endpoint: `/v1/memory/${options.name}` }); } /** * Retrieves similar text from the memory. * * @param {MemoryRetrieveOptions} options - The options to use for retrieving memory data. * @param {string} options.query - The query text to search for. * @param {object[]} options.memory - The memory to search in. * @param {number} [options.topK] - The number of similar texts to retrieve. * @returns A promise that resolves to an array of `MemoryRetrieveResponse` objects. */ async retrieveMemory(options) { return this.request.post({ endpoint: "/v1/memory/retrieve", body: options }); } /** * Retrieves a list of documents inside a memory. * * @param {MemoryListDocOptions} options - The options for listing documents, including the memory name. * @param {string} options.memoryName - The name of the memory to list documents from. * @returns A promise that resolves to an array of `MemoryListDocResponse` objects. */ async listDocs(options) { return this.request.get({ endpoint: `/v1/memory/${options.memoryName}/documents` }); } /** * Deletes a document from a memory. * * @param {MemoryDeleteDocOptions} options - The options for deleting the document. * @param {string} options.memoryName - The name of the memory to delete the document from. * @param {string} options.documentName - The name of the document to delete. * @returns A promise that resolves to a `MemoryDeleteDocResponse` indicating the result of the delete operation. */ async deleteDoc(options) { return this.request.delete({ endpoint: `/v1/memory/${options.memoryName}/documents/${options.documentName}` }); } /** * Uploads a document to the memory. * * @param {MemoryUploadDocOptions} options - The options for uploading the document. * @param {string} options.memoryName - The name of the memory to upload the document to. * @param {string} options.fileName - The name of the file being uploaded. * @param {object} [options.meta] - Optional metadata associated with the document. * @param {string} options.contentType - The MIME type of the file being uploaded. 
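* @example
* // Sketch: the implementation reads `documentName` and `document` (rather
* // than `fileName`/`file`) and PUTs the file body to a signed upload URL.
* await langbase.memories.documents.upload({
*   memoryName: 'product-docs',
*   documentName: 'guide.md',
*   contentType: 'text/markdown',
*   document: Buffer.from('# Guide'),
*   meta: { category: 'docs' }, // optional, illustrative metadata
* });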
* @param {Blob | Buffer} options.file - The file content to be uploaded. * @returns {Promise<Response>} The response from the upload request. * @throws Will throw an error if the upload fails. */ async uploadDocs(options) { try { const response = await this.request.post({ endpoint: `/v1/memory/documents`, body: { memoryName: options.memoryName, fileName: options.documentName, meta: options.meta } }); const uploadUrl = response.signedUrl; return await fetch(uploadUrl, { method: "PUT", headers: { Authorization: `Bearer ${this.apiKey}`, "Content-Type": options.contentType }, body: options.document }); } catch (error) { throw error; } } /** * Retries the embedding process for a specific document in memory. * * @param options - The options required to retry the document embedding. * @param options.memoryName - The name of the memory containing the document. * @param options.documentName - The name of the document to retry embedding for. * @returns A promise that resolves to the response of the retry operation. */ async retryDocEmbed(options) { return this.request.get({ endpoint: `/v1/memory/${options.memoryName}/documents/${options.documentName}/embeddings/retry` }); } /** * Performs a web search using the Langbase API. * * @param options - Web search configuration options * @param options.apiKey - Optional API key for web search authentication * @returns Promise that resolves to an array of web search results */ async webSearch(options) { return this.request.post({ endpoint: "/v1/tools/web-search", body: options, headers: { "LB-WEB-SEARCH-KEY": options.apiKey } }); } /** * Performs a web crawls on target websites using the Langbase API. * * @param options - Crawl configuration options * @returns An array of responses containing data from the crawl operation. */ async webCrawl(options) { return this.request.post({ endpoint: "/v1/tools/crawl", body: options, headers: { "LB-CRAWL-KEY": options.apiKey } }); } /** * Generates embeddings for the given input using the LangBase API. * * @param options - Embed options * @returns Promise that resolves to the embedding response containing vector representations */ async generateEmbeddings(options) { return this.request.post({ endpoint: "/v1/embed", body: options }); } /** * Splits a given document into multiple chunks using the Langbase API. * * @param options - The chunking options. * @param options.document - The document to be chunked. * @param options.chunk_max_length - An optional maximum length for each chunk. * @param options.chunk_overlap - An optional number of overlapping characters between chunks. * @param options.separator - An optional separator used to split the document. * @returns A promise that resolves to the chunked document response. */ async chunkDocument(options) { return this.request.post({ endpoint: "/v1/chunker", body: options }); } /** * Parses a document using the Langbase API. 
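* @example
* // Sketch: parse a PDF via the /v1/parser endpoint (pdfBuffer defined elsewhere).
* const parsed = await langbase.parser({
*   document: pdfBuffer,               // Buffer, File, FormData, or ReadableStream
*   documentName: 'report.pdf',
*   contentType: 'application/pdf',
* });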
* * @param options - The options for parsing the document * @param options.document - The document to be parsed * @param options.documentName - The name of the document * @param options.contentType - The content type of the document * * @returns A promise that resolves to the parse response from the API * * @throws {Error} If the API request fails */ async parseDocument(options) { const formData = await convertDocToFormData({ document: options.document, documentName: options.documentName, contentType: options.contentType }); const response = await fetch(`${this.baseUrl}/v1/parser`, { method: "POST", headers: { Authorization: `Bearer ${this.apiKey}` }, body: formData }); return response.json(); } /** * Creates a new thread with specified options. * @param {ThreadsCreate} options - The options object containing thread creation parameters. * @returns {Promise<ThreadsBaseResponse>} A promise that resolves to the created thread response. * @private */ async createThread(options) { return this.request.post({ endpoint: "/v1/threads", body: options }); } /** * Updates an existing thread with the provided options. * * @param options - The options to update the thread with * @param options.threadId - The ID of the thread to update * @returns A promise that resolves to the updated thread response * @throws {Error} If the request fails */ async updateThread(options) { return this.request.post({ endpoint: `/v1/threads/${options.threadId}`, body: options }); } /** * Retrieves a thread by its ID. * @param {ThreadsGet} options - The options object containing the thread ID. * @param {string} options.threadId - The unique identifier of the thread to retrieve. * @returns {Promise<ThreadsBaseResponse>} A promise that resolves to the thread data. */ async getThread(options) { return this.request.get({ endpoint: `/v1/threads/${options.threadId}` }); } async deleteThread(options) { return this.request.delete({ endpoint: `/v1/threads/${options.threadId}` }); } async appendThreadMessages(options) { return this.request.post({ endpoint: `/v1/threads/${options.threadId}/messages`, body: options.messages }); } async listThreadMessages(options) { return this.request.get({ endpoint: `/v1/threads/${options.threadId}/messages` }); } async runAgent(options) { if (!options.apiKey) { throw new Error("LLM API key is required to run this LLM."); } if (typeof options.stream === "undefined") { delete options.stream; } return this.request.post({ endpoint: "/v1/agent/run", body: options, headers: { ...options.apiKey && { "LB-LLM-Key": options.apiKey } } }); } /** * Creates a new trace on Langbase. * * @param {any} trace - The trace data to send. * @returns {Promise<any>} A promise that resolves to the response of the trace creation. */ async createTrace(trace) { return this.request.post({ endpoint: "/v1/traces", body: trace }); } }; // src/pipes/pipes.ts var Pipe = class { constructor(options) { const baseUrl = "https://api.langbase.com"; this.request = new Request({ apiKey: options.apiKey, baseUrl }); } /** * @deprecated This method is deprecated and will be removed in a future version. * * Please use `langbase.pipes.run()` instead * @see https://langbase.com/docs/sdk/pipe/run */ async generateText(options) { return this.request.post({ endpoint: options.chat ? "/beta/chat" : "/beta/generate", body: { ...options, stream: false } }); } /** * @deprecated This method is deprecated and will be removed in a future version. 
* * Please use `langbase.pipes.run()` instead * @see https://langbase.com/docs/sdk/pipe/run */ async streamText(options) { return this.request.post({ endpoint: options.chat ? "/beta/chat" : "/beta/generate", body: { ...options, stream: true } }); } }; var printStreamToStdout = async (stream) => { var _a, _b; for await (const chunk of stream) { const textPart = ((_b = (_a = chunk.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.content) || ""; process.stdout.write(textPart); } }; // src/lib/helpers/index.ts var import_ChatCompletionStream = require("openai/lib/ChatCompletionStream"); var import_streaming = require("openai/streaming"); var fromReadableStream = (readableStream)
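// End-to-end sketch (illustrative; assumes OpenAI-style chat messages): create
// a client, run a pipe with stream: true, and consume the stream.
// fromReadableStream (exported at the top of this file) is used here to turn
// the raw byte stream back into typed chunks, which printStreamToStdout above
// writes to process.stdout.
//
//   import { Langbase, fromReadableStream, printStreamToStdout } from 'langbase';
//
//   const langbase = new Langbase({ apiKey: process.env.LANGBASE_API_KEY });
//   const { stream } = await langbase.pipes.run({
//     name: 'summarizer',
//     messages: [{ role: 'user', content: 'Hello!' }],
//     stream: true,
//   });
//   await printStreamToStdout(fromReadableStream(stream));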