/**
 * langsmith — client library for connecting to the LangSmith LLM
 * tracing and evaluation platform.
 */
import * as uuid from "uuid";
import { AsyncCaller } from "./utils/async_caller.js";
import { convertLangChainMessageToExample, isLangChainMessage, } from "./utils/messages.js";
import { getEnvironmentVariable, getLangChainEnvVarsMetadata, getLangSmithEnvironmentVariable, getRuntimeEnvironment, } from "./utils/env.js";
import { __version__ } from "./index.js";
import { assertUuid } from "./utils/_uuid.js";
import { warnOnce } from "./utils/warn.js";
import { parsePromptIdentifier } from "./utils/prompts.js";
import { raiseForStatus } from "./utils/error.js";
import { _globalFetchImplementationIsNodeFetch, _getFetchImplementation, } from "./singletons/fetch.js";
import { serialize as serializePayloadForTracing } from "./utils/fast-safe-stringify/index.js";
/**
 * Merge the current runtime environment and LangChain-related environment
 * variables into a run-create payload's `extra` field.
 *
 * Mutates and returns the given run. Values already present on
 * `extra.runtime` / `extra.metadata` win over environment-derived ones, and
 * `revision_id` is only included when present on the run or in the env
 * metadata.
 */
export function mergeRuntimeEnvIntoRunCreate(run) {
    const extra = run.extra ?? {};
    const mergedRuntime = {
        ...getRuntimeEnvironment(),
        ...extra?.runtime,
    };
    const envMetadata = getLangChainEnvVarsMetadata();
    const mergedMetadata = { ...envMetadata };
    if (envMetadata.revision_id || run.revision_id) {
        // Run-level revision wins over the env-derived one.
        mergedMetadata.revision_id = run.revision_id ?? envMetadata.revision_id;
    }
    Object.assign(mergedMetadata, extra.metadata);
    run.extra = {
        ...extra,
        runtime: mergedRuntime,
        metadata: mergedMetadata,
    };
    return run;
}
/**
 * Resolve the tracing sampling rate from an explicit config value or the
 * LANGSMITH_TRACING_SAMPLING_RATE environment variable.
 *
 * @param configRate Optional explicit rate; takes precedence over the env var.
 * @returns A number in [0, 1], or undefined when no rate is configured.
 * @throws Error when the configured value is not a number between 0 and 1.
 */
const getTracingSamplingRate = (configRate) => {
    const samplingRateStr = configRate?.toString() ??
        getLangSmithEnvironmentVariable("TRACING_SAMPLING_RATE");
    if (samplingRateStr === undefined) {
        return undefined;
    }
    const samplingRate = parseFloat(samplingRateStr);
    // Reject non-numeric strings as well as out-of-range values; previously a
    // non-numeric value produced NaN, which slipped past the range check and
    // was returned as the sampling rate.
    if (Number.isNaN(samplingRate) || samplingRate < 0 || samplingRate > 1) {
        throw new Error(`LANGSMITH_TRACING_SAMPLING_RATE must be between 0 and 1 if set. Got: ${samplingRate}`);
    }
    return samplingRate;
};
// utility functions
/** Returns true when the URL's host is a local loopback address. */
const isLocalhost = (url) => {
    const withoutProtocol = url.replace("http://", "").replace("https://", "");
    const host = withoutProtocol.split("/")[0].split(":")[0];
    return ["localhost", "127.0.0.1", "::1"].includes(host);
};
/** Collect every item from an (async) iterable into an array. */
async function toArray(iterable) {
    const collected = [];
    for await (const element of iterable) {
        collected.push(element);
    }
    return collected;
}
/**
 * Trim whitespace and strip one surrounding pair of matching single or
 * double quotes. Passes undefined through unchanged.
 */
function trimQuotes(str) {
    if (str === undefined) {
        return undefined;
    }
    const trimmed = str.trim();
    return trimmed
        .replace(/^"(.*)"$/, "$1")
        .replace(/^'(.*)'$/, "$1");
}
/**
 * Failed-response hook: on a 429, sleep for the duration given by the
 * `retry-after` header (seconds; defaults to 30) and return true so the
 * caller retries. Any other response falls through to normal status checks.
 */
const handle429 = async (response) => {
    if (response?.status !== 429) {
        return false;
    }
    const retryAfterMs = parseInt(response.headers.get("retry-after") ?? "30", 10) * 1000;
    if (retryAfterMs > 0) {
        await new Promise((resolve) => setTimeout(resolve, retryAfterMs));
        // Return directly after calling this check
        return true;
    }
    // Invalid/zero retry-after: fall back to existing status checks.
    return false;
};
/** Round numeric feedback scores to 4 decimal places; pass others through. */
function _formatFeedbackScore(score) {
    return typeof score === "number" ? Number(score.toFixed(4)) : score;
}
/**
 * FIFO queue of pending run create/update operations. Tracks the total
 * serialized payload size in bytes so batches can be cut at a byte budget.
 */
export class AutoBatchQueue {
    constructor() {
        // Queued operations, oldest first.
        this.items = [];
        // Sum of the serialized sizes of all queued payloads.
        this.sizeBytes = 0;
    }
    /** Returns the oldest queued entry without removing it. */
    peek() {
        return this.items[0];
    }
    /**
     * Enqueue an operation. Returns a promise that resolves once the item's
     * batch has been sent (resolved by the callback returned from pop()).
     */
    push(item) {
        let resolveItem;
        const itemPromise = new Promise((resolve) => {
            // The executor runs synchronously with promise creation, so
            // resolveItem is assigned before it is used below.
            resolveItem = resolve;
        });
        const serializedSize = serializePayloadForTracing(item.item, `Serializing run with id: ${item.item.id}`).length;
        this.items.push({
            action: item.action,
            payload: item.item,
            itemPromiseResolve: resolveItem,
            itemPromise,
            size: serializedSize,
        });
        this.sizeBytes += serializedSize;
        return itemPromise;
    }
    /**
     * Dequeue entries whose combined serialized size stays under
     * upToSizeBytes. If the head entry alone exceeds the budget it is
     * returned as its own single-item batch so the queue never stalls.
     * Returns [batch, done] where done() resolves each entry's promise.
     */
    pop(upToSizeBytes) {
        if (upToSizeBytes < 1) {
            throw new Error("Number of bytes to pop off may not be less than 1.");
        }
        const popped = [];
        let poppedSizeBytes = 0;
        while (this.items.length > 0 &&
            poppedSizeBytes + (this.peek()?.size ?? 0) < upToSizeBytes) {
            const entry = this.items.shift();
            if (entry) {
                popped.push(entry);
                poppedSizeBytes += entry.size;
                this.sizeBytes -= entry.size;
            }
        }
        // Oversized head that would never fit: emit it alone.
        if (popped.length === 0 && this.items.length > 0) {
            const entry = this.items.shift();
            popped.push(entry);
            poppedSizeBytes += entry.size;
            this.sizeBytes -= entry.size;
        }
        return [
            popped.map((entry) => ({ action: entry.action, item: entry.payload })),
            () => popped.forEach((entry) => entry.itemPromiseResolve()),
        ];
    }
}
// 20 MB
export const DEFAULT_BATCH_SIZE_LIMIT_BYTES = 20_971_520;
// Timeout (ms) for the GET /info request used to discover server capabilities.
const SERVER_INFO_REQUEST_TIMEOUT = 2500;
export class Client {
constructor(config = {}) {
Object.defineProperty(this, "apiKey", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "apiUrl", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "webUrl", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "caller", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "batchIngestCaller", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "timeout_ms", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "_tenantId", {
enumerable: true,
configurable: true,
writable: true,
value: null
});
Object.defineProperty(this, "hideInputs", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "hideOutputs", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "tracingSampleRate", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "filteredPostUuids", {
enumerable: true,
configurable: true,
writable: true,
value: new Set()
});
Object.defineProperty(this, "autoBatchTracing", {
enumerable: true,
configurable: true,
writable: true,
value: true
});
Object.defineProperty(this, "autoBatchQueue", {
enumerable: true,
configurable: true,
writable: true,
value: new AutoBatchQueue()
});
Object.defineProperty(this, "autoBatchTimeout", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "autoBatchAggregationDelayMs", {
enumerable: true,
configurable: true,
writable: true,
value: 250
});
Object.defineProperty(this, "batchSizeBytesLimit", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "fetchOptions", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "settings", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "blockOnRootRunFinalization", {
enumerable: true,
configurable: true,
writable: true,
value: getEnvironmentVariable("LANGSMITH_TRACING_BACKGROUND") === "false"
});
Object.defineProperty(this, "traceBatchConcurrency", {
enumerable: true,
configurable: true,
writable: true,
value: 5
});
Object.defineProperty(this, "_serverInfo", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
// eslint-disable-next-line @typescript-eslint/no-explicit-any
Object.defineProperty(this, "_getServerInfoPromise", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "manualFlushMode", {
enumerable: true,
configurable: true,
writable: true,
value: false
});
Object.defineProperty(this, "debug", {
enumerable: true,
configurable: true,
writable: true,
value: getEnvironmentVariable("LANGSMITH_DEBUG") === "true"
});
const defaultConfig = Client.getDefaultClientConfig();
this.tracingSampleRate = getTracingSamplingRate(config.tracingSamplingRate);
this.apiUrl = trimQuotes(config.apiUrl ?? defaultConfig.apiUrl) ?? "";
if (this.apiUrl.endsWith("/")) {
this.apiUrl = this.apiUrl.slice(0, -1);
}
this.apiKey = trimQuotes(config.apiKey ?? defaultConfig.apiKey);
this.webUrl = trimQuotes(config.webUrl ?? defaultConfig.webUrl);
if (this.webUrl?.endsWith("/")) {
this.webUrl = this.webUrl.slice(0, -1);
}
this.timeout_ms = config.timeout_ms ?? 90_000;
this.caller = new AsyncCaller({
...(config.callerOptions ?? {}),
debug: config.debug ?? this.debug,
});
this.traceBatchConcurrency =
config.traceBatchConcurrency ?? this.traceBatchConcurrency;
if (this.traceBatchConcurrency < 1) {
throw new Error("Trace batch concurrency must be positive.");
}
this.debug = config.debug ?? this.debug;
this.batchIngestCaller = new AsyncCaller({
maxRetries: 2,
maxConcurrency: this.traceBatchConcurrency,
...(config.callerOptions ?? {}),
onFailedResponseHook: handle429,
debug: config.debug ?? this.debug,
});
this.hideInputs =
config.hideInputs ?? config.anonymizer ?? defaultConfig.hideInputs;
this.hideOutputs =
config.hideOutputs ?? config.anonymizer ?? defaultConfig.hideOutputs;
this.autoBatchTracing = config.autoBatchTracing ?? this.autoBatchTracing;
this.blockOnRootRunFinalization =
config.blockOnRootRunFinalization ?? this.blockOnRootRunFinalization;
this.batchSizeBytesLimit = config.batchSizeBytesLimit;
this.fetchOptions = config.fetchOptions || {};
this.manualFlushMode = config.manualFlushMode ?? this.manualFlushMode;
}
static getDefaultClientConfig() {
const apiKey = getLangSmithEnvironmentVariable("API_KEY");
const apiUrl = getLangSmithEnvironmentVariable("ENDPOINT") ??
"https://api.smith.langchain.com";
const hideInputs = getLangSmithEnvironmentVariable("HIDE_INPUTS") === "true";
const hideOutputs = getLangSmithEnvironmentVariable("HIDE_OUTPUTS") === "true";
return {
apiUrl: apiUrl,
apiKey: apiKey,
webUrl: undefined,
hideInputs: hideInputs,
hideOutputs: hideOutputs,
};
}
getHostUrl() {
if (this.webUrl) {
return this.webUrl;
}
else if (isLocalhost(this.apiUrl)) {
this.webUrl = "http://localhost:3000";
return this.webUrl;
}
else if (this.apiUrl.endsWith("/api/v1")) {
this.webUrl = this.apiUrl.replace("/api/v1", "");
return this.webUrl;
}
else if (this.apiUrl.includes("/api") &&
!this.apiUrl.split(".", 1)[0].endsWith("api")) {
this.webUrl = this.apiUrl.replace("/api", "");
return this.webUrl;
}
else if (this.apiUrl.split(".", 1)[0].includes("dev")) {
this.webUrl = "https://dev.smith.langchain.com";
return this.webUrl;
}
else if (this.apiUrl.split(".", 1)[0].includes("eu")) {
this.webUrl = "https://eu.smith.langchain.com";
return this.webUrl;
}
else if (this.apiUrl.split(".", 1)[0].includes("beta")) {
this.webUrl = "https://beta.smith.langchain.com";
return this.webUrl;
}
else {
this.webUrl = "https://smith.langchain.com";
return this.webUrl;
}
}
get headers() {
const headers = {
"User-Agent": `langsmith-js/${__version__}`,
};
if (this.apiKey) {
headers["x-api-key"] = `${this.apiKey}`;
}
return headers;
}
async processInputs(inputs) {
if (this.hideInputs === false) {
return inputs;
}
if (this.hideInputs === true) {
return {};
}
if (typeof this.hideInputs === "function") {
return this.hideInputs(inputs);
}
return inputs;
}
async processOutputs(outputs) {
if (this.hideOutputs === false) {
return outputs;
}
if (this.hideOutputs === true) {
return {};
}
if (typeof this.hideOutputs === "function") {
return this.hideOutputs(outputs);
}
return outputs;
}
async prepareRunCreateOrUpdateInputs(run) {
const runParams = { ...run };
if (runParams.inputs !== undefined) {
runParams.inputs = await this.processInputs(runParams.inputs);
}
if (runParams.outputs !== undefined) {
runParams.outputs = await this.processOutputs(runParams.outputs);
}
return runParams;
}
async _getResponse(path, queryParams) {
const paramsString = queryParams?.toString() ?? "";
const url = `${this.apiUrl}${path}?${paramsString}`;
const response = await this.caller.call(_getFetchImplementation(this.debug), url, {
method: "GET",
headers: this.headers,
signal: AbortSignal.timeout(this.timeout_ms),
...this.fetchOptions,
});
await raiseForStatus(response, `Failed to fetch ${path}`);
return response;
}
async _get(path, queryParams) {
const response = await this._getResponse(path, queryParams);
return response.json();
}
/**
 * Iterate a limit/offset-paginated GET endpoint, yielding each page of
 * items. Stops when a page comes back empty or shorter than the limit.
 * NOTE: mutates the passed queryParams ("offset"/"limit") between requests.
 *
 * @param path - API path to fetch.
 * @param queryParams - Initial query parameters; may pre-set offset/limit.
 * @param transform - Optional mapper applied to each raw JSON page.
 */
async *_getPaginated(path, queryParams = new URLSearchParams(), transform) {
    let offset = Number(queryParams.get("offset")) || 0;
    const limit = Number(queryParams.get("limit")) || 100;
    while (true) {
        queryParams.set("offset", String(offset));
        queryParams.set("limit", String(limit));
        const url = `${this.apiUrl}${path}?${queryParams}`;
        const response = await this.caller.call(_getFetchImplementation(this.debug), url, {
            method: "GET",
            headers: this.headers,
            signal: AbortSignal.timeout(this.timeout_ms),
            ...this.fetchOptions,
        });
        await raiseForStatus(response, `Failed to fetch ${path}`);
        const items = transform
            ? transform(await response.json())
            : await response.json();
        if (items.length === 0) {
            break;
        }
        yield items;
        // A short page means the server has no more results.
        if (items.length < limit) {
            break;
        }
        offset += items.length;
    }
}
/**
 * Iterate a cursor-paginated endpoint, yielding `responseBody[dataKey]` for
 * each page and following `cursors.next` until the server stops returning
 * one. The request body is re-sent on every page with the updated cursor.
 *
 * @param path - API path to request.
 * @param body - Base request body (copied; the copy carries the cursor).
 * @param requestMethod - HTTP method, defaults to POST.
 * @param dataKey - Response key holding the page's items, defaults to "runs".
 */
async *_getCursorPaginatedList(path, body = null, requestMethod = "POST", dataKey = "runs") {
    const bodyParams = body ? { ...body } : {};
    while (true) {
        const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}${path}`, {
            method: requestMethod,
            headers: { ...this.headers, "Content-Type": "application/json" },
            signal: AbortSignal.timeout(this.timeout_ms),
            ...this.fetchOptions,
            body: JSON.stringify(bodyParams),
        });
        const responseBody = await response.json();
        if (!responseBody) {
            break;
        }
        if (!responseBody[dataKey]) {
            break;
        }
        yield responseBody[dataKey];
        const cursors = responseBody.cursors;
        if (!cursors) {
            break;
        }
        if (!cursors.next) {
            break;
        }
        // Advance to the next page.
        bodyParams.cursor = cursors.next;
    }
}
// Allows mocking for tests
_shouldSample() {
if (this.tracingSampleRate === undefined) {
return true;
}
return Math.random() < this.tracingSampleRate;
}
_filterForSampling(runs, patch = false) {
if (this.tracingSampleRate === undefined) {
return runs;
}
if (patch) {
const sampled = [];
for (const run of runs) {
if (!this.filteredPostUuids.has(run.id)) {
sampled.push(run);
}
else {
this.filteredPostUuids.delete(run.id);
}
}
return sampled;
}
else {
// For new runs, sample at trace level to maintain consistency
const sampled = [];
for (const run of runs) {
const traceId = run.trace_id ?? run.id;
// If we've already made a decision about this trace, follow it
if (this.filteredPostUuids.has(traceId)) {
continue;
}
// For new traces, apply sampling
if (run.id === traceId) {
if (this._shouldSample()) {
sampled.push(run);
}
else {
this.filteredPostUuids.add(traceId);
}
}
else {
// Child runs follow their trace's sampling decision
sampled.push(run);
}
}
return sampled;
}
}
async _getBatchSizeLimitBytes() {
const serverInfo = await this._ensureServerInfo();
return (this.batchSizeBytesLimit ??
serverInfo.batch_ingest_config?.size_limit_bytes ??
DEFAULT_BATCH_SIZE_LIMIT_BYTES);
}
async _getMultiPartSupport() {
const serverInfo = await this._ensureServerInfo();
return (serverInfo.instance_flags?.dataset_examples_multipart_enabled ?? false);
}
drainAutoBatchQueue(batchSizeLimit) {
const promises = [];
while (this.autoBatchQueue.items.length > 0) {
const [batch, done] = this.autoBatchQueue.pop(batchSizeLimit);
if (!batch.length) {
done();
break;
}
const batchPromise = this._processBatch(batch, done).catch(console.error);
promises.push(batchPromise);
}
return Promise.all(promises);
}
async _processBatch(batch, done) {
if (!batch.length) {
done();
return;
}
try {
const ingestParams = {
runCreates: batch
.filter((item) => item.action === "create")
.map((item) => item.item),
runUpdates: batch
.filter((item) => item.action === "update")
.map((item) => item.item),
};
const serverInfo = await this._ensureServerInfo();
if (serverInfo?.batch_ingest_config?.use_multipart_endpoint) {
await this.multipartIngestRuns(ingestParams);
}
else {
await this.batchIngestRuns(ingestParams);
}
}
finally {
done();
}
}
/**
 * Queue a run create/update for auto-batched ingestion.
 *
 * Resets the pending aggregation timer, pushes the item onto the queue,
 * drains immediately (in the background) if the queue exceeds the byte
 * budget, and otherwise re-arms a short timer so bursts of small items get
 * grouped into one batch. Returns a promise that resolves once the item's
 * batch has been sent.
 */
async processRunOperation(item) {
    clearTimeout(this.autoBatchTimeout);
    this.autoBatchTimeout = undefined;
    if (item.action === "create") {
        item.item = mergeRuntimeEnvIntoRunCreate(item.item);
    }
    const itemPromise = this.autoBatchQueue.push(item);
    if (this.manualFlushMode) {
        // Rely on manual flushing in serverless environments
        return itemPromise;
    }
    const sizeLimitBytes = await this._getBatchSizeLimitBytes();
    if (this.autoBatchQueue.sizeBytes > sizeLimitBytes) {
        // Over budget: flush in the background without awaiting.
        void this.drainAutoBatchQueue(sizeLimitBytes);
    }
    if (this.autoBatchQueue.items.length > 0) {
        // Re-arm the aggregation timer for whatever remains queued.
        this.autoBatchTimeout = setTimeout(() => {
            this.autoBatchTimeout = undefined;
            void this.drainAutoBatchQueue(sizeLimitBytes);
        }, this.autoBatchAggregationDelayMs);
    }
    return itemPromise;
}
async _getServerInfo() {
const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/info`, {
method: "GET",
headers: { Accept: "application/json" },
signal: AbortSignal.timeout(SERVER_INFO_REQUEST_TIMEOUT),
...this.fetchOptions,
});
await raiseForStatus(response, "get server info");
const json = await response.json();
if (this.debug) {
console.log("\n=== LangSmith Server Configuration ===\n" +
JSON.stringify(json, null, 2) +
"\n");
}
return json;
}
/**
 * Fetch server info at most once, memoizing both the in-flight promise and
 * the result. On failure a warning is logged and `{}` is returned; the
 * memoized promise is then cleared so a later call can retry the fetch.
 */
async _ensureServerInfo() {
    if (this._getServerInfoPromise === undefined) {
        this._getServerInfoPromise = (async () => {
            if (this._serverInfo === undefined) {
                try {
                    this._serverInfo = await this._getServerInfo();
                }
                catch (e) {
                    console.warn(`[WARNING]: LangSmith failed to fetch info on supported operations with status code ${e.status}. Falling back to batch operations and default limits.`);
                }
            }
            return this._serverInfo ?? {};
        })();
    }
    return this._getServerInfoPromise.then((serverInfo) => {
        // Fetch failed (nothing cached): clear the promise to allow a retry.
        if (this._serverInfo === undefined) {
            this._getServerInfoPromise = undefined;
        }
        return serverInfo;
    });
}
async _getSettings() {
if (!this.settings) {
this.settings = this._get("/settings");
}
return await this.settings;
}
/**
* Flushes current queued traces.
*/
async flush() {
const sizeLimitBytes = await this._getBatchSizeLimitBytes();
await this.drainAutoBatchQueue(sizeLimitBytes);
}
async createRun(run) {
if (!this._filterForSampling([run]).length) {
return;
}
const headers = { ...this.headers, "Content-Type": "application/json" };
const session_name = run.project_name;
delete run.project_name;
const runCreate = await this.prepareRunCreateOrUpdateInputs({
session_name,
...run,
start_time: run.start_time ?? Date.now(),
});
if (this.autoBatchTracing &&
runCreate.trace_id !== undefined &&
runCreate.dotted_order !== undefined) {
void this.processRunOperation({
action: "create",
item: runCreate,
}).catch(console.error);
return;
}
const mergedRunCreateParam = mergeRuntimeEnvIntoRunCreate(runCreate);
const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/runs`, {
method: "POST",
headers,
body: serializePayloadForTracing(mergedRunCreateParam, `Creating run with id: ${mergedRunCreateParam.id}`),
signal: AbortSignal.timeout(this.timeout_ms),
...this.fetchOptions,
});
await raiseForStatus(response, "create run", true);
}
/**
 * Batch ingest/upsert multiple runs in the Langsmith system.
 *
 * Creates and updates targeting the same run id are merged into a single
 * create before sending. Used when the server does not support the
 * multipart ingestion endpoint.
 * @param runs
 */
async batchIngestRuns({ runCreates, runUpdates, }) {
    if (runCreates === undefined && runUpdates === undefined) {
        return;
    }
    // Apply input/output hiding to every payload first.
    let preparedCreateParams = await Promise.all(runCreates?.map((create) => this.prepareRunCreateOrUpdateInputs(create)) ?? []);
    let preparedUpdateParams = await Promise.all(runUpdates?.map((update) => this.prepareRunCreateOrUpdateInputs(update)) ?? []);
    // Fold each update into its matching create (by run id) so every run is
    // sent once; updates without a matching create stay standalone patches.
    if (preparedCreateParams.length > 0 && preparedUpdateParams.length > 0) {
        const createById = preparedCreateParams.reduce((params, run) => {
            if (!run.id) {
                return params;
            }
            params[run.id] = run;
            return params;
        }, {});
        const standaloneUpdates = [];
        for (const updateParam of preparedUpdateParams) {
            if (updateParam.id !== undefined && createById[updateParam.id]) {
                createById[updateParam.id] = {
                    ...createById[updateParam.id],
                    ...updateParam,
                };
            }
            else {
                standaloneUpdates.push(updateParam);
            }
        }
        preparedCreateParams = Object.values(createById);
        preparedUpdateParams = standaloneUpdates;
    }
    const rawBatch = {
        post: preparedCreateParams,
        patch: preparedUpdateParams,
    };
    if (!rawBatch.post.length && !rawBatch.patch.length) {
        return;
    }
    const batchChunks = {
        post: [],
        patch: [],
    };
    // Drain rawBatch into batchChunks, preserving original order
    // (reverse, then pop from the end).
    for (const k of ["post", "patch"]) {
        const key = k;
        const batchItems = rawBatch[key].reverse();
        let batchItem = batchItems.pop();
        while (batchItem !== undefined) {
            // Type is wrong but this is a deprecated code path anyway
            batchChunks[key].push(batchItem);
            batchItem = batchItems.pop();
        }
    }
    if (batchChunks.post.length > 0 || batchChunks.patch.length > 0) {
        const runIds = batchChunks.post
            .map((item) => item.id)
            .concat(batchChunks.patch.map((item) => item.id))
            .join(",");
        await this._postBatchIngestRuns(serializePayloadForTracing(batchChunks, `Ingesting runs with ids: ${runIds}`));
    }
}
async _postBatchIngestRuns(body) {
const headers = {
...this.headers,
"Content-Type": "application/json",
Accept: "application/json",
};
const response = await this.batchIngestCaller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/runs/batch`, {
method: "POST",
headers,
body: body,
signal: AbortSignal.timeout(this.timeout_ms),
...this.fetchOptions,
});
await raiseForStatus(response, "batch create run", true);
}
/**
 * Batch ingest/upsert multiple runs in the Langsmith system.
 *
 * Runs are sent to the /runs/multipart endpoint: each run's main payload,
 * its inputs/outputs/events, and its attachments become separate multipart
 * parts. Creates and updates with the same run id are merged into one
 * create. All runs must carry trace_id and dotted_order.
 * @param runs
 */
async multipartIngestRuns({ runCreates, runUpdates, }) {
    if (runCreates === undefined && runUpdates === undefined) {
        return;
    }
    // transform and convert to dicts
    // Attachments are pulled out of each create (keyed by run id) so they
    // can be sent as their own multipart parts.
    const allAttachments = {};
    let preparedCreateParams = [];
    for (const create of runCreates ?? []) {
        const preparedCreate = await this.prepareRunCreateOrUpdateInputs(create);
        if (preparedCreate.id !== undefined &&
            preparedCreate.attachments !== undefined) {
            allAttachments[preparedCreate.id] = preparedCreate.attachments;
        }
        delete preparedCreate.attachments;
        preparedCreateParams.push(preparedCreate);
    }
    let preparedUpdateParams = [];
    for (const update of runUpdates ?? []) {
        preparedUpdateParams.push(await this.prepareRunCreateOrUpdateInputs(update));
    }
    // require trace_id and dotted_order
    const invalidRunCreate = preparedCreateParams.find((runCreate) => {
        return (runCreate.trace_id === undefined || runCreate.dotted_order === undefined);
    });
    if (invalidRunCreate !== undefined) {
        throw new Error(`Multipart ingest requires "trace_id" and "dotted_order" to be set when creating a run`);
    }
    const invalidRunUpdate = preparedUpdateParams.find((runUpdate) => {
        return (runUpdate.trace_id === undefined || runUpdate.dotted_order === undefined);
    });
    if (invalidRunUpdate !== undefined) {
        throw new Error(`Multipart ingest requires "trace_id" and "dotted_order" to be set when updating a run`);
    }
    // combine post and patch dicts where possible
    if (preparedCreateParams.length > 0 && preparedUpdateParams.length > 0) {
        const createById = preparedCreateParams.reduce((params, run) => {
            if (!run.id) {
                return params;
            }
            params[run.id] = run;
            return params;
        }, {});
        const standaloneUpdates = [];
        for (const updateParam of preparedUpdateParams) {
            if (updateParam.id !== undefined && createById[updateParam.id]) {
                createById[updateParam.id] = {
                    ...createById[updateParam.id],
                    ...updateParam,
                };
            }
            else {
                standaloneUpdates.push(updateParam);
            }
        }
        preparedCreateParams = Object.values(createById);
        preparedUpdateParams = standaloneUpdates;
    }
    if (preparedCreateParams.length === 0 &&
        preparedUpdateParams.length === 0) {
        return;
    }
    // send the runs in multipart requests
    const accumulatedContext = [];
    const accumulatedParts = [];
    for (const [method, payloads] of [
        ["post", preparedCreateParams],
        ["patch", preparedUpdateParams],
    ]) {
        for (const originalPayload of payloads) {
            // collect fields to be sent as separate parts
            const { inputs, outputs, events, attachments, ...payload } = originalPayload;
            const fields = { inputs, outputs, events };
            // encode the main run payload
            const stringifiedPayload = serializePayloadForTracing(payload, `Serializing for multipart ingestion of run with id: ${payload.id}`);
            accumulatedParts.push({
                name: `${method}.${payload.id}`,
                payload: new Blob([stringifiedPayload], {
                    type: `application/json; length=${stringifiedPayload.length}`, // encoding=gzip
                }),
            });
            // encode the fields we collected
            for (const [key, value] of Object.entries(fields)) {
                if (value === undefined) {
                    continue;
                }
                const stringifiedValue = serializePayloadForTracing(value, `Serializing ${key} for multipart ingestion of run with id: ${payload.id}`);
                accumulatedParts.push({
                    name: `${method}.${payload.id}.${key}`,
                    payload: new Blob([stringifiedValue], {
                        type: `application/json; length=${stringifiedValue.length}`,
                    }),
                });
            }
            // encode the attachments
            if (payload.id !== undefined) {
                const attachments = allAttachments[payload.id];
                if (attachments) {
                    delete allAttachments[payload.id];
                    for (const [name, attachment] of Object.entries(attachments)) {
                        let contentType;
                        let content;
                        // Attachments may be [mimeType, data] tuples or
                        // { mimeType, data } objects.
                        if (Array.isArray(attachment)) {
                            [contentType, content] = attachment;
                        }
                        else {
                            contentType = attachment.mimeType;
                            content = attachment.data;
                        }
                        // Validate that the attachment name doesn't contain a '.'
                        if (name.includes(".")) {
                            console.warn(`Skipping attachment '${name}' for run ${payload.id}: Invalid attachment name. ` +
                                `Attachment names must not contain periods ('.'). Please rename the attachment and try again.`);
                            continue;
                        }
                        accumulatedParts.push({
                            name: `attachment.${payload.id}.${name}`,
                            payload: new Blob([content], {
                                type: `${contentType}; length=${content.byteLength}`,
                            }),
                        });
                    }
                }
            }
            // compute context
            accumulatedContext.push(`trace=${payload.trace_id},id=${payload.id}`);
        }
    }
    await this._sendMultipartRequest(accumulatedParts, accumulatedContext.join("; "));
}
async _createNodeFetchBody(parts, boundary) {
// Create multipart form data manually using Blobs
const chunks = [];
for (const part of parts) {
// Add field boundary
chunks.push(new Blob([`--${boundary}\r\n`]));
chunks.push(new Blob([
`Content-Disposition: form-data; name="${part.name}"\r\n`,
`Content-Type: ${part.payload.type}\r\n\r\n`,
]));
chunks.push(part.payload);
chunks.push(new Blob(["\r\n"]));
}
// Add final boundary
chunks.push(new Blob([`--${boundary}--\r\n`]));
// Combine all chunks into a single Blob
const body = new Blob(chunks);
// Convert Blob to ArrayBuffer for compatibility
const arrayBuffer = await body.arrayBuffer();
return arrayBuffer;
}
/**
 * Build a multipart/form-data request body as a ReadableStream, so large
 * payloads are streamed rather than buffered in memory. Used with fetch
 * implementations that support streaming request bodies (not node-fetch).
 */
async _createMultipartStream(parts, boundary) {
    const encoder = new TextEncoder();
    // Create a ReadableStream for streaming the multipart data
    // Only do special handling if we're using node-fetch
    const stream = new ReadableStream({
        async start(controller) {
            // Helper function to write a chunk to the stream
            const writeChunk = async (chunk) => {
                if (typeof chunk === "string") {
                    controller.enqueue(encoder.encode(chunk));
                }
                else {
                    controller.enqueue(chunk);
                }
            };
            // Write each part to the stream
            for (const part of parts) {
                // Write boundary and headers
                await writeChunk(`--${boundary}\r\n`);
                await writeChunk(`Content-Disposition: form-data; name="${part.name}"\r\n`);
                await writeChunk(`Content-Type: ${part.payload.type}\r\n\r\n`);
                // Write the payload
                // Stream the Blob's contents chunk by chunk.
                const payloadStream = part.payload.stream();
                const reader = payloadStream.getReader();
                try {
                    let result;
                    while (!(result = await reader.read()).done) {
                        controller.enqueue(result.value);
                    }
                }
                finally {
                    reader.releaseLock();
                }
                await writeChunk("\r\n");
            }
            // Write final boundary
            await writeChunk(`--${boundary}--\r\n`);
            controller.close();
        },
    });
    return stream;
}
async _sendMultipartRequest(parts, context) {
try {
// Create multipart form data boundary
const boundary = "----LangSmithFormBoundary" + Math.random().toString(36).slice(2);
const body = await (_globalFetchImplementationIsNodeFetch()
? this._createNodeFetchBody(parts, boundary)
: this._createMultipartStream(parts, boundary));
const res = await this.batchIngestCaller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/runs/multipart`, {
method: "POST",
headers: {
...this.headers,
"Content-Type": `multipart/form-data; boundary=${boundary}`,
},
body,
duplex: "half",
signal: AbortSignal.timeout(this.timeout_ms),
...this.fetchOptions,
});
await raiseForStatus(res, "ingest multipart runs", true);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
}
catch (e) {
console.warn(`${e.message.trim()}\n\nContext: ${context}`);
}
}
/**
 * Update (patch) an existing run.
 *
 * Applies input/output hiding, drops the update if the run's trace was
 * filtered out by sampling, and routes through the auto-batch queue when
 * trace_id/dotted_order are present. Ending a root run blocks until its
 * batch is sent (unless background or manual-flush mode is enabled) so
 * traces complete in serverless environments.
 *
 * @param runId - UUID of the run to update.
 * @param run - Partial run fields to patch.
 */
async updateRun(runId, run) {
    assertUuid(runId);
    if (run.inputs) {
        run.inputs = await this.processInputs(run.inputs);
    }
    if (run.outputs) {
        run.outputs = await this.processOutputs(run.outputs);
    }
    // TODO: Untangle types
    const data = { ...run, id: runId };
    if (!this._filterForSampling([data], true).length) {
        return;
    }
    if (this.autoBatchTracing &&
        data.trace_id !== undefined &&
        data.dotted_order !== undefined) {
        if (run.end_time !== undefined &&
            data.parent_run_id === undefined &&
            this.blockOnRootRunFinalization &&
            !this.manualFlushMode) {
            // Trigger batches as soon as a root trace ends and wait to ensure trace finishes
            // in serverless environments.
            await this.processRunOperation({ action: "update", item: data }).catch(console.error);
            return;
        }
        else {
            // Non-root run or background mode: fire and forget.
            void this.processRunOperation({ action: "update", item: data }).catch(console.error);
        }
        return;
    }
    // Fallback: direct PATCH when auto-batching is unavailable.
    const headers = { ...this.headers, "Content-Type": "application/json" };
    const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/runs/${runId}`, {
        method: "PATCH",
        headers,
        body: serializePayloadForTracing(run, `Serializing payload to update run with id: ${runId}`),
        signal: AbortSignal.timeout(this.timeout_ms),
        ...this.fetchOptions,
    });
    await raiseForStatus(response, "update run", true);
}
async readRun(runId, { loadChildRuns } = { loadChildRuns: false }) {
assertUuid(runId);
let run = await this._get(`/runs/${runId}`);
if (loadChildRuns) {
run = await this._loadChildRuns(run);
}
return run;
}
async getRunUrl({ runId, run, projectOpts, }) {
if (run !== undefined) {
let sessionId;
if (run.session_id) {
sessionId = run.session_id;
}
else if (projectOpts?.projectName) {
sessionId = (await this.readProject({ projectName: projectOpts?.projectName })).id;
}
else if (projectOpts?.projectId) {
sessionId = projectOpts?.projectId;
}
else {
const project = await this.readProject({
projectName: getLangSmithEnvironmentVariable("PROJECT") || "default",
});
sessionId = project.id;
}
const tenantId = await this._getTenantId();
return `${this.getHostUrl()}/o/${tenantId}/projects/p/${sessionId}/r/${run.id}?poll=true`;
}
else if (runId !== undefined) {
const run_ = await this.readRun(runId);
if (!run_.app_path) {
throw new Error(`Run ${runId} has no app_path`);
}
const baseUrl = this.getHostUrl();
return `${baseUrl}${run_.app_path}`;
}
else {
throw new Error("Must provide either runId or run");
}
}
/**
 * Fetch all descendant runs of `run` within its trace and attach them as a
 * nested `child_runs` tree, ordered by dotted_order.
 * @throws Error when a fetched child run has no parent_run_id.
 */
async _loadChildRuns(run) {
    const childRuns = await toArray(this.listRuns({
        isRoot: false,
        projectId: run.session_id,
        traceId: run.trace_id,
    }));
    // treemap: parent_run_id -> direct children; runs: flat id -> run index.
    const treemap = {};
    const runs = {};
    // TODO: make dotted order required when the migration finishes
    childRuns.sort((a, b) => (a?.dotted_order ?? "").localeCompare(b?.dotted_order ?? ""));
    for (const childRun of childRuns) {
        if (childRun.parent_run_id === null ||
            childRun.parent_run_id === undefined) {
            throw new Error(`Child run ${childRun.id} has no parent`);
        }
        // Only keep runs that actually belong to this run's subtree.
        if (childRun.dotted_order?.startsWith(run.dotted_order ?? "") &&
            childRun.id !== run.id) {
            if (!(childRun.parent_run_id in treemap)) {
                treemap[childRun.parent_run_id] = [];
            }
            treemap[childRun.parent_run_id].push(childRun);
            runs[childRun.id] = childRun;
        }
    }
    run.child_runs = treemap[run.id] || [];
    // Attach grandchildren (and deeper) onto their respective parents.
    for (const runId in treemap) {
        if (runId !== run.id) {
            runs[runId].child_runs = treemap[runId];
        }
    }
    return run;
}
/**
* List runs from the LangSmith server.
* @param projectId - The ID of the project to filter by.
* @param projectName - The name of the project to filter by.
* @param parentRunId - The ID of the parent run to filter by.
* @param traceId - The ID of the trace to filter by.
* @param referenceExampleId - The ID of the reference example to filter by.
* @param startTime - The start time to filter by.
* @param isRoot - Indicates whether to only return root runs.
* @param runType - The run type to filter by.
* @param error - Indicates whether to filter by error runs.
* @param id - The ID of the run to filter by.
* @param query - The query string to filter by.
* @param filter - The filter string to apply to the run spans.
* @param traceFilter - The filter string to apply on the root run of the trace.
* @param treeFilter - The filter string to apply on other runs in the trace.
* @param limit - The maximum number of runs to retrieve.
* @returns {AsyncIterable<Run>} - The runs.
*
* @example
* // List all runs in a project
* const projectRuns = client.listRuns({ projectName: "<your_project>" });
*
* @example
* // List LLM and Chat runs in the last 24 hours
* const todaysLLMRuns = client.listRuns({
* projectName: "<your_project>",
* startTime: new Date(Date.now() - 24 * 60 * 60 * 1000),
* runType: "llm",
* });
*
* @example
* // List traces in a project
* const rootRuns = client.listRuns({
* projectName: "<your_project>",
* executionOrder: 1,
* });
*
* @example
* // List runs without errors
* const correctRuns = client.listRuns({
* projectName: "<your_project>",
* error: false,
* });
*
* @example
* // List runs by run ID
* const runIds = [
* "a36092d2-4ad5-4fb4-9c0d-0dba9a2ed836",
* "9398e6be-964f-4aa4-8ae9-ad78cd4b7074",
* ];
* const selectedRuns = client.listRuns({ id: runIds });
*
* @example
* // List all "chain" type runs that took more than 10 seconds and had `total_tokens` greater than 5000
* const chainRuns = client.listRuns({
* projectName: "<your_project>",
* filter: 'and(eq(run_type, "chain"), gt(latency, 10), gt(total_tokens, 5000))',
* });
*
* @example
* // List all runs called "extractor" whose root of the trace was assigned feedback "user_score" score of 1
* const goodExtractorRuns = client.listRuns({
* projectName: "<your_project>",
* filter: 'eq(name, "extractor")',
* traceFilter: 'and(eq(feedback_key, "user_score"), eq(feedback_score, 1))',
* });
*
* @example
* // List all runs that started after a specific timestamp and either have "error" not equal to null or a "Correctness" feedback score equal to 0
* const complexRuns = client.listRuns({
* projectName: "<your_project>",
* filter: 'and(gt(start_time, "2023-07-15T12:34:56Z"), or(neq(error, null), and(eq(feedback_key, "Correctness"), eq(feedback_score, 0.0))))',
* });
*
* @example
* // List all runs where `tags` include "experimental" or "beta" and `latency` is greater than 2 seconds
* const taggedRuns = client.listRuns({
* projectName: "<your_project>",
* filter: 'and(or(has(tags, "experimental"), has(tags, "beta")), gt(latency, 2))',
* });
*/
async *listRuns(props) {
const { projectId, projectName, parentRunId, traceId, referenceExampleId, startTime, executionOrder, isRoot, runType, error, id, query, filter, traceFilter, treeFilter, limit, select, order, } = props;
let projectIds = [];
if (projectId) {
projectIds = Array.isArray(projectId) ? projectId : [projectId];
}
if (projectName) {
const projectNames = Array.isArray(projectName)
? projectName
: [projectName];
const projectIds_ = await Promise.all(projectNames.map((name) => this.readProject({ projectName: name }).then((project) => project.id)));
projectIds.push(...projectIds_);
}
const default_select = [
"app_path",
"completion_cost",
"completion_tokens",
"dotted_order",
"end_time",
"error",
"events",
"extra",
"feedback_stats",
"first_token_time",
"id",
"inputs",
"name",
"outputs",
"parent_run_id",
"parent_run_ids",
"prompt_cost",
"prompt_tokens",
"reference_example_id",
"run_type",
"session_id",
"start_time",
"status",
"tags",
"total_cost",
"total_tokens",
"trace_id",
];
const body = {
session: projectIds.length ? projectIds : null,
run_type: runType,
reference_example: referenceExampleId,
query,
filter,
trace_filter: traceFilter,
tree_filter: treeFilter,
execution_order: executionOrder,
parent_run: parentRunId,
start_time: startTime ? startTime.toISOString() : null,
error,
id,
limit,
trace: traceId,
select: select ? select : default_select,
is_root: isRoot,
order,
};
let runsYielded = 0;
for await (const runs of this._getCursorPaginatedList("/runs/query", body)) {
if (limit) {
if (runsYielded >= limit) {
break;
}
if (runs.length + runsYielded > limit) {
const newRuns = runs.slice(0, limit - runsYielded);
yield* newRuns;
break;
}
runsYielded += runs.length;
yield* runs;
}
else {