@unified-llm/core
Version:
Unified LLM interface (in-memory).
74 lines • 2.15 kB
JavaScript
/**
 * Maintains conversation thread state for LLM invocations.
 *
 * Tracks an input history and, when available, the `previous_response_id`
 * returned by the Responses API; `buildRequestContextForResponsesAPI`
 * combines both into the payload for the next request.
 */
export class Thread {
    /**
     * @param {object} [options]
     * @param {string} [options.previousResponseId] - Previous response ID (used only when available).
     * @param {Array} [options.history] - Initial input history.
     */
    constructor(options) {
        // `?.` / `??` replace the transpiled `=== null || === void 0`
        // guards; behavior is identical (undefined options → empty state).
        this.previousResponseId = options?.previousResponseId;
        this.history = options?.history ?? [];
    }
    /**
     * Get the current history.
     * @returns {Array} Live reference to the internal history (not a copy).
     */
    getHistory() {
        return this.history;
    }
    /**
     * Replace the entire history.
     * @param {Array} items - New history; stored by reference.
     */
    setHistory(items) {
        this.history = items;
    }
    /**
     * Append elements to the end of the history.
     * @param {Array} items - Elements to append, in order.
     */
    appendToHistory(items) {
        this.history.push(...items);
    }
    /**
     * Build the `input` and `previous_response_id` for the next request.
     *
     * Side effect: the combined (history + nextInput) array becomes the new
     * history, so subsequent calls accumulate.
     *
     * NOTE(review): the full combined history is sent even when
     * `previousResponseId` is set, although the class doc suggests the ID
     * should take priority — confirm whether the Responses API expects only
     * `nextInput` in that case.
     *
     * @param {Array} nextInput - New input items for the upcoming request.
     * @returns {{input: Array, previous_response_id?: string}}
     */
    buildRequestContextForResponsesAPI(nextInput) {
        const combinedInput = [...this.history, ...nextInput];
        this.history = combinedInput;
        return this.previousResponseId
            ? { input: combinedInput, previous_response_id: this.previousResponseId }
            : { input: combinedInput };
    }
    /**
     * Record the latest response ID.
     * Falsy values are ignored so a missing ID never clobbers a stored one.
     * @param {string} [responseId]
     */
    updatePreviousResponseId(responseId) {
        if (responseId) {
            this.previousResponseId = responseId;
        }
    }
    /**
     * Convert the thread state to a serializable snapshot.
     * The JSON round-trip intentionally deep-copies the history and strips
     * anything non-JSON-serializable, so mutating the snapshot cannot
     * affect this thread.
     * @returns {{previousResponseId: (string|undefined), history: Array}}
     */
    toJSON() {
        return {
            previousResponseId: this.previousResponseId,
            history: JSON.parse(JSON.stringify(this.history)),
        };
    }
    /**
     * Restore a Thread from a snapshot produced by {@link Thread#toJSON}.
     * @param {{previousResponseId?: string, history?: Array}} snapshot
     * @returns {Thread}
     */
    static fromJSON(snapshot) {
        return new Thread({
            previousResponseId: snapshot.previousResponseId,
            history: snapshot.history,
        });
    }
}
//# sourceMappingURL=thread.js.map