@mondaydotcomorg/atp-runtime
Runtime SDK injected into sandbox for Agent Tool Protocol
JavaScript
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
/**
* LLM API - Clean refactored version with decorators and extracted modules
*
* Benefits:
* - No duplication between implementation and metadata
* - Types auto-detected from TypeScript signatures
* - Clean separation of concerns (replay, callback, API)
*/
import { pauseForCallback, CallbackType, LLMOperation } from '../pause/index.js';
import { RuntimeAPI, RuntimeMethod } from '../metadata/decorators.js';
import { nextSequenceNumber, getCachedResult } from './replay.js';
export { setClientLLMCallback, getClientLLMCallback } from './callback.js';
export {
    initializeExecutionState,
    setPauseForClient,
    shouldPauseForClient,
    setReplayMode,
    getCallSequenceNumber,
    nextSequenceNumber,
    getCachedResult,
    isReplayMode,
    runInExecutionContext,
    setCurrentExecutionId,
    clearCurrentExecutionId,
    storeAPICallResult,
    getAPICallResults,
    clearAPICallResults,
    setAPIResultCache,
    getAPIResultFromCache,
    storeAPIResultInCache,
    cleanupExecutionState,
    cleanupOldExecutionStates,
    resetAllExecutionState,
    getExecutionStateStats,
} from './replay.js';
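// Pause/replay model (a summary inferred from the imports and re-exports
// above; the authoritative behavior lives in ../pause and ./replay): each
// LLM call claims the next sequence number. On the first pass the result
// cache is empty, so the call pauses and hands control back to the client.
// Once the client supplies a result, the script is replayed from the top and
// getCachedResult() short-circuits every call that already has an answer.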
/**
* LLM Runtime API
*
* Provides client-side LLM operations with pause/resume support.
 * All calls pause execution and route to the client-provided LLM.
*/
let LLMAPI = class LLMAPI {
/**
* Makes a standard LLM call
 * Always pauses execution and routes to the client-provided LLM
*/
async call(options) {
const currentSequence = nextSequenceNumber();
const cachedResult = getCachedResult(currentSequence);
if (cachedResult !== undefined) {
return cachedResult;
}
pauseForCallback(CallbackType.LLM, LLMOperation.CALL, {
prompt: options.prompt,
options,
sequenceNumber: currentSequence,
});
}
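    // Usage sketch (hypothetical; assumes LLMCallOptions carries at least a
    // `prompt` string, the only field this method reads directly):
    //   const answer = await llm.call({ prompt: 'Summarize this update thread' });
    // Note that nothing is returned after pauseForCallback: execution is
    // expected to suspend there and resume through the replay cache above.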
/**
 * Extracts structured data from text using an LLM
 * Always pauses execution and routes to the client-provided LLM
*/
async extract(options) {
const currentSequence = nextSequenceNumber();
const cachedResult = getCachedResult(currentSequence);
if (cachedResult !== undefined) {
return cachedResult;
}
pauseForCallback(CallbackType.LLM, LLMOperation.EXTRACT, {
prompt: options.prompt,
schema: options.schema,
options,
sequenceNumber: currentSequence,
});
}
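    // Usage sketch (hypothetical values; `prompt` and `schema` are the fields
    // forwarded to the client above):
    //   const fields = await llm.extract({
    //       prompt: 'Pull the assignee and due date from this email',
    //       schema: { type: 'object', properties: { assignee: { type: 'string' } } },
    //   });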
/**
* Classifies text into one of the provided categories
 * Always pauses execution and routes to the client-provided LLM
*/
async classify(options) {
const currentSequence = nextSequenceNumber();
const cachedResult = getCachedResult(currentSequence);
if (cachedResult !== undefined) {
return cachedResult;
}
pauseForCallback(CallbackType.LLM, LLMOperation.CLASSIFY, {
text: options.text,
categories: options.categories,
options,
sequenceNumber: currentSequence,
});
}
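    // Usage sketch (hypothetical categories; `text` and `categories` are the
    // fields forwarded to the client above):
    //   const label = await llm.classify({
    //       text: 'The app crashes when I open settings',
    //       categories: ['bug', 'feature-request', 'question'],
    //   });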
};
__decorate([
RuntimeMethod('Make an LLM call with a prompt', {
options: {
description: 'LLM call options including prompt',
type: 'LLMCallOptions',
},
}),
__metadata("design:type", Function),
__metadata("design:paramtypes", [Object]),
__metadata("design:returntype", Promise)
], LLMAPI.prototype, "call", null);
__decorate([
RuntimeMethod('Extract structured data from text using an LLM', {
options: {
description: 'Extraction options with JSON schema',
type: 'LLMExtractOptions',
},
}),
__metadata("design:type", Function),
__metadata("design:paramtypes", [Object]),
__metadata("design:returntype", Promise)
], LLMAPI.prototype, "extract", null);
__decorate([
RuntimeMethod('Classify text into one of the provided categories', {
options: {
description: 'Classification options with categories',
type: 'LLMClassifyOptions',
},
}),
__metadata("design:type", Function),
__metadata("design:paramtypes", [Object]),
__metadata("design:returntype", Promise)
], LLMAPI.prototype, "classify", null);
LLMAPI = __decorate([
RuntimeAPI('llm', 'LLM API - Large Language Model calls using client-provided LLM (requires client.provideLLM())')
], LLMAPI);
export const llm = new LLMAPI();
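// Client-side wiring sketch. setClientLLMCallback is re-exported above, but
// its callback signature is not visible in this file; the (operation, payload)
// shape below, and `myModel`, are assumptions for illustration only.
//   import { setClientLLMCallback } from '@mondaydotcomorg/atp-runtime';
//   setClientLLMCallback(async (operation, payload) => {
//       // Route the paused call to whatever LLM the host application provides.
//       return myModel.complete(payload.prompt ?? payload.text);
//   });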
//# sourceMappingURL=index.js.map