@salesforce/agents

Client-side APIs for working with Salesforce agents

import { Connection } from '@salesforce/core';
import { Duration } from '@salesforce/kit';
import { DeployResult } from '@salesforce/source-deploy-retrieve';
import { type AvailableDefinition, type AgentTestStartResponse, type AgentTestStatusResponse, type AgentTestResultsResponse, type TestSpec } from './types.js';
/**
 * Events emitted during agent test creation for consumers to listen to and keep track of progress.
 */
export declare const AgentTestCreateLifecycleStages: {
    CreatingLocalMetadata: string;
    Waiting: string;
    DeployingMetadata: string;
    Done: string;
};
/**
 * A service for testing agents using `AiEvaluationDefinition` metadata. Start asynchronous
 * test runs, get or poll for test status, and get detailed test results.
 *
 * **Examples**
 *
 * Create an instance of the service:
 *
 * `const agentTester = new AgentTester(connection);`
 *
 * Start a test run:
 *
 * `const startResponse = await agentTester.start(aiEvalDef);`
 *
 * Get the status for a test run:
 *
 * `const status = await agentTester.status(startResponse.runId);`
 *
 * Get detailed results for a test run:
 *
 * `const results = await agentTester.results(startResponse.runId);`
 */
export declare class AgentTester {
    private connection;
    private maybeMock;
    constructor(connection: Connection);
    /**
     * List the AiEvaluationDefinitions available in the org.
     */
    list(): Promise<AvailableDefinition[]>;
    /**
     * Initiates a test run (i.e., AI evaluation).
     *
     * @param aiEvalDefName - The name of the AI evaluation definition to run.
     * @returns Promise that resolves with the response from starting the test.
     */
    start(aiEvalDefName: string): Promise<AgentTestStartResponse>;
    /**
     * Get the status of a test run.
     *
     * @param {string} jobId
     * @returns {Promise<AgentTestStatusResponse>}
     */
    status(jobId: string): Promise<AgentTestStatusResponse>;
    /**
     * Poll the status of a test run until the tests are complete or the timeout is reached.
     *
     * @param {string} jobId
     * @param {Duration} timeout
     * @returns {Promise<AgentTestResultsResponse>}
     */
    poll(jobId: string, { timeout, }?: {
        timeout?: Duration;
    }): Promise<AgentTestResultsResponse>;
    /**
     * Get detailed test run results.
     *
     * @param {string} jobId
     * @returns {Promise<AgentTestResultsResponse>}
     */
    results(jobId: string): Promise<AgentTestResultsResponse>;
    /**
     * Cancel an in-progress test run.
     *
     * @param {string} jobId
     * @returns {Promise<{success: boolean}>}
     */
    cancel(jobId: string): Promise<{
        success: boolean;
    }>;
    /**
     * Creates and deploys an AiEvaluationDefinition from a specification file.
     *
     * @param apiName - The API name of the AiEvaluationDefinition to create
     * @param specFilePath - The path to the specification file to create the definition from
     * @param options - Configuration options for creating the definition
     * @param options.outputDir - The directory where the AiEvaluationDefinition file will be written
     * @param options.preview - If true, writes the AiEvaluationDefinition file to <api-name>-preview-<timestamp>.xml in the current working directory and does not deploy it
     *
     * @returns Promise containing:
     * - path: The filesystem path to the created AiEvaluationDefinition file
     * - contents: The AiEvaluationDefinition contents as a string
     * - deployResult: The deployment result (if not in preview mode)
     *
     * @throws {SfError} When deployment fails
     */
    create(apiName: string, specFilePath: string, options: {
        outputDir: string;
        preview?: boolean;
    }): Promise<{
        path: string;
        contents: string;
        deployResult?: DeployResult;
    }>;
}
/**
 * Convert the raw, detailed test results to another format.
 *
 * @param results The detailed results from a test run.
 * @param format The desired format. One of: json, junit, or tap.
 * @returns
 */
export declare function convertTestResultsToFormat(results: AgentTestResultsResponse, format: 'json' | 'junit' | 'tap'): Promise<string>;
/**
 * Normalizes test results by decoding HTML entities in utterances and test result values.
 *
 * @param results - The agent test results response object to normalize
 * @returns A new AgentTestResultsResponse with decoded HTML entities
 *
 * @example
 * ```
 * const results = {
 *   testCases: [{
 *     inputs: { utterance: "&quot;hello&quot;" },
 *     testResults: [{
 *       actualValue: "&amp;test",
 *       expectedValue: "&lt;value&gt;"
 *     }]
 *   }]
 * };
 * const normalized = normalizeResults(results);
 * ```
 */
export declare function normalizeResults(results: AgentTestResultsResponse): AgentTestResultsResponse;
export declare function humanFriendlyName(name: string): string;
/**
 * Generate a test specification file in YAML format.
 * This function takes a test specification object, cleans it by removing undefined and empty string values,
 * converts it to YAML format, and writes it to the specified output file.
 *
 * @param spec - The test specification object to be converted to YAML.
 * @param outputFile - The file path where the YAML output should be written.
 * @throws {Error} - May throw an error if file operations fail.
 * @returns A Promise that resolves when the file has been written.
 */
export declare function writeTestSpec(spec: TestSpec, outputFile: string): Promise<void>;
/**
 * Generates a TestSpec object from an AI Evaluation Definition XML file.
 *
 * @param path - The file path to the AI Evaluation Definition XML file.
 * @returns Promise that resolves to a TestSpec object containing the parsed evaluation definition data.
 * @description Reads and parses an XML file containing AIEvaluationDefinition, converting it into a structured TestSpec format.
 *
 * @throws {Error} If the file cannot be read or parsed.
 */
export declare function generateTestSpecFromAiEvalDefinition(path: string): Promise<TestSpec>;
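/*
 * Usage sketch (illustrative only; not part of the published typings). It assumes the
 * package entry point re-exports `AgentTester` and `convertTestResultsToFormat`, that
 * 'user@example.org' is an already-authorized org username, and that 'My_Eval_Definition'
 * is the developer name of an existing AiEvaluationDefinition in the target org.
 *
 * import { AuthInfo, Connection } from '@salesforce/core';
 * import { Duration } from '@salesforce/kit';
 * import { AgentTester, convertTestResultsToFormat } from '@salesforce/agents';
 *
 * // Build an authenticated Connection for the target org.
 * const authInfo = await AuthInfo.create({ username: 'user@example.org' });
 * const connection = await Connection.create({ authInfo });
 * const agentTester = new AgentTester(connection);
 *
 * // Start an asynchronous run of the AiEvaluationDefinition, then poll until it
 * // completes (or the timeout elapses) to get detailed results.
 * const { runId } = await agentTester.start('My_Eval_Definition');
 * const results = await agentTester.poll(runId, { timeout: Duration.minutes(10) });
 *
 * // Convert the raw results to JUnit XML for CI reporting ('json' and 'tap' also work).
 * const junit = await convertTestResultsToFormat(results, 'junit');
 * console.log(junit);
 */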
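/*
 * Spec-file round-trip sketch (illustrative only; the file paths and API names below are
 * placeholders). It derives a TestSpec from existing AiEvaluationDefinition metadata,
 * writes it out as an editable YAML spec, then creates and deploys a new definition from
 * that spec via `AgentTester.create()`. It assumes these helpers are exported from the
 * package entry point and that `connection` was built as in the previous sketch.
 *
 * import { AgentTester, generateTestSpecFromAiEvalDefinition, writeTestSpec } from '@salesforce/agents';
 *
 * // Parse existing definition metadata into a structured TestSpec.
 * const spec = await generateTestSpecFromAiEvalDefinition(
 *   'force-app/main/default/aiEvaluationDefinitions/My_Eval.aiEvaluationDefinition-meta.xml'
 * );
 *
 * // Clean the spec and write it out as YAML so it can be reviewed or edited.
 * await writeTestSpec(spec, 'specs/My_Eval.yaml');
 *
 * // Create a new AiEvaluationDefinition from the spec file and deploy it to the org.
 * const agentTester = new AgentTester(connection);
 * const { path, deployResult } = await agentTester.create('My_New_Eval', 'specs/My_Eval.yaml', {
 *   outputDir: 'force-app/main/default/aiEvaluationDefinitions',
 * });
 * console.log(`Definition written to ${path}; deployed: ${deployResult !== undefined}`);
 */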