@promptbook/vercel
Version:
Promptbook: Turn your company's scattered knowledge into AI-ready books
40 lines (39 loc) • 1.72 kB
TypeScript
import type { ChatParticipant } from '../../book-components/Chat/types/ChatParticipant';
import type { AvailableModel } from '../../execution/AvailableModel';
import type { CommonToolsOptions } from '../../execution/CommonToolsOptions';
import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
import type { ChatPromptResult, CompletionPromptResult } from '../../execution/PromptResult';
import type { Prompt } from '../../types/Prompt';
import type { string_markdown, string_markdown_text, string_title } from '../../types/typeAliases';
/**
 * Mocked execution tools that just echo the requests back, for testing purposes.
 *
 * NOTE(review): This is an ambient declaration (`declare class`); the implementation
 * lives elsewhere. Comments here describe the declared contract only.
 *
 * @public exported from `@promptbook/fake-llm`
 */
export declare class MockedEchoLlmExecutionTools implements LlmExecutionTools {
    /**
     * Options shared by all execution tools, kept for subclasses
     */
    protected readonly options: CommonToolsOptions;
    /**
     * Creates the mocked tools
     *
     * @param options Optional common configuration for the tools
     */
    constructor(options?: CommonToolsOptions);
    /**
     * Short human-readable title of these tools
     */
    get title(): string_title & string_markdown_text;
    /**
     * Markdown description of these tools
     */
    get description(): string_markdown;
    /**
     * Chat participant identity used when these tools appear in a chat
     */
    get profile(): ChatParticipant;
    /**
     * Does nothing, just to implement the interface
     */
    checkConfiguration(): void;
    /**
     * List all available mocked-models that can be used
     */
    listModels(): ReadonlyArray<AvailableModel>;
    /**
     * Mocks chat model
     *
     * @param prompt The prompt content, parameters and model requirements to echo
     * @returns Promise of the echoed chat result
     */
    callChatModel(prompt: Pick<Prompt, 'content' | 'parameters' | 'modelRequirements'>): Promise<ChatPromptResult>;
    /**
     * Mocks completion model
     *
     * @param prompt The prompt content, parameters and model requirements to echo
     * @returns Promise of the echoed completion result
     */
    callCompletionModel(prompt: Pick<Prompt, 'content' | 'parameters' | 'modelRequirements'>): Promise<CompletionPromptResult>;
}
/**
* TODO: [🧠][🈁] Maybe use `isDeterministic` from options
 * TODO: Allow nesting in spaceTrim via `> ${block(prompt.request)}`, same as replacing params
*/