@promptbook/remote-client
Version:
Promptbook: Turn your company's scattered knowledge into AI-ready books
42 lines (41 loc) • 1.98 kB
TypeScript
import type { ChatPromptResult } from '../../execution/PromptResult';
import type { Prompt } from '../../types/Prompt';
import type { string_agent_hash, string_agent_name } from '../../types/typeAliases';
import { Agent } from './Agent';
import type { RemoteAgentOptions } from './RemoteAgentOptions';
/**
 * Represents one AI Agent
 *
 * Note: [🦖] There are several different things in Promptbook:
 * - `Agent` - which represents an AI Agent with its source, memories, actions, etc. Agent is a higher-level abstraction which is internally using:
 * - `LlmExecutionTools` - which wraps one or more LLM models and provides an interface to execute them
 * - `AgentLlmExecutionTools` - which is a specific implementation of `LlmExecutionTools` that wraps another LlmExecutionTools and applies agent-specific system prompts and requirements
 * - `OpenAiAssistantExecutionTools` - which is a specific implementation of `LlmExecutionTools` for OpenAI models with assistant capabilities, recommended for usage in `Agent` or `AgentLlmExecutionTools`
 * - `RemoteAgent` - which is an `Agent` that connects to a Promptbook Agents Server
 *
 * NOTE(review): the file header names the package `@promptbook/remote-client`, but this tag
 * says `@promptbook/core` — confirm which package actually exports `RemoteAgent`.
 *
 * @public exported from `@promptbook/core`
 */
export declare class RemoteAgent extends Agent {
/**
 * Connects to a Promptbook Agents Server and resolves with a ready-to-use `RemoteAgent`.
 *
 * This is the only way to obtain an instance — the constructor is private.
 *
 * @param options connection configuration, see `RemoteAgentOptions`
 * @returns promise of the connected remote agent
 */
static connect(options: RemoteAgentOptions): Promise<RemoteAgent>;
/**
 * The source of the agent
 *
 * Note: URL of the remote agent on the Agents Server (type not visible in this declaration —
 * confirm against the implementation).
 */
private agentUrl;
/**
 * Backing field for the `agentName` getter — presumably populated during `connect`;
 * verify against the implementation.
 */
private _remoteAgentName;
/**
 * Backing field for the `agentHash` getter — presumably populated during `connect`;
 * verify against the implementation.
 */
private _remoteAgentHash;
/**
 * Private on purpose — instances are created via the static `connect` factory.
 */
private constructor();
/**
 * Name of the remote agent
 */
get agentName(): string_agent_name;
/**
 * Hash identifying the remote agent
 */
get agentHash(): string_agent_hash;
/**
 * Calls the agent on agents remote server
 *
 * @param prompt the chat prompt to execute remotely
 * @returns promise of the complete chat result
 */
callChatModel(prompt: Prompt): Promise<ChatPromptResult>;
/**
 * Calls the agent on agents remote server with streaming
 *
 * @param prompt the chat prompt to execute remotely
 * @param onProgress callback invoked with each partial `ChatPromptResult` chunk as it arrives
 * @returns promise of the final, complete chat result
 */
callChatModelStream(prompt: Prompt, onProgress: (chunk: ChatPromptResult) => void): Promise<ChatPromptResult>;
}
/**
 * TODO: [🧠][😰] Agent is not working with the parameters, should it be?
* TODO: !!! Agent on remote server
*/