generator-begcode
Spring Boot + Angular/React/Vue in one handy generator
import { ChatMessageBuilder, DirectoryChunker } from '../agent-core/index.js';
import { LlmAgentFunctionBase } from './utils/index.js';
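// ChatMessageBuilder and DirectoryChunker come from the shared agent-core package;
// LlmAgentFunctionBase wires up the llm/tokenizer pair and provides the askLlm helper used below.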
export class SummarizeDirectoryFunction extends LlmAgentFunctionBase {
  constructor(llm, tokenizer) {
    super(llm, tokenizer);
  }

  // Metadata exposed to the agent: the function's name, description, and JSON-schema parameters.
  name = 'summarizeDirectory';
  description = 'Summarize the contents of a directory. Includes file names and brief descriptions.';
  parameters = {
    type: 'object',
    properties: {
      subDirectory: {
        type: 'string',
        description: 'sub-directory to be summarized (default: root directory)',
      },
    },
    required: [],
    additionalProperties: false,
  };
  buildExecutor({ context }) {
    return async (params, rawParams) => {
      // Prompt template: folds the running summary (if any) into the request so that
      // each new chunk's details are merged without dropping earlier information.
      const prompt = (summary, chunk) => `Your job is to summarize the contents of the following files. In this summary please structure your response on a per-file basis. NOTE: some files have been chunked, line numbers are annotated.\n
${summary ? `A summary already exists; you MUST modify it to contain all new details, WITHOUT LOSING INFORMATION already present within the summary.\n\`\`\`${summary}\`\`\`\n` : ''}
Chunk:\n\`\`\`\n${chunk}\n\`\`\`\n`;
      // Budget the chunk size: total context window minus the response allowance and a small fixed buffer.
      const fuzTokens = 200;
      const maxInputTokens = this.llm.getMaxContextTokens() - (this.llm.getMaxResponseTokens() + fuzTokens);
      // Split the directory's files into chunks that fit the input budget.
      const chunker = new DirectoryChunker({ maxChunkSize: maxInputTokens });
      const chunks = await chunker.chunk({
        workspace: context.workspace,
        directory: params.subDirectory,
      });
      // Fold each chunk into a single running summary, one LLM call per chunk.
      let summary;
      for (const chunk of chunks) {
        summary = await this.askLlm(prompt(summary, chunk));
      }
      // Return the call and its result as chat messages for the agent's history.
      return {
        outputs: [],
        messages: [
          ChatMessageBuilder.functionCall(this.name, rawParams),
          ChatMessageBuilder.functionCallResult(this.name, summary || ''),
        ],
      };
    };
  }
}
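
// Usage sketch (hypothetical, not part of the original file): `llm`, `tokenizer`, and
// `context` are assumed to be supplied by the surrounding agent runtime, with the shapes
// expected by LlmAgentFunctionBase and DirectoryChunker in ../agent-core.
//
//   const fn = new SummarizeDirectoryFunction(llm, tokenizer);
//   const run = fn.buildExecutor({ context });
//   const args = { subDirectory: 'src' };
//   const { messages } = await run(args, JSON.stringify(args));
//   // messages[1] is the functionCallResult carrying the directory summary.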