// Package: @elsikora/commitizen-plugin-commitlint-ai
// Version: (unspecified)
// AI-powered Commitizen adapter with Commitlint integration
// Declaration file — 46 lines (45 loc) • 1.94 kB • TypeScript
import type { ILlmPromptContext, ILlmService } from '../../application/interface/llm-service.interface';
import type { LLMConfiguration } from '../../domain/entity/llm-configuration.entity';
import { CommitMessage } from '../../domain/entity/commit-message.entity';
/**
 * Ollama implementation of the LLM service.
 *
 * Implements `ILlmService`: `supports()` lets callers check whether a given
 * configuration should be routed to this provider, and
 * `generateCommitMessage()` produces a `CommitMessage` entity from a prompt
 * context. The private members below build the prompts, translate commitlint
 * rules into instructions for the model, and parse the model's reply.
 *
 * NOTE(review): this is an ambient declaration (.d.ts) — runtime behavior
 * (HTTP calls, model selection, error handling) lives in the implementation
 * file and cannot be verified from here.
 */
export declare class OllamaLlmService implements ILlmService {
/**
 * Generate a commit message using Ollama.
 *
 * @param context - Prompt context to generate the commit message from
 * @param configuration - LLM configuration to use for the request
 * @returns Promise resolving to the generated, parsed commit message
 */
generateCommitMessage(context: ILlmPromptContext, configuration: LLMConfiguration): Promise<CommitMessage>;
/**
 * Check whether this service can handle the given configuration
 * (presumably true when the configuration targets an Ollama provider —
 * the exact matching criterion is in the implementation; confirm there).
 *
 * @param configuration - The LLM configuration to check
 * @returns True if this service supports the configuration
 */
supports(configuration: LLMConfiguration): boolean;
/**
 * Build the system prompt sent to Ollama from the prompt context.
 * (Private members in a declaration file expose only the name, not the
 * signature; see the implementation for parameters.)
 */
private buildSystemPrompt;
/**
 * Build the user prompt sent to Ollama from the prompt context.
 */
private buildUserPrompt;
/**
 * Format a commitlint rules object (Record<string, unknown>) into
 * human-readable instructions the model can follow.
 */
private formatCommitlintRules;
/**
 * Parse a CommitMessage entity out of the raw text returned by the LLM.
 */
private parseCommitMessage;
}