gepa-ts
Version: 
TypeScript implementation of GEPA (Gradient-free Evolution of Prompts and Agents) - Complete port with 100% feature parity
JavaScript
import { GEPAEngine } from './core/engine.js';
import { GEPAResult } from './core/result.js';
import { DefaultAdapter } from './adapters/default-adapter.js';
import { StdOutLogger } from './logging/logger.js';
import { ReflectiveMutationProposer } from './proposer/reflective-mutation.js';
import { MergeProposer } from './proposer/merge.js';
import { ParetoCandidateSelector, CurrentBestCandidateSelector } from './strategies/candidate-selector.js';
import { RoundRobinReflectionComponentSelector } from './strategies/component-selector.js';
import { EpochShuffledBatchSampler } from './strategies/batch-sampler.js';
export async function optimize(config) {
    const { seedCandidate, trainset, valset = trainset, maxMetricCalls,
        candidateSelectionStrategy = 'pareto', skipPerfectScore = true, reflectionMinibatchSize = 3,
        perfectScore = 1, useMerge = false, maxMergeInvocations = 5, seed = 0,
        raiseOnException = true, trackBestOutputs = false, displayProgressBar = false } = config;
    let { adapter, taskLM, reflectionLM, logger } = config;
    const { runDir = null } = config;
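    // Exactly one of `adapter` or `taskLM` may be supplied: with no adapter, a
    // DefaultAdapter is built around the task LM; with an adapter, taskLM must stay unset.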
    if (!adapter) {
        if (!taskLM) {
            throw new Error('Since no adapter is provided, GEPA requires a task LM to be provided. Please set the `taskLM` parameter.');
        }
        adapter = new DefaultAdapter(taskLM);
    }
    else if (taskLM) {
        throw new Error('Since an adapter is provided, GEPA does not require a task LM. Please set the `taskLM` parameter to undefined.');
    }
    if (adapter && !adapter.proposeNewTexts && !reflectionLM) {
        throw new Error('reflectionLM was not provided. The adapter used does not provide a proposeNewTexts method, ' +
            'and hence, GEPA will use the default proposer, which requires a reflectionLM to be specified.');
    }
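    // A string reflectionLM is treated as an OpenAI model name and wrapped in a
    // one-shot chat-completion call.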
    if (typeof reflectionLM === 'string') {
        const modelName = reflectionLM;
        reflectionLM = async (prompt) => {
            const { OpenAI } = await import('openai');
            const openai = new OpenAI();
            const response = await openai.chat.completions.create({
                model: modelName,
                messages: [{ role: 'user', content: prompt }],
                temperature: 0.7
            });
            return response.choices[0].message.content || '';
        };
    }
    logger = logger || new StdOutLogger();
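    // Deterministic sine-based pseudo-random generator derived from the numeric seed,
    // so candidate selection and batch shuffling are reproducible across runs.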
    let seedValue = seed;
    const rng = () => {
        const x = Math.sin(seedValue++) * 10000;
        return x - Math.floor(x);
    };
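    // Strategy objects: Pareto or current-best candidate selection, round-robin choice
    // of the component to reflect on, and epoch-shuffled minibatch sampling.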
    const candidateSelector = candidateSelectionStrategy === 'pareto' ?
        new ParetoCandidateSelector(rng) :
        new CurrentBestCandidateSelector();
    const componentSelector = new RoundRobinReflectionComponentSelector();
    const batchSampler = new EpochShuffledBatchSampler(reflectionMinibatchSize, rng);
    const reflectiveProposer = new ReflectiveMutationProposer({
        logger,
        trainset,
        adapter: adapter,
        candidateSelector,
        componentSelector,
        batchSampler,
        perfectScore,
        skipPerfectScore,
        useWandB: config.useWandB,
        reflectionLM: reflectionLM
    });
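    // Normalize the adapter's evaluate() (sync or async) to the [outputs, scores]
    // pair expected by the engine and the merge proposer.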
    const evaluator = (inputs, prog) => {
        const evalOut = adapter.evaluate(inputs, prog, false);
        if (evalOut instanceof Promise) {
            return evalOut.then(result => [result.outputs, result.scores]);
        }
        return [evalOut.outputs, evalOut.scores];
    };
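    // Optional crossover: the merge proposer is only created when useMerge is enabled
    // and is capped at maxMergeInvocations.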
    let mergeProposer;
    if (useMerge) {
        mergeProposer = new MergeProposer({
            logger,
            valset,
            evaluator,
            useMerge,
            maxMergeInvocations,
            rng
        });
    }
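    // Assemble the engine with both proposers, run the optimization loop, and
    // convert the final engine state into a GEPAResult.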
    const engine = new GEPAEngine({
        runDir,
        evaluator,
        valset,
        seedCandidate,
        maxMetricCalls,
        perfectScore,
        seed,
        reflectiveProposer,
        mergeProposer,
        logger,
        useWandB: config.useWandB,
        wandBConfig: {
            apiKey: config.wandBApiKey,
            initKwargs: config.wandBInitKwargs
        },
        trackBestOutputs,
        displayProgressBar,
        raiseOnException
    });
    const state = await engine.run();
    return GEPAResult.fromState(state);
}
//# sourceMappingURL=api.js.map
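A minimal usage sketch follows. It assumes the package entry point re-exports `optimize` (imported here as `gepa-ts`), that `DefaultAdapter` accepts a model name for `taskLM`, and that the seed candidate maps component names to prompt text; the training-record fields shown are illustrative, since their exact shape is defined by the adapter in use.
JavaScript
import { optimize } from 'gepa-ts'; // assumed entry point re-exporting api.js

// Illustrative seed candidate: component name -> prompt text to be evolved.
const seedCandidate = {
    systemPrompt: 'You are a careful assistant. Answer the question concisely.'
};

// Illustrative training records; the exact field names depend on the adapter.
const trainset = [
    { input: 'What is the capital of France?', answer: 'Paris' },
    { input: 'What is 12 * 7?', answer: '84' }
];

const result = await optimize({
    seedCandidate,
    trainset,                          // valset defaults to trainset
    taskLM: 'gpt-4o-mini',             // assumption: DefaultAdapter accepts a model name
    reflectionLM: 'gpt-4o-mini',       // a string is wrapped in an OpenAI chat call
    maxMetricCalls: 200,               // evaluation budget for the run
    candidateSelectionStrategy: 'pareto'
});

console.log(result); // GEPAResult built from the final engine state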