@ai2070/l0
Version:
L0: The Missing Reliability Substrate for AI
257 lines • 8.72 kB
JavaScript
import { l0 } from "./l0";
/**
 * Run multiple l0 operations concurrently with a bounded concurrency limit.
 *
 * Results and errors are reported positionally: `results[i]` / `errors[i]`
 * correspond to `operations[i]`. The function never rejects — failures are
 * collected in `errors`; `failFast` only stops launching new operations
 * after the first failure.
 *
 * @param {Array<object>} operations - l0 operation configs to execute.
 * @param {object} [options]
 * @param {number} [options.concurrency=5] - max operations in flight at once.
 * @param {boolean} [options.failFast=false] - stop scheduling new operations after the first failure.
 * @param {object} [options.sharedRetry] - retry config for ops that lack their own.
 * @param {object} [options.sharedMonitoring] - monitoring config for ops that lack their own.
 * @param {(completed: number, total: number) => void} [options.onProgress]
 * @param {(result: object, index: number) => void} [options.onComplete]
 * @param {(error: Error, index: number) => void} [options.onError]
 * @returns {Promise<object>} summary: results, errors, counts, duration, aggregated telemetry.
 */
export async function parallel(operations, options = {}) {
    const { concurrency = 5, failFast = false, sharedRetry, sharedMonitoring, onProgress, onComplete, onError, } = options;
    const startTime = Date.now();
    const results = new Array(operations.length).fill(null);
    const errors = new Array(operations.length).fill(null);
    let completed = 0;
    let successCount = 0;
    let failureCount = 0;
    // Per-operation config wins; the shared config is only a fallback.
    const mergedOperations = operations.map((op) => ({
        ...op,
        retry: op.retry || sharedRetry,
        monitoring: op.monitoring || sharedMonitoring,
    }));
    const queue = mergedOperations.map((op, index) => ({ op, index }));
    const executing = [];
    const processOperation = async (item) => {
        try {
            const result = await l0(item.op);
            // Drain the stream so the operation actually runs to completion.
            for await (const _event of result.stream) {
            }
            results[item.index] = result;
            successCount++;
            onComplete?.(result, item.index);
        }
        catch (error) {
            const err = error instanceof Error ? error : new Error(String(error));
            errors[item.index] = err;
            failureCount++;
            onError?.(err, item.index);
            if (failFast) {
                // Propagate so the scheduling loop below stops launching new work.
                throw err;
            }
        }
        finally {
            completed++;
            onProgress?.(completed, operations.length);
        }
    };
    try {
        for (const item of queue) {
            // BUGFIX: use .finally (not .then) so a REJECTED wrapper is also
            // removed from the in-flight set; .finally re-throws rejections,
            // preserving failFast propagation through Promise.race below.
            const promise = processOperation(item).finally(() => {
                const pos = executing.indexOf(promise);
                if (pos !== -1) {
                    executing.splice(pos, 1);
                }
            });
            executing.push(promise);
            if (executing.length >= concurrency) {
                // Wait for a slot to free up before launching the next operation.
                await Promise.race(executing);
            }
        }
        await Promise.all(executing);
    }
    catch {
        // A rejection escaped the scheduling loop (failFast, or a throwing
        // user callback). BUGFIX: always wait for in-flight operations to
        // settle — previously this only happened under failFast, leaving
        // counters and result slots unfinished for other escaped rejections.
        await Promise.allSettled(executing);
    }
    const duration = Date.now() - startTime;
    const allSucceeded = failureCount === 0;
    const aggregatedTelemetry = aggregateTelemetry(results.filter((r) => r !== null));
    return {
        results,
        errors,
        successCount,
        failureCount,
        duration,
        allSucceeded,
        aggregatedTelemetry,
    };
}
/**
 * Run every operation at once — no concurrency cap beyond the list length.
 * Thin wrapper over parallel() with concurrency set to operations.length.
 */
export async function parallelAll(operations, options = {}) {
    const unbounded = { ...options, concurrency: operations.length };
    return parallel(operations, unbounded);
}
/**
 * Run the operations strictly one after another.
 * Thin wrapper over parallel() with a concurrency of 1.
 */
export async function sequential(operations, options = {}) {
    const oneAtATime = { ...options, concurrency: 1 };
    return parallel(operations, oneAtATime);
}
/**
 * Execute operations in consecutive batches of `batchSize`: batches run
 * sequentially while the operations inside a batch run concurrently.
 *
 * @param {Array<object>} operations - l0 operation configs to execute.
 * @param {number} batchSize - number of operations per batch (positive integer).
 * @param {object} [options] - same options as parallel(); onProgress receives
 *   overall (not per-batch) completion counts.
 * @returns {Promise<object>} combined summary across all executed batches.
 * @throws {RangeError} if batchSize is not a positive integer.
 */
export async function batched(operations, batchSize, options = {}) {
    // BUGFIX: a non-positive batchSize previously made the chunking loop
    // below spin forever (i += 0 never advances); fail loudly instead.
    if (!Number.isInteger(batchSize) || batchSize < 1) {
        throw new RangeError(`batchSize must be a positive integer, got ${batchSize}`);
    }
    const allResults = [];
    const allErrors = [];
    let totalSuccess = 0;
    let totalFailure = 0;
    let totalDuration = 0;
    // Split the operations into contiguous chunks of at most batchSize.
    const batches = [];
    for (let i = 0; i < operations.length; i += batchSize) {
        batches.push(operations.slice(i, i + batchSize));
    }
    for (let batchIndex = 0; batchIndex < batches.length; batchIndex++) {
        const batch = batches[batchIndex];
        const result = await parallel(batch, {
            ...options,
            concurrency: batchSize,
            // Translate the per-batch progress counter into an overall one.
            onProgress: options.onProgress
                ? (completed, _total) => {
                    const overallCompleted = batchIndex * batchSize + completed;
                    options.onProgress(overallCompleted, operations.length);
                }
                : undefined,
        });
        allResults.push(...result.results);
        allErrors.push(...result.errors);
        totalSuccess += result.successCount;
        totalFailure += result.failureCount;
        totalDuration += result.duration;
        // In failFast mode, skip the remaining batches after any failure.
        if (options.failFast && !result.allSucceeded) {
            break;
        }
    }
    const aggregatedTelemetry = aggregateTelemetry(allResults.filter((r) => r !== null));
    return {
        results: allResults,
        errors: allErrors,
        successCount: totalSuccess,
        failureCount: totalFailure,
        duration: totalDuration,
        allSucceeded: totalFailure === 0,
        aggregatedTelemetry,
    };
}
/**
 * Race multiple l0 operations: resolve with the first one to succeed and
 * abort all the others. Rejects only when every operation fails (with the
 * first failure's error).
 *
 * @param {Array<object>} operations - l0 operation configs to race.
 * @param {object} [options]
 * @param {object} [options.sharedRetry] - retry config for ops that lack their own.
 * @param {object} [options.sharedMonitoring] - monitoring config for ops that lack their own.
 * @returns {Promise<object>} the winning result plus its `winnerIndex`.
 */
export async function race(operations, options = {}) {
    const { sharedRetry, sharedMonitoring } = options;
    // One controller per operation so losers can be cancelled once a winner settles.
    const controllers = operations.map(() => new AbortController());
    const mergedOperations = operations.map((op, index) => ({
        ...op,
        retry: op.retry || sharedRetry,
        monitoring: op.monitoring || sharedMonitoring,
        signal: controllers[index].signal,
    }));
    const promises = mergedOperations.map(async (op, index) => {
        const result = await l0(op);
        // Drain the stream so the operation runs to completion.
        for await (const _event of result.stream) {
        }
        return { result, index };
    });
    // BUGFIX: losing operations may reject AFTER Promise.any has settled
    // (e.g. when aborted); attach a no-op handler so those late rejections
    // are not reported as unhandled promise rejections.
    for (const contender of promises) {
        contender.catch(() => { });
    }
    try {
        const { result, index } = await Promise.any(promises);
        controllers.forEach((controller) => controller.abort());
        return { ...result, winnerIndex: index };
    }
    catch (error) {
        controllers.forEach((controller) => controller.abort());
        // Promise.any wraps all failures in an AggregateError; surface the
        // first underlying error to the caller.
        if (error instanceof AggregateError) {
            throw error.errors[0] || new Error("All operations failed");
        }
        throw error;
    }
}
/**
 * Combine per-operation telemetry into overall totals and averages.
 * Results without a telemetry payload are skipped; the averages only count
 * operations that actually reported the corresponding metric.
 */
function aggregateTelemetry(results) {
    const totals = {
        totalTokens: 0,
        totalDuration: 0,
        totalRetries: 0,
        totalNetworkErrors: 0,
        totalViolations: 0,
    };
    let tpsSum = 0;
    let tpsCount = 0;
    let ttftSum = 0;
    let ttftCount = 0;
    for (const { telemetry } of results) {
        if (!telemetry) {
            continue;
        }
        totals.totalTokens += telemetry.metrics.totalTokens;
        totals.totalDuration += telemetry.duration || 0;
        totals.totalRetries += telemetry.metrics.totalRetries;
        totals.totalNetworkErrors += telemetry.network.errorCount;
        totals.totalViolations += telemetry.guardrails?.violationCount || 0;
        if (telemetry.metrics.tokensPerSecond !== undefined) {
            tpsSum += telemetry.metrics.tokensPerSecond;
            tpsCount++;
        }
        if (telemetry.metrics.timeToFirstToken !== undefined) {
            ttftSum += telemetry.metrics.timeToFirstToken;
            ttftCount++;
        }
    }
    return {
        ...totals,
        avgTokensPerSecond: tpsCount > 0 ? tpsSum / tpsCount : 0,
        avgTimeToFirstToken: ttftCount > 0 ? ttftSum / ttftCount : 0,
    };
}
/**
 * A persistent worker pool for l0 operations: submit work with execute();
 * at most `concurrency` operations run at any moment, the rest wait in
 * FIFO order until a slot frees up.
 */
export class OperationPool {
    concurrency;
    options;
    queue = [];
    activeWorkers = 0;
    /**
     * @param {number} concurrency - max operations in flight simultaneously.
     * @param {object} [options] - sharedRetry / sharedMonitoring fallbacks
     *   applied to every operation that lacks its own.
     */
    constructor(concurrency, options = {}) {
        this.concurrency = concurrency;
        this.options = options;
    }
    /**
     * Enqueue one operation. Resolves with its l0 result (stream fully
     * drained) or rejects with the operation's error.
     */
    async execute(operation) {
        return new Promise((resolve, reject) => {
            this.queue.push({ op: operation, resolve, reject });
            this.processQueue();
        });
    }
    // Start at most one queued operation, if a worker slot is free.
    async processQueue() {
        const hasCapacity = this.activeWorkers < this.concurrency;
        if (!hasCapacity) {
            return;
        }
        const next = this.queue.shift();
        if (next === undefined) {
            return;
        }
        this.activeWorkers++;
        try {
            // Operation-level settings take precedence over pool-wide defaults.
            const merged = {
                ...next.op,
                retry: next.op.retry || this.options.sharedRetry,
                monitoring: next.op.monitoring || this.options.sharedMonitoring,
            };
            const result = await l0(merged);
            // Consume the stream so the operation runs to completion.
            for await (const _event of result.stream) {
            }
            next.resolve(result);
        }
        catch (error) {
            next.reject(error instanceof Error ? error : new Error(String(error)));
        }
        finally {
            this.activeWorkers--;
            // A slot just freed up: pull the next queued operation, if any.
            this.processQueue();
        }
    }
    /** Resolve once the queue is empty and every worker is idle (polls every 10 ms). */
    async drain() {
        while (this.queue.length > 0 || this.activeWorkers > 0) {
            await new Promise((resolve) => setTimeout(resolve, 10));
        }
    }
    /** Number of operations still waiting for a worker slot. */
    getQueueLength() {
        return this.queue.length;
    }
    /** Number of operations currently executing. */
    getActiveWorkers() {
        return this.activeWorkers;
    }
}
/** Convenience factory mirroring `new OperationPool(concurrency, options)`. */
export function createPool(concurrency, options = {}) {
    const pool = new OperationPool(concurrency, options);
    return pool;
}
//# sourceMappingURL=parallel.js.map