@bitrix24/b24jssdk
Version: 1.0.5
Bitrix24 REST API JavaScript SDK
94 lines (91 loc) • 3.71 kB
JavaScript
/**
* @package @bitrix24/b24jssdk
* @version 1.0.5
* @copyright (c) 2026 Bitrix24
* @license MIT
* @see https://github.com/bitrix24/b24jssdk
* @see https://bitrix24.github.io/b24jssdk/
*/
import { AbstractBatch } from '../abstract-batch.mjs';
import { ApiVersion } from '../../../types/b24.mjs';
import { Result } from '../../result.mjs';
// Bundler-emitted helpers: stamp a readable `name` onto generated classes and
// functions so stack traces stay meaningful after transpilation/minification.
var __defProp = Object.defineProperty;
var __name = function (target, value) {
return __defProp(target, "name", { value, configurable: true });
};
// Batch action that transparently splits an arbitrarily long command list into
// API-sized chunks and runs them sequentially against the v2 REST endpoint.
class BatchByChunkV2 extends AbstractBatch {
static {
// Bundler helper: restore the class name on the constructor (see __name above).
__name(this, "BatchByChunkV2");
}
/**
* Executes a batch request with automatic chunking for any number of commands.
* Unlike `BatchV2`, which is limited to 50 commands, this method automatically splits
* a large set of commands into multiple batches and executes them sequentially.
*
* @template T - The data type returned by commands (default: `unknown`)
*
* @param {ActionBatchByChunkV2} options - parameters for executing the request.
* - `calls: BatchCommandsArrayUniversal | BatchCommandsObjectUniversal` - Commands to execute in a batch.
* Supports several formats:
* 1. Array of tuples: `[['method1', params1], ['method2', params2], ...]`
* 2. Array of objects: `[{ method: 'method1', params: params1 }, { method: 'method2', params: params2 }, ...]`
* - Note: Named commands are not supported as they are difficult to process when chunking.
* - `options?: Omit<IB24BatchOptions, 'returnAjaxResult'>` - Additional options for executing a batch request.
* - `isHaltOnError?: boolean` - Whether to stop execution on the first error (default: true)
* - `requestId?: string` - Unique request identifier for tracking. Used for query deduplication and debugging (default: undefined)
*
* @returns {Promise<Result<T[]>>} A promise that is resolved by the result of executing all commands.
*
* @example
* import { EnumCrmEntityTypeId, Text } from '@bitrix24/b24jssdk'
*
* interface Contact { id: number, name: string }
* const commands = Array.from({ length: 150 }, (_, i) =>
* ['crm.item.get', { entityTypeId: EnumCrmEntityTypeId.contact, id: i + 1 }]
* )
*
* const response = await b24.actions.v2.batchByChunk.make<{ item: Contact }>({
* calls: commands,
* options: {
* isHaltOnError: false,
* requestId: 'batch-by-chunk-123'
* }
* })
*
* if (!response.isSuccess) {
* throw new Error(`Problem: ${response.getErrorMessages().join('; ')}`)
* }
*
* const resultData = response.getData()
* const items: Contact[] = []
* resultData.forEach((chunkRow) => {
* items.push(chunkRow.item)
* })
* console.log(`Successfully retrieved ${items.length} items`)
*
* @tip For very large command sets, consider using server-side task queues instead of bulk batch requests.
*/
async make(options) {
// REST batch hard limit: at most 50 commands per single batch call.
const batchSize = 50;
const opts = {
...options.options,
// Force raw (non-ajax-wrapped) results; this method flattens command data itself.
// Caller-supplied 'returnAjaxResult' is deliberately overridden.
returnAjaxResult: false,
apiVersion: ApiVersion.v2
};
// Aggregate Result: collects errors across all chunks, then carries the flat data.
const result = new Result();
const dataResult = [];
// chunkArray is inherited from AbstractBatch; splits calls into <=50-item slices.
const chunks = this.chunkArray(options.calls, batchSize);
// Chunks are awaited one at a time (sequential, not Promise.all) — this keeps
// server load bounded and preserves command ordering in dataResult.
for (const chunkRequest of chunks) {
const response = await this._b24.getHttpClient(ApiVersion.v2).batch(chunkRequest, opts);
if (!response.isSuccess) {
// Record chunk-level errors on the aggregate result, but keep going.
// NOTE(review): the loop never breaks here, so 'isHaltOnError' appears to be
// enforced (if at all) inside the underlying batch() call, not in this method
// — confirm against the http client implementation.
this._addBatchErrorsIfAny(response, result);
}
// NOTE(review): getData() is read even when isSuccess is false — this assumes
// a failed batch response still carries a (possibly partial) iterable
// '.result' of [key, Result] entries; verify against the Result/http-client
// contract, otherwise this line can throw on a hard failure.
for (const [_index, data] of response.getData().result) {
if (data.isSuccess) {
// Only successful commands contribute; failed ones were captured above.
dataResult.push(data.getData().result);
}
}
}
// Note: result may hold accumulated errors AND data at the same time; callers
// check isSuccess / getErrorMessages() per the class-level example.
return result.setData(dataResult);
}
}
export { BatchByChunkV2 };
//# sourceMappingURL=batch-by-chunk.mjs.map