@speckle/objectloader2
Version:
This is an updated object loader for the Speckle viewer, written in TypeScript.
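The file shown below is the compiled vitest suite for the package's BatchingQueue helper. As a quick orientation, here is a minimal usage sketch based only on the API those tests exercise (the constructor options batchSize, maxWaitTime and processFunction, plus add, get, count, flush and disposeAsync); the ESM import path and the plain-string items are illustrative assumptions, not documented API.

import BatchingQueue from './batchingQueue.js'

const queue = new BatchingQueue({
  batchSize: 3,     // a full batch is handed to processFunction immediately
  maxWaitTime: 100, // a partial batch is flushed after 100 ms
  processFunction: async (batch) => {
    // receives an array of queued items, e.g. ['item1', 'item2']
    console.log('processing batch:', batch)
  }
})

queue.add('key1', 'item1')
queue.add('key2', 'item2')

// Fewer items than batchSize: they stay queued until the timer fires.
console.log(queue.count())     // 2
console.log(queue.get('key1')) // 'item1'

await queue.flush()        // force the partial batch through processFunction now
await queue.disposeAsync() // drain anything left and mark the queue disposed

The tests below pin down the same behaviour: full batches flush immediately, partial batches flush after maxWaitTime, and disposeAsync drains whatever is still queued.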
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const vitest_1 = require("vitest");
const batchingQueue_js_1 = __importDefault(require("./batchingQueue.js"));
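// Vitest suite for BatchingQueue, compiled from the TypeScript source.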
(0, vitest_1.describe)('BatchingQueue', () => {
(0, vitest_1.test)('should add items and process them in batches', async () => {
const processSpy = vitest_1.vi.fn();
const queue = new batchingQueue_js_1.default({
batchSize: 2,
maxWaitTime: 100,
processFunction: async (batch) => {
await new Promise((resolve) => setTimeout(resolve, 0));
processSpy(batch);
}
});
try {
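// With batchSize 2, adding the second item fills the batch and triggers processing.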
queue.add('key1', 'item1');
queue.add('key2', 'item2');
await new Promise((resolve) => setTimeout(resolve, 200));
(0, vitest_1.expect)(processSpy).toHaveBeenCalledTimes(1);
(0, vitest_1.expect)(processSpy).toHaveBeenCalledWith(['item1', 'item2']);
}
finally {
await queue.disposeAsync();
}
});
(0, vitest_1.test)('should process items after timeout if batch size is not reached', async () => {
const processSpy = vitest_1.vi.fn();
const queue = new batchingQueue_js_1.default({
batchSize: 5,
maxWaitTime: 100,
processFunction: async (batch) => {
await new Promise((resolve) => setTimeout(resolve, 0));
processSpy(batch);
}
});
try {
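// Only two of the five batch slots are filled, so the 100 ms maxWaitTime timer has to flush them.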
queue.add('key1', 'item1');
queue.add('key2', 'item2');
await new Promise((resolve) => setTimeout(resolve, 200));
(0, vitest_1.expect)(processSpy).toHaveBeenCalledTimes(1);
(0, vitest_1.expect)(processSpy).toHaveBeenCalledWith(['item1', 'item2']);
}
finally {
await queue.disposeAsync();
}
});
(0, vitest_1.test)('should handle multiple batches correctly', async () => {
const processSpy = vitest_1.vi.fn();
const queue = new batchingQueue_js_1.default({
batchSize: 2,
maxWaitTime: 100,
processFunction: async (batch) => {
await new Promise((resolve) => setTimeout(resolve, 0));
processSpy(batch);
}
});
try {
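// Four items with a batch size of 2 should be processed as two separate batches.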
queue.add('key1', 'item1');
queue.add('key2', 'item2');
queue.add('key3', 'item3');
queue.add('key4', 'item4');
await new Promise((resolve) => setTimeout(resolve, 200));
(0, vitest_1.expect)(processSpy).toHaveBeenCalledTimes(2);
(0, vitest_1.expect)(processSpy).toHaveBeenCalledWith(['item1', 'item2']);
(0, vitest_1.expect)(processSpy).toHaveBeenCalledWith(['item3', 'item4']);
}
finally {
await queue.disposeAsync();
}
});
(0, vitest_1.test)('should retrieve items by key', async () => {
const queue = new batchingQueue_js_1.default({
batchSize: 3,
maxWaitTime: 100,
processFunction: async () => {
await new Promise((resolve) => setTimeout(resolve, 0));
}
});
try {
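// With a batch size of 3 the two items stay queued, so get() can look them up by key.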
queue.add('key1', 'item1');
queue.add('key2', 'item2');
(0, vitest_1.expect)(queue.get('key1')).toBe('item1');
(0, vitest_1.expect)(queue.get('key2')).toBe('item2');
(0, vitest_1.expect)(queue.get('key3')).toBeUndefined();
}
finally {
await queue.disposeAsync();
}
});
(0, vitest_1.test)('should return correct count of items', async () => {
const queue = new batchingQueue_js_1.default({
batchSize: 3,
maxWaitTime: 100,
processFunction: async () => {
await new Promise((resolve) => setTimeout(resolve, 0));
}
});
try {
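// count() reflects how many items are currently queued.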
(0, vitest_1.expect)(queue.count()).toBe(0);
queue.add('key1', 'item1');
queue.add('key2', 'item2');
(0, vitest_1.expect)(queue.count()).toBe(2);
}
finally {
await queue.disposeAsync();
}
});
(0, vitest_1.test)('should not process items if already processing', async () => {
const processSpy = vitest_1.vi.fn();
const queue = new batchingQueue_js_1.default({
batchSize: 2,
maxWaitTime: 100,
processFunction: async (batch) => {
processSpy(batch);
await new Promise((resolve) => setTimeout(resolve, 300));
}
});
try {
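// The 300 ms processFunction keeps the queue busy, so 'item3' can only go out in a second, later batch.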
queue.add('key1', 'item1');
queue.add('key2', 'item2');
queue.add('key3', 'item3');
await new Promise((resolve) => setTimeout(resolve, 200));
(0, vitest_1.expect)(processSpy).toHaveBeenCalledTimes(1);
(0, vitest_1.expect)(processSpy).toHaveBeenCalledWith(['item1', 'item2']);
await new Promise((resolve) => setTimeout(resolve, 500));
(0, vitest_1.expect)(processSpy).toHaveBeenCalledTimes(2);
(0, vitest_1.expect)(processSpy).toHaveBeenCalledWith(['item3']);
}
finally {
await queue.disposeAsync();
}
});
(0, vitest_1.test)('should handle processFunction throwing an exception during flush and then be disposed', async () => {
const errorMessage = 'Process function failed';
const processFunction = vitest_1.vi.fn().mockRejectedValue(new Error(errorMessage));
const queue = new batchingQueue_js_1.default({
batchSize: 5,
maxWaitTime: 1000,
processFunction
});
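// Three items stay below the batch size of 5, so nothing is processed until flush() is called.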
const items = Array.from({ length: 3 }, (_, i) => ({ id: `item-${i}` }));
items.forEach((item) => queue.add(item.id, item));
(0, vitest_1.expect)(queue.count()).toBe(3);
// flush should not throw even if processFunction rejects
await (0, vitest_1.expect)(queue.flush()).resolves.not.toThrow();
(0, vitest_1.expect)(processFunction).toHaveBeenCalled();
(0, vitest_1.expect)(queue.count()).toBe(0);
(0, vitest_1.expect)(queue.isDisposed()).toBe(false);
(0, vitest_1.expect)(queue.isErrored()).toBe(true);
// Add more items after the exception
queue.add('key3', { id: `item-3` });
queue.add('key4', { id: `item-4` });
// Wait to see if second batch gets processed (it shouldn't due to errored state)
await new Promise((resolve) => setTimeout(resolve, 200));
(0, vitest_1.expect)(queue.count()).toBe(0); // Items were not added due to errored state
await queue.disposeAsync();
});
(0, vitest_1.test)('should drain remaining items when disposed', async () => {
const processSpy = vitest_1.vi.fn();
const queue = new batchingQueue_js_1.default({
batchSize: 5, // Large batch size to prevent automatic processing
maxWaitTime: 10000, // Long timeout to prevent timeout-based processing
processFunction: async (batch) => {
await new Promise((resolve) => setTimeout(resolve, 10));
processSpy(batch);
}
});
// Add items that won't trigger automatic processing (less than batch size)
queue.add('key1', 'item1');
queue.add('key2', 'item2');
queue.add('key3', 'item3');
// Verify items are in queue but haven't been processed yet
(0, vitest_1.expect)(queue.count()).toBe(3);
(0, vitest_1.expect)(processSpy).not.toHaveBeenCalled();
// Dispose should drain the remaining items
await queue.disposeAsync();
// Verify all items were processed during disposal
(0, vitest_1.expect)(processSpy).toHaveBeenCalledTimes(1);
(0, vitest_1.expect)(processSpy).toHaveBeenCalledWith(['item1', 'item2', 'item3']);
(0, vitest_1.expect)(queue.count()).toBe(0);
(0, vitest_1.expect)(queue.isDisposed()).toBe(true);
});
(0, vitest_1.test)('should drain items even with ongoing processing during dispose', async () => {
const processSpy = vitest_1.vi.fn();
let firstBatchStarted = false;
let allowFirstBatchToComplete = null;
const queue = new batchingQueue_js_1.default({
batchSize: 2,
maxWaitTime: 100,
processFunction: async (batch) => {
processSpy(batch);
// Make the first batch wait for our signal
if (!firstBatchStarted) {
firstBatchStarted = true;
await new Promise((resolve) => {
allowFirstBatchToComplete = resolve;
});
}
else {
// Other batches process normally
await new Promise((resolve) => setTimeout(resolve, 10));
}
}
});
// Add first batch that will trigger processing but will be blocked
queue.add('key1', 'item1');
queue.add('key2', 'item2');
// Wait for first batch to start processing and allowFirstBatchToComplete to be assigned
await new Promise((resolve) => setTimeout(resolve, 50));
(0, vitest_1.expect)(firstBatchStarted).toBe(true);
(0, vitest_1.expect)(processSpy).toHaveBeenCalledTimes(1);
(0, vitest_1.expect)(allowFirstBatchToComplete).not.toBeNull();
// Add more items while first batch is still processing
queue.add('key3', 'item3');
queue.add('key4', 'item4');
// Verify the additional items are queued
(0, vitest_1.expect)(queue.count()).toBe(2);
// Start disposal (this should wait for ongoing processing and then drain)
const disposePromise = queue.disposeAsync();
// Allow the first batch to complete
allowFirstBatchToComplete();
// Wait for disposal to complete
await disposePromise;
// Verify all batches were processed
(0, vitest_1.expect)(processSpy).toHaveBeenCalledTimes(2);
(0, vitest_1.expect)(processSpy).toHaveBeenCalledWith(['item1', 'item2']);
(0, vitest_1.expect)(processSpy).toHaveBeenCalledWith(['item3', 'item4']);
(0, vitest_1.expect)(queue.count()).toBe(0);
(0, vitest_1.expect)(queue.isDisposed()).toBe(true);
});
});
//# sourceMappingURL=batchingQueue.test.js.map