@ceramicnetwork/job-queue

Ceramic persistent job queue

import PgBoss from 'pg-boss';
import Pg from 'pg';
import { fromEvent, firstValueFrom, timeout, throwError, filter, interval, mergeMap } from 'rxjs';

// Adapter exposing a pg Pool through the `executeSql` interface pg-boss expects.
class PgWrapper {
    constructor(db) {
        this.db = db;
    }
    executeSql(text, values) {
        return this.db.query(text, values);
    }
}

export class JobQueue {
    // `db` is a Postgres connection string; `logger` must expose an `err` method.
    constructor(db, logger) {
        this.logger = logger;
        this.jobs = [];
        this.dbConnection = new Pg.Pool({
            connectionString: db,
        });
        this.queue = new PgBoss({ db: new PgWrapper(this.dbConnection) });
        this.queue.on('error', (err) => {
            this.logger.err(`Error received by job queue: ${err}`);
        });
    }
    // Returns the ids of jobs in the given state whose names are in `jobTypes`.
    async _getJobIds(state = 'active', jobTypes = this.jobs) {
        const result = await this.dbConnection.query(`SELECT id FROM pgboss.job WHERE state = '${state}' and name IN (${jobTypes
            .map((jobName) => `'${jobName}'`)
            .join(', ')})`);
        return result.rows.map(({ id }) => id);
    }
    // Fetches jobs in the given state and groups them by job name.
    async getJobs(state = 'active', jobTypes = this.jobs) {
        const activeJobsIds = await this._getJobIds(state, jobTypes);
        const jobs = await Promise.all(activeJobsIds.map((jobId) => this.queue.getJobById(jobId)));
        return jobs.reduce((jobsByJobName, job) => {
            if (job == null) {
                return jobsByJobName;
            }
            if (!jobsByJobName[job.name])
                jobsByJobName[job.name] = [];
            jobsByJobName[job.name].push({
                name: job.name,
                data: job.data,
                id: job.id,
                startedOn: job.startedon,
                createdOn: job.createdon,
                completedOn: job.completedon,
            });
            return jobsByJobName;
        }, {});
    }
    // Starts the queue and registers one worker per job name. If `resumeActive` is true,
    // jobs left in the active state by a previous run are cancelled and resumed so they
    // can be picked up again.
    async init(workersByJob, resumeActive = true) {
        this.jobs = Object.keys(workersByJob);
        await this.dbConnection.query('CREATE EXTENSION IF NOT EXISTS "pgcrypto"');
        await this.queue.start();
        if (resumeActive) {
            const activeJobsIds = await this._getJobIds('active');
            if (activeJobsIds.length > 0) {
                await this.queue.cancel(activeJobsIds);
                await this.queue.resume(activeJobsIds);
            }
        }
        await Promise.all(Object.entries(workersByJob).map(([jobName, worker]) =>
            this.queue.work(jobName, { teamRefill: true }, worker.handler.bind(worker))));
    }
    _workerExistsForJob(jobName) {
        return this.jobs.includes(jobName);
    }
    // Enqueues a single job. The job name must have a worker registered via `init`.
    async addJob(job) {
        if (!this._workerExistsForJob(job.name)) {
            throw Error(`Cannot add job ${job.name} to queue because no workers for that job exist`);
        }
        await this.queue.send(job.name, job.data, job.options);
    }
    // Enqueues multiple jobs in a single insert. Every job name must have a registered worker.
    async addJobs(jobs) {
        const jobWithoutWorker = jobs.find((job) => !this._workerExistsForJob(job.name));
        if (jobWithoutWorker) {
            throw Error(`Cannot add job ${jobWithoutWorker.name} to queue because no workers for that job exist`);
        }
        await this.queue.insert(jobs.map((job) => ({
            name: job.name,
            data: job.data,
        })));
    }
    // Gracefully stops the queue, waits for it to report it has stopped, then closes the pool.
    async stop() {
        await this.queue.stop({ graceful: true });
        await this.queue.boss.stop();
        await firstValueFrom(fromEvent(this.queue, 'stopped'));
        if (this.dbConnection) {
            await this.dbConnection.end();
            this.dbConnection = null;
        }
    }
    // Replaces the stored data payload of an existing job.
    async updateJob(jobId, data) {
        const text = 'UPDATE pgboss.job set data = $1 WHERE id = $2';
        const values = [data, jobId];
        const result = await this.dbConnection.query(text, values);
        if (result.rowCount !== 1) {
            throw Error(`Unable to update job with id ${jobId}`);
        }
    }
    // Test helper: wipes all pg-boss storage.
    async _clearAllJobs() {
        await this.queue.clearStorage();
    }
    // Test helper: polls every 500 ms until no registered queue has pending jobs, failing after 30 s.
    async _waitForAllJobsToComplete() {
        await firstValueFrom(interval(500).pipe(
            mergeMap(() => Promise.all(this.jobs.map(async (jobName) => this.queue.getQueueSize(jobName, { before: 'completed' })))),
            filter((jobCounts) => jobCounts.every((count) => count === 0)),
            timeout({
                each: 30000,
                with: () => throwError(() => new Error(`Timeout waiting for jobs to complete`)),
            })));
    }
}
//# sourceMappingURL=index.js.map
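
Below is a minimal usage sketch, not part of the package source. The connection string, the 'anchor' job name, the payload, and the worker object are hypothetical placeholders; the only interfaces the code above actually requires are an async handler(job) method on each worker and an err(message) method on the logger.

import { JobQueue } from '@ceramicnetwork/job-queue';

// Hypothetical logger satisfying the `err` interface used by JobQueue.
const logger = { err: (msg) => console.error(msg) };

// Hypothetical worker: any object with an async `handler(job)` method.
const anchorWorker = {
    async handler(job) {
        console.log(`processing job ${job.id}`, job.data);
    },
};

// Hypothetical connection string; point this at your own Postgres instance.
const queue = new JobQueue('postgres://user:pass@localhost:5432/ceramic', logger);

// Register workers (one per job name) and start the queue.
await queue.init({ anchor: anchorWorker });

// Enqueue a job; `options` is passed through to pg-boss's send().
await queue.addJob({ name: 'anchor', data: { streamId: 'example' }, options: {} });

// Gracefully shut down when done.
await queue.stop();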