@grouparoo/core

"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.Reset = void 0; const actionhero_1 = require("actionhero"); const App_1 = require("../models/App"); const Destination_1 = require("../models/Destination"); const DestinationGroupMembership_1 = require("../models/DestinationGroupMembership"); const Export_1 = require("../models/Export"); const Group_1 = require("../models/Group"); const GroupMember_1 = require("../models/GroupMember"); const GroupRule_1 = require("../models/GroupRule"); const Import_1 = require("../models/Import"); const Mapping_1 = require("../models/Mapping"); const Option_1 = require("../models/Option"); const GrouparooRecord_1 = require("../models/GrouparooRecord"); const RecordProperty_1 = require("../models/RecordProperty"); const Property_1 = require("../models/Property"); const Filter_1 = require("../models/Filter"); const Run_1 = require("../models/Run"); const Schedule_1 = require("../models/Schedule"); const SetupStep_1 = require("../models/SetupStep"); const Source_1 = require("../models/Source"); /** * Utilities for deleting and resetting things */ var Reset; (function (Reset) { /** * reset:cluster (most dangerous) * * truncates all tables except those regarding teams/members * * clears the redis cache * * clears resque */ async function cluster(callerId) { const counts = {}; const models = [ App_1.App, Destination_1.Destination, DestinationGroupMembership_1.DestinationGroupMembership, Export_1.Export, Group_1.Group, GroupMember_1.GroupMember, GroupRule_1.GroupRule, Import_1.Import, Mapping_1.Mapping, Option_1.Option, GrouparooRecord_1.GrouparooRecord, RecordProperty_1.RecordProperty, Property_1.Property, Filter_1.Filter, Run_1.Run, Schedule_1.Schedule, Source_1.Source, ]; // truncate data tables for (const i in models) { const model = models[i]; const count = await model.count(); await model.truncate(); counts[model.name] = count; } // reset the SetupSteps await SetupStep_1.SetupStep.update({ complete: false }, { where: { complete: true } }); await clearLocalCaches(); await clearRedis(); return counts; } Reset.cluster = cluster; /** * reset:data * * truncates "data" tables (records, groups, etc) but leaves apps, sources, properties * * clears the redis cache * * clears resque */ async function data(callerId) { await GrouparooRecord_1.GrouparooRecord.truncate(); await RecordProperty_1.RecordProperty.truncate(); await GroupMember_1.GroupMember.truncate(); await Import_1.Import.truncate(); await Export_1.Export.truncate(); await Run_1.Run.truncate(); await clearLocalCaches(); await clearRedis(); } Reset.data = data; /** * reset:cache (least dangerous) * * clears the redis cache * * clears resque */ async function cache(callerId) { await clearLocalCaches(); await clearRedis(); } Reset.cache = cache; async function resetHighWatermarks() { const schedules = await Schedule_1.Schedule.findAll(); for (const schedule of schedules) await schedule.resetHighWatermarks(); } Reset.resetHighWatermarks = resetHighWatermarks; async function clearLocalCaches() { await Property_1.Property.invalidateCache(); } Reset.clearLocalCaches = clearLocalCaches; async function clearRedis() { await actionhero_1.cache.clear(); // clear redis cache // resque await deleteKeys("*resque:queue:*"); // clear resque queues await deleteKeys("*resque:delayed:*"); // clear resque delayed queues await deleteKeys("*resque:timestamps:*"); // clear resque timestamps await deleteKeys("*resque:*lock*:*"); // clear resque locks await deleteKeys("*resque:stat:*"); // clear 
resque stats // wait for any currently-running workers await new Promise((resolve) => setTimeout(resolve, actionhero_1.config.tasks.timeout * 2)); await clearFailedTasks(); // re-start recurring tasks await actionhero_1.task.enqueueAllRecurrentTasks(); } Reset.clearRedis = clearRedis; async function deleteKeys(pattern) { const client = actionhero_1.api.resque.queue.connection.redis; const result = await new Promise((resolve, reject) => { let count = 0; const scanStream = client.scanStream({ match: pattern }); scanStream.once("error", (error) => reject(error)); scanStream.once("end", () => resolve(count)); scanStream.on("data", async (keys) => { scanStream.pause(); await Promise.all(keys.map((k) => client.del(k))); count += keys.length; scanStream.resume(); }); }); return result; } async function clearFailedTasks() { const failed = await actionhero_1.task.failed(0, 0); if (failed && failed.length > 0) { const failedJob = failed[0]; await actionhero_1.task.removeFailed(failedJob); return clearFailedTasks(); } } })(Reset = exports.Reset || (exports.Reset = {}));
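
As a rough illustration (not part of the published file), the exported Reset namespace could be driven from a maintenance script along these lines. This sketch assumes an actionhero/Grouparoo process has already been started and connected to its database and redis, and that Reset is reachable from the package root (it may need to be required from this module's own path instead); the callerId strings are purely hypothetical labels:

const { Reset } = require("@grouparoo/core"); // assumption: Reset is re-exported at the package root

async function clearCachesOnly() {
    // least dangerous: clears the local property cache plus redis/resque
    await Reset.cache("maintenance-script");
}

async function wipeDataOnly() {
    // truncates records, record properties, group members, imports, exports, and runs,
    // but leaves apps, sources, and properties in place
    await Reset.data("maintenance-script");
}

async function wipeCluster() {
    // most dangerous: truncates every table except the team/member tables
    const counts = await Reset.cluster("maintenance-script");
    console.log(counts); // rows removed, keyed by model name
}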