@r-delfino/mux-sync-engine
Version:
Mux Sync Engine to sync Mux data based on webhooks to Postgres
715 lines (705 loc) • 22.8 kB
JavaScript
'use strict';
var muxNode = require('@mux/mux-node');
var pg = require('pg');
var yesql = require('yesql');
var pgNodeMigrations = require('pg-node-migrations');
var fs = require('fs');
var path = require('path');
var url = require('url');
// Captured at module load: the <script> element that loaded this bundle in a
// browser, or null under Node where `document` is undefined.
var _documentCurrentScript = typeof document !== 'undefined' ? document.currentScript : null;
// ESM/CJS interop shim: a module compiled from ESM (flagged with `__esModule`)
// is returned as-is, while a plain CommonJS export is wrapped so that
// `.default` access works uniformly for both.
function _interopDefault(e) {
  if (e && e.__esModule) return e;
  return { default: e };
}
// Pre-wrapped default-export views of the CommonJS dependencies, so the rest
// of the bundle can use the ESM-style `x__default.default` access pattern.
var pg__default = /*#__PURE__*/_interopDefault(pg);
var fs__default = /*#__PURE__*/_interopDefault(fs);
var path__default = /*#__PURE__*/_interopDefault(path);
// Bundler-generated shim for dynamic require: when a real `require` exists
// (CJS runtime) it is used directly; otherwise property access is proxied back
// to the fallback function, which throws a descriptive error — dynamic require
// is unsupported in a pure-ESM environment.
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
}) : x)(function(x) {
if (typeof require !== "undefined") return require.apply(this, arguments);
throw Error('Dynamic require of "' + x + '" is not supported');
});
var PostgresClient = class {
  /**
   * Minimal Postgres access layer scoped to a single schema: upserts,
   * deletes, and existence probes used by the sync engine.
   *
   * @param {{ databaseUrl: string, schema: string, maxConnections?: number }} config
   */
  constructor(config) {
    this.config = config;
    this.pool = new pg__default.default.Pool({
      connectionString: config.databaseUrl,
      max: config.maxConnections || 10,
      keepAlive: true
    });
  }
  pool;
  /**
   * Delete every row of `table` whose `field` equals `value`.
   * @returns {Promise<boolean>} true when at least one row was deleted.
   */
  async deleteByField(table, field, value) {
    const prepared = yesql.pg(`
    delete from "${this.config.schema}"."${table}"
    where "${field}" = :value
    returning id;
    `)({ value });
    const { rows } = await this.query(prepared.text, prepared.values);
    return rows.length > 0;
  }
  // Thin passthrough to the pool; kept as a seam for tests/instrumentation.
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  async query(text, params) {
    return this.pool.query(text, params);
  }
  /**
   * Upsert `entries` into `table`, issuing at most 5 concurrent queries at a
   * time. Returns the concatenation of all rows returned by the upserts.
   */
  async upsertMany(entries, table, tableSchema, options) {
    if (!entries.length) return [];
    // The statement depends only on the table shape, not on the entry, so
    // build it once instead of once per row.
    const upsertSql = this.constructUpsertSql(
      this.config.schema,
      table,
      tableSchema,
      options
    );
    const chunkSize = 5;
    const results = [];
    for (let i = 0; i < entries.length; i += chunkSize) {
      const chunk = entries.slice(i, i + chunkSize);
      const queries = chunk.map((entry) => {
        const cleansed = this.cleanseArrayField(entry);
        const prepared = yesql.pg(upsertSql, {
          useNullForMissing: true
        })(cleansed);
        return this.pool.query(prepared.text, prepared.values);
      });
      results.push(...await Promise.all(queries));
    }
    return results.flatMap((it) => it.rows);
  }
  /**
   * Return the subset of `ids` that do not yet exist in `table`.
   */
  async findMissingEntries(idField = "id", table, ids) {
    if (!ids.length) return [];
    // Fix: `in (:ids)` binds the whole JS array as ONE parameter, which
    // node-postgres serializes as a Postgres array — `id IN ($1)` then fails
    // with a type error. `= any(:ids)` is the supported way to match a column
    // against a bound array value.
    const prepared = yesql.pg(`
    select ${idField} from "${this.config.schema}"."${table}"
    where ${idField} = any(:ids);
    `)({ ids });
    const { rows } = await this.query(prepared.text, prepared.values);
    // Set lookup keeps this O(n) instead of O(n^2) on large backfills.
    const existingIds = new Set(rows.map((it) => it[idField]));
    return ids.filter((it) => !existingIds.has(it));
  }
  /**
   * Returns an (yesql formatted) upsert function based on the key/vals of an object.
   * eg,
   * insert into customers ("id", "name")
   * values (:id, :name)
   * on conflict (id)
   * do update set (
   * "id" = :id,
   * "name" = :name
   * )
   */
  constructUpsertSql(schema, table, tableSchema, options) {
    const { conflict = "id" } = options || {};
    const properties = tableSchema.properties;
    return `
    insert into "${schema}"."${table}" (
    ${properties.map((x) => `"${x}"`).join(",")}
    )
    values (
    ${properties.map((x) => `:${x}`).join(",")}
    )
    on conflict (
    ${conflict}
    )
    do update set
    ${properties.map((x) => `"${x}" = :${x}`).join(",")}
    ;`;
  }
  /**
   * For array object field
   * ex: [{"name":"Project name","value":"Test Project"}]
   *
   * we need to stringify it first cos passing array object directly will end up with
   * {
   * invalid input syntax for type json
   * detail: 'Expected ":", but found "}".',
   * where: 'JSON data, line 1: ...\\":\\"Project name\\",\\"value\\":\\"Test Project\\"}"}',
   * }
   */
  cleanseArrayField(obj) {
    const cleansed = { ...obj };
    // `for...of` instead of `.map` — this is a side-effecting loop, not a
    // transformation, and the mapped result was being discarded.
    for (const k of Object.keys(cleansed)) {
      if (Array.isArray(cleansed[k])) {
        cleansed[k] = JSON.stringify(cleansed[k]);
      }
    }
    return cleansed;
  }
};
// src/schemas/mux_assets.ts
// Column list for the `assets` table; order drives the generated upsert SQL.
const muxAssetsSchema = {
  properties: [
    "id", "status", "created_at", "duration_seconds",
    "max_stored_frame_rate", "aspect_ratio", "playback_ids", "tracks",
    "errors", "master_access", "normalize_audio", "static_renditions",
    "test", "passthrough", "live_stream_id", "ingest_type",
    "source_asset_id", "upload_id", "input_info", "video_quality",
    "resolution_tier", "non_standard_input_reasons", "progress", "meta",
    "max_resolution_tier", "is_live", "master", "recording_times"
  ]
};
// src/schemas/mux_live_streams.ts
// Column list for the `live_streams` table; order drives the generated upsert SQL.
const muxLiveStreamsSchema = {
  properties: [
    "id", "status", "created_at", "stream_key",
    "active_asset_id", "recent_asset_ids", "playback_ids", "new_asset_settings",
    "passthrough", "audio_only", "embedded_subtitles", "generated_subtitles",
    "latency_mode", "test", "max_continuous_duration_seconds", "reconnect_window_seconds",
    "use_slate_for_standard_latency", "reconnect_slate_url", "active_ingest_protocol",
    "meta", "simulcast_targets", "srt_passphrase"
  ]
};
// src/schemas/mux_uploads.ts
// Column list for the `uploads` table; order drives the generated upsert SQL.
const muxUploadsSchema = {
  properties: [
    "id", "status", "timeout_seconds", "asset_id",
    "cors_origin", "url", "error", "test"
  ]
};
// src/schemas/mux_webhook_events.ts
// Column list for the `webhook_events` audit table; order drives the upsert SQL.
const muxWebhookEventsSchema = {
  properties: [
    "id", "type", "created_at", "attempts",
    "environment", "object", "raw_body", "headers"
  ]
};
// src/muxSync.ts
// Postgres schema that all synced Mux tables live in.
var DEFAULT_SCHEMA = "mux";
var MuxSync = class {
  /**
   * Syncs Mux video data (assets, live streams, uploads) into Postgres,
   * driven by Mux webhooks and/or explicit backfill calls.
   *
   * @param {object} config - expects databaseUrl, muxTokenId, muxTokenSecret,
   *   muxWebhookSecret; optional: logger, maxPostgresConnections,
   *   revalidateEntityViaMuxApi, backfillRelatedEntities.
   */
  constructor(config) {
    this.config = config;
    this.logger = config.logger || console;
    this.mux = new muxNode.Mux({
      tokenId: config.muxTokenId,
      tokenSecret: config.muxTokenSecret,
      webhookSecret: config.muxWebhookSecret
    });
    this.logger.info("MuxSync initialized");
    this.postgresClient = new PostgresClient({
      databaseUrl: config.databaseUrl,
      schema: DEFAULT_SCHEMA,
      maxConnections: config.maxPostgresConnections
    });
  }
  mux;
  postgresClient;
  logger;
  /**
   * Verify and process one Mux webhook delivery. The raw event is always
   * persisted first, then dispatched to the matching entity handler.
   * Rethrows handler errors so callers can return a non-2xx to Mux for retry.
   */
  async processWebhook(payload, headers) {
    // unwrap() verifies the signature against the configured webhook secret
    // and throws on mismatch.
    const event = this.mux.webhooks.unwrap(payload, headers);
    this.logger.info(`Received webhook ${event.id}: ${event.type}`);
    await this.upsertWebhookEvent(event, headers);
    const eventType = event.type;
    try {
      switch (eventType) {
        // Assets
        case "video.asset.created":
        case "video.asset.ready":
        case "video.asset.updated":
        case "video.asset.errored":
        case "video.asset.live_stream_completed":
        case "video.asset.static_renditions.ready":
        case "video.asset.static_renditions.preparing":
        case "video.asset.static_renditions.deleted":
        case "video.asset.static_renditions.errored":
        case "video.asset.master.ready":
        case "video.asset.master.preparing":
        case "video.asset.master.deleted":
        case "video.asset.warning":
        case "video.asset.non_standard_input_detected":
        case "video.asset.master.errored": {
          const asset = await this.fetchOrUseWebhookData(
            event.data,
            async (id) => {
              const response = await this.mux.video.assets.retrieve(id);
              // Older SDK versions wrap the entity in `.data`.
              return response.data ?? response;
            }
          );
          await this.upsertAssets([asset]);
          break;
        }
        // Asset deletion
        case "video.asset.deleted": {
          const assetData = event.data;
          if (assetData.id) {
            await this.deleteAsset(assetData.id);
          } else {
            this.logger.warn(
              "Asset deletion event received but no asset id found"
            );
          }
          break;
        }
        // Uploads
        case "video.upload.created":
        case "video.upload.asset_created":
        case "video.upload.cancelled":
        case "video.upload.errored": {
          const upload = await this.fetchOrUseWebhookData(
            event.data,
            async (id) => {
              const response = await this.mux.video.uploads.retrieve(id);
              return response.data ?? response;
            }
          );
          await this.upsertUploads([upload]);
          break;
        }
        // Tracks: tracks are stored on the parent asset, so refresh the asset.
        case "video.asset.track.created":
        case "video.asset.track.ready":
        case "video.asset.track.errored":
        case "video.asset.track.deleted": {
          const track = event.data;
          await this.handleAssetTrackEvent(track);
          break;
        }
        // Live streams
        case "video.live_stream.warning":
        case "video.live_stream.created":
        case "video.live_stream.connected":
        case "video.live_stream.recording":
        case "video.live_stream.active":
        case "video.live_stream.disconnected":
        case "video.live_stream.idle":
        case "video.live_stream.updated":
        case "video.live_stream.enabled":
        case "video.live_stream.disabled": {
          const liveStream = await this.fetchOrUseWebhookData(
            event.data,
            async (id) => {
              const response = await this.mux.video.liveStreams.retrieve(id);
              return response.data ?? response;
            }
          );
          await this.upsertLiveStreams([liveStream]);
          break;
        }
        // Live stream deletion
        case "video.live_stream.deleted": {
          const liveStreamData = event.data;
          if (liveStreamData.id) {
            await this.deleteLiveStream(liveStreamData.id);
          } else {
            this.logger.warn(
              "Live stream deletion event received but no live stream id found"
            );
          }
          break;
        }
        // Static rendition: stored on the parent asset, so refresh the asset.
        case "video.asset.static_rendition.created":
        case "video.asset.static_rendition.ready":
        case "video.asset.static_rendition.errored":
        case "video.asset.static_rendition.skipped":
        case "video.asset.static_rendition.deleted": {
          const staticRendition = event.data;
          await this.handleAssetStaticRenditionEvent(staticRendition);
          break;
        }
        // Simulcast targets: stored on the parent live stream; refresh it.
        case "video.live_stream.simulcast_target.created":
        case "video.live_stream.simulcast_target.idle":
        case "video.live_stream.simulcast_target.starting":
        case "video.live_stream.simulcast_target.broadcasting":
        case "video.live_stream.simulcast_target.errored":
        case "video.live_stream.simulcast_target.deleted":
        case "video.live_stream.simulcast_target.updated": {
          const simulcastTargetData = event.data;
          await this.handleLiveStreamSimulcastTargetEvent(simulcastTargetData);
          break;
        }
        default:
          this.logger.warn("Unhandled webhook event", event.type);
          break;
      }
      this.logger.info(`Successfully processed webhook ${event.type}`);
    } catch (error) {
      this.logger.error(
        error,
        `Error processing webhook ${event.type}`
      );
      throw error;
    }
  }
  /**
   * Backfill entities from the Mux API into Postgres.
   * @param {{ object?: "all"|"mux_assets"|"mux_live_streams"|"mux_uploads" }} [params]
   * @returns per-entity sync counts; properties are undefined for objects not synced.
   */
  async syncBackfill(params) {
    const { object } = params ?? {};
    let muxAssets, muxLiveStreams, muxUploads;
    switch (object) {
      case "all":
        // Assets go first so live streams/uploads can reference them; the
        // remaining two are independent and run in parallel.
        muxAssets = await this.syncMuxAssets();
        [muxLiveStreams, muxUploads] = await Promise.all([
          this.syncMuxLiveStreams(),
          this.syncMuxUploads()
        ]);
        break;
      case "mux_assets":
        muxAssets = await this.syncMuxAssets();
        break;
      case "mux_live_streams":
        muxLiveStreams = await this.syncMuxLiveStreams();
        break;
      case "mux_uploads":
        muxUploads = await this.syncMuxUploads();
        break;
    }
    return {
      muxAssets,
      muxLiveStreams,
      muxUploads
    };
  }
  /**
   * Return the entity from the webhook payload, or re-fetch it from the Mux
   * API when `revalidateEntityViaMuxApi` is enabled and the entity has an id.
   */
  async fetchOrUseWebhookData(entity, fetchFn) {
    if (!entity.id) return entity;
    if (this.config.revalidateEntityViaMuxApi) {
      return fetchFn(entity.id);
    }
    return entity;
  }
  /**
   * Upsert assets, mapping Mux API field names to table columns
   * (duration -> duration_seconds, epoch-seconds created_at -> ISO string).
   */
  async upsertAssets(assets) {
    const transformedAssets = assets.map((asset) => ({
      ...asset,
      duration_seconds: asset.duration,
      created_at: asset.created_at ? new Date(Number(asset.created_at) * 1e3).toISOString() : null
    }));
    return this.postgresClient.upsertMany(
      transformedAssets,
      "assets",
      muxAssetsSchema,
      { conflict: "id" }
    );
  }
  /**
   * Page through a Mux list endpoint (100 per page) and upsert each page.
   * @returns {{ synced: number }} total number of items upserted.
   */
  async genericSync(resourceName, listFn, upsertFn) {
    this.logger.info(`Starting Mux ${resourceName} sync...`);
    let nextCursor;
    let totalSynced = 0;
    let pageCount = 0;
    do {
      pageCount++;
      this.logger.info(`Fetching page ${pageCount} of Mux ${resourceName}...`);
      const listParams = { limit: 100 };
      if (nextCursor) listParams.cursor = nextCursor;
      const response = await listFn(listParams);
      const items = response.data;
      if (items.length) {
        this.logger.info(
          `Processing ${items.length} ${resourceName} from page ${pageCount}...`
        );
        await upsertFn(items);
        totalSynced += items.length;
        this.logger.info(
          `\u2713 Page ${pageCount} completed. Total ${resourceName} synced so far: ${totalSynced}`
        );
      } else {
        this.logger.info(`No ${resourceName} found on page ${pageCount}`);
      }
      nextCursor = response.next_cursor;
    } while (nextCursor);
    this.logger.info(
      `\u2705 Mux ${resourceName} sync completed! Total ${resourceName} synced: ${totalSynced}`
    );
    return { synced: totalSynced };
  }
  /**
   * Sync assets. With `assetIds`, only the ids missing from Postgres are
   * fetched individually; otherwise all assets are paged and upserted.
   */
  async syncMuxAssets(assetIds) {
    if (assetIds?.length) {
      const result = { synced: 0 };
      const missing = await this.postgresClient.findMissingEntries(
        "id",
        "assets",
        assetIds
      );
      if (missing.length) {
        const fetchedAssets = [];
        for (const id of missing) {
          try {
            const response = await this.mux.video.assets.retrieve(id);
            // Fix: normalize the SDK response like every other retrieve site
            // (`.data` wrapper on older SDKs) instead of pushing it raw.
            fetchedAssets.push(response.data ?? response);
          } catch {
            // Best-effort: an asset may have been deleted since the id was seen.
            this.logger.warn?.(`Failed fetching asset ${id}`);
          }
        }
        if (fetchedAssets.length) {
          const rows = await this.upsertAssets(fetchedAssets);
          result.synced = rows.length;
        }
      }
      return result;
    }
    return this.genericSync(
      "assets",
      (params) => this.mux.video.assets.list(params),
      (assets) => this.upsertAssets(assets)
    );
  }
  /**
   * Upsert entities that reference assets, optionally backfilling the
   * referenced assets first (when `backfillRelatedEntities` is enabled).
   */
  async upsertEntitiesWithRelatedAssets(entities, tableName, schema, config) {
    if (this.config.backfillRelatedEntities && config.assetIds?.length) {
      await this.syncMuxAssets(config.assetIds);
    }
    const transformedEntities = entities.map(config.transformEntity);
    return this.postgresClient.upsertMany(
      transformedEntities,
      tableName,
      schema,
      {
        conflict: "id"
      }
    );
  }
  /**
   * Upsert live streams, mapping API field names to table columns and
   * backfilling each stream's active asset when configured.
   */
  async upsertLiveStreams(liveStreams) {
    const assetIds = liveStreams.filter((ls) => ls.active_asset_id != null).map((ls) => ls.active_asset_id);
    return this.upsertEntitiesWithRelatedAssets(
      liveStreams,
      "live_streams",
      muxLiveStreamsSchema,
      {
        assetIds,
        transformEntity: (ls) => ({
          ...ls,
          created_at: ls.created_at ? new Date(Number(ls.created_at) * 1e3).toISOString() : null,
          max_continuous_duration_seconds: ls.max_continuous_duration,
          reconnect_window_seconds: ls.reconnect_window
        })
      }
    );
  }
  /**
   * Upsert uploads (timeout -> timeout_seconds), backfilling each upload's
   * created asset when configured.
   */
  async upsertUploads(uploads) {
    const assetIds = uploads.filter((u) => u.asset_id != null).map((u) => u.asset_id);
    return this.upsertEntitiesWithRelatedAssets(
      uploads,
      "uploads",
      muxUploadsSchema,
      {
        assetIds,
        transformEntity: (u) => ({
          ...u,
          timeout_seconds: u.timeout
        })
      }
    );
  }
  /** Full paged sync of all live streams. */
  async syncMuxLiveStreams() {
    return this.genericSync(
      "live streams",
      (params) => this.mux.video.liveStreams.list(params),
      (liveStreams) => this.upsertLiveStreams(liveStreams)
    );
  }
  /** Full paged sync of all uploads. */
  async syncMuxUploads() {
    return this.genericSync(
      "uploads",
      (params) => this.mux.video.uploads.list(params),
      (uploads) => this.upsertUploads(uploads)
    );
  }
  /**
   * Re-fetch and upsert a single asset in response to a sub-entity event
   * (track / static rendition). A missing asset is logged, not thrown —
   * it was likely deleted between the event and this handler.
   */
  async handleAssetUpdateEvent(assetId, eventType) {
    if (!assetId) {
      this.logger.warn?.(
        `${eventType} event received but no asset_id found in webhook data`
      );
      return;
    }
    try {
      const response = await this.mux.video.assets.retrieve(assetId);
      const asset = response.data ?? response;
      await this.upsertAssets([asset]);
    } catch (error) {
      this.logger.warn?.(
        `${eventType} event received but asset ${assetId} not found (likely deleted): ${error}`
      );
      return;
    }
  }
  /** Track events carry the parent asset id; refresh that asset. */
  async handleAssetTrackEvent(track) {
    const assetId = track.asset_id;
    await this.handleAssetUpdateEvent(assetId, "track");
  }
  /**
   * Re-fetch and upsert a single live stream in response to a sub-entity
   * event; a missing stream is logged, not thrown.
   */
  async handleLiveStreamUpdateEvent(liveStreamId, eventType) {
    if (!liveStreamId) {
      this.logger.warn?.(
        `${eventType} event received but no live_stream_id found in webhook data`
      );
      return;
    }
    try {
      const response = await this.mux.video.liveStreams.retrieve(liveStreamId);
      const liveStream = response.data ?? response;
      await this.upsertLiveStreams([liveStream]);
    } catch (error) {
      this.logger.warn?.(
        `${eventType} event received but live stream ${liveStreamId} not found (likely deleted): ${error}`
      );
      return;
    }
  }
  /** Simulcast-target events carry the parent live stream id; refresh it. */
  async handleLiveStreamSimulcastTargetEvent(simulcastTargetData) {
    const liveStreamId = simulcastTargetData?.live_stream_id;
    await this.handleLiveStreamUpdateEvent(liveStreamId, "simulcast_target");
  }
  /** Static-rendition events carry the parent asset id; refresh that asset. */
  async handleAssetStaticRenditionEvent(staticRendition) {
    const assetId = staticRendition.asset_id;
    await this.handleAssetUpdateEvent(assetId, "static_rendition");
  }
  /** @returns {Promise<boolean>} true when the asset row existed and was deleted. */
  async deleteAsset(muxAssetId) {
    return await this.postgresClient.deleteByField("assets", "id", muxAssetId);
  }
  /** @returns {Promise<boolean>} true when the live stream row existed and was deleted. */
  async deleteLiveStream(muxLiveStreamId) {
    return await this.postgresClient.deleteByField(
      "live_streams",
      "id",
      muxLiveStreamId
    );
  }
  /**
   * Persist the raw webhook event for auditing/replay. Failures are rethrown
   * so the webhook is not acknowledged without being recorded.
   */
  async upsertWebhookEvent(event, headers) {
    const transformedEvent = {
      ...event,
      created_at: event.created_at,
      attempts: event.attempts || [],
      environment: event.environment || {},
      object: event.object || {},
      raw_body: event.data,
      headers
    };
    try {
      await this.postgresClient.upsertMany(
        [transformedEvent],
        "webhook_events",
        muxWebhookEventsSchema,
        { conflict: "id" }
      );
      this.logger.info(`Stored webhook event ${event.id}`);
    } catch (error) {
      this.logger.error(`Failed to store webhook event ${event.id}:`, error);
      throw error;
    }
  }
};
// Schema used by the migrations runner; mirrors DEFAULT_SCHEMA above.
var DEFAULT_SCHEMA2 = "mux";
/**
 * Resolve the on-disk directory holding the SQL migration files.
 * Resolution order: the installed package's dist/migrations, then its
 * src/database/migrations, then a "migrations" directory next to this file.
 * @throws {Error} when no migrations directory can be located.
 */
function getMigrationsPath() {
  try {
    const packageJsonPath = __require.resolve(
      "@r-delfino/mux-sync-engine/package.json"
    );
    const packageDir = path__default.default.dirname(packageJsonPath);
    const distMigrationsPath = path__default.default.join(packageDir, "dist", "migrations");
    if (fs__default.default.existsSync(distMigrationsPath)) {
      return distMigrationsPath;
    }
    const srcMigrationsPath = path__default.default.join(
      packageDir,
      "src",
      "database",
      "migrations"
    );
    if (fs__default.default.existsSync(srcMigrationsPath)) {
      return srcMigrationsPath;
    }
    // Fix: the bundler-generated fallback declared `const __filename` whose
    // initializer read `__filename` itself — a temporal-dead-zone
    // ReferenceError whenever this branch ran under Node. Use __dirname
    // directly in the CJS build and derive the directory from the current
    // script URL only in a browser context.
    let baseDir;
    if (typeof document === "undefined") {
      baseDir = __dirname;
    } else {
      const scriptUrl = _documentCurrentScript && _documentCurrentScript.tagName.toUpperCase() === "SCRIPT" && _documentCurrentScript.src || new URL("index.cjs", document.baseURI).href;
      baseDir = path__default.default.dirname(url.fileURLToPath(scriptUrl));
    }
    const relativeMigrationsPath = path__default.default.resolve(baseDir, "migrations");
    if (fs__default.default.existsSync(relativeMigrationsPath)) {
      return relativeMigrationsPath;
    }
    throw new Error("Could not find migrations directory");
  } catch (error) {
    throw new Error(
      `Failed to locate migrations: ${error instanceof Error ? error.message : "Unknown error"}`
    );
  }
}
/**
 * Run pending migrations from `migrationsDirectory` using the given client.
 * Missing directories are skipped with a log line; when `logOnError` is set,
 * migration Errors are logged instead of being rethrown.
 */
async function connectAndMigrate(client, migrationsDirectory, logger, logOnError = false) {
  // Nothing to run when the package ships no migrations directory.
  if (!fs__default.default.existsSync(migrationsDirectory)) {
    logger.info(
      `Migrations directory ${migrationsDirectory} not found, skipping`
    );
    return;
  }
  try {
    await pgNodeMigrations.migrate({ client }, migrationsDirectory, {
      schemaName: DEFAULT_SCHEMA2,
      tableName: "migrations"
    });
  } catch (error) {
    // Only genuine Error instances are downgraded to a log entry.
    if (!(logOnError && error instanceof Error)) throw error;
    logger.error(error, "Migration error:");
  }
}
/**
 * Connect to Postgres, ensure the target schema exists, and run all pending
 * migrations. The client is always closed, even on failure.
 */
async function runMigrations(config) {
  const logger = config.logger || console;
  const client = new pg.Client({
    connectionString: config.databaseUrl,
    connectionTimeoutMillis: 1e4
  });
  try {
    await client.connect();
    // The migrations bookkeeping table lives inside this schema, so it must
    // exist before migrate() runs.
    await client.query(`CREATE SCHEMA IF NOT EXISTS ${DEFAULT_SCHEMA2};`);
    logger.info("Running migrations");
    const migrationsPath = getMigrationsPath();
    logger.info(`Looking for migrations in: ${migrationsPath}`);
    await connectAndMigrate(client, migrationsPath, logger);
  } catch (err) {
    // NOTE(review): errors are logged but not rethrown, so callers cannot
    // detect a failed migration run — confirm this best-effort behavior is intended.
    logger.error(err, "Error running migrations");
  } finally {
    await client.end();
    logger.info("Finished migrations");
  }
}
// Public API of the bundle.
exports.MuxSync = MuxSync;
exports.runMigrations = runMigrations;
//# sourceMappingURL=index.cjs.map