@r-delfino/mux-sync-engine
Version:
Mux Sync Engine to sync Mux data to Postgres based on webhooks
268 lines (264 loc) • 7.77 kB
JavaScript
import { Mux } from '@mux/mux-node';
import pg, { Client } from 'pg';
import { pg as pg$1 } from 'yesql';
import { migrate } from 'pg-node-migrations';
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
// src/muxSync.ts
var PostgresClient = class {
constructor(config) {
this.config = config;
this.pool = new pg.Pool({
connectionString: config.databaseUrl,
max: config.maxConnections || 10,
keepAlive: true
});
}
pool;
async delete(table, id) {
const prepared = pg$1(`
delete from "${this.config.schema}"."${table}"
where id = :id
returning id;
`)({ id });
const { rows } = await this.query(prepared.text, prepared.values);
return rows.length > 0;
}
async query(text, params) {
return this.pool.query(text, params);
}
async upsertMany(entries, table, tableSchema, options) {
if (!entries.length) return [];
const chunkSize = 5;
const results = [];
for (let i = 0; i < entries.length; i += chunkSize) {
const chunk = entries.slice(i, i + chunkSize);
const queries = [];
chunk.forEach((entry) => {
const cleansed = this.cleanseArrayField(entry);
const upsertSql = this.constructUpsertSql(this.config.schema, table, tableSchema, options);
const prepared = pg$1(upsertSql, {
useNullForMissing: true
})(cleansed);
queries.push(this.pool.query(prepared.text, prepared.values));
});
results.push(...await Promise.all(queries));
}
return results.flatMap((it) => it.rows);
}
async findMissingEntries(table, ids) {
if (!ids.length) return [];
const prepared = pg$1(`
select id from "${this.config.schema}"."${table}"
where id=any(:ids::text[]);
`)({ ids });
const { rows } = await this.query(prepared.text, prepared.values);
const existingIds = rows.map((it) => it.id);
const missingIds = ids.filter((it) => !existingIds.includes(it));
return missingIds;
}
/**
* Returns a (yesql-formatted) upsert statement based on the key/vals of an object.
* eg,
* insert into customers ("id", "name")
* values (:id, :name)
* on conflict (id)
* do update set
* "id" = :id,
* "name" = :name
* ;
*/
constructUpsertSql(schema, table, tableSchema, options) {
const { conflict = "id" } = options || {};
const properties = tableSchema.properties;
return `
insert into "${schema}"."${table}" (
${properties.map((x) => `"${x}"`).join(",")}
)
values (
${properties.map((x) => `:${x}`).join(",")}
)
on conflict (
${conflict}
)
do update set
${properties.map((x) => `"${x}" = :${x}`).join(",")}
;`;
}
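// Illustrative only: yesql's pg() (imported above as pg$1) converts the named
// placeholders produced here into positional parameters; e.g. for a hypothetical
// two-column table:
//   pg$1('insert into "mux"."t" ("id","status") values (:id,:status)')({ id: 'a1', status: 'ready' })
//   // => { text: 'insert into "mux"."t" ("id","status") values ($1,$2)', values: ['a1', 'ready'] }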
/**
* For a field containing an array of objects, such as playback_ids or tracks,
* ex: [{"name":"Project name","value":"Test Project"}]
*
* we need to stringify the value first, because passing the array directly fails with
* {
* invalid input syntax for type json
* detail: 'Expected ":", but found "}".',
* where: 'JSON data, line 1: ...\\":\\"Project name\\",\\"value\\":\\"Test Project\\"}"}',
* }
*/
cleanseArrayField(obj) {
const cleansed = { ...obj };
Object.keys(cleansed).map((k) => {
const data = cleansed[k];
if (Array.isArray(data)) {
cleansed[k] = JSON.stringify(data);
}
});
return cleansed;
}
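// Illustrative only (hypothetical values): array fields are serialized so Postgres
// can cast them to json/jsonb; all other fields pass through unchanged:
//   cleanseArrayField({ mux_asset_id: 'a1', playback_ids: [{ id: 'p1', policy: 'public' }] })
//   // => { mux_asset_id: 'a1', playback_ids: '[{"id":"p1","policy":"public"}]' }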
};
// src/schemas/mux_assets.ts
var muxAssetsSchema = {
properties: [
"mux_asset_id",
"status",
"created_at",
"duration",
"max_stored_resolution",
"max_stored_frame_rate",
"aspect_ratio",
"playback_ids",
"tracks",
"errors",
"master_access",
"mp4_support",
"normalize_audio",
"static_renditions",
"test",
"passthrough",
"live_stream_id",
"encoding_tier",
"ingest_type",
"source_asset_id",
"per_title_encode",
"upload_id",
"input_info",
"video_quality",
"resolution_tier",
"non_standard_input_reasons",
"progress",
"meta",
"max_resolution_tier"
]
};
// src/muxSync.ts
var DEFAULT_SCHEMA = "mux";
var MuxSync = class {
constructor(config) {
this.config = config;
this.mux = new Mux({
tokenId: config.muxTokenId,
tokenSecret: config.muxTokenSecret,
webhookSecret: config.muxWebhookSecret
});
this.config.logger?.info(
"MuxSync initialized"
);
this.postgresClient = new PostgresClient({
databaseUrl: config.databaseUrl,
schema: config.schema || DEFAULT_SCHEMA,
maxConnections: config.maxPostgresConnections
});
}
mux;
postgresClient;
async processWebhook(payload, headers) {
const event = this.mux.webhooks.unwrap(payload, headers);
this.config.logger?.info(
`Received webhook ${event.id}: ${event.type}`
);
switch (event.type) {
// Re-fetch the asset from the Mux API when revalidateEntityViaMuxApi is enabled;
// otherwise use the webhook payload as-is.
case "video.asset.created":
case "video.asset.ready":
const asset = await this.fetchOrUseWebhookData(event.data, (id) => this.mux.video.assets.retrieve(id));
this.config.logger?.info(
`Asset for upsert: ${JSON.stringify(asset)}`
);
await this.upsertAssets([asset]);
break;
default:
console.warn("Unhandled webhook event", event.type);
break;
}
}
async fetchOrUseWebhookData(entity, fetchFn) {
if (!entity.id) return entity;
if (this.config.revalidateEntityViaMuxApi) {
return fetchFn(entity.id);
}
return entity;
}
async syncBackfill(params) {
const { object } = params ?? {};
// Backfill is not implemented in this build: the `object` filter is unused and
// both entries are returned as undefined.
let muxAssets, muxLiveStreams;
return {
muxAssets,
muxLiveStreams
};
}
async upsertAssets(assets, backfillRelatedEntities) {
// Related-entity backfill is currently a no-op: the condition below has an empty body.
if (backfillRelatedEntities ?? this.config.backfillRelatedEntities) ;
const transformedAssets = assets.map((asset) => ({
...asset,
mux_asset_id: asset.id,
created_at: asset.created_at ? new Date(Number(asset.created_at) * 1e3).toISOString() : null
}));
return this.postgresClient.upsertMany(transformedAssets, "assets", muxAssetsSchema, { conflict: "mux_asset_id" });
}
};
var DEFAULT_SCHEMA2 = "mux";
function getDirname() {
try {
if (typeof import.meta !== "undefined" && import.meta.url) {
const __filename = fileURLToPath(import.meta.url);
return path.dirname(__filename);
}
} catch (error) {
}
try {
return __dirname;
} catch (error) {
return path.join(process.cwd(), "packages/sync-engine/src/database");
}
}
async function connectAndMigrate(client, migrationsDirectory, config, logOnError = false) {
if (!fs.existsSync(migrationsDirectory)) {
config.logger?.info(`Migrations directory ${migrationsDirectory} not found, skipping`);
return;
}
const optionalConfig = {
schemaName: DEFAULT_SCHEMA2,
tableName: "migrations"
};
try {
await migrate({ client }, migrationsDirectory, optionalConfig);
} catch (error) {
if (logOnError && error instanceof Error) {
config.logger?.error(error, "Migration error:");
} else {
throw error;
}
}
}
async function runMigrations(config) {
const client = new Client({
connectionString: config.databaseUrl,
connectionTimeoutMillis: 1e4
});
try {
await client.connect();
await client.query(`CREATE SCHEMA IF NOT EXISTS ${DEFAULT_SCHEMA2};`);
config.logger?.info("Running migrations");
await connectAndMigrate(client, path.resolve(getDirname(), "./migrations"), config);
} catch (err) {
config.logger?.error(err, "Error running migrations");
} finally {
await client.end();
config.logger?.info("Finished migrations");
}
}
export { MuxSync, runMigrations };
//# sourceMappingURL=index.js.map
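A minimal usage sketch, assuming an Express app and environment variables named DATABASE_URL, MUX_TOKEN_ID, MUX_TOKEN_SECRET, and MUX_WEBHOOK_SECRET (these names are illustrative and not part of the package):

import express from 'express';
import { MuxSync, runMigrations } from '@r-delfino/mux-sync-engine';

const sync = new MuxSync({
  databaseUrl: process.env.DATABASE_URL,
  muxTokenId: process.env.MUX_TOKEN_ID,
  muxTokenSecret: process.env.MUX_TOKEN_SECRET,
  muxWebhookSecret: process.env.MUX_WEBHOOK_SECRET
});

// Create the "mux" schema and apply the bundled migrations before handling events.
await runMigrations({ databaseUrl: process.env.DATABASE_URL });

const app = express();
// Mux signs the raw request body, so keep it unparsed for signature verification.
app.post('/webhooks/mux', express.raw({ type: 'application/json' }), async (req, res) => {
  await sync.processWebhook(req.body.toString('utf8'), req.headers);
  res.sendStatus(200);
});
app.listen(3000);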