@r-delfino/mux-sync-engine
Version:
Mux Sync Engine to sync Mux data based on webhooks to Postgres
278 lines (271 loc) • 9 kB
JavaScript
'use strict';
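/*
 * Example usage (a minimal sketch; the Express app, route path and environment
 * variable names below are assumptions, not part of this package):
 *
 *   const express = require('express');
 *   const { MuxSync, runMigrations } = require('@r-delfino/mux-sync-engine');
 *
 *   const config = {
 *     databaseUrl: process.env.DATABASE_URL,
 *     muxTokenId: process.env.MUX_TOKEN_ID,
 *     muxTokenSecret: process.env.MUX_TOKEN_SECRET,
 *     muxWebhookSecret: process.env.MUX_WEBHOOK_SECRET
 *   };
 *
 *   const app = express();
 *   const sync = new MuxSync(config);
 *
 *   // processWebhook expects the raw request body so the signature can be verified.
 *   app.post('/mux/webhook', express.text({ type: 'application/json' }), async (req, res) => {
 *     await sync.processWebhook(req.body, req.headers);
 *     res.sendStatus(200);
 *   });
 *
 *   runMigrations(config).then(() => app.listen(3000));
 */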
var muxNode = require('@mux/mux-node');
var pg = require('pg');
var yesql = require('yesql');
var pgNodeMigrations = require('pg-node-migrations');
var fs = require('fs');
var path = require('path');
var url = require('url');
var _documentCurrentScript = typeof document !== 'undefined' ? document.currentScript : null;
function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; }
var pg__default = /*#__PURE__*/_interopDefault(pg);
var fs__default = /*#__PURE__*/_interopDefault(fs);
var path__default = /*#__PURE__*/_interopDefault(path);
// src/muxSync.ts
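// Thin wrapper around a pg connection pool with schema-scoped delete, lookup and chunked upsert helpers.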
var PostgresClient = class {
constructor(config) {
this.config = config;
this.pool = new pg__default.default.Pool({
connectionString: config.databaseUrl,
max: config.maxConnections || 10,
keepAlive: true
});
}
pool;
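// Deletes a row by id from the given table; resolves to true if a row was removed.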
async delete(table, id) {
const prepared = yesql.pg(`
delete from "${this.config.schema}"."${table}"
where id = :id
returning id;
`)({ id });
const { rows } = await this.query(prepared.text, prepared.values);
return rows.length > 0;
}
async query(text, params) {
return this.pool.query(text, params);
}
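// Upserts entries in chunks of five, running each chunk's statements in parallel;
// returns the combined rows from all upserts.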
async upsertMany(entries, table, tableSchema, options) {
if (!entries.length) return [];
const chunkSize = 5;
const results = [];
for (let i = 0; i < entries.length; i += chunkSize) {
const chunk = entries.slice(i, i + chunkSize);
const queries = [];
chunk.forEach((entry) => {
const cleansed = this.cleanseArrayField(entry);
const upsertSql = this.constructUpsertSql(this.config.schema, table, tableSchema, options);
const prepared = yesql.pg(upsertSql, {
useNullForMissing: true
})(cleansed);
queries.push(this.pool.query(prepared.text, prepared.values));
});
results.push(...await Promise.all(queries));
}
return results.flatMap((it) => it.rows);
}
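// Returns the subset of the given ids that do not yet exist in the table.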
async findMissingEntries(table, ids) {
if (!ids.length) return [];
const prepared = yesql.pg(`
select id from "${this.config.schema}"."${table}"
where id=any(:ids::text[]);
`)({ ids });
const { rows } = await this.query(prepared.text, prepared.values);
const existingIds = rows.map((it) => it.id);
const missingIds = ids.filter((it) => !existingIds.includes(it));
return missingIds;
}
/**
 * Returns a (yesql-formatted) upsert statement based on the columns of the table schema.
 * e.g.
 *   insert into customers ("id", "name")
 *   values (:id, :name)
 *   on conflict (id)
 *   do update set
 *     "id" = :id,
 *     "name" = :name
 */
constructUpsertSql(schema, table, tableSchema, options) {
const { conflict = "id" } = options || {};
const properties = tableSchema.properties;
return `
insert into "${schema}"."${table}" (
${properties.map((x) => `"${x}"`).join(",")}
)
values (
${properties.map((x) => `:${x}`).join(",")}
)
on conflict (
${conflict}
)
do update set
${properties.map((x) => `"${x}" = :${x}`).join(",")}
;`;
}
/**
 * For array-of-object fields like invoice.custom_fields,
 * e.g. [{"name":"Project name","value":"Test Project"}],
 *
 * we need to stringify the value first, because passing the array directly fails with:
 * {
 *   invalid input syntax for type json
 *   detail: 'Expected ":", but found "}".',
 *   where: 'JSON data, line 1: ...\\":\\"Project name\\",\\"value\\":\\"Test Project\\"}"}',
 * }
 */
cleanseArrayField(obj) {
const cleansed = { ...obj };
Object.keys(cleansed).forEach((k) => {
const data = cleansed[k];
if (Array.isArray(data)) {
cleansed[k] = JSON.stringify(data);
}
});
return cleansed;
}
};
// src/schemas/mux_assets.ts
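// Column list for the "assets" table; constructUpsertSql uses these names both as
// column identifiers and as yesql placeholders.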
var muxAssetsSchema = {
properties: [
"mux_asset_id",
"status",
"created_at",
"duration",
"max_stored_resolution",
"max_stored_frame_rate",
"aspect_ratio",
"playback_ids",
"tracks",
"errors",
"master_access",
"mp4_support",
"normalize_audio",
"static_renditions",
"test",
"passthrough",
"live_stream_id",
"encoding_tier",
"ingest_type",
"source_asset_id",
"per_title_encode",
"upload_id",
"input_info",
"video_quality",
"resolution_tier",
"non_standard_input_reasons",
"progress",
"meta",
"max_resolution_tier"
]
};
// src/muxSync.ts
var DEFAULT_SCHEMA = "mux";
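// Verifies incoming Mux webhooks and mirrors the referenced entities into Postgres.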
var MuxSync = class {
constructor(config) {
this.config = config;
this.mux = new muxNode.Mux({
tokenId: config.muxTokenId,
tokenSecret: config.muxTokenSecret,
webhookSecret: config.muxWebhookSecret
});
this.config.logger?.info(
"MuxSync initialized"
);
this.postgresClient = new PostgresClient({
databaseUrl: config.databaseUrl,
schema: config.schema || DEFAULT_SCHEMA,
maxConnections: config.maxPostgresConnections
});
}
mux;
postgresClient;
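// Verifies and unwraps the raw webhook payload, then routes supported event types
// to the matching upsert; unhandled event types are only logged.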
async processWebhook(payload, headers) {
const event = this.mux.webhooks.unwrap(payload, headers);
this.config.logger?.info(
`Received webhook ${event.id}: ${event.type}`
);
switch (event.type) {
//todo: add manual fetch
case "video.asset.created":
case "video.asset.ready":
const asset = await this.fetchOrUseWebhookData(event.data, (id) => this.mux.video.assets.retrieve(id)); // re-fetch from the Mux API when revalidateEntityViaMuxApi is set
this.config.logger?.info(
`Asset for upsert: ${JSON.stringify(asset)}`
);
await this.upsertAssets([asset]);
break;
default:
this.config.logger?.warn(`Unhandled webhook event: ${event.type}`);
break;
}
}
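// Returns the webhook entity as-is unless revalidateEntityViaMuxApi is enabled,
// in which case it is re-fetched via fetchFn.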
async fetchOrUseWebhookData(entity, fetchFn) {
if (!entity.id) return entity;
if (this.config.revalidateEntityViaMuxApi) {
return fetchFn(entity.id);
}
return entity;
}
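// Placeholder for a full backfill from the Mux API; the muxAssets and
// muxLiveStreams results are currently left undefined.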
async syncBackfill(params) {
const { object } = params ?? {};
let muxAssets, muxLiveStreams;
return {
muxAssets,
muxLiveStreams
};
}
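// Maps Mux assets onto the assets table: copies the Mux id into mux_asset_id and
// converts the Unix-seconds created_at into an ISO timestamp before upserting.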
async upsertAssets(assets, backfillRelatedEntities) {
if (backfillRelatedEntities ?? this.config.backfillRelatedEntities) {
// Backfilling related entities is not implemented yet.
}
const transformedAssets = assets.map((asset) => ({
...asset,
mux_asset_id: asset.id,
created_at: asset.created_at ? new Date(Number(asset.created_at) * 1e3).toISOString() : null
}));
return this.postgresClient.upsertMany(transformedAssets, "assets", muxAssetsSchema, { conflict: "mux_asset_id" });
}
};
var DEFAULT_SCHEMA2 = "mux";
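// Resolves the directory containing the bundled ./migrations folder.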
function getDirname() {
try {
// In the CommonJS bundle, __dirname is the directory containing this file
// (and therefore the bundled ./migrations directory).
return __dirname;
} catch (error) {
// Fallback when __dirname is not defined (e.g. unusual bundling setups).
return path__default.default.join(process.cwd(), "packages/sync-engine/src/database");
}
}
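// Applies the SQL migrations in migrationsDirectory (tracked in the mux.migrations table)
// via pg-node-migrations; when logOnError is set, failures are logged instead of re-thrown.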
async function connectAndMigrate(client, migrationsDirectory, config, logOnError = false) {
if (!fs__default.default.existsSync(migrationsDirectory)) {
config.logger?.info(`Migrations directory ${migrationsDirectory} not found, skipping`);
return;
}
const optionalConfig = {
schemaName: DEFAULT_SCHEMA2,
tableName: "migrations"
};
try {
await pgNodeMigrations.migrate({ client }, migrationsDirectory, optionalConfig);
} catch (error) {
if (logOnError && error instanceof Error) {
config.logger?.error(error, "Migration error:");
} else {
throw error;
}
}
}
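// Creates the "mux" schema if it does not exist and applies the bundled migrations
// against config.databaseUrl; migration errors are logged rather than re-thrown.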
async function runMigrations(config) {
const client = new pg.Client({
connectionString: config.databaseUrl,
connectionTimeoutMillis: 1e4
});
try {
await client.connect();
await client.query(`CREATE SCHEMA IF NOT EXISTS ${DEFAULT_SCHEMA2};`);
config.logger?.info("Running migrations");
await connectAndMigrate(client, path__default.default.resolve(getDirname(), "./migrations"), config);
} catch (err) {
config.logger?.error(err, "Error running migrations");
} finally {
await client.end();
config.logger?.info("Finished migrations");
}
}
exports.MuxSync = MuxSync;
exports.runMigrations = runMigrations;
//# sourceMappingURL=index.cjs.map