// @fly-cut/av-cliper
// WebCodecs-based: combine video, audio, images and text, with animation support.
// 基于 WebCodecs 合成 视频、音频、图片、文字,支持动画
// (bundled output: ~1,690 lines, 104 kB, JavaScript)
// --- Bundler-generated (esbuild) runtime helpers for lowered ES private class members ---
var ss = Object.defineProperty;
// Throw helper used by the guards below.
var gi = (o) => {
throw TypeError(o);
};
// Define a public field via defineProperty when the key already exists on the object.
var ns = (o, t, e) => t in o ? ss(o, t, { enumerable: !0, configurable: !0, writable: !0, value: e }) : o[t] = e;
// A: define a public field; Ge: assert `o` is registered in the private-member map/set `t`.
var A = (o, t, e) => ns(o, typeof t != "symbol" ? t + "" : t, e), Ge = (o, t, e) => t.has(o) || gi("Cannot " + e);
// i: read a private field; m: register a private member; d: write a private field; C: access a private method.
var i = (o, t, e) => (Ge(o, t, "read from private field"), e ? e.call(o) : t.get(o)), m = (o, t, e) => t.has(o) ? gi("Cannot add the same private member more than once") : t instanceof WeakSet ? t.add(o) : t.set(o, e), d = (o, t, e, s) => (Ge(o, t, "write to private field"), s ? s.call(o, e) : t.set(o, e), e), C = (o, t, e) => (Ge(o, t, "access private method"), e);
import Ft from "@webav/mp4box.js";
import { workerTimer as rs, Log as F, autoReadStream as oi, file2stream as Di, EventTool as ai, recodemux as os } from "@fly-cut/internal-utils";
import { Log as mn } from "@fly-cut/internal-utils";
import * as as from "wave-resampler";
import { tmpfile as Qe, write as Je } from "opfs-tools";
// Shorthand for document.createElement(tagName).
function ls(o) {
  const node = document.createElement(o);
  return node;
}
// Render text `o` (styled by CSS text `t`) into an Image by measuring a hidden
// <pre> and embedding it in an SVG <foreignObject> data URL.
function cs(o, t) {
const e = ls("pre");
// Attach hidden to the DOM only long enough to measure its rendered size.
e.style.cssText = `margin: 0; ${t}; visibility: hidden; position: fixed;`, e.textContent = o, document.body.appendChild(e);
const { width: s, height: n } = e.getBoundingClientRect();
// Make it visible again before serializing, so the SVG copy is not hidden.
e.remove(), e.style.visibility = "visible";
const r = new Image();
r.width = s, r.height = n;
const a = `
<svg xmlns="http://www.w3.org/2000/svg" width="${s}" height="${n}">
<foreignObject width="100%" height="100%">
<div xmlns="http://www.w3.org/1999/xhtml">${e.outerHTML}</div>
</foreignObject>
</svg>
`.replace(/\t/g, "").replace(/#/g, "%23");
return r.src = `data:image/svg+xml;charset=utf-8,${a}`, r;
}
// Render styled text to an ImageBitmap: build the SVG-backed Image (cs), wait
// for it to load, then draw it onto an OffscreenCanvas.
async function an(o, t) {
const e = cs(o, t);
await new Promise((r) => {
e.onload = r;
});
const s = new OffscreenCanvas(e.width, e.height), n = s.getContext("2d");
return n == null || n.drawImage(e, 0, 0, e.width, e.height), await createImageBitmap(s);
}
/**
 * Concatenate a list of Float32Arrays into a single contiguous Float32Array.
 * Fix: the original used `reduce` without an initial value, which throws a
 * TypeError on an empty list; an empty list now yields an empty Float32Array.
 */
function hs(o) {
  let total = 0;
  for (const chunk of o) total += chunk.length;
  const out = new Float32Array(total);
  let offset = 0;
  for (const chunk of o) {
    out.set(chunk, offset);
    offset += chunk.length;
  }
  return out;
}
// Regroup an array of frames (each frame = array of per-channel Float32Arrays)
// into one concatenated Float32Array per channel.
function ds(o) {
  const perChannel = [];
  for (const frame of o) {
    frame.forEach((chanData, chanIdx) => {
      (perChannel[chanIdx] ??= []).push(chanData);
    });
  }
  return perChannel.map(hs);
}
// Copy an AudioData's samples out as per-channel Float32Array planes,
// converting from interleaved f32 or s16 layouts when necessary.
function Pi(o) {
if (o.format === "f32-planar") {
// Already planar float: copy each plane straight out.
const t = [];
for (let e = 0; e < o.numberOfChannels; e += 1) {
const s = o.allocationSize({ planeIndex: e }), n = new ArrayBuffer(s);
o.copyTo(n, { planeIndex: e }), t.push(new Float32Array(n));
}
return t;
} else if (o.format === "f32") {
// Interleaved float: de-interleave into planes.
const t = new ArrayBuffer(o.allocationSize({ planeIndex: 0 }));
return o.copyTo(t, { planeIndex: 0 }), fs(new Float32Array(t), o.numberOfChannels);
} else if (o.format === "s16") {
// Interleaved 16-bit PCM: de-interleave and scale to [-1, 1).
const t = new ArrayBuffer(o.allocationSize({ planeIndex: 0 }));
return o.copyTo(t, { planeIndex: 0 }), us(new Int16Array(t), o.numberOfChannels);
}
throw Error("Unsupported audio data format");
}
// De-interleave 16-bit PCM (`t` channels) into per-channel Float32 planes,
// scaling samples by 1/32768 into [-1, 1).
function us(o, t) {
  const frameCount = o.length / t;
  const planes = [];
  for (let ch = 0; ch < t; ch++) planes.push(new Float32Array(frameCount));
  for (let idx = 0; idx < o.length; idx++) {
    planes[idx % t][Math.floor(idx / t)] = o[idx] / 32768;
  }
  return planes;
}
// De-interleave float PCM (`t` channels) into per-channel Float32 planes.
function fs(o, t) {
  const frameCount = o.length / t;
  const planes = Array.from({ length: t }, () => new Float32Array(frameCount));
  for (let idx = 0; idx < o.length; idx++) {
    planes[idx % t][Math.floor(idx / t)] = o[idx];
  }
  return planes;
}
// Collect every channel of an AudioBuffer-like object as an array of Float32Arrays.
function li(o) {
  const channels = [];
  for (let ch = 0; ch < o.numberOfChannels; ch++) {
    channels.push(o.getChannelData(ch));
  }
  return channels;
}
// Decode every frame of an (animated) image of MIME type `t` via the WebCodecs
// ImageDecoder; returns the decoded VideoFrames (1 frame for static images).
async function ms(o, t) {
var a;
const e = {
type: t,
data: o
}, s = new ImageDecoder(e);
await Promise.all([s.completed, s.tracks.ready]);
let n = ((a = s.tracks.selectedTrack) == null ? void 0 : a.frameCount) ?? 1;
const r = [];
for (let c = 0; c < n; c += 1)
r.push((await s.decode({ frameIndex: c })).image);
return r;
}
/**
 * Mix multiple planar stereo PCM tracks into one Float32Array laid out as
 * [L0..Ln-1, R0..Rn-1], where n is the longest track length. Mono tracks
 * duplicate the left channel into the right.
 * Fix: the original computed `Math.max(...[])` for an empty track list,
 * yielding -Infinity and a RangeError; an empty list now returns an empty array.
 */
function wi(o) {
  let frameCount = 0;
  for (const track of o) frameCount = Math.max(frameCount, track[0]?.length ?? 0);
  const mixed = new Float32Array(frameCount * 2);
  for (let idx = 0; idx < frameCount; idx++) {
    let left = 0;
    let right = 0;
    for (const track of o) {
      const l = track[0]?.[idx] ?? 0;
      // Missing right channel falls back to the left sample (mono upmix).
      const r = track[1]?.[idx] ?? l;
      left += l;
      right += r;
    }
    mixed[idx] = left;
    mixed[idx + frameCount] = right;
  }
  return mixed;
}
// Resample planar PCM `o` from sample rate `t` to `e.rate` with `e.chanCount`
// channels. Prefers OfflineAudioContext; falls back to wave-resampler (sinc)
// where OfflineAudioContext is unavailable (e.g. worker scopes).
async function ps(o, t, e) {
const s = o.length, n = Array(e.chanCount).fill(0).map(() => new Float32Array(0));
if (s === 0) return n;
const r = Math.max(...o.map((h) => h.length));
if (r === 0) return n;
if (globalThis.OfflineAudioContext == null)
return o.map(
(h) => new Float32Array(
as.resample(h, t, e.rate, {
method: "sinc",
LPF: !1
})
)
);
const a = new globalThis.OfflineAudioContext(
e.chanCount,
r * e.rate / t,
e.rate
), c = a.createBufferSource(), l = a.createBuffer(s, r, t);
return o.forEach((h, u) => l.copyToChannel(h, u)), c.buffer = l, c.connect(a.destination), c.start(), li(await a.startRendering());
}
// Sleep for `o` milliseconds using the worker-based timer (one-shot:
// the timer cancels itself on first fire).
function ci(o) {
  return new Promise((resolve) => {
    const stop = rs(() => {
      stop();
      resolve();
    }, o);
  });
}
// Read the half-open range [t, e) out of `o`, wrapping around the end of the
// buffer (ring-buffer style read).
function Ke(o, t, e) {
  const count = e - t;
  const out = new Float32Array(count);
  for (let idx = 0; idx < count; idx++) {
    out[idx] = o[(t + idx) % o.length];
  }
  return out;
}
// Downsample `o` by factor `t` (may be fractional) using linear interpolation
// between neighboring samples.
function Bi(o, t) {
  const outLen = Math.floor(o.length / t);
  const out = new Float32Array(outLen);
  for (let idx = 0; idx < outLen; idx++) {
    const pos = idx * t;
    const base = Math.floor(pos);
    const frac = pos - base;
    if (base + 1 < o.length) {
      out[idx] = o[base] * (1 - frac) + o[base + 1] * frac;
    } else {
      out[idx] = o[base];
    }
  }
  return out;
}
// Default output audio parameters: 48 kHz, stereo, AAC-LC ("mp4a.40.2").
const T = {
sampleRate: 48e3,
channelCount: 2,
codec: "mp4a.40.2"
};
// Extract muxer/decoder configuration from a parsed mp4box file (`o`) and its
// info (`t`): returns { videoTrackConf?, videoDecoderConf?, audioTrackConf?,
// audioDecoderConf? } for the first video and audio tracks.
function hi(o, t) {
const e = t.videoTracks[0], s = {};
if (e != null) {
// Codec-private description (avcC/hvcC payload), needed by VideoDecoder.configure.
const r = gs(o.getTrackById(e.id)).buffer, { descKey: a, type: c } = e.codec.startsWith("avc1") ? { descKey: "avcDecoderConfigRecord", type: "avc1" } : e.codec.startsWith("hvc1") ? { descKey: "hevcDecoderConfigRecord", type: "hvc1" } : { descKey: "", type: "" };
a !== "" && (s.videoTrackConf = {
timescale: e.timescale,
duration: e.duration,
width: e.video.width,
height: e.video.height,
brands: t.brands,
type: c,
[a]: r
}), s.videoDecoderConf = {
codec: e.codec,
codedHeight: e.video.height,
codedWidth: e.video.width,
description: r
};
}
const n = t.audioTracks[0];
if (n != null) {
const r = yi(o);
s.audioTrackConf = {
timescale: n.timescale,
samplerate: n.audio.sample_rate,
channel_count: n.audio.channel_count,
hdlr: "soun",
type: n.codec.startsWith("mp4a") ? "mp4a" : n.codec,
description: yi(o)
}, s.audioDecoderConf = {
codec: n.codec.startsWith("mp4a") ? T.codec : n.codec,
numberOfChannels: n.audio.channel_count,
sampleRate: n.audio.sample_rate,
// Prefer the sample rate / channel count parsed from the esds box when present.
...r == null ? {} : ws(r)
};
}
return s;
}
// Find the codec-private config box (avcC/hvcC/av1C/vpcC) in a track's sample
// descriptions and serialize it, stripping the 8-byte box header (size + type).
function gs(o) {
for (const t of o.mdia.minf.stbl.stsd.entries) {
const e = t.avcC ?? t.hvcC ?? t.av1C ?? t.vpcC;
if (e != null) {
const s = new Ft.DataStream(
void 0,
0,
Ft.DataStream.BIG_ENDIAN
);
return e.write(s), new Uint8Array(s.buffer.slice(8));
}
}
throw Error("avcC, hvcC, av1C or VPX not found");
}
// Locate the esds box of the first sample-description entry of type `t`
// (default "mp4a") across all tracks; undefined when absent.
function yi(o, t = "mp4a") {
var s;
const e = (s = o.moov) == null ? void 0 : s.traks.map((n) => n.mdia.minf.stbl.stsd.entries).flat().find(({ type: n }) => n === t);
return e == null ? void 0 : e.esds;
}
// Parse the AudioSpecificConfig carried in an esds box: decode the
// sample-rate index (4 bits) and channel configuration (4 bits) that follow
// the 5-bit audio object type. Returns {} when the descriptor is missing.
function ws(o) {
  const dsi = o.esd.descs[0]?.descs[0];
  if (dsi == null) return {};
  const [byte0, byte1] = dsi.data;
  const rateIndex = ((byte0 & 7) << 1) + (byte1 >> 7);
  const chanConfig = (byte1 & 127) >> 3;
  const RATE_TABLE = [
    96000, 88200, 64000, 48000, 44100, 32000, 24000,
    22050, 16000, 12000, 11025, 8000, 7350
  ];
  return {
    sampleRate: RATE_TABLE[rateIndex],
    numberOfChannels: chanConfig
  };
}
// Stream-parse an MP4 with mp4box.js: `t` fires once the moov box is ready,
// `e` receives demuxed samples (batches of up to 100). Reads the source file
// sequentially in 30 MB chunks until EOF or parsing stalls.
async function ys(o, t, e) {
const s = Ft.createFile(!1);
s.onReady = (r) => {
var l, h;
t({ mp4boxFile: s, info: r });
const a = (l = r.videoTracks[0]) == null ? void 0 : l.id;
a != null && s.setExtractionOptions(a, "video", { nbSamples: 100 });
const c = (h = r.audioTracks[0]) == null ? void 0 : h.id;
c != null && s.setExtractionOptions(c, "audio", { nbSamples: 100 }), s.start();
}, s.onSamples = e, await n();
async function n() {
let r = 0;
// 30 MB per read.
const a = 30 * 1024 * 1024;
for (; ; ) {
const c = await o.read(a, {
at: r
});
if (c.byteLength === 0) break;
c.fileStart = r;
// appendBuffer returns the next expected file offset; null means parsing stalled.
const l = s.appendBuffer(c);
if (l == null) break;
r = l;
}
s.stop();
}
}
// Monotonic id counter for clip instances (shows up in logs/state dumps).
let di = 0;
// Duck-type check: does `o` look like an opfs-tools file handle
// (kind === "file" with a createReader method)?
function je(o) {
  if (o.kind !== "file") return false;
  return o.createReader instanceof Function;
}
// WeakMap/WeakSet keys for MP4Clip's lowered private members:
// Le=id, jt=logger, he=destroyed, j=meta, _=localFile, K=headerBoxPos,
// mt=volume, D=videoSamples, rt=audioSamples, Qt=videoFrameFinder,
// Dt=audioFrameFinder, q=decoderConf, U=opts, Jt=thumbnail AbortController,
// ae=private-method set, Mi/Oi=removeSegment helpers.
var Le, jt, he, j, _, K, mt, D, rt, Qt, Dt, q, U, Jt, ae, Mi, Oi;
// MP4Clip (minified as `yt`, exported as `bi`): wraps an MP4 source and exposes
// time-addressed access to decoded video frames and PCM audio.
const yt = class yt {
// `t`: ReadableStream | opfs file | pre-parsed {localFile, videoSamples, ...};
// `e`: options ({ audio, __unsafe_hardwareAcceleration__, ... }).
constructor(t, e = {}) {
m(this, ae);
m(this, Le, di++);
m(this, jt, F.create(`MP4Clip id:${i(this, Le)},`));
A(this, "ready");
m(this, he, !1);
m(this, j, {
// microseconds
duration: 0,
width: 0,
height: 0,
audioSampleRate: 0,
audioChanCount: 0
});
m(this, _);
m(this, K, []);
m(this, mt, 1);
m(this, D, []);
m(this, rt, []);
m(this, Qt, null);
m(this, Dt, null);
m(this, q, {
video: null,
audio: null
});
m(this, U, { audio: !0 });
/**
 * Intercepts the data returned by {@link MP4Clip.tick}, allowing secondary
 * processing of the image/audio data.
 * @param time the time passed to tick
 * @param tickRet the data returned by tick
 *
 * @see [Remove video green-screen background](https://webav-tech.github.io/WebAV/demo/3_2-chromakey-video)
 */
A(this, "tickInterceptor", async (t, e) => e);
m(this, Jt, new AbortController());
if (!(t instanceof ReadableStream) && !je(t) && !Array.isArray(t.videoSamples))
throw Error("Illegal argument");
d(this, U, { audio: !0, ...e }), d(this, mt, typeof e.audio == "object" && "volume" in e.audio ? e.audio.volume : 1);
// Stream input: persist to a local OPFS temp file first, then parse.
const s = async (n) => (await Je(i(this, _), n), i(this, _));
d(this, _, je(t) ? t : "localFile" in t ? t.localFile : Qe()), this.ready = (t instanceof ReadableStream ? s(t).then(
(n) => xi(n, i(this, U))
) : je(t) ? xi(t, i(this, U)) : Promise.resolve(t)).then(
async ({ videoSamples: n, audioSamples: r, decoderConf: a, headerBoxPos: c }) => {
d(this, D, n), d(this, rt, r), d(this, q, a), d(this, K, c);
const { videoFrameFinder: l, audioFrameFinder: h } = xs(
{
video: a.video == null ? null : {
...a.video,
hardwareAcceleration: i(this, U).__unsafe_hardwareAcceleration__
},
audio: a.audio
},
await i(this, _).createReader(),
n,
r,
// Volume 0 disables the audio finder entirely.
i(this, U).audio !== !1 ? i(this, mt) : 0
);
return d(this, Qt, l), d(this, Dt, h), d(this, j, bs(a, n, r)), i(this, jt).info("MP4Clip meta:", i(this, j)), { ...i(this, j) };
}
);
}
get meta() {
return { ...i(this, j) };
}
/**
 * Returns the binary data of the file header boxes (ftyp, moov).
 * Parse it with any mp4 demuxer to obtain detailed video information;
 * the unit tests contain an example using mp4box.js.
 */
async getFileHeaderBinData() {
await this.ready;
const t = await i(this, _).getOriginFile();
if (t == null) throw Error("MP4Clip localFile is not origin file");
return await new Blob(
i(this, K).map(
({ start: e, size: s }) => t.slice(e, e + s)
)
).arrayBuffer();
}
/**
 * Returns the image frame and audio data at the given time.
 * @param time microseconds
 */
async tick(t) {
var n, r, a;
// Past the end: emit remaining audio (if any) with state "done".
if (t >= i(this, j).duration)
return await this.tickInterceptor(t, {
audio: await ((n = i(this, Dt)) == null ? void 0 : n.find(t)) ?? [],
state: "done"
});
const [e, s] = await Promise.all([
((r = i(this, Dt)) == null ? void 0 : r.find(t)) ?? [],
(a = i(this, Qt)) == null ? void 0 : a.find(t)
]);
return s == null ? await this.tickInterceptor(t, {
audio: e,
state: "success"
}) : await this.tickInterceptor(t, {
video: s,
audio: e,
state: "success"
});
}
/**
 * Generates thumbnails; by default one 100px-wide thumbnail per keyframe.
 *
 * @param imgWidth thumbnail width, default 100
 * @param opts Partial<ThumbnailOpts>
 * @returns Promise<Array<{ ts: number; img: Blob }>>
 */
async thumbnails(t = 100, e) {
// A new call aborts any thumbnail generation still in flight.
i(this, Jt).abort(), d(this, Jt, new AbortController());
const s = i(this, Jt).signal;
await this.ready;
const n = "generate thumbnails aborted";
if (s.aborted) throw Error(n);
const { width: r, height: a } = i(this, j), c = ks(
t,
Math.round(a * (t / r)),
{ quality: 0.1, type: "image/png" }
);
return new Promise(
async (l, h) => {
let u = [];
const p = i(this, q).video;
if (p == null || i(this, D).length === 0) {
f();
return;
}
s.addEventListener("abort", () => {
h(Error(n));
});
// Resolve with all thumbnails collected so far.
async function f() {
s.aborted || l(
await Promise.all(
u.map(async (b) => ({
ts: b.ts,
img: await b.img
}))
)
);
}
function y(b) {
u.push({
ts: b.timestamp,
img: c(b)
});
}
const { start: g = 0, end: x = i(this, j).duration, step: w } = e ?? {};
if (w) {
// Fixed-step mode: seek a dedicated frame finder through [start, end].
let b = g;
const v = new _i(
await i(this, _).createReader(),
i(this, D),
{
...p,
hardwareAcceleration: i(this, U).__unsafe_hardwareAcceleration__
}
);
for (; b <= x && !s.aborted; ) {
const S = await v.find(b);
S && y(S), b += w;
}
v.destroy(), f();
} else
// Default mode: decode keyframes only.
await Fs(
i(this, D),
i(this, _),
p,
s,
{ start: g, end: x },
(b, v) => {
b != null && y(b), v && f();
}
);
}
);
}
// Splits the clip at `time` (µs) into two new MP4Clips sharing the same file.
async split(t) {
if (await this.ready, t <= 0 || t >= i(this, j).duration)
throw Error('"time" out of bounds');
const [e, s] = Ts(
i(this, D),
t
), [n, r] = Is(
i(this, rt),
t
), a = new yt(
{
localFile: i(this, _),
videoSamples: e ?? [],
audioSamples: n ?? [],
decoderConf: i(this, q),
headerBoxPos: i(this, K)
},
i(this, U)
), c = new yt(
{
localFile: i(this, _),
videoSamples: s ?? [],
audioSamples: r ?? [],
decoderConf: i(this, q),
headerBoxPos: i(this, K)
},
i(this, U)
);
return await Promise.all([a.ready, c.ready]), [a, c];
}
// Returns a new clip with the [startTime, endTime) segment removed.
async removeSegment(t, e) {
if (await this.ready, t < 0 || e > i(this, j).duration || t >= e)
throw Error("Invalid time range");
const s = C(this, ae, Mi).call(this, t, e), n = C(this, ae, Oi).call(this, t, e), r = new yt(
{
localFile: i(this, _),
videoSamples: s,
audioSamples: n,
decoderConf: i(this, q),
headerBoxPos: i(this, K)
},
i(this, U)
);
return await r.ready, r.tickInterceptor = this.tickInterceptor, r;
}
// Scales decoded PCM by wrapping the current tickInterceptor.
// NOTE(review): each call wraps the previous interceptor, so repeated calls
// build a chain and the scaling may compound — presumably unintended; verify.
setVolume(t) {
if (t < 0 || t > 1)
throw new Error("Volume must be between 0 and 1");
d(this, mt, t);
const e = this.tickInterceptor;
this.tickInterceptor = async (s, n) => {
if (n.audio && i(this, mt) !== 1)
for (const r of n.audio)
for (let a = 0; a < r.length; a++)
r[a] *= i(this, mt);
return e(s, n);
};
}
getVolume() {
return i(this, mt);
}
// Deep-ish copy: shares the local file, copies sample lists and interceptor.
async clone() {
await this.ready;
const t = new yt(
{
localFile: i(this, _),
videoSamples: [...i(this, D)],
audioSamples: [...i(this, rt)],
decoderConf: i(this, q),
headerBoxPos: i(this, K)
},
i(this, U)
);
return await t.ready, t.tickInterceptor = this.tickInterceptor, t;
}
/**
 * Splits this MP4Clip into MP4Clips containing only the video track and
 * only the audio track, respectively.
 * @returns Mp4CLip[]
 */
async splitTrack() {
await this.ready;
const t = [];
if (i(this, D).length > 0) {
const e = new yt(
{
localFile: i(this, _),
videoSamples: [...i(this, D)],
audioSamples: [],
decoderConf: {
video: i(this, q).video,
audio: null
},
headerBoxPos: i(this, K)
},
i(this, U)
);
await e.ready, e.tickInterceptor = this.tickInterceptor, t.push(e);
}
if (i(this, rt).length > 0) {
const e = new yt(
{
localFile: i(this, _),
videoSamples: [],
audioSamples: [...i(this, rt)],
decoderConf: {
audio: i(this, q).audio,
video: null
},
headerBoxPos: i(this, K)
},
i(this, U)
);
await e.ready, e.tickInterceptor = this.tickInterceptor, t.push(e);
}
return t;
}
// Idempotent: releases both frame finders (closes decoders and file readers).
destroy() {
var t, e;
i(this, he) || (i(this, jt).info("MP4Clip destroy"), d(this, he, !0), (t = i(this, Qt)) == null || t.destroy(), (e = i(this, Dt)) == null || e.destroy());
}
};
// Instantiate the private-member maps; Mi/Oi are the removeSegment helpers:
// Mi drops video samples in [t, e) while keeping the result IDR-aligned,
// Oi drops audio samples in [t, e) and re-bases later timestamps.
Le = new WeakMap(), jt = new WeakMap(), he = new WeakMap(), j = new WeakMap(), _ = new WeakMap(), K = new WeakMap(), mt = new WeakMap(), D = new WeakMap(), rt = new WeakMap(), Qt = new WeakMap(), Dt = new WeakMap(), q = new WeakMap(), U = new WeakMap(), Jt = new WeakMap(), ae = new WeakSet(), Mi = function(t, e) {
if (i(this, D).length === 0) return [];
const s = [], n = [];
// r: last IDR at/before `t`; a: first IDR at/after `e`.
let r = -1, a = -1;
for (let l = 0; l < i(this, D).length; l++) {
const h = i(this, D)[l];
if (h.is_idr) {
if (h.cts < t)
r = l;
else if (h.cts >= e && a === -1) {
a = l;
break;
}
}
}
// Fallbacks: first IDR overall / last IDR overall.
if (r === -1) {
for (let l = 0; l < i(this, D).length; l++)
if (i(this, D)[l].is_idr) {
r = l;
break;
}
}
if (a === -1) {
for (let l = i(this, D).length - 1; l >= 0; l--)
if (i(this, D)[l].is_idr) {
a = l;
break;
}
}
if (r !== -1)
for (let l = r; l < i(this, D).length; l++) {
const h = i(this, D)[l];
if (h.cts >= t) break;
s.push({ ...h });
}
if (a !== -1) {
// Shift samples after the removed range back by its length.
const l = e - t;
for (let h = a; h < i(this, D).length; h++) {
const u = i(this, D)[h];
n.push({
...u,
cts: u.cts - l
});
}
}
const c = [...s, ...n];
return c.length > 0 && !c[0].is_idr && i(this, jt).warn(
"First sample is not IDR frame after merging, samples might be corrupted"
), c;
}, Oi = function(t, e) {
if (i(this, rt).length === 0) return [];
const s = [], n = [];
for (const r of i(this, rt))
r.cts < t ? s.push({ ...r }) : r.cts >= e && n.push({
...r,
cts: r.cts - (e - t)
});
return [...s, ...n];
};
// Public export alias for the MP4Clip class.
let bi = yt;
// Derive clip meta (duration, frame size, audio params) from the decoder
// configs and the demuxed video/audio sample lists. Times are in microseconds.
function bs(o, t, e) {
  const meta = {
    duration: 0,
    width: 0,
    height: 0,
    audioSampleRate: 0,
    audioChanCount: 0
  };
  if (o.video != null && t.length > 0) {
    meta.width = o.video.codedWidth ?? 0;
    meta.height = o.video.codedHeight ?? 0;
  }
  if (o.audio != null && e.length > 0) {
    meta.audioSampleRate = T.sampleRate;
    meta.audioChanCount = T.channelCount;
  }
  // Video duration: end time of the last non-deleted sample.
  let videoEnd = 0;
  for (let idx = t.length - 1; idx >= 0; idx--) {
    const smp = t[idx];
    if (!smp.deleted) {
      videoEnd = smp.cts + smp.duration;
      break;
    }
  }
  let audioEnd = 0;
  if (e.length > 0) {
    const last = e.at(-1);
    audioEnd = last.cts + last.duration;
  }
  meta.duration = Math.max(videoEnd, audioEnd);
  return meta;
}
// Build the audio/video frame finders for a clip. `n` is the volume: 0 (or a
// missing audio config / no audio samples) disables the audio finder entirely.
function xs(o, t, e, s, n) {
return {
audioFrameFinder: n === 0 || o.audio == null || s.length === 0 ? null : new vs(
t,
s,
o.audio,
{
volume: n,
targetSampleRate: T.sampleRate
}
),
videoFrameFinder: o.video == null || e.length === 0 ? null : new _i(
t,
e,
o.video
)
};
}
// Parse a local MP4 file into { videoSamples, audioSamples, decoderConf,
// headerBoxPos }. Timestamps are normalized to microseconds relative to the
// first sample's dts; video sample payloads stay on disk (data: null).
async function xi(o, t = {}) {
let e = null;
const s = { video: null, audio: null };
let n = [], r = [], a = [], c = -1, l = -1;
const h = await o.createReader();
await ys(
h,
(f) => {
e = f.info;
// Remember ftyp/moov byte ranges so the header can be re-read later.
const y = f.mp4boxFile.ftyp;
a.push({ start: y.start, size: y.size });
const g = f.mp4boxFile.moov;
a.push({ start: g.start, size: g.size });
let { videoDecoderConf: x, audioDecoderConf: w } = hi(
f.mp4boxFile,
f.info
);
s.video = x ?? null, s.audio = w ?? null, x == null && w == null && F.error("MP4Clip no video and audio track"), F.info(
"mp4BoxFile moov ready",
{
...f.info,
tracks: null,
videoTracks: null,
audioTracks: null
},
s
);
},
(f, y, g) => {
if (y === "video") {
c === -1 && (c = g[0].dts);
for (const x of g)
n.push(p(x, c, "video"));
} else if (y === "audio" && t.audio) {
l === -1 && (l = g[0].dts);
for (const x of g)
r.push(p(x, l, "audio"));
}
}
), await h.close();
const u = n.at(-1) ?? r.at(-1);
if (e == null)
throw Error("MP4Clip stream is done, but not emit ready");
if (u == null)
throw Error("MP4Clip stream not contain any sample");
return Ze(n), F.info("mp4 stream parsed"), {
videoSamples: n,
audioSamples: r,
decoderConf: s,
headerBoxPos: a
};
// Normalize a demuxed sample: rebase timestamps to µs, and for video sync
// samples locate the IDR NAL so leading non-IDR NALs can be skipped.
function p(f, y = 0, g) {
const x = g === "video" && f.is_sync ? Rs(f.data, f.description.type) : -1;
let w = f.offset, b = f.size;
return x >= 0 && (w += x, b -= x), {
...f,
is_idr: x >= 0,
offset: w,
size: b,
cts: (f.cts - y) / f.timescale * 1e6,
dts: (f.dts - y) / f.timescale * 1e6,
duration: f.duration / f.timescale * 1e6,
timescale: 1e6,
// Audio payloads are small enough to keep in memory.
data: g === "video" ? null : f.data
};
}
}
// Private-member keys for the video frame finder (`_i`):
// L=decoder, Pt=last find time, Bt=abort token, de=fallback frame duration,
// Mt=software-decode downgrade flag, ot=sample cursor, X=decoded-frame cache,
// bt=output count, Ot=input count, Kt=sleep count, ue=predecode error flag,
// qt=parseFrame, xt=decoding flag, fe=startDecode, _t=reset, me=state dump.
var L, Pt, Bt, de, Mt, ot, X, bt, Ot, Kt, ue, qt, xt, fe, _t, me;
// VideoFrameFinder: time-addressed access to decoded VideoFrames, decoding
// forward from the nearest keyframe and caching a small frame queue.
class _i {
constructor(t, e, s) {
m(this, L, null);
m(this, Pt, 0);
m(this, Bt, { abort: !1, st: performance.now() });
// find(time µs): reset the decoder on seek-back or a jump > 3s, then pull
// the frame covering `time` from the cache/decoder.
A(this, "find", async (t) => {
(i(this, L) == null || i(this, L).state === "closed" || t <= i(this, Pt) || t - i(this, Pt) > 3e6) && i(this, _t).call(this, t), i(this, Bt).abort = !0, d(this, Pt, t), d(this, Bt, { abort: !1, st: performance.now() });
const e = await i(this, qt).call(this, t, i(this, L), i(this, Bt));
return d(this, Kt, 0), e;
});
// fix VideoFrame duration is null
m(this, de, 0);
m(this, Mt, !1);
m(this, ot, 0);
m(this, X, []);
m(this, bt, 0);
m(this, Ot, 0);
m(this, Kt, 0);
m(this, ue, !1);
// parseFrame: recursively drain the cache / wait for the decoder until a
// frame covering `t` appears; times out after 6s.
m(this, qt, async (t, e, s) => {
if (e == null || e.state === "closed" || s.abort) return null;
if (i(this, X).length > 0) {
const n = i(this, X)[0];
// Frames before `t` are consumed and closed; keep the queue topped up.
return t < n.timestamp ? null : (i(this, X).shift(), t > n.timestamp + (n.duration ?? 0) ? (n.close(), await i(this, qt).call(this, t, e, s)) : (!i(this, ue) && i(this, X).length < 10 && i(this, fe).call(this, e).catch((r) => {
throw d(this, ue, !0), i(this, _t).call(this, t), r;
}), n));
}
if (i(this, xt) || i(this, bt) < i(this, Ot) && e.decodeQueueSize > 0) {
if (performance.now() - s.st > 6e3)
throw Error(
`MP4Clip.tick video timeout, ${JSON.stringify(i(this, me).call(this))}`
);
d(this, Kt, i(this, Kt) + 1), await ci(15);
} else {
if (i(this, ot) >= this.samples.length)
return null;
try {
await i(this, fe).call(this, e);
} catch (n) {
throw i(this, _t).call(this, t), n;
}
}
return await i(this, qt).call(this, t, e, s);
});
m(this, xt, !1);
// startDecode: read the next GOP (up to the next IDR) from disk and feed it
// to the decoder; backpressure above 600 queued chunks.
m(this, fe, async (t) => {
var n, r;
if (i(this, xt) || t.decodeQueueSize > 600) return;
let e = i(this, ot) + 1;
if (e > this.samples.length) return;
d(this, xt, !0);
let s = !1;
for (; e < this.samples.length; e++) {
const a = this.samples[e];
if (!s && !a.deleted && (s = !0), a.is_idr) break;
}
if (s) {
const a = this.samples.slice(i(this, ot), e);
if (((n = a[0]) == null ? void 0 : n.is_idr) !== !0)
F.warn("First sample not idr frame");
else {
const c = performance.now(), l = await Li(a, this.localFileReader), h = performance.now() - c;
if (h > 1e3) {
const u = a[0], p = a.at(-1), f = p.offset + p.size - u.offset;
F.warn(
`Read video samples time cost: ${Math.round(h)}ms, file chunk size: ${f}`
);
}
if (t.state === "closed") return;
d(this, de, ((r = l[0]) == null ? void 0 : r.duration) ?? 0), qe(t, l, {
onDecodingError: (u) => {
if (i(this, Mt))
throw u;
// First decode error: retry once with software decoding.
i(this, bt) === 0 && (d(this, Mt, !0), F.warn("Downgrade to software decode"), i(this, _t).call(this));
}
}), d(this, Ot, i(this, Ot) + l.length);
}
}
d(this, ot, e), d(this, xt, !1);
});
// reset: drop cached frames, reposition the cursor at the IDR preceding
// `t`, and recreate the VideoDecoder (software-forced after a downgrade).
m(this, _t, (t) => {
var s, n;
if (d(this, xt, !1), i(this, X).forEach((r) => r.close()), d(this, X, []), t == null || t === 0)
d(this, ot, 0);
else {
let r = 0;
for (let a = 0; a < this.samples.length; a++) {
const c = this.samples[a];
if (c.is_idr && (r = a), !(c.cts < t)) {
d(this, ot, r);
break;
}
}
}
d(this, Ot, 0), d(this, bt, 0), ((s = i(this, L)) == null ? void 0 : s.state) !== "closed" && ((n = i(this, L)) == null || n.close());
const e = {
...this.conf,
...i(this, Mt) ? { hardwareAcceleration: "prefer-software" } : {}
};
d(this, L, new VideoDecoder({
output: (r) => {
if (d(this, bt, i(this, bt) + 1), r.timestamp === -1) {
r.close();
return;
}
let a = r;
// Re-wrap frames missing a duration so downstream range checks work.
r.duration == null && (a = new VideoFrame(r, {
duration: i(this, de)
}), r.close()), i(this, X).push(a);
},
error: (r) => {
if (r.message.includes("Codec reclaimed due to inactivity")) {
d(this, L, null), F.warn(r.message);
return;
}
const a = `VideoFinder VideoDecoder err: ${r.message}, config: ${JSON.stringify(e)}, state: ${JSON.stringify(i(this, me).call(this))}`;
throw F.error(a), Error(a);
}
})), i(this, L).configure(e);
});
// Diagnostic snapshot used in error messages.
m(this, me, () => {
var t, e;
return {
time: i(this, Pt),
decState: (t = i(this, L)) == null ? void 0 : t.state,
decQSize: (e = i(this, L)) == null ? void 0 : e.decodeQueueSize,
decCusorIdx: i(this, ot),
sampleLen: this.samples.length,
inputCnt: i(this, Ot),
outputCnt: i(this, bt),
cacheFrameLen: i(this, X).length,
softDeocde: i(this, Mt),
clipIdCnt: di,
sleepCnt: i(this, Kt),
memInfo: Wi()
};
});
A(this, "destroy", () => {
var t, e;
((t = i(this, L)) == null ? void 0 : t.state) !== "closed" && ((e = i(this, L)) == null || e.close()), d(this, L, null), i(this, Bt).abort = !0, i(this, X).forEach((s) => s.close()), d(this, X, []), this.localFileReader.close();
});
this.localFileReader = t, this.samples = e, this.conf = s;
}
}
L = new WeakMap(), Pt = new WeakMap(), Bt = new WeakMap(), de = new WeakMap(), Mt = new WeakMap(), ot = new WeakMap(), X = new WeakMap(), bt = new WeakMap(), Ot = new WeakMap(), Kt = new WeakMap(), ue = new WeakMap(), qt = new WeakMap(), xt = new WeakMap(), fe = new WeakMap(), _t = new WeakMap(), me = new WeakMap();
// Index of the sample whose [cts, cts + duration) interval covers time `o`;
// falls back to 0 when no sample matches (samples are cts-ordered).
function Cs(o, t) {
  let idx = 0;
  for (const smp of t) {
    if (o >= smp.cts && o < smp.cts + smp.duration) {
      return idx;
    }
    if (smp.cts > o) break;
    idx += 1;
  }
  return 0;
}
// Private-member keys for the audio frame finder (`vs`):
// pe=volume, ge=target sample rate, Z=decoder wrapper, Lt=abort token,
// at=last find time, pt=sample cursor, Q=PCM queue, Zt=sleep count,
// we=parseFrame, ye=startDecode, We=reset, ze=state dump.
var pe, ge, Z, Lt, at, pt, Q, Zt, we, ye, We, ze;
// AudioFrameFinder: time-addressed access to decoded (and resampled) PCM,
// returned as stereo Float32Array pairs sized by the elapsed time.
class vs {
constructor(t, e, s, n) {
m(this, pe, 1);
m(this, ge);
m(this, Z, null);
m(this, Lt, { abort: !1, st: performance.now() });
// find(time µs): reset on seek-back or a jump > 100ms, then emit the number
// of PCM frames corresponding to the time delta since the last call.
A(this, "find", async (t) => {
const e = t <= i(this, at) || t - i(this, at) > 1e5;
(i(this, Z) == null || i(this, Z).state === "closed" || e) && i(this, We).call(this), e && (d(this, at, t), d(this, pt, Cs(t, this.samples))), i(this, Lt).abort = !0;
const s = t - i(this, at);
d(this, at, t), d(this, Lt, { abort: !1, st: performance.now() });
const n = await i(this, we).call(this, Math.ceil(s * (i(this, ge) / 1e6)), i(this, Z), i(this, Lt));
return d(this, Zt, 0), n;
});
m(this, at, 0);
m(this, pt, 0);
m(this, Q, {
frameCnt: 0,
data: []
});
m(this, Zt, 0);
// parseFrame: wait/decode until `t` PCM frames are buffered, then pop them;
// times out after 3s.
m(this, we, async (t, e = null, s) => {
if (e == null || s.abort || e.state === "closed" || t === 0)
return [];
const n = i(this, Q).frameCnt - t;
if (n > 0)
// Enough data buffered; top up the queue when it runs low (<100ms).
return n < T.sampleRate / 10 && i(this, ye).call(this, e), Ci(i(this, Q), t);
if (e.decoding) {
if (performance.now() - s.st > 3e3)
throw s.abort = !0, Error(
`MP4Clip.tick audio timeout, ${JSON.stringify(i(this, ze).call(this))}`
);
d(this, Zt, i(this, Zt) + 1), await ci(15);
} else {
if (i(this, pt) >= this.samples.length - 1)
// Source exhausted: flush whatever is left.
return Ci(i(this, Q), i(this, Q).frameCnt);
i(this, ye).call(this, e);
}
return i(this, we).call(this, t, e, s);
});
// startDecode: feed the next batch (≤10) of non-deleted samples to the decoder.
m(this, ye, (t) => {
if (t.decodeQueueSize > 10) return;
const s = [];
let n = i(this, pt);
for (; n < this.samples.length; ) {
const r = this.samples[n];
if (n += 1, !r.deleted && (s.push(r), s.length >= 10))
break;
}
d(this, pt, n), t.decode(
s.map(
(r) => new EncodedAudioChunk({
type: "key",
timestamp: r.cts,
duration: r.duration,
data: r.data
})
)
);
});
// reset: clear the PCM queue and recreate the decoder wrapper.
m(this, We, () => {
var t;
d(this, at, 0), d(this, pt, 0), d(this, Q, {
frameCnt: 0,
data: []
}), (t = i(this, Z)) == null || t.close(), d(this, Z, Ss(
this.conf,
{
resampleRate: T.sampleRate,
volume: i(this, pe)
},
(e) => {
i(this, Q).data.push(e), i(this, Q).frameCnt += e[0].length;
}
));
});
// Diagnostic snapshot used in error messages.
m(this, ze, () => {
var t, e;
return {
time: i(this, at),
decState: (t = i(this, Z)) == null ? void 0 : t.state,
decQSize: (e = i(this, Z)) == null ? void 0 : e.decodeQueueSize,
decCusorIdx: i(this, pt),
sampleLen: this.samples.length,
pcmLen: i(this, Q).frameCnt,
clipIdCnt: di,
sleepCnt: i(this, Zt),
memInfo: Wi()
};
});
A(this, "destroy", () => {
d(this, Z, null), i(this, Lt).abort = !0, d(this, Q, {
frameCnt: 0,
data: []
}), this.localFileReader.close();
});
this.localFileReader = t, this.samples = e, this.conf = s, d(this, pe, n.volume), d(this, ge, n.targetSampleRate);
}
}
pe = new WeakMap(), ge = new WeakMap(), Z = new WeakMap(), Lt = new WeakMap(), at = new WeakMap(), pt = new WeakMap(), Q = new WeakMap(), Zt = new WeakMap(), we = new WeakMap(), ye = new WeakMap(), We = new WeakMap(), ze = new WeakMap();
// Wrap an AudioDecoder: decodes chunks, applies volume, resamples to
// t.resampleRate when needed (results emitted in submission order via As),
// upmixes mono to stereo, and delivers planar PCM to callback `e`.
function Ss(o, t, e) {
// s: chunks submitted; n: outputs emitted (drives the `decoding` getter).
let s = 0, n = 0;
const r = (u) => {
if (n += 1, u.length !== 0) {
if (t.volume !== 1)
for (const p of u)
for (let f = 0; f < p.length; f++) p[f] *= t.volume;
// Mono → stereo by duplicating the single channel.
u.length === 1 && (u = [u[0], u[0]]), e(u);
}
}, a = As(r), c = t.resampleRate !== o.sampleRate;
let l = new AudioDecoder({
output: (u) => {
const p = Pi(u);
c ? a(
() => ps(p, u.sampleRate, {
rate: t.resampleRate,
chanCount: u.numberOfChannels
})
) : r(p), u.close();
},
error: (u) => {
// Decoder reclaimed by the browser is benign; anything else is fatal.
u.message.includes("Codec reclaimed due to inactivity") || h("MP4Clip AudioDecoder err", u);
}
});
l.configure(o);
// Log + throw with a state snapshot attached.
function h(u, p) {
const f = `${u}: ${p.message}, state: ${JSON.stringify(
{
qSize: l.decodeQueueSize,
state: l.state,
inputCnt: s,
outputCnt: n
}
)}`;
throw F.error(f), Error(f);
}
return {
decode(u) {
s += u.length;
try {
for (const p of u) l.decode(p);
} catch (p) {
h("decode audio chunk error", p);
}
},
close() {
l.state !== "closed" && l.close();
},
get decoding() {
return s > n && l.decodeQueueSize > 0;
},
get state() {
return l.state;
},
get decodeQueueSize() {
return l.decodeQueueSize;
}
};
}
// Serialize async task results: tasks may settle out of order, but the sink
// `o` receives each task's result (or rejection reason) strictly in
// submission order. Null-ish results block the queue (never emitted).
function As(o) {
  const settled = [];
  let nextEmit = 0;
  function flush() {
    const value = settled[nextEmit];
    if (value == null) return;
    o(value);
    nextEmit += 1;
    flush();
  }
  function record(value, slot) {
    settled[slot] = value;
    flush();
  }
  let nextSlot = 0;
  return (task) => {
    const slot = nextSlot;
    nextSlot += 1;
    task().then((v) => record(v, slot)).catch((err) => record(err, slot));
  };
}
// Consume `t` PCM frames from the stereo queue `o` ({ frameCnt, data }),
// returning [left, right] Float32Arrays and leaving the remainder queued.
function Ci(o, t) {
  const out = [new Float32Array(t), new Float32Array(t)];
  let filled = 0;
  let segIdx = 0;
  while (segIdx < o.data.length) {
    const [left, right] = o.data[segIdx];
    if (filled + left.length > t) {
      // Partial segment: take what we need, keep the tail in place.
      const need = t - filled;
      out[0].set(left.subarray(0, need), filled);
      out[1].set(right.subarray(0, need), filled);
      o.data[segIdx][0] = left.subarray(need, left.length);
      o.data[segIdx][1] = right.subarray(need, right.length);
      break;
    }
    out[0].set(left, filled);
    out[1].set(right, filled);
    filled += left.length;
    segIdx += 1;
  }
  o.data = o.data.slice(segIdx);
  o.frameCnt -= t;
  return out;
}
// Read sample payloads from disk and wrap them as EncodedVideoChunks.
// Contiguous spans under 30MB are fetched with one read; otherwise each
// sample is read individually.
async function Li(o, t) {
const e = o[0], s = o.at(-1);
if (s == null) return [];
const n = s.offset + s.size - e.offset;
if (n < 3e7) {
const r = new Uint8Array(
await t.read(n, { at: e.offset })
);
return o.map((a) => {
const c = a.offset - e.offset;
return new EncodedVideoChunk({
type: a.is_sync ? "key" : "delta",
timestamp: a.cts,
duration: a.duration,
data: r.subarray(c, c + a.size)
});
});
}
return await Promise.all(
o.map(async (r) => new EncodedVideoChunk({
type: r.is_sync ? "key" : "delta",
timestamp: r.cts,
duration: r.duration,
data: await t.read(r.size, {
at: r.offset
})
}))
);
}
// Build a reusable thumbnail renderer at a fixed o×t pixel size: the returned
// function draws a VideoFrame, closes it, and resolves to a Blob (options `e`).
function ks(o, t, e) {
  const canvas = new OffscreenCanvas(o, t);
  const ctx = canvas.getContext("2d");
  return async (frame) => {
    ctx.drawImage(frame, 0, 0, o, t);
    frame.close();
    return await canvas.convertToBlob(e);
  };
}
// Split video samples at time `t` into [before, after]. Both halves stay
// IDR-aligned: samples outside their half are flagged deleted (cts: -1),
// and `after` is re-based so its timeline starts at 0.
function Ts(o, t) {
if (o.length === 0) return [];
// e: last IDR before the split; s: first IDR after it; n: sample index just before `t`.
let e = 0, s = 0, n = -1;
for (let l = 0; l < o.length; l++) {
const h = o[l];
if (n === -1 && t < h.cts && (n = l - 1), h.is_idr)
if (n === -1)
e = l;
else {
s = l;
break;
}
}
const r = o[n];
if (r == null) throw Error("Not found video sample by time");
const a = o.slice(0, s === 0 ? o.length : s).map((l) => ({ ...l }));
// In the first half, mark everything past `t` deleted.
for (let l = e; l < a.length; l++) {
const h = a[l];
t < h.cts && (h.deleted = !0, h.cts = -1);
}
Ze(a);
// Second half starts at the preceding IDR; pre-split samples become deleted.
const c = o.slice(r.is_idr ? n : e).map((l) => ({ ...l, cts: l.cts - t }));
for (const l of c)
l.cts < 0 && (l.deleted = !0, l.cts = -1);
return Ze(c), [a, c];
}
// Split audio samples at time `t` into [before, after]; `after` timestamps
// are re-based to start at 0. Throws when every sample precedes `t`.
function Is(o, t) {
  if (o.length === 0) return [];
  let splitIdx = -1;
  for (let idx = 0; idx < o.length; idx++) {
    if (t <= o[idx].cts) {
      splitIdx = idx;
      break;
    }
  }
  if (splitIdx === -1) throw Error("Not found audio sample by time");
  const before = o.slice(0, splitIdx).map((smp) => ({ ...smp }));
  const after = o.slice(splitIdx).map((smp) => ({ ...smp, cts: smp.cts - t }));
  return [before, after];
}
// Feed chunks `t` into decoder `o` and flush. "Decoding error" rejections are
// routed to e.onDecodingError (enables software-decode fallback); "Aborted due
// to close" is swallowed; everything else rethrows.
function qe(o, t, e) {
let s = 0;
if (o.state === "configured") {
for (; s < t.length; s++) o.decode(t[s]);
o.flush().catch((n) => {
if (!(n instanceof Error)) throw n;
if (n.message.includes("Decoding error") && e.onDecodingError != null) {
e.onDecodingError(n);
return;
}
if (!n.message.includes("Aborted due to close"))
throw n;
});
}
}
/**
 * Scan length-prefixed (AVCC/HVCC, 4-byte length) NAL units in a sample
 * payload and return the byte offset of the first IDR (keyframe) NAL, or -1
 * when none is found. Codecs other than avc1/hvc1 return 0 (whole sample
 * treated as the keyframe).
 * Fix: construct the DataView with the view's byteOffset/byteLength — the
 * original `new DataView(o.buffer)` read from offset 0 of the underlying
 * buffer, which is wrong when `o` is a subarray of a larger buffer.
 */
function Rs(o, t) {
  if (t !== "avc1" && t !== "hvc1") return 0;
  const e = new DataView(o.buffer, o.byteOffset, o.byteLength);
  let s = 0;
  for (; s < o.byteLength - 4; ) {
    // H.264: NAL type 5 (IDR slice) in the low 5 bits of the NAL header.
    if (t === "avc1" && (e.getUint8(s + 4) & 31) === 5)
      return s;
    if (t === "hvc1") {
      // H.265: NAL types 19 (IDR_W_RADL) / 20 (IDR_N_LP) in bits 1-6.
      const n = e.getUint8(s + 4) >> 1 & 63;
      if (n === 19 || n === 20) return s;
    }
    // Advance past the 4-byte length prefix plus the NAL payload.
    s += e.getUint32(s) + 4;
  }
  return -1;
}
// Generate thumbnails from keyframes only: decode every non-deleted sync
// sample within [n.start, n.end], invoking r(frame, isDone) per output.
// On a decoding error before any output, retries once with software decoding.
async function Fs(o, t, e, s, n, r) {
const a = await t.createReader(), c = await Li(
o.filter(
(u) => !u.deleted && u.is_sync && u.cts >= n.start && u.cts <= n.end
),
a
);
if (c.length === 0 || s.aborted) return;
let l = 0;
qe(h(), c, {
onDecodingError: (u) => {
F.warn("thumbnailsByKeyFrame", u), l === 0 ? qe(h(!0), c, {
onDecodingError: (p) => {
a.close(), F.error("thumbnailsByKeyFrame retry soft deocde", p);
}
}) : (r(null, !0), a.close());
}
});
// Build a decoder; `u` forces software decoding on the retry path.
function h(u = !1) {
const p = {
...e,
...u ? { hardwareAcceleration: "prefer-software" } : {}
}, f = new VideoDecoder({
output: (y) => {
l += 1;
const g = l === c.length;
r(y, g), g && (a.close(), f.state !== "closed" && f.close());
},
error: (y) => {
const g = `thumbnails decoder error: ${y.message}, config: ${JSON.stringify(p)}, state: ${JSON.stringify(
{
qSize: f.decodeQueueSize,
state: f.state,
outputCnt: l,
inputCnt: c.length
}
)}`;
throw F.error(g), Error(g);
}
});
return s.addEventListener("abort", () => {
a.close(), f.state !== "closed" && f.close();
}), f.configure(p), f;
}
}
// Snap the earliest non-deleted sample (within the first GOP — scanning stops
// at the second sync sample) to cts 0 when it starts within 200ms, extending
// its duration so the total timeline length is preserved.
function Ze(o) {
  let syncSeen = 0;
  let earliest = null;
  for (const smp of o) {
    if (smp.deleted) continue;
    if (smp.is_sync) syncSeen += 1;
    if (syncSeen >= 2) break;
    if (earliest == null || smp.cts < earliest.cts) {
      earliest = smp;
    }
  }
  if (earliest != null && earliest.cts < 2e5) {
    earliest.duration += earliest.cts;
    earliest.cts = 0;
  }
}
// Snapshot of Chrome's non-standard performance.memory for diagnostics;
// returns {} on engines where it is unavailable.
function Wi() {
  try {
    const { jsHeapSizeLimit, totalJSHeapSize, usedJSHeapSize } = performance.memory;
    return {
      jsHeapSizeLimit,
      totalJSHeapSize,
      usedJSHeapSize,
      percentUsed: (usedJSHeapSize / jsHeapSizeLimit).toFixed(3),
      percentTotal: (totalJSHeapSize / jsHeapSizeLimit).toFixed(3)
    };
  } catch {
    return {};
  }
}
var H, J, B, Ve, zi;
// ImgClip (exported below as `vi`): IClip implementation for static and
// animated images. Bundler helpers (defined at the top of this file):
// m=add private member, i=read private field, d=write private field,
// A=define public field, C=call private method.
// Private slots: H=meta {duration, width, height}, J=ImageBitmap|null
// (static image), B=VideoFrame[] (animated frames), Ve/zi=private async
// init method that decodes an image stream.
const Et = class Et {
/**
 * Static images can be initialized from a ReadableStream or an ImageBitmap.
 *
 * Animated images require VideoFrame[] or an explicit image MIME type.
 */
constructor(t) {
m(this, Ve);
A(this, "ready");
m(this, H, {
// microseconds
duration: 0,
width: 0,
height: 0
});
m(this, J, null);
m(this, B, []);
A(this, "tickInterceptor", async (t, e) => e);
// Store a static bitmap, mirror its size into meta, and report an
// infinite duration (the wrapping Sprite must clamp it).
const e = (s) => (d(this, J, s), i(this, H).width = s.width, i(this, H).height = s.height, i(this, H).duration = 1 / 0, { ...i(this, H) });
if (t instanceof ReadableStream)
this.ready = new Response(t).blob().then((s) => createImageBitmap(s)).then(e);
else if (t instanceof ImageBitmap)
this.ready = Promise.resolve(e(t));
else if (Array.isArray(t) && t.every((s) => s instanceof VideoFrame)) {
// Animated image given as pre-decoded frames; meta.duration is the sum
// of frame durations, but `ready` still reports Infinity like the other
// initialization paths.
d(this, B, t);
const s = i(this, B)[0];
if (s == null) throw Error("The frame count must be greater than 0");
d(this, H, {
width: s.displayWidth,
height: s.displayHeight,
duration: i(this, B).reduce(
(n, r) => n + (r.duration ?? 0),
0
)
}), this.ready = Promise.resolve({ ...i(this, H), duration: 1 / 0 });
} else if ("type" in t)
// { stream, type }: decode the animated image via the private init
// method (zi), which fills B and H.
this.ready = C(this, Ve, zi).call(this, t.stream, t.type).then(() => ({
width: i(this, H).width,
height: i(this, H).height,
duration: 1 / 0
}));
else
throw Error("Illegal arguments");
}
/**
 * ⚠️ A static image's duration is Infinity.
 *
 * When wrapped in a Sprite, its duration must be set to a finite number.
 *
 */
get meta() {
return { ...i(this, H) };
}
// Return the image for time `t` (µs): static images always yield a fresh
// ImageBitmap; animated images pick the frame covering (t mod duration),
// falling back to the first frame, and return a clone of it.
async tick(t) {
if (i(this, J) != null)
return await this.tickInterceptor(t, {
video: await createImageBitmap(i(this, J)),
state: "success"
});
const e = t % i(this, H).duration;
return await this.tickInterceptor(t, {
video: (i(this, B).find(
(s) => e >= s.timestamp && e <= s.timestamp + (s.duration ?? 0)
) ?? i(this, B)[0]).clone(),
state: "success"
});
}
// Split at time `t` (µs) into two independent clips. Static images are
// duplicated; animated images split the frame list at the first frame whose
// timestamp >= t, re-basing the second half's timestamps to start at 0.
async split(t) {
if (await this.ready, i(this, J) != null)
return [
new Et(await createImageBitmap(i(this, J))),
new Et(await createImageBitmap(i(this, J)))
];
let e = -1;
for (let r = 0; r < i(this, B).length; r++) {
const a = i(this, B)[r];
if (!(t > a.timestamp)) {
e = r;
break;
}
}
if (e === -1) throw Error("Not found frame by time");
const s = i(this, B).slice(0, e).map((r) => new VideoFrame(r)), n = i(this, B).slice(e).map(
(r) => new VideoFrame(r, {
timestamp: r.timestamp - t
})
);
return [new Et(s), new Et(n)];
}
// Deep-copy the clip: frames are cloned, a static bitmap is re-created.
async clone() {
await this.ready;
const t = i(this, J) == null ? i(this, B).map((e) => e.clone()) : await createImageBitmap(i(this, J));
return new Et(t);
}
// Release the bitmap and all frames; the clip is unusable afterwards.
destroy() {
var t;
F.info("ImgClip destroy"), (t = i(this, J)) == null || t.close(), i(this, B).forEach((e) => e.close());
}
};
// Private init (zi): decode the stream (via ms) into VideoFrames, then derive
// meta from the first frame's coded size and the summed frame durations.
H = new WeakMap(), J = new WeakMap(), B = new WeakMap(), Ve = new WeakSet(), zi = async function(t, e) {
d(this, B, await ms(t, e));
const s = i(this, B)[0];
if (s == null) throw Error("No frame available in gif");
d(this, H, {
duration: i(this, B).reduce((n, r) => n + (r.duration ?? 0), 0),
width: s.codedWidth,
height: s.codedHeight
}), F.info("ImgClip ready:", i(this, H));
};
let vi = Et;
var Wt, tt, lt, Y, Ue, Vi, Ct, ct;
// AudioClip (exported below as `Si`): decodes an audio source into two-channel
// PCM (one Float32Array per channel) and serves slices of it via tick().
// Bundler helpers (defined at the top of this file): m=add private member,
// i=read private field, d=write private field, A=define public field,
// C=call private method.
// Private slots: Wt=meta, tt=left-channel PCM, lt=right-channel PCM,
// Y=opts {loop, volume}, Ue/Vi=private async init, Ct=last tick time (µs),
// ct=current sample offset into the PCM arrays.
const ft = class ft {
/**
 *
 * @param dataSource audio file stream
 * @param opts audio options, controlling volume and looping
 */
constructor(t, e = {}) {
m(this, Ue);
A(this, "ready");
m(this, Wt, {
// microseconds
duration: 0,
width: 0,
height: 0
});
m(this, tt, new Float32Array());
m(this, lt, new Float32Array());
m(this, Y);
/**
 * Intercepts the data returned by {@link AudioClip.tick}, for post-processing the audio data
 * @param time the time tick was called with
 * @param tickRet the data returned by tick
 *
 * @see [Remove video green-screen background](https://webav-tech.github.io/WebAV/demo/3_2-chromakey-video)
 */
A(this, "tickInterceptor", async (t, e) => e);
// microseconds
m(this, Ct, 0);
m(this, ct, 0);
d(this, Y, {
loop: !1,
volume: 1,
...e
}), this.ready = C(this, Ue, Vi).call(this, t).then(() => ({
// audio has no width/height, nothing to draw
width: 0,
height: 0,
duration: e.loop ? 1 / 0 : i(this, Wt).duration
}));
}
/**
 * Audio metadata
 *
 * ⚠️ Note: this is the converted (normalized) metadata, not the original audio's metadata
 */
get meta() {
return {
...i(this, Wt),
sampleRate: T.sampleRate,
chanCount: 2
};
}
/**
 * Get the clip's complete PCM data
 */
getPCMData() {
return [i(this, tt), i(this, lt)];
}
// Added in the AudioClip class.
// Rescales the already-decoded PCM in place by newVolume / currentVolume.
// NOTE(review): if the current volume were 0 this would divide by zero —
// presumably callers never set volume to 0 and then raise it; confirm.
setVolume(t) {
if (t < 0 || t > 1)
throw new Error("Volume must be between 0 and 1");
const e = t / i(this, Y).volume;
for (let s = 0; s < i(this, tt).length; s++)
i(this, tt)[s] *= e;
for (let s = 0; s < i(this, lt).length; s++)
i(this, lt)[s] *= e;
i(this, Y).volume = t;
}
getVolume() {
return i(this, Y).volume;
}
/**
 * Returns the audio PCM data for the span between the previous and the current time;
 *
 * if the gap exceeds 3s, or the current time is before the previous one, the state is reset
 * @example
 * tick(0) // => []
 * tick(1e6) // => [leftChanPCM(1s), rightChanPCM(1s)]
 *
 */
async tick(t) {
if (!i(this, Y).loop && t >= i(this, Wt).duration)
return await this.tickInterceptor(t, { audio: [], state: "done" });
const e = t - i(this, Ct);
// Reset: a backwards seek or a gap larger than 3s re-aligns the sample
// offset (ct) to the new time and returns empty channels.
if (t < i(this, Ct) || e > 3e6)
return d(this, Ct, t), d(this, ct, Math.ceil(
i(this, Ct) / 1e6 * T.sampleRate
)), await this.tickInterceptor(t, {
audio: [new Float32Array(0), new Float32Array(0)],
state: "success"
});
d(this, Ct, t);
// Ke wraps reads around the PCM when looping; a plain slice otherwise.
const s = Math.ceil(
e / 1e6 * T.sampleRate
), n = i(this, ct) + s, r = i(this, Y).loop ? [
Ke(i(this, tt), i(this, ct), n),
Ke(i(this, lt), i(this, ct), n)
] : [
i(this, tt).slice(i(this, ct), n),
i(this, lt).slice(i(this, ct), n)
];
return d(this, ct, n), await this.tickInterceptor(t, { audio: r, state: "success" });
}
/**
 * Split at the given time, returning the two audio clips before and after it
 * @param time time in microseconds
 */
async split(t) {
await this.ready;
const e = Math.ceil(t / 1e6 * T.sampleRate), s = new ft(
this.getPCMData().map((r) => r.slice(0, e)),
i(this, Y)
), n = new ft(
this.getPCMData().map((r) => r.slice(e)),
i(this, Y)
);
return [s, n];
}
async clone() {
await this.ready;
const t = new ft(this.getPCMData(), i(this, Y));
return await t.ready, t;
}
/**
 * Destroy the instance and release resources
 */
destroy() {
d(this, tt, new Float32Array(0)), d(this, lt, new Float32Array(0)), F.info("---- audioclip destroy ----");
}
};
// Private init (Vi): lazily create the shared AudioContext at the standard
// sample rate, decode the source (Es for streams, otherwise pre-split PCM
// arrays are used as-is), apply the initial volume in place, and duplicate
// a mono channel into both slots when no right channel exists.
Wt = new WeakMap(), tt = new WeakMap(), lt = new WeakMap(), Y = new WeakMap(), Ue = new WeakSet(), Vi = async function(t) {
ft.ctx == null && (ft.ctx = new AudioContext({
sampleRate: T.sampleRate
}));
const e = performance.now(), s = t instanceof ReadableStream ? await Es(t, ft.ctx) : t;
F.info("Audio clip decoded complete:", performance.now() - e);
const n = i(this, Y).volume;
if (n !== 1)
for (const r of s)
for (let a = 0; a < r.length; a += 1) r[a] *= n;
i(this, Wt).duration = s[0].length / T.sampleRate * 1e6, d(this, tt, s[0]), d(this, lt, s[1] ?? i(this, tt)), F.info(
"Audio clip convert to AudioData, time:",
performance.now() - e
);
}, Ct = new WeakMap(), ct = new WeakMap(), A(ft, "ctx", null);
let Si = ft;
// Drain the ReadableStream `o` into an ArrayBuffer, decode it with the shared
// AudioContext `t`, then convert the resulting AudioBuffer to per-channel
// Float32Array PCM via li().
async function Es(o, t) {
  const rawBytes = await new Response(o).arrayBuffer();
  const decoded = await t.decodeAudioData(rawBytes);
  return li(decoded);
}
var zt, be, te, ee;
// MediaStreamClip (exported below as `Ai`): wraps a live MediaStream.
// Private slots: zt=meta (duration is Infinity for live streams),
// be=stop-rendering callback returned by Ds, te=latest rendered canvas
// (set once the first video frame arrives), ee=the wrapped MediaStream.
const He = class He {
constructor(t) {
A(this, "ready");
m(this, zt, {
// microseconds
duration: 0,
width: 0,
height: 0
});
m(this, be, () => {
});
/**
 * The live stream's audio track
 */
A(this, "audioTrack");
m(this, te, null);
m(this, ee);
d(this, ee, t), this.audioTrack = t.getAudioTracks()[0] ?? null, i(this, zt).duration = 1 / 0;
const e = t.getVideoTracks()[0];
// With a video track, `ready` resolves once the first frame fixes the
// width/height; without one it resolves immediately with zero dimensions.
e != null ? (e.contentHint = "motion", this.ready = new Promise((s) => {
d(this, be, Ds(e, (n) => {
i(this, zt).width = n.width, i(this, zt).height = n.height, d(this, te, n), s(this.meta);
}));
})) : this.ready = Promise.resolve(this.meta);
}
get meta() {
return {
...i(this, zt)
};
}
// Snapshot the most recently rendered frame; audio is always empty here
// (the audio track is exposed separately via `audioTrack`).
async tick() {
return {
video: i(this, te) == null ? null : await createImageBitmap(i(this, te)),
audio: [],
state: "success"
};
}
// A live stream cannot be cut at a time point; both halves are clones.
async split() {
return [await this.clone(), await this.clone()];
}
async clone() {
return new He(i(this, ee).clone());
}
// Stop every track and the frame-rendering loop.
destroy() {
i(this, ee).getTracks().forEach((t) => t.stop()), i(this, be).call(this);
}
};
zt = new WeakMap(), be = new WeakMap(), te = new WeakMap(), ee = new WeakMap(), A(He, "ctx", null);
let Ai = He;
// Continuously draw frames from video track `o` onto an OffscreenCanvas.
// `t` is invoked exactly once — with the canvas — when the first frame
// arrives (which fixes the canvas size). Returns the stop function produced
// by oi() (autoReadStream). Each VideoFrame is closed after drawing.
function Ds(o, t) {
  let initialized = false;
  let ctx2d;
  const frameStream = new MediaStreamTrackProcessor({
    track: o
  }).readable;
  return oi(frameStream, {
    onChunk: async (frame) => {
      if (!initialized) {
        const { displayWidth, displayHeight } = frame;
        const canvas = new OffscreenCanvas(displayWidth ?? 0, displayHeight ?? 0);
        ctx2d = canvas.getContext("2d");
        t(canvas);
        initialized = true;
      }
      ctx2d.drawImage(frame, 0, 0);
      frame.close();
    },
    onDone: async () => {
    }
  });
}
var R, ie, Ne, I, et, G, M, vt, xe, V, ti, Ui, ei, Hi, Ni, $i;
const ce = class ce {
// Subtitle clip constructor. `t` is either pre-parsed cues
// [{start, end, text}] in microseconds, or an SRT string parsed by Ps
// (seconds, converted to microseconds here). `e` provides the canvas size
// and style overrides. Private slots: R=cues, ie=meta, Ne=default style,
// I=resolved options, et/G=offscreen canvas + 2d context, M=cached rendered
// VideoFrame, vt/xe=numeric render state.
constructor(t, e) {
m(this, V);
A(this, "ready");
m(this, R, []);
m(this, ie, {
width: 0,
height: 0,
duration: 0
});
// Default text style, overridden per-instance by e.style below.
m(this, Ne, {
fontSize: 30,
fontFamily: "Noto Sans SC",
fontWeight: "normal",
fontColor: "#FFF",
fontStyle: "normal",
textAlign: "center",
letterSpacing: 0,
lineHeight: 1.5,
stroke: "#000",
strokeWidth: 5,
shadow: !0,
shadowColor: "#000",
shadowBlur: 4,
shadowAngle: 45,
shadowDistance: 2,
padding: 0,
backgroundColor: "",
wordWrapWidth: 0,
breakWords: !0
});
m(this, I);
m(this, et, new OffscreenCanvas(1, 1));
m(this, G, i(this, et).getContext("2d"));
m(this, M, null);
m(this, vt, 0);
m(this, xe, 0);
var s;
if (d(this, R, Array.isArray(t) ? t : Ps(t).map(({ start: n, end: r, text: a }) => ({
start: n * 1e6,
end: r * 1e6,
text: a
}))), i(this, R).length === 0) throw Error("No subtitles content");
// The private method (ti) prepares the render state from the resolved
// style; meta.duration is the end time of the last cue.
d(this, I, {
type: "srt",
width: e.width,
height: e.height,
style: {
...i(this, Ne),
...e.style
}
}), C(this, V, ti).call(this), d(this, ie, {
width: i(this, I).width,
height: i(this, I).height,
duration: ((s = i(this, R).at(-1)) == null ? void 0 : s.end) ?? 0
}), this.ready = Promise.resolve(this.meta);
}
// Snapshot copy of the clip's metadata (width/height/duration).
get meta() {
return { ...i(this, ie) };
}
/**
 * Update the subtitle style
 */
setStyle(t) {
var e;
// Merge the new style over the current one, re-run the private style-prep
// method (ti), and drop the cached frame M so the next tick() re-renders.
i(this, I).style = {
...i(this, I).style,
...t
}, C(this, V, ti).call(this), (e = i(this, M)) == null || e.close(), d(this, M, null);
}
/**
* @see {@link IClip.tick}
*/
async tick(t) {
var r, a;
if (i(this, M) != null && t >= i(this, M).timestamp && t <= i(this, M).timestamp + (i(this, M).duration ?? 0))
return { video: i(this, M).clone(), state: "success" };
let e = 0;
for (; e < i(this, R).length && !(t <= i(this, R)[e].end); e += 1)
;
const s = i(this, R)[e] ?? i(this, R).at(-1);
if (t > s.end) return { state: "done" };
if (t <