/**
 * @spatialwalk/avatarkit
 * SPAvatar SDK - 3D Gaussian Splatting Avatar Rendering SDK
 */
import { A as m, e as f, a as c, l as u } from "./index-Dsokgngg.js";
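// The minified imports from the shared chunk are used as follows in this file
// (names inferred from usage in this module; the underlying exports are not documented here):
//   m - SDK defaults; only m.audio.sampleRate is read (fallback sample rate)
//   f - converts a thrown value into an error message string
//   c - telemetry/event logger exposing logEvent(name, severity, payload)
//   u - console logger exposing log, error and errorWithError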
/**
 * Streams 16-bit PCM audio chunks through the Web Audio API, scheduling them
 * back-to-back for gapless playback with pause/resume support.
 */
class StreamingAudioPlayer {
  constructor(options) {
    // AudioContext is managed internally
    this.audioContext = null;
    // Output format and logging
    this.sampleRate = options?.sampleRate ?? m.audio.sampleRate;
    this.channelCount = options?.channelCount ?? 1;
    this.debug = options?.debug ?? false;
    // Session-level state
    this.sessionId = `session_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
    this.sessionStartTime = 0; // AudioContext time when session started
    this.pausedTimeOffset = 0; // Accumulated paused time
    this.pausedAt = 0; // Time when paused
    this.pausedAudioContextTime = 0; // audioContext.currentTime when paused (for resume calculation)
    this.scheduledTime = 0; // Next chunk schedule time in AudioContext time
    // Playback state
    this.isPlaying = false;
    this.isPaused = false;
    this.autoStartEnabled = true; // Control whether to auto-start when buffer is ready
    // Audio buffer queue: { data: raw 16-bit PCM bytes, isLast }
    this.audioChunks = [];
    this.scheduledChunks = 0; // Number of chunks already scheduled
    this.activeSources = new Set();
    // Event callbacks
    this.onEndedCallback = undefined;
  }
  /**
   * Initialize audio context (create and ensure it's ready)
   */
  async initialize() {
    if (this.audioContext) return;
    try {
      this.audioContext = new AudioContext({
        sampleRate: this.sampleRate
      });
      // Browsers may create the context in a "suspended" state (autoplay policy);
      // resume it so scheduled sources actually produce sound.
      if (this.audioContext.state === "suspended") {
        await this.audioContext.resume();
      }
      this.log("AudioContext initialized", {
        sessionId: this.sessionId,
        sampleRate: this.audioContext.sampleRate,
        state: this.audioContext.state
      });
    } catch (error) {
      const message = f(error);
      c.logEvent("activeAudioSessionFailed", "warning", {
        sessionId: this.sessionId,
        reason: message
      });
      u.error("Failed to initialize AudioContext:", message);
      throw error instanceof Error ? error : new Error(message);
    }
  }
  /**
   * Add audio chunk (16-bit PCM)
   */
  addChunk(data, isLast = false) {
    if (!this.audioContext) {
      u.error("AudioContext not initialized");
      return;
    }
    this.audioChunks.push({ data, isLast });
    this.log(`Added chunk ${this.audioChunks.length}`, {
      size: data.length,
      totalChunks: this.audioChunks.length,
      isLast,
      isPlaying: this.isPlaying,
      scheduledChunks: this.scheduledChunks
    });
    if (!this.isPlaying && this.autoStartEnabled && this.audioChunks.length > 0) {
      this.log("[StreamingAudioPlayer] Auto-starting playback from addChunk");
      this.startPlayback();
    } else if (this.isPlaying && !this.isPaused) {
      this.log("[StreamingAudioPlayer] Already playing, scheduling next chunk");
      this.scheduleNextChunk();
    } else {
      this.log("[StreamingAudioPlayer] Buffering chunk (auto-start disabled or playback paused)");
    }
  }
  /**
   * Start new session (stop current and start fresh)
   */
  async startNewSession(chunks) {
    this.stop();
    this.sessionId = `session_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
    this.audioChunks = [];
    this.scheduledChunks = 0;
    this.pausedTimeOffset = 0;
    this.pausedAt = 0;
    this.pausedAudioContextTime = 0;
    this.log("Starting new session", {
      chunks: chunks.length
    });
    for (const chunk of chunks) {
      this.addChunk(chunk.data, chunk.isLast);
    }
  }
  /**
   * Start playback
   */
  startPlayback() {
    if (!this.audioContext) {
      this.log("[StreamingAudioPlayer] Cannot start playback: AudioContext not initialized");
      return;
    }
    if (this.isPlaying) {
      this.log("[StreamingAudioPlayer] Cannot start playback: Already playing");
      return;
    }
    this.isPlaying = true;
    this.sessionStartTime = this.audioContext.currentTime;
    this.scheduledTime = this.sessionStartTime;
    this.log("[StreamingAudioPlayer] Starting playback", {
      sessionStartTime: this.sessionStartTime,
      bufferedChunks: this.audioChunks.length,
      scheduledChunks: this.scheduledChunks,
      activeSources: this.activeSources.size
    });
    this.scheduleAllChunks();
  }
  /**
   * Schedule all pending chunks
   */
  scheduleAllChunks() {
    while (this.scheduledChunks < this.audioChunks.length) {
      this.scheduleNextChunk();
    }
  }
  /**
   * Schedule next audio chunk
   */
  scheduleNextChunk() {
    if (!this.audioContext) {
      this.log("[StreamingAudioPlayer] Cannot schedule chunk: AudioContext not initialized");
      return;
    }
    if (!this.isPlaying || this.isPaused) {
      this.log("[StreamingAudioPlayer] Cannot schedule chunk: Not playing or paused");
      return;
    }
    const chunkIndex = this.scheduledChunks;
    if (chunkIndex >= this.audioChunks.length) {
      this.log(`[StreamingAudioPlayer] No more chunks to schedule (chunkIndex: ${chunkIndex}, totalChunks: ${this.audioChunks.length})`);
      return;
    }
    const chunk = this.audioChunks[chunkIndex];
    // Skip empty chunks unless they carry the end-of-stream marker
    if (chunk.data.length === 0 && !chunk.isLast) {
      this.scheduledChunks++;
      return;
    }
    const pcmData = chunk.data;
    const isLast = chunk.isLast;
    const audioBuffer = this.pcmToAudioBuffer(pcmData);
    if (!audioBuffer) {
      u.error("Failed to create AudioBuffer from PCM data");
      c.logEvent("character_player", "error", {
        sessionId: this.sessionId,
        event: "audio_buffer_creation_failed"
      });
      // Count the chunk as handled so scheduleAllChunks does not retry it forever
      this.scheduledChunks++;
      return;
    }
    try {
      const source = this.audioContext.createBufferSource();
      source.buffer = audioBuffer;
      source.connect(this.audioContext.destination);
      // Queue the chunk back-to-back with the previous one for gapless playback
      source.start(this.scheduledTime);
      this.activeSources.add(source);
      source.onended = () => {
        this.activeSources.delete(source);
        if (isLast && this.activeSources.size === 0) {
          this.log("Last audio chunk ended, marking playback as ended");
          this.markEnded();
        }
      };
      this.scheduledTime += audioBuffer.duration;
      this.scheduledChunks++;
      this.log(`[StreamingAudioPlayer] Scheduled chunk ${chunkIndex + 1}/${this.audioChunks.length}`, {
        startTime: this.scheduledTime - audioBuffer.duration,
        duration: audioBuffer.duration,
        nextScheduleTime: this.scheduledTime,
        isLast,
        activeSources: this.activeSources.size
      });
    } catch (error) {
      u.errorWithError("Failed to schedule audio chunk:", error);
      c.logEvent("character_player", "error", {
        sessionId: this.sessionId,
        event: "schedule_chunk_failed",
        reason: error instanceof Error ? error.message : String(error)
      });
      // Skip the failing chunk so scheduleAllChunks cannot loop on it indefinitely
      this.scheduledChunks++;
    }
  }
  /**
   * Convert PCM data to AudioBuffer
   * Input: 16-bit PCM (int16), Output: AudioBuffer (float32 [-1, 1])
   */
  pcmToAudioBuffer(pcmBytes) {
    if (!this.audioContext) {
      return null;
    }
    if (pcmBytes.length === 0) {
      // Empty chunk: return 10 ms of silence so the end-of-stream marker still fires onended
      const silentFrames = Math.floor(this.sampleRate * 0.01);
      const silentBuffer = this.audioContext.createBuffer(
        this.channelCount,
        silentFrames,
        this.sampleRate
      );
      for (let channel = 0; channel < this.channelCount; channel++) {
        silentBuffer.getChannelData(channel).fill(0);
      }
      return silentBuffer;
    }
    // Reinterpret the raw bytes as interleaved int16 samples (platform byte order)
    const bytes = new Uint8Array(pcmBytes);
    const samples = new Int16Array(bytes.buffer, 0, bytes.length / 2);
    const frameCount = samples.length / this.channelCount;
    const audioBuffer = this.audioContext.createBuffer(
      this.channelCount,
      frameCount,
      this.sampleRate
    );
    for (let channel = 0; channel < this.channelCount; channel++) {
      const channelData = audioBuffer.getChannelData(channel);
      for (let frame = 0; frame < frameCount; frame++) {
        // De-interleave and normalize int16 [-32768, 32767] to float32 [-1, 1)
        const sampleIndex = frame * this.channelCount + channel;
        channelData[frame] = samples[sampleIndex] / 32768;
      }
    }
    return audioBuffer;
  }
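  // Illustrative example of the conversion above, assuming 2 channels and a
  // little-endian platform (values chosen only to show the arithmetic):
  //   bytes [0x00, 0x40, 0x00, 0xC0] -> int16 samples [16384, -16384]
  //   frame 0 -> left  = 16384 / 32768 =  0.5
  //              right = -16384 / 32768 = -0.5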
  /**
   * Get current playback time (seconds)
   */
  getCurrentTime() {
    if (!this.audioContext || !this.isPlaying) {
      return 0;
    }
    if (this.isPaused) {
      return this.pausedAt;
    }
    const elapsed = this.audioContext.currentTime - this.sessionStartTime - this.pausedTimeOffset;
    return Math.max(0, elapsed);
  }
  /**
   * Pause playback
   */
  pause() {
    if (!this.isPlaying || this.isPaused || !this.audioContext) {
      return;
    }
    this.pausedAt = this.getCurrentTime();
    this.pausedAudioContextTime = this.audioContext.currentTime;
    this.isPaused = true;
    if (this.audioContext.state === "running") {
      this.audioContext.suspend().catch((error) => {
        u.errorWithError("Failed to suspend AudioContext:", error);
        this.isPaused = false;
      });
    }
    this.log("Playback paused", {
      pausedAt: this.pausedAt,
      pausedAudioContextTime: this.pausedAudioContextTime,
      audioContextState: this.audioContext.state
    });
  }
  /**
   * Resume playback
   */
  async resume() {
    if (!this.isPaused || !this.audioContext || !this.isPlaying) {
      return;
    }
    if (this.audioContext.state === "suspended") {
      try {
        await this.audioContext.resume();
      } catch (error) {
        u.errorWithError("Failed to resume AudioContext:", error);
        throw error;
      }
    }
    const now = this.audioContext.currentTime;
    // Rebase the session start so getCurrentTime() continues from pausedAt
    this.sessionStartTime = this.pausedAudioContextTime - this.pausedAt - this.pausedTimeOffset;
    this.isPaused = false;
    if (this.scheduledChunks < this.audioChunks.length) {
      this.scheduleAllChunks();
    }
    this.log("Playback resumed", {
      pausedAt: this.pausedAt,
      pausedAudioContextTime: this.pausedAudioContextTime,
      currentAudioContextTime: now,
      adjustedSessionStartTime: this.sessionStartTime,
      audioContextState: this.audioContext.state
    });
  }
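  // Illustrative walkthrough of the pause/resume bookkeeping above:
  //   playback starts at audioContext.currentTime = 10 -> sessionStartTime = 10
  //   pause() at currentTime = 14 -> pausedAt = 4, pausedAudioContextTime = 14;
  //     while suspended, currentTime stops advancing
  //   resume() -> sessionStartTime = 14 - 4 - 0 = 10, so getCurrentTime() picks up at ~4 s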
  /**
   * Stop playback
   */
  stop() {
    if (!this.audioContext) {
      return;
    }
    // If stopped while paused, leave the AudioContext in a running state again
    if (this.isPaused && this.audioContext.state === "suspended") {
      this.audioContext.resume().catch(() => {
      });
      this.isPaused = false;
    }
    this.isPlaying = false;
    this.isPaused = false;
    this.sessionStartTime = 0;
    this.scheduledTime = 0;
    for (const source of this.activeSources) {
      source.onended = null;
      try {
        source.stop(0);
      } catch {
      }
      try {
        source.disconnect();
      } catch {
      }
    }
    this.activeSources.clear();
    this.audioChunks = [];
    this.scheduledChunks = 0;
    this.log("[StreamingAudioPlayer] Playback stopped, state reset");
  }
  /**
   * Enable or disable auto-start (for delayed start scenarios)
   */
  setAutoStart(enabled) {
    this.autoStartEnabled = enabled;
    this.log(`Auto-start ${enabled ? "enabled" : "disabled"}`);
  }
  /**
   * Start playback manually (for delayed start scenarios)
   * This allows starting playback after transition animation completes
   */
  play() {
    if (!this.isPlaying) {
      this.autoStartEnabled = true;
      this.startPlayback();
    }
  }
  /**
   * Mark playback as ended
   */
  markEnded() {
    this.log("Playback ended");
    this.isPlaying = false;
    this.onEndedCallback?.();
  }
  /**
   * Set ended callback
   */
  onEnded(callback) {
    this.onEndedCallback = callback;
  }
  /**
   * Check if playing
   */
  isPlayingNow() {
    return this.isPlaying && !this.isPaused;
  }
  /**
   * Get total duration of buffered audio
   */
  getBufferedDuration() {
    if (!this.audioContext) {
      return 0;
    }
    let totalFrames = 0;
    for (const chunk of this.audioChunks) {
      // 2 bytes per 16-bit sample, samples interleaved across channels
      totalFrames += chunk.data.length / 2 / this.channelCount;
    }
    return totalFrames / this.sampleRate;
  }
  /**
   * Get remaining duration (buffered - played) in seconds
   */
  getRemainingDuration() {
    const buffered = this.getBufferedDuration();
    const played = this.getCurrentTime();
    return Math.max(0, buffered - played);
  }
  /**
   * Dispose and cleanup
   */
  dispose() {
    this.stop();
    if (this.audioContext) {
      this.audioContext.close();
      this.audioContext = null;
    }
    this.audioChunks = [];
    this.scheduledChunks = 0;
    this.sessionStartTime = 0;
    this.pausedTimeOffset = 0;
    this.pausedAt = 0;
    this.pausedAudioContextTime = 0;
    this.scheduledTime = 0;
    this.onEndedCallback = undefined;
    this.log("StreamingAudioPlayer disposed");
  }
  /**
   * Flush buffered audio
   * - hard: stops all playing sources and clears all chunks
   * - soft (default): clears UNSCHEDULED chunks only
   */
  flush(options) {
    if (options?.hard === true) {
      this.stop();
      this.audioChunks = [];
      this.scheduledChunks = 0;
      this.sessionStartTime = 0;
      this.pausedAt = 0;
      this.scheduledTime = 0;
      this.log("Flushed (hard)");
      return;
    }
    if (this.scheduledChunks < this.audioChunks.length) {
      this.audioChunks.splice(this.scheduledChunks);
    }
    this.log("Flushed (soft)", { remainingScheduled: this.scheduledChunks });
  }
  /**
   * Debug logging
   */
  log(message, data) {
    if (this.debug) {
      u.log(`[StreamingAudioPlayer] ${message}`, data || "");
    }
  }
}
export {
  StreamingAudioPlayer
};
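/*
 * Usage sketch (illustrative only; the import path, sample rate, and the
 * pcmChunkStream chunk source are assumptions, not part of the SDK's
 * documented API):
 *
 *   import { StreamingAudioPlayer } from "@spatialwalk/avatarkit";
 *
 *   const player = new StreamingAudioPlayer({ sampleRate: 16000, channelCount: 1, debug: true });
 *   await player.initialize();       // call from a user gesture so the AudioContext can start
 *   player.onEnded(() => console.log("speech finished"));
 *
 *   // Feed raw 16-bit PCM bytes as they arrive, marking the final chunk:
 *   for await (const { bytes, done } of pcmChunkStream) {
 *     player.addChunk(bytes, done);  // auto-starts playback once a chunk is buffered
 *   }
 *
 *   player.pause();                  // freeze playback and the position clock
 *   await player.resume();           // continue from the paused position
 *   player.dispose();                // stop sources and close the AudioContext
 */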
//# sourceMappingURL=StreamingAudioPlayer-COgQTrz3.js.map