// flamflix-sdk — Flamflix SDK: video player with alpha channel support and
// audio processing (BETA)
// Version: 0.1.2 — 1,309 lines (1,278 loc) • 92.4 kB — JavaScript
/*!
* Flamflix SDK v0.1.2
* (c) 2025 Flamflix
* Released under the MIT License.
*/
function _arrayLikeToArray(r, a) {
  // Copy the first `a` elements of an array-like `r` into a fresh Array.
  // When `a` is null/undefined or exceeds the source length, copy everything.
  if (a == null || a > r.length) {
    a = r.length;
  }
  var out = new Array(a);
  for (var i = 0; i < a; i++) {
    out[i] = r[i];
  }
  return out;
}
function _arrayWithHoles(r) {
  // Destructuring fast path: genuine arrays pass through untouched,
  // everything else yields undefined so the next strategy can try.
  return Array.isArray(r) ? r : void 0;
}
function _assertClassBrand(e, t, n) {
  // Verify `t` carries the private "brand" `e` (either the class constructor
  // itself, or membership in a WeakSet/WeakMap of branded instances).
  // Returns `n` when a third argument was supplied, otherwise `t`.
  var branded = typeof e === "function" ? e === t : e.has(t);
  if (!branded) {
    throw new TypeError("Private element is not present on this object");
  }
  return arguments.length < 3 ? t : n;
}
function asyncGeneratorStep(n, t, e, r, o, a, c) {
  // Advance generator `n` by one step via method `a` ("next"/"throw") with
  // argument `c`. A synchronous throw rejects (`e`); a done result resolves
  // (`t`); otherwise the yielded value is awaited and fed back via r/o.
  var step;
  try {
    step = n[a](c);
  } catch (err) {
    e(err);
    return;
  }
  if (step.done) {
    t(step.value);
  } else {
    Promise.resolve(step.value).then(r, o);
  }
}
function _asyncToGenerator(n) {
  // Wrap generator function `n` so invoking the wrapper drives the generator
  // to completion and returns a Promise for its final return value
  // (the transpiled form of an async function).
  return function () {
    var self = this;
    var args = arguments;
    return new Promise(function (resolve, reject) {
      var gen = n.apply(self, args);
      function _next(value) {
        asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value);
      }
      function _throw(err) {
        asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err);
      }
      _next(void 0);
    });
  };
}
function _checkPrivateRedeclaration(e, t) {
  // Guard: an object may be registered in a given private collection only once.
  if (t.has(e)) {
    throw new TypeError("Cannot initialize the same private elements twice on an object");
  }
}
function _classCallCheck(a, n) {
  // Enforce `new`-only invocation: `a` must be an instance of constructor `n`.
  var isInstance = a instanceof n;
  if (!isInstance) {
    throw new TypeError("Cannot call a class as a function");
  }
}
function _classPrivateFieldGet2(s, a) {
  // Read a private field: brand-check receiver `a` against map `s`, then look it up.
  var receiver = _assertClassBrand(s, a);
  return s.get(receiver);
}
function _classPrivateFieldInitSpec(e, t, a) {
  // Initialize private field storage: register instance `e` in map `t`
  // with initial value `a`, rejecting double initialization.
  _checkPrivateRedeclaration(e, t);
  t.set(e, a);
}
function _classPrivateFieldSet2(s, a, r) {
  // Write a private field: brand-check receiver `a` against map `s`,
  // store value `r`, and return the assigned value (assignment semantics).
  var receiver = _assertClassBrand(s, a);
  s.set(receiver, r);
  return r;
}
function _classPrivateMethodInitSpec(e, a) {
  // Brand instance `e` for private-method access by adding it to set `a`,
  // rejecting double initialization.
  _checkPrivateRedeclaration(e, a);
  a.add(e);
}
function _defineProperties(e, r) {
  // Install each descriptor in `r` onto `e`, normalizing flags the way class
  // members behave: non-enumerable unless stated, always configurable, and
  // writable whenever a data value is present.
  for (var idx = 0; idx < r.length; idx++) {
    var descriptor = r[idx];
    descriptor.enumerable = descriptor.enumerable || false;
    descriptor.configurable = true;
    if ("value" in descriptor) {
      descriptor.writable = true;
    }
    Object.defineProperty(e, _toPropertyKey(descriptor.key), descriptor);
  }
}
function _createClass(e, r, t) {
  // Attach prototype members `r` to constructor `e` and lock its `prototype`
  // property, matching native class semantics. (`t` — static members — is
  // unused in this build; the parameter is kept for signature compatibility.)
  if (r) {
    _defineProperties(e.prototype, r);
  }
  Object.defineProperty(e, "prototype", { writable: false });
  return e;
}
function _defineProperty(e, r, t) {
  // Set property `r` (coerced to a valid key) on `e` to value `t`.
  // Uses defineProperty for keys that already exist so flags are normalized;
  // plain assignment otherwise. Returns `e` for chaining.
  var key = _toPropertyKey(r);
  if (key in e) {
    Object.defineProperty(e, key, {
      value: t,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    e[key] = t;
  }
  return e;
}
function _iterableToArrayLimit(r, l) {
  // Consume up to `l` values from iterable `r` into an array; returns
  // undefined when `r` is not iterable so the next destructuring strategy
  // can run. Mirrors spec destructuring semantics: on early exit the
  // iterator's return() is invoked, and a caught error is rethrown only
  // after that cleanup completes.
  var t = null == r ? null : "undefined" != typeof Symbol && r[Symbol.iterator] || r["@@iterator"];
  if (null != t) {
    var e,
      n,
      i,
      u,
      a = [],
      f = true,
      o = false;
    try {
      // l === 0 means "take nothing" but still open (and later close) the iterator.
      if (i = (t = t.call(r)).next, 0 === l) ; else for (; !(f = (e = i.call(t)).done) && (a.push(e.value), a.length !== l); f = !0);
    } catch (r) {
      // Remember the error; it is rethrown after iterator cleanup below.
      o = true, n = r;
    } finally {
      try {
        // Stopped before exhaustion: close the iterator via return().
        // A non-object return() result aborts per spec.
        if (!f && null != t.return && (u = t.return(), Object(u) !== u)) return;
      } finally {
        if (o) throw n;
      }
    }
    return a;
  }
}
function _nonIterableRest() {
  // Terminal fallback for destructuring: every strategy failed, so the value
  // was not iterable at all.
  throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.");
}
function ownKeys(e, r) {
  // Collect `e`'s own enumerable string keys plus its own symbol keys;
  // when `r` is truthy, only enumerable symbols are kept.
  var keys = Object.keys(e);
  if (Object.getOwnPropertySymbols) {
    var symbols = Object.getOwnPropertySymbols(e);
    if (r) {
      symbols = symbols.filter(function (sym) {
        return Object.getOwnPropertyDescriptor(e, sym).enumerable;
      });
    }
    keys.push.apply(keys, symbols);
  }
  return keys;
}
function _objectSpread2(e) {
  // Object-spread helper: merge each subsequent argument into target `e`.
  // Babel alternates argument parity: odd positions are spread sources
  // (copied value-wise), even positions are descriptor merges.
  for (var idx = 1; idx < arguments.length; idx++) {
    var source = arguments[idx] != null ? arguments[idx] : {};
    if (idx % 2) {
      ownKeys(Object(source), true).forEach(function (key) {
        _defineProperty(e, key, source[key]);
      });
    } else if (Object.getOwnPropertyDescriptors) {
      Object.defineProperties(e, Object.getOwnPropertyDescriptors(source));
    } else {
      ownKeys(Object(source)).forEach(function (key) {
        Object.defineProperty(e, key, Object.getOwnPropertyDescriptor(source, key));
      });
    }
  }
  return e;
}
function _regenerator() {
  /*! regenerator-runtime -- Copyright (c) 2014-present, Facebook, Inc. -- license (MIT): https://github.com/babel/babel/blob/main/packages/babel-helpers/LICENSE */
  // Minified regenerator runtime powering every transpiled async function in
  // this bundle. Returns { w: wrap, m: mark } and memoizes itself on first
  // call. NOTE(review): auto-generated, deliberately left byte-identical —
  // do not hand-edit the logic.
  var e,
    t,
    r = "function" == typeof Symbol ? Symbol : {},
    n = r.iterator || "@@iterator",
    o = r.toStringTag || "@@toStringTag";
  // i(innerFn, outerFn, self, tryLocsList) — "wrap": builds a generator
  // object whose _invoke driver steps innerFn through the numbered states
  // that Babel emits (the `switch (_context.n)` bodies elsewhere in this file).
  function i(r, n, o, i) {
    var c = n && n.prototype instanceof Generator ? n : Generator,
      u = Object.create(c.prototype);
    return _regeneratorDefine(u, "_invoke", function (r, n, o) {
      var i,
        c,
        u,
        f = 0,
        p = o || [],
        y = false,
        G = {
          p: 0,
          n: 0,
          v: e,
          a: d,
          f: d.bind(e, 4),
          d: function (t, r) {
            return i = t, c = 0, u = e, G.n = r, a;
          }
        };
      // d(type, arg): dispatch an abrupt completion (throw / return / finally
      // resume) against the try-location table `p`; throws out of the
      // generator when nothing handles it.
      function d(r, n) {
        for (c = r, u = n, t = 0; !y && f && !o && t < p.length; t++) {
          var o,
            i = p[t],
            d = G.p,
            l = i[2];
          r > 3 ? (o = l === n) && (u = i[(c = i[4]) ? 5 : (c = 3, 3)], i[4] = i[5] = e) : i[0] <= d && ((o = r < 2 && d < i[1]) ? (c = 0, G.v = n, G.n = i[1]) : d < l && (o = r < 3 || i[0] > n || n > l) && (i[4] = r, i[5] = n, G.n = l, c = 0));
        }
        if (o || r > 1) return a;
        throw y = true, n;
      }
      // The per-call step function: (method, arg, extra) -> IteratorResult.
      return function (o, p, l) {
        if (f > 1) throw TypeError("Generator is already running");
        for (y && 1 === p && d(p, l), c = p, u = l; (t = c < 2 ? e : u) || !y;) {
          i || (c ? c < 3 ? (c > 1 && (G.n = -1), d(c, u)) : G.n = u : G.v = u);
          try {
            if (f = 2, i) {
              if (c || (o = "next"), t = i[o]) {
                if (!(t = t.call(i, u))) throw TypeError("iterator result is not an object");
                if (!t.done) return t;
                u = t.value, c < 2 && (c = 0);
              } else 1 === c && (t = i.return) && t.call(i), c < 2 && (u = TypeError("The iterator does not provide a '" + o + "' method"), c = 1);
              i = e;
            } else if ((t = (y = G.n < 0) ? u : r.call(n, G)) !== a) break;
          } catch (t) {
            i = e, c = 1, u = t;
          } finally {
            f = 1;
          }
        }
        return {
          value: t,
          done: y
        };
      };
    }(r, o, i), true), u;
  }
  var a = {};
  // Base prototypes mirroring native generator objects / generator functions.
  function Generator() {}
  function GeneratorFunction() {}
  function GeneratorFunctionPrototype() {}
  t = Object.getPrototypeOf;
  // Derive the shared iterator prototype from a native array iterator when
  // available, otherwise synthesize one.
  var c = [][n] ? t(t([][n]())) : (_regeneratorDefine(t = {}, n, function () {
    return this;
  }), t),
    u = GeneratorFunctionPrototype.prototype = Generator.prototype = Object.create(c);
  // f(genFn) — "mark": tag genFn as a generator function and wire prototypes.
  function f(e) {
    return Object.setPrototypeOf ? Object.setPrototypeOf(e, GeneratorFunctionPrototype) : (e.__proto__ = GeneratorFunctionPrototype, _regeneratorDefine(e, o, "GeneratorFunction")), e.prototype = Object.create(u), e;
  }
  // Finish prototype wiring, then replace _regenerator with a cheap memoized
  // accessor and return the runtime object.
  return GeneratorFunction.prototype = GeneratorFunctionPrototype, _regeneratorDefine(u, "constructor", GeneratorFunctionPrototype), _regeneratorDefine(GeneratorFunctionPrototype, "constructor", GeneratorFunction), GeneratorFunction.displayName = "GeneratorFunction", _regeneratorDefine(GeneratorFunctionPrototype, o, "GeneratorFunction"), _regeneratorDefine(u), _regeneratorDefine(u, o, "Generator"), _regeneratorDefine(u, n, function () {
    return this;
  }), _regeneratorDefine(u, "toString", function () {
    return "[object Generator]";
  }), (_regenerator = function () {
    return {
      w: i,
      m: f
    };
  })();
}
function _regeneratorDefine(e, r, n, t) {
  // Property-definition helper for the regenerator runtime. Probes whether
  // Object.defineProperty works in this environment once, then replaces
  // itself with the real implementation (self-memoizing).
  var i = Object.defineProperty;
  try {
    i({}, "", {});
  } catch (e) {
    // defineProperty unusable (ancient engines): fall back to assignment.
    i = 0;
  }
  _regeneratorDefine = function (e, r, n, t) {
    // With no key `r`, install the three delegating iterator methods
    // (next/throw/return) that forward to the generator's _invoke driver.
    function o(r, n) {
      _regeneratorDefine(e, r, function (e) {
        return this._invoke(r, n, e);
      });
    }
    // `t` truthy means "hidden": non-enumerable/configurable/writable.
    r ? i ? i(e, r, {
      value: n,
      enumerable: !t,
      configurable: !t,
      writable: !t
    }) : e[r] = n : (o("next", 0), o("throw", 1), o("return", 2));
  }, _regeneratorDefine(e, r, n, t);
}
function _slicedToArray(r, e) {
  // Array-destructuring entry point: try each strategy in order —
  // real array, iterable (limited to `e` items), array-like/typed-array
  // fallback — and throw via _nonIterableRest when all fail.
  return (
    _arrayWithHoles(r) ||
    _iterableToArrayLimit(r, e) ||
    _unsupportedIterableToArray(r, e) ||
    _nonIterableRest()
  );
}
function _toPrimitive(t, r) {
  // ToPrimitive(value, hint): primitives pass through; objects are converted
  // via their Symbol.toPrimitive method when present, else via String().
  if (typeof t !== "object" || t === null) {
    return t;
  }
  var exotic = t[Symbol.toPrimitive];
  if (exotic !== undefined) {
    var result = exotic.call(t, r);
    if (typeof result !== "object") {
      return result;
    }
    throw new TypeError("@@toPrimitive must return a primitive value.");
  }
  return String(t);
}
function _toPropertyKey(t) {
  // ToPropertyKey: symbols stay symbols, everything else becomes a string.
  var key = _toPrimitive(t, "string");
  if (typeof key === "symbol") {
    return key;
  }
  return String(key);
}
function _unsupportedIterableToArray(r, a) {
  // Best-effort array conversion for values the earlier destructuring
  // strategies could not handle: strings, Map/Set, arguments objects, and
  // the integer typed arrays. Returns undefined for anything else.
  if (!r) return;
  if (typeof r === "string") {
    return _arrayLikeToArray(r, a);
  }
  var tag = {}.toString.call(r).slice(8, -1);
  if (tag === "Object" && r.constructor) {
    tag = r.constructor.name;
  }
  if (tag === "Map" || tag === "Set") {
    return Array.from(r);
  }
  if (tag === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(tag)) {
    return _arrayLikeToArray(r, a);
  }
  return void 0;
}
// Flamflix SDK - Main Entry Point
// Only initializeFlamflix is exported as the public API
/**
 * Encodes an AudioBuffer as a 16-bit PCM WAV file.
 *
 * Writes a standard 44-byte RIFF/WAVE header followed by interleaved
 * little-endian samples (channel-major within each frame). Samples are
 * clamped to [-1, 1] before scaling to the int16 range.
 *
 * @param {AudioBuffer} buffer - Decoded audio (numberOfChannels, sampleRate,
 *   length, and getChannelData(ch) are read).
 * @returns {Blob} WAV data with MIME type "audio/wav".
 */
function audioBufferToWav(buffer) {
  var numChannels = buffer.numberOfChannels;
  var sampleRate = buffer.sampleRate;
  var format = 1; // PCM
  var bitDepth = 16;
  var bytesPerSample = bitDepth / 8;
  var blockAlign = numChannels * bytesPerSample;
  var length = buffer.length;
  var dataSize = length * numChannels * bytesPerSample;
  var arrayBuffer = new ArrayBuffer(44 + dataSize);
  var view = new DataView(arrayBuffer);
  var writeString = function writeString(offset, string) {
    for (var i = 0; i < string.length; i++) {
      view.setUint8(offset + i, string.charCodeAt(i));
    }
  };
  // RIFF container header
  writeString(0, "RIFF");
  view.setUint32(4, 36 + dataSize, true);
  writeString(8, "WAVE");
  // "fmt " sub-chunk (16-byte PCM layout)
  writeString(12, "fmt ");
  view.setUint32(16, 16, true);
  view.setUint16(20, format, true);
  view.setUint16(22, numChannels, true);
  view.setUint32(24, sampleRate, true);
  view.setUint32(28, sampleRate * blockAlign, true); // byte rate
  view.setUint16(32, blockAlign, true);
  view.setUint16(34, bitDepth, true);
  // "data" sub-chunk
  writeString(36, "data");
  view.setUint32(40, dataSize, true);
  // Fix: fetch each channel's Float32Array once, instead of calling
  // getChannelData(channel) for every single sample (O(samples × channels)
  // redundant lookups in the original inner loop).
  var channelData = [];
  for (var c = 0; c < numChannels; c++) {
    channelData.push(buffer.getChannelData(c));
  }
  var offset = 44;
  for (var i = 0; i < length; i++) {
    for (var channel = 0; channel < numChannels; channel++) {
      var sample = Math.max(-1, Math.min(1, channelData[channel][i]));
      view.setInt16(offset, sample * 0x7fff, true);
      offset += 2;
    }
  }
  return new Blob([arrayBuffer], {
    type: "audio/wav"
  });
}
// Convert Blob to Base64
function blobToBase64(blob) {
  // Read `blob` as a data URL and resolve with just the base64 payload
  // (everything after the "data:<mime>;base64," prefix). Rejects with the
  // FileReader error event on failure.
  return new Promise(function (resolve, reject) {
    var reader = new FileReader();
    reader.onerror = reject;
    reader.onload = function () {
      var dataUrl = reader.result;
      var payload = dataUrl.split(",")[1];
      resolve(payload);
    };
    reader.readAsDataURL(blob);
  });
}
// Convert WebM audio to WAV using Web Audio API.
// Public wrapper; the transpiled coroutine lives in _convertWebMToWav below.
function convertWebMToWav(_x) {
  return _convertWebMToWav.apply(this, arguments);
}
function _convertWebMToWav() {
  // Transpiled async body of convertWebMToWav(webmBlob): decodes the WebM
  // blob with the Web Audio API and resolves with a WAV Blob. Replaces
  // itself with the compiled coroutine on first call (lazy init).
  _convertWebMToWav = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee12(webmBlob) {
    return _regenerator().w(function (_context12) {
      while (1) switch (_context12.n) {
        case 0:
          return _context12.a(2, new Promise(/*#__PURE__*/function () {
            var _ref5 = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee11(resolve, reject) {
              var audioContext, arrayBuffer, audioBuffer, wavBlob, _t10;
              return _regenerator().w(function (_context11) {
                while (1) switch (_context11.p = _context11.n) {
                  case 0:
                    _context11.p = 0;
                    // Create audio context
                    audioContext = new (window.AudioContext || window.webkitAudioContext)(); // Convert blob to array buffer
                    _context11.n = 1;
                    return webmBlob.arrayBuffer();
                  case 1:
                    arrayBuffer = _context11.v;
                    // Decode the compressed audio into raw PCM.
                    _context11.n = 2;
                    return audioContext.decodeAudioData(arrayBuffer);
                  case 2:
                    audioBuffer = _context11.v;
                    // Convert to WAV
                    wavBlob = audioBufferToWav(audioBuffer); // Close audio context
                    _context11.n = 3;
                    return audioContext.close();
                  case 3:
                    resolve(wavBlob);
                    _context11.n = 5;
                    break;
                  case 4:
                    // catch handler for try-range [0, 4]: surface the error.
                    _context11.p = 4;
                    _t10 = _context11.v;
                    reject(_t10);
                  case 5:
                    return _context11.a(2);
                }
              }, _callee11, null, [[0, 4]]);
            }));
            return function (_x3, _x4) {
              return _ref5.apply(this, arguments);
            };
          }()));
      }
    }, _callee12);
  }));
  return _convertWebMToWav.apply(this, arguments);
}
// Check microphone permission status via the Permissions API.
// Public wrapper; the transpiled coroutine lives in
// _checkMicrophonePermission below.
function checkMicrophonePermission() {
  return _checkMicrophonePermission.apply(this, arguments);
} // Canvas Renderer for Alpha Channel Video Rendering
function _checkMicrophonePermission() {
  // Transpiled async body of checkMicrophonePermission(): resolves with
  // { state, supported, message }, where state is 'granted' | 'denied' |
  // 'prompt', or 'unknown' when the Permissions API is unavailable.
  // NOTE(review): navigator.permissions.query({ name: "microphone" }) can
  // reject in some browsers; there is no catch here, so the returned promise
  // may reject — confirm callers handle that.
  _checkMicrophonePermission = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee13() {
    var permissionStatus;
    return _regenerator().w(function (_context13) {
      while (1) switch (_context13.n) {
        case 0:
          // Feature-detect the Permissions API before querying.
          if (navigator.permissions) {
            _context13.n = 1;
            break;
          }
          console.warn("Permissions API not supported in this browser");
          return _context13.a(2, {
            state: "unknown",
            supported: false,
            message: "Permissions API not supported in this browser"
          });
        case 1:
          _context13.n = 2;
          return navigator.permissions.query({
            name: "microphone"
          });
        case 2:
          permissionStatus = _context13.v;
          return _context13.a(2, {
            state: permissionStatus.state,
            // 'granted', 'denied', or 'prompt'
            supported: true,
            message: "Microphone permission is ".concat(permissionStatus.state)
          });
      }
    }, _callee13);
  }));
  return _checkMicrophonePermission.apply(this, arguments);
}
/**
 * Creates a canvas-based renderer that mirrors a <video> element onto a 2D
 * canvas appended to `options.container`. When alpha processing is enabled
 * the source video is treated as a packed color+alpha frame (color stacked
 * above alpha, or side-by-side when `isHorizontal`) and decoded into true
 * transparency each frame.
 *
 * @param {Object} [options]
 * @param {HTMLVideoElement} options.videoElement - Source video (required).
 * @param {HTMLElement} options.container - Parent for the canvas (required).
 * @param {number|string} [options.width="100%"] - CSS width for the canvas.
 * @param {number|string} [options.height="auto"] - CSS height for the canvas.
 * @param {Object} [options.style] - Extra inline styles merged onto the canvas.
 * @param {string} [options.objectFit="contain"] - CSS object-fit behavior.
 * @param {Function} [options.onError] - Receives initialization/draw errors.
 * @param {Object} [options.filters] - Named ImageData -> ImageData filters
 *   applied after alpha decoding.
 * @param {boolean} [options.preserveDrawingBuffer=false]
 * @param {string} [options.className] - Class name applied to the canvas.
 * @param {boolean} [options.enableAlphaProcessing=false]
 * @param {boolean} [options.isHorizontal=false] - Alpha packed side-by-side.
 * @param {number} [options.frameRate=18] - Target FPS for alpha decoding.
 * @returns {Object|null} Renderer API (start/stop/getCanvas/getContext/
 *   isRendering/updateAlphaSettings/applyAllFilters/destroy), or null when
 *   initialization fails.
 */
function createCanvasRenderer() {
  var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
  var videoElement = options.videoElement,
    container = options.container,
    _options$width = options.width,
    width = _options$width === void 0 ? "100%" : _options$width,
    _options$height = options.height,
    height = _options$height === void 0 ? "auto" : _options$height,
    _options$style = options.style,
    style = _options$style === void 0 ? {} : _options$style,
    _options$objectFit = options.objectFit,
    objectFit = _options$objectFit === void 0 ? "contain" : _options$objectFit,
    onError = options.onError,
    _options$filters = options.filters,
    filters = _options$filters === void 0 ? {} : _options$filters,
    _options$preserveDraw = options.preserveDrawingBuffer,
    preserveDrawingBuffer = _options$preserveDraw === void 0 ? false : _options$preserveDraw,
    _options$className = options.className,
    className = _options$className === void 0 ? "" : _options$className;
  // Make these mutable so they can be updated via updateAlphaSettings()
  var enableAlphaProcessing = options.enableAlphaProcessing || false;
  var isHorizontal = options.isHorizontal || false;
  var frameRate = options.frameRate || 18;
  // Internal state
  var canvasElement = null;
  var animationFrameId = null;
  var _isRendering = false;
  var containerElement = container;
  var lastTime = 0;
  // Scratch canvas reused across frames for alpha extraction.
  // (Fix: the original allocated a fresh canvas + 2D context on EVERY frame.)
  var tempCanvas = null;
  var tempCtx = null;
  // Create canvas element for alpha channel rendering
  function createCanvasElement() {
    var canvas = document.createElement("canvas");
    if (className) {
      canvas.className = className;
    }
    var canvasStyle = _objectSpread2({
      width: typeof width === "number" ? "".concat(width, "px") : width,
      height: typeof height === "number" ? "".concat(height, "px") : height,
      maxWidth: "100%",
      objectFit: objectFit,
      display: "block"
    }, style);
    Object.assign(canvas.style, canvasStyle);
    if (preserveDrawingBuffer) {
      // NOTE(review): preserveDrawingBuffer is a WebGL context-creation flag;
      // assigning it on the element has no effect for a 2D canvas. Kept for
      // backward compatibility with callers that read it back — confirm.
      canvas.preserveDrawingBuffer = true;
    }
    return canvas;
  }
  // Decode a top/bottom-packed frame: the top half carries RGB, the bottom
  // half carries the alpha mask (red channel used as alpha).
  function processVerticalAlpha(imageData) {
    var data = imageData.data,
      width = imageData.width,
      height = imageData.height;
    var newImageData = new ImageData(width, height / 2);
    var newData = newImageData.data;
    var halfHeight = height / 2;
    for (var y = 0; y < halfHeight; y++) {
      for (var x = 0; x < width; x++) {
        var colorY = y;
        var colorIndex = (colorY * width + x) * 4;
        var alphaY = colorY + halfHeight;
        var alphaIndex = (alphaY * width + x) * 4;
        var outputIndex = (y * width + x) * 4;
        var r = data[colorIndex];
        var g = data[colorIndex + 1];
        var b = data[colorIndex + 2];
        var alpha = data[alphaIndex];
        newData[outputIndex] = r;
        newData[outputIndex + 1] = g;
        newData[outputIndex + 2] = b;
        newData[outputIndex + 3] = alpha;
      }
    }
    return newImageData;
  }
  // Decode a side-by-side-packed frame: the left half carries RGB, the right
  // half carries the alpha mask (red channel used as alpha).
  function processHorizontalAlpha(imageData) {
    var data = imageData.data,
      width = imageData.width,
      height = imageData.height;
    var newImageData = new ImageData(width / 2, height);
    var newData = newImageData.data;
    var halfWidth = width / 2;
    for (var y = 0; y < height; y++) {
      for (var x = 0; x < halfWidth; x++) {
        var colorX = x;
        var colorY = y;
        var colorIndex = (colorY * width + colorX) * 4;
        var alphaX = colorX + halfWidth;
        var alphaIndex = (colorY * width + alphaX) * 4;
        var outputIndex = (y * halfWidth + x) * 4;
        var r = data[colorIndex];
        var g = data[colorIndex + 1];
        var b = data[colorIndex + 2];
        var alpha = data[alphaIndex];
        newData[outputIndex] = r;
        newData[outputIndex + 1] = g;
        newData[outputIndex + 2] = b;
        newData[outputIndex + 3] = alpha;
      }
    }
    return newImageData;
  }
  // requestAnimationFrame loop copying video frames onto the canvas,
  // throttled to `frameRate` when alpha processing is active.
  function startRenderLoop() {
    if (!videoElement || !canvasElement) {
      console.warn("Canvas renderer: Missing video or canvas element");
      return;
    }
    var ctx = canvasElement.getContext("2d");
    if (!ctx) {
      var error = new Error("Failed to get 2D context from canvas");
      console.error("Canvas renderer:", error);
      if (onError) onError(error);
      return;
    }
    _isRendering = true;
    function draw(time) {
      if (!_isRendering || !videoElement || !canvasElement) return;
      if (enableAlphaProcessing) {
        if (videoElement.paused || videoElement.ended) {
          // Fix: drop the stale frame id so a later start() can reschedule;
          // the original left it set, which made start() a silent no-op
          // after the video paused/ended (the `animationFrameId === null`
          // guard below never passed again).
          animationFrameId = null;
          return;
        }
        // Frame-rate throttle: alpha decoding is expensive, so skip frames
        // until at least 1000/frameRate ms have elapsed.
        var elapsed = time - lastTime;
        var interval = 1000 / frameRate;
        if (elapsed < interval) {
          if (_isRendering) {
            animationFrameId = requestAnimationFrame(draw);
          }
          return;
        }
        lastTime = time;
      }
      try {
        var targetWidth = videoElement.videoWidth || 0;
        var targetHeight = videoElement.videoHeight || 0;
        if (targetWidth && targetHeight) {
          if (enableAlphaProcessing) {
            // Output is half the packed frame along the packing axis.
            var outputWidth = isHorizontal ? targetWidth / 2 : targetWidth;
            var outputHeight = isHorizontal ? targetHeight : targetHeight / 2;
            if (canvasElement.width !== outputWidth) canvasElement.width = outputWidth;
            if (canvasElement.height !== outputHeight) canvasElement.height = outputHeight;
            if (objectFit === "contain") {
              canvasElement.style.width = typeof width === "number" ? "".concat(width, "px") : width;
              canvasElement.style.height = typeof height === "number" ? "".concat(height, "px") : height;
            }
            ctx.clearRect(0, 0, outputWidth, outputHeight);
            // Lazily create (then reuse) the scratch canvas; willReadFrequently
            // keeps the per-frame getImageData readback fast.
            if (!tempCanvas) {
              tempCanvas = document.createElement("canvas");
              tempCtx = tempCanvas.getContext("2d", { willReadFrequently: true });
            }
            if (tempCanvas.width !== targetWidth) tempCanvas.width = targetWidth;
            if (tempCanvas.height !== targetHeight) tempCanvas.height = targetHeight;
            if (tempCtx) {
              tempCtx.drawImage(videoElement, 0, 0, targetWidth, targetHeight);
              var imageData = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
              var processedImageData = isHorizontal ? processHorizontalAlpha(imageData) : processVerticalAlpha(imageData);
              if (Object.keys(filters).length > 0) {
                processedImageData = rendererAPI.applyAllFilters(processedImageData);
              }
              ctx.putImageData(processedImageData, 0, 0);
            }
          } else {
            // Plain passthrough: blit the video frame directly.
            if (canvasElement.width !== targetWidth) canvasElement.width = targetWidth;
            if (canvasElement.height !== targetHeight) canvasElement.height = targetHeight;
            if (objectFit === "contain") {
              canvasElement.style.width = typeof width === "number" ? "".concat(width, "px") : width;
              canvasElement.style.height = typeof height === "number" ? "".concat(height, "px") : height;
            }
            ctx.clearRect(0, 0, targetWidth, targetHeight);
            ctx.drawImage(videoElement, 0, 0, targetWidth, targetHeight);
          }
        }
      } catch (error) {
        console.error("Canvas renderer draw error:", error);
        if (onError) onError(error);
      }
      if (_isRendering) {
        animationFrameId = requestAnimationFrame(draw);
      }
    }
    // Guard against double-scheduling when start() is called twice.
    if (animationFrameId === null) {
      animationFrameId = requestAnimationFrame(draw);
    }
  }
  function stopRenderLoop() {
    _isRendering = false;
    if (animationFrameId !== null) {
      cancelAnimationFrame(animationFrameId);
      animationFrameId = null;
    }
  }
  // Initialize canvas renderer: validate inputs, create and attach the canvas.
  function initialize() {
    try {
      if (!videoElement) {
        throw new Error("Video element is required");
      }
      if (!containerElement) {
        throw new Error("Container element is required");
      }
      canvasElement = createCanvasElement();
      containerElement.appendChild(canvasElement);
      return true;
    } catch (error) {
      console.error("Failed to initialize canvas renderer:", error);
      if (onError) onError(error);
      return false;
    }
  }
  // Public API
  var rendererAPI = {
    // Begin the render loop; returns false when not initialized.
    start: function start() {
      if (!canvasElement) {
        console.warn("Canvas renderer not initialized");
        return false;
      }
      startRenderLoop();
      return true;
    },
    stop: function stop() {
      stopRenderLoop();
    },
    getCanvas: function getCanvas() {
      return canvasElement;
    },
    getContext: function getContext() {
      return canvasElement ? canvasElement.getContext("2d") : null;
    },
    isRendering: function isRendering() {
      return _isRendering;
    },
    // Reconfigure alpha decoding at runtime; invalid values are ignored.
    updateAlphaSettings: function updateAlphaSettings(newEnableAlphaProcessing, newIsHorizontal, newFrameRate) {
      if (typeof newEnableAlphaProcessing === "boolean") {
        enableAlphaProcessing = newEnableAlphaProcessing;
      }
      if (typeof newIsHorizontal === "boolean") {
        isHorizontal = newIsHorizontal;
      }
      if (typeof newFrameRate === "number" && newFrameRate > 0) {
        frameRate = newFrameRate;
      }
    },
    // Run each registered filter over the frame; a failing filter is logged
    // and skipped, and a filter returning a falsy value leaves the frame as-is.
    applyAllFilters: function applyAllFilters(imageData) {
      var result = imageData;
      for (var _i = 0, _Object$entries = Object.entries(filters); _i < _Object$entries.length; _i++) {
        var _Object$entries$_i = _slicedToArray(_Object$entries[_i], 2),
          name = _Object$entries$_i[0],
          filterFunc = _Object$entries$_i[1];
        try {
          result = filterFunc(result) || result;
        } catch (error) {
          console.warn("Filter \"".concat(name, "\" failed:"), error);
        }
      }
      return result;
    },
    // Stop rendering, detach the canvas, and release internal references.
    destroy: function destroy() {
      stopRenderLoop();
      if (canvasElement && containerElement) {
        try {
          containerElement.removeChild(canvasElement);
        } catch (error) {
          console.warn("Failed to remove canvas from container:", error);
        }
      }
      canvasElement = null;
      containerElement = null;
      tempCanvas = null;
      tempCtx = null;
    }
  };
  if (initialize()) {
    return rendererAPI;
  } else {
    return null;
  }
}
/**
* Creates a simplified Flamflix Player instance
* @param {string|HTMLElement} container - CSS selector or DOM element for the container
* @param {Object} options - Configuration options
* @param {string} options.micPosition - Position: 'top-left', 'top-right', 'bottom-left', 'bottom-right', 'center' (default: 'top-right')
* @param {string} options.micMode - Microphone mode: 'toggle', 'push-to-talk', 'hybrid' (default: 'push-to-talk')
* @param {boolean} options.useDefaultMic - Whether to show the default microphone button (default: true)
* @returns {Object} Player instance with essential control methods
*/
function createFlamflixPlayer(container) {
var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
var _options$micPosition = options.micPosition,
micPosition = _options$micPosition === void 0 ? "top-right" : _options$micPosition,
_options$micMode = options.micMode,
micMode = _options$micMode === void 0 ? "push-to-talk" : _options$micMode,
_options$useDefaultMi = options.useDefaultMic,
useDefaultMic = _options$useDefaultMi === void 0 ? true : _options$useDefaultMi;
// Control whether to show default mic button
// Internal state
var videoElement = null;
var canvasRenderer = null;
var player = null;
var _isLoading = true;
var containerElement = null;
// Audio recording state
var mediaRecorder = null;
var audioChunks = [];
var _isRecording = false;
var audioStream = null;
var talkButton = null;
var isProcessingAudio = false;
var recordedAudioBlob = null; // Store recorded audio for playback
var recordedAudioURL = null; // URL for playing recorded audio
// Microphone mode state
var currentMicMode = micMode;
var hybridPressStartTime = 0;
var hybridMinPressTime = 500; // 500ms minimum for push-to-talk mode
var hybridTimeout = null;
var pushToTalkPressStartTime = 0;
var pushToTalkMinPressTime = 200; // 200ms minimum for push-to-talk mode
var pushToTalkTimeout = null;
// Video configuration state
var currentSrc = null;
var currentIsAlpha = false;
var currentIsHorizontal = false;
var isIdleVideo = false;
var idleVideoConfig = null;
var welcomeVideoConfig = null;
// Get container element
function getContainerElement() {
if (typeof container === "string") {
return document.querySelector(container);
} else if (container instanceof Element) {
return container;
}
if (container && container.current) {
return container.current;
}
return null;
}
// Audio recording functions
function startAudioRecording() {
return _startAudioRecording.apply(this, arguments);
}
function _startAudioRecording() {
_startAudioRecording = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee5() {
var microphonePermission, _options, _t6;
return _regenerator().w(function (_context5) {
while (1) switch (_context5.p = _context5.n) {
case 0:
if (!isProcessingAudio) {
_context5.n = 1;
break;
}
return _context5.a(2);
case 1:
_context5.p = 1;
if (isIdleVideo) {
_context5.n = 2;
break;
}
_context5.n = 2;
return playIdleVideo();
case 2:
isProcessingAudio = false;
updateTalkButton(true);
_context5.n = 3;
return checkMicrophonePermission();
case 3:
microphonePermission = _context5.v;
_context5.n = 4;
return navigator.mediaDevices.getUserMedia({
audio: {
echoCancellation: true,
noiseSuppression: true,
autoGainControl: true
}
});
case 4:
audioStream = _context5.v;
if (!(microphonePermission.state !== "granted")) {
_context5.n = 5;
break;
}
updateTalkButton(false);
return _context5.a(2);
case 5:
_context5.n = 6;
return new Promise(function (resolve) {
return setTimeout(resolve, 100);
});
case 6:
_options = {
mimeType: "audio/webm;codecs=opus"
};
mediaRecorder = new MediaRecorder(audioStream, _options);
audioChunks = [];
mediaRecorder.ondataavailable = function (event) {
if (event.data && event.data.size > 0) {
audioChunks.push(event.data);
}
};
mediaRecorder.onstop = /*#__PURE__*/_asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee4() {
var mimeType, audioBlob, videoConfig, startCanvasWhenPlaying, _handleVideoPlay, canvas, _handleResponseVideoEnd, hasPlayed, playWhenReady, _t2, _t3, _t4, _t5;
return _regenerator().w(function (_context4) {
while (1) switch (_context4.p = _context4.n) {
case 0:
if (!isProcessingAudio) {
_context4.n = 1;
break;
}
return _context4.a(2);
case 1:
if (!(!audioChunks || audioChunks.length === 0)) {
_context4.n = 3;
break;
}
console.warn("No audio chunks recorded");
_isRecording = false;
isProcessingAudio = false;
updateTalkButton(false);
// Return to idle video
_context4.n = 2;
return playIdleVideo();
case 2:
return _context4.a(2);
case 3:
isProcessingAudio = true;
updateTalkButton(true);
_context4.p = 4;
mimeType = mediaRecorder.mimeType || "audio/webm";
audioBlob = new Blob(audioChunks, {
type: mimeType
});
recordedAudioBlob = audioBlob;
if (recordedAudioURL) {
URL.revokeObjectURL(recordedAudioURL);
}
recordedAudioURL = URL.createObjectURL(audioBlob);
_context4.p = 5;
_context4.n = 6;
return processAudioToVideo(audioBlob);
case 6:
videoConfig = _context4.v;
currentSrc = videoConfig.src;
currentIsAlpha = videoConfig.isalpha;
currentIsHorizontal = videoConfig.isHorizontal;
videoConfig.streamType || "hls";
isIdleVideo = false;
if (!(player && currentSrc)) {
_context4.n = 18;
break;
}
_context4.p = 7;
_context4.n = 8;
return player.load(currentSrc);
case 8:
_context4.n = 11;
break;
case 9:
_context4.p = 9;
_t2 = _context4.v;
console.error("Error loading video:", _t2);
// Return to idle video on error
_context4.n = 10;
return playIdleVideo();
case 10:
throw _t2;
case 11:
// Disable loop for response videos
videoElement.loop = false;
if (currentIsAlpha) {
videoElement.style.display = "none";
videoElement.muted = false;
if (videoElement.readyState < 2) {
videoElement.addEventListener("loadeddata", function () {}, {
once: true
});
}
} else {
videoElement.style.display = "block";
videoElement.muted = false;
}
if (currentIsAlpha) {
videoElement.style.display = "none";
// Create canvas renderer only if it doesn't exist
if (!canvasRenderer) {
canvasRenderer = initCanvasRenderer();
}
if (canvasRenderer) {
// Stop any existing rendering first
canvasRenderer.stop();
// Update alpha settings for the new video
canvasRenderer.updateAlphaSettings(true, currentIsHorizontal, 18);
// Wait for video to actually start playing before starting canvas
startCanvasWhenPlaying = function startCanvasWhenPlaying() {
var canvas = canvasRenderer.getCanvas();
if (canvas) {
canvas.style.display = "block";
}
canvasRenderer.start();
}; // Listen for when video actually starts playing
_handleVideoPlay = function handleVideoPlay() {
videoElement.removeEventListener("play", _handleVideoPlay);
// Small delay to ensure video is actually rendering frames
setTimeout(startCanvasWhenPlaying, 100);
};
videoElement.addEventListener("play", _handleVideoPlay, {
once: true
});
}
} else {
videoElement.style.display = "block";
// For non-alpha videos, just stop the canvas renderer but don't destroy it
if (canvasRenderer) {
canvasRenderer.stop();
canvas = canvasRenderer.getCanvas();
if (canvas) {
canvas.style.display = "none";
}
}
}
// Add event listener to return to idle video when response video ends
_handleResponseVideoEnd = /*#__PURE__*/function () {
var _ref2 = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee2() {
return _regenerator().w(function (_context2) {
while (1) switch (_context2.n) {
case 0:
videoElement.removeEventListener("ended", _handleResponseVideoEnd);
_context2.n = 1;
return playIdleVideo();
case 1:
return _context2.a(2);
}
}, _callee2);
}));
return function handleResponseVideoEnd() {
return _ref2.apply(this, arguments);
};
}();
videoElement.addEventListener("ended", _handleResponseVideoEnd, {
once: true
});
_context4.p = 12;
if (!(videoElement.readyState >= 2)) {
_context4.n = 14;
break;
}
_context4.n = 13;
return videoElement.play();
case 13:
_context4.n = 15;
break;
case 14:
hasPlayed = false;
playWhenReady = /*#__PURE__*/function () {
var _ref3 = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee3() {
var _t;
return _regenerator().w(function (_context3) {
while (1) switch (_context3.p = _context3.n) {
case 0:
if (!hasPlayed) {
_context3.n = 1;
break;
}
return _context3.a(2);
case 1:
hasPlayed = true;
_context3.p = 2;
_context3.n = 3;
return videoElement.play();
case 3:
_context3.n = 5;
break;
case 4:
_context3.p = 4;
_t = _context3.v;
console.warn("Autoplay failed after ready event:", _t);
case 5:
return _context3.a(2);
}
}, _callee3, null, [[2, 4]]);
}));
return function playWhenReady() {
return _ref3.apply(this, arguments);
};
}();
videoElement.addEventListener("canplay", playWhenReady, {
once: true
});
case 15:
_context4.n = 17;
break;
case 16:
_context4.p = 16;
_t3 = _context4.v;
console.warn("Autoplay failed:", _t3);
case 17:
_context4.n = 20;
break;
case 18:
console.error("No player instance or video source available");
// Return to idle video on error
_context4.n = 19;
return playIdleVideo();
case 19:
throw new Error("Missing player instance or video source");
case 20:
_context4.n = 22;
break;
case 21:
_context4.p = 21;
_t4 = _context4.v;
console.error("❌ Failed to process audio:", _t4);
// Return to idle video on error instead of showing error
_context4.n = 22;
return playIdleVideo();
case 22:
_context4.n = 24;
break;
case 23:
_context4.p = 23;
_t5 = _context4.v;
console.error("❌ Failed to process recorded audio:", _t5);
// Return to idle video on error
_context4.n = 24;
return playIdleVideo();
case 24:
_context4.p = 24;
isProcessingAudio = false;
updateTalkButton(false);
return _context4.f(24);
case 25:
if (audioStream) {
audioStream.getTracks().forEach(function (track) {
return track.stop();
});
audioStream = null;
}
_isRecording = false;
updateTalkButton(false);
case 26:
return _context4.a(2);
}
}, _callee4, null, [[12, 16], [7, 9], [5, 21], [4, 23, 24, 25]]);
}));
updateTalkButton();
mediaRecorder.start(100);
_isRecording = true;
updateTalkButton();
_context5.n = 8;
break;
case 7:
_context5.p = 7;
_t6 = _context5.v;
console.error("Error accessing microphone:", _t6);
showError("Could not access microphone. Please check permissions.");
updateTalkButton();
_isRecording = false;
isProcessingAudio = false; // Reset processing flag on error
updateTalkButton();
case 8:
return _context5.a(2);
}
}, _callee5, null, [[1, 7]]);
}));
return _startAudioRecording.apply(this, arguments);
}
function stopAudioRecording() {
try {
if (mediaRecorder && mediaRecorder.state === "recording") {
mediaRecorder.stop();
}
} catch (error) {
console.warn("Error stopping media recorder:", error);
}
// Clean up audio stream if it exists
if (audioStream) {
try {
audioStream.getTracks().forEach(function (track) {
return track.stop();
});
} catch (error) {
console.warn("Error stopping audio stream tracks:", error);
}
audioStream = null;
}
_isRecording = false;
updateTalkButton();
}
/**
 * Refresh the talk button's visual state.
 *
 * Three mutually exclusive states, checked in priority order:
 *   1. loading   - explicitly requested via the argument, or while audio is
 *                  being processed: button disabled, spinner shown
 *   2. recording - blue glowing background with the mic icon
 *   3. idle      - dark background with a grey mic icon
 *
 * No-op when the button has not been created yet.
 */
function updateTalkButton() {
  var showLoading = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false;
  if (!talkButton) return;
  // The same lucide mic icon is used for both recording and idle states.
  var micIcon = "<svg xmlns=\"http://www.w3.org/2000/svg\" width=\"24\" height=\"24\" viewBox=\"0 0 24 24\" fill=\"none\" stroke=\"currentColor\" stroke-width=\"2\" stroke-linecap=\"round\" stroke-linejoin=\"round\" class=\"lucide lucide-mic-icon lucide-mic\"><path d=\"M12 19v3\"/><path d=\"M19 10v2a7 7 0 0 1-14 0v-2\"/><rect x=\"9\" y=\"2\" width=\"6\" height=\"13\" rx=\"3\"/></svg>";
  if (showLoading || isProcessingAudio) {
    // Loading: disable interaction and swap the icon for a CSS spinner.
    talkButton.disabled = true;
    talkButton.style.color = "#ffffff";
    talkButton.innerHTML = "\n <div style=\"\n width: 20px;\n height: 20px;\n border: 2px solid rgba(255, 255, 255, 0.3);\n border-top: 2px solid #ffffff;\n border-radius: 50%;\n animation: spin 1s linear infinite;\n \"></div>\n ";
    return;
  }
  if (_isRecording) {
    // Recording: blue highlight with a glow; the button stays clickable.
    talkButton.disabled = false;
    Object.assign(talkButton.style, {
      backgroundColor: "#007bff",
      color: "#ffffff",
      boxShadow: "0 0 20px rgba(0, 123, 255, 0.6), 0 0 40px rgba(0, 123, 255, 0.4)",
      transformOrigin: "center"
    });
    talkButton.innerHTML = micIcon;
    return;
  }
  // Idle: default dark styling, any running animation cancelled.
  talkButton.disabled = false;
  Object.assign(talkButton.style, {
    backgroundColor: "#000000",
    color: "#6b7280",
    boxShadow: "0 2px 8px rgba(0,0,0,0.2)",
    animation: "none",
    transformOrigin: "center"
  });
  talkButton.innerHTML = micIcon;
}
/**
 * Inject the keyframe styles used by the talk button (pulse glow and
 * spinner rotation, plus a rule making the mic SVG non-interactive)
 * into the document head.
 *
 * Idempotent: the <style> element carries a fixed id, so a second call
 * finds the existing element and returns without adding a duplicate.
 */
function addPulseAnimation() {
  var STYLE_ID = "flamflix-pulse-animation";
  if (document.getElementById(STYLE_ID)) return;
  var styleEl = document.createElement("style");
  styleEl.id = STYLE_ID;
  styleEl.textContent = "\n @keyframes pulse {\n 0% {\n transform: scale(1);\n box-shadow: 0 0 20px rgba(0, 123, 255, 0.6), 0 0 40px rgba(0, 123, 255, 0.4);\n }\n 50% {\n transform: scale(1.05);\n box-shadow: 0 0 30px rgba(0, 123, 255, 0.8), 0 0 60px rgba(0, 123, 255, 0.6);\n }\n 100% {\n transform: scale(1);\n box-shadow: 0 0 20px rgba(0, 123, 255, 0.6), 0 0 40px rgba(0, 123, 255, 0.4);\n }\n }\n @keyframes spin {\n 0% { transform: rotate(0deg); }\n 100% { transform: rotate(360deg); }\n }\n \n /* Make SVG elements non-interactive */\n #flamflix-talk-button svg,\n #flamflix-talk-button .lucide-mic-icon {\n pointer-events: none;\n }\n ";
  document.head.appendChild(styleEl);
}
function createTalkButton() {
addPulseAnimation();
var button = document.createElement("button");
button.id = "flamflix-talk-button";
// Fixed mic button configuration - no customization allowed
var micConfig = {
html: "<svg xmlns=\"http://www.w3.org/2000/svg\" width=\"24\" height=\"24\" viewBox=\"0 0 24 24\" fill=\"none\" stroke=\"currentColor\" stroke-width=\"2\" stroke-linecap=\"round\" stroke-linejoin=\"round\" class=\"lucide lucide-mic-icon lucide-mic\"><path d=\"M12 19v3\"/><path d=\"M19 10v2a7 7 0 0 1-14 0v-2\"/><rect x=\"9\" y=\"2\" width=\"6\" height=\"13\" rx=\"3\"/></svg>",
style: {
position: "absolute",
top: "10px",
right: "10px",
zIndex: "20",
width: "48px",
height: "48px",
padding: "0",
backgroundColor: "#000000",
color: "#6b7280",
// gray color for mic
border: "none",
borderRadius: "100%",
cursor: "pointer",
display: "flex",
alignItems: "center",
justifyContent: "center",
transition: "all 0.3s ease",
boxShadow: "0 2px 8px rgba(0,0,0,0.2)",
transformOrigin: "center",
transform: "translate(0, 0)"
},
position: micPosition
};
{
button.innerHTML = micConfig.html;
}
Object.assign(button.style, micConfig.style);
var positionMap = {
"top-left": {
top: "10px",
left: "10px",
right: "auto",
bottom: "auto"
},
"top-right": {
top: "10px",
right: "10px",
left: "auto",
bottom: "auto"
},
"bottom-left": {
bottom: "10px",
left: "10px",
right: "auto",
top: "auto"
},
"bottom-right": {
bottom: "10px",
right: "10px",
left: "auto",
top: "auto"
},
center: {
top: "50%",
left: "50%",
transform: "translate(-50%, -50%)",
right: "auto",
bottom: "auto"
},
"bottom-center": {
bottom: "10px",
left: "50%",
transform: "translate(-50%, 0)",
right: "auto",
top: "auto"
},
"top-center": {
top: "10px",
left: "50%",
transform: "translate(-50%, 0)",
right: "auto",
bottom: "auto"
},
"left-center": {
left: "10px",
top: "50%",
transform: "translate(0, -50%)",
right: "auto",
bottom: "auto"
},
"right-center": {
right: "10px",
top: "50%",
transform: "translate(0, -50%)",
left: "auto",
bottom: "auto"
}
};
if (positionMap[micConfig.position]) {
Object.assign(button.style, positionMap[micConfig.position]);
}
if (currentMicMode === "toggle") {
if (button.disabled) {
return;
}
button.addEventListener("click", function () {
if (_isRecording) {
stopAudioRecording();
} else {
startAudioRecording();
}
});
} else if (currentMicMode === "push-to-talk") {
if (button.disabled) {
return;
}
button.addEventListener("mousedown", function (e) {
e.preventDefault();
pushToTalkPressStartTime = Date.now();
if (!_isRecording && !isProcessingAudio) {
pushToTalkTimeout = setTimeout(function () {
if (!_isRecording && !isProcessingAudio) {
startAudioRecording();
}
}, pushToTalkMinPressTime);
}
});
// Add mouseup to both button and document to catch mouse release anywhere
var _handleMouseUp = function handleMouseUp(e) {
e.preventDefault();
if (button.disabled) {
return;
}
var pressDuration = Date.now() - pushToTalkPressStartTime;
if (pushToTalkTimeout) {
clearTimeout(pushToTalkTimeout);
pushToTalkTimeout = null;
}
if (pressDuration >= pushToTalkMinPressTime && _isRecording) {
stopAudioRecording();
}
// Remove the global listener after handling
document.removeEventListener("mouseup", _handleMouseUp);
};
button.addEventListener("mouseup", _handleMouseUp);
// Add global mouseup listener when recording starts
button.addEventListener("mousedown", function () {
document.addEventListener("mouseup", _handleMouseUp);
});
// Touch events for mobile
var _handleTouchEnd = function handleTouchEnd(e) {
// Check if button is disabled FIRST, before preventDefault
if (button.disabled) {
return;
}
e.preventDefault();
var pressDuration = Date.now() - pushToTalkPressStartTime;
// Clear timeout if touch released
if (pushToTalkTimeout) {
clearTimeout(pushToTalkTimeout);
pushToTalkTimeout = null;
}
if (pressDuration >= pushToTalkMinPressTime && _isR