gxjs-ic-ai-assistant-vue3
Version:
An AI assistant frontend application built with Dify.
1,430 lines (1,429 loc) • 2.93 MB
JavaScript
// esbuild/Vite-generated helpers that emulate native class fields: a field is
// installed with "define" semantics (enumerable/configurable/writable) when it
// does not already exist on the object, otherwise plain assignment is used.
var __defProp = Object.defineProperty;
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
// Coerces non-symbol keys to strings before defining the field.
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
var _a2, _b, _c, _d, _e2, _f, _g;
import { defineComponent, inject, ref as ref$1, useSlots, useAttrs, shallowRef, getCurrentInstance, toRaw, createVNode, mergeProps, Fragment, isVNode, reactive, onMounted, watch, onUnmounted, Teleport, nextTick, isRef, cloneVNode, onActivated, provide, h as h$8, createApp, onBeforeUnmount, Text, createTextVNode, computed, unref, createElementBlock, openBlock, createElementVNode, resolveComponent, createBlock, withCtx, normalizeClass, renderSlot, createStaticVNode, createCommentVNode, toDisplayString } from "vue";
class DifyController {
  /**
   * SSE streaming client for a Dify chat workflow. Sends a query, consumes
   * the streaming response, and reports progress through optional callbacks.
   * @param {{difyKey:string,inputs:object,url:string,user:string,difyType?:string}} [config]
   */
  constructor(config) {
    // Workflow type ("jk" or "szy"); selects how the SSE stream is parsed.
    this.difyType = "jk";
    // Lifecycle state: 0 idle, 1 streaming, 2 finished, 3 retrying, 4 error.
    this.status = 0;
    // When status is 4 (error) this explains why (1 = timeout after retries).
    this.normalStatus = 0;
    // Conversation id returned by Dify, reused for follow-up questions.
    this.conversationId = "";
    // Custom Dify inputs; only forwarded when difyType is "szy".
    this.inputs = {};
    // Last question text, kept so a timeout retry can resend it.
    this.cacheQueryText = "";
    // Accumulated answer text (used by the "szy" prefixed protocol).
    this.cacheAnswerText = "";
    // Dify API key, sent as a Bearer token.
    this.difyKey = "";
    // Request URL.
    this.url = "";
    // User identifier forwarded to Dify.
    this.user = "";
    // Whether the current workflow run has ended.
    this.isFlowEnd = false;
    // Whether the current message has ended.
    this.isMessageEnd = false;
    // Remaining timeout-retry budget (reset to 10 by init()).
    this.reconnectTime = 0;
    // First 5 characters of a "szy" answer ("text:" or "code:"); null until known.
    this.prefix = null;
    // AbortController of the in-flight request; null when idle.
    this.abortController = null;
    // Reader of the current response body stream; null when idle.
    this.reader = null;
    // Timer id of the request timeout; null when idle.
    this.requestId = null;
    // Optional callbacks:
    this.onFlowStart = null;   // workflow started, Dify begins answering
    this.onSseEnd = null;      // the whole SSE exchange has finished
    this.onMessageEnd = null;  // message_end received (gets retriever_resources)
    this.onUpdate = null;      // incremental answer text
    this.onError = null;       // error code: 1 timeout, 2 empty stream, 3 fetch failed, 5 bad response, 6 workflow error
    this.onReconnect = null;   // a timeout retry is starting; caller should reset its state
    this.onCode = null;        // full "code:" payload, delivered once the stream ends
    if (config) {
      this.setConfig(config);
    }
  }
  /**
   * Applies connection settings and resets the controller state.
   * @param {{difyKey:string,inputs:object,url:string,user:string,difyType?:string}} config
   */
  setConfig(config) {
    this.difyKey = config.difyKey;
    this.inputs = config.inputs;
    this.url = config.url;
    this.user = config.user;
    this.difyType = config.difyType || "jk";
    this.init();
  }
  /**
   * 设置dify自定义参数 — sets custom Dify workflow inputs.
   * @param inputs { customOptions: any }
   */
  setInputs(inputs) {
    this.inputs = inputs;
  }
  /**
   * Returns the current lifecycle status (see constructor for codes).
   */
  getStatus() {
    return this.status;
  }
  /**
   * Resets state; the only way back to the idle (0) state.
   * While retrying (status 3) the conversation id and the cached question are
   * kept so the request can be replayed; otherwise everything is cleared and
   * the retry budget is restored to 10.
   * @private
   */
  init() {
    if (this.status === 1) {
      throw new Error("init failed , dify flow is processing");
    } else if (this.status === 3) {
      this.cacheAnswerText = "";
      this.status = 0;
      this.normalStatus = 0;
      this.prefix = null;
    } else {
      this.conversationId = "";
      this.cacheAnswerText = "";
      this.cacheQueryText = "";
      this.status = 0;
      this.normalStatus = 0;
      this.reconnectTime = 10;
      this.prefix = null;
    }
  }
  /**
   * Sends a question to Dify and streams the SSE response.
   * @param {string} queryText
   * @throws if a request is already in flight (status 1)
   */
  async send(queryText) {
    if (this.status === 1) {
      throw new Error("send failed, dify is working");
    }
    if (this.status !== 0) {
      this.init();
    }
    this.status = 1;
    this.cacheQueryText = queryText;
    const stringData = JSON.stringify({
      inputs: this.difyType === "szy" ? this.inputs : {},
      query: queryText,
      response_mode: "streaming",
      conversation_id: this.conversationId,
      user: this.user
    });
    const difyOptions = {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: "Bearer " + this.difyKey
      },
      body: stringData
    };
    try {
      this.abortController = new AbortController();
      // Abort the request if nothing completes within 10 minutes.
      const id = setTimeout(() => {
        if (this.abortController) this.abortController.abort();
      }, 600 * 1e3);
      this.requestId = id;
      const response = await fetch(this.url, { ...difyOptions, signal: this.abortController.signal });
      if (!response.ok) {
        this.status = 4;
        this.handleError(5, "error, response from Dify is not ok");
        return;
      }
      const stream = response.body;
      if (!stream) {
        this.status = 4;
        if (this.onError) {
          this.onError(
            2
            /* EMPTY_STREAM */
          );
        }
        throw new Error("error, did not get stream from dify");
      }
      const reader = stream.getReader();
      this.reader = reader;
      // One decoder for the whole stream, used with {stream:true}, so that
      // multi-byte UTF-8 sequences split across network reads decode
      // correctly (a fresh decode() per read would corrupt them).
      const decoder = new TextDecoder("utf-8");
      let currentData = "";
      while (true) {
        const { done, value } = await reader.read();
        if (done) {
          this.abortController = null;
          this.reader = null;
          clearTimeout(id);
          break;
        }
        currentData += decoder.decode(value, { stream: true });
        // SSE events are separated by a blank line; the trailing element of
        // the split is an incomplete event (possibly ""), kept for next read.
        const chunks = currentData.split("\n\n");
        const lastChunk = chunks.pop();
        currentData = lastChunk !== void 0 ? lastChunk : "";
        for (const chunk of chunks) {
          this.parseDataChunk(chunk);
        }
      }
    } catch (e2) {
      // Don't leave the 10-minute abort timer running after a failure.
      if (this.requestId) {
        clearTimeout(this.requestId);
        this.requestId = null;
      }
      if ((e2 == null ? void 0 : e2.name) === "AbortError") {
        this.handleError(1, "error, request to Dify timeout");
        return;
      }
      this.status = 4;
      if (this.onError) {
        this.onError(
          3
          /* FETCH_ERROR */
        );
      }
      this.abortController = null;
      console.error(e2);
    }
  }
  /**
   * Parses one complete SSE event chunk and dispatches by event type.
   * @param {string} chunk
   * @private
   */
  parseDataChunk(chunk) {
    if (this.difyType === "jk") {
      // NOTE(review): for "jk" the chunk goes through parseEventDataChunk AND
      // still falls through to the generic "data: " handling below; a "jk"
      // chunk that starts with "data: " would be processed twice — confirm
      // the "jk" stream always carries an "event:" line first.
      this.parseEventDataChunk(chunk);
    }
    if (chunk.startsWith("data: ")) {
      const jsonData = chunk.slice(6);
      try {
        const data2 = JSON.parse(jsonData);
        if (data2.event === "workflow_started") {
          if (this.onFlowStart) {
            this.onFlowStart();
          }
        } else if (data2.event === "message") {
          console.log("=======收到message消息========", data2);
          // Remember (or update) the conversation id for follow-up sends.
          if (!this.conversationId || this.conversationId !== data2.conversation_id) {
            this.conversationId = data2.conversation_id;
          }
          if (data2.answer) {
            if (this.difyType === "szy") {
              this.handleSzyMessagesText(data2.answer);
            } else {
              this.onUpdate && this.onUpdate(data2.answer);
            }
          }
        } else if (data2.event === "workflow_finished") {
          // Intentionally ignored; message_end drives completion.
        } else if (data2.event === "message_end") {
          this.isMessageEnd = true;
          this.isFlowEnd = true;
          this.onMessageEnd && this.onMessageEnd(data2.metadata.retriever_resources);
          this.handleSseEnd();
        } else if (data2.event === "error") {
          this.onError && this.onError(
            6
            /* WORKFLOW_ERROR */
          );
        }
      } catch (e2) {
        this.status = 4;
        throw new Error("an error occur when flowing");
      }
    }
  }
  /**
   * Line-oriented SSE parser for the "jk" stream: handles "event:" and
   * "data:" lines separately and releases the reader once message_end
   * arrives.
   * @param {string} chunk
   * @private
   */
  parseEventDataChunk(chunk) {
    const lines = chunk.split(/\r?\n/);
    let currentEvent = "";
    for (const raw of lines) {
      const line = raw.trim();
      if (!line) continue;
      if (line.startsWith("event:")) {
        currentEvent = line.slice(6).trim();
        console.log("=== 检测到事件 ===", currentEvent);
      }
      if (line.startsWith("data:")) {
        const jsonStr = line.slice(5).trim();
        // Skip empty payloads and the sentinel emitted at stream end.
        if (!jsonStr || jsonStr === "" || jsonStr === "[Stream Ended]") {
          continue;
        }
        try {
          const data2 = JSON.parse(jsonStr);
          if (data2.event === "workflow_started") {
            if (this.onFlowStart) {
              this.onFlowStart();
            }
          } else if (data2.event === "message") {
            if (!this.conversationId || this.conversationId !== data2.conversation_id) {
              this.conversationId = data2.conversation_id;
            }
            if (data2.answer && data2.answer !== "[Stream Ended]") {
              if (this.difyType === "szy") {
                this.handleSzyMessagesText(data2.answer);
              } else {
                this.onUpdate && this.onUpdate(data2.answer);
              }
            }
          } else if (data2.event === "workflow_finished") {
            // Intentionally ignored; message_end drives completion.
          } else if (data2.event === "message_end") {
            this.isMessageEnd = true;
            this.isFlowEnd = true;
            this.onMessageEnd && this.onMessageEnd((data2.metadata && data2.metadata.retriever_resources) || "");
            // Release the stream resources; the server is done talking.
            try {
              if (this.reader) {
                try {
                  this.reader.cancel();
                } catch (e2) {
                  console.error("取消reader失败:", e2);
                }
              }
              this.reader = null;
              this.abortController = null;
            } catch (e2) {
              console.error("取消reader失败:", e2);
            }
            if (this.requestId) {
              clearTimeout(this.requestId);
              this.requestId = null;
            }
            this.handleSseEnd();
          } else if (data2.event === "error") {
            this.onError && this.onError(
              6
              /* WORKFLOW_ERROR */
            );
          }
        } catch (e2) {
          console.error("=== JSON解析失败 ===", e2, "原始数据:", jsonStr);
        }
      }
    }
  }
  /**
   * Handles "szy" answer text. The first 5 characters of the full answer
   * form a routing prefix: "text:" content is streamed to onUpdate as it
   * arrives, "code:" content is buffered and handed to onCode at the end.
   * Chunks can be shorter than the prefix, so it is assembled incrementally.
   * @param {string} text2
   * @private
   */
  handleSzyMessagesText(text2) {
    const prefixLength = this.prefix ? this.prefix.length : 0;
    const textLength = text2.length;
    if (prefixLength < 5) {
      if (prefixLength + textLength <= 5) {
        // Still inside the 5-char prefix: keep accumulating it.
        this.prefix = this.prefix === null ? text2 : this.prefix + text2;
      } else {
        // This chunk completes the prefix; the remainder is answer text.
        if (this.prefix === null) {
          this.prefix = text2.slice(0, 5);
        } else {
          this.prefix += text2.slice(0, 5 - prefixLength);
        }
        this.cacheAnswerText = text2.slice(5 - prefixLength);
        if (this.prefix === "text:") {
          this.onUpdate && this.onUpdate(this.cacheAnswerText);
        }
      }
    } else {
      this.cacheAnswerText += text2;
      if (this.prefix === "text:") {
        this.onUpdate && this.onUpdate(text2);
      }
    }
  }
  /**
   * Finalizes the exchange once both the workflow and the message have
   * ended: delivers buffered "code:" payloads via onCode, otherwise fires
   * onSseEnd, then resets the per-exchange flags.
   */
  handleSseEnd() {
    if (this.isFlowEnd && this.isMessageEnd) {
      if (this.prefix && this.prefix === "code:") {
        this.onCode && this.onCode(this.cacheAnswerText);
      } else {
        this.onSseEnd && this.onSseEnd();
      }
      this.status = 2;
      this.prefix = null;
      this.isFlowEnd = false;
      this.isMessageEnd = false;
    }
  }
  /**
   * Handles recoverable errors. Type 1 (timeout) resends the cached query
   * while the retry budget lasts; once exhausted it enters the error state
   * and throws.
   * @param {number} errorType
   * @param {string} error - description (currently unused, kept for callers)
   * @private
   */
  handleError(errorType, error) {
    if (errorType === 1) {
      if (this.reconnectTime > 0) {
        console.warn("第" + (10 - this.reconnectTime) + "次重试");
        this.reconnectTime--;
        this.status = 3;
        if (this.onReconnect) {
          this.onReconnect();
        }
        this.send(this.cacheQueryText);
      } else {
        this.status = 4;
        this.normalStatus = 1;
        throw new Error("Timeout");
      }
    }
  }
  /**
   * Aborts any in-flight request and detaches all callbacks.
   */
  exit() {
    if (this.abortController) {
      this.abortController.abort();
    }
    this.destroyCallback();
  }
  /**
   * Drops every callback reference so no further events reach the caller.
   */
  destroyCallback() {
    this.onFlowStart = null;
    this.onSseEnd = null;
    // Fix: onMessageEnd was previously left set after exit(), so a stale
    // callback could still fire on a late message_end.
    this.onMessageEnd = null;
    this.onUpdate = null;
    this.onError = null;
    this.onReconnect = null;
    this.onCode = null;
  }
  /**
   * Aborts the in-flight request and resets back to the idle state.
   */
  abort() {
    if (this.abortController) {
      this.abortController.abort();
    }
    this.status = 2;
    this.init();
  }
}
// Resolves the global object across environments (modern runtimes, browser,
// Node, workers) for the CommonJS interop shims below.
var commonjsGlobal = typeof globalThis !== "undefined" ? globalThis : typeof window !== "undefined" ? window : typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : {};
function getDefaultExportFromCjs(x2) {
  // Unwrap the `default` export of a transpiled ES module; plain CommonJS
  // exports (and primitives/null) pass through untouched.
  const isEsModuleWithDefault = x2 && x2.__esModule && Object.prototype.hasOwnProperty.call(x2, "default");
  if (isEsModuleWithDefault) {
    return x2["default"];
  }
  return x2;
}
// CommonJS module shim that receives the bundled Recorder core library below.
var recorderCore = { exports: {} };
(function(module) {
(function(factory) {
var browser = typeof window == "object" && !!window.document;
var win = browser ? window : Object;
factory(win, browser);
if (module.exports) {
module.exports = win.Recorder;
}
})(function(Export, isBrowser) {
var NOOP = function() {
};
var IsNum = function(v2) {
return typeof v2 == "number";
};
var ToJson = function(v2) {
return JSON.stringify(v2);
};
var Recorder2 = function(set2) {
return new initFn(set2);
};
var LM = Recorder2.LM = "2025-01-11 09:28";
var GitUrl = "https://github.com/xiangyuecn/Recorder";
var RecTxt = "Recorder";
var getUserMediaTxt = "getUserMedia";
var srcSampleRateTxt = "srcSampleRate";
var sampleRateTxt = "sampleRate";
var bitRateTxt = "bitRate";
var CatchTxt = "catch";
var WRec2 = Export[RecTxt];
if (WRec2 && WRec2.LM == LM) {
WRec2.CLog(WRec2.i18n.$T("K8zP::重复导入{1}", 0, RecTxt), 3);
return;
}
Recorder2.IsOpen = function() {
var stream = Recorder2.Stream;
if (stream) {
var tracks = Tracks_(stream), track = tracks[0];
if (track) {
var state = track.readyState;
return state == "live" || state == track.LIVE;
}
}
return false;
};
Recorder2.BufferSize = 4096;
Recorder2.Destroy = function() {
CLog(RecTxt + " Destroy");
Disconnect();
for (var k2 in DestroyList) {
DestroyList[k2]();
}
};
var DestroyList = {};
Recorder2.BindDestroy = function(key, call) {
DestroyList[key] = call;
};
Recorder2.Support = function() {
if (!isBrowser) return false;
var scope = navigator.mediaDevices || {};
if (!scope[getUserMediaTxt]) {
scope = navigator;
scope[getUserMediaTxt] || (scope[getUserMediaTxt] = scope.webkitGetUserMedia || scope.mozGetUserMedia || scope.msGetUserMedia);
}
if (!scope[getUserMediaTxt]) {
return false;
}
Recorder2.Scope = scope;
if (!Recorder2.GetContext()) {
return false;
}
return true;
};
Recorder2.GetContext = function(tryNew) {
if (!isBrowser) return null;
var AC = window.AudioContext;
if (!AC) {
AC = window.webkitAudioContext;
}
if (!AC) {
return null;
}
var ctx = Recorder2.Ctx, isNew = 0;
if (!ctx) {
ctx = Recorder2.Ctx = new AC();
isNew = 1;
Recorder2.NewCtxs = Recorder2.NewCtxs || [];
Recorder2.BindDestroy("Ctx", function() {
var ctx2 = Recorder2.Ctx;
if (ctx2 && ctx2.close) {
CloseCtx(ctx2);
Recorder2.Ctx = 0;
}
var arr = Recorder2.NewCtxs;
Recorder2.NewCtxs = [];
for (var i2 = 0; i2 < arr.length; i2++) CloseCtx(arr[i2]);
});
}
if (tryNew && ctx.close) {
if (!isNew) {
if (!ctx._useC) CloseCtx(ctx);
ctx = new AC();
}
ctx._useC = 1;
Recorder2.NewCtxs.push(ctx);
}
return ctx;
};
Recorder2.CloseNewCtx = function(ctx) {
if (ctx && ctx.close) {
CloseCtx(ctx);
var arr = Recorder2.NewCtxs || [], L2 = arr.length;
for (var i2 = 0; i2 < arr.length; i2++) {
if (arr[i2] == ctx) {
arr.splice(i2, 1);
break;
}
}
CLog($T("mSxV::剩{1}个GetContext未close", 0, L2 + "-1=" + arr.length), arr.length ? 3 : 0);
}
};
var CloseCtx = function(ctx) {
if (ctx && ctx.close && !ctx._isC) {
ctx._isC = 1;
if (ctx.state != "closed") {
try {
ctx.close();
} catch (e2) {
CLog("ctx close err", 1, e2);
}
}
}
};
var ResumeCtx = Recorder2.ResumeCtx = function(ctx, check, True, False) {
var isEnd = 0, isBind = 0, isLsSC = 0, runC = 0, EL = "EventListener", Tag = "ResumeCtx ";
var end = function(err, ok2) {
if (isBind) {
bind();
}
if (!isEnd) {
isEnd = 1;
err && False(err, runC);
ok2 && True(runC);
}
if (ok2) {
if (!ctx._LsSC && ctx["add" + EL]) ctx["add" + EL]("statechange", run2);
ctx._LsSC = 1;
isLsSC = 1;
}
};
var bind = function(add) {
if (add && isBind) return;
isBind = add ? 1 : 0;
var types2 = ["focus", "mousedown", "mouseup", "touchstart", "touchend"];
for (var i2 = 0; i2 < types2.length; i2++)
window[(add ? "add" : "remove") + EL](types2[i2], run2, true);
};
var run2 = function() {
var sVal = ctx.state, spEnd = CtxSpEnd(sVal);
if (!isEnd && !check(spEnd ? ++runC : runC)) return end();
if (spEnd) {
if (isLsSC) CLog(Tag + "sc " + sVal, 3);
bind(1);
ctx.resume().then(function() {
if (isLsSC) CLog(Tag + "sc " + ctx.state);
end(0, 1);
})[CatchTxt](function(e2) {
CLog(Tag + "error", 1, e2);
if (!CtxSpEnd(ctx.state)) {
end(e2.message || "error");
}
});
} else if (sVal == "closed") {
if (isLsSC && !ctx._isC) CLog(Tag + "sc " + sVal, 1);
end("ctx closed");
} else {
end(0, 1);
}
};
run2();
};
var CtxSpEnd = Recorder2.CtxSpEnd = function(v2) {
return v2 == "suspended" || v2 == "interrupted";
};
var CtxState = function(ctx) {
var v2 = ctx.state, msg = "ctx.state=" + v2;
if (CtxSpEnd(v2)) msg += $T("nMIy::(注意:ctx不是running状态,rec.open和start至少要有一个在用户操作(触摸、点击等)时进行调用,否则将在rec.start时尝试进行ctx.resume,可能会产生兼容性问题(仅iOS),请参阅文档中runningContext配置)");
return msg;
};
var ConnectEnableWebM = "ConnectEnableWebM";
Recorder2[ConnectEnableWebM] = true;
var ConnectEnableWorklet = "ConnectEnableWorklet";
Recorder2[ConnectEnableWorklet] = false;
var Connect = function(streamStore) {
var bufferSize = streamStore.BufferSize || Recorder2.BufferSize;
var stream = streamStore.Stream;
var ctx = stream._c, ctxSR = ctx[sampleRateTxt], srChunk = {};
var tracks = Tracks_(stream), track = tracks[0], trackSet = null, tsMsg = "";
if (track && track.getSettings) {
trackSet = track.getSettings();
var trackSR = trackSet[sampleRateTxt];
if (trackSR && trackSR != ctxSR) {
tsMsg = $T("eS8i::Stream的采样率{1}不等于{2},将进行采样率转换(注意:音质不会变好甚至可能变差),主要在移动端未禁用回声消除时会产生此现象,浏览器有回声消除时可能只会返回16k采样率的音频数据,", 0, trackSR, ctxSR);
}
}
stream._ts = trackSet;
CLog(tsMsg + "Stream TrackSet: " + ToJson(trackSet), tsMsg ? 3 : 0);
var mediaConn = function(node2) {
var media = stream._m = ctx.createMediaStreamSource(stream);
var ctxDest = ctx.destination, cmsdTxt = "createMediaStreamDestination";
if (ctx[cmsdTxt]) {
ctxDest = stream._d = ctx[cmsdTxt]();
}
media.connect(node2);
node2.connect(ctxDest);
};
var isWebM, isWorklet, badInt, webMTips = "";
var calls = stream._call;
var onReceive = function(float32Arr, arrSR) {
for (var k0 in calls) {
if (arrSR != ctxSR) {
srChunk.index = 0;
srChunk = Recorder2.SampleData([float32Arr], arrSR, ctxSR, srChunk, { _sum: 1 });
var pcm = srChunk.data;
var sum = srChunk._sum;
} else {
srChunk = {};
var size = float32Arr.length;
var pcm = new Int16Array(size);
var sum = 0;
for (var j2 = 0; j2 < size; j2++) {
var s2 = Math.max(-1, Math.min(1, float32Arr[j2]));
s2 = s2 < 0 ? s2 * 32768 : s2 * 32767;
pcm[j2] = s2;
sum += Math.abs(s2);
}
}
for (var k2 in calls) {
calls[k2](pcm, sum);
}
return;
}
};
var scriptProcessor = "ScriptProcessor";
var audioWorklet = "audioWorklet";
var recAudioWorklet = RecTxt + " " + audioWorklet;
var RecProc = "RecProc";
var MediaRecorderTxt = "MediaRecorder";
var MRWebMPCM = MediaRecorderTxt + ".WebM.PCM";
var oldFn = ctx.createScriptProcessor || ctx.createJavaScriptNode;
var oldIsBest = $T("ZGlf::。由于{1}内部1秒375次回调,在移动端可能会有性能问题导致回调丢失录音变短,PC端无影响,暂不建议开启{1}。", 0, audioWorklet);
var oldScript = function() {
isWorklet = stream.isWorklet = false;
_Disconn_n(stream);
CLog($T("7TU0::Connect采用老的{1},", 0, scriptProcessor) + i18n.get(
Recorder2[ConnectEnableWorklet] ? $T("JwCL::但已设置{1}尝试启用{2}", 2) : $T("VGjB::可设置{1}尝试启用{2}", 2),
[RecTxt + "." + ConnectEnableWorklet + "=true", audioWorklet]
) + webMTips + oldIsBest, 3);
var process2 = stream._p = oldFn.call(ctx, bufferSize, 1, 1);
mediaConn(process2);
process2.onaudioprocess = function(e2) {
var arr = e2.inputBuffer.getChannelData(0);
onReceive(arr, ctxSR);
};
};
var connWorklet = function() {
isWebM = stream.isWebM = false;
_Disconn_r(stream);
isWorklet = stream.isWorklet = !oldFn || Recorder2[ConnectEnableWorklet];
var AwNode = window.AudioWorkletNode;
if (!(isWorklet && ctx[audioWorklet] && AwNode)) {
oldScript();
return;
}
var clazzUrl = function() {
var xf = function(f2) {
return f2.toString().replace(/^function|DEL_/g, "").replace(/\$RA/g, recAudioWorklet);
};
var clazz = "class " + RecProc + " extends AudioWorkletProcessor{";
clazz += "constructor " + xf(function(option) {
DEL_super(option);
var This = this, bufferSize2 = option.processorOptions.bufferSize;
This.bufferSize = bufferSize2;
This.buffer = new Float32Array(bufferSize2 * 2);
This.pos = 0;
This.port.onmessage = function(e2) {
if (e2.data.kill) {
This.kill = true;
$C.log("$RA kill call");
}
};
$C.log("$RA .ctor call", option);
});
clazz += "process " + xf(function(input, b2, c2) {
var This = this, bufferSize2 = This.bufferSize;
var buffer = This.buffer, pos = This.pos;
input = (input[0] || [])[0] || [];
if (input.length) {
buffer.set(input, pos);
pos += input.length;
var len = ~~(pos / bufferSize2) * bufferSize2;
if (len) {
this.port.postMessage({ val: buffer.slice(0, len) });
var more = buffer.subarray(len, pos);
buffer = new Float32Array(bufferSize2 * 2);
buffer.set(more);
pos = more.length;
This.buffer = buffer;
}
This.pos = pos;
}
return !This.kill;
});
clazz += '}try{registerProcessor("' + RecProc + '", ' + RecProc + ')}catch(e){$C.error("' + recAudioWorklet + ' Reg Error",e)}';
clazz = clazz.replace(/\$C\./g, "console.");
return "data:text/javascript;base64," + btoa(unescape(encodeURIComponent(clazz)));
};
var awNext = function() {
return isWorklet && stream._na;
};
var nodeAlive = stream._na = function() {
if (badInt !== "") {
clearTimeout(badInt);
badInt = setTimeout(function() {
badInt = 0;
if (awNext()) {
CLog($T("MxX1::{1}未返回任何音频,恢复使用{2}", 0, audioWorklet, scriptProcessor), 3);
oldFn && oldScript();
}
}, 500);
}
};
var createNode = function() {
if (!awNext()) return;
var node2 = stream._n = new AwNode(ctx, RecProc, {
processorOptions: { bufferSize }
});
mediaConn(node2);
node2.port.onmessage = function(e2) {
if (badInt) {
clearTimeout(badInt);
badInt = "";
}
if (awNext()) {
onReceive(e2.data.val, ctxSR);
} else if (!isWorklet) {
CLog($T("XUap::{1}多余回调", 0, audioWorklet), 3);
}
};
CLog($T("yOta::Connect采用{1},设置{2}可恢复老式{3}", 0, audioWorklet, RecTxt + "." + ConnectEnableWorklet + "=false", scriptProcessor) + webMTips + oldIsBest, 3);
};
var ctxOK = function() {
if (!awNext()) return;
if (ctx[RecProc]) {
createNode();
return;
}
var url = clazzUrl();
ctx[audioWorklet].addModule(url).then(function(e2) {
if (!awNext()) return;
ctx[RecProc] = 1;
createNode();
if (badInt) {
nodeAlive();
}
})[CatchTxt](function(e2) {
CLog(audioWorklet + ".addModule Error", 1, e2);
awNext() && oldScript();
});
};
ResumeCtx(ctx, function() {
return awNext();
}, ctxOK, ctxOK);
};
var connWebM = function() {
var MR = window[MediaRecorderTxt];
var onData = "ondataavailable";
var webmType = "audio/webm; codecs=pcm";
isWebM = stream.isWebM = Recorder2[ConnectEnableWebM];
var supportMR = MR && onData in MR.prototype && MR.isTypeSupported(webmType);
webMTips = supportMR ? "" : $T("VwPd::(此浏览器不支持{1})", 0, MRWebMPCM);
if (!isWebM || !supportMR) {
connWorklet();
return;
}
var mrNext = function() {
return isWebM && stream._ra;
};
stream._ra = function() {
if (badInt !== "") {
clearTimeout(badInt);
badInt = setTimeout(function() {
if (mrNext()) {
CLog($T("vHnb::{1}未返回任何音频,降级使用{2}", 0, MediaRecorderTxt, audioWorklet), 3);
connWorklet();
}
}, 500);
}
};
var mrSet = Object.assign({ mimeType: webmType }, Recorder2.ConnectWebMOptions);
var mr = stream._r = new MR(stream, mrSet);
var webmData = stream._rd = {};
mr[onData] = function(e2) {
var reader = new FileReader();
reader.onloadend = function() {
if (mrNext()) {
var f32arr = WebM_Extract(new Uint8Array(reader.result), webmData);
if (!f32arr) return;
if (f32arr == -1) {
connWorklet();
return;
}
if (badInt) {
clearTimeout(badInt);
badInt = "";
}
onReceive(f32arr, webmData.webmSR);
} else if (!isWebM) {
CLog($T("O9P7::{1}多余回调", 0, MediaRecorderTxt), 3);
}
};
reader.readAsArrayBuffer(e2.data);
};
try {
mr.start(~~(bufferSize / 48));
CLog($T("LMEm::Connect采用{1},设置{2}可恢复使用{3}或老式{4}", 0, MRWebMPCM, RecTxt + "." + ConnectEnableWebM + "=false", audioWorklet, scriptProcessor));
} catch (e2) {
CLog("mr start err", 1, e2);
connWorklet();
}
};
connWebM();
};
var ConnAlive = function(stream) {
if (stream._na) stream._na();
if (stream._ra) stream._ra();
};
var _Disconn_n = function(stream) {
stream._na = null;
if (stream._n) {
stream._n.port.postMessage({ kill: true });
stream._n.disconnect();
stream._n = null;
}
};
var _Disconn_r = function(stream) {
stream._ra = null;
if (stream._r) {
try {
stream._r.stop();
} catch (e2) {
CLog("mr stop err", 1, e2);
}
stream._r = null;
}
};
var Disconnect = function(streamStore) {
streamStore = streamStore || Recorder2;
var isGlobal = streamStore == Recorder2;
var stream = streamStore.Stream;
if (stream) {
if (stream._m) {
stream._m.disconnect();
stream._m = null;
}
if (!stream._RC && stream._c) {
Recorder2.CloseNewCtx(stream._c);
}
stream._RC = null;
stream._c = null;
if (stream._d) {
StopS_(stream._d.stream);
stream._d = null;
}
if (stream._p) {
stream._p.disconnect();
stream._p.onaudioprocess = stream._p = null;
}
_Disconn_n(stream);
_Disconn_r(stream);
if (isGlobal) {
StopS_(stream);
}
}
streamStore.Stream = 0;
};
// Stops every audio/video track of a MediaStream, releasing the microphone;
// also calls the legacy stream.stop() when the implementation provides it.
var StopS_ = Recorder2.StopS_ = function(stream) {
var tracks = Tracks_(stream);
for (var i2 = 0; i2 < tracks.length; i2++) {
var track = tracks[i2];
// Guarded call: very old implementations may lack track.stop.
track.stop && track.stop();
}
// Legacy API: some old browsers exposed stop() on the stream itself.
stream.stop && stream.stop();
};
// Collects all audio tracks followed by all video tracks of a stream into a
// single array, supporting both the modern getAudioTracks/getVideoTracks API
// and the legacy audioTracks/videoTracks properties.
var Tracks_ = function(stream) {
var arr1 = 0, arr2 = 0, arr = [];
if (stream.getAudioTracks) {
arr1 = stream.getAudioTracks();
arr2 = stream.getVideoTracks();
}
// Fallback for old implementations without the getter methods.
if (!arr1) {
arr1 = stream.audioTracks;
arr2 = stream.videoTracks;
}
for (var i2 = 0, L2 = arr1 ? arr1.length : 0; i2 < L2; i2++) arr.push(arr1[i2]);
for (var i2 = 0, L2 = arr2 ? arr2.length : 0; i2 < L2; i2++) arr.push(arr2[i2]);
return arr;
};
Recorder2.SampleData = function(pcmDatas, pcmSampleRate, newSampleRate, prevChunkInfo, option) {
var Txt = "SampleData";
prevChunkInfo || (prevChunkInfo = {});
var index2 = prevChunkInfo.index || 0;
var offset2 = prevChunkInfo.offset || 0;
var raisePrev = prevChunkInfo.raisePrev || 0;
var filter2 = prevChunkInfo.filter;
if (filter2 && filter2.fn && (filter2.sr && filter2.sr != pcmSampleRate || filter2.srn && filter2.srn != newSampleRate)) {
filter2 = null;
CLog($T("d48C::{1}的filter采样率变了,重设滤波", 0, Txt), 3);
}
if (!filter2) {
if (newSampleRate <= pcmSampleRate) {
var freq = newSampleRate > pcmSampleRate * 3 / 4 ? 0 : newSampleRate / 2 * 3 / 4;
filter2 = { fn: freq ? Recorder2.IIRFilter(true, pcmSampleRate, freq) : 0 };
} else {
var freq = pcmSampleRate > newSampleRate * 3 / 4 ? 0 : pcmSampleRate / 2 * 3 / 4;
filter2 = { fn: freq ? Recorder2.IIRFilter(true, newSampleRate, freq) : 0 };
}
}
filter2.sr = pcmSampleRate;
filter2.srn = newSampleRate;
var filterFn = filter2.fn;
var frameNext = prevChunkInfo.frameNext || [];
option || (option = {});
var frameSize = option.frameSize || 1;
if (option.frameType) {
frameSize = option.frameType == "mp3" ? 1152 : 1;
}
var useSum = option._sum, _sum = 0;
var nLen = pcmDatas.length;
if (index2 > nLen + 1) {
CLog($T("tlbC::{1}似乎传入了未重置chunk {2}", 0, Txt, index2 + ">" + nLen), 3);
}
var size = 0;
for (var i2 = index2; i2 < nLen; i2++) {
size += pcmDatas[i2].length;
}
var step = pcmSampleRate / newSampleRate;
if (step > 1) {
size = Math.max(0, size - Math.floor(offset2));
size = Math.floor(size / step);
} else if (step < 1) {
var raiseStep = 1 / step;
size = Math.floor(size * raiseStep);
}
size += frameNext.length;
var res = new Int16Array(size);
var idx = 0;
for (var i2 = 0; i2 < frameNext.length; i2++) {
res[idx] = frameNext[i2];
idx++;
}
for (; index2 < nLen; index2++) {
var o2 = pcmDatas[index2], isF32 = o2 instanceof Float32Array;
var i2 = offset2, il = o2.length;
var F2 = filterFn && filterFn.Embed, F1 = 0, F22 = 0, Fx = 0, Fy = 0;
if (step < 1) {
var idx1 = idx + i2, prev = raisePrev;
for (var i0 = 0; i0 < il; i0++) {
var oVal = o2[i0];
if (isF32) {
oVal = Math.max(-1, Math.min(1, oVal));
oVal = oVal < 0 ? oVal * 32768 : oVal * 32767;
}
var pos = Math.floor(idx1);
idx1 += raiseStep;
var end = Math.floor(idx1);
var n2 = (oVal - prev) / (end - pos);
for (var j2 = 1; pos < end; pos++, j2++) {
var s2 = Math.floor(prev + j2 * n2);
if (F2) {
Fx = s2;
Fy = F2.b0 * Fx + F2.b1 * F2.x1 + F2.b0 * F2.x2 - F2.a1 * F2.y1 - F2.a2 * F2.y2;
F2.x2 = F2.x1;
F2.x1 = Fx;
F2.y2 = F2.y1;
F2.y1 = Fy;
s2 = Fy;
} else {
s2 = filterFn ? filterFn(s2) : s2;
}
if (s2 > 32767) s2 = 32767;
else if (s2 < -32768) s2 = -32768;
if (useSum) _sum += Math.abs(s2);
res[pos] = s2;
idx++;
}
prev = raisePrev = oVal;
i2 += raiseStep;
}
offset2 = i2 % 1;
continue;
}
for (var i0 = 0, i22 = 0; i0 < il; i0++, i22++) {
if (i22 < il) {
var oVal = o2[i22];
if (isF32) {
oVal = Math.max(-1, Math.min(1, oVal));
oVal = oVal < 0 ? oVal * 32768 : oVal * 32767;
}
if (F2) {
Fx = oVal;
Fy = F2.b0 * Fx + F2.b1 * F2.x1 + F2.b0 * F2.x2 - F2.a1 * F2.y1 - F2.a2 * F2.y2;
F2.x2 = F2.x1;
F2.x1 = Fx;
F2.y2 = F2.y1;
F2.y1 = Fy;
} else {
Fy = filterFn ? filterFn(oVal) : oVal;
}
}
F1 = F22;
F22 = Fy;
if (i22 == 0) {
i0--;
continue;
}
var before = Math.floor(i2);
if (i0 != before) continue;
var after = Math.ceil(i2);
var atPoint = i2 - before;
var beforeVal = F1;
var afterVal = after < il ? F22 : beforeVal;
var val = beforeVal + (afterVal - beforeVal) * atPoint;
if (val > 32767) val = 32767;
else if (val < -32768) val = -32768;
if (useSum) _sum += Math.abs(val);
res[idx] = val;
idx++;
i2 += step;
}
offset2 = Math.max(0, i2 - il);
}
if (step < 1 && idx + 1 == size) {
size--;
res = new Int16Array(res.buffer.slice(0, size * 2));
}
if (idx - 1 != size && idx != size) CLog(Txt + " idx:" + idx + " != size:" + size, 3);
frameNext = null;
var frameNextSize = size % frameSize;
if (frameNextSize > 0) {
var u8Pos = (size - frameNextSize) * 2;
frameNext = new Int16Array(res.buffer.slice(u8Pos));
res = new Int16Array(res.buffer.slice(0, u8Pos));
}
var obj = {
index: index2,
offset: offset2,
raisePrev,
filter: filter2,
frameNext,
sampleRate: newSampleRate,
data: res
};
if (useSum) obj._sum = _sum;
return obj;
};
// Builds a single-channel biquad IIR filter closure: low-pass when useLowPass
// is true, otherwise high-pass, with cutoff `freq` (Hz) at the given
// sampleRate. The coefficients follow the standard biquad cookbook form with
// alpha = sin(w)/2 — NOTE(review): that corresponds to Q = 0.5; confirm the
// intended resonance.
Recorder2.IIRFilter = function(useLowPass, sampleRate, freq) {
var ov = 2 * Math.PI * freq / sampleRate;
var sn = Math.sin(ov);
var cs = Math.cos(ov);
var alpha = sn / 2;
var a0 = 1 + alpha;
// a1/a2 are pre-normalized by a0, as are b0/b1 below.
var a1 = -2 * cs / a0;
var a2 = (1 - alpha) / a0;
if (useLowPass) {
var b0 = (1 - cs) / 2 / a0;
var b1 = (1 - cs) / a0;
} else {
var b0 = (1 + cs) / 2 / a0;
var b1 = -(1 + cs) / a0;
}
// Filter state: x1/x2 previous inputs, y1/y22 previous outputs.
var x1 = 0, x2 = 0, y2 = 0, y1 = 0, y22 = 0;
var fn2 = function(x3) {
// Direct-form difference equation; b2 equals b0 for both filter types here.
y2 = b0 * x3 + b1 * x1 + b0 * x2 - a1 * y1 - a2 * y22;
x2 = x1;
x1 = x3;
y22 = y1;
y1 = y2;
return y2;
};
// Embed exposes raw coefficients + state so hot loops (see SampleData) can
// inline the difference equation instead of calling fn2 per sample.
fn2.Embed = { x1: 0, x2: 0, y1: 0, y2: 0, b0, b1, a1, a2 };
return fn2;
};
// Maps a PCM chunk's summed absolute amplitude to a 0-100 volume level.
// Below an average amplitude of 1251 the level grows linearly (roughly 0-10);
// above that it follows a log10 curve clamped to [0, 100].
Recorder2.PowerLevel = function(pcmAbsSum, pcmLength) {
  var power = pcmAbsSum / pcmLength || 0;
  if (power < 1251) {
    return Math.round(power / 1250 * 10);
  }
  var curved = (1 + Math.log(power / 1e4) / Math.log(10)) * 100;
  return Math.round(Math.min(100, Math.max(0, curved)));
};
// Converts a peak 16-bit sample magnitude (0..32767) to dBFS, clamped to
// [-100, 0]; a floor of 0.1 avoids log(0).
Recorder2.PowerDBFS = function(maxSample) {
  var Pref = 32767;
  var clamped = Math.min(Math.max(0.1, maxSample || 0), Pref);
  var db = 20 * Math.log(clamped / Pref) / Math.log(10);
  return Math.max(-100, Math.round(db));
};
Recorder2.CLog = function(msg, err) {
if (typeof console != "object") return;
var now2 = /* @__PURE__ */ new Date();
var t2 = ("0" + now2.getMinutes()).substr(-2) + ":" + ("0" + now2.getSeconds()).substr(-2) + "." + ("00" + now2.getMilliseconds()).substr(-3);
var recID = this && this.envIn && this.envCheck && this.id;
var arr = ["[" + t2 + " " + RecTxt + (recID ? ":" + recID : "") + "]" + msg];
var a2 = arguments, cwe = Recorder2.CLog;
var i2 = 2, fn2 = cwe.log || console.log;
if (IsNum(err)) {
fn2 = err == 1 ? cwe.error || console.error : err == 3 ? cwe.warn || console.warn : fn2;
} else {
i2 = 1;
}
for (; i2 < a2.length; i2++) {
arr.push(a2[i2]);
}
if (IsLoser) {
fn2 && fn2("[IsLoser]" + arr[0], arr.length > 1 ? arr : "");
} else {
fn2.apply(console, arr);
}
};
var CLog = function() {
Recorder2.CLog.apply(this, arguments);
};
var IsLoser = true;
try {
IsLoser = !console.log.apply;
} catch (e2) {
}
var ID = 0;
function initFn(set2) {
var This = this;
This.id = ++ID;
Traffic();
var o2 = {
type: "mp3",
onProcess: NOOP
//fn(buffers,powerLevel,bufferDuration,bufferSampleRate,newBufferIdx,asyncEnd) buffers=[[Int16,...],...]:缓冲的PCM数据,为从开始录音到现在的所有pcm片段;powerLevel:当前缓冲的音量级别0-100,bufferDuration:已缓冲时长,bufferSampleRate:缓冲使用的采样率(当type支持边录边转码(Worker)时,此采样率和设置的采样率相同,否则不一定相同);newBufferIdx:本次回调新增的buffer起始索引;asyncEnd:fn() 如果onProcess是异步的(返回值为true时),处理完成时需要调用此回调,如果不是异步的请忽略此参数,此方法回调时必须是真异步(不能真异步时需用setTimeout包裹)。onProcess返回值:如果返回true代表开启异步模式,在某些大量运算的场合异步是必须的,必须在异步处理完成时调用asyncEnd(不能真异步时需用setTimeout包裹),在onProcess执行后新增的buffer会全部替换成空数组,因此本回调开头应立即将newBufferIdx到本次回调结尾位置的buffer全部保存到另外一个数组内,处理完成后写回buffers中本次回调的结尾位置。
//*******高级设置******
//,sourceStream:MediaStream Object
//可选直接提供一个媒体流,从这个流中录制、实时处理音频数据(当前Recorder实例独享此流);不提供时为普通的麦克风录音,由getUserMedia提供音频流(所有Recorder实例共享同一个流)
//比如:audio、video标签dom节点的captureStream方法(实验特性,不同浏览器支持程度不高)返回的流;WebRTC中的remote流;自己创建的流等
//注意:流内必须至少存在一条音轨(Audio Track),比如audio标签必须等待到可以开始播放后才会有音轨,否则open会失败
//,runningContext:AudioContext
//可选提供一个state为running状态的AudioContext对象(ctx);默认会在rec.open时自动创建一个新的ctx,无用户操作(触摸、点击等)时调用rec.open的ctx.state可能为suspended,会在rec.start时尝试进行ctx.resume,如果也无用户操作ctx.resume可能不会恢复成running状态(目前仅iOS上有此兼容性问题),导致无法去读取媒体流,这时请提前在用户操作时调用Recorder.GetContext(true)来得到一个running状态AudioContext(用完需调用CloseNewCtx(ctx)关闭)
//,audioTrackSet:{ deviceId:"",groupId:"", autoGainControl:true, echoCancellation:true, noiseSuppression:true }
//普通麦克风录音时getUserMedia方法的audio配置参数,比如指定设备id,回声消除、降噪开关;注意:提供的任何配置值都不一定会生效
//由于麦克风是全局共享的,所以新配置后需要close掉以前的再重新open
//同样可配置videoTrackSet,更多参考: https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackConstraints
//,disableEnvInFix:false 内部参数,禁用设备卡顿时音频输入丢失补偿功能
//,takeoffEncodeChunk:NOOP //fn(chunkBytes) chunkBytes=[Uint8,...]:实时编码环境下接管编码器输出,当编码器实时编码出一块有效的二进制音频数据时实时回调此方法;参数为二进制的Uint8Array,就是编码出来的音频数据片段,所有的chunkBytes拼接在一起即为完整音频。本实现的想法最初由QQ2543775048提出
//当提供此回调方法时,将接管编码器的数据输出,编码器内部将放弃存储生成的音频数据;如果当前编码器或环境不支持实时编码处理,将在open时直接走fail逻辑
//因此提供此回调后调用stop方法将无法获得有效的音频数据,因为编码器内没有音频数据,因此stop时返回的blob将是一个字节长度为0的blob
//大部分录音格式编码器都支持实时编码(边录边转码),比如mp3格式:会实时的将编码出来的mp3片段通过此方法回调,所有的chunkBytes拼接到一起即为完整的mp3,此种拼接的结果比mock方法实时生成的音质更加,因为天然避免了首尾的静默
//不支持实时编码的录音格式不可以提供此回调(wav格式不支持,因为wav文件头中需要提供文件最终长度),提供了将在open时直接走fail逻辑
};
for (var k2 in set2) {
o2[k2] = set2[k2];
}
This.set = o2;
var vB = o2[bitRateTxt], vS = o2[sampleRateTxt];
if (vB && !IsNum(vB) || vS && !IsNum(vS)) {
This.CLog($T.G("IllegalArgs-1", [$T("VtS4::{1}和{2}必须是数值", 0, sampleRateTxt, bitRateTxt)]), 1, set2);
}
o2[bitRateTxt] = +vB || 16;
o2[sampleRateTxt] = +vS || 16e3;
This.state = 0;
This._S = 9;
This.Sync = { O: 9, C: 9 };
}
// Global open/close lock counters shared by every Recorder instance that
// records from the common microphone stream. Each open() increments O and
// each real close() increments C; instances compare their captured values
// against these to detect concurrent open/close calls. (Per-instance
// counterparts are created in the constructor for sourceStream recording.)
Recorder2.Sync = { /*open*/ O: 9, /*close*/ C: 9 };
Recorder2.prototype = initFn.prototype = {
CLog,
_streamStore: function() {
if (this.set.sourceStream) {
return this;
} else {
return Recorder2;
}
},
_streamGet: function() {
return this._streamStore().Stream;
},
_streamCtx: function() {
var m2 = this._streamGet();
return m2 && m2._c;
},
// Open the recording source. True: success callback; False: fn(errMsg,
// isUserNotAllow) failure callback. Either records from set.sourceStream
// (exclusive to this instance) or requests the shared microphone via
// getUserMedia. Uses the store's Sync lock counters to detect concurrent
// open/close calls; on permission-style getUserMedia failures it retries
// once with echo cancellation / noise suppression / auto gain disabled.
open: function(True, False) {
var This = this, set2 = This.set, streamStore = This._streamStore(), newCtx = 0;
True = True || NOOP;
// Shared failure path: logs, closes the AudioContext this call created
// (if any), then forwards to the caller's failure callback.
var failCall = function(errMsg, isUserNotAllow) {
isUserNotAllow = !!isUserNotAllow;
This.CLog($T("5tWi::录音open失败:") + errMsg + ",isUserNotAllow:" + isUserNotAllow, 1);
if (newCtx) Recorder2.CloseNewCtx(newCtx);
False && False(errMsg, isUserNotAllow);
};
This._streamTag = getUserMediaTxt;
// Shared success path; clears _SO (the state snapshot taken below).
var ok2 = function() {
This.CLog("open ok, id:" + This.id + " stream:" + This._streamTag);
True();
This._SO = 0;
};
// Take the open/close lock: bump the open counter and remember both
// counters so lockFail can tell whether a newer open() or a close()
// happened while the async getUserMedia request was in flight.
var Lock = streamStore.Sync;
var lockOpen = ++Lock.O, lockClose = Lock.C;
This._O = This._O_ = lockOpen;
This._SO = This._S;
// Returns true (and fails the open) if this open was superseded:
// either close() ran (Lock.C changed / _O cleared) or another open()
// took the lock (Lock.O advanced past ours).
var lockFail = function() {
if (lockClose != Lock.C || !This._O) {
var err = $T("dFm8::open被取消");
if (lockOpen == Lock.O) {
// We still hold the newest open lock: a close interleaved; undo it.
This.close();
} else {
err = $T("VtJO::open被中断");
}
failCall(err);
return true;
}
};
// Environment gates: must be a browser, and envCheck (type-specific
// encoder support etc.) must pass.
if (!isBrowser) {
failCall($T.G("NonBrowser-1", ["open"]) + $T("EMJq::,可尝试使用RecordApp解决方案") + "(" + GitUrl + "/tree/master/app-support-sample)");
return;
}
var checkMsg = This.envCheck({ envName: "H5", canProcess: true });
if (checkMsg) {
failCall($T("A5bm::不能录音:") + checkMsg);
return;
}
// Lazily obtain an AudioContext: caller-provided set.runningContext, or
// a fresh one (remembered in newCtx so failCall can dispose of it).
var ctx;
var getCtx = function() {
ctx = set2.runningContext;
if (!ctx) ctx = newCtx = Recorder2.GetContext(true);
};
// Branch 1: caller supplied the stream directly — no getUserMedia, the
// stream is stored on this instance and connected synchronously.
if (set2.sourceStream) {
This._streamTag = "set.sourceStream";
if (!Recorder2.GetContext()) {
failCall($T("1iU7::不支持此浏览器从流中获取录音"));
return;
}
getCtx();
Disconnect(streamStore);
var stream = This.Stream = set2.sourceStream;
stream._c = ctx;
stream._RC = set2.runningContext;
stream._call = {};
try {
Connect(streamStore);
} catch (e2) {
Disconnect(streamStore);
failCall($T("BTW2::从流中打开录音失败:") + e2.message);
return;
}
ok2();
return;
}
// Map a getUserMedia error to a user-facing failure message. The
// window.top.a probe throws in a cross-origin iframe, which is reported
// as a missing microphone permission policy.
var codeFail = function(code2, msg) {
try {
window.top.a;
} catch (e2) {
failCall($T("Nclz::无权录音(跨域,请尝试给iframe添加麦克风访问策略,如{1})", 0, 'allow="camera;microphone"'));
return;
}
if (codeErr1(1, code2)) {
if (/Found/i.test(code2)) {
failCall(msg + $T("jBa9::,无可用麦克风"));
} else {
failCall(msg);
}
}
};
// Classify well-known error codes. Returns 1 for "other" errors (caller
// reports them); handles permission-denied and insecure-context itself
// when call is truthy.
var codeErr1 = function(call, code2) {
if (/Permission|Allow/i.test(code2)) {
if (call) failCall($T("gyO5::用户拒绝了录音权限"), true);
} else if (window.isSecureContext === false) {
if (call) failCall($T("oWNo::浏览器禁止不安全页面录音,可开启https解决"));
} else {
return 1;
}
};
// Branch 2: shared microphone. Reuse the already-open global stream if
// there is one.
if (Recorder2.IsOpen()) {
ok2();
return;
}
if (!Recorder2.Support()) {
codeFail("", $T("COxc::此浏览器不支持录音"));
return;
}
getCtx();
// getUserMedia success: after a 100ms settle delay, install the new
// stream globally (migrating _call listeners from any old stream),
// re-check the lock, then connect.
var f1 = function(stream2) {
setTimeout(function() {
stream2._call = {};
var oldStream = Recorder2.Stream;
if (oldStream) {
Disconnect();
stream2._call = oldStream._call;
}
Recorder2.Stream = stream2;
stream2._c = ctx;
stream2._RC = set2.runningContext;
if (lockFail()) return;
if (Recorder2.IsOpen()) {
if (oldStream) This.CLog($T("upb8::发现同时多次调用open"), 1);
Connect(streamStore);
ok2();
} else {
failCall($T("Q1GA::录音功能无效:无音频流"));
}
}, 100);
};
// getUserMedia failure: on the first attempt, permission-style errors
// trigger one retry with AEC/ANS/AGC disabled (f2_c/f2_e keep the first
// failure so the retry's failure can report the original cause).
var f2 = function(e2) {
var code2 = e2.name || e2.message || e2.code + ":" + e2;
var tryMsg = "";
if (callUmCount == 1 && codeErr1(0, code2)) {
tryMsg = $T("KxE2::,将尝试禁用回声消除后重试");
}
var msg1 = $T("xEQR::请求录音权限错误"), msg2 = $T("bDOG::无法录音:");
This.CLog(msg1 + tryMsg + "|" + e2, tryMsg || f2_e ? 3 : 1, e2);
if (tryMsg) {
f2_c = code2;
f2_e = e2;
callUserMedia(1);
} else if (f2_e) {
This.CLog(msg1 + "|" + f2_e, 1, f2_e);
codeFail(f2_c, msg2 + f2_e);
} else {
codeFail(code2, msg2 + e2);
}
};
var callUmCount = 0, f2_c, f2_e;
// Issue the actual getUserMedia request. retry=1 forces AGC/AEC/ANS off
// (second attempt). Supports both the promise-based and the legacy
// callback-based getUserMedia signatures.
var callUserMedia = function(retry) {
callUmCount++;
var atsTxt = "audioTrackSet";
var t_AGC = "autoGainControl", t_AEC = "echoCancellation", t_ANS = "noiseSuppression";
var atsTxtJs = atsTxt + ":{" + t_AEC + "," + t_ANS + "," + t_AGC + "}";
// Deep-copy the caller's audioTrackSet via JSON round-trip so the retry
// mutation below cannot leak back into set2.
var trackSet = JSON.parse(ToJson(set2[atsTxt] || true));
This.CLog("open... " + callUmCount + " " + atsTxt + ":" + ToJson(trackSet));
if (retry) {
if (typeof trackSet != "object") trackSet = {};
trackSet[t_AGC] = false;
trackSet[t_AEC] = false;
trackSet[t_ANS] = false;
}
if (trackSet[sampleRateTxt]) {
This.CLog($T("IjL3::注意:已配置{1}参数,可能会出现浏览器不能正确选用麦克风、移动端无法启用回声消除等现象", 0, atsTxt + "." + sampleRateTxt), 3);
}
var mSet = { audio: trackSet, video: set2.videoTrackSet || false };
try {
var pro = Recorder2.Scope[getUserMediaTxt](mSet, f1, f2);
} catch (e2) {
// Constraint object rejected outright — fall back to a bare request.
This.CLog(getUserMediaTxt, 3, e2);
mSet = { audio: true, video: false };
pro = Recorder2.Scope[getUserMediaTxt](mSet, f1, f2);
}
This.CLog(getUserMediaTxt + "(" + ToJson(mSet) + ") " + CtxState(ctx) + $T("RiWe::,未配置 {1} 时浏览器可能会自动启用回声消除,移动端未禁用回声消除时可能会降低系统播放音量(关闭录音后可恢复)和仅提供16k采样率的音频流(不需要回声消除时可明确配置成禁用来获得48k高音质的流),请参阅文档中{2}配置", 0, atsTxtJs, atsTxt) + "(" + GitUrl + ") LM:" + LM + " UA:" + navigator.userAgent);
if (pro && pro.then) {
pro.then(f1)[CatchTxt](f2);
}
};
callUserMedia();
},
close: function(call) {
call = call || NOOP;
var This = this, streamStore = This._streamStore();
This._stop();
var sTag = " stream:" + This._streamTag;
var Lock = streamStore.Sync;
This._O = 0;
if (This._O_ != Lock.O) {
This.CLog($T("hWVz::close被忽略(因为同时open了多个rec,只有最后一个会真正close)") + sTag, 3);
call();
return;
}
Lock.C++;
Disconnect(streamStore);
This.CLog("close," + sTag);
call();
},
mock: function(pcmData, pcmSampleRate) {
var This = this;
This._stop();
This.isMock = 1;
This.mockEnvInfo = null;
This.buffers = [pcmData];
This.recSize = pcmData.length;
This._setSrcSR(pcmSampleRate);
This._streamTag = "mock";
return This;
},
_setSrcSR: function(sampleRate) {
var This = this, set2 = This.set;
var setSr = set2[sampleRateTxt];
if (setSr > sampleRate) {
set2[sampleRateTxt] = sampleRate;
} else {
setSr = 0;
}
This[srcSampleRateTxt] = sampleRate;
This.CLog(srcSampleRateTxt + ": " + sampleRate + " set." + sampleRateTxt + ": " + set2[sampleRateTxt] + (setSr ? " " + $T("UHvm::忽略") + ": " + setSr : ""), setSr ? 3 : 0);
},
envCheck: function(envInfo) {
var errMsg, This = this, set2 = This.set;
var tag2 = "CPU_BE";
if (!errMsg && !Recorder2[tag2] && typeof Int8Array == "function" && !new Int8Array(new Int32Array([1]).buffer)[0]) {
Traffic(tag2);
errMsg = $T("Essp::不支持{1}架构", 0, tag2);
}
if (!errMsg) {
var type = set2.type, hasFn = This[type + "_envCheck"];
if (set2.takeoffEncodeChunk) {