assistant-robot
Version:
An assistant widget with a 3D robot that can interact with the user, plus a simple LLM that can chat with the user.
62 lines (61 loc) • 1.77 kB
JavaScript
// Minified class-field helpers (compiler output for public class fields).
var n = Object.defineProperty;
// Define `key` on `obj`: use a full property descriptor when the key is
// already present (own or inherited), otherwise fall back to plain assignment.
var d = (obj, key, value) => {
  if (key in obj) {
    return n(obj, key, { enumerable: true, configurable: true, writable: true, value });
  }
  return obj[key] = value;
};
// Public-field initializer: normalize non-symbol keys to strings, define the
// field, and return the assigned value.
var a = (obj, key, value) => {
  d(obj, typeof key === "symbol" ? key : `${key}`, value);
  return value;
};
import * as l from "@tensorflow-models/qna";
import "@tensorflow/tfjs-core";
import "@tensorflow/tfjs-backend-cpu";
import "@tensorflow/tfjs-backend-webgl";
import { o as i, q as h } from "./utils-FAxjgnse.mjs";
/**
 * Base class for a language model: tracks load status and a list of
 * callbacks to invoke once loading finishes.
 */
class m {
  constructor() {
    // Current load status; starts out loading.
    a(this, "status", i.loading);
    // Callbacks registered to run when loading completes.
    a(this, "onLoadList", []);
  }

  // Mark the model as ready and fire every registered load callback.
  loaded() {
    this.status = i.ready;
    for (const cb of this.onLoadList) {
      cb();
    }
  }

  // Register a callback to run when the model finishes loading.
  onLoad(cb) {
    this.onLoadList.push(cb);
  }

  // Unregister a previously registered load callback.
  removeLoadCb(cb) {
    this.onLoadList = this.onLoadList.filter((item) => item !== cb);
  }
}
/**
 * Question-answering language model backed by the MobileBERT QnA model
 * from @tensorflow-models/qna. Loading starts immediately on construction;
 * register via onLoad() to be notified when the model is ready.
 */
class f extends m {
  constructor({ passage, modelUrl }) {
    super();
    // The content to extract answers from.
    a(this, "passage");
    // Custom model URL, for areas/countries without access to the GCP-hosted model.
    a(this, "modelUrl");
    // The underlying QnA model instance; set once loading succeeds.
    a(this, "model");
    this.passage = passage;
    this.modelUrl = modelUrl;
    this.init();
  }

  // Load the underlying language model; moves status to ready on success
  // or error on failure (failures are logged, never thrown).
  async init() {
    this.status = i.loading;
    try {
      let options;
      if (this.modelUrl) {
        options = { modelUrl: this.modelUrl };
      }
      this.model = await l.load(options);
      this.loaded();
    } catch (err) {
      console.warn(err);
      this.status = i.error;
    }
  }

  /**
   * ask model question
   * @param question question to ask
   * @returns answer of the question, or "" when the model is not loaded
   *          or no answer was found
   */
  async findAnswers(question) {
    if (!this.model) {
      return "";
    }
    const answers = await this.model.findAnswers(question, this.passage);
    const best = h(answers);
    return best ? best.text : "";
  }
}
// Public API: `m` is the load-state base class, `f` is the MobileBERT-backed
// question-answering model.
export {
m as LanguageModel,
f as MobileBertModel
};