huggingface-chat
A lightweight and powerful Node.js API client for Hugging Face Chat. Interact with open-source LLMs like Llama 3, Mixtral, and Gemma for conversational AI, text generation, and more. Supports ESM and CJS modules.
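A minimal usage sketch (the cookie value and file path below are placeholders; the API matches the ChatBot class defined in this file):

const ChatBot = require("huggingface-chat").default;

(async () => {
  const bot = new ChatBot("hf-chat=..."); // or: new ChatBot(undefined, "./cookie.txt")
  await bot.intialize();
  const response = await bot.chat("Hello, who are you?");
  console.log(await response.completeResponsePromise());
})();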
JavaScript
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __asyncValues = (this && this.__asyncValues) || function (o) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var m = o[Symbol.asyncIterator], i;
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
Object.defineProperty(exports, "__esModule", { value: true });
const promises_1 = require("fs/promises");
/**
* ChatBot class for managing conversations and interactions with models on Hugging Face.
*/
class ChatBot {
/**
* Constructs a new instance of the ChatBot class.
* @param {string} cookie - The user's authentication cookie.
* @param {string} path - The path to a file containing the authentication cookie.
* @throws {Error} If both `cookie` and `path` are provided or if neither is provided.
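* @example
* // Minimal sketch; the cookie value and file path are placeholders.
* const bot = new ChatBot("hf-chat=..."); // pass the cookie string directly
* // or read it from a file instead:
* const botFromFile = new ChatBot(undefined, "./cookie.txt");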
*/
constructor(cookie, path) {
this.headers = {
accept: "*/*",
"accept-language": "en-US,en;q=0.9",
"sec-ch-ua": '"Chromium";v="116", "Not)A;Brand";v="24", "Google Chrome";v="116"',
"sec-ch-ua-mobile": "?0",
"sec-ch-ua-platform": '"Windows"',
"sec-fetch-dest": "empty",
"sec-fetch-mode": "cors",
"sec-fetch-site": "same-origin",
origin: "https://huggingface.co",
"Referrer-Policy": "strict-origin-when-cross-origin",
};
this.chatLength = 0;
this.models = [];
this.sessons = [];
this.currentModel = null;
this.currentModelId = null;
this.currentConversation = null;
this.currnetSesson = null;
this.currentConversionID = undefined;
this.tools = [];
if (!cookie && !path)
throw new Error("cookie or path of cookie required");
else if (cookie && path)
throw new Error("both cookie and path given");
else if (cookie && !path)
this.cookie = cookie;
else
this.path = path;
}
/**
* Initializes the ChatBot instance.
* @async
* @returns {Promise<void>}
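* @example
* // Sketch: must be awaited before chatting; loads cookies (when a path was given),
* // fetches the available models, and selects the first one as the current model.
* await bot.intialize();
* console.log(bot.showCurrentModel());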
*/
intialize() {
return __awaiter(this, void 0, void 0, function* () {
if (this.path)
yield this.readCookiesFromPath(this.path);
yield this.getRemoteLlms();
this.currentModel = this.models[0];
this.currentModelId = this.currentModel.id;
yield this.getRemoteConversations();
});
}
/**
* Switches the current model for the chat.
* @param {string} value - The ID of the model to switch to.
* @throws {Error} If the provided model ID is not a string or if the model is not found.
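* @example
* // Sketch: pick an id from listAvilableModels(); the index used here is illustrative only.
* const models = bot.listAvilableModels();
* bot.switchModel(models[0].id);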
*/
switchModel(value) {
this.currentConversation = null;
this.currentModel = null;
this.currentModelId = null;
if (typeof value === "string") {
for (const model of this.models) {
if (model.id === value) {
this.currentModel = model;
this.currentModelId = model.id;
}
}
if (this.currentModelId === null) {
throw Error("Model not found, you can list the models by calling listAvilableModels()");
}
}
else {
throw Error("Model Id should be a string");
}
}
/**
* Lists available models that can be used with the chat.
* @returns {Model[]} An array of available models.
*/
listAvilableModels() {
return this.models;
}
/**
* Lists available sessions for the chat.
* @returns {Sesson[]} An array of available sessions.
*/
listAvilableSesson() {
return this.sessons;
}
/**
* Returns the currently selected model for the chat.
* @returns {Model | null} The current model.
*/
showCurrentModel() {
return this.currentModel;
}
/**
* Reads cookies from a specified file path.
* @param {string} path - The path to the file containing the cookies.
* @throws {Error} If the path is undefined.
* @private
*/
readCookiesFromPath(path) {
return __awaiter(this, void 0, void 0, function* () {
var _a, e_1, _b, _c;
if (!path)
throw new Error("cookie path undefined");
const file = yield (0, promises_1.open)(path);
try {
for (var _d = true, _e = __asyncValues(file.readLines()), _f; _f = yield _e.next(), _a = _f.done, !_a; _d = true) {
_c = _f.value;
_d = false;
const line = _c;
this.cookie = (this.cookie || "") + line; // avoid a leading "undefined" when the cookie starts unset
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (!_d && !_a && (_b = _e.return)) yield _b.call(_e);
}
finally { if (e_1) throw e_1.error; }
}
});
}
/**
* Fetches remote conversations from a server.
* @returns {Promise<Sesson[]>} A promise that resolves to an array of fetched conversations.
* @throws {Error} If the server response is not successful.
*/
getRemoteConversations() {
return __awaiter(this, void 0, void 0, function* () {
try {
const response = yield fetch("https://huggingface.co/chat/__data.json", {
headers: Object.assign(Object.assign({}, this.headers), { cookie: this.cookie }),
body: null,
method: "GET",
});
if (response.status !== 200) {
throw new Error(`Failed to get remote conversations with status code: ${response.status}`);
}
let val = yield response.text();
val = val.split('{"type":"chunk",')[1];
const json = JSON.parse('{"type":"chunk",' + val);
const data = json;
const conversationIndices = data["data"][0];
const conversations = [];
for (const index of conversationIndices) {
const conversationData = data["data"][index];
const c = {
id: data["data"][conversationData.id],
title: data["data"][conversationData.title],
model: data["data"][conversationData.model],
};
conversations.push(c);
}
this.sessons = conversations;
return conversations;
}
catch (error) {
throw error;
}
});
}
/**
* Fetches remote LLMs from a server.
* @returns {Promise<Model[]>} A promise that resolves to an array of fetched models.
* @throws {Error} If the server response is not successful.
*/
getRemoteLlms() {
return __awaiter(this, void 0, void 0, function* () {
try {
const response = yield fetch("https://huggingface.co/chat/__data.json", {
headers: Object.assign(Object.assign({}, this.headers), { cookie: this.cookie }),
body: null,
method: "GET",
credentials: "include",
});
if (response.status !== 200) {
throw new Error(`Failed to get remote LLMs with status code: ${response.status}`);
}
let val = yield response.text();
val = val.split('{"type":"chunk",')[0];
const json = JSON.parse(val);
const data = json.nodes[0].data;
const modelsIndices = data[data[0].models];
const modelList = [];
const returnDataFromIndex = (index) => index === -1 ? null : data[index];
for (const modelIndex of modelsIndices) {
const modelData = data[modelIndex];
// Model is unlisted, skip it
if (data[modelData.unlisted]) {
continue;
}
const m = {
id: returnDataFromIndex(modelData.id),
name: returnDataFromIndex(modelData.name),
displayName: returnDataFromIndex(modelData.displayName),
preprompt: returnDataFromIndex(modelData.preprompt),
promptExamples: [],
websiteUrl: returnDataFromIndex(modelData.websiteUrl),
description: returnDataFromIndex(modelData.description),
datasetName: returnDataFromIndex(modelData.datasetName),
datasetUrl: returnDataFromIndex(modelData.datasetUrl),
modelUrl: returnDataFromIndex(modelData.modelUrl),
parameters: {},
};
const promptList = returnDataFromIndex(modelData.promptExamples);
if (promptList !== null) {
const _promptExamples = promptList.map((index) => returnDataFromIndex(index));
m.promptExamples = _promptExamples.map((prompt) => ({
title: data[prompt.title],
prompt: data[prompt.prompt],
}));
}
const indicesParametersDict = returnDataFromIndex(modelData.parameters);
const outParametersDict = {};
for (const [key, value] of Object.entries(indicesParametersDict)) {
if (value === -1) {
outParametersDict[key] = null;
continue;
}
if (Array.isArray(data[value])) {
outParametersDict[key] = data[value].map((index) => data[index]);
continue;
}
outParametersDict[key] = data[value];
}
m.parameters = outParametersDict;
modelList.push(m);
}
this.models = modelList;
return modelList;
}
catch (error) {
throw error;
}
});
}
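/**
* Fetches the list of tools available on Hugging Face Chat.
* @param {number} pageNumber - The page of tools to fetch (sent as the `p` query parameter).
* @returns {Promise<any[]>} A promise that resolves to an array of tool metadata objects.
* @throws {Error} If the server response is not successful.
*/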
getToolList(pageNumber) {
return __awaiter(this, void 0, void 0, function* () {
try {
const response = yield fetch("https://huggingface.co/chat/tools/__data.json?p=" +
pageNumber +
"&x-sveltekit-invalidated=001", {
headers: Object.assign(Object.assign({}, this.headers), { cookie: this.cookie }),
referrer: "https://huggingface.co/chat/",
body: null,
method: "GET",
credentials: "include",
});
if (response.status !== 200) {
throw new Error(`Failed to get tool list with status code: ${response.status}`);
}
const data = yield response.json();
console.log(data);
const dataArray = data.nodes[2].data;
console.log(dataArray);
const toolIndex = dataArray[0].tools;
const toolsJsonArr = [];
for (const tool of dataArray[toolIndex]) {
toolsJsonArr.push(dataArray[tool]);
}
for (const t of toolsJsonArr) {
for (const param in t) {
t[param] = dataArray[t[param]];
}
}
this.tools = toolsJsonArr;
return toolsJsonArr;
}
catch (error) {
throw error;
}
});
}
/**
* Initializes a new chat conversation.
* @param {string} [systemPrompt] - An optional system prompt for the new conversation.
* @returns {Promise<Conversation>} The newly created conversation.
* @throws {Error} If the creation of a new conversation fails.
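* @example
* // Sketch: start a fresh conversation, optionally with a system prompt.
* const conversation = await bot.getNewChat("You are a concise assistant.");
* console.log(conversation.id);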
*/
getNewChat(systemPrompt) {
return __awaiter(this, void 0, void 0, function* () {
const model = {
model: this.currentModelId,
preprompt: systemPrompt,
};
let retry = 0;
while (retry < 5) {
let response = yield fetch("https://huggingface.co/chat/conversation", {
headers: Object.assign(Object.assign({}, this.headers), { "content-type": "application/json", cookie: this.cookie, Referer: "https://huggingface.co/chat/" }),
body: JSON.stringify(model),
method: "POST",
});
const { conversationId } = yield response.json();
if (conversationId) {
this.currentConversionID = conversationId;
break;
}
else {
console.error(`Failed to create a new conversation (${response.statusText}), retrying...`);
retry++;
}
}
if (!this.currentConversionID)
throw new Error("Failed to create new conversion");
const currentChat = yield this.getConversationHistory(this.currentConversionID);
return currentChat;
});
}
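/**
* Downloads a file produced within a conversation (such as a tool output) and writes it to disk.
* @param {string} conversation - The ID of the conversation the file belongs to.
* @param {string} fileSha - The SHA identifier of the file to download.
* @param {string} name - The local file name (or path) to write to.
* @returns {Promise<void>}
* @throws {Error} If the file cannot be fetched.
*/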
downloadFile(conversation, fileSha, name) {
return __awaiter(this, void 0, void 0, function* () {
const response = yield fetch("https://huggingface.co/chat/conversation/" +
conversation +
"/output/" +
fileSha, {
headers: Object.assign(Object.assign({}, this.headers), { cookie: this.cookie }),
referrer: "https://huggingface.co/chat/conversation/" + conversation,
referrerPolicy: "strict-origin-when-cross-origin",
body: null,
method: "GET",
mode: "cors",
credentials: "include",
});
if (response.status !== 200) {
throw new Error(`Failed to fetch file: ${yield response.text()}`);
}
console.error("Writing to file:", name);
if (response.body) {
const buffer = yield response.arrayBuffer();
yield (0, promises_1.writeFile)(name, Buffer.from(buffer));
}
});
}
/**
* Initiates a chat with the provided text.
* @param {string} text - The user's input text or prompt.
* @param {string} [currentConversionID] - The conversation ID to continue; falls back to the current conversation or creates a new one when omitted.
* @param {ChatOptions} [options] - Optional settings such as rawResponse, filePath for downloaded files, and tools.
* @returns {Promise<ChatResponse>} An object containing conversation details.
* @throws {Error} If there is an issue with the chat request.
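* @example
* // Sketch: either consume `stream` token-by-token or await the aggregated reply.
* const response = await bot.chat("Tell me a joke");
* const reply = await response.completeResponsePromise();
* console.log(reply);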
*/
chat(text, currentConversionID, options) {
return __awaiter(this, void 0, void 0, function* () {
var _a;
if (text == "")
throw new Error("the prompt can not be empty.");
if (!currentConversionID && !this.currentConversionID) {
yield this.getNewChat(); // if no chat is available
}
else if (currentConversionID) {
this.currentConversionID = currentConversionID;
yield this.getConversationHistory(currentConversionID);
}
else if (this.currentConversionID) {
yield this.getConversationHistory(this.currentConversionID);
}
if (!this.currentConversation)
throw new Error("Failed to create new conversion");
const data = {
inputs: text,
id: this.currentConversation.history[this.currentConversation.history.length - 1].id,
is_retry: false,
is_continue: false,
web_search: false,
tools: (options === null || options === void 0 ? void 0 : options.tools) || [],
};
const formData = new FormData();
formData.append("data", JSON.stringify(data));
const response = yield fetch("https://huggingface.co/chat/conversation/" +
this.currentConversionID +
"", {
headers: Object.assign(Object.assign({}, this.headers), { cookie: this.cookie, Referer: "https://huggingface.co/chat/conversation/" +
this.currentConversionID +
"" }),
body: formData,
method: "POST",
});
function parseResponse(chunk) {
try {
// a chunk may contain multiple newline-delimited JSON objects
const _jsonArr = chunk.split("\n");
const newJsonArray = [];
for (const val of _jsonArr) {
if (val.trim())
newJsonArray.push(JSON.parse(val));
}
return newJsonArray;
}
catch (error) {
if (chunk)
console.error("Error parsing JSON:", chunk);
return [{}];
}
}
const decoder = new TextDecoder();
let completeResponse = "";
let rawResponse = options === null || options === void 0 ? void 0 : options.rawResponse;
const transformStream = new TransformStream({
transform(chunk, controller) {
return __awaiter(this, void 0, void 0, function* () {
const decodedChunk = decoder.decode(chunk);
try {
const modifiedDataArr = parseResponse(decodedChunk);
const filePath = (options === null || options === void 0 ? void 0 : options.filePath) ? (options === null || options === void 0 ? void 0 : options.filePath) + "/" : "";
for (const modifiedData of modifiedDataArr) {
if (rawResponse) {
controller.enqueue(modifiedData || "");
if (modifiedData.type === "finalAnswer") {
completeResponse = (modifiedData === null || modifiedData === void 0 ? void 0 : modifiedData.text) || "";
controller.terminate();
}
if ((modifiedData === null || modifiedData === void 0 ? void 0 : modifiedData.type) === "file" && _this.currentConversionID) {
yield _this.downloadFile(_this.currentConversionID, modifiedData.sha, filePath + modifiedData.name);
}
}
else {
if (modifiedData.type === "finalAnswer") {
completeResponse = (modifiedData === null || modifiedData === void 0 ? void 0 : modifiedData.text) || "";
controller.terminate();
}
else if (modifiedData.type === "stream") {
controller.enqueue((modifiedData === null || modifiedData === void 0 ? void 0 : modifiedData.token) || "");
}
else if ((modifiedData === null || modifiedData === void 0 ? void 0 : modifiedData.type) === "file" &&
_this.currentConversionID) {
yield _this.downloadFile(_this.currentConversionID, modifiedData.sha, filePath + modifiedData.name);
}
}
}
}
catch (_a) {
throw new Error("Error during parsing response");
}
});
},
});
const modifiedStream = (_a = response.body) === null || _a === void 0 ? void 0 : _a.pipeThrough(transformStream);
const _this = this;
function completeResponsePromise() {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
if (!modifiedStream) {
console.error("modifiedStream undefined");
}
else {
let reader = modifiedStream.getReader();
while (true) {
const { done, value } = yield reader.read();
if (done) {
resolve(completeResponse);
break; // The streaming has ended.
}
}
}
}));
});
}
this.chatLength += 1;
return {
id: this.currentConversionID,
stream: modifiedStream,
completeResponsePromise,
};
});
}
/**
* Gets the details of a conversation.
* @param {string} conversationId - The ID of the conversation to fetch.
* @returns {Promise<Conversation>} A promise that resolves to the conversation details.
* @throws {Error} If there is an API error.
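* @example
* // Sketch: `conversationId` comes from a previous chat() or getNewChat() call.
* const conversation = await bot.getConversationHistory(conversationId);
* console.log(conversation.title, conversation.history.length);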
*/
getConversationHistory(conversationId) {
return __awaiter(this, void 0, void 0, function* () {
if (!conversationId)
throw new Error("conversationId is required for getConversationHistory");
let response = yield fetch("https://huggingface.co/chat/conversation/" +
conversationId +
"/__data.json", {
headers: Object.assign(Object.assign({}, this.headers), { cookie: this.cookie, Referer: "https://huggingface.co/chat/" }),
body: null,
method: "GET",
});
if (response.status !== 200)
throw new Error("Unable to get conversation details, status code: " + response.status);
else {
let val = yield response.text();
val = val.split('{"type":"chunk",')[0];
const json = JSON.parse(val);
const conversation = this.metadataParser(json, conversationId);
return conversation;
}
});
}
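/**
* Parses the raw __data.json payload of a conversation into a Conversation object
* and stores it as the current conversation.
* @param {any} meta - The parsed __data.json response body.
* @param {string} conversationId - The ID of the conversation being parsed.
* @returns {Conversation} The parsed conversation details.
*/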
metadataParser(meta, conversationId) {
let conversation = {
id: "",
model: "",
systemPrompt: "",
title: "",
history: [],
};
const data = meta.nodes[1].data;
const model = data[data[0].model];
const systemPrompt = data[data[0].preprompt];
const title = data[data[0].title];
const messages = data[data[0].messages];
const history = [];
for (const index of messages) {
const nodeMeta = data[index];
const createdAt = new Date(data[nodeMeta.createdAt][1]).getTime() / 1000;
const updatedAt = new Date(data[nodeMeta.updatedAt][1]).getTime() / 1000;
history.push({
id: data[nodeMeta.id],
role: data[nodeMeta.from],
content: data[nodeMeta.content],
createdAt,
updatedAt,
});
}
conversation.id = conversationId;
conversation.model = model;
conversation.systemPrompt = systemPrompt;
conversation.title = title;
conversation.history = history;
this.currentConversation = conversation;
return conversation;
}
}
exports.default = ChatBot;