@capgo/capacitor-llm
Version:
Adds support for locally run LLMs to Capacitor
174 lines (168 loc) • 6.91 kB
JavaScript
;
var core = require('@capacitor/core');
var tasksGenai = require('@mediapipe/tasks-genai');
// Register the 'CapgoLLM' plugin proxy. On the web platform the
// implementation is constructed lazily from the `web` namespace object
// defined further down in this file (hoisted `var`, resolved at call time).
const CapgoLLM = core.registerPlugin('CapgoLLM', {
    web: async () => {
        const m = await Promise.resolve().then(() => web);
        return new m.CapgoLLMWeb();
    },
});
// TypeScript downlevel-emit helper (tslib `__asyncValues`): adapts a value
// that is async-iterable (or, as a fallback, sync-iterable) into an async
// iterator whose next/throw/return results are always Promises.
// Generated code — do not edit by hand. Reuses a global copy if one exists.
var __asyncValues = (globalThis && globalThis.__asyncValues) || function (o) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var m = o[Symbol.asyncIterator], i;
// Prefer the native async iterator; otherwise wrap the sync iterator and
// expose it through an object that is itself async-iterable.
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
// Forward method `n` ("next"/"throw"/"return") to the underlying iterator,
// resolving the yielded value before settling the outer promise.
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
/**
 * Web implementation of the CapgoLLM plugin, backed by MediaPipe Tasks GenAI.
 * Loads a model into an in-browser WASM LLM runtime and streams generated
 * text back to callers through Capacitor plugin events.
 */
class CapgoLLMWeb extends core.WebPlugin {
    constructor() {
        super(...arguments);
        // LlmInference instance; null until setModel() succeeds.
        this.llm = null;
        // chatId -> { id, llm, isActive }
        this.chatSessions = new Map();
        // One of 'not-loaded' | 'loading' | 'ready' | 'error'.
        this.readiness = 'not-loaded';
    }
    /** @returns {Promise<{readiness: string}>} current model readiness state. */
    async getReadiness() {
        return { readiness: this.readiness };
    }
    /**
     * Creates a new chat session bound to the currently loaded model.
     * @returns {Promise<{id: string}>} the new session id
     * @throws {Error} if no model has been loaded via setModel()
     */
    async createChat() {
        if (!this.llm) {
            throw new Error('Model not loaded. Call setModel first.');
        }
        // slice() replaces the deprecated substr(); same 9-char random suffix.
        const chatId = `chat_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
        this.chatSessions.set(chatId, {
            id: chatId,
            llm: this.llm,
            isActive: true,
        });
        return { id: chatId };
    }
    /**
     * Streams a model response for the given message, emitting a 'textFromAi'
     * event per chunk and 'aiFinished' when generation completes.
     * @param {{chatId: string, message: string}} options
     * @throws {Error} if the chat session does not exist or is inactive,
     *   or if generation fails (error is logged and rethrown).
     */
    async sendMessage(options) {
        const session = this.chatSessions.get(options.chatId);
        if (!session) {
            throw new Error(`Chat session ${options.chatId} not found`);
        }
        if (!session.isActive) {
            throw new Error(`Chat session ${options.chatId} is not active`);
        }
        try {
            // Generate response using MediaPipe GenAI streaming API.
            // `for await` replaces the downleveled __asyncValues loop with
            // identical iteration/cleanup semantics.
            const response = session.llm.generateResponseStream(options.message);
            for await (const partialResponse of response) {
                // Send incremental text
                this.notifyListeners('textFromAi', {
                    text: partialResponse,
                    chatId: options.chatId,
                    isChunk: true,
                });
            }
            // Notify completion
            this.notifyListeners('aiFinished', {
                chatId: options.chatId,
            });
        }
        catch (error) {
            console.error('Error generating response:', error);
            throw error;
        }
    }
    /**
     * Loads an LLM model from the given path and prepares it for inference.
     * Emits 'readinessChange' events as the state moves through
     * loading -> ready (or error).
     * @param {{path: string, maxTokens?: number, topk?: number,
     *          temperature?: number, randomSeed?: number}} options
     * @throws {Error} if model creation fails; readiness is set to 'error'.
     */
    async setModel(options) {
        try {
            // Update readiness
            this.readiness = 'loading';
            this.notifyListeners('readinessChange', { readiness: this.readiness });
            // Create LLM configuration. `??` (not `||`) so that explicit
            // falsy values — notably `temperature: 0` for deterministic
            // output — are honored instead of silently replaced by defaults.
            const config = {
                baseOptions: {
                    modelAssetPath: options.path,
                },
                maxTokens: options.maxTokens ?? 2048,
                topK: options.topk ?? 40,
                temperature: options.temperature ?? 0.8,
                randomSeed: options.randomSeed ?? 0,
            };
            const genai = await tasksGenai.FilesetResolver.forGenAiTasks('https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai@latest/wasm');
            // Create LLM instance
            this.llm = await tasksGenai.LlmInference.createFromOptions(genai, config);
            // Update readiness
            this.readiness = 'ready';
            this.notifyListeners('readinessChange', { readiness: this.readiness });
        }
        catch (error) {
            this.readiness = 'error';
            this.notifyListeners('readinessChange', { readiness: this.readiness });
            throw error;
        }
    }
    /**
     * Fetches a model over HTTP, emitting 'downloadProgress' events.
     * On web no bytes are persisted; the original URL is returned as `path`
     * (and `companionUrl`, if given, as `companionPath`).
     * @param {{url: string, companionUrl?: string}} options
     * @returns {Promise<{path: string, companionPath?: string}>}
     * @throws {Error} on a non-OK HTTP response or an unreadable body.
     */
    async downloadModel(options) {
        try {
            // For web, we'll simulate download by fetching and storing in IndexedDB or returning the URL
            const response = await fetch(options.url, {
                method: 'GET',
                headers: {
                    'Content-Type': 'application/octet-stream',
                },
            });
            if (!response.ok) {
                throw new Error(`Failed to download model: ${response.statusText}`);
            }
            const contentLength = response.headers.get('content-length');
            const totalBytes = contentLength ? parseInt(contentLength, 10) : undefined;
            // Read the response with progress
            const reader = response.body?.getReader();
            if (!reader) {
                throw new Error('Failed to get reader from response');
            }
            const chunks = [];
            let downloadedBytes = 0;
            while (true) {
                const { done, value } = await reader.read();
                if (done)
                    break;
                if (value) {
                    chunks.push(value);
                    downloadedBytes += value.length;
                }
                // Notify progress; 0 when the server sent no content-length.
                const progress = totalBytes ? (downloadedBytes / totalBytes) * 100 : 0;
                this.notifyListeners('downloadProgress', {
                    progress,
                    totalBytes,
                    downloadedBytes,
                });
            }
            // For web, we'll return the original URL as the path
            // In a real implementation, you might want to store this in IndexedDB
            const result = {
                path: options.url,
            };
            // Handle companion file if provided
            if (options.companionUrl) {
                // For web, just return the companion URL
                result.companionPath = options.companionUrl;
            }
            return result;
        }
        catch (error) {
            console.error('Error downloading model:', error);
            throw error;
        }
    }
    /** @returns {Promise<{version: string}>} always 'web' on this platform. */
    async getPluginVersion() {
        return { version: 'web' };
    }
}
// Frozen, prototype-less namespace object exposing the web implementation;
// consumed by registerPlugin's lazy web loader above. Must stay a hoisted
// `var` so the earlier reference resolves.
var web = /*#__PURE__*/ Object.freeze({
    __proto__: null,
    CapgoLLMWeb,
});
// CommonJS export of the registered plugin proxy.
exports.CapgoLLM = CapgoLLM;
//# sourceMappingURL=plugin.cjs.js.map