@mvkproject/nexus
Version:
Free AI SDK with API key (500 free daily requests). Access 25+ LLM models (GPT-4, Gemini, Llama, DeepSeek), generate images with 14+ models (Flux, Stable Diffusion), and integrate Akinator game - all completely free.
143 lines (120 loc) • 3.33 kB
JavaScript
export class TextAPI {
  /**
   * Text-generation endpoints of the API.
   *
   * @param {object} client - API client wrapper. Must expose `post(path, body)`
   *   for plain requests, `handleError(error)` for error translation, and a raw
   *   HTTP client (axios-style) at `client.client` used for streaming responses.
   */
  constructor(client) {
    this.client = client;
  }

  /**
   * Validate common options and build the request payload shared by
   * generate() and generateStream().
   *
   * @param {object} options - Caller-supplied generation options (see generate()).
   * @returns {object} Request body for POST /v1/text/generate.
   * @throws {Error} If `model` is missing, or neither `prompt` nor `messages` is given.
   */
  #buildPayload(options) {
    const {
      model,
      prompt,
      messages,
      userid,
      systemInstruction,
      temperature = 1.0,
      maxOutputTokens = 8192,
      max_tokens,
      top_p,
      images
    } = options;

    if (!model) {
      throw new Error('Model is required for text generation');
    }
    // Support both original format (prompt) and OpenAI format (messages).
    if (!prompt && !messages) {
      throw new Error('Either prompt or messages is required for text generation');
    }

    const payload = {
      model,
      temperature,
      // `??` (not `||`) so an explicitly passed falsy max_tokens is not
      // silently replaced by the default.
      maxOutputTokens: max_tokens ?? maxOutputTokens
    };
    if (prompt) payload.prompt = prompt;       // original format
    if (messages) payload.messages = messages; // OpenAI format
    if (userid) payload.userid = userid;
    if (systemInstruction) payload.systemInstruction = systemInstruction;
    // top_p = 0 is a valid sampling value; only skip null/undefined.
    if (top_p != null) payload.top_p = top_p;
    if (images) payload.images = images;
    return payload;
  }

  /**
   * Generate text in a single (non-streaming) request.
   *
   * @param {object} options
   * @param {string} options.model - Model identifier (required).
   * @param {string} [options.prompt] - Original-format prompt (this or `messages`).
   * @param {Array}  [options.messages] - OpenAI-format message list (this or `prompt`).
   * @param {string} [options.userid]
   * @param {string} [options.systemInstruction]
   * @param {number} [options.temperature=1.0]
   * @param {number} [options.maxOutputTokens=8192]
   * @param {number} [options.max_tokens] - OpenAI-style alias; takes precedence.
   * @param {number} [options.top_p]
   * @param {Array}  [options.images]
   * @param {boolean} [options.stream=false] - Must be false here; use generateStream().
   * @returns {Promise<object>} Parsed API response.
   * @throws {Error} On missing model/prompt/messages, or if `stream` is set.
   */
  async generate(options) {
    const payload = this.#buildPayload(options);
    if (options.stream) {
      throw new Error('For streaming, use generateStream() method instead');
    }
    return await this.client.post('/v1/text/generate', payload);
  }

  /**
   * Generate text with server-sent-event streaming, invoking `onChunk` for
   * each text fragment as it arrives.
   *
   * @param {object} options - Same options as generate() (without `stream`).
   * @param {function(string): void} onChunk - Called with each streamed text chunk.
   * @returns {Promise<void>} Resolves when the stream ends or `[DONE]` is received.
   * @throws {Error} On missing model/prompt/messages or missing onChunk callback.
   */
  async generateStream(options, onChunk) {
    const payload = this.#buildPayload(options);
    if (typeof onChunk !== 'function') {
      throw new Error('onChunk callback function is required for streaming');
    }
    payload.stream = true;

    try {
      // Raw HTTP client (axios-style) so the response body arrives as a stream.
      const response = await this.client.client.post('/v1/text/generate', payload, {
        responseType: 'stream'
      });
      return await new Promise((resolve, reject) => {
        let buffer = '';
        response.data.on('data', (chunk) => {
          buffer += chunk.toString();
          // SSE frames are newline-delimited; keep the trailing partial
          // line in the buffer until its terminator arrives.
          const lines = buffer.split('\n');
          buffer = lines.pop();
          for (const line of lines) {
            if (!line.startsWith('data: ')) continue;
            const frame = line.slice(6).trim();
            if (frame === '[DONE]') {
              resolve();
              return;
            }
            try {
              const parsed = JSON.parse(frame);
              if (parsed.chunk) {
                onChunk(parsed.chunk);
              }
            } catch {
              // Best-effort: skip malformed JSON frames rather than abort the stream.
            }
          }
        });
        response.data.on('end', () => {
          resolve();
        });
        response.data.on('error', (error) => {
          reject(error);
        });
      });
    } catch (error) {
      // NOTE(review): handleError presumably rethrows a translated error —
      // confirm; if it returns instead, this method resolves to undefined.
      this.client.handleError(error);
    }
  }
}