@built-in-ai/core
Version:
Browser Built-in AI API provider for Vercel AI SDK v5+ (Chrome & Edge)
532 lines (528 loc) • 15.8 kB
JavaScript
// src/built-in-ai-language-model.ts
import {
LoadSettingError
} from "@ai-sdk/provider";
// src/convert-to-built-in-ai-messages.ts
import {
UnsupportedFunctionalityError
} from "@ai-sdk/provider";
function convertBase64ToUint8Array(base64) {
  // Decode a base64 string into raw bytes using the platform's atob().
  try {
    const decoded = atob(base64);
    // Each char code of the decoded binary string is one byte value.
    return Uint8Array.from(decoded, (char) => char.charCodeAt(0));
  } catch (error) {
    // Re-throw decode failures (e.g. malformed base64) with context.
    throw new Error(`Failed to convert base64 to Uint8Array: ${error}`);
  }
}
function convertFileData(data, mediaType) {
  // Normalize the possible file-part payload shapes: URLs become strings,
  // raw bytes pass through, base64 strings are decoded to bytes.
  if (data instanceof URL) return data.toString();
  if (data instanceof Uint8Array) return data;
  if (typeof data === "string") return convertBase64ToUint8Array(data);
  // Unknown payload shape: warn and hand it through untouched.
  console.warn(`Unexpected data type for ${mediaType}:`, typeof data);
  return data;
}
function convertToBuiltInAIMessages(prompt) {
  // Split an AI SDK prompt into an optional system prompt plus
  // Prompt-API-shaped user/assistant messages.
  // If several system messages appear, the last one wins (matches original behavior).
  let systemMessage;
  const messages = [];
  // Convert one user content part into the Prompt API part shape.
  const convertUserPart = (part) => {
    if (part.type === "text") {
      return { type: "text", value: part.text };
    }
    if (part.type === "file") {
      const { mediaType, data } = part;
      if (mediaType?.startsWith("image/")) {
        return { type: "image", value: convertFileData(data, mediaType) };
      }
      if (mediaType?.startsWith("audio/")) {
        return { type: "audio", value: convertFileData(data, mediaType) };
      }
      throw new UnsupportedFunctionalityError({
        functionality: `file type: ${mediaType}`
      });
    }
    throw new UnsupportedFunctionalityError({
      functionality: `content type: ${part.type}`
    });
  };
  for (const message of prompt) {
    if (message.role === "system") {
      systemMessage = message.content;
    } else if (message.role === "user") {
      messages.push({
        role: "user",
        content: message.content.map(convertUserPart)
      });
    } else if (message.role === "assistant") {
      // Assistant content is flattened to a single string; tool calls are unsupported.
      const pieces = [];
      for (const part of message.content) {
        if (part.type === "tool-call") {
          throw new UnsupportedFunctionalityError({
            functionality: "tool calls"
          });
        }
        if (part.type === "text") {
          pieces.push(part.text);
        }
      }
      messages.push({ role: "assistant", content: pieces.join("") });
    } else if (message.role === "tool") {
      throw new UnsupportedFunctionalityError({
        functionality: "tool messages"
      });
    } else {
      throw new Error(`Unsupported role: ${message.role}`);
    }
  }
  return { systemMessage, messages };
}
// src/built-in-ai-language-model.ts
function doesBrowserSupportBuiltInAI() {
  // The Prompt API is detected via its `LanguageModel` global.
  const hasPromptApi = typeof LanguageModel !== "undefined";
  return hasPromptApi;
}
function isBuiltInAIModelAvailable() {
  // Same check as doesBrowserSupportBuiltInAI: the Prompt API global must exist.
  return !(typeof LanguageModel === "undefined");
}
function hasMultimodalContent(prompt) {
  // A prompt counts as multimodal when any user message carries a file part.
  // Only user messages are inspected (matches how files reach the Prompt API).
  return prompt.some(
    (message) =>
      message.role === "user" &&
      message.content.some((part) => part.type === "file")
  );
}
function getExpectedInputs(prompt) {
  // Collect the distinct media kinds ("image"/"audio") appearing in user file
  // parts, in first-seen order, shaped for LanguageModel.create's
  // `expectedInputs` option. Other media types are ignored here (they throw
  // later during message conversion).
  const kinds = new Set();
  for (const message of prompt) {
    if (message.role !== "user") continue;
    for (const part of message.content) {
      if (part.type !== "file") continue;
      if (part.mediaType?.startsWith("image/")) {
        kinds.add("image");
      } else if (part.mediaType?.startsWith("audio/")) {
        kinds.add("audio");
      }
    }
  }
  return [...kinds].map((type) => ({ type }));
}
var BuiltInAIChatLanguageModel = class {
constructor(modelId, options = {}) {
this.specificationVersion = "v2";
this.provider = "browser-ai";
this.supportedUrls = {
"image/*": [/^https?:\/\/.+$/],
"audio/*": [/^https?:\/\/.+$/]
};
this.modelId = modelId;
this.config = {
provider: this.provider,
modelId,
options
};
}
async getSession(options, expectedInputs, systemMessage, onDownloadProgress) {
if (typeof LanguageModel === "undefined") {
throw new LoadSettingError({
message: "Prompt API is not available. This library requires Chrome or Edge browser with built-in AI capabilities."
});
}
if (this.session) return this.session;
const availability = await LanguageModel.availability();
if (availability === "unavailable") {
throw new LoadSettingError({ message: "Built-in model not available" });
}
const mergedOptions = {
...this.config.options,
...options
};
if (systemMessage) {
mergedOptions.initialPrompts = [
{ role: "system", content: systemMessage }
];
}
if (expectedInputs && expectedInputs.length > 0) {
mergedOptions.expectedInputs = expectedInputs;
}
if (onDownloadProgress) {
mergedOptions.monitor = (m) => {
m.addEventListener("downloadprogress", (e) => {
onDownloadProgress(e.loaded);
});
};
}
this.session = await LanguageModel.create(mergedOptions);
return this.session;
}
getArgs({
prompt,
maxOutputTokens,
temperature,
topP,
topK,
frequencyPenalty,
presencePenalty,
stopSequences,
responseFormat,
seed,
tools
}) {
const warnings = [];
if (tools && tools.length > 0) {
warnings.push({
type: "unsupported-setting",
setting: "tools",
details: "Tool calling is not yet supported by Prompt API"
});
}
if (maxOutputTokens != null) {
warnings.push({
type: "unsupported-setting",
setting: "maxOutputTokens",
details: "maxOutputTokens is not supported by Prompt API"
});
}
if (stopSequences != null) {
warnings.push({
type: "unsupported-setting",
setting: "stopSequences",
details: "stopSequences is not supported by Prompt API"
});
}
if (topP != null) {
warnings.push({
type: "unsupported-setting",
setting: "topP",
details: "topP is not supported by Prompt API"
});
}
if (presencePenalty != null) {
warnings.push({
type: "unsupported-setting",
setting: "presencePenalty",
details: "presencePenalty is not supported by Prompt API"
});
}
if (frequencyPenalty != null) {
warnings.push({
type: "unsupported-setting",
setting: "frequencyPenalty",
details: "frequencyPenalty is not supported by Prompt API"
});
}
if (seed != null) {
warnings.push({
type: "unsupported-setting",
setting: "seed",
details: "seed is not supported by Prompt API"
});
}
const hasMultiModalInput = hasMultimodalContent(prompt);
const { systemMessage, messages } = convertToBuiltInAIMessages(prompt);
const promptOptions = {};
if (responseFormat?.type === "json") {
promptOptions.responseConstraint = responseFormat.schema;
}
if (temperature !== void 0) {
promptOptions.temperature = temperature;
}
if (topK !== void 0) {
promptOptions.topK = topK;
}
return {
systemMessage,
messages,
warnings,
promptOptions,
hasMultiModalInput,
expectedInputs: hasMultiModalInput ? getExpectedInputs(prompt) : void 0
};
}
/**
* Generates a complete text response using the browser's built-in Prompt API
* @param options
* @returns Promise resolving to the generated content with finish reason, usage stats, and any warnings
* @throws {LoadSettingError} When the Prompt API is not available or model needs to be downloaded
* @throws {UnsupportedFunctionalityError} When unsupported features like file input are used
*/
async doGenerate(options) {
const converted = this.getArgs(options);
const { systemMessage, messages, warnings, promptOptions, expectedInputs } = converted;
const session = await this.getSession(
void 0,
expectedInputs,
systemMessage
);
const text = await session.prompt(messages, promptOptions);
const content = [
{
type: "text",
text
}
];
return {
content,
finishReason: "stop",
usage: {
inputTokens: void 0,
outputTokens: void 0,
totalTokens: void 0
},
request: { body: { messages, options: promptOptions } },
warnings
};
}
/**
* Check the availability of the built-in AI model
* @returns Promise resolving to "unavailable", "available", or "available-after-download"
*/
async availability() {
if (typeof LanguageModel === "undefined") {
return "unavailable";
}
return LanguageModel.availability();
}
/**
* Creates a session with download progress monitoring.
*
* @example
* ```typescript
* const session = await model.createSessionWithProgress(
* (progress) => {
* console.log(`Download progress: ${Math.round(progress * 100)}%`);
* }
* );
* ```
*
* @param onDownloadProgress Optional callback receiving progress values 0-1 during model download
* @returns Promise resolving to a configured LanguageModel session
* @throws {LoadSettingError} When the Prompt API is not available or model is unavailable
*/
async createSessionWithProgress(onDownloadProgress) {
return this.getSession(void 0, void 0, void 0, onDownloadProgress);
}
/**
* Generates a streaming text response using the browser's built-in Prompt API
* @param options
* @returns Promise resolving to a readable stream of text chunks and request metadata
* @throws {LoadSettingError} When the Prompt API is not available or model needs to be downloaded
* @throws {UnsupportedFunctionalityError} When unsupported features like file input are used
*/
async doStream(options) {
const converted = this.getArgs(options);
const {
systemMessage,
messages,
warnings,
promptOptions,
expectedInputs,
hasMultiModalInput
} = converted;
const session = await this.getSession(
void 0,
expectedInputs,
systemMessage
);
const streamOptions = {
...promptOptions,
signal: options.abortSignal
};
const promptStream = session.promptStreaming(messages, streamOptions);
let isFirstChunk = true;
const textId = "text-0";
const stream = promptStream.pipeThrough(
new TransformStream({
start(controller) {
controller.enqueue({
type: "stream-start",
warnings
});
if (options.abortSignal) {
options.abortSignal.addEventListener("abort", () => {
controller.terminate();
});
}
},
transform(chunk, controller) {
if (isFirstChunk) {
controller.enqueue({
type: "text-start",
id: textId
});
isFirstChunk = false;
}
controller.enqueue({
type: "text-delta",
id: textId,
delta: chunk
});
},
flush(controller) {
controller.enqueue({
type: "text-end",
id: textId
});
controller.enqueue({
type: "finish",
finishReason: "stop",
usage: {
inputTokens: session.inputUsage,
outputTokens: void 0,
totalTokens: void 0
}
});
}
})
);
return {
stream,
request: { body: { messages, options: promptOptions } }
};
}
};
// src/built-in-ai-embedding-model.ts
import { TextEmbedder } from "@mediapipe/tasks-text";
var BuiltInAIEmbeddingModel = class {
  /**
   * Text-embedding model (AI SDK specification v2) backed by MediaPipe's
   * TextEmbedder with a universal_sentence_encoder TFLite model running on a
   * WASM runtime. The constructor eagerly starts downloading the model asset
   * so the first doEmbed call does not pay the full latency.
   *
   * @param settings Optional overrides for the WASM/model asset URLs,
   *   l2Normalize, quantize, and delegate.
   */
  constructor(settings = {}) {
    this.specificationVersion = "v2";
    this.provider = "google-mediapipe";
    this.modelId = "embedding";
    this.supportsParallelCalls = true;
    this.maxEmbeddingsPerCall = void 0;
    // Defaults point at publicly hosted copies of the runtime and model.
    this.settings = {
      wasmLoaderPath: "https://pub-ddcfe353995744e89b8002f16bf98575.r2.dev/text_wasm_internal.js",
      wasmBinaryPath: "https://pub-ddcfe353995744e89b8002f16bf98575.r2.dev/text_wasm_internal.wasm",
      modelAssetPath: "https://pub-ddcfe353995744e89b8002f16bf98575.r2.dev/universal_sentence_encoder.tflite",
      l2Normalize: false,
      quantize: false
    };
    // Builds the MediaPipe embedder once the model asset stream is available.
    this.getTextEmbedder = async () => {
      return TextEmbedder.createFromOptions(
        {
          wasmBinaryPath: this.settings.wasmBinaryPath,
          wasmLoaderPath: this.settings.wasmLoaderPath
        },
        {
          baseOptions: {
            modelAssetBuffer: await this.modelAssetBuffer,
            delegate: this.settings.delegate
          },
          l2Normalize: this.settings.l2Normalize,
          quantize: this.settings.quantize
        }
      );
    };
    /**
     * Embeds options.values synchronously via the shared embedder instance.
     * Texts with no embedding result map to an empty array.
     */
    this.doEmbed = async (options) => {
      if (options.abortSignal?.aborted) {
        throw new Error("Operation was aborted");
      }
      const embedder = await this.textEmbedder;
      const embeddings = options.values.map((text) => {
        const embedderResult = embedder.embed(text);
        const [embedding] = embedderResult.embeddings;
        return embedding?.floatEmbedding ?? [];
      });
      return {
        embeddings,
        rawResponse: {
          model: "universal_sentence_encoder",
          provider: "google-mediapipe",
          processed_texts: options.values.length
        }
      };
    };
    this.settings = { ...this.settings, ...settings };
    // FIX: fail fast on HTTP errors or a missing body instead of streaming an
    // error page (or calling getReader() on null) into MediaPipe, which would
    // otherwise surface as a cryptic model-load failure.
    this.modelAssetBuffer = fetch(this.settings.modelAssetPath).then((response) => {
      if (!response.ok) {
        throw new Error(
          `Failed to fetch model asset (${response.status} ${response.statusText}): ${this.settings.modelAssetPath}`
        );
      }
      if (!response.body) {
        throw new Error(`Model asset response has no body: ${this.settings.modelAssetPath}`);
      }
      return response.body.getReader();
    });
    this.textEmbedder = this.getTextEmbedder();
  }
};
// src/built-in-ai-provider.ts
import {
NoSuchModelError
} from "@ai-sdk/provider";
function createBuiltInAI(options = {}) {
const createChatModel = (modelId, settings) => {
return new BuiltInAIChatLanguageModel(modelId, settings);
};
const createEmbeddingModel = (modelId, settings) => {
return new BuiltInAIEmbeddingModel(settings);
};
const provider = function(modelId = "text", settings) {
if (new.target) {
throw new Error(
"The BuiltInAI model function cannot be called with the new keyword."
);
}
return createChatModel(modelId, settings);
};
provider.languageModel = createChatModel;
provider.chat = createChatModel;
provider.textEmbedding = createEmbeddingModel;
provider.textEmbeddingModel = createEmbeddingModel;
provider.imageModel = (modelId) => {
throw new NoSuchModelError({ modelId, modelType: "imageModel" });
};
provider.speechModel = (modelId) => {
throw new NoSuchModelError({ modelId, modelType: "speechModel" });
};
provider.transcriptionModel = (modelId) => {
throw new NoSuchModelError({ modelId, modelType: "transcriptionModel" });
};
return provider;
}
var builtInAI = createBuiltInAI();
export {
BuiltInAIChatLanguageModel,
BuiltInAIEmbeddingModel,
builtInAI,
createBuiltInAI,
doesBrowserSupportBuiltInAI,
isBuiltInAIModelAvailable
};
//# sourceMappingURL=index.mjs.map