// genkitx-github — Firebase Genkit AI framework plugin for GitHub Models APIs.
// (Compiled distribution output; original source is TypeScript.)
"use strict";
/**
* Copyright 2024 Xavier Portilla Edo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* eslint-disable @typescript-eslint/no-explicit-any */
// TypeScript-emitted `__awaiter` helper: drives the generator produced from an
// `async` function body, resolving each yielded value through the Promise
// machinery so that `yield` behaves like `await` on the configured target.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap non-promise values in the caller-supplied Promise constructor P.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        // Advance the generator with the awaited value, or throw into it on rejection.
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Either settle the outer promise (done) or await the next yielded value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript-emitted `__asyncValues` helper: obtains an async iterator for
// `for await ... of`, falling back to adapting a sync iterator when the
// object has no Symbol.asyncIterator.
var __asyncValues = (this && this.__asyncValues) || function (o) {
    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
    var m = o[Symbol.asyncIterator], i;
    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
    // Wrap each iterator method so its result is delivered asynchronously.
    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.deepseekR1 = exports.jais30bChat = exports.microsoftMaiDsR1 = exports.microsoftPhi4MiniInstruct = exports.microsoftPhi4MultimodalInstruct = exports.microsoftPhi4 = exports.microsoftPhi35Vision128kInstruct = exports.microsoftPhi35MoE128kInstruct = exports.microsoftPhi35Mini128kInstruct = exports.microsoftPhi3Medium128kInstruct = exports.microsoftPhi3Medium4kInstruct = exports.microsoftPhi3Small128kInstruct = exports.microsoftPhi3Small8kInstruct = exports.microsoftPhi3Mini128kInstruct = exports.microsoftPhi3Mini4kInstruct = exports.ai21Jamba15Large = exports.ai21Jamba15Mini = exports.ministral3B = exports.mistralNemo = exports.mistralCodestral2501 = exports.mistralLarge2411 = exports.mistralLarge2407 = exports.mistralLarge = exports.mistralSmall = exports.cohereCommandRPlus082024 = exports.cohereCommandRPlus = exports.cohereCommandR082024 = exports.cohereCommandA = exports.cohereCommandR = exports.metaLlama4Maverick17bInstruct = exports.metaLlama4Scout17bInstruct = exports.metaLlama3370bInstruct = exports.metaLlama3290bVisionInstruct = exports.metaLlama3211bVisionInstruct = exports.metaLlama318bInstruct = exports.metaLlama3170bInstruct = exports.metaLlama31405bInstruct = exports.metaLlama38bInstruct = exports.metaLlama370bInstruct = exports.openAIO4Mini = exports.openAIO3Mini = exports.openAIO3 = exports.openAIO1 = exports.openAIO1Mini = exports.openAIO1Preview = exports.openAIGpt4oMini = exports.openAIGpt4o = exports.openAIGpt41Nano = exports.openAIGpt41Mini = exports.openAIGpt41 = void 0;
exports.SUPPORTED_GITHUB_MODELS = void 0;
exports.toGithubTextAndMedia = toGithubTextAndMedia;
exports.toGithubMessages = toGithubMessages;
exports.toGithubRequestBody = toGithubRequestBody;
exports.githubModel = githubModel;
const genkit_1 = require("genkit");
const model_1 = require("genkit/model");
const core_sse_1 = require("@azure/core-sse");
// Model reference for OpenAI GPT-4.1 served via GitHub Models. The `supports`
// map advertises capabilities to Genkit: multi-turn chat, tool calling, image
// input, system prompts, and text/JSON output. (The same declarative shape is
// repeated for every model below.)
exports.openAIGpt41 = (0, model_1.modelRef)({
    name: "github/gpt-4.1",
    info: {
        versions: ["gpt-4.1"],
        label: "OpenAI - GPT-4.1",
        supports: {
            multiturn: true,
            tools: true,
            media: true,
            systemRole: true,
            output: ["text", "json"],
        },
    },
    configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.openAIGpt41Mini = (0, model_1.modelRef)({
name: "github/gpt-4.1-mini",
info: {
versions: ["gpt-4.1-mini"],
label: "OpenAI - GPT-4.1 Mini",
supports: {
multiturn: true,
tools: true,
media: true,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.openAIGpt41Nano = (0, model_1.modelRef)({
name: "github/gpt-4.1-nano",
info: {
versions: ["gpt-4.1-nano"],
label: "OpenAI - GPT-4.1 Nano",
supports: {
multiturn: true,
tools: true,
media: true,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.openAIGpt4o = (0, model_1.modelRef)({
name: "github/gpt-4o",
info: {
versions: ["gpt-4o"],
label: "OpenAI - GPT-4o",
supports: {
multiturn: true,
tools: true,
media: true,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.openAIGpt4oMini = (0, model_1.modelRef)({
name: "github/gpt-4o-mini",
info: {
versions: ["gpt-4o-mini"],
label: "OpenAI - GPT-4o-mini",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.openAIO1Preview = (0, model_1.modelRef)({
name: "github/o1-preview",
info: {
versions: ["o1-preview"],
label: "OpenAI - o1-preview",
supports: {
multiturn: true,
tools: false,
media: false,
systemRole: false,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.openAIO1Mini = (0, model_1.modelRef)({
name: "github/o1-mini",
info: {
versions: ["o1-mini"],
label: "OpenAI - o1-mini",
supports: {
multiturn: true,
tools: false,
media: false,
systemRole: false,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.openAIO1 = (0, model_1.modelRef)({
name: "github/o1",
info: {
versions: ["o1"],
label: "OpenAI - o1",
supports: {
multiturn: true,
tools: false,
media: false,
systemRole: false,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.openAIO3 = (0, model_1.modelRef)({
name: "github/o3",
info: {
versions: ["o3"],
label: "OpenAI - o3",
supports: {
multiturn: true,
tools: false,
media: false,
systemRole: false,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.openAIO3Mini = (0, model_1.modelRef)({
name: "github/o3-mini",
info: {
versions: ["o3-mini"],
label: "OpenAI - o3-mini",
supports: {
multiturn: true,
tools: false,
media: false,
systemRole: false,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.openAIO4Mini = (0, model_1.modelRef)({
name: "github/o4-mini",
info: {
versions: ["o4-mini"],
label: "OpenAI - o4-mini",
supports: {
multiturn: true,
tools: false,
media: false,
systemRole: false,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.metaLlama370bInstruct = (0, model_1.modelRef)({
name: "github/meta-llama-3-70b-instruct",
info: {
versions: ["meta-llama-3-70b-instruct"],
label: "Meta - Llama-3-70b-instruct",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.metaLlama38bInstruct = (0, model_1.modelRef)({
name: "github/meta-llama-3-8b-instruct",
info: {
versions: ["meta-llama-3-8b-instruct"],
label: "Meta - Llama-3-8b-instruct",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.metaLlama31405bInstruct = (0, model_1.modelRef)({
name: "github/meta-llama-3.1-405b-instruct",
info: {
versions: ["meta-llama-3.1-405b-instruct"],
label: "Meta - Llama-3.1-405b-instruct",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.metaLlama3170bInstruct = (0, model_1.modelRef)({
name: "github/meta-llama-3.1-70b-instruct",
info: {
versions: ["meta-llama-3.1-70b-instruct"],
label: "Meta - Llama-3.1-70b-instruct",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.metaLlama318bInstruct = (0, model_1.modelRef)({
name: "github/meta-llama-3.1-8b-instruct",
info: {
versions: ["meta-llama-3.1-8b-instruct"],
label: "Meta - Llama-3.1-8b-instruct",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.metaLlama3211bVisionInstruct = (0, model_1.modelRef)({
name: "github/Llama-3.2-11B-Vision-Instruct",
info: {
versions: ["Llama-3.2-11B-Vision-Instruct"],
label: "Meta - Llama-3.2-11b-vision-instruct",
supports: {
multiturn: true,
tools: true,
media: true,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.metaLlama3290bVisionInstruct = (0, model_1.modelRef)({
name: "github/Llama-3.2-90B-Vision-Instruct",
info: {
versions: ["Llama-3.2-90B-Vision-Instruct"],
label: "Meta - Llama-3.2-90b-vision-instruct",
supports: {
multiturn: true,
tools: true,
media: true,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.metaLlama3370bInstruct = (0, model_1.modelRef)({
name: "github/Llama-3.3-70B-Instruct",
info: {
versions: ["Llama-3.3-70B-Instruct"],
label: "Meta - Llama-3.3-70B-Instruct",
supports: {
multiturn: true,
tools: true,
media: true,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.metaLlama4Scout17bInstruct = (0, model_1.modelRef)({
name: "github/Llama-4-Scout-17B-16E-Instruct",
info: {
versions: ["Llama-4-Scout-17B-16E-Instructt"],
label: "Meta - Llama-4-Scout-17B-16E-Instruct",
supports: {
multiturn: true,
tools: true,
media: true,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.metaLlama4Maverick17bInstruct = (0, model_1.modelRef)({
name: "github/Llama-4-Maverick-17B-128E-Instruct-FP8",
info: {
versions: ["Llama-4-Maverick-17B-128E-Instruct-FP8"],
label: "Meta - Llama-4-Maverick-17B-128E-Instruct-FP8",
supports: {
multiturn: true,
tools: true,
media: true,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.cohereCommandR = (0, model_1.modelRef)({
name: "github/cohere-command-r",
info: {
versions: ["cohere-command-r"],
label: "Cohere - Command-r",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.cohereCommandA = (0, model_1.modelRef)({
name: "github/cohere-command-a",
info: {
versions: ["cohere-command-a"],
label: "Cohere - Command-a",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.cohereCommandR082024 = (0, model_1.modelRef)({
name: "github/Cohere-command-r-08-2024",
info: {
versions: ["Cohere-command-r-08-2024"],
label: "Cohere - Command-r-08-2024",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.cohereCommandRPlus = (0, model_1.modelRef)({
name: "github/cohere-command-r-plus",
info: {
versions: ["cohere-command-r-plus"],
label: "Cohere - Command-r-plus",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.cohereCommandRPlus082024 = (0, model_1.modelRef)({
name: "github/Cohere-command-r-plus-08-2024",
info: {
versions: ["Cohere-command-r-plus-08-2024"],
label: "Cohere - Command-r-plus-08-2024",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.mistralSmall = (0, model_1.modelRef)({
name: "github/Mistral-small",
info: {
versions: ["Mistral-small"],
label: "Mistral - Small",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.mistralLarge = (0, model_1.modelRef)({
name: "github/Mistral-large",
info: {
versions: ["Mistral-large"],
label: "Mistral - Large",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.mistralLarge2407 = (0, model_1.modelRef)({
name: "github/Mistral-large-2407",
info: {
versions: ["Mistral-large-2407"],
label: "Mistral - Large-2407",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.mistralLarge2411 = (0, model_1.modelRef)({
name: "github/Mistral-large-24111",
info: {
versions: ["Mistral-large-2411"],
label: "Mistral - Large-2411",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.mistralCodestral2501 = (0, model_1.modelRef)({
name: "github/Codestral-2501",
info: {
versions: ["Codestral-2501"],
label: "Mistral - Codestral-2501",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.mistralNemo = (0, model_1.modelRef)({
name: "github/Mistral-nemo",
info: {
versions: ["Mistral-nemo"],
label: "Mistral - Nemo",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.ministral3B = (0, model_1.modelRef)({
name: "github/Ministral-3b",
info: {
versions: ["Ministral-3B"],
label: "Ministral - 3B",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.ai21Jamba15Mini = (0, model_1.modelRef)({
name: "github/ai21-jamba-1.5-mini",
info: {
versions: ["ai21-jamba-1.5-mini"],
label: "AI21Labs - Jamba-1.5-mini",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.ai21Jamba15Large = (0, model_1.modelRef)({
name: "github/ai21-jamba-1.5-large",
info: {
versions: ["ai21-jamba-1.5-large"],
label: "AI21Labs - Jamba-1.5-large",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.microsoftPhi3Mini4kInstruct = (0, model_1.modelRef)({
name: "github/Phi-3-mini-4k-instruct",
info: {
versions: ["Phi-3-mini-4k-instruct"],
label: "Microsoft - Phi-3-mini-4k-instruct",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.microsoftPhi3Mini128kInstruct = (0, model_1.modelRef)({
name: "github/Phi-3-mini-128k-instruct",
info: {
versions: ["Phi-3-mini-128k-instruct"],
label: "Microsoft - Phi-3-mini-128k-instruct",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.microsoftPhi3Small8kInstruct = (0, model_1.modelRef)({
name: "github/Phi-3-small-8k-instruct",
info: {
versions: ["Phi-3-small-8k-instruct"],
label: "Microsoft - Phi-3-small-8k-instruct",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.microsoftPhi3Small128kInstruct = (0, model_1.modelRef)({
name: "github/Phi-3-small-128k-instruct",
info: {
versions: ["Phi-3-small-128k-instruct"],
label: "Microsoft - Phi-3-small-128k-instruct",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.microsoftPhi3Medium4kInstruct = (0, model_1.modelRef)({
name: "github/Phi-3-medium-4k-instruct",
info: {
versions: ["Phi-3-medium-4k-instruct"],
label: "Microsoft - Phi-3-medium-4k-instruct",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.microsoftPhi3Medium128kInstruct = (0, model_1.modelRef)({
name: "github/Phi-3-medium-128k-instruct",
info: {
versions: ["Phi-3-medium-128k-instruct"],
label: "Microsoft - Phi-3-medium-128k-instruct",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.microsoftPhi35Mini128kInstruct = (0, model_1.modelRef)({
name: "github/Phi-3.5-mini-instruct",
info: {
versions: ["Phi-3.5-mini-instruct"],
label: "Microsoft - Phi-3.5-mini-128k-instruct",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.microsoftPhi35MoE128kInstruct = (0, model_1.modelRef)({
name: "github/Phi-3.5-moe-instruct",
info: {
versions: ["Phi-3.5-moe-instruct"],
label: "Microsoft - Phi-3.5-moe-128k-instruct",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.microsoftPhi35Vision128kInstruct = (0, model_1.modelRef)({
name: "github/Phi-3.5-vision-instruct",
info: {
versions: ["Phi-3.5-vision-instruct"],
label: "Microsoft - Phi-3.5-vision-instruct",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.microsoftPhi4 = (0, model_1.modelRef)({
name: "github/Phi-4",
info: {
versions: ["Phi-4"],
label: "Microsoft - Phi-4",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.microsoftPhi4MultimodalInstruct = (0, model_1.modelRef)({
name: "github/Phi-4-multimodal-instruct",
info: {
versions: ["Phi-4-multimodal-instruct"],
label: "Microsoft - Phi-4-multimodal",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.microsoftPhi4MiniInstruct = (0, model_1.modelRef)({
name: "github/Phi-4-mini-instruct",
info: {
versions: ["Phi-4-mini-instruct"],
label: "Microsoft - Phi-4-mini-instruct",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.microsoftMaiDsR1 = (0, model_1.modelRef)({
name: "github/MAI-DS-R1",
info: {
versions: ["MAI-DS-R1"],
label: "Microsoft - MAI-DS-R1",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.jais30bChat = (0, model_1.modelRef)({
name: "github/jais-30b-chat",
info: {
versions: ["jais-30b-chat"],
label: "Jais - 30b-chat",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
exports.deepseekR1 = (0, model_1.modelRef)({
name: "github/DeepSeek-R1",
info: {
versions: ["DeepSeek-R1"],
label: "DeepSeek - R1",
supports: {
multiturn: true,
tools: true,
media: false,
systemRole: true,
output: ["text", "json"],
},
},
configSchema: genkit_1.GenerationCommonConfigSchema,
});
// Registry of every model this plugin can define, keyed by the GitHub Models
// model id (the key is also used to build the Genkit model name in
// `githubModel`, and to look up request capabilities in `toGithubRequestBody`).
exports.SUPPORTED_GITHUB_MODELS = {
    // OpenAI
    "gpt-4.1": exports.openAIGpt41,
    "gpt-4.1-mini": exports.openAIGpt41Mini,
    "gpt-4.1-nano": exports.openAIGpt41Nano,
    "gpt-4o": exports.openAIGpt4o,
    "gpt-4o-mini": exports.openAIGpt4oMini,
    "o1-preview": exports.openAIO1Preview,
    "o1-mini": exports.openAIO1Mini,
    o1: exports.openAIO1,
    o3: exports.openAIO3,
    "o3-mini": exports.openAIO3Mini,
    "o4-mini": exports.openAIO4Mini,
    // Meta
    "meta-llama-3-70b-instruct": exports.metaLlama370bInstruct,
    "meta-llama-3-8b-instruct": exports.metaLlama38bInstruct,
    "meta-llama-3.1-405b-instruct": exports.metaLlama31405bInstruct,
    "meta-llama-3.1-70b-instruct": exports.metaLlama3170bInstruct,
    "meta-llama-3.1-8b-instruct": exports.metaLlama318bInstruct,
    "Llama-3.2-11B-Vision-Instruct": exports.metaLlama3211bVisionInstruct,
    "Llama-3.2-90B-Vision-Instruct": exports.metaLlama3290bVisionInstruct,
    "Llama-3.3-70B-Instruct": exports.metaLlama3370bInstruct,
    "Llama-4-Scout-17B-16E-Instruct": exports.metaLlama4Scout17bInstruct,
    "Llama-4-Maverick-17B-128E-Instruct-FP8": exports.metaLlama4Maverick17bInstruct,
    // Cohere
    // NOTE(review): the keys "Cohere-command-r"/"Cohere-command-r-plus" are
    // capitalized while the corresponding modelRef names use lowercase
    // "github/cohere-command-r[...-plus]" — the model id registered via
    // githubModel() will therefore differ in case from the modelRef name.
    // Confirm which casing the GitHub Models API actually expects.
    "cohere-command-a": exports.cohereCommandA,
    "Cohere-command-r": exports.cohereCommandR,
    "Cohere-command-r-plus": exports.cohereCommandRPlus,
    "Cohere-command-r-08-2024": exports.cohereCommandR082024,
    "Cohere-command-r-plus-08-2024": exports.cohereCommandRPlus082024,
    // Mistral
    "Mistral-small": exports.mistralSmall,
    "Mistral-large": exports.mistralLarge,
    "Mistral-large-2407": exports.mistralLarge2407,
    "Mistral-large-2411": exports.mistralLarge2411,
    "Mistral-nemo": exports.mistralNemo,
    "Ministral-3B": exports.ministral3B,
    "Codestral-2501": exports.mistralCodestral2501,
    // AI21 Labs
    "ai21-jamba-1.5-mini": exports.ai21Jamba15Mini,
    "ai21-jamba-1.5-large": exports.ai21Jamba15Large,
    // Microsoft
    "Phi-3-mini-4k-instruct": exports.microsoftPhi3Mini4kInstruct,
    "Phi-3-mini-128k-instruct": exports.microsoftPhi3Mini128kInstruct,
    "Phi-3-small-8k-instruct": exports.microsoftPhi3Small8kInstruct,
    "Phi-3-small-128k-instruct": exports.microsoftPhi3Small128kInstruct,
    "Phi-3-medium-4k-instruct": exports.microsoftPhi3Medium4kInstruct,
    "Phi-3-medium-128k-instruct": exports.microsoftPhi3Medium128kInstruct,
    "Phi-3.5-mini-instruct": exports.microsoftPhi35Mini128kInstruct,
    "Phi-3.5-moe-instruct": exports.microsoftPhi35MoE128kInstruct,
    "Phi-3.5-vision-instruct": exports.microsoftPhi35Vision128kInstruct,
    "Phi-4": exports.microsoftPhi4,
    "Phi-4-multimodal-instruct": exports.microsoftPhi4MultimodalInstruct,
    "Phi-4-mini-instruct": exports.microsoftPhi4MiniInstruct,
    "MAI-DS-R1": exports.microsoftMaiDsR1,
    // Others
    "jais-30b-chat": exports.jais30bChat,
    "DeepSeek-R1": exports.deepseekR1,
};
/**
 * Maps a Genkit message role onto the corresponding GitHub Models
 * (OpenAI-style) chat role.
 *
 * @param {string} role - Genkit role: "user" | "model" | "system" | "tool".
 * @returns {string} The GitHub Models role name.
 * @throws {Error} When the role has no GitHub Models equivalent.
 */
function toGithubRole(role) {
    if (role === "user")
        return "user";
    if (role === "model")
        return "assistant"; // Genkit "model" == OpenAI-style "assistant"
    if (role === "system")
        return "system";
    if (role === "tool")
        return "tool";
    throw new Error(`role ${role} doesn't map to an Github Models role.`);
}
/**
 * Converts a Genkit tool definition into the OpenAI-style function-tool
 * shape expected by the GitHub Models chat-completions API.
 *
 * @param tool - Genkit tool definition ({ name, inputSchema, description }).
 * @returns An OpenAI-style `{ type: "function", function: {...} }` object.
 */
function toGithubTool(tool) {
    const { name, inputSchema, description } = tool;
    return {
        type: "function",
        function: {
            name,
            parameters: inputSchema,
            description,
        },
    };
}
/**
 * Converts a single Genkit content part into a GitHub Models content part.
 * Text parts win over media when both are present (matching part precedence).
 *
 * @param part - Genkit part with either `text` or `media` set.
 * @param visualDetailLevel - Image detail hint ("auto" | "low" | "high").
 * @returns A `{ type: "text" }` or `{ type: "image_url" }` content part.
 * @throws {Error} When the part carries neither text nor media.
 */
function toGithubTextAndMedia(part, visualDetailLevel) {
    if (part.text) {
        return { type: "text", text: part.text };
    }
    if (part.media) {
        return {
            type: "image_url",
            image_url: {
                url: part.media.url,
                detail: visualDetailLevel,
            },
        };
    }
    throw Error(`Unsupported genkit part fields encountered for current message role: ${part}.`);
}
/**
 * Converts a list of Genkit messages into GitHub Models (OpenAI-style)
 * chat-completion messages.
 *
 * @param messages - Genkit message data array.
 * @param visualDetailLevel - Detail hint forwarded to image parts (default "auto").
 * @returns Array of GitHub chat messages (user/system/assistant/tool).
 * @throws {Error} When a role cannot be mapped or a tool request is malformed.
 */
function toGithubMessages(messages, visualDetailLevel = "auto") {
    const githubMsgs = [];
    for (const message of messages) {
        const msg = new genkit_1.Message(message);
        const role = toGithubRole(message.role);
        switch (role) {
            case "user": {
                const textAndMedia = msg.content.map((part) => toGithubTextAndMedia(part, visualDetailLevel));
                // Use the structured multi-part form when there are several
                // parts OR any media part. The previous `length > 1` check
                // collapsed a lone media part to `msg.text` (an empty string),
                // silently dropping the image.
                const hasMedia = msg.content.some((part) => part.media);
                if (textAndMedia.length > 1 || hasMedia) {
                    githubMsgs.push({
                        role: role,
                        content: textAndMedia,
                    });
                }
                else {
                    githubMsgs.push({
                        role: role,
                        content: msg.text,
                    });
                }
                break;
            }
            case "system":
                githubMsgs.push({
                    role: role,
                    content: msg.text,
                });
                break;
            case "assistant": {
                // Collect tool-call parts; an assistant message is either a
                // set of tool calls or plain text, never both here.
                const toolCalls = msg.content
                    .filter((part) => part.toolRequest)
                    .map((part) => {
                    if (!part.toolRequest) {
                        throw Error("Mapping genkit message to openai tool call content part but message.toolRequest not provided.");
                    }
                    return {
                        id: part.toolRequest.ref || "",
                        type: "function",
                        function: {
                            name: part.toolRequest.name,
                            arguments: JSON.stringify(part.toolRequest.input),
                        },
                    };
                });
                if (toolCalls.length > 0) {
                    githubMsgs.push({
                        role: role,
                        tool_calls: toolCalls,
                    });
                }
                else {
                    githubMsgs.push({
                        role: role,
                        content: msg.text,
                    });
                }
                break;
            }
            case "tool": {
                // One GitHub "tool" message per tool response part. Plain loop
                // instead of `.map` used purely for side effects.
                for (const part of msg.toolResponseParts()) {
                    githubMsgs.push({
                        role: role,
                        tool_call_id: part.toolResponse.ref || "",
                        content: typeof part.toolResponse.output === "string"
                            ? part.toolResponse.output
                            : JSON.stringify(part.toolResponse.output),
                    });
                }
                break;
            }
            default:
                throw new Error("unrecognized role");
        }
    }
    return githubMsgs;
}
// Maps GitHub Models finish reasons onto Genkit candidate finish reasons.
const finishReasonMap = {
    length: "length",
    stop: "stop",
    tool_calls: "stop",
    content_filter: "blocked",
};
/**
 * Converts an OpenAI-style tool call from a response into a Genkit
 * toolRequest part.
 *
 * @param toolCall - `{ id, function: { name, arguments } }` from the API.
 * @returns Genkit `{ toolRequest }` part; JSON arguments are parsed.
 * @throws {Error} When the tool call has no `function` field.
 */
function fromGithubToolCall(toolCall) {
    if (!("function" in toolCall)) {
        throw Error(`Unexpected github chunk choice. tool_calls was provided but one or more tool_calls is missing.`);
    }
    const f = toolCall.function;
    return {
        toolRequest: {
            name: f.name,
            ref: toolCall.id,
            input: f.arguments ? JSON.parse(f.arguments) : f.arguments,
        },
    };
}
/**
 * Converts a non-streaming completion choice into a Genkit candidate.
 *
 * @param choice - `choices[i]` from a chat-completions response.
 * @param jsonMode - When true, parse `message.content` as JSON into `data`.
 * @returns Genkit candidate with mapped finish reason and content parts.
 */
function fromGithubChoice(choice, jsonMode = false) {
    const toolCalls = choice.message.tool_calls;
    const toolRequestParts = toolCalls ? toolCalls.map(fromGithubToolCall) : undefined;
    const hasToolRequests = (toolRequestParts ? toolRequestParts.length : 0) > 0;
    return {
        finishReason: "finish_reason" in choice
            ? finishReasonMap[choice.finish_reason]
            : "other",
        message: {
            role: "model",
            content: hasToolRequests
                ? toolRequestParts
                : [
                    jsonMode
                        ? { data: JSON.parse(choice.message.content) }
                        : { text: choice.message.content },
                ],
        },
        custom: {},
    };
}
/**
 * Converts a streaming chunk choice into a Genkit candidate.
 *
 * @param choice - `choices[i]` from a streamed SSE chunk.
 * @returns Genkit candidate; text comes from `delta.content` ("" when absent).
 */
function fromGithubChunkChoice(choice) {
    var _a, _b;
    // Fix: streamed chunks carry `finish_reason` (snake_case) and `delta`
    // per the OpenAI wire format; the old `choice.content ?
    // finishReasonMap[choice.finishReason] : "unknown"` test never matched,
    // so the finish reason was always "unknown". The camelCase field is kept
    // as a backward-compatible fallback.
    const reason = (_a = choice.finish_reason) !== null && _a !== void 0 ? _a : choice.finishReason;
    return {
        finishReason: reason ? finishReasonMap[reason] || "other" : "unknown",
        message: {
            role: "model",
            content: [{ text: (_b = choice.delta && choice.delta.content) !== null && _b !== void 0 ? _b : "" }],
        },
        custom: {},
    };
}
/**
 * Builds the POST body for the GitHub Models /chat/completions endpoint from
 * a Genkit generate request.
 *
 * @param modelName - Key into SUPPORTED_GITHUB_MODELS (e.g. "gpt-4o").
 * @param request - Genkit GenerateRequest.
 * @returns `{ body: {...} }` ready to pass to the Azure REST client.
 * @throws {Error} For unsupported models or unsupported output formats.
 */
function toGithubRequestBody(modelName, request) {
    const model = exports.SUPPORTED_GITHUB_MODELS[modelName];
    if (!model)
        throw new Error(`Unsupported model: ${modelName}`);
    const githubMessages = toGithubMessages(request.messages);
    const output = request.output;
    const jsonMode = (output && output.format) === "json" ||
        (output && output.contentType) === "application/json";
    const textMode = (output && output.format) === "text" ||
        (output && output.contentType) === "plain/text";
    // Reported in the error message below when the format is unsupported.
    const response_format = output && output.format ? output.format : output && output.contentType;
    const supportedOutputs = model.info.supports && model.info.supports.output;
    let responseFormat;
    if (jsonMode && supportedOutputs && supportedOutputs.includes("json")) {
        responseFormat = { type: "json_object" };
    }
    else if ((textMode && supportedOutputs && supportedOutputs.includes("text")) ||
        (supportedOutputs && supportedOutputs.includes("text"))) {
        responseFormat = { type: "text" };
    }
    else {
        throw new Error(`${response_format} format is not supported for GPT models currently`);
    }
    // Explicit config.version wins, then the modelRef version, then the key.
    const modelString = ((request.config && request.config.version) ||
        model.version ||
        modelName);
    const config = request.config || {};
    const body = {
        body: {
            messages: githubMessages,
            tools: request.tools ? request.tools.map(toGithubTool) : undefined,
            model: modelString,
            max_tokens: config.maxOutputTokens,
            temperature: config.temperature,
            top_p: config.topP,
            n: request.candidates,
            stop: config.stopSequences,
            // FIXME: coherence models don't support response_format for now
            response_format: modelString.includes("cohere") ? "" : responseFormat,
        },
    };
    // Strip unset fields. Fix: the old check used plain falsiness, which also
    // deleted legitimate zero values (temperature: 0, top_p: 0). Only remove
    // null/undefined, empty strings (the cohere response_format placeholder)
    // and empty arrays.
    for (const key in body.body) {
        const value = body.body[key];
        if (value == null ||
            value === "" ||
            (Array.isArray(value) && value.length === 0))
            delete body.body[key];
    }
    return body;
}
/**
 * Defines and registers a Genkit model action for a supported GitHub model.
 *
 * @param name - Key into SUPPORTED_GITHUB_MODELS (e.g. "gpt-4o").
 * @param client - Azure REST client pointed at the GitHub Models endpoint.
 * @param ai - Genkit instance used to register the model.
 * @returns The Genkit model action created by `ai.defineModel`.
 * @throws {Error} When `name` is not a supported model.
 */
function githubModel(name, client, ai) {
    // Genkit-visible model id, e.g. "github/gpt-4o".
    const modelId = `github/${name}`;
    const model = exports.SUPPORTED_GITHUB_MODELS[name];
    if (!model)
        throw new Error(`Unsupported model: ${name}`);
    return ai.defineModel(Object.assign(Object.assign({ name: modelId }, model.info), { configSchema: exports.SUPPORTED_GITHUB_MODELS[name].configSchema }), (request, streamingCallback) => __awaiter(this, void 0, void 0, function* () {
        var _a, e_1, _b, _c;
        var _d, _e, _f, _g;
        let response;
        const body = toGithubRequestBody(name, request);
        if (streamingCallback) {
            // Streaming path: request SSE and forward each chunk to the callback.
            body.body.stream = true;
            response = yield client.path("/chat/completions").post(body);
            const stream = response.body;
            const sseStream = (0, core_sse_1.createSseStream)(stream);
            try {
                // Down-leveled `for await (const event of sseStream)` loop.
                for (var _h = true, sseStream_1 = __asyncValues(sseStream), sseStream_1_1; sseStream_1_1 = yield sseStream_1.next(), _a = sseStream_1_1.done, !_a; _h = true) {
                    _c = sseStream_1_1.value;
                    _h = false;
                    const event = _c;
                    // OpenAI-style stream terminator sentinel.
                    if (event.data === "[DONE]") {
                        break;
                    }
                    for (const choice of JSON.parse(event.data).choices) {
                        const c = fromGithubChunkChoice(choice);
                        // NOTE(review): this spreads the whole candidate object
                        // (finishReason, message, custom) into a single content
                        // part rather than using c.message.content — presumably
                        // intentional pass-through, but verify against Genkit's
                        // streaming part schema.
                        streamingCallback({
                            content: [Object.assign(Object.assign({}, c), { custom: c.custom })],
                        });
                    }
                }
            }
            catch (e_1_1) { e_1 = { error: e_1_1 }; }
            finally {
                // Close the async iterator even on early break/throw, then
                // rethrow any captured error.
                try {
                    if (!_h && !_a && (_b = sseStream_1.return)) yield _b.call(sseStream_1);
                }
                finally { if (e_1) throw e_1.error; }
            }
        }
        else {
            // Non-streaming path: a single POST returns the full completion.
            response = yield client.path("/chat/completions").post(body);
        }
        return {
            // Guard on the body shape: error responses carry no "choices"/"usage",
            // in which case an empty model message / zero usage is returned.
            message: (typeof response.body === 'object' && response.body && "choices" in response.body)
                ? fromGithubChoice(response.body.choices[0], ((_d = request.output) === null || _d === void 0 ? void 0 : _d.format) === "json").message
                : { role: "model", content: [] },
            usage: {
                inputTokens: (typeof response.body === 'object' && response.body && "usage" in response.body) ? (_e = response.body.usage) === null || _e === void 0 ? void 0 : _e.prompt_tokens : 0,
                outputTokens: (typeof response.body === 'object' && response.body && "usage" in response.body)
                    ? (_f = response.body.usage) === null || _f === void 0 ? void 0 : _f.completion_tokens
                    : 0,
                totalTokens: (typeof response.body === 'object' && response.body && "usage" in response.body) ? (_g = response.body.usage) === null || _g === void 0 ? void 0 : _g.total_tokens : 0,
            },
            custom: response,
        };
    }));
}
//# sourceMappingURL=github_llms.js.map