@kumologica/kumologica-contrib-amazonbedrock
Version:
Kumologica Designer Node for Amazon Bedrock AI service
const { InvokeModelCommand, ApplyGuardrailCommand } = require("@aws-sdk/client-bedrock-runtime");
const { ListFoundationModelsCommand } = require("@aws-sdk/client-bedrock");
async function invokeModel(data, awsServiceBRC) {
let decoded = {};
const client = awsServiceBRC;
const input = buildBedrockPayload(
data.ModelId,
data.Messages,
data.SystemPrompt,
data.Version,
300,
{ temperature: 0.7, topP: 0.9 }
);
//Working guardrail check
// const params = {
// guardrailIdentifier: "",
// guardrailVersion: "",
// source: "INPUT", // or "OUTPUT"
// content: [
// {
// text: {
// text: "Tell me about SpaceX rockets and Mars exploration technologies",
// qualifiers: ["query"]
// }
// }
// ]
// };
// try {
// const command = new ApplyGuardrailCommand(params);
// const response = await client.send(command);
// console.log("Guardrail response:", response);
// // Analyze the response to determine if the guardrail was triggered and why.
// } catch (error) {
// console.error("Error applying guardrail:", error);
// }
const command = new InvokeModelCommand(input);
//const command = new ConverseCommand(input);
//for testing the guardrail.
try {
const response = await client.send(command);
// Collect the response body into a single byte array; depending on the
// runtime, iteration may yield numeric bytes or Uint8Array chunks.
const byteArray = [];
for await (const chunk of response.body) {
if (typeof chunk === 'number') {
byteArray.push(chunk); // numeric byte
} else if (chunk instanceof Uint8Array) {
byteArray.push(...chunk); // spread binary chunk
} else {
throw new Error("Unsupported chunk type: " + typeof chunk);
}
}
// Convert to a Buffer and decode the JSON response body
const buffer = Buffer.from(byteArray);
const responseBodyString = buffer.toString("utf-8");
decoded = JSON.parse(responseBodyString);
} catch (err) {
throw err;
}
return decoded;
}
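// Usage sketch (assumption, not part of this node): invoking a Nova model
// from an async context. The client construction, region, and model id
// below are illustrative only.
// const { BedrockRuntimeClient } = require("@aws-sdk/client-bedrock-runtime");
// const client = new BedrockRuntimeClient({ region: "us-east-1" });
// const decoded = await invokeModel({
//   ModelId: "amazon.nova-lite-v1:0",
//   Messages: "Summarize Kumologica in one sentence.",
//   SystemPrompt: "You are a concise assistant."
// }, client);
// console.log(decoded);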
async function listModels(data, awsServiceBC) {
let models = [];
try {
const client = awsServiceBC;
const command = new ListFoundationModelsCommand({});
const response = await client.send(command);
response.modelSummaries.forEach(model => {
models.push({ "provider": model.modelId, "providerName": model.providerName })
});
} catch (err) {
throw err;
}
return models;
}
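// Usage sketch (assumption): listing foundation models. Note that
// ListFoundationModelsCommand belongs to the control-plane client from
// @aws-sdk/client-bedrock, not the runtime client used by invokeModel.
// const { BedrockClient } = require("@aws-sdk/client-bedrock");
// const controlPlane = new BedrockClient({ region: "us-east-1" });
// const models = await listModels({}, controlPlane);
// models.forEach(m => console.log(m.providerName, m.provider));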
function buildBedrockPayload(modelId, prompt, sysprompt, version, maxTokens = 300, options = {}) {
if (modelId.includes("amazon.")) {
// Amazon Nova payload (Converse-style messages schema, sent via InvokeModel)
return {
modelId,
contentType: "application/json",
accept: "application/json",
body: JSON.stringify({
...(sysprompt ? { system: [{ text: sysprompt }] } : {}),
messages: [
{ role: "user", content: [{ text: prompt }] }
],
inferenceConfig: {
maxTokens,
...(options.temperature !== undefined ? { temperature: options.temperature } : {}),
...(options.topP !== undefined ? { topP: options.topP } : {})
},
...(options.additionalModelRequestFields
? { additionalModelRequestFields: options.additionalModelRequestFields }
: {})
})
};
}
if (modelId.includes("anthropic.")) {
// Anthropic Claude payload (Bedrock with system prompt support)
const messages = [];
messages.push({
role: "user",
content: [{ type: "text", text: prompt }]
});
return {
modelId,
contentType: "application/json",
accept: "application/json",
body: JSON.stringify({
anthropic_version: version, // required
...(sysprompt ? { system: sysprompt } : {}),
messages,
max_tokens: maxTokens,
...(options.temperature !== undefined ? { temperature: options.temperature } : {}),
...(options.topP !== undefined ? { top_p: options.topP } : {})
})
};
}
if (modelId.includes("mistral.")) {
// Mistral payload with system prompt support
let finalPrompt = prompt;
if (sysprompt) {
// Prepend the system prompt to the user prompt with [INST] tags
finalPrompt = `[INST] ${sysprompt} [/INST] ${prompt}`;
}
return {
modelId,
contentType: "application/json",
accept: "application/json",
body: JSON.stringify({
prompt: finalPrompt,
max_tokens: maxTokens,
...(options.temperature !== undefined ? { temperature: options.temperature } : {}),
...(options.topP !== undefined ? { top_p: options.topP } : {})
})
};
}
throw new Error(`Unsupported modelId: ${modelId}`);
}
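// Illustrative output (assumption): for an Anthropic model id with default
// maxTokens and no sampling options, buildBedrockPayload produces an
// InvokeModel input whose body is a JSON string, e.g.:
// buildBedrockPayload(
//   "anthropic.claude-3-haiku-20240307-v1:0",
//   "Hello",
//   "Be brief.",
//   "bedrock-2023-05-31"
// );
// => {
//   modelId: "anthropic.claude-3-haiku-20240307-v1:0",
//   contentType: "application/json",
//   accept: "application/json",
//   body: '{"anthropic_version":"bedrock-2023-05-31","system":"Be brief.","messages":[{"role":"user","content":[{"type":"text","text":"Hello"}]}],"max_tokens":300}'
// }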
module.exports = { invokeModel, listModels };