@arizeai/phoenix-client
A client for the Phoenix API
"use strict";
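// TypeScript-emitted __rest helper: copies the own enumerable properties of `s`
// that are not listed in `e`, implementing object rest destructuring for older targets.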
var __rest = (this && this.__rest) || function (s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.createPrompt = createPrompt;
exports.promptVersion = promptVersion;
const client_1 = require("../client");
const assertUnreachable_1 = require("../utils/assertUnreachable");
/**
* Create a prompt and store it in Phoenix.
*
* If a prompt with the same name exists, a new version of the prompt will be appended to the history.
*
* @param params - The parameters to create a prompt, including optional metadata.
* @returns The created prompt version.
* @example
* ```typescript
* await createPrompt({
* name: "my-prompt",
* description: "A helpful prompt",
* metadata: { environment: "production", team: "ai" },
* version: promptVersion({
* modelProvider: "OPENAI",
* modelName: "gpt-4",
* template: [{ role: "user", content: "Hello {{name}}" }]
* })
* });
* ```
*/
async function createPrompt(_a) {
var _b;
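    // Split off the client and version; the remaining fields (e.g. name, description, metadata) form the prompt body.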
var { client: _client, version } = _a, promptParams = __rest(_a, ["client", "version"]);
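    // Use the provided client, or fall back to a default client from createClient().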
const client = _client !== null && _client !== void 0 ? _client : (0, client_1.createClient)();
const response = await client.POST("/v1/prompts", {
body: {
prompt: promptParams,
version: version,
},
});
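    // The created prompt version is nested under a `data` key in the response body.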
const createdPromptVersion = (_b = response.data) === null || _b === void 0 ? void 0 : _b.data;
if (!createdPromptVersion) {
throw new Error("Failed to create prompt");
}
return createdPromptVersion;
}
/**
* A helper function to construct a prompt version declaratively.
*
* The output of this function can be used to create a prompt version in Phoenix.
*
* @param params - The parameters to create a prompt version.
* @returns Structured prompt version data, not yet persisted to Phoenix.
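 *
 * The example below is a minimal sketch: the model name, template text, and
 * invocation parameter values are illustrative rather than defaults.
 *
 * @example
 * ```typescript
 * const version = promptVersion({
 *   modelProvider: "ANTHROPIC",
 *   modelName: "claude-3-5-sonnet-latest",
 *   template: [{ role: "user", content: "Summarize: {{text}}" }],
 *   invocationParameters: { max_tokens: 1024 },
 * });
 * await createPrompt({ name: "summarizer", version });
 * ```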
*/
function promptVersion(params) {
const { description = "", modelProvider: model_provider, modelName: model_name, template: templateMessages, templateFormat: template_format = "MUSTACHE", invocationParameters: invocation_parameters, } = params;
switch (model_provider) {
case "OPENAI":
return {
description,
model_provider,
model_name,
template_type: "CHAT",
template_format,
template: {
type: "chat",
messages: templateMessages,
},
invocation_parameters: {
type: "openai",
openai: invocation_parameters !== null && invocation_parameters !== void 0 ? invocation_parameters : {},
},
};
case "AZURE_OPENAI":
return {
description,
model_provider,
model_name,
template_type: "CHAT",
template_format,
template: {
type: "chat",
messages: templateMessages,
},
invocation_parameters: {
type: "azure_openai",
azure_openai: invocation_parameters !== null && invocation_parameters !== void 0 ? invocation_parameters : {},
},
};
case "ANTHROPIC":
return {
description,
model_provider,
model_name,
template_type: "CHAT",
template_format,
template: {
type: "chat",
messages: templateMessages,
},
invocation_parameters: {
type: "anthropic",
anthropic: invocation_parameters,
},
};
case "GOOGLE":
return {
description,
model_provider,
model_name,
template_type: "CHAT",
template_format,
template: {
type: "chat",
messages: templateMessages,
},
invocation_parameters: {
type: "google",
google: invocation_parameters !== null && invocation_parameters !== void 0 ? invocation_parameters : {},
},
};
case "DEEPSEEK":
return {
description,
model_provider,
model_name,
template_type: "CHAT",
template_format,
template: {
type: "chat",
messages: templateMessages,
},
invocation_parameters: {
type: "deepseek",
deepseek: invocation_parameters !== null && invocation_parameters !== void 0 ? invocation_parameters : {},
},
};
case "XAI":
return {
description,
model_provider,
model_name,
template_type: "CHAT",
template_format,
template: {
type: "chat",
messages: templateMessages,
},
invocation_parameters: {
type: "xai",
xai: invocation_parameters !== null && invocation_parameters !== void 0 ? invocation_parameters : {},
},
};
case "OLLAMA":
return {
description,
model_provider,
model_name,
template_type: "CHAT",
template_format,
template: {
type: "chat",
messages: templateMessages,
},
invocation_parameters: {
type: "ollama",
ollama: invocation_parameters !== null && invocation_parameters !== void 0 ? invocation_parameters : {},
},
};
case "AWS":
return {
description,
model_provider,
model_name,
template_type: "CHAT",
template_format,
template: {
type: "chat",
messages: templateMessages,
},
invocation_parameters: {
type: "aws",
aws: invocation_parameters !== null && invocation_parameters !== void 0 ? invocation_parameters : {},
},
};
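        // Exhaustiveness guard: reaching this branch means an unhandled provider value;
        // assertUnreachable flags it, and the TypeScript source fails to type-check if a case is missing.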
default:
(0, assertUnreachable_1.assertUnreachable)(model_provider);
}
}
//# sourceMappingURL=createPrompt.js.map