@genkit-ai/dotprompt
Genkit AI framework `.prompt` file format and management library.
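
A minimal usage sketch, assuming `registry` is an existing Genkit Registry instance, that `Dotprompt` is re-exported from the package entry point, and that the model id and picoschema-style input schema below are illustrative values rather than required ones:

const { Dotprompt } = require("@genkit-ai/dotprompt");

// A .prompt source is YAML frontmatter followed by a Handlebars-style template body.
const source = [
  "---",
  "model: googleai/gemini-1.5-flash",
  "input:",
  "  schema:",
  "    name: string",
  "---",
  "Say hello to {{name}} in a friendly tone.",
].join("\n");

const hello = Dotprompt.parse(registry, "hello", source);
console.log(hello.renderText({ name: "Ada" })); // "Say hello to Ada in a friendly tone."
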
JavaScript
"use strict";
var __create = Object.create;
var __defProp = Object.defineProperty;
var __defProps = Object.defineProperties;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getOwnPropSymbols = Object.getOwnPropertySymbols;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __propIsEnum = Object.prototype.propertyIsEnumerable;
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp.call(b, prop))
__defNormalProp(a, prop, b[prop]);
if (__getOwnPropSymbols)
for (var prop of __getOwnPropSymbols(b)) {
if (__propIsEnum.call(b, prop))
__defNormalProp(a, prop, b[prop]);
}
return a;
};
var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
var __objRest = (source, exclude) => {
var target = {};
for (var prop in source)
if (__hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0)
target[prop] = source[prop];
if (source != null && __getOwnPropSymbols)
for (var prop of __getOwnPropSymbols(source)) {
if (exclude.indexOf(prop) < 0 && __propIsEnum.call(source, prop))
target[prop] = source[prop];
}
return target;
};
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
var __async = (__this, __arguments, generator) => {
return new Promise((resolve, reject) => {
var fulfilled = (value) => {
try {
step(generator.next(value));
} catch (e) {
reject(e);
}
};
var rejected = (value) => {
try {
step(generator.throw(value));
} catch (e) {
reject(e);
}
};
var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
step((generator = generator.apply(__this, __arguments)).next());
});
};
var prompt_exports = {};
__export(prompt_exports, {
Dotprompt: () => Dotprompt,
DotpromptRef: () => DotpromptRef,
defineDotprompt: () => defineDotprompt
});
module.exports = __toCommonJS(prompt_exports);
var import_ai = require("@genkit-ai/ai");
var import_session = require("@genkit-ai/ai/session");
var import_core = require("@genkit-ai/core");
var import_schema = require("@genkit-ai/core/schema");
var import_tracing = require("@genkit-ai/core/tracing");
var import_crypto = require("crypto");
var import_front_matter = __toESM(require("front-matter"));
var import_metadata = require("./metadata.js");
var import_registry2 = require("./registry.js");
var import_template = require("./template.js");
class Dotprompt {
constructor(registry, options, template, action) {
this.registry = registry;
this.name = options.name || "untitledPrompt";
this.description = options.description;
this.variant = options.variant;
this.model = options.model;
this.input = options.input || { schema: import_core.z.any() };
this.output = options.output;
this.tools = options.tools;
this.config = options.config;
this.template = template;
this.hash = (0, import_crypto.createHash)("sha256").update(JSON.stringify(this)).digest("hex");
this._render = (0, import_template.compile)(this.template, options);
this._promptAction = action;
}
static parse(registry, name, source) {
try {
const fmResult = (0, import_front_matter.default)(source.trimStart(), {
allowUnsafe: false
});
return new Dotprompt(
registry,
__spreadProps(__spreadValues({}, (0, import_metadata.toMetadata)(registry, fmResult.attributes)), {
name
}),
fmResult.body
);
} catch (e) {
throw new import_core.GenkitError({
source: "Dotprompt",
status: "INVALID_ARGUMENT",
message: `Error parsing YAML frontmatter of '${name}' prompt: ${e.stack}`
});
}
}
static fromAction(registry, action) {
var _b, _c, _d, _e;
const _a = action.__action.metadata.prompt, { template } = _a, options = __objRest(_a, ["template"]);
const pm = options;
if ((_b = pm.input) == null ? void 0 : _b.schema) {
pm.input.jsonSchema = (_c = options.input) == null ? void 0 : _c.schema;
delete pm.input.schema;
}
if ((_d = pm.output) == null ? void 0 : _d.schema) {
pm.output.jsonSchema = (_e = options.output) == null ? void 0 : _e.schema;
delete pm.output.schema;
}
const prompt = new Dotprompt(
registry,
options,
template,
action
);
return prompt;
}
/**
* Renders all of the prompt's text parts into a raw string.
*
* @param input User input to the prompt template.
* @param options Optional context and/or history for the prompt template.
* @returns all of the text parts concatenated into a string.
*/
renderText(input, options) {
const result = this.renderMessages(input, options);
if (result.length !== 1) {
throw new Error("Multi-message prompt can't be rendered as text.");
}
let out = "";
for (const part of result[0].content) {
if (!part.text) {
throw new Error("Multimodal prompt can't be rendered as text.");
}
out += part.text;
}
return out;
}
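// Example (sketch): for a single-message, text-only prompt such as the one in
// the header example, renderText() concatenates the rendered text parts into
// one string; it throws for multi-message or multimodal prompts. `hello` is
// the illustrative prompt from that sketch.
//
//   const text = hello.renderText({ name: "Ada" });
//   // => "Say hello to Ada in a friendly tone."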
/**
* Renders the prompt template into an array of messages.
*
* @param input User input to the prompt template.
* @param options Optional context and/or history for the prompt template.
* @returns an array of messages representing an exchange between a user and a model.
*/
renderMessages(input, options) {
var _a, _b, _c, _d;
let sessionStateData = void 0;
if ((0, import_session.getCurrentSession)()) {
sessionStateData = { state: (_a = (0, import_session.getCurrentSession)()) == null ? void 0 : _a.state };
}
input = (0, import_schema.parseSchema)(input, {
schema: (_b = this.input) == null ? void 0 : _b.schema,
jsonSchema: (_c = this.input) == null ? void 0 : _c.jsonSchema
});
return this._render(
__spreadValues(__spreadValues({}, (_d = this.input) == null ? void 0 : _d.default), input),
options,
sessionStateData
);
}
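// Example (sketch): renderMessages() returns the rendered exchange as an
// array of { role, content } messages, optionally threading in prior history
// and documents. `hello` and `priorTurns` are illustrative.
//
//   const messages = hello.renderMessages(
//     { name: "Ada" },
//     { messages: priorTurns, docs: [] }
//   );
//   // e.g. [{ role: "user", content: [{ text: "Say hello to Ada in a friendly tone." }] }]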
toJSON() {
return __spreadProps(__spreadValues({}, (0, import_metadata.toFrontmatter)(this)), { template: this.template });
}
define(options) {
var _a, _b, _c;
this._promptAction = (0, import_ai.definePrompt)(
this.registry,
{
name: (0, import_registry2.registryDefinitionKey)(this.name, this.variant, options == null ? void 0 : options.ns),
description: (_a = options == null ? void 0 : options.description) != null ? _a : this.description,
inputSchema: (_b = this.input) == null ? void 0 : _b.schema,
inputJsonSchema: (_c = this.input) == null ? void 0 : _c.jsonSchema,
metadata: {
type: "prompt",
prompt: this.toJSON()
}
},
(input) => __async(this, null, function* () {
return (0, import_ai.toGenerateRequest)(this.registry, this.render({ input }));
})
);
}
get promptAction() {
return this._promptAction;
}
_generateOptions(options) {
var _a, _b, _c, _d, _e, _f;
const messages = this.renderMessages(options.input, {
messages: options.messages,
docs: options.docs
});
let renderedPrompt;
let renderedMessages;
if (messages.length > 0 && messages[messages.length - 1].role === "user") {
renderedPrompt = messages[messages.length - 1].content;
renderedMessages = messages.slice(0, messages.length - 1);
} else {
renderedPrompt = void 0;
renderedMessages = messages;
}
return {
model: options.model || this.model,
config: __spreadValues(__spreadValues({}, this.config), options.config),
messages: renderedMessages,
prompt: renderedPrompt,
docs: options.docs,
output: {
format: ((_a = options.output) == null ? void 0 : _a.format) || ((_b = this.output) == null ? void 0 : _b.format) || void 0,
schema: ((_c = options.output) == null ? void 0 : _c.schema) || ((_d = this.output) == null ? void 0 : _d.schema),
jsonSchema: ((_e = options.output) == null ? void 0 : _e.jsonSchema) || ((_f = this.output) == null ? void 0 : _f.jsonSchema)
},
tools: (options.tools || []).concat(this.tools || []),
streamingCallback: options.streamingCallback,
returnToolRequests: options.returnToolRequests,
use: options.use
};
}
/**
* Renders the prompt template based on user input.
*
* @param opt Options for the prompt template, including user input variables and custom model configuration options.
* @returns a `GenerateOptions` object to be used with the `generate()` function from @genkit-ai/ai.
*/
render(opt) {
return this._generateOptions(opt);
}
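// Example (sketch): render() produces a GenerateOptions object that can be
// passed to generate() from @genkit-ai/ai, which is useful when you want to
// adjust the options before calling the model. `hello` is illustrative.
//
//   const opts = hello.render({ input: { name: "Ada" }, config: { temperature: 0.7 } });
//   const response = await generate(registry, opts); // generate() from @genkit-ai/ai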
renderInNewSpan(opt) {
return __async(this, null, function* () {
const spanName = this.variant ? `${this.name}.${this.variant}` : this.name;
return (0, import_tracing.runInNewSpan)(
{
metadata: {
name: spanName,
input: opt
},
labels: {
[import_tracing.SPAN_TYPE_ATTR]: "dotprompt"
}
},
(metadata) => __async(this, null, function* () {
(0, import_tracing.setCustomMetadataAttribute)("prompt_fingerprint", this.hash);
const generateOptions = this._generateOptions(opt);
metadata.output = generateOptions;
return generateOptions;
})
);
});
}
/**
* Generates a response by rendering the prompt template with given user input and then calling the model.
*
* @param opt Options for the prompt template, including user input variables and custom model configuration options.
* @returns the model response as a promise of `GenerateResponse`.
*/
generate(opt) {
return __async(this, null, function* () {
const renderedOpts = yield this.renderInNewSpan(opt);
return (0, import_ai.generate)(this.registry, renderedOpts);
});
}
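// Example (sketch): generate() renders the template and calls the configured
// model in one step, resolving to a GenerateResponse. Config passed here is
// merged over the prompt's own config, and tools passed here are combined
// with the prompt's own tools.
//
//   const response = await hello.generate({
//     input: { name: "Ada" },
//     config: { temperature: 0.7 },
//   });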
/**
* Generates a streaming response by rendering the prompt template with given user input and then calling the model.
*
* @param opt Options for the prompt template, including user input variables and custom model configuration options.
* @returns the model response as a promise of `GenerateStreamResponse`.
*/
generateStream(opt) {
return __async(this, null, function* () {
const renderedOpts = yield this.renderInNewSpan(opt);
return (0, import_ai.generateStream)(this.registry, renderedOpts);
});
}
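// Example (sketch): generateStream() resolves to a GenerateStreamResponse.
// The exact streaming surface depends on the @genkit-ai/ai version in use,
// so treat the consumption pattern below as illustrative.
//
//   const streaming = await hello.generateStream({ input: { name: "Ada" } });
//   // consume streamed chunks, then await the final aggregated response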
}
class DotpromptRef {
constructor(name, options) {
this.name = name;
this.variant = options == null ? void 0 : options.variant;
this.dir = options == null ? void 0 : options.dir;
}
/** Loads the referenced prompt, caching it after the first load. */
loadPrompt(registry) {
return __async(this, null, function* () {
if (this._prompt) return this._prompt;
this._prompt = yield (0, import_registry2.lookupPrompt)(
registry,
this.name,
this.variant,
this.dir
);
return this._prompt;
});
}
/**
* Generates a response by rendering the prompt template with given user input and then calling the model.
*
* @param opt Options for the prompt template, including user input variables and custom model configuration options.
* @returns the model response as a promise of `GenerateResponse`.
*/
generate(registry, opt) {
return __async(this, null, function* () {
const prompt = yield this.loadPrompt(registry);
return prompt.generate(opt);
});
}
/**
* Renders the prompt template based on user input.
*
* @param opt Options for the prompt template, including user input variables and custom model configuration options.
* @returns a `GenerateOptions` object to be used with the `generate()` function from @genkit-ai/ai.
*/
render(registry, opt) {
return __async(this, null, function* () {
const prompt = yield this.loadPrompt(registry);
return prompt.render(opt);
});
}
}
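// Example (sketch): a DotpromptRef refers to a prompt by name without loading
// it up front; loadPrompt() looks it up in the registry (or in `dir`) on first
// use and caches the result. The name and variant below are illustrative.
//
//   const ref = new DotpromptRef("greeting", { variant: "formal" });
//   const response = await ref.generate(registry, { input: { name: "Ada" } });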
function defineDotprompt(registry, options, template) {
const prompt = new Dotprompt(registry, options, template);
prompt.define({ description: options.description });
return prompt;
}
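// Example (sketch): defineDotprompt() builds a Dotprompt from an inline
// template and registers it as a prompt action in one call. The name, model
// id, and schema below are illustrative; `z` is the Zod re-export from
// @genkit-ai/core.
//
//   const summarizer = defineDotprompt(
//     registry,
//     {
//       name: "summarizer",
//       model: "googleai/gemini-1.5-flash",
//       input: { schema: z.object({ text: z.string() }) },
//     },
//     "Summarize the following text in one sentence:\n\n{{text}}"
//   );
//   const response = await summarizer.generate({ input: { text: "..." } });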
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
Dotprompt,
DotpromptRef,
defineDotprompt
});
//# sourceMappingURL=prompt.js.map