@genkit-ai/dotprompt
Genkit AI framework `.prompt` file format and management library.
import {
__async,
__objRest,
__spreadProps,
__spreadValues
} from "./chunk-RGHW4PYM.mjs";
import {
definePrompt,
generate,
generateStream,
toGenerateRequest
} from "@genkit-ai/ai";
import { getCurrentSession } from "@genkit-ai/ai/session";
import { GenkitError, z } from "@genkit-ai/core";
import { parseSchema } from "@genkit-ai/core/schema";
import {
runInNewSpan,
setCustomMetadataAttribute,
SPAN_TYPE_ATTR
} from "@genkit-ai/core/tracing";
import { createHash } from "crypto";
import fm from "front-matter";
import {
toFrontmatter,
toMetadata
} from "./metadata.js";
import { lookupPrompt, registryDefinitionKey } from "./registry.js";
import { compile } from "./template.js";
class Dotprompt {
constructor(registry, options, template, action) {
this.registry = registry;
this.name = options.name || "untitledPrompt";
this.description = options.description;
this.variant = options.variant;
this.model = options.model;
this.input = options.input || { schema: z.any() };
this.output = options.output;
this.tools = options.tools;
this.config = options.config;
this.template = template;
this.hash = createHash("sha256").update(JSON.stringify(this)).digest("hex");
this._render = compile(this.template, options);
this._promptAction = action;
}
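/**
* Parses a `.prompt` source string (optional YAML frontmatter followed
* by a Handlebars template body) into a Dotprompt.
*
* A minimal sketch of such a source (the model name and schema are
* illustrative, not taken from this file):
*
* @example
* const source = [
*   '---',
*   'model: googleai/gemini-1.5-flash',
*   'input:',
*   '  schema:',
*   '    name: string',
*   '---',
*   'Hello, {{name}}!',
* ].join('\n');
* const prompt = Dotprompt.parse(registry, 'greeting', source);
*/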
static parse(registry, name, source) {
try {
const fmResult = fm(source.trimStart(), {
allowUnsafe: false
});
return new Dotprompt(
registry,
__spreadProps(__spreadValues({}, toMetadata(registry, fmResult.attributes)), {
name
}),
fmResult.body
);
} catch (e) {
throw new GenkitError({
source: "Dotprompt",
status: "INVALID_ARGUMENT",
message: `Error parsing YAML frontmatter of '${name}' prompt: ${e.stack}`
});
}
}
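/**
* Reconstructs a Dotprompt from the metadata of a registered prompt
* action, remapping serialized `schema` fields to `jsonSchema`.
*/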
static fromAction(registry, action) {
var _b, _c, _d, _e;
const _a = action.__action.metadata.prompt, { template } = _a, options = __objRest(_a, ["template"]);
const pm = options;
if ((_b = pm.input) == null ? void 0 : _b.schema) {
pm.input.jsonSchema = (_c = options.input) == null ? void 0 : _c.schema;
delete pm.input.schema;
}
if ((_d = pm.output) == null ? void 0 : _d.schema) {
pm.output.jsonSchema = (_e = options.output) == null ? void 0 : _e.schema;
delete pm.output.schema;
}
const prompt = new Dotprompt(
registry,
options,
template,
action
);
return prompt;
}
/**
* Renders all of the prompt's text parts into a raw string.
*
* @param input User input to the prompt template.
* @param options Optional context and/or history for the prompt template.
* @returns all of the text parts concatenated into a string.
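*
* A minimal usage sketch, assuming `prompt` is a single-message,
* text-only Dotprompt with a `name` input variable (both illustrative):
*
* @example
* const text = prompt.renderText({ name: 'Ada' });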
*/
renderText(input, options) {
const result = this.renderMessages(input, options);
if (result.length !== 1) {
throw new Error("Multi-message prompt can't be rendered as text.");
}
let out = "";
for (const part of result[0].content) {
if (!part.text) {
throw new Error("Multimodal prompt can't be rendered as text.");
}
out += part.text;
}
return out;
}
/**
* Renders the prompt template into an array of messages.
*
* @param input User input to the prompt template.
* @param options Optional context and/or history for the prompt template.
* @returns an array of messages representing an exchange between a user and a model.
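*
* A minimal usage sketch (the input variable and the prior `history`
* messages are illustrative):
*
* @example
* const messages = prompt.renderMessages(
*   { name: 'Ada' },
*   { messages: history }
* );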
*/
renderMessages(input, options) {
var _a, _b, _c, _d;
let sessionStateData = void 0;
if (getCurrentSession()) {
sessionStateData = { state: (_a = getCurrentSession()) == null ? void 0 : _a.state };
}
input = parseSchema(input, {
schema: (_b = this.input) == null ? void 0 : _b.schema,
jsonSchema: (_c = this.input) == null ? void 0 : _c.jsonSchema
});
return this._render(
__spreadValues(__spreadValues({}, (_d = this.input) == null ? void 0 : _d.default), input),
options,
sessionStateData
);
}
toJSON() {
return __spreadProps(__spreadValues({}, toFrontmatter(this)), { template: this.template });
}
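/**
* Registers this prompt as a prompt action in the registry so it can be
* looked up and invoked by name (and variant).
*/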
define(options) {
var _a, _b, _c;
this._promptAction = definePrompt(
this.registry,
{
name: registryDefinitionKey(this.name, this.variant, options == null ? void 0 : options.ns),
description: (_a = options == null ? void 0 : options.description) != null ? _a : this.description,
inputSchema: (_b = this.input) == null ? void 0 : _b.schema,
inputJsonSchema: (_c = this.input) == null ? void 0 : _c.jsonSchema,
metadata: {
type: "prompt",
prompt: this.toJSON()
}
},
(input) => __async(this, null, function* () {
return toGenerateRequest(this.registry, this.render({ input }));
})
);
}
get promptAction() {
return this._promptAction;
}
_generateOptions(options) {
var _a, _b, _c, _d, _e, _f;
const messages = this.renderMessages(options.input, {
messages: options.messages,
docs: options.docs
});
let renderedPrompt;
let renderedMessages;
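// If the last rendered message is from the user, split it off as the
// `prompt` so it can be appended after any prior history.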
if (messages.length > 0 && messages[messages.length - 1].role === "user") {
renderedPrompt = messages[messages.length - 1].content;
renderedMessages = messages.slice(0, messages.length - 1);
} else {
renderedPrompt = void 0;
renderedMessages = messages;
}
return {
model: options.model || this.model,
config: __spreadValues(__spreadValues({}, this.config), options.config),
messages: renderedMessages,
prompt: renderedPrompt,
docs: options.docs,
output: {
format: ((_a = options.output) == null ? void 0 : _a.format) || ((_b = this.output) == null ? void 0 : _b.format) || void 0,
schema: ((_c = options.output) == null ? void 0 : _c.schema) || ((_d = this.output) == null ? void 0 : _d.schema),
jsonSchema: ((_e = options.output) == null ? void 0 : _e.jsonSchema) || ((_f = this.output) == null ? void 0 : _f.jsonSchema)
},
tools: (options.tools || []).concat(this.tools || []),
streamingCallback: options.streamingCallback,
returnToolRequests: options.returnToolRequests,
use: options.use
};
}
/**
* Renders the prompt template based on user input.
*
* @param opt Options for the prompt template, including user input variables and custom model configuration options.
* @returns a `GenerateOptions` object to be used with the `generate()` function from @genkit-ai/ai.
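*
* A minimal usage sketch (the input field is illustrative); `render()`
* is synchronous, so its result can be passed straight to `generate()`:
*
* @example
* const opts = prompt.render({ input: { name: 'Ada' } });
* const response = await generate(registry, opts);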
*/
render(opt) {
return this._generateOptions(opt);
}
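/**
* Renders the prompt template inside a new tracing span, recording the
* prompt fingerprint and the rendered `GenerateOptions` as span metadata.
*/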
renderInNewSpan(opt) {
return __async(this, null, function* () {
const spanName = this.variant ? `${this.name}.${this.variant}` : this.name;
return runInNewSpan(
{
metadata: {
name: spanName,
input: opt
},
labels: {
[SPAN_TYPE_ATTR]: "dotprompt"
}
},
(metadata) => __async(this, null, function* () {
setCustomMetadataAttribute("prompt_fingerprint", this.hash);
const generateOptions = this._generateOptions(opt);
metadata.output = generateOptions;
return generateOptions;
})
);
});
}
/**
* Generates a response by rendering the prompt template with given user input and then calling the model.
*
* @param opt Options for the prompt template, including user input variables and custom model configuration options.
* @returns the model response as a promise of `GenerateResponse`.
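*
* A minimal usage sketch (the input and config fields are illustrative):
*
* @example
* const response = await prompt.generate({
*   input: { name: 'Ada' },
*   config: { temperature: 0.7 },
* });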
*/
generate(opt) {
return __async(this, null, function* () {
const renderedOpts = yield this.renderInNewSpan(opt);
return generate(this.registry, renderedOpts);
});
}
/**
* Generates a streaming response by rendering the prompt template with given user input and then calling the model.
*
* @param opt Options for the prompt template, including user input variables and custom model configuration options.
* @returns the model response as a promise of `GenerateStreamResponse`.
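*
* A minimal usage sketch, assuming the `stream` and `response`
* accessors exposed by this version of @genkit-ai/ai (the input field
* is illustrative):
*
* @example
* const { stream, response } = await prompt.generateStream({
*   input: { name: 'Ada' },
* });
* for await (const chunk of stream) {
*   // handle each streamed chunk
* }
* const final = await response;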
*/
generateStream(opt) {
return __async(this, null, function* () {
const renderedOpts = yield this.renderInNewSpan(opt);
return generateStream(this.registry, renderedOpts);
});
}
}
class DotpromptRef {
constructor(name, options) {
this.name = name;
this.variant = options == null ? void 0 : options.variant;
this.dir = options == null ? void 0 : options.dir;
}
/** Loads the referenced prompt, caching it after the first lookup. */
loadPrompt(registry) {
return __async(this, null, function* () {
if (this._prompt) return this._prompt;
this._prompt = yield lookupPrompt(
registry,
this.name,
this.variant,
this.dir
);
return this._prompt;
});
}
/**
* Generates a response by rendering the prompt template with given user input and then calling the model.
*
* @param opt Options for the prompt template, including user input variables and custom model configuration options.
* @returns the model response as a promise of `GenerateResponse`.
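*
* A minimal usage sketch (the prompt name and input are illustrative):
*
* @example
* const ref = new DotpromptRef('greeting');
* const response = await ref.generate(registry, {
*   input: { name: 'Ada' },
* });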
*/
generate(registry, opt) {
return __async(this, null, function* () {
const prompt = yield this.loadPrompt(registry);
return prompt.generate(opt);
});
}
/**
* Renders the prompt template based on user input.
*
* @param opt Options for the prompt template, including user input variables and custom model configuration options.
* @returns a `GenerateOptions` object to be used with the `generate()` function from @genkit-ai/ai.
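*
* A minimal usage sketch (the input field is illustrative):
*
* @example
* const opts = await ref.render(registry, { input: { name: 'Ada' } });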
*/
render(registry, opt) {
return __async(this, null, function* () {
const prompt = yield this.loadPrompt(registry);
return prompt.render(opt);
});
}
}
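/**
* Defines a Dotprompt from an inline template and registers it in the
* given registry.
*
* A minimal usage sketch (the name, model, schema, and template are
* illustrative):
*
* @example
* const greeting = defineDotprompt(
*   registry,
*   {
*     name: 'greeting',
*     model: 'googleai/gemini-1.5-flash',
*     input: { schema: z.object({ name: z.string() }) },
*   },
*   'Hello, {{name}}!'
* );
* const response = await greeting.generate({ input: { name: 'Ada' } });
*/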
function defineDotprompt(registry, options, template) {
const prompt = new Dotprompt(registry, options, template);
prompt.define({ description: options.description });
return prompt;
}
export {
Dotprompt,
DotpromptRef,
defineDotprompt
};
//# sourceMappingURL=prompt.mjs.map