/*
 * llama-flow — the TypeScript-first prompt engineering toolkit for working
 * with chat-based LLMs.
 *
 * (Scraped page metadata, preserved: Version: —; 38 lines (37 loc) • 1.52 kB; JavaScript)
 */
"use strict";
// TypeScript-emitted interop helper: reuse a pre-existing `__importDefault`
// bound on `this` when available; otherwise define one that normalizes a
// CommonJS module so its value is always reachable via `.default`.
var __importDefault = (this && this.__importDefault) || function (mod) {
    // A genuine ES module (flagged by the compiler) already has the right shape.
    if (mod && mod.__esModule) {
        return mod;
    }
    // Wrap plain CommonJS exports so `.default` points at the module value.
    return { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const zod_1 = require("zod");
const boolean_1 = __importDefault(require("./boolean"));
const bulletPoints_1 = __importDefault(require("./bulletPoints"));
const json_1 = __importDefault(require("./json"));
// Snapshot coverage for the three prompt builders: bullet points, boolean,
// and JSON. Each test renders a prompt and pins its exact text via snapshot.
describe('Prompt types', () => {
    it('Should build bullet prompt correctly', () => {
        // Exercise the bullet builder across its optional knobs:
        // defaults only, with an `amount`, and with `amount` plus `length`.
        const optionVariants = [
            {},
            { amount: 10 },
            { amount: 3, length: 140 },
        ];
        for (const extras of optionVariants) {
            const prompt = (0, bulletPoints_1.default)(Object.assign({
                message: 'What are the meanings of life?',
            }, extras));
            expect(prompt).toMatchSnapshot();
        }
    });
    it('Should build boolean prompt correctly', () => {
        const prompt = (0, boolean_1.default)({
            message: 'The iPhone 14 was released in 2012',
            promptRetries: 2,
        });
        expect(prompt).toMatchSnapshot();
    });
    it('Should build JSON prompt correctly', () => {
        // The zod schema describes the expected reply shape: an array of strings.
        const schema = zod_1.z.array(zod_1.z.string());
        const prompt = (0, json_1.default)({
            message: 'What are some good baby names? Respond as a JSON array, where each element in the array is one name.',
            schema: schema,
        });
        expect(prompt).toMatchSnapshot();
    });
});