llama-flow
The TypeScript-first prompt engineering toolkit for working with chat-based LLMs.
JavaScript
// Interop helper emitted by the TypeScript compiler for default imports.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.bulletPoints = exports.json = exports.boolean = exports.text = void 0;
// Format-specific helpers imported from sibling modules.
const boolean_1 = __importDefault(require("./boolean"));
const bulletPoints_1 = __importDefault(require("./bulletPoints"));
const json_1 = __importDefault(require("./json"));
// Wrap a bare string in a `{ message }` object; pass objects through unchanged.
const text = (p) => typeof p === 'string' ? { message: p } : p;
exports.text = text;
// The remaining helpers delegate to their format-specific default exports.
const boolean = (p) => (0, boolean_1.default)(p);
exports.boolean = boolean;
const json = (p) => (0, json_1.default)(p);
exports.json = json;
const bulletPoints = (p) => (0, bulletPoints_1.default)(p);
exports.bulletPoints = bulletPoints;
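For context, here is a self-contained TypeScript sketch of the pattern this compiled file implements: `text` normalizes a bare prompt string into an options object, while the remaining exports delegate to format-specific modules. This is not the package's actual source; the `Prompt` type and the inline stand-ins for `./boolean`, `./json`, and `./bulletPoints` are assumptions for illustration only.

TypeScript
// Sketch only: the real module imports these helpers from sibling files
// (./boolean, ./json, ./bulletPoints); typed stand-ins are used here so
// the example compiles on its own.
type Prompt = { message: string };

const booleanImpl = (p: Prompt) => p;      // stand-in for ./boolean
const jsonImpl = (p: Prompt) => p;         // stand-in for ./json
const bulletPointsImpl = (p: Prompt) => p; // stand-in for ./bulletPoints

// Wrap a bare string in a `{ message }` object; pass objects through unchanged.
export const text = (p: string | Prompt): Prompt =>
  typeof p === 'string' ? { message: p } : p;

// The remaining helpers delegate to their format-specific implementations.
export const boolean = (p: Prompt) => booleanImpl(p);
export const json = (p: Prompt) => jsonImpl(p);
export const bulletPoints = (p: Prompt) => bulletPointsImpl(p);

Called with a string, text('Is the sky blue?') evaluates to { message: 'Is the sky blue?' }; called with an object it is returned as-is, so simple call sites stay terse while richer option objects remain possible.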