llama-flow
The TypeScript-first prompt engineering toolkit for working with chat-based LLMs.
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.extractBulletPointsResponse = exports.extractJSONArrayResponse = exports.extractJSONObjectResponse = void 0;
// Pull a JSON object out of a free-form reply: greedily match from the first "{" to the last "}".
const extractJSONObjectResponse = (res) => res.match(/\{(.|\n)*\}/g)?.[0];
exports.extractJSONObjectResponse = extractJSONObjectResponse;
// Pull a JSON array out of a free-form reply: greedily match from the first "[" to the last "]".
const extractJSONArrayResponse = (res) => res.match(/\[(.|\n)*\]/g)?.[0];
exports.extractJSONArrayResponse = extractJSONArrayResponse;
// Split a bulleted reply into one string per line: trim whitespace, strip the
// first "- " marker on each line, and drop lines that end up empty.
const extractBulletPointsResponse = (res) => {
    return res
        .split('\n')
        .map((s) => s.trim().replace('- ', ''))
        .filter((s) => s.length > 0);
};
exports.extractBulletPointsResponse = extractBulletPointsResponse;
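
A minimal usage sketch of these extractors; the require path and the sample response strings are assumptions, not part of the library's documented API surface.

// Usage sketch (hypothetical module path and sample responses):
const {
    extractJSONObjectResponse,
    extractBulletPointsResponse,
} = require('llama-flow/dist/helpers/extraction');

// Recover a JSON object from a chatty model reply.
const raw = 'Sure! Here is the data:\n{ "name": "Ada", "age": 36 }\nAnything else?';
const json = extractJSONObjectResponse(raw); // '{ "name": "Ada", "age": 36 }'
console.log(json && JSON.parse(json));

// Turn a bulleted reply into an array of strings.
const items = extractBulletPointsResponse('- apples\n- bananas\n- pears');
console.log(items); // [ 'apples', 'bananas', 'pears' ]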