llama-flow
The TypeScript-first prompt engineering toolkit for working with chat-based LLMs.
JavaScript
Object.defineProperty(exports, "__esModule", { value: true });
exports.sleep = exports.debug = void 0;
const debug_1 = require("debug");
// Namespaced loggers for the library, enabled via the DEBUG env var
// (e.g. DEBUG=llamaflow:*).
const error = (0, debug_1.debug)('llamaflow:error');
const log = (0, debug_1.debug)('llamaflow:log');
log.log = console.log.bind(console);
exports.debug = {
    error,
    log,
    // Write raw text to stdout, but only when the DEBUG pattern matches the
    // 'llamaflow:log' namespace.
    write: (t) => process.env.DEBUG &&
        'llamaflow:log'.match(process.env.DEBUG) &&
        process.stdout.write(t),
};
// Resolve after `delay` milliseconds.
function sleep(delay) {
    return new Promise((resolve) => {
        setTimeout(resolve, delay);
    });
}
exports.sleep = sleep;
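For context, here is a minimal sketch of how these two helpers might be consumed from application code. The import path 'llama-flow' and the retry scenario are assumptions for illustration; the file shown above may not be re-exported from the package root.

TypeScript
// Hypothetical usage sketch: import path and retry logic are assumed, not
// taken from the package's documented API.
import { debug, sleep } from 'llama-flow';

async function callWithRetry(fn: () => Promise<string>, attempts = 3): Promise<string> {
    for (let i = 0; i < attempts; i++) {
        try {
            return await fn();
        } catch (err) {
            // Log through the 'llamaflow:error' namespace (visible when
            // DEBUG=llamaflow:* is set).
            debug.error('attempt %d failed: %O', i + 1, err);
            // Back off for 500 ms before the next attempt.
            await sleep(500);
        }
    }
    throw new Error('all attempts failed');
}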