inference-server
Libraries and a server for building AI applications. Adapters to various native bindings allow local inference. Integrate it into your application, or run it as a microservice.
JavaScript
import path from 'node:path';
import chalk from 'chalk';

// Recursively renders a file tree as a flat list of printable lines:
// directories in blue, files in gray, sizes in yellow.
export function renderListView(tree, parentPath = '') {
    const output = [];
    for (const item of tree) {
        const currentPath = parentPath ? path.join(parentPath, item.name) : item.name;
        if (item.type === 'directory') {
            const pathSegments = path.posix.normalize(currentPath).split('/');
            if (pathSegments[0] === 'huggingface.co') {
                // For huggingface.co paths, only print the repo-level directory
                // (huggingface.co/<owner>/<repo>); intermediate levels are skipped.
                if (pathSegments.length === 3) {
                    output.push(`${chalk.blue(currentPath)} ${chalk.yellow(`(${item.sizeFormatted})`)}`);
                }
            }
            else {
                output.push(`${chalk.blue(currentPath)} ${chalk.yellow(`(${item.sizeFormatted})`)}`);
            }
            // Always descend into children, even when the directory itself was not printed.
            output.push(...renderListView(item.children, currentPath));
        }
        else {
            output.push(`${chalk.gray(currentPath)} ${chalk.yellow(`(${item.sizeFormatted})`)}`);
        }
    }
    return output;
}
//# sourceMappingURL=renderListView.js.map
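A minimal usage sketch. The tree shape (name, type, sizeFormatted, children) is inferred from the function above; the import path and the sample data below are hypothetical.

import { renderListView } from './renderListView.js';

// Hypothetical cache tree: only the repo-level huggingface.co directory is printed,
// the intermediate huggingface.co and owner directories are skipped.
const tree = [
    {
        name: 'huggingface.co',
        type: 'directory',
        sizeFormatted: '4.2 GB',
        children: [
            {
                name: 'example-org',
                type: 'directory',
                sizeFormatted: '4.2 GB',
                children: [
                    {
                        name: 'example-model',
                        type: 'directory',
                        sizeFormatted: '4.2 GB',
                        children: [
                            { name: 'model.gguf', type: 'file', sizeFormatted: '4.2 GB' },
                        ],
                    },
                ],
            },
        ],
    },
];

// Prints the repo directory in blue and the file in gray, each with its size.
console.log(renderListView(tree).join('\n'));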