typeref-mcp
TypeScript type inference and symbol navigation MCP server for Claude Code
import * as fs from 'fs/promises';
import * as path from 'path';
import { parquetRead, asyncBufferFromFile } from 'hyparquet';
import { parquetWriteFile } from 'hyparquet-writer';
import { ProjectTemplate } from '../utils/ProjectTemplate.js';
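/**
 * Persists a TypeScript project index (symbols, types, modules, dependencies)
 * to Parquet files under a `.typeref/` directory inside the project, with
 * JSON sidecar files for metadata and cache invalidation.
 *
 * @example
 * // A minimal usage sketch; assumes `logger` exposes debug/info/warn/error
 * // and `index` matches the shape produced by the indexer (Maps keyed by name).
 * // const cache = new ParquetCache(logger);
 * // await cache.saveProjectIndex(index);
 * // if (await cache.isCacheValid('/path/to/project')) {
 * //     const restored = await cache.loadProjectIndex('/path/to/project');
 * // }
 */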
export class ParquetCache {
    logger;
    CACHE_VERSION = '1.0.0';
    CACHE_DIR = '.typeref';
    TYPESCRIPT_CACHE_DIR = 'cache/typescript';
    METADATA_DIR = 'cache/metadata';
    LOGS_DIR = 'logs';
    constructor(logger) {
        this.logger = logger;
    }
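    /**
     * Creates the `.typeref/` directory tree (cache, metadata, logs), a
     * `.gitignore` that excludes cache and log files, and a `project.yml`
     * config. Existing `.gitignore`/`project.yml` files are left untouched.
     */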
    async initializeProjectStructure(projectPath) {
        try {
            await fs.access(projectPath);
        }
        catch (error) {
            throw new Error(`Project path does not exist or is not accessible: ${projectPath}`);
        }
        const typerefDir = path.join(projectPath, this.CACHE_DIR);
        const dirs = [
            typerefDir,
            path.join(typerefDir, 'cache'),
            path.join(typerefDir, this.TYPESCRIPT_CACHE_DIR),
            path.join(typerefDir, this.METADATA_DIR),
            path.join(typerefDir, this.LOGS_DIR),
        ];
        for (const dir of dirs) {
            await fs.mkdir(dir, { recursive: true });
        }
        const gitignorePath = path.join(typerefDir, '.gitignore');
        const gitignoreContent = `# TypeRef cache files
/cache/
/logs/
*.log
*.tmp
`;
        try {
            await fs.access(gitignorePath);
        }
        catch {
            await fs.writeFile(gitignorePath, gitignoreContent);
        }
        const projectName = path.basename(projectPath);
        const projectConfigPath = path.join(typerefDir, 'project.yml');
        try {
            await fs.access(projectConfigPath);
        }
        catch {
            await ProjectTemplate.createProjectConfig(projectConfigPath, {
                PROJECT_NAME: projectName,
                VERSION: this.CACHE_VERSION,
                CREATED_DATE: new Date().toISOString()
            });
        }
        this.logger.info(`Initialized TypeRef project structure at ${typerefDir}`);
    }
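    /**
     * Returns the version-suffixed paths of all cache artifacts: four Parquet
     * files plus two JSON metadata files. Embedding CACHE_VERSION in the file
     * names means a version bump naturally orphans old cache files.
     */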
    getCacheFilePaths(projectPath) {
        const baseDir = path.join(projectPath, this.CACHE_DIR, this.TYPESCRIPT_CACHE_DIR);
        const metaDir = path.join(projectPath, this.CACHE_DIR, this.METADATA_DIR);
        return {
            symbols: path.join(baseDir, `symbols_v${this.CACHE_VERSION}.parquet`),
            types: path.join(baseDir, `types_v${this.CACHE_VERSION}.parquet`),
            modules: path.join(baseDir, `modules_v${this.CACHE_VERSION}.parquet`),
            dependencies: path.join(baseDir, `dependencies_v${this.CACHE_VERSION}.parquet`),
            fileHashes: path.join(metaDir, `file_hashes_v${this.CACHE_VERSION}.json`),
            projectInfo: path.join(metaDir, `project_info_v${this.CACHE_VERSION}.json`),
        };
    }
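    /**
     * Builds a map of file path → change marker for every TypeScript file in
     * the project. Note: despite the name, this uses mtime timestamps rather
     * than content hashes, so a rewrite that preserves mtime goes unnoticed.
     */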
    async getFileHashes(projectPath) {
        const fileHashes = new Map();
        try {
            const files = await this.findTypeScriptFiles(projectPath);
            for (const filePath of files) {
                try {
                    const stats = await fs.stat(filePath);
                    fileHashes.set(filePath, stats.mtime.toISOString());
                }
                catch (error) {
                    this.logger.debug(`Could not stat file ${filePath}: ${error}`);
                }
            }
        }
        catch (error) {
            this.logger.warn(`Failed to get file hashes for ${projectPath}: ${error}`);
        }
        return fileHashes;
    }
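    /**
     * Recursively collects `.ts`/`.tsx` files, skipping common build and
     * dependency directories. Unreadable directories are logged and skipped.
     */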
    async findTypeScriptFiles(projectPath) {
        const files = [];
        const excludeDirs = ['node_modules', 'dist', 'build', '.git', 'coverage'];
        const scanDir = async (dirPath) => {
            try {
                const entries = await fs.readdir(dirPath, { withFileTypes: true });
                for (const entry of entries) {
                    const fullPath = path.join(dirPath, entry.name);
                    if (entry.isDirectory()) {
                        if (!excludeDirs.includes(entry.name)) {
                            await scanDir(fullPath);
                        }
                    }
                    else if (entry.isFile() && /\.(ts|tsx)$/.test(entry.name)) {
                        files.push(fullPath);
                    }
                }
            }
            catch (error) {
                this.logger.debug(`Could not scan directory ${dirPath}: ${error}`);
            }
        };
        await scanDir(projectPath);
        return files;
    }
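    /**
     * Flattens the symbol Map (name → SymbolInfo[]) into one row per symbol
     * occurrence. Nested values (locations, property lists, etc.) are stored
     * as JSON strings so each Parquet column holds a flat scalar; the three
     * converters below follow the same scheme.
     */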
    symbolsToParquetData(symbols) {
        const data = [];
        for (const [symbolName, symbolInfos] of symbols) {
            for (const symbolInfo of symbolInfos) {
                data.push({
                    symbolName,
                    name: symbolInfo.name,
                    kind: symbolInfo.kind,
                    type: symbolInfo.type,
                    location: JSON.stringify(symbolInfo.location),
                    documentation: symbolInfo.documentation ?? null,
                    isExported: symbolInfo.isExported,
                    module: symbolInfo.module ?? null,
                    signature: symbolInfo.signature ?? null,
                });
            }
        }
        return data;
    }
    typesToParquetData(types) {
        const data = [];
        for (const [typeName, typeInfo] of types) {
            data.push({
                typeName,
                name: typeInfo.name,
                kind: typeInfo.kind,
                properties: JSON.stringify(typeInfo.properties),
                methods: typeInfo.methods ? JSON.stringify(typeInfo.methods) : null,
                extends: typeInfo.extends ? JSON.stringify(typeInfo.extends) : null,
                implements: typeInfo.implements ? JSON.stringify(typeInfo.implements) : null,
                location: JSON.stringify(typeInfo.location),
                documentation: typeInfo.documentation ?? null,
                typeParameters: typeInfo.typeParameters ? JSON.stringify(typeInfo.typeParameters) : null,
            });
        }
        return data;
    }
    modulesToParquetData(modules) {
        const data = [];
        for (const [modulePath, moduleInfo] of modules) {
            data.push({
                modulePath,
                path: moduleInfo.path,
                exports: JSON.stringify(moduleInfo.exports),
                imports: JSON.stringify(moduleInfo.imports),
                dependencies: JSON.stringify(moduleInfo.dependencies),
            });
        }
        return data;
    }
    dependenciesToParquetData(dependencies) {
        const data = [];
        for (const [fromModule, toModules] of dependencies) {
            data.push({
                fromModule,
                toModules: JSON.stringify(toModules),
            });
        }
        return data;
    }
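    /**
     * Serializes the full project index: the four Parquet files are written
     * in parallel, then file-hash metadata and a project summary are written
     * as JSON so isCacheValid() can cheaply detect staleness later.
     */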
    async saveProjectIndex(index) {
        try {
            const projectPath = index.projectPath;
            await this.initializeProjectStructure(projectPath);
            const cachePaths = this.getCacheFilePaths(projectPath);
            this.logger.info(`Saving index to Parquet files: ${Object.keys(cachePaths).length} files`);
            const symbolsData = this.symbolsToParquetData(index.symbols);
            const typesData = this.typesToParquetData(index.types);
            const modulesData = this.modulesToParquetData(index.modules);
            const dependenciesData = this.dependenciesToParquetData(index.dependencies);
            await Promise.all([
                this.writeParquetFile(cachePaths.symbols, symbolsData),
                this.writeParquetFile(cachePaths.types, typesData),
                this.writeParquetFile(cachePaths.modules, modulesData),
                this.writeParquetFile(cachePaths.dependencies, dependenciesData),
            ]);
            const fileHashes = await this.getFileHashes(projectPath);
            const metadata = {
                projectPath,
                lastIndexed: index.lastIndexed,
                fileCount: fileHashes.size,
                fileHashes,
                version: this.CACHE_VERSION,
                cacheFormat: 'parquet',
            };
            const projectInfo = {
                projectName: path.basename(projectPath),
                language: 'typescript',
                version: this.CACHE_VERSION,
                lastIndexed: index.lastIndexed.toISOString(),
                fileCount: fileHashes.size,
                symbolCount: symbolsData.length,
                typeCount: typesData.length,
            };
            await fs.writeFile(cachePaths.fileHashes, JSON.stringify({
                ...metadata,
                fileHashes: Object.fromEntries(metadata.fileHashes)
            }, null, 2));
            await fs.writeFile(cachePaths.projectInfo, JSON.stringify(projectInfo, null, 2));
            this.logger.info(`Saved Parquet cache for ${projectPath}: ${symbolsData.length} symbols, ${typesData.length} types, ${modulesData.length} modules`);
        }
        catch (error) {
            this.logger.error(`Failed to save Parquet cache for ${index.projectPath}: ${error}`);
            throw error;
        }
    }
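    /**
     * Writes an array of uniform row objects as a Parquet file via
     * hyparquet-writer, transposing rows into the column-oriented
     * `{ name, data }` shape the writer expects. Empty datasets are skipped.
     */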
    async writeParquetFile(filePath, data) {
        if (data.length === 0) {
            this.logger.debug(`Skipping empty data for ${filePath}`);
            return;
        }
        try {
            const dir = path.dirname(filePath);
            await fs.mkdir(dir, { recursive: true });
            // Transpose row objects into hyparquet-writer's column layout.
            const columns = Object.keys(data[0]);
            const columnData = columns.map(columnName => ({
                name: columnName,
                data: data.map(row => row[columnName])
            }));
            await parquetWriteFile({
                filename: filePath,
                columnData
            });
            this.logger.debug(`Wrote ${data.length} records to ${filePath}`);
        }
        catch (error) {
            this.logger.error(`Failed to write Parquet file ${filePath}:`, error);
            throw error;
        }
    }
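    /**
     * A cache is valid only if every expected cache file exists, the cache
     * version matches, and no TypeScript file was added, removed, or modified
     * (by mtime) since the last index. Any failure invalidates the cache.
     */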
    async isCacheValid(projectPath) {
        try {
            const cachePaths = this.getCacheFilePaths(projectPath);
            await Promise.all([
                fs.access(cachePaths.symbols),
                fs.access(cachePaths.types),
                fs.access(cachePaths.modules),
                fs.access(cachePaths.dependencies),
                fs.access(cachePaths.fileHashes),
                fs.access(cachePaths.projectInfo),
            ]);
            const metadataContent = await fs.readFile(cachePaths.fileHashes, 'utf8');
            const metadata = JSON.parse(metadataContent);
            if (metadata.version !== this.CACHE_VERSION) {
                this.logger.info(`Cache version mismatch for ${projectPath}, invalidating`);
                return false;
            }
            const currentFileHashes = await this.getFileHashes(projectPath);
            const cachedFileHashes = new Map(Object.entries(metadata.fileHashes));
            if (currentFileHashes.size !== cachedFileHashes.size) {
                this.logger.info(`File count changed for ${projectPath} (${cachedFileHashes.size} → ${currentFileHashes.size}), invalidating cache`);
                return false;
            }
            for (const [filePath, currentHash] of currentFileHashes) {
                const cachedHash = cachedFileHashes.get(filePath);
                if (!cachedHash || cachedHash !== currentHash) {
                    this.logger.info(`File modified: ${filePath}, invalidating cache`);
                    return false;
                }
            }
            this.logger.info(`Cache is valid for ${projectPath} (${currentFileHashes.size} files)`);
            return true;
        }
        catch (error) {
            this.logger.debug(`Cache validation failed for ${projectPath}: ${error}`);
            return false;
        }
    }
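    /**
     * Rebuilds the in-memory index from the Parquet files, reversing the
     * JSON-string encoding applied on save. Returns null on any failure so
     * callers can fall back to a fresh re-index.
     */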
    async loadProjectIndex(projectPath) {
        try {
            const cachePaths = this.getCacheFilePaths(projectPath);
            this.logger.info(`Loading index from Parquet files: ${projectPath}`);
            const [symbolsData, typesData, modulesData, dependenciesData, projectInfo] = await Promise.all([
                this.readParquetFile(cachePaths.symbols),
                this.readParquetFile(cachePaths.types),
                this.readParquetFile(cachePaths.modules),
                this.readParquetFile(cachePaths.dependencies),
                fs.readFile(cachePaths.projectInfo, 'utf8').then(content => JSON.parse(content)),
            ]);
            this.logger.debug(`Loaded raw data: symbols=${symbolsData.length}, types=${typesData.length}, modules=${modulesData.length}`);
            const symbols = new Map();
            const types = new Map();
            const modules = new Map();
            const dependencies = new Map();
            for (const row of symbolsData) {
                try {
                    const symbolName = row.symbolName;
                    const symbolInfo = {
                        name: row.name,
                        kind: row.kind,
                        type: row.type,
                        location: JSON.parse(row.location),
                        documentation: row.documentation || undefined,
                        isExported: row.isExported,
                        module: row.module || undefined,
                        signature: row.signature || undefined,
                    };
                    if (!symbols.has(symbolName)) {
                        symbols.set(symbolName, []);
                    }
                    symbols.get(symbolName).push(symbolInfo);
                }
                catch (error) {
                    this.logger.error(`Failed to parse symbol row: ${JSON.stringify(row)}, error: ${error}`);
                    throw error;
                }
            }
            for (const row of typesData) {
                try {
                    const typeInfo = {
                        name: row.name,
                        kind: row.kind,
                        properties: JSON.parse(row.properties),
                        methods: row.methods ? JSON.parse(row.methods) : undefined,
                        extends: row.extends ? JSON.parse(row.extends) : undefined,
                        implements: row.implements ? JSON.parse(row.implements) : undefined,
                        location: JSON.parse(row.location),
                        documentation: row.documentation || undefined,
                        typeParameters: row.typeParameters ? JSON.parse(row.typeParameters) : undefined,
                    };
                    types.set(row.typeName, typeInfo);
                }
                catch (error) {
                    this.logger.error(`Failed to parse type row: ${JSON.stringify(row)}, error: ${error}`);
                    throw error;
                }
            }
            for (const row of modulesData) {
                try {
                    const moduleInfo = {
                        path: row.path,
                        exports: JSON.parse(row.exports),
                        imports: JSON.parse(row.imports),
                        dependencies: JSON.parse(row.dependencies),
                    };
                    modules.set(row.modulePath, moduleInfo);
                }
                catch (error) {
                    this.logger.error(`Failed to parse module row: ${JSON.stringify(row)}, error: ${error}`);
                    throw error;
                }
            }
            for (const row of dependenciesData) {
                try {
                    dependencies.set(row.fromModule, JSON.parse(row.toModules));
                }
                catch (error) {
                    this.logger.error(`Failed to parse dependency row: ${JSON.stringify(row)}, error: ${error}`);
                    throw error;
                }
            }
            const index = {
                projectPath,
                symbols,
                types,
                modules,
                dependencies,
                lastIndexed: new Date(projectInfo.lastIndexed),
            };
            this.logger.info(`Loaded Parquet cache for ${projectPath}: ${symbols.size} symbol groups, ${types.size} types, ${modules.size} modules`);
            return index;
        }
        catch (error) {
            this.logger.debug(`Failed to load Parquet cache for ${projectPath}: ${error}`);
            return null;
        }
    }
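    /**
     * Reads a Parquet file into row objects. If hyparquet reports no schema
     * and yields row arrays, column names are recovered from the versioned
     * file name as a fallback; otherwise rows are passed through as-is.
     */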
    async readParquetFile(filePath) {
        try {
            const file = await asyncBufferFromFile(filePath);
            const results = [];
            let columns = [];
            await parquetRead({
                file,
                onComplete: (data, metadata) => {
                    if (metadata && metadata.schema && data.length > 0) {
                        columns = metadata.schema.map((col) => col.name || col);
                    }
                    else if (data.length > 0 && Array.isArray(data[0])) {
                        if (filePath.includes('symbols_v')) {
                            columns = ['symbolName', 'name', 'kind', 'type', 'location', 'documentation', 'isExported', 'module', 'signature'];
                        }
                        else if (filePath.includes('types_v')) {
                            columns = ['typeName', 'name', 'kind', 'properties', 'methods', 'extends', 'implements', 'location', 'documentation', 'typeParameters'];
                        }
                        else if (filePath.includes('modules_v')) {
                            columns = ['modulePath', 'path', 'exports', 'imports', 'dependencies'];
                        }
                        else if (filePath.includes('dependencies_v')) {
                            columns = ['fromModule', 'toModules'];
                        }
                    }
                    if (columns.length > 0 && data.length > 0 && Array.isArray(data[0])) {
                        const objectData = data.map((row) => {
                            const obj = {};
                            columns.forEach((colName, index) => {
                                obj[colName] = row[index];
                            });
                            return obj;
                        });
                        results.push(...objectData);
                    }
                    else {
                        results.push(...data);
                    }
                }
            });
            return results;
        }
        catch (error) {
            this.logger.error(`Failed to read Parquet file ${filePath}:`, error);
            throw error;
        }
    }
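    /**
     * Removes the entire `.typeref/` directory for the project. Failures are
     * logged at debug level only, since a missing cache is not an error.
     */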
    async clearProjectCache(projectPath) {
        try {
            const cacheDir = path.join(projectPath, this.CACHE_DIR);
            await fs.rm(cacheDir, { recursive: true, force: true });
            this.logger.info(`Cleared cache for ${projectPath}`);
        }
        catch (error) {
            this.logger.debug(`Failed to clear cache for ${projectPath}: ${error}`);
        }
    }
}
//# sourceMappingURL=ParquetCache.js.map