UNPKG

signalk-parquet

Version:

SignalK plugin and webapp that archives SignalK data to Parquet files, with a regimen control system, advanced querying, Claude-integrated AI analysis, spatial capabilities, and a REST API.

33 lines 1.43 kB
import { DataRecord, ParquetWriterOptions } from './types';
import { SchemaService } from './schema-service';
/**
 * Type declarations for ParquetWriter: persists batches of DataRecord to
 * disk (Parquet, JSON, or CSV), infers Parquet schemas from records,
 * merges/validates files, and consolidates a day's output.
 *
 * NOTE(review): this is a generated .d.ts — only signatures are visible
 * here. Behavioral notes below are inferred from member names; confirm
 * against the implementation in parquet-writer.ts.
 */
export declare class ParquetWriter {
    private format;
    private app?;
    private schemaService?;
    private directoryScanner;
    constructor(options?: ParquetWriterOptions);
    /** Returns the writer's SchemaService, if one was configured. */
    getSchemaService(): SchemaService | undefined;
    /**
     * Writes `records` to `filepath` — presumably dispatching on the
     * private `format` field (verify) — and resolves with the path of
     * the file actually written.
     */
    writeRecords(filepath: string, records: DataRecord[]): Promise<string>;
    /**
     * Find the base data directory from a nested path.
     * This helps invalidate the right cache entry.
     */
    private findBaseDataDir;
    /** Writes records as JSON; resolves with the written file path. */
    writeJSON(filepath: string, records: DataRecord[]): Promise<string>;
    /** Writes records as CSV; resolves with the written file path. */
    writeCSV(filepath: string, records: DataRecord[]): Promise<string>;
    /** Writes records as Parquet; resolves with the written file path. */
    writeParquet(filepath: string, records: DataRecord[]): Promise<string>;
    /**
     * Builds a Parquet schema from the given records.
     * @param currentPath optional path used as a type hint — TODO confirm
     *   semantics against the implementation.
     */
    createParquetSchema(records: DataRecord[], currentPath?: string): Promise<any>;
    // Private type-inference helpers for schema construction (names only
    // visible here; see implementation for contracts).
    private getTypeForEmptyColumn;
    private getTypeForExplodedField;
    private getTypeFromOtherFiles;
    private inferTypeFromFieldName;
    private extractOutputDirectory;
    /**
     * Returns a plain object derived from `record`, shaped to match
     * `schema` for writing — presumably coercing values; verify.
     */
    prepareRecordForParquet(record: DataRecord, schema: any): {
        [key: string]: any;
    };
    /**
     * Merges `sourceFiles` into `targetFile`; resolves with a count
     * (presumably records or files merged — confirm against caller).
     */
    mergeFiles(sourceFiles: string[], targetFile: string): Promise<number>;
    // Validation + quarantine logging for suspect Parquet files.
    private validateParquetFile;
    private logQuarantine;
    /**
     * Consolidates one day's files under `dataDir` for `date`, optionally
     * filtered/named by `filenamePrefix`; resolves with a count.
     */
    consolidateDaily(dataDir: string, date: Date, filenamePrefix?: string): Promise<number>;
}
//# sourceMappingURL=parquet-writer.d.ts.map