UNPKG

@codeplaydata/datasus

Version:

This application decompresses the DATASUS micro data and serves as a gateway class.

109 lines (105 loc) 3.6 kB
// @filename: Dbc.ts
/*
 * Copyright 2025 Pedro Paulo Teixeira dos Santos
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import { statSync, unlink } from "node:fs";
import { tmpdir } from "node:os";
import { parse } from "node:path";
import { DBFFile } from 'dbffile';
import { dbc2dbf } from "@codeplaydata/dbc2dbf";

/**
 * Wrapper around dbffile to handle DATASUS DBC files conversion and reading.
 *
 * It converts the .dbc file to a temporary .dbf (using @codeplaydata/dbc2dbf),
 * exposes basic metadata (record count and fields) and provides iteration helpers.
 */
export class Dbc {
    dbf;
    io;
    /** Total number of records in the dataset. */
    size;
    /** Field descriptors as reported by dbffile. */
    fields;

    /**
     * @param dbf Opened DBFFile instance.
     * @param io Input/output paths used for conversion and temp file storage.
     */
    constructor(dbf, io) {
        this.dbf = dbf;
        this.io = io;
        this.size = dbf.recordCount;
        this.fields = dbf.fields;
        /*
        if (process.platform !== 'linux') {
            OSNotSupported.exception();
        }
        */
    }

    /**
     * Loads a .dbc file, converting it to a temporary .dbf if necessary.
     * @param inputFile Full path to the .dbc file.
     * @returns A ready-to-use Dbc wrapper instance.
     */
    static async load(inputFile) {
        const inputFilePath = parse(inputFile);
        const io = {
            input: inputFile,
            output: `${tmpdir()}/${inputFilePath.name}${inputFilePath.ext}`
        };
        // statSync throws when the temp .dbf does not exist yet; only then
        // do we pay for the conversion. NOTE(review): this is a check-then-act
        // pattern — concurrent loads of the same file may convert twice.
        try {
            statSync(io.output);
        }
        catch (error) {
            dbc2dbf(io);
        }
        const dbf = await DBFFile.open(io.output);
        return new Dbc(dbf, io);
    }

    /**
     * Reads a batch of records from the DBF.
     * @param count Optional number of records to read (defaults to all records).
     * @returns Array of record objects as produced by dbffile.
     */
    async readBatch(count) {
        // `??` (not `||`) so an explicit count of 0 reads zero records
        // instead of silently falling back to the whole dataset.
        return await this.dbf.readRecords(count ?? this.size);
    }

    /**
     * Deletes the temporary .dbf file created during conversion.
     *
     * NOTE(review): on failure this throws CanNotExcludeDbcFile from inside the
     * unlink callback, which surfaces as an uncaught exception (the throw cannot
     * be caught by the caller of remove()). Kept as-is to preserve behavior;
     * consider fs/promises.unlink in a future revision.
     */
    remove() {
        const inputFilePath = parse(this.io.input);
        unlink(this.io.output, (error) => {
            if (error)
                CanNotExcludeDbcFile.exception(`${inputFilePath.name}${inputFilePath.ext}`);
            console.log(`${inputFilePath.name}${inputFilePath.ext} excluded.`);
        });
    }

    /**
     * Iterates through each record asynchronously, invoking the callback.
     * @param callback Async function receiving each record.
     */
    async forEachRecords(callback) {
        for await (const record of this.dbf) {
            await callback(record);
        }
    }
}

/**
 * Error thrown when the temporary .dbf file cannot be removed.
 */
export class CanNotExcludeDbcFile extends Error {
    /**
     * @param file Base name of the file that could not be deleted.
     */
    constructor(file) {
        super(`A error occurred when deleting file: ${file}`);
        this.name = 'CanNotExcludeDbcFile';
        this.cause = 'The file was already excluded.';
    }

    /** Convenience factory that constructs and immediately throws. */
    static exception(file) {
        throw new CanNotExcludeDbcFile(file);
    }
}