UNPKG

@dbml/connector

Version:

This package was created to fetch the schema JSON from many kinds of databases.

54 lines (53 loc) 2.08 kB
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.loadCredentialFromFile = loadCredentialFromFile;
exports.parseBigQueryCredential = parseBigQueryCredential;
const promises_1 = require("fs/promises");
/**
 * Read a credential file from disk as UTF-8 text.
 *
 * @param {string} filename - Path to the credential file.
 * @returns {Promise<string>} The raw file contents.
 * @throws {Error} Wrapped as `Load credential error: ...` when readFile
 *   raises a TypeError or a plain Error (e.g. bad path argument / ENOENT);
 *   any other error type is rethrown unchanged.
 */
async function loadCredentialFromFile(filename) {
    try {
        const data = await (0, promises_1.readFile)(filename, { encoding: 'utf8' });
        return data;
    }
    catch (error) {
        // Only wrap the error families readFile is known to raise here;
        // anything else propagates untouched so callers can inspect it.
        if (error.name === 'TypeError' || error.name === 'Error') {
            throw new Error(`Load credential error: ${error.message}`);
        }
        throw error;
    }
}
/**
 * Parse and validate a BigQuery service-account credential JSON string.
 *
 * @param {string} credentialString - JSON text containing `project_id`,
 *   `client_email`, `private_key`, and optionally `datasets`.
 * @returns {{ projectId: string,
 *             credentials: { clientEmail: string, privateKey: string },
 *             datasets: string[] }}
 * @throws {Error} `Credentials must be in JSON format` when the string is
 *   not valid JSON; a field-specific message when a required field is
 *   missing, not a string, or blank.
 */
function parseBigQueryCredential(credentialString) {
    try {
        const credentialJson = JSON.parse(credentialString);
        const { project_id: projectId, client_email: clientEmail, private_key: privateKey, datasets, } = credentialJson;
        if (typeof projectId !== 'string' || !projectId || !projectId.trim()) {
            throw new Error('project_id must be a non-empty string');
        }
        if (typeof clientEmail !== 'string' || !clientEmail || !clientEmail.trim()) {
            throw new Error('client_email must be a non-empty string');
        }
        if (typeof privateKey !== 'string' || !privateKey || !privateKey.trim()) {
            throw new Error('private_key must be a non-empty string');
        }
        // NOTE: the original repeated the client_email check a second time
        // here; the duplicate was dead code and has been removed.
        // valid datasets: ['dataset_1', 'dataset_2']
        // Non-array or missing `datasets` degrades to []; non-string entries
        // are silently dropped.
        const parsedDatasets = !Array.isArray(datasets)
            ? []
            : datasets.filter((dataset) => typeof dataset === 'string');
        return {
            projectId,
            credentials: {
                clientEmail,
                privateKey,
            },
            datasets: parsedDatasets,
        };
    }
    catch (error) {
        if (error instanceof SyntaxError) {
            throw new Error('Credentials must be in JSON format');
        }
        throw error;
    }
}