firebase-authentication-to-bigquery-export
An automatic tool for copying and converting Firebase Authentication data to BigQuery.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.copyToBigQuery = exports.deleteBigQueryTables = exports.createBigQueryTables = exports.setBigQueryConfig = exports.setFirebaseConfig = void 0;
const admin = __importStar(require("firebase-admin"));
const BigQuery = __importStar(require("@google-cloud/bigquery"));
// Module state: set via setBigQueryConfig() / setFirebaseConfig() before use.
let bigQuery = undefined;
let app = undefined;
// Both the target dataset and table are named "authentication".
const datasetID = "authentication";
const tableName = "authentication";
/**
 * Connects to the given Firebase project with a service-account key.
 *
 * @param {JSON} serviceAccountFile The parsed service-account key of the Firebase project.
 * @public
 */
function setFirebaseConfig(serviceAccountFile) {
const keys = {
credential: admin.credential.cert(serviceAccountFile),
};
app = admin.initializeApp(keys, "authentication-to-bigquery-export");
}
exports.setFirebaseConfig = setFirebaseConfig;
/**
 * Connects to the given BigQuery project with a service-account key.
 *
 * @param {JSON} serviceAccountFile The parsed service-account key of the BigQuery project.
 * @public
 */
function setBigQueryConfig(serviceAccountFile) {
bigQuery = new BigQuery.BigQuery({
projectId: serviceAccountFile.project_id,
credentials: serviceAccountFile
});
}
exports.setBigQueryConfig = setBigQueryConfig;
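// Usage sketch (illustrative only, not part of this module): a consumer script would
// typically load two service-account key files and call both setters once at startup.
// The file names below are assumptions.
//
// const { setFirebaseConfig, setBigQueryConfig } = require('firebase-authentication-to-bigquery-export');
// setFirebaseConfig(require('./firebase-service-account.json'));
// setBigQueryConfig(require('./bigquery-service-account.json'));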
/**
 * Creates the BigQuery dataset if it doesn't exist, then creates the
 * authentication table with the correct schema if it doesn't already exist.
 *
 * @public
 */
async function createBigQueryTables() {
    if (!bigQuery) {
        console.error("Please call setBigQueryConfig() first.");
        return;
    }
    // Create the dataset if it doesn't exist yet.
    const datasetExists = await bigQuery.dataset(datasetID).exists();
    if (!datasetExists[0]) {
        await bigQuery.createDataset(datasetID);
    }
    // Skip table creation if the table is already there.
    const tableExists = await bigQuery.dataset(datasetID).table(tableName).exists();
    if (tableExists[0]) {
        return;
    }
    const schema = [
        { name: 'userId', type: 'STRING', mode: 'REQUIRED' },
        { name: 'mail', type: 'STRING' },
        { name: 'creationTime', type: 'INTEGER' },
        { name: 'lastSignInTime', type: 'INTEGER' },
        { name: 'displayName', type: 'STRING' },
        { name: 'photoURL', type: 'STRING' },
        { name: 'phoneNumber', type: 'STRING' },
        { name: 'providerIds', type: 'STRING', mode: 'REPEATED' },
        { name: 'tokensValidAfterTime', type: 'INTEGER' },
        { name: 'emailVerified', type: 'BOOLEAN' },
        { name: 'disabled', type: 'BOOLEAN' },
    ];
    const options = {
        friendlyName: "auth",
        schema: schema
    };
    return bigQuery.dataset(datasetID).createTable(tableName, options);
}
exports.createBigQueryTables = createBigQueryTables;
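// Example (illustrative only): once the table exists and has been filled by copyToBigQuery,
// it can be queried with the same BigQuery client. The SQL below is an assumption, not part
// of this module; note that creationTime and lastSignInTime are stored as Unix seconds.
//
// const [job] = await bigQuery.createQueryJob({
//     query: 'SELECT COUNT(*) AS unverified FROM `' + datasetID + '.' + tableName + '` WHERE emailVerified = FALSE'
// });
// const [queryRows] = await job.getQueryResults();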
/**
 * Deletes the authentication table from the BigQuery dataset, if it exists.
 *
 * @public
 */
async function deleteBigQueryTables() {
    if (!bigQuery) {
        console.error("Please call setBigQueryConfig() first.");
        return;
    }
    const datasetExists = await bigQuery.dataset(datasetID).exists();
    if (!datasetExists[0]) {
        console.log("Dataset not found: " + datasetID);
        return;
    }
    const tableExists = await bigQuery.dataset(datasetID).table(tableName).exists();
    if (!tableExists[0]) {
        console.log("Table not found: " + tableName);
        return;
    }
    return bigQuery.dataset(datasetID).table(tableName).delete();
}
exports.deleteBigQueryTables = deleteBigQueryTables;
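// Sketch (illustrative only): deleting and recreating the table gives a clean target
// before a fresh export; whether to reset on every run is an assumption, not a requirement.
//
// await deleteBigQueryTables();
// await createBigQueryTables();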
/**
 * Fetches all Firebase Authentication users in pages of 1000, converts them to rows
 * and inserts them into the BigQuery table in batches.
 *
 * @param {boolean} [verbose = false]
 * @returns {Promise<Number>} Resolves with the number of copied users.
 * @public
 */
async function copyToBigQuery(verbose = false) {
    if (!bigQuery || !app) {
        console.error("Please call setBigQueryConfig() and setFirebaseConfig() first.");
        return;
    }
    // Page through all Authentication users, 1000 at a time.
    const results = await app.auth().listUsers(1000);
    let pageToken = results.pageToken;
    let users = results.users;
    while (pageToken) {
        const next = await app.auth().listUsers(1000, pageToken);
        pageToken = next.pageToken;
        users = users.concat(next.users);
    }
    if (verbose) {
        console.log("Done: fetched user data");
    }
    // Convert each user record to a flat row; timestamps are stored as Unix seconds.
    const rows = users.map(x => {
        const creationTime = new Date(x.metadata.creationTime).getTime() / 1000;
        const lastSignInTime = new Date(x.metadata.lastSignInTime).getTime() / 1000;
let tokensValidAfterTime = null;
if (x.tokensValidAfterTime) {
tokensValidAfterTime = new Date(x.tokensValidAfterTime).getTime() / 1000;
}
return {
userId: x.uid,
mail: x.email ? x.email : null,
emailVerified: x.emailVerified,
disabled: x.disabled,
displayName: x.displayName ? x.displayName : null,
phoneNumber: x.phoneNumber ? x.phoneNumber : null,
photoURL: x.photoURL ? x.photoURL : null,
providerIds: x.providerData.map((provider) => { return provider.providerId; }),
tokensValidAfterTime: tokensValidAfterTime,
creationTime: creationTime,
lastSignInTime: lastSignInTime
};
});
if (verbose) {
console.log("inserting data ...");
}
    // Split the rows into batches of at most 1000 rows per insert call.
    const data = [];
    for (let i = 0; i < rows.length; i += 1000) {
        data.push(rows.slice(i, i + 1000));
    }
    const table = bigQuery.dataset(datasetID).table(tableName);
    return Promise.all(data.map(async (x) => {
        // Yield to the event loop briefly between batches before streaming the insert.
        await new Promise(resolve => setTimeout(resolve, 0.5));
        return table.insert(x);
    })).then(() => {
        if (verbose)
            console.log('Successfully copied authentication data to BigQuery.');
        return rows.length;
    })
        .catch(e => {
            let errorMessage = '';
            // Insert errors from BigQuery arrive as an `errors` array; other failures
            // (e.g. network errors) may not have one, so guard before reading it.
            if (e.errors && e.errors.length) {
                errorMessage = e.errors.length + ' errors.';
                console.error(e.errors.length + ' errors. Here are the first three:');
                e.errors.slice(0, 3).forEach(err => console.error(err));
            }
            else {
                errorMessage = e.message ? e.message : String(e);
                console.error(e);
            }
            throw new Error(errorMessage);
        });
}
exports.copyToBigQuery = copyToBigQuery;
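// End-to-end usage sketch (illustrative only): after both configs are set, create the
// table and copy every Authentication user. copyToBigQuery resolves with the number of
// copied users; a real script would add proper error handling around both calls.
//
// (async () => {
//     await createBigQueryTables();
//     const copied = await copyToBigQuery(true);
//     console.log('Copied ' + copied + ' users to BigQuery.');
// })();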