// @atomist/sdm-pack-aspect
// Version:
// an Atomist SDM Extension Pack for visualizing drift across an organization
// 647 lines • 32.1 kB
// JavaScript
"use strict";
/*
* Copyright © 2019 Atomist, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// TypeScript-generated helper (tslib __awaiter): drives a generator that models
// an async function, resolving each yielded value as a Promise and stepping the
// generator with the settled result. Do not edit — emitted by the compiler.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
// TypeScript-generated helper (tslib __asyncValues): adapts a (sync or async)
// iterable for `for await ... of` consumption, wrapping each step in a Promise.
// Do not edit — emitted by the compiler.
var __asyncValues = (this && this.__asyncValues) || function (o) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var m = o[Symbol.asyncIterator], i;
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
Object.defineProperty(exports, "__esModule", { value: true });
const automation_client_1 = require("@atomist/automation-client");
const sdm_pack_fingerprint_1 = require("@atomist/sdm-pack-fingerprint");
const camelcaseKeys = require("camelcase-keys");
const _ = require("lodash");
const bands_1 = require("../../../util/bands");
const commonBands_1 = require("../../../util/commonBands");
const ProjectAnalysisResult_1 = require("../../ProjectAnalysisResult");
const pgUtils_1 = require("./pgUtils");
const ProjectAnalysisResultStore_1 = require("./ProjectAnalysisResultStore");
const repoTree_1 = require("./repoTree");
// tslint:disable:max-file-line-count
/**
 * PostgreSQL-backed store for project analysis results.
 * Persists and queries repo snapshots, fingerprints, ideals, problem
 * fingerprints and fingerprint analytics (tables: repo_snapshots,
 * repo_fingerprints, fingerprints, ideal_fingerprints,
 * problem_fingerprints, fingerprint_analytics).
 *
 * A workspaceId of "*" acts as a wildcard: the comparison operator in the
 * WHERE clause is flipped from "=" to "<>" so every workspace matches.
 */
class PostgresProjectAnalysisResultStore {
    /**
     * @param clientFactory factory yielding pg clients; all queries run via doWithClient
     */
    constructor(clientFactory) {
        this.clientFactory = clientFactory;
    }
    // Delegate tree queries to the repoTree module.
    fingerprintsToReposTree(treeQuery) {
        return repoTree_1.fingerprintsToReposTreeQuery(treeQuery, this.clientFactory);
    }
    // Drift tree for one aspect when options.type is given, otherwise for all aspects.
    aspectDriftTree(workspaceId, percentile, options) {
        return !!options && !!options.type ?
            repoTree_1.driftTreeForSingleAspect(workspaceId, percentile, options, this.clientFactory) :
            repoTree_1.driftTreeForAllAspects(workspaceId, percentile, this.clientFactory);
    }
    /**
     * Number of distinct repository URLs snapshotted in the workspace.
     */
    distinctRepoCount(workspaceId) {
        const sql = `SELECT COUNT(1) FROM (SELECT DISTINCT url
FROM repo_snapshots
WHERE workspace_id ${workspaceId === "*" ? "<>" : "="} $1) as repos`;
        return pgUtils_1.doWithClient(sql, this.clientFactory, (client) => __awaiter(this, void 0, void 0, function* () {
            const result = yield client.query(sql, [workspaceId]);
            // count comes back as a string from pg; coerce to number
            return +result.rows[0].count;
        }));
    }
    /**
     * Number of distinct (snapshot, path) pairs, i.e. virtual projects
     * such as subprojects within a monorepo.
     */
    virtualProjectCount(workspaceId) {
        const sql = `SELECT COUNT(1) FROM (SELECT DISTINCT repo_snapshot_id, path
FROM repo_snapshots, repo_fingerprints
WHERE workspace_id ${workspaceId === "*" ? "<>" : "="} $1
AND repo_fingerprints.repo_snapshot_id = repo_snapshots.id) as virtual_repos`;
        return pgUtils_1.doWithClient(sql, this.clientFactory, (client) => __awaiter(this, void 0, void 0, function* () {
            const result = yield client.query(sql, [workspaceId]);
            return +result.rows[0].count;
        }));
    }
    /**
     * Timestamp of the most recent snapshot in the workspace.
     */
    latestTimestamp(workspaceId) {
        const sql = `SELECT timestamp FROM repo_snapshots WHERE workspace_id ${workspaceId === "*" ? "<>" : "="} $1
ORDER BY timestamp DESC LIMIT 1`;
        return pgUtils_1.doWithClient(sql, this.clientFactory, (client) => __awaiter(this, void 0, void 0, function* () {
            const result = yield client.query(sql, [workspaceId]);
            return result.rows[0].timestamp;
        }));
    }
    loadInWorkspace(workspaceId, deep) {
        // Fall back to the "*" wildcard when no workspace is given
        return this.loadInWorkspaceInternal(workspaceId || "*", deep);
    }
    /**
     * Load repo snapshots in a workspace.
     * @param {string} workspaceId workspace id, or "*" for all workspaces
     * @param {boolean} deep whether to load fingerprints also
     * @param {string} additionalWhereClause does not use aliases, but original table names
     * @param {any[]} additionalParameters additional parameters required by additional where clause
     * @return {Promise<ProjectAnalysisResult[]>}
     */
    loadInWorkspaceInternal(workspaceId, deep, additionalWhereClause = "true", additionalParameters = []) {
        return __awaiter(this, void 0, void 0, function* () {
            const reposOnly = `SELECT id, owner, name, url, commit_sha, timestamp, workspace_id
FROM repo_snapshots
WHERE workspace_id ${workspaceId !== "*" ? "=" : "<>"} $1
AND ${additionalWhereClause}`;
            const reposAndFingerprints = `SELECT repo_snapshots.id,
repo_snapshots.owner,
repo_snapshots.name,
repo_snapshots.url,
repo_snapshots.commit_sha,
repo_snapshots.timestamp,
repo_snapshots.workspace_id,
json_agg(json_build_object('path', path, 'id', fingerprint_id)) as fingerprint_refs
FROM repo_snapshots
LEFT JOIN repo_fingerprints ON repo_snapshots.id = repo_fingerprints.repo_snapshot_id
WHERE workspace_id ${workspaceId !== "*" ? "=" : "<>"} $1
AND ${additionalWhereClause}
GROUP BY repo_snapshots.id`;
            const queryForRepoRows = pgUtils_1.doWithClient(deep ? reposAndFingerprints : reposOnly, this.clientFactory, (client) => __awaiter(this, void 0, void 0, function* () {
                // Load all fingerprints in workspace so we can look up
                const repoSnapshotRows = yield client.query(deep ? reposAndFingerprints : reposOnly, [workspaceId, ...additionalParameters]);
                return repoSnapshotRows.rows.map(whyDoesPostgresPutANewlineOnSomeFields).map(row => {
                    const repoRef = rowToRepoRef(row);
                    return {
                        id: row.id,
                        owner: row.owner,
                        name: row.name,
                        url: row.url,
                        commitSha: row.commit_sha,
                        timestamp: row.timestamp,
                        // Fixed: was `row.workingDescription`, a field the queries never
                        // select, which left workspaceId undefined on every result.
                        workspaceId: row.workspace_id,
                        repoRef,
                        fingerprintRefs: row.fingerprint_refs,
                        analysis: undefined,
                    };
                });
            }), []);
            if (deep) {
                // We do this join manually instead of returning JSON because of the extent of the duplication
                // and the resulting memory usage.
                // We parallelize the 2 needed queries to reduce latency
                const getFingerprints = this.fingerprintsInWorkspaceRecord(workspaceId);
                const [repoRows, fingerprints] = yield Promise.all([queryForRepoRows, getFingerprints]);
                for (const repo of repoRows) {
                    repo.analysis = {
                        id: repo.repoRef,
                        fingerprints: repo.fingerprintRefs.map(fref => {
                            return Object.assign(Object.assign({}, fingerprints[fref.id]), { path: fref.path });
                        }),
                    };
                }
                return repoRows;
            }
            return queryForRepoRows;
        });
    }
    /**
     * Load a single snapshot by its database id; undefined unless exactly one hit.
     */
    loadById(id, deep) {
        return __awaiter(this, void 0, void 0, function* () {
            const hits = yield this.loadInWorkspaceInternal("*", deep, "repo_snapshots.id = $2", [id]);
            return hits.length === 1 ? hits[0] : undefined;
        });
    }
    /**
     * Load a single snapshot by owner/name/sha; undefined unless exactly one hit.
     */
    loadByRepoRef(repo, deep) {
        return __awaiter(this, void 0, void 0, function* () {
            const hits = yield this.loadInWorkspaceInternal("*", deep, "repo_snapshots.owner = $2 AND repo_snapshots.name = $3 AND repo_snapshots.commit_sha = $4", [repo.owner, repo.repo, repo.sha]);
            return hits.length === 1 ? hits[0] : undefined;
        });
    }
    // Accepts a single result or an iterable of results.
    persist(repos) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.persistAnalysisResults(ProjectAnalysisResult_1.isProjectAnalysisResult(repos) ? [repos] : repos);
        });
    }
    /**
     * All distinct (name, type) fingerprint kinds seen in the workspace.
     */
    distinctFingerprintKinds(workspaceId) {
        return __awaiter(this, void 0, void 0, function* () {
            const sql = `SELECT DISTINCT f.name, feature_name as type
FROM repo_fingerprints rf, repo_snapshots rs, fingerprints f
WHERE rf.repo_snapshot_id = rs.id AND rf.fingerprint_id = f.id
AND rs.workspace_id ${workspaceId === "*" ? "<>" : "="} $1`;
            return pgUtils_1.doWithClient(sql, this.clientFactory, (client) => __awaiter(this, void 0, void 0, function* () {
                const result = yield client.query(sql, [workspaceId]);
                return result.rows;
            }), []);
        });
    }
    /**
     * Fingerprint kinds grouped per repository (owner/repo).
     */
    distinctRepoFingerprintKinds(workspaceId) {
        return __awaiter(this, void 0, void 0, function* () {
            const sql = `SELECT DISTINCT rs.owner, rs.name as repo, f.name, feature_name as type
FROM repo_fingerprints rf, repo_snapshots rs, fingerprints f
WHERE rf.repo_snapshot_id = rs.id AND rf.fingerprint_id = f.id
AND rs.workspace_id ${workspaceId === "*" ? "<>" : "="} $1`;
            return pgUtils_1.doWithClient(sql, this.clientFactory, (client) => __awaiter(this, void 0, void 0, function* () {
                const result = yield client.query(sql, [workspaceId]);
                // Group the flat rows by "owner/repo" and re-split the key
                return _.map(_.groupBy(result.rows, r => `${r.owner}/${r.repo}`), (v, k) => {
                    return {
                        owner: k.split("/")[0],
                        repo: k.split("/")[1],
                        fingerprints: v,
                    };
                });
            }), []);
        });
    }
    /**
     * Tag-style fingerprints: those whose data carries a 'reason', with usage counts.
     */
    tags(workspaceId) {
        const sql = `SELECT fp.name as name, fp.data ->> 'description' as description, fp.feature_name as parent, count(fp.name)
FROM repo_snapshots rs, repo_fingerprints j, fingerprints fp
WHERE j.repo_snapshot_id = rs.id and j.fingerprint_id = fp.id
AND rs.workspace_id ${workspaceId === "*" ? "<>" : "="} $1
AND fp.data ->> 'reason' IS NOT NULL
GROUP BY fp.name, parent, description`;
        return pgUtils_1.doWithClient(sql, this.clientFactory, (client) => __awaiter(this, void 0, void 0, function* () {
            const result = yield client.query(sql, [workspaceId]);
            return result.rows;
        }), []);
    }
    fingerprintUsageForType(workspaceId, type) {
        return fingerprintUsageForType(this.clientFactory, workspaceId, type);
    }
    /**
     * Store a concrete ideal, replacing any existing ideal for the same
     * (feature_name, name) in the workspace. Elimination ideals are rejected.
     */
    storeIdeal(workspaceId, ideal) {
        return __awaiter(this, void 0, void 0, function* () {
            if (sdm_pack_fingerprint_1.isConcreteIdeal(ideal)) {
                yield pgUtils_1.doWithClient("Store ideal", this.clientFactory, (client) => __awaiter(this, void 0, void 0, function* () {
                    // Clear out any existing ideal
                    yield client.query("DELETE FROM ideal_fingerprints WHERE workspace_id = $1 AND fingerprint_id IN " +
                        "(SELECT id from fingerprints where feature_name = $2 AND name = $3)", [workspaceId, ideal.ideal.type, ideal.ideal.name]);
                    const fid = yield this.ensureFingerprintStored(ideal.ideal, client);
                    yield client.query(`INSERT INTO ideal_fingerprints (workspace_id, fingerprint_id, authority)
values ($1, $2, 'local-user')`, [
                        workspaceId, fid
                    ]);
                }));
            }
            else {
                throw new Error("Elimination ideals not yet supported");
            }
        });
    }
    /**
     * Mark an already-stored fingerprint as the ideal for its kind.
     * @throws if no fingerprint with the given id exists
     */
    setIdeal(workspaceId, fingerprintId) {
        return __awaiter(this, void 0, void 0, function* () {
            const ideal = yield this.loadFingerprintById(fingerprintId);
            if (!ideal) {
                throw new Error(`Fingerprint with id=${fingerprintId} not found and cannot be used as an ideal`);
            }
            const ci = {
                reason: "Local database",
                ideal,
            };
            yield this.storeIdeal(workspaceId, ci);
        });
    }
    /**
     * All ideals stored for the workspace.
     */
    loadIdeals(workspaceId) {
        return __awaiter(this, void 0, void 0, function* () {
            const sql = `SELECT id, name, feature_name as type, sha, data
FROM ideal_fingerprints, fingerprints
WHERE workspace_id = $1 AND ideal_fingerprints.fingerprint_id = fingerprints.id`;
            return pgUtils_1.doWithClient(sql, this.clientFactory, (client) => __awaiter(this, void 0, void 0, function* () {
                const rows = yield client.query(sql, [workspaceId]);
                if (!rows.rows) {
                    return [];
                }
                return rows.rows.map(idealRowToIdeal);
            }), []);
        });
    }
    /**
     * Record a stored fingerprint as a problem (severity "warn").
     * @throws if no fingerprint with the given id exists
     */
    noteProblem(workspaceId, fingerprintId) {
        return __awaiter(this, void 0, void 0, function* () {
            const fingerprint = yield this.loadFingerprintById(fingerprintId);
            if (!fingerprint) {
                throw new Error(`Fingerprint with id=${fingerprintId} not found and cannot be noted as problem`);
            }
            yield this.storeProblemFingerprint(workspaceId, { fingerprint, severity: "warn", authority: "local-user" });
        });
    }
    /**
     * Persist a problem fingerprint record, storing the fingerprint itself first
     * if it is not already known.
     */
    storeProblemFingerprint(workspaceId, fp) {
        return __awaiter(this, void 0, void 0, function* () {
            const sql = `INSERT INTO problem_fingerprints (workspace_id, fingerprint_id, severity, authority, date_added)
values ($1, $2, $3, $4, current_timestamp)`;
            yield pgUtils_1.doWithClient(sql, this.clientFactory, (client) => __awaiter(this, void 0, void 0, function* () {
                // Ensure the fingerprint row exists before referencing it
                const fid = yield this.ensureFingerprintStored(fp.fingerprint, client);
                yield client.query(sql, [
                    workspaceId, fid, fp.severity, fp.authority
                ]);
            }));
        });
    }
    /**
     * All problem fingerprints recorded for the workspace.
     */
    loadProblems(workspaceId) {
        return __awaiter(this, void 0, void 0, function* () {
            const sql = `SELECT id, name, feature_name as type, sha, data, authority, severity, description, url
FROM problem_fingerprints, fingerprints
WHERE workspace_id = $1 AND problem_fingerprints.fingerprint_id = fingerprints.id`;
            return pgUtils_1.doWithClient(sql, this.clientFactory, (client) => __awaiter(this, void 0, void 0, function* () {
                const rows = yield client.query(sql, [workspaceId]);
                if (!rows.rows) {
                    return [];
                }
                return rows.rows.map(problemRowToProblem);
            }), []);
        });
    }
    /**
     * Load the ideal for one fingerprint kind; undefined unless exactly one row.
     * Keys are camel-cased for callers.
     */
    loadIdeal(workspaceId, type, name) {
        return __awaiter(this, void 0, void 0, function* () {
            const sql = `SELECT id, name, feature_name as type, sha, data, display_name, display_value
FROM ideal_fingerprints, fingerprints
WHERE workspace_id = $1 AND ideal_fingerprints.fingerprint_id = fingerprints.id
AND feature_name = $2 AND name = $3`;
            const rawRow = yield pgUtils_1.doWithClient(sql, this.clientFactory, (client) => __awaiter(this, void 0, void 0, function* () {
                const rows = yield client.query(sql, [workspaceId, type, name]);
                return rows.rows.length === 1 ? rows.rows[0] : undefined;
            }));
            if (!rawRow) {
                return undefined;
            }
            return camelcaseKeys(idealRowToIdeal(rawRow), { deep: true });
        });
    }
    /**
     * Load one fingerprint by its database id; undefined unless exactly one row.
     */
    loadFingerprintById(id) {
        return __awaiter(this, void 0, void 0, function* () {
            const sql = `SELECT id, name, feature_name as type, sha, data FROM fingerprints
WHERE id = $1`;
            return pgUtils_1.doWithClient(sql, this.clientFactory, (client) => __awaiter(this, void 0, void 0, function* () {
                const rows = yield client.query(sql, [id]);
                return rows.rows.length === 1 ? rows.rows[0] : undefined;
            }));
        });
    }
    /**
     * Fingerprints in the workspace as a lookup record.
     * Key is persistent fingerprint id.
     */
    fingerprintsInWorkspaceRecord(workspaceId, type, name) {
        return __awaiter(this, void 0, void 0, function* () {
            const fingerprintsArray = yield this.fingerprintsInWorkspace(workspaceId, true, type, name);
            const fingerprints = {};
            fingerprintsArray.forEach(fp => fingerprints[fp.id] = fp);
            return fingerprints;
        });
    }
    fingerprintsInWorkspace(workspaceId, distinct, type, name) {
        return __awaiter(this, void 0, void 0, function* () {
            return fingerprintsInWorkspace(this.clientFactory, workspaceId, distinct, type, name);
        });
    }
    fingerprintsForProject(snapshotId) {
        return __awaiter(this, void 0, void 0, function* () {
            return fingerprintsForProject(this.clientFactory, snapshotId);
        });
    }
    /**
     * Average number of distinct aspects per repo snapshot; -1 when unavailable.
     */
    averageFingerprintCount(workspaceId) {
        return __awaiter(this, void 0, void 0, function* () {
            const sql = `SELECT avg(count) as average_fingerprints from (SELECT repo_snapshots.id, count(feature_name) from repo_snapshots,
(select distinct feature_name, repo_snapshot_id, repo_fingerprints.path
FROM repo_fingerprints, fingerprints
WHERE repo_fingerprints.fingerprint_id = fingerprints.id)
AS aspects
WHERE workspace_id ${workspaceId === "*" ? "<>" : "="} $1
AND repo_snapshot_id = repo_snapshots.id
GROUP by repo_snapshots.id) stats;`;
            return pgUtils_1.doWithClient(sql, this.clientFactory, (client) => __awaiter(this, void 0, void 0, function* () {
                const rows = yield client.query(sql, [workspaceId || "*"]);
                return rows.rows.length === 1 ? rows.rows[0].average_fingerprints : -1;
                // NOTE(review): the default below is a function, unlike the plain
                // [] defaults used elsewhere — confirm doWithClient accepts both
            }), () => -1);
        });
    }
    /**
     * Upsert per-kind analytics (entropy, variants, count) for each workspace.
     */
    persistAnalytics(data) {
        return __awaiter(this, void 0, void 0, function* () {
            return pgUtils_1.doWithClient("Persist analytics", this.clientFactory, (client) => __awaiter(this, void 0, void 0, function* () {
                for (const { kind, workspaceId, cohortAnalysis } of data) {
                    const sql = `INSERT INTO fingerprint_analytics (feature_name, name, workspace_id, entropy, variants, count)
values ($1, $2, $3, $4, $5, $6)
ON CONFLICT ON CONSTRAINT fingerprint_analytics_pkey DO UPDATE SET entropy = $4, variants = $5, count = $6`;
                    yield client.query(sql, [kind.type, kind.name, workspaceId,
                        cohortAnalysis.entropy, cohortAnalysis.variants, cohortAnalysis.count]);
                }
                return true;
            }));
        });
    }
    /**
     * Persist every result from an async iterable, combining the individual
     * PersistResults into one summary.
     */
    persistAnalysisResults(analysisResultIterator) {
        return __awaiter(this, void 0, void 0, function* () {
            return pgUtils_1.doWithClient("Persist analysis results", this.clientFactory, (client) => __awaiter(this, void 0, void 0, function* () {
                var e_1, _a;
                const persistResults = [];
                try {
                    // Transpiled `for await ... of` over the iterator
                    for (var analysisResultIterator_1 = __asyncValues(analysisResultIterator), analysisResultIterator_1_1; analysisResultIterator_1_1 = yield analysisResultIterator_1.next(), !analysisResultIterator_1_1.done;) {
                        const analysisResult = analysisResultIterator_1_1.value;
                        persistResults.push(yield this.persistOne(client, analysisResult));
                    }
                }
                catch (e_1_1) { e_1 = { error: e_1_1 }; }
                finally {
                    try {
                        if (analysisResultIterator_1_1 && !analysisResultIterator_1_1.done && (_a = analysisResultIterator_1.return)) yield _a.call(analysisResultIterator_1);
                    }
                    finally { if (e_1) throw e_1.error; }
                }
                return persistResults.reduce(ProjectAnalysisResultStore_1.combinePersistResults, ProjectAnalysisResultStore_1.emptyPersistResult);
            }), ProjectAnalysisResultStore_1.emptyPersistResult);
        });
    }
    /**
     * Persist a single analysis result: validate its RepoRef, replace any
     * existing snapshot for the repo URL, insert the snapshot row, then
     * persist its fingerprints. Never throws — failures are reported in the
     * returned PersistResult.
     */
    persistOne(client, analysisResult) {
        return __awaiter(this, void 0, void 0, function* () {
            const repoRef = analysisResult.repoRef;
            if (!repoRef) {
                return {
                    attemptedCount: 1,
                    succeeded: [],
                    failed: [{
                            repoUrl: "missing repoRef",
                            whileTryingTo: "build object to persist",
                            message: "No RepoRef",
                        }],
                    failedFingerprints: [],
                };
            }
            if (!repoRef.url || !repoRef.sha) {
                return {
                    attemptedCount: 1,
                    succeeded: [],
                    failed: [{
                            repoUrl: "missing repoUrl. Repo is named " + repoRef.repo,
                            whileTryingTo: "build object to persist",
                            message: `Incomplete RepoRef ${JSON.stringify(repoRef)}`,
                        }],
                    failedFingerprints: [],
                };
            }
            try {
                // Whack any snapshot we already hold for this repository
                yield deleteOldSnapshotForRepository(repoRef, client);
                // Use this as unique database id
                const id = snapshotIdFor(repoRef);
                const shaToUse = repoRef.sha;
                const repoSnapshotsInsertSql = `INSERT INTO repo_snapshots (id, workspace_id, provider_id, owner, name, url,
commit_sha, query, timestamp)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, current_timestamp)`;
                automation_client_1.logger.debug("Executing SQL:\n%s", repoSnapshotsInsertSql);
                yield client.query(repoSnapshotsInsertSql, [id,
                    analysisResult.workspaceId,
                    "github",
                    repoRef.owner,
                    repoRef.repo,
                    repoRef.url,
                    shaToUse,
                    analysisResult.query,
                ]);
                const fingerprintPersistResults = yield this.persistFingerprints(analysisResult.analysis, id, client);
                fingerprintPersistResults.failures.forEach(f => {
                    automation_client_1.logger.error(`Could not persist fingerprint.
Error: ${f.error.message}
Repo: ${repoRef.url}
Fingerprint: ${JSON.stringify(f.failedFingerprint, undefined, 2)}`);
                });
                return {
                    succeeded: [id],
                    attemptedCount: 1,
                    failed: [],
                    failedFingerprints: fingerprintPersistResults.failures,
                };
            }
            catch (err) {
                return {
                    attemptedCount: 1,
                    succeeded: [],
                    failed: [{
                            repoUrl: repoRef.url,
                            whileTryingTo: "persist in DB",
                            message: err.message,
                        }],
                    failedFingerprints: [],
                };
            }
        });
    }
    /**
     * Persist fingerprints for an already-stored snapshot (no snapshot insert).
     */
    persistAdditionalFingerprints(analyzed) {
        return __awaiter(this, void 0, void 0, function* () {
            return pgUtils_1.doWithClient(`Persist additional fingerprints for project at ${analyzed.id.url}`, this.clientFactory, (client) => __awaiter(this, void 0, void 0, function* () {
                return this.persistFingerprints(analyzed, snapshotIdFor(analyzed.id), client);
            }), {
                insertedCount: 0,
                failures: analyzed.fingerprints
                    .map(failedFingerprint => ({ failedFingerprint, error: undefined })),
            });
        });
    }
    // Persist the fingerprints for this analysis. Collects per-fingerprint
    // failures rather than aborting the batch.
    persistFingerprints(pa, id, client) {
        return __awaiter(this, void 0, void 0, function* () {
            let insertedCount = 0;
            const failures = [];
            for (const fp of pa.fingerprints) {
                const aspectName = fp.type || "unknown";
                // Must match the id format used by ensureFingerprintStored
                const fingerprintId = aspectName + "_" + fp.name + "_" + fp.sha;
                // Create fp record if it doesn't exist
                try {
                    yield this.ensureFingerprintStored(fp, client);
                    const insertRepoFingerprintSql = `INSERT INTO repo_fingerprints (repo_snapshot_id, fingerprint_id, path)
VALUES ($1, $2, $3) ON CONFLICT DO NOTHING`;
                    yield client.query(insertRepoFingerprintSql, [id, fingerprintId, fp.path || ""]);
                    insertedCount++;
                }
                catch (error) {
                    failures.push({ failedFingerprint: fp, error });
                }
            }
            return {
                insertedCount,
                failures,
            };
        });
    }
    /**
     * Persist the given fingerprint if it's not already known
     * @param {FP} fp
     * @param {Client} client
     * @return {Promise<string>} the fingerprint's database id
     */
    ensureFingerprintStored(fp, client) {
        return __awaiter(this, void 0, void 0, function* () {
            const aspectName = fp.type || "unknown";
            const fingerprintId = aspectName + "_" + fp.name + "_" + fp.sha;
            // Create fp record if it doesn't exist
            const insertFingerprintSql = `INSERT INTO fingerprints (id, name, feature_name, sha, data, display_name, display_value)
VALUES ($1, $2, $3, $4, $5, $6, $7) ON CONFLICT DO NOTHING`;
            automation_client_1.logger.debug("Persisting fingerprint %j SQL\n%s", fp, insertFingerprintSql);
            yield client.query(insertFingerprintSql, [fingerprintId, fp.name, aspectName, fp.sha,
                JSON.stringify(fp.data), fp.displayName, fp.displayValue]);
            return fingerprintId;
        });
    }
}
exports.PostgresProjectAnalysisResultStore = PostgresProjectAnalysisResultStore;
/**
 * Map a joined ideal_fingerprints/fingerprints row to a concrete ideal.
 * Rows without data (elimination ideals) are not supported and throw.
 */
function idealRowToIdeal(rawRow) {
    if (!rawRow.data) {
        throw new Error("Elimination ideals not yet supported");
    }
    return {
        ideal: rawRow,
        reason: `Local database row ${rawRow.id}`,
    };
}
/**
 * Map a joined problem_fingerprints/fingerprints row to a problem record.
 * The loadProblems query aliases feature_name AS type, so the row exposes
 * "type" — the previous code read rawRow.feature_name, which does not exist
 * on the row and left the fingerprint's type undefined.
 */
function problemRowToProblem(rawRow) {
    return {
        fingerprint: {
            name: rawRow.name,
            type: rawRow.type,
            data: rawRow.data,
            sha: rawRow.sha,
        },
        authority: rawRow.authority,
        severity: rawRow.severity,
        description: rawRow.description,
        url: rawRow.url,
    };
}
/**
* Raw fingerprints in the workspace
* @return {Promise<FP[]>}
*/
/**
 * Raw fingerprints in the workspace, optionally filtered by type and/or name.
 *
 * Bug fix: the previous version hard-coded the name filter as $3 while the
 * parameter array was built by conditional pushes, so calling with a name but
 * no type bound the name as $2 and left $3 dangling. Placeholders are now
 * numbered from the parameter array as it is built.
 *
 * @return {Promise<FP[]>}
 */
function fingerprintsInWorkspace(clientFactory, workspaceId, distinct, type, name) {
    return __awaiter(this, void 0, void 0, function* () {
        const params = [workspaceId];
        let typeClause = "true";
        if (!!type) {
            params.push(type);
            typeClause = `f.feature_name = $${params.length}`;
        }
        let nameClause = "true";
        if (!!name) {
            params.push(name);
            nameClause = `f.name = $${params.length}`;
        }
        const sql = `SELECT ${distinct ? "DISTINCT" : ""} f.name, f.id, f.feature_name as type, f.sha, f.data
FROM repo_snapshots rs
RIGHT JOIN repo_fingerprints rf ON rf.repo_snapshot_id = rs.id
INNER JOIN fingerprints f ON rf.fingerprint_id = f.id
WHERE rs.workspace_id ${workspaceId === "*" ? "<>" : "="} $1
AND ${typeClause} AND ${nameClause}`;
        return pgUtils_1.doWithClient(sql, clientFactory, (client) => __awaiter(this, void 0, void 0, function* () {
            const rows = yield client.query(sql, params);
            const fps = rows.rows;
            automation_client_1.logger.debug("%d fingerprints in workspace '%s'", fps.length, workspaceId);
            return fps;
        }), []);
    });
}
/**
 * Load every fingerprint attached to one repo snapshot, ordered by aspect
 * then fingerprint name.
 * Note: Postgres lower-cases the unquoted alias fingerprintName, hence
 * row.fingerprintname when reading results back.
 */
async function fingerprintsForProject(clientFactory, snapshotId) {
    const sql = `SELECT f.name as fingerprintName, f.feature_name, f.sha, f.data, rf.path, rs.timestamp, rs.commit_sha
FROM repo_fingerprints rf, repo_snapshots rs, fingerprints f
WHERE rs.id = $1 AND rf.repo_snapshot_id = rs.id AND rf.fingerprint_id = f.id
ORDER BY feature_name, fingerprintName ASC`;
    return pgUtils_1.doWithClient(sql, clientFactory, async client => {
        const result = await client.query(sql, [snapshotId]);
        return result.rows.map(row => ({
            name: row.fingerprintname,
            type: row.feature_name,
            sha: row.sha,
            data: row.data,
            path: row.path,
            timestamp: row.timestamp,
            commitSha: row.commit_sha,
        }));
    }, []);
}
/**
 * Usage analytics per fingerprint kind in a workspace, highest entropy first,
 * optionally restricted to one aspect type. Numeric columns come back from pg
 * as strings and are coerced; an entropy band label is attached to each row.
 */
async function fingerprintUsageForType(clientFactory, workspaceId, type) {
    const sql = `SELECT name, feature_name as type, variants, count, entropy, compliance
FROM fingerprint_analytics f
WHERE f.workspace_id ${workspaceId === "*" ? "!=" : "="} $1
AND ${type ? "f.feature_name = $2" : "true"}
ORDER BY entropy DESC`;
    return pgUtils_1.doWithClient(sql, clientFactory, async client => {
        const params = !!type ? [workspaceId, type] : [workspaceId];
        const result = await client.query(sql, params);
        return result.rows.map(r => ({
            name: r.name,
            type: r.type,
            variants: +r.variants,
            count: +r.count,
            entropy: +r.entropy,
            compliance: +r.compliance,
            entropy_band: bands_1.bandFor(commonBands_1.EntropySizeBands, +r.entropy, { casing: bands_1.BandCasing.Sentence, includeNumber: false }),
        }));
    }, []);
}
/**
* Delete the data we hold for this repository.
*/
/**
 * Delete the data we hold for this repository: first the fingerprint join
 * rows for any snapshot of this URL, then the snapshot rows themselves,
 * so a fresh persist replaces rather than duplicates.
 */
async function deleteOldSnapshotForRepository(repoRef, client) {
    const deleteJoinRowsSql = `DELETE from repo_fingerprints WHERE repo_snapshot_id IN
(SELECT id from repo_snapshots WHERE url = $1)`;
    await client.query(deleteJoinRowsSql, [repoRef.url]);
    await client.query(`DELETE from repo_snapshots WHERE url = $1`, [repoRef.url]);
}
/**
 * Build a RepoRef from a repo_snapshots row, mapping the commit_sha and name
 * columns onto the sha/repo fields GitHubRepoRef expects.
 * Assumes a GitHub remote — "github" is also the hard-coded provider_id on insert.
 */
function rowToRepoRef(row) {
    return automation_client_1.GitHubRepoRef.from({
        ...row,
        sha: row.commit_sha,
        repo: row.name,
    });
}
/**
 * Normalize a repo_snapshots row: the commit_sha and id columns can come back
 * with trailing whitespace/newlines, so trim them; absent values become undefined.
 * Returns a new object — the input row is not mutated.
 */
function whyDoesPostgresPutANewlineOnSomeFields(row) {
    const { commit_sha, id } = row;
    return {
        ...row,
        commit_sha: commit_sha ? commit_sha.trim() : undefined,
        id: id ? id.trim() : undefined,
    };
}
/**
 * Derive the repo_snapshots primary key from a RepoRef's URL and sha.
 * NOTE(review): String.replace with a string pattern removes only the FIRST
 * "/" (e.g. one slash of "https://"); later slashes survive in the id.
 * Deliberately left unchanged — stored row ids depend on this exact format.
 */
function snapshotIdFor(repoRef) {
    return `${repoRef.url.replace("/", "")}_${repoRef.sha}`;
}
//# sourceMappingURL=PostgresProjectAnalysisResultStore.js.map