// @backstage/backend-defaults: Backend defaults used by Backstage backend apps
// BitbucketServerUrlReader.cjs.js
'use strict';
var errors = require('@backstage/errors');
var integration = require('@backstage/integration');
var parseGitUrl = require('git-url-parse');
var lodash = require('lodash');
var minimatch = require('minimatch');
var ReadUrlResponseFactory = require('./ReadUrlResponseFactory.cjs.js');
function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
var parseGitUrl__default = /*#__PURE__*/_interopDefaultCompat(parseGitUrl);
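// UrlReader implementation for Bitbucket Server (self-hosted) hosts. Supports
// reading single files, whole trees, and glob-style searches.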
class BitbucketServerUrlReader {
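  // Creates one reader per configured bitbucketServer integration, each paired
  // with a predicate that matches request URLs on that integration's host.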
  static factory = ({ config, treeResponseFactory }) => {
    const integrations = integration.ScmIntegrations.fromConfig(config);
    return integrations.bitbucketServer.list().map((integration) => {
      const reader = new BitbucketServerUrlReader(integration, {
        treeResponseFactory
      });
      const predicate = (url) => url.host === integration.config.host;
      return { reader, predicate };
    });
  };
  integration;
  deps;
  constructor(integration, deps) {
    this.integration = integration;
    this.deps = deps;
  }
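  // Convenience wrapper around readUrl that resolves to the file contents as a Buffer.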
  async read(url) {
    const response = await this.readUrl(url);
    return response.buffer();
  }
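  // Reads a single file via the Bitbucket Server raw file fetch URL. Honors
  // etag (If-None-Match) and lastModifiedAfter (If-Modified-Since), throwing
  // NotModifiedError on a 304 and NotFoundError on a 404 response.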
  async readUrl(url, options) {
    const { etag, lastModifiedAfter, signal } = options ?? {};
    const bitbucketUrl = integration.getBitbucketServerFileFetchUrl(
      url,
      this.integration.config
    );
    const requestOptions = integration.getBitbucketServerRequestOptions(
      this.integration.config
    );
    let response;
    try {
      response = await fetch(bitbucketUrl.toString(), {
        headers: {
          ...requestOptions.headers,
          ...etag && { "If-None-Match": etag },
          ...lastModifiedAfter && {
            "If-Modified-Since": lastModifiedAfter.toUTCString()
          }
        },
        // TODO(freben): The signal cast is there because pre-3.x versions of
        // node-fetch have a very slightly deviating AbortSignal type signature.
        // The difference does not affect us in practice however. The cast can be
        // removed after we support ESM for CLI dependencies and migrate to
        // version 3 of node-fetch.
        // https://github.com/backstage/backstage/issues/8242
        ...signal && { signal }
      });
    } catch (e) {
      throw new Error(`Unable to read ${url}, ${e}`);
    }
    if (response.status === 304) {
      throw new errors.NotModifiedError();
    }
    if (response.ok) {
      return ReadUrlResponseFactory.ReadUrlResponseFactory.fromResponse(response);
    }
    const message = `${url} could not be read as ${bitbucketUrl}, ${response.status} ${response.statusText}`;
    if (response.status === 404) {
      throw new errors.NotFoundError(message);
    }
    throw new Error(message);
  }
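  // Downloads the repository as a tar archive and expands it into a tree
  // response scoped to the requested subpath. The short hash of the latest
  // commit serves as the etag; a matching caller etag raises NotModifiedError.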
  async readTree(url, options) {
    const { filepath } = parseGitUrl__default.default(url);
    const lastCommitShortHash = await this.getLastCommitShortHash(url);
    if (options?.etag && options.etag === lastCommitShortHash) {
      throw new errors.NotModifiedError();
    }
    const downloadUrl = await integration.getBitbucketServerDownloadUrl(
      url,
      this.integration.config
    );
    const archiveResponse = await fetch(
      downloadUrl,
      integration.getBitbucketServerRequestOptions(this.integration.config)
    );
    if (!archiveResponse.ok) {
      const message = `Failed to read tree from ${url}, ${archiveResponse.status} ${archiveResponse.statusText}`;
      if (archiveResponse.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    return await this.deps.treeResponseFactory.fromTarArchive({
      response: archiveResponse,
      subpath: filepath,
      etag: lastCommitShortHash,
      filter: options?.filter
    });
  }
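  // Resolves a (possibly glob) URL to matching files. URLs whose path contains
  // no glob characters fall back to a single readUrl call; otherwise the tree
  // is read and filtered with a minimatch pattern built from the file path.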
  async search(url, options) {
    const { filepath } = parseGitUrl__default.default(url);
    if (!filepath?.match(/[*?]/)) {
      try {
        const data = await this.readUrl(url, options);
        return {
          files: [
            {
              url,
              content: data.buffer,
              lastModifiedAt: data.lastModifiedAt
            }
          ],
          etag: data.etag ?? ""
        };
      } catch (error) {
        errors.assertError(error);
        if (error.name === "NotFoundError") {
          return {
            files: [],
            etag: ""
          };
        }
        throw error;
      }
    }
    const matcher = new minimatch.Minimatch(filepath);
    const treeUrl = lodash.trimEnd(url.replace(filepath, ""), "/");
    const tree = await this.readTree(treeUrl, {
      etag: options?.etag,
      filter: (path) => matcher.match(path)
    });
    const files = await tree.files();
    return {
      etag: tree.etag,
      files: files.map((file) => ({
        url: this.integration.resolveUrl({
          url: `/${file.path}`,
          base: url
        }),
        content: file.content,
        lastModifiedAt: file.lastModifiedAt
      }))
    };
  }
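  // Describes this reader for logging; reports only whether a token is
  // configured, never the token value itself.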
  toString() {
    const { host, token } = this.integration.config;
    const authed = Boolean(token);
    return `bitbucketServer{host=${host},authed=${authed}}`;
  }
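  // Looks up the latest commit on the requested branch (or the default branch
  // when the URL has no ref) via the branches REST endpoint and returns the
  // first 12 characters of its hash, which readTree uses as the etag.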
  async getLastCommitShortHash(url) {
    const { name: repoName, owner: project, ref: branch } = parseGitUrl__default.default(url);
    const branchParameter = branch ? `?filterText=${encodeURIComponent(branch)}` : "/default";
    const branchListUrl = `${this.integration.config.apiBaseUrl}/projects/${project}/repos/${repoName}/branches${branchParameter}`;
    const branchListResponse = await fetch(
      branchListUrl,
      integration.getBitbucketServerRequestOptions(this.integration.config)
    );
    if (!branchListResponse.ok) {
      const message = `Failed to retrieve branch list from ${branchListUrl}, ${branchListResponse.status} ${branchListResponse.statusText}`;
      if (branchListResponse.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    const branchMatches = await branchListResponse.json();
    if (branchMatches && branchMatches.size > 0) {
      const exactBranchMatch = branchMatches.values.filter(
        (branchDetails) => branchDetails.displayId === branch
      )[0];
      return exactBranchMatch.latestCommit.substring(0, 12);
    }
    if (!branch && branchMatches) {
      return branchMatches.latestCommit.substring(0, 12);
    }
    throw new Error(
      `Failed to find Last Commit using ${branch ? `branch "${branch}"` : "default branch"} in response from ${branchListUrl}`
    );
  }
}
exports.BitbucketServerUrlReader = BitbucketServerUrlReader;
//# sourceMappingURL=BitbucketServerUrlReader.cjs.js.map