@backstage/backend-defaults
Backend defaults used by Backstage backend apps
'use strict';
var integration = require('@backstage/integration');
var fetch = require('node-fetch');
var parseGitUrl = require('git-url-parse');
var minimatch = require('minimatch');
var stream = require('stream');
var errors = require('@backstage/errors');
var ReadUrlResponseFactory = require('./ReadUrlResponseFactory.cjs.js');
var util = require('./util.cjs.js');
var isGlob = require('is-glob');
function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
var fetch__default = /*#__PURE__*/_interopDefaultCompat(fetch);
var parseGitUrl__default = /*#__PURE__*/_interopDefaultCompat(parseGitUrl);
var isGlob__default = /*#__PURE__*/_interopDefaultCompat(isGlob);
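/**
 * A UrlReader implementation that reads files, trees, and search results from
 * GitHub and GitHub Enterprise, using the `@backstage/integration` package for
 * host configuration, URL handling, and credentials.
 */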
class GithubUrlReader {
constructor(integration, deps) {
this.integration = integration;
this.deps = deps;
if (!integration.config.apiBaseUrl && !integration.config.rawBaseUrl) {
throw new Error(
`GitHub integration '${integration.title}' must configure an explicit apiBaseUrl or rawBaseUrl`
);
}
}
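/**
 * Creates one reader per configured GitHub integration. Each reader is paired
 * with a predicate that matches URLs on the integration's host, which is how
 * the backend routes a given URL to the right reader.
 */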
static factory = ({ config, treeResponseFactory }) => {
const integrations = integration.ScmIntegrations.fromConfig(config);
const credentialsProvider = integration.DefaultGithubCredentialsProvider.fromIntegrations(integrations);
return integrations.github.list().map((integration) => {
const reader = new GithubUrlReader(integration, {
treeResponseFactory,
credentialsProvider
});
const predicate = (url) => url.host === integration.config.host;
return { reader, predicate };
});
};
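/**
 * Reads a single file and resolves to its contents as a Buffer; a thin
 * convenience wrapper around readUrl.
 */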
async read(url) {
const response = await this.readUrl(url);
return response.buffer();
}
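/**
 * Resolves credentials for a URL. An explicitly provided token (via options)
 * takes precedence; otherwise the integration's credentials provider is
 * consulted.
 */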
getCredentials = async (url, options) => {
if (options?.token) {
return {
headers: {
Authorization: `Bearer ${options.token}`
},
type: "token",
token: options.token
};
}
return await this.deps.credentialsProvider.getCredentials({
url
});
};
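/**
 * Reads a single file from GitHub, honoring caching hints by forwarding any
 * etag as If-None-Match and any lastModifiedAfter as If-Modified-Since.
 *
 * A usage sketch (the reader instance and URL are illustrative):
 *
 *   const response = await reader.readUrl(
 *     'https://github.com/acme/repo/blob/main/catalog-info.yaml',
 *   );
 *   const content = await response.buffer();
 */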
async readUrl(url, options) {
const credentials = await this.getCredentials(url, options);
const ghUrl = integration.getGithubFileFetchUrl(
url,
this.integration.config,
credentials
);
const response = await this.fetchResponse(ghUrl, {
headers: {
...credentials?.headers,
...options?.etag && { "If-None-Match": options.etag },
...options?.lastModifiedAfter && {
"If-Modified-Since": options.lastModifiedAfter.toUTCString()
},
Accept: "application/vnd.github.v3.raw"
},
// TODO(freben): The signal cast is there because pre-3.x versions of
// node-fetch have a very slightly deviating AbortSignal type signature.
// The difference does not affect us in practice however. The cast can
// be removed after we support ESM for CLI dependencies and migrate to
// version 3 of node-fetch.
// https://github.com/backstage/backstage/issues/8242
signal: options?.signal
});
return ReadUrlResponseFactory.ReadUrlResponseFactory.fromNodeJSReadable(response.body, {
etag: response.headers.get("ETag") ?? void 0,
lastModifiedAt: util.parseLastModified(response.headers.get("Last-Modified"))
});
}
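/**
 * Reads a whole repository (sub)tree at the ref encoded in the URL. The
 * resolved commit SHA doubles as the etag, so an unchanged tree short-circuits
 * with NotModifiedError before any archive is downloaded.
 */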
async readTree(url, options) {
const repoDetails = await this.getRepoDetails(url, options);
const commitSha = repoDetails.commitSha;
if (options?.etag && options.etag === commitSha) {
throw new errors.NotModifiedError();
}
const { filepath } = parseGitUrl__default.default(url);
const { headers } = await this.getCredentials(url, options);
return this.doReadTree(
repoDetails.repo.archive_url,
commitSha,
filepath,
// TODO(freben): The signal cast is there because pre-3.x versions of
// node-fetch have a very slightly deviating AbortSignal type signature.
// The difference does not affect us in practice however. The cast can be
// removed after we support ESM for CLI dependencies and migrate to
// version 3 of node-fetch.
// https://github.com/backstage/backstage/issues/8242
{ headers, signal: options?.signal },
options
);
}
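/**
 * Searches for files matching the URL's path component. A path without glob
 * characters degrades to a plain readUrl lookup, returning zero or one file;
 * a glob is matched against the repository tree at the resolved commit, whose
 * SHA serves as the result etag.
 */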
async search(url, options) {
const { filepath } = parseGitUrl__default.default(url);
if (!isGlob__default.default(filepath)) {
try {
const data = await this.readUrl(url, options);
return {
files: [
{
url,
content: data.buffer,
lastModifiedAt: data.lastModifiedAt
}
],
etag: data.etag ?? ""
};
} catch (error) {
errors.assertError(error);
if (error.name === "NotFoundError") {
return {
files: [],
etag: ""
};
}
throw error;
}
}
const repoDetails = await this.getRepoDetails(url, options);
const commitSha = repoDetails.commitSha;
if (options?.etag && options.etag === commitSha) {
throw new errors.NotModifiedError();
}
const { headers } = await this.getCredentials(url, options);
const files = await this.doSearch(
url,
repoDetails.repo.trees_url,
repoDetails.repo.archive_url,
commitSha,
filepath,
{ headers, signal: options?.signal }
);
return { files, etag: commitSha };
}
toString() {
const { host, token } = this.integration.config;
return `github{host=${host},authed=${Boolean(token)}}`;
}
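/**
 * Downloads the repository as a tarball via the archive_url template and hands
 * the stream to the tree response factory, which handles extraction and
 * filtering.
 */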
async doReadTree(archiveUrl, sha, subpath, init, options) {
const archive = await this.fetchResponse(
archiveUrl.replace("{archive_format}", "tarball").replace("{/ref}", `/${sha}`),
init
);
return await this.deps.treeResponseFactory.fromTarArchive({
// TODO(Rugvip): The underlying implementation of fetch will be node-fetch; we
// probably want to stick to using it exclusively in backend code.
stream: stream.Readable.from(archive.body),
subpath,
etag: sha,
filter: options?.filter
});
}
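/**
 * Lists the repository tree recursively via the git trees API and matches blob
 * paths against the glob. When GitHub truncates the recursive listing (very
 * large repositories), it falls back to downloading the full archive and
 * filtering locally.
 */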
async doSearch(url, treesUrl, archiveUrl, sha, query, init) {
function pathToUrl(path) {
const updated = new URL(url);
const base = updated.pathname.split("/").slice(1, 5).join("/");
updated.pathname = `${base}/${path}`;
return updated.toString();
}
const matcher = new minimatch.Minimatch(query.replace(/^\/+/, ""));
const recursiveTree = await this.fetchJson(
treesUrl.replace("{/sha}", `/${sha}?recursive=true`),
init
);
if (!recursiveTree.truncated) {
const matching = recursiveTree.tree.filter(
(item) => item.type === "blob" && item.path && item.url && matcher.match(item.path)
);
return matching.map((item) => ({
url: pathToUrl(item.path),
content: async () => {
const blob = await this.fetchJson(item.url, init);
return Buffer.from(blob.content, "base64");
}
}));
}
const tree = await this.doReadTree(archiveUrl, sha, "", init, {
filter: (path) => matcher.match(path)
});
const files = await tree.files();
return files.map((file) => ({
url: pathToUrl(file.path),
content: file.content,
lastModifiedAt: file.lastModifiedAt
}));
}
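/**
 * Resolves the commit SHA and repository metadata for the URL's ref using the
 * commit status endpoint. When the URL carries no ref, the repository's
 * default branch is looked up first.
 */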
async getRepoDetails(url, options) {
const parsed = parseGitUrl__default.default(url);
const { ref, full_name } = parsed;
const credentials = await this.getCredentials(url, options);
const { headers } = credentials;
const commitStatus = await this.fetchJson(
`${this.integration.config.apiBaseUrl}/repos/${full_name}/commits/${ref || await this.getDefaultBranch(full_name, credentials)}/status?per_page=0`,
{ headers }
);
return {
commitSha: commitStatus.sha,
repo: commitStatus.repository
};
}
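/**
 * Looks up the default branch name (e.g. main) of a repository via the GitHub
 * repos API.
 */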
async getDefaultBranch(repoFullName, credentials) {
const repo = await this.fetchJson(
`${this.integration.config.apiBaseUrl}/repos/${repoFullName}`,
{ headers: credentials.headers }
);
return repo.default_branch;
}
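/**
 * Performs a fetch and translates failures into Backstage error types: 304
 * becomes NotModifiedError, 404 becomes NotFoundError, and anything else a
 * generic Error, annotated when the response indicates rate limiting.
 */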
async fetchResponse(url, init) {
const urlAsString = url.toString();
const response = await fetch__default.default(urlAsString, init);
if (!response.ok) {
let message = `Request failed for ${urlAsString}, ${response.status} ${response.statusText}`;
if (response.status === 304) {
throw new errors.NotModifiedError();
}
if (response.status === 404) {
throw new errors.NotFoundError(message);
}
if (this.integration.parseRateLimitInfo(response).isRateLimited) {
message += " (rate limit exceeded)";
}
throw new Error(message);
}
return response;
}
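/**
 * Convenience wrapper that fetches a URL and parses the response body as JSON.
 */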
async fetchJson(url, init) {
const response = await this.fetchResponse(url, init);
return await response.json();
}
}
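// A wiring sketch, assuming a loaded Backstage Config and a tree response
// factory are available (both names below are illustrative):
//
//   const readers = GithubUrlReader.factory({ config, treeResponseFactory });
//   const target = 'https://github.com/acme/repo/blob/main/README.md';
//   const entry = readers.find(({ predicate }) => predicate(new URL(target)));
//   const buffer = entry && (await entry.reader.read(target));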
exports.GithubUrlReader = GithubUrlReader;
//# sourceMappingURL=GithubUrlReader.cjs.js.map