@backstage/backend-defaults

Backend defaults used by Backstage backend apps

'use strict';

var integration = require('@backstage/integration');
var parseGitUrl = require('git-url-parse');
var minimatch = require('minimatch');
var errors = require('@backstage/errors');
var ReadUrlResponseFactory = require('./ReadUrlResponseFactory.cjs.js');

function _interopDefaultCompat(e) {
  return e && typeof e === 'object' && 'default' in e ? e : { default: e };
}

var parseGitUrl__default = /*#__PURE__*/ _interopDefaultCompat(parseGitUrl);

// Reads files, trees, and search results from Azure DevOps hosts that are
// configured as integrations.
class AzureUrlReader {
  constructor(integration, deps) {
    this.integration = integration;
    this.deps = deps;
  }

  // Creates one { reader, predicate } pair per configured Azure integration;
  // the predicate matches candidate URLs by host. Note that the callback
  // parameter below shadows the imported module namespace named `integration`.
  static factory = ({ config, treeResponseFactory }) => {
    const integrations = integration.ScmIntegrations.fromConfig(config);
    const credentialProvider = integration.DefaultAzureDevOpsCredentialsProvider.fromIntegrations(integrations);
    return integrations.azure.list().map((integration) => {
      const reader = new AzureUrlReader(integration, {
        treeResponseFactory,
        credentialsProvider: credentialProvider
      });
      const predicate = (url) => url.host === integration.config.host;
      return { reader, predicate };
    });
  };

  async read(url) {
    const response = await this.readUrl(url);
    return response.buffer();
  }

  // Fetches a single file through the Azure file-fetch API.
  async readUrl(url, options) {
    const { signal } = options ?? {};
    const builtUrl = integration.getAzureFileFetchUrl(url);
    let response;
    try {
      const credentials = await this.deps.credentialsProvider.getCredentials({
        url: builtUrl
      });
      response = await fetch(builtUrl, {
        headers: credentials?.headers,
        // TODO(freben): The signal cast is there because pre-3.x versions of
        // node-fetch have a very slightly deviating AbortSignal type signature.
        // The difference does not affect us in practice however. The cast can
        // be removed after we support ESM for CLI dependencies and migrate to
        // version 3 of node-fetch.
        // https://github.com/backstage/backstage/issues/8242
        ...signal && { signal }
      });
    } catch (e) {
      throw new Error(`Unable to read ${url}, ${e}`);
    }
    // 203 responses are rejected along with error statuses, since they do not
    // represent the requested file content.
    if (response.ok && response.status !== 203) {
      return ReadUrlResponseFactory.ReadUrlResponseFactory.fromResponse(response);
    }
    const message = `${url} could not be read as ${builtUrl}, ${response.status} ${response.statusText}`;
    if (response.status === 404) {
      throw new errors.NotFoundError(message);
    }
    throw new Error(message);
  }

  // Resolves the latest commit for the target, short-circuits when it matches
  // the caller's etag, and otherwise downloads the tree as a zip archive.
  async readTree(url, options) {
    const { etag, filter, signal } = options ?? {};
    const credentials = await this.deps.credentialsProvider.getCredentials({
      url
    });
    const commitsAzureResponse = await fetch(integration.getAzureCommitsUrl(url), {
      headers: credentials?.headers
    });
    if (!commitsAzureResponse.ok) {
      const message = `Failed to read tree from ${url}, ${commitsAzureResponse.status} ${commitsAzureResponse.statusText}`;
      if (commitsAzureResponse.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    const commitSha = (await commitsAzureResponse.json()).value[0].commitId;
    if (etag && etag === commitSha) {
      throw new errors.NotModifiedError();
    }
    const archiveAzureResponse = await fetch(integration.getAzureDownloadUrl(url), {
      headers: {
        ...credentials?.headers,
        Accept: "application/zip"
      },
      // TODO(freben): The signal cast is there because pre-3.x versions of
      // node-fetch have a very slightly deviating AbortSignal type signature.
      // The difference does not affect us in practice however. The cast can be
      // removed after we support ESM for CLI dependencies and migrate to
      // version 3 of node-fetch.
      // https://github.com/backstage/backstage/issues/8242
      ...signal && { signal }
    });
    if (!archiveAzureResponse.ok) {
      const message = `Failed to read tree from ${url}, ${archiveAzureResponse.status} ${archiveAzureResponse.statusText}`;
      if (archiveAzureResponse.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    // The last segment of the ?path= query parameter becomes the subpath to
    // extract from the downloaded archive.
    let subpath;
    const path = new URL(url).searchParams.get("path");
    if (path) {
      subpath = path.split("/").filter(Boolean).slice(-1)[0];
    }
    return await this.deps.treeResponseFactory.fromZipArchive({
      response: archiveAzureResponse,
      etag: commitSha,
      filter,
      subpath
    });
  }

  // Exact file paths are read directly; paths containing glob characters are
  // matched against a listing of the full tree.
  async search(url, options) {
    const { filepath } = parseGitUrl__default.default(url);
    if (!filepath?.match(/[*?]/)) {
      try {
        const data = await this.readUrl(url, options);
        return {
          files: [
            {
              url,
              content: data.buffer,
              lastModifiedAt: data.lastModifiedAt
            }
          ],
          etag: data.etag ?? ""
        };
      } catch (error) {
        errors.assertError(error);
        if (error.name === "NotFoundError") {
          return { files: [], etag: "" };
        }
        throw error;
      }
    }
    const treeUrl = new URL(url);
    const path = treeUrl.searchParams.get("path");
    const matcher = path && new minimatch.Minimatch(path.replace(/^\/+/, ""));
    treeUrl.searchParams.delete("path");
    const tree = await this.readTree(treeUrl.toString(), {
      etag: options?.etag,
      signal: options?.signal,
      filter: (p) => matcher ? matcher.match(p) : true
    });
    const files = await tree.files();
    return {
      etag: tree.etag,
      files: files.map((file) => ({
        url: this.integration.resolveUrl({
          url: `/${file.path}`,
          base: url
        }),
        content: file.content,
        lastModifiedAt: file.lastModifiedAt
      }))
    };
  }

  toString() {
    const { host, credentials } = this.integration.config;
    return `azure{host=${host},authed=${Boolean(
      credentials !== void 0 && credentials.length > 0
    )}}`;
  }
}

exports.AzureUrlReader = AzureUrlReader;
//# sourceMappingURL=AzureUrlReader.cjs.js.map
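
For orientation, a minimal usage sketch follows. It is not part of this file: the config values, the example organization/project URL, and the treeResponseFactory parameter are illustrative assumptions, and only AzureUrlReader.factory, the host predicate, and readUrl come from the code above.

// Usage sketch (assumptions: config shape, URL, and treeResponseFactory wiring
// are illustrative, not taken from this file).
const { ConfigReader } = require('@backstage/config');
const { AzureUrlReader } = require('./AzureUrlReader.cjs.js');

async function example(treeResponseFactory) {
  // Hypothetical integration config; a real backend reads this from app-config.yaml.
  const config = new ConfigReader({
    integrations: {
      azure: [{ host: 'dev.azure.com', credentials: [{ personalAccessToken: 'my-pat' }] }]
    }
  });

  // factory() returns one { reader, predicate } pair per configured Azure host.
  // treeResponseFactory only needs to implement fromZipArchive() if readTree()
  // or glob-based search() is used; readUrl() does not touch it.
  const tuples = AzureUrlReader.factory({ config, treeResponseFactory });

  const url = 'https://dev.azure.com/my-org/my-project/_git/my-repo?path=/catalog-info.yaml';
  const match = tuples.find((t) => t.predicate(new URL(url)));
  if (!match) throw new Error('No configured Azure integration matches this URL');

  const response = await match.reader.readUrl(url);
  console.log((await response.buffer()).toString('utf8'));
}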