
@teambit/export (export.main.runtime.js)

"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.default = exports.ExportMain = void 0; exports.isUserTryingToExportLanes = isUserTryingToExportLanes; function _fsExtra() { const data = _interopRequireDefault(require("fs-extra")); _fsExtra = function () { return data; }; return data; } function _cli() { const data = require("@teambit/cli"); _cli = function () { return data; }; return data; } function _scope() { const data = require("@teambit/scope"); _scope = function () { return data; }; return data; } function _bitError() { const data = require("@teambit/bit-error"); _bitError = function () { return data; }; return data; } function _legacy() { const data = require("@teambit/legacy.analytics"); _legacy = function () { return data; }; return data; } function _componentId() { const data = require("@teambit/component-id"); _componentId = function () { return data; }; return data; } function _legacy2() { const data = require("@teambit/legacy.constants"); _legacy2 = function () { return data; }; return data; } function _legacy3() { const data = require("@teambit/legacy.component-list"); _legacy3 = function () { return data; }; return data; } function _remove() { const data = require("@teambit/remove"); _remove = function () { return data; }; return data; } function _legacy4() { const data = require("@teambit/legacy.utils"); _legacy4 = function () { return data; }; return data; } function _workspace() { const data = require("@teambit/workspace"); _workspace = function () { return data; }; return data; } function _logger() { const data = require("@teambit/logger"); _logger = function () { return data; }; return data; } function _lodash() { const data = require("lodash"); _lodash = function () { return data; }; return data; } function _pMapSeries() { const data = _interopRequireDefault(require("p-map-series")); _pMapSeries = function () { return data; }; return data; } function _laneId() { const data = require("@teambit/lane-id"); _laneId = function () { return data; }; return data; } function _scope2() { const data = require("@teambit/scope.remotes"); _scope2 = function () { return data; }; return data; } function _eject() { const data = require("@teambit/eject"); _eject = function () { return data; }; return data; } function _scope3() { const data = require("@teambit/scope.network"); _scope3 = function () { return data; }; return data; } function _workspaceModules() { const data = require("@teambit/workspace.modules.node-modules-linker"); _workspaceModules = function () { return data; }; return data; } function _dependencyResolver() { const data = require("@teambit/dependency-resolver"); _dependencyResolver = function () { return data; }; return data; } function _exportScopeComponents() { const data = require("./export-scope-components"); _exportScopeComponents = function () { return data; }; return data; } function _objects() { const data = require("@teambit/objects"); _objects = function () { return data; }; return data; } function _legacy5() { const data = require("@teambit/legacy.scope"); _legacy5 = function () { return data; }; return data; } function _component() { const data = require("@teambit/component.snap-distance"); _component = function () { return data; }; return data; } function _export() { const data = require("./export.aspect"); _export = function () { return data; }; return data; } function _exportCmd() { const data = require("./export-cmd"); _exportCmd = function () { return data; }; return data; } function _resumeExportCmd() { const data = 
require("./resume-export-cmd"); _resumeExportCmd = function () { return data; }; return data; } const _excluded = ["ids", "includeNonStaged", "headOnly", "originDirectly"]; function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; } function ownKeys(e, r) { var t = Object.keys(e); if (Object.getOwnPropertySymbols) { var o = Object.getOwnPropertySymbols(e); r && (o = o.filter(function (r) { return Object.getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; } function _objectSpread(e) { for (var r = 1; r < arguments.length; r++) { var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? ownKeys(Object(t), !0).forEach(function (r) { _defineProperty(e, r, t[r]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) { Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r)); }); } return e; } function _objectWithoutProperties(e, t) { if (null == e) return {}; var o, r, i = _objectWithoutPropertiesLoose(e, t); if (Object.getOwnPropertySymbols) { var n = Object.getOwnPropertySymbols(e); for (r = 0; r < n.length; r++) o = n[r], -1 === t.indexOf(o) && {}.propertyIsEnumerable.call(e, o) && (i[o] = e[o]); } return i; } function _objectWithoutPropertiesLoose(r, e) { if (null == r) return {}; var t = {}; for (var n in r) if ({}.hasOwnProperty.call(r, n)) { if (-1 !== e.indexOf(n)) continue; t[n] = r[n]; } return t; } function _defineProperty(e, r, t) { return (r = _toPropertyKey(r)) in e ? Object.defineProperty(e, r, { value: t, enumerable: !0, configurable: !0, writable: !0 }) : e[r] = t, e; } function _toPropertyKey(t) { var i = _toPrimitive(t, "string"); return "symbol" == typeof i ? i : i + ""; } function _toPrimitive(t, r) { if ("object" != typeof t || !t) return t; var e = t[Symbol.toPrimitive]; if (void 0 !== e) { var i = e.call(t, r || "default"); if ("object" != typeof i) return i; throw new TypeError("@@toPrimitive must return a primitive value."); } return ("string" === r ? String : Number)(t); } const BEFORE_EXPORT = 'exporting component'; const BEFORE_EXPORTS = 'exporting components'; const BEFORE_LOADING_COMPONENTS = 'loading components'; class ExportMain { constructor(workspace, remove, depResolver, logger, eject) { this.workspace = workspace; this.remove = remove; this.depResolver = depResolver; this.logger = logger; this.eject = eject; } async export(params = {}) { const { nonExistOnBitMap, newIdsOnRemote, missingScope, exported, removedIds, exportedLanes, rippleJobs } = await this.exportComponents(params); let ejectResults; await this.workspace.clearCache(); // needed when one process executes multiple commands, such as in "bit test" or "bit cli" if (params.eject) ejectResults = await this.ejectExportedComponents(exported); const exportResults = { componentsIds: exported, newIdsOnRemote, nonExistOnBitMap, removedIds, missingScope, ejectResults, exportedLanes, rippleJobs, rippleJobUrls: this.getRippleJobUrls(exportedLanes, rippleJobs) }; if (_legacy5().Scope.onPostExport) { await _legacy5().Scope.onPostExport(exported, exportedLanes).catch(err => { this.logger.error('fatal: onPostExport encountered an error (this error does not stop the process)', err); }); } return exportResults; } getRippleJobUrls(exportedLanes, rippleJobs) { const lane = exportedLanes.length ? exportedLanes?.[0] : undefined; const rippleJobUrls = lane ? 
rippleJobs.map(job => `https://${(0, _legacy2().getCloudDomain)()}/${lane.scope.replace('.', '/')}/~lane/${lane.name}/~ripple-ci/job/${job}`) : rippleJobs.map(job => `https://${(0, _legacy2().getCloudDomain)()}/ripple-ci/job/${job}`); return rippleJobUrls; } async exportComponents(_ref) { let { ids, includeNonStaged, headOnly, originDirectly } = _ref, params = _objectWithoutProperties(_ref, _excluded); if (!this.workspace) throw new (_workspace().OutsideWorkspaceError)(); const consumer = this.workspace.consumer; const { idsToExport, missingScope, laneObject } = await this.getComponentsToExport(ids, includeNonStaged || headOnly); if (!idsToExport.length && !laneObject) { return { updatedIds: [], nonExistOnBitMap: [], removedIds: new (_componentId().ComponentIdList)(), missingScope, exported: [], newIdsOnRemote: [], exportedLanes: [], rippleJobs: [] }; } if (!idsToExport.length && laneObject && params.forkLaneNewScope) { throw new (_bitError().BitError)(`the forked lane "${laneObject.name}" has no changes, to export all its components, please use "--all" flag if the export fails with missing objects/versions/components, run "bit fetch --lanes <lane-name> --all-history", to make sure you have the full history locally`); } // validate lane readme component and ensure it has been snapped if (laneObject?.readmeComponent) { _throwForUnsnappedLaneReadme(laneObject); } if (!params.forkLaneNewScope && laneObject?.forkedFrom && laneObject.isNew && laneObject.forkedFrom.scope !== laneObject.scope) { throw new (_bitError().BitError)(`error: the current lane ${laneObject.id().toString()} was forked from ${laneObject.forkedFrom.toString()} and is about to export to a different scope (${laneObject.scope}) than the original lane (${laneObject.forkedFrom.scope}). on large lanes with long history graph, it results in exporting lots of objects to the new scope, some of them might be missing locally. if you can use the same scope as the original name, change it now by running "bit lane change-scope ${laneObject.name} ${laneObject.forkedFrom.scope}". otherwise, re-run the export with "--fork-lane-new-scope" flag. if the export fails with missing objects/versions/components, run "bit fetch --lanes <lane-name> --all-history", to make sure you have the full history locally`); } const isOnMain = consumer.isOnMain(); const { exported, updatedLocally, newIdsOnRemote, rippleJobs } = await this.pushToScopes(_objectSpread(_objectSpread({}, params), {}, { exportHeadsOnly: headOnly, scope: consumer.scope, ids: idsToExport, laneObject, originDirectly, isOnMain, filterOutExistingVersions: Boolean(!params.allVersions && laneObject) })); if (laneObject) await updateLanesAfterExport(consumer, laneObject); const removedIds = await this.getRemovedStagedBitIds(); const workspaceIds = this.workspace.listIds(); const nonExistOnBitMap = exported.filter(id => !workspaceIds.hasWithoutVersion(id) && !removedIds.hasWithoutVersion(id)); const updatedIds = _updateIdsOnBitMap(consumer.bitMap, updatedLocally); // re-generate the package.json, this way, it has the correct data in the componentId prop. await (0, _workspaceModules().linkToNodeModulesByIds)(this.workspace, updatedIds, true); await this.workspace.removeFromStagedConfig(exported); // ideally we should delete the staged-snaps only for the exported snaps. however, it's not easy, and it's ok to // delete them all because this file is mainly an optimization for the import process. 
await this.workspace.scope.legacyScope.stagedSnaps.deleteFile(); await _fsExtra().default.remove(this.workspace.scope.getLastMergedPath()); _legacy().Analytics.setExtraData('num_components', exported.length); // it is important to have consumer.onDestroy() before running the eject operation, we want the // export and eject operations to function independently. we don't want to lose the changes to // .bitmap file done by the export action in case the eject action has failed. await consumer.onDestroy('export'); return { updatedIds, nonExistOnBitMap, removedIds, missingScope, exported, newIdsOnRemote, exportedLanes: laneObject ? [laneObject] : [], rippleJobs }; } /** * @deprecated use `pushToScopes` instead */ async exportMany(params) { return this.pushToScopes(params); } /** * the export process uses four steps. read more about it here: https://github.com/teambit/bit/pull/3371 */ async pushToScopes({ scope, ids, // when exporting a lane, the ids are the lane component ids laneObject, allVersions, originDirectly, resumeExportId, throwForMissingArtifacts, isOnMain = true, exportHeadsOnly, // relevant when exporting from bare-scope, especially when re-exporting existing versions, the normal calculation based on getDivergeData won't work includeParents, // relevant when exportHeadsOnly is used. sometimes the parents head are needed as well filterOutExistingVersions, // go to the remote and check whether the version exists there. if so, don't export it exportOrigin = 'export' }) { this.logger.debug(`scope.exportMany, ids: ${ids.toString()}`); const scopeRemotes = await (0, _scope2().getScopeRemotes)(scope); const groupByScopeName = idList => { return idList.reduce((acc, current) => { const scopeName = current.scope; if (!scopeName) { throw new Error(`toGroupByScopeName() expect ids to have a scope name, got ${current.toString()}`); } if (acc[scopeName]) acc[scopeName].push(current);else acc[scopeName] = new (_componentId().ComponentIdList)(current); return acc; }, {}); }; const idsGroupedByScope = groupByScopeName(ids); /** * when a component is exported for the first time, and the lane-scope is not the same as the component-scope, it's * important to validate that there is no such component in the original scope. otherwise, later, it'll be impossible * to merge the lane because these two components don't have any snap in common. */ const validateTargetScopeForLanes = async () => { if (!laneObject) { return; } const newIds = _componentId().ComponentIdList.fromArray(ids.filter(id => !scope.isExported(id))); const newIdsGrouped = groupByScopeName(newIds); await (0, _pMapSeries().default)(Object.keys(newIdsGrouped), async scopeName => { if (scopeName === laneObject.scope) { // this validation is redundant if the lane-component is in the same scope as the lane-object return; } // by getting the remote we also validate that this scope actually exists. const remote = await scopeRemotes.resolve(scopeName); const list = await remote.list(); const listIds = _componentId().ComponentIdList.fromArray(list.map(listItem => listItem.id)); newIdsGrouped[scopeName].forEach(id => { if (listIds.hasWithoutVersion(id)) { throw new Error(`unable to export a lane with a new component "${id.toString()}", which has the default-scope "${scopeName}". this scope already has a component with the same name. 
as such, it'll be impossible to merge the lane later because these two components are different`); } }); }); }; /** * by default, when exporting a lane, it traverse from the Lane's head and therefore it may skip the main head. * later, if for some reason the original component was deleted in its scope, the head object will be missing. */ const addMainHeadIfPossible = async (allHashes, modelComponent) => { const head = modelComponent.head; if (!head) return; if (allHashes.find(h => h.hash === head.hash)) return; // head is already in the list if (!(await scope.objects.has(head))) return; // it should not happen. but if it does, we don't want to block the export allHashes.push(head); }; const getVersionsToExport = async modelComponent => { if (exportHeadsOnly) { const head = laneObject?.getCompHeadIncludeUpdateDependents(modelComponent.toComponentId()) || modelComponent.head; if (!head) { throw new Error(`getVersionsToExport should export the head only, but the head of ${modelComponent.id()} is missing`); } if (includeParents) { const headVersion = await modelComponent.loadVersion(head.toString(), scope.objects); return [head, ...headVersion.parents]; } return [head]; } const fromWorkspace = this.workspace?.getIdIfExist(modelComponent.toComponentId()); const localTagsOrHashes = await modelComponent.getLocalHashes(scope.objects, fromWorkspace); if (!allVersions) { return localTagsOrHashes; } const allHashes = await (0, _component().getAllVersionHashes)({ modelComponent, repo: scope.objects }); await addMainHeadIfPossible(allHashes, modelComponent); return allHashes; }; await validateTargetScopeForLanes(); const groupedByScopeString = Object.keys(idsGroupedByScope).map(scopeName => `scope "${scopeName}": ${idsGroupedByScope[scopeName].toString()}`).join(', '); this.logger.debug(`export-scope-components, export to the following scopes ${groupedByScopeString}`); const getUpdatedObjectsToExport = async (remoteNameStr, bitIds, lane) => { bitIds.throwForDuplicationIgnoreVersion(); const remote = await scopeRemotes.resolve(remoteNameStr); const idsToChangeLocally = _componentId().ComponentIdList.fromArray(bitIds.filter(id => !scope.isExported(id))); const componentsAndObjects = []; const objectList = new (_objects().ObjectList)(); const objectListPerName = {}; const modelComponents = await (0, _pMapSeries().default)(bitIds, id => scope.getModelComponent(id)); // super important! otherwise, the processModelComponent() changes objects in memory, while the key remains the same scope.objects.clearObjectsFromCache(); const refsToPotentialExportPerComponent = await (0, _pMapSeries().default)(modelComponents, async modelComponent => { const refs = await getVersionsToExport(modelComponent); return { modelComponent, refs }; }); const getRefsToExportPerComp = async () => { if (!filterOutExistingVersions) { return refsToPotentialExportPerComponent; } const allHashesAsStr = refsToPotentialExportPerComponent.map(r => r.refs).flat().map(ref => ref.toString()); const existingOnRemote = await scope.scopeImporter.checkWhatHashesExistOnRemote(remoteNameStr, allHashesAsStr); // for lanes, some snaps might be already on the remote, and the reason they're staged is due to a previous merge. 
const refsToExportPerComponent = refsToPotentialExportPerComponent.map(({ modelComponent, refs }) => { const filteredOutRefs = []; const refsToExport = refs.filter(ref => { const existing = existingOnRemote.includes(ref.toString()); if (existing) filteredOutRefs.push(ref.toString()); return !existing; }); if (filteredOutRefs.length) this.logger.debug(`export-scope-components, the following refs were filtered out from ${modelComponent.id().toString()} because they already exist on the remote:\n${filteredOutRefs.join('\n')}`); return refsToExport.length ? { modelComponent, refs: refsToExport } : null; }); return (0, _lodash().compact)(refsToExportPerComponent); }; const bitObjectToObjectItem = async obj => { return { ref: obj.hash(), buffer: await obj.compress(), type: obj.getType() }; }; const processModelComponent = async ({ modelComponent, refs }) => { const idFromWorkspace = this.workspace?.getIdIfExist(modelComponent.toComponentId()); modelComponent.clearStateData(); const objectItems = await modelComponent.collectVersionsObjects(scope.objects, refs.map(ref => ref.toString()), throwForMissingArtifacts, idFromWorkspace); const objectsList = await new (_objects().ObjectList)(objectItems).toBitObjects(); const componentAndObject = { component: modelComponent, objects: objectsList.getAll() }; await this.convertToCorrectScope(scope, componentAndObject, remoteNameStr, bitIds, ids); const remoteObj = { url: remote.host, name: remote.name, date: Date.now().toString() }; modelComponent.addScopeListItem(remoteObj); componentsAndObjects.push(componentAndObject); const componentBuffer = await modelComponent.compress(); const componentData = { ref: modelComponent.hash(), buffer: componentBuffer, type: modelComponent.getType() }; const objectsBuffer = await Promise.all(componentAndObject.objects.map(async obj => bitObjectToObjectItem(obj))); const allObjectsData = [componentData, ...objectsBuffer]; objectListPerName[modelComponent.name] = new (_objects().ObjectList)(allObjectsData); objectList.addIfNotExist(allObjectsData); }; const refsToExportPerComponent = await getRefsToExportPerComp(); // don't use Promise.all, otherwise, it'll throw "JavaScript heap out of memory" on a large set of data await (0, _pMapSeries().default)(refsToExportPerComponent, processModelComponent); if (lane) { const laneHistory = await scope.lanes.getOrCreateLaneHistory(lane); const laneHistoryData = await bitObjectToObjectItem(laneHistory); objectList.addIfNotExist([laneHistoryData]); const laneData = await bitObjectToObjectItem(lane); objectList.addIfNotExist([laneData]); } return { remote, objectList, objectListPerName, idsToChangeLocally, componentsAndObjects }; }; const manyObjectsPerRemote = laneObject ? 
[await getUpdatedObjectsToExport(laneObject.scope, ids, laneObject)] : await (0, _pMapSeries().default)(Object.keys(idsGroupedByScope), scopeName => getUpdatedObjectsToExport(scopeName, idsGroupedByScope[scopeName], laneObject)); const pushAllToCentralHub = async () => { const objectList = this.transformToOneObjectListWithScopeData(manyObjectsPerRemote); const http = await _scope3().Http.connect(_legacy2().CENTRAL_BIT_HUB_URL, _legacy2().CENTRAL_BIT_HUB_NAME); const pushResults = await http.pushToCentralHub(objectList, { origin: exportOrigin }); const { failedScopes, successIds, errors, metadata } = pushResults; if (failedScopes.length) { throw new (_legacy5().PersistFailed)(failedScopes, errors); } const exportedBitIds = successIds.map(id => _componentId().ComponentID.fromString(id)); if (manyObjectsPerRemote.length === 1) { // when on a lane, it's always exported to the lane. and the ids can be from different scopes, so having the // filter below, will remove these components from the output of bit-export at the end. manyObjectsPerRemote[0].exportedIds = exportedBitIds.map(id => id.toString()); } else { manyObjectsPerRemote.forEach(objectPerRemote => { const idsPerScope = exportedBitIds.filter(id => id.scope === objectPerRemote.remote.name); // it's possible that idsPerScope is an empty array, in case the objects were exported already before objectPerRemote.exportedIds = idsPerScope.map(id => id.toString()); }); } return { rippleJobs: metadata?.jobs }; }; const updateLocalObjects = async lane => { return (0, _pMapSeries().default)(manyObjectsPerRemote, async objectsPerRemote => { const { remote, idsToChangeLocally, componentsAndObjects, exportedIds } = objectsPerRemote; const remoteNameStr = remote.name; componentsAndObjects.forEach(componentObject => scope.sources.put(componentObject)); // update lanes if (lane) { if (idsToChangeLocally.length) { // otherwise, we don't want to update scope-name of components in the lane object scope.objects.add(lane); } await scope.objects.remoteLanes.syncWithLaneObject(remoteNameStr, lane); } if (isOnMain && !lane) { // all exported from main const remoteLaneId = _laneId().LaneId.from(_laneId().DEFAULT_LANE, remoteNameStr); await scope.objects.remoteLanes.loadRemoteLane(remoteLaneId); await Promise.all(componentsAndObjects.map(async ({ component }) => { await scope.objects.remoteLanes.addEntry(remoteLaneId, component.toComponentId(), component.getHead()); })); } await scope.objects.persist(); // eslint-disable-next-line @typescript-eslint/no-non-null-assertion const newIdsOnRemote = exportedIds.map(id => _componentId().ComponentID.fromString(id)); // remove version. 
exported component might have multiple versions exported const idsWithRemoteScope = newIdsOnRemote.map(id => id.changeVersion(undefined)); const idsWithRemoteScopeUniq = _componentId().ComponentIdList.uniqFromArray(idsWithRemoteScope).sort(); return { newIdsOnRemote, exported: idsWithRemoteScopeUniq, updatedLocally: _componentId().ComponentIdList.fromArray(idsWithRemoteScopeUniq.filter(id => idsToChangeLocally.hasWithoutScopeAndVersion(id))) }; }); }; const warnCancelExport = () => { this.logger.consoleWarning(`unable to cancel the export process at this point because the communication with the remote already started`); }; process.on('SIGINT', warnCancelExport); let centralHubResults; if (resumeExportId) { const remotes = manyObjectsPerRemote.map(o => o.remote); await (0, _exportScopeComponents().validateRemotes)(remotes, resumeExportId); await (0, _exportScopeComponents().persistRemotes)(manyObjectsPerRemote, resumeExportId); } else if (this.shouldPushToCentralHub(manyObjectsPerRemote, scopeRemotes, originDirectly)) { centralHubResults = await pushAllToCentralHub(); } else { // await pushToRemotes(); await this.pushToRemotesCarefully(manyObjectsPerRemote, resumeExportId); } this.logger.setStatusLine('updating data locally...'); const results = await updateLocalObjects(laneObject); process.removeListener('SIGINT', warnCancelExport); return { newIdsOnRemote: results.map(r => r.newIdsOnRemote).flat(), exported: _componentId().ComponentIdList.uniqFromArray(results.map(r => r.exported).flat()), updatedLocally: _componentId().ComponentIdList.uniqFromArray(results.map(r => r.updatedLocally).flat()), rippleJobs: centralHubResults?.rippleJobs || [] }; } transformToOneObjectListWithScopeData(objectsPerRemote) { const objectList = new (_objects().ObjectList)(); objectsPerRemote.forEach(objPerRemote => { objPerRemote.objectList.addScopeName(objPerRemote.remote.name); objectList.mergeObjectList(objPerRemote.objectList); }); return objectList; } async ejectExportedComponents(componentsIds) { const consumer = this.workspace.consumer; let ejectResults; try { ejectResults = await this.eject.eject(componentsIds, { force: true }); } catch (err) { const ejectErr = `The components ${componentsIds.map(c => c.toString()).join(', ')} were exported successfully. However, the eject operation has failed due to an error: ${err.msg || err}`; this.logger.error(ejectErr, err); throw new Error(ejectErr); } // run the consumer.onDestroy() again, to write the changes done by the eject action to .bitmap await consumer.onDestroy('export (eject)'); return ejectResults; } async pushToRemotesCarefully(manyObjectsPerRemote, resumeExportId) { const remotes = manyObjectsPerRemote.map(o => o.remote); const clientId = resumeExportId || Date.now().toString(); await this.pushRemotesPendingDir(clientId, manyObjectsPerRemote, resumeExportId); await (0, _exportScopeComponents().validateRemotes)(remotes, clientId, Boolean(resumeExportId)); await (0, _exportScopeComponents().persistRemotes)(manyObjectsPerRemote, clientId); } async pushRemotesPendingDir(clientId, manyObjectsPerRemote, resumeExportId) { if (resumeExportId) { this.logger.debug('pushRemotesPendingDir - skip as the resumeExportId was passed'); // no need to transfer the objects, they're already on the server. also, since this clientId // exists already on the remote pending-dir, it'll cause a collision. 
return; } const pushOptions = { clientId }; const pushedRemotes = []; await (0, _pMapSeries().default)(manyObjectsPerRemote, async objectsPerRemote => { const { remote, objectList } = objectsPerRemote; this.logger.setStatusLine(`transferring ${objectList.count()} objects to the remote "${remote.name}"...`); try { await remote.pushMany(objectList, pushOptions, {}); this.logger.debug('pushRemotesPendingDir, successfully pushed all objects to the pending-dir directory on the remote'); pushedRemotes.push(remote); } catch (err) { this.logger.warn('exportMany, failed pushing objects to the remote'); await (0, _exportScopeComponents().removePendingDirs)(pushedRemotes, clientId); throw err; } }); } shouldPushToCentralHub(manyObjectsPerRemote, scopeRemotes, originDirectly = false) { if (originDirectly) return false; const hubRemotes = manyObjectsPerRemote.filter(m => scopeRemotes.isHub(m.remote.name)); if (!hubRemotes.length) return false; if (hubRemotes.length === manyObjectsPerRemote.length) return true; // all are hub // @todo: maybe create a flag "no-central" to support this workflow throw new (_bitError().BitError)(`some of your components are configured to be exported to a local scope and some to the bit.cloud hub. this is not supported`); } /** * Component and dependencies id changes: * When exporting components with dependencies to a bare-scope, some of the dependencies may be created locally and as * a result their scope-name is null. Before the bare-scope gets the components, convert these scope names * to the bare-scope name. * * This is the Harmony version of "convertToCorrectScope". No more codemod and no more hash changes. */ async convertToCorrectScope(scope, componentsObjects, remoteScope, exportingIds, ids, shouldFork = false // not in used currently, but might be needed soon ) { const shouldChangeScope = shouldFork ? remoteScope !== componentsObjects.component.scope : !componentsObjects.component.scope; const hasComponentChanged = shouldChangeScope; if (shouldChangeScope) { const idWithFutureScope = ids.searchWithoutScopeAndVersion(componentsObjects.component.toComponentId()); componentsObjects.component.scope = idWithFutureScope?.scope || remoteScope; } // return true if one of the versions has changed or the component itself return hasComponentChanged; } async getComponentsToExport(ids = [], includeNonStaged) { const consumer = this.workspace.consumer; const componentsList = new (_legacy3().ComponentsList)(this.workspace); const idsHaveWildcard = (0, _legacy4().hasWildcard)(ids); const throwForLocalOnlyIfNeeded = async bitIds => { const localOnlyComponents = this.workspace.listLocalOnly(); const localOnlyExportPending = bitIds.filter(id => localOnlyComponents.hasWithoutScopeAndVersion(id)); if (localOnlyExportPending.length) { throw new (_bitError().BitError)(`unable to export the following components as they are local only: (either bit-reset them or run "bit local-only unset" to make them non local only) ${localOnlyExportPending.map(c => c.toString()).join('\n')}`); } return { idsToExport: _componentId().ComponentIdList.fromArray(bitIds), missingScope: [] }; }; if (isUserTryingToExportLanes(consumer)) { if (ids.length) { throw new (_bitError().BitError)(`when checked out to a lane, all its components are exported. please omit the ids`); } const { componentsToExport, laneObject } = await this.getLaneCompIdsToExport(consumer, includeNonStaged); const loaderMsg = componentsToExport.length > 1 ? 
BEFORE_EXPORTS : BEFORE_EXPORT; this.logger.setStatusLine(loaderMsg); const filtered = await throwForLocalOnlyIfNeeded(componentsToExport); return _objectSpread(_objectSpread({}, filtered), {}, { laneObject }); } if (!ids.length || idsHaveWildcard) { this.logger.setStatusLine(BEFORE_LOADING_COMPONENTS); const exportPendingComponents = includeNonStaged ? await componentsList.listNonNewComponentsIds() : await componentsList.listExportPendingComponentsIds(); const componentsToExport = idsHaveWildcard ? _legacy3().ComponentsList.filterComponentsByWildcard(exportPendingComponents, ids) : exportPendingComponents; const loaderMsg = componentsToExport.length > 1 ? BEFORE_EXPORTS : BEFORE_EXPORT; this.logger.setStatusLine(loaderMsg); return throwForLocalOnlyIfNeeded(componentsToExport); } this.logger.setStatusLine(BEFORE_EXPORT); // show single export const parsedIds = await Promise.all(ids.map(id => getParsedId(consumer, id))); // load the components for fixing any out-of-sync issues. await consumer.loadComponents(_componentId().ComponentIdList.fromArray(parsedIds)); return throwForLocalOnlyIfNeeded(_componentId().ComponentIdList.fromArray(parsedIds)); } async getLaneCompIdsToExport(consumer, includeNonStaged) { const currentLaneId = consumer.getCurrentLaneId(); const laneObject = await consumer.scope.loadLane(currentLaneId); if (!laneObject) { throw new Error(`fatal: unable to load the current lane object (${currentLaneId.toString()})`); } this.logger.setStatusLine(BEFORE_LOADING_COMPONENTS); const componentsList = new (_legacy3().ComponentsList)(this.workspace); const componentsToExportWithoutRemoved = includeNonStaged ? await componentsList.listNonNewComponentsIds() : await componentsList.listExportPendingComponentsIds(laneObject); const removedStagedBitIds = await this.getRemovedStagedBitIds(); const componentsToExport = _componentId().ComponentIdList.uniqFromArray([...componentsToExportWithoutRemoved, ...removedStagedBitIds]); return { componentsToExport, laneObject }; } async getRemovedStagedBitIds() { const removedStaged = await this.remove.getRemovedStaged(); return _componentId().ComponentIdList.fromArray(removedStaged.map(id => id.changeVersion(undefined))); } static async provider([cli, scope, workspace, remove, depResolver, loggerMain, eject]) { const logger = loggerMain.createLogger(_export().ExportAspect.id); const exportMain = new ExportMain(workspace, remove, depResolver, logger, eject); cli.register(new (_resumeExportCmd().ResumeExportCmd)(scope), new (_exportCmd().ExportCmd)(exportMain)); return exportMain; } } exports.ExportMain = ExportMain; _defineProperty(ExportMain, "runtime", _cli().MainRuntime); _defineProperty(ExportMain, "dependencies", [_cli().CLIAspect, _scope().ScopeAspect, _workspace().WorkspaceAspect, _remove().RemoveAspect, _dependencyResolver().DependencyResolverAspect, _logger().LoggerAspect, _eject().EjectAspect]); _export().ExportAspect.addRuntime(ExportMain); /** * the componentsIds passed here are the ones that didn't have scope-name before, and now they have. 
* so if the bitMap.updateComponentId returns bitId without scope-name is because it couldn't find it there */ function _updateIdsOnBitMap(bitMap, componentsIds) { const updatedIds = []; componentsIds.forEach(componentsId => { const resultId = bitMap.updateComponentId(componentsId, true); if (resultId.hasVersion()) updatedIds.push(resultId); }); return updatedIds; } async function getParsedId(consumer, id) { // reason why not calling `consumer.getParsedId()` only is because a component might not be on // .bitmap and only in the scope. we support this case and enable to export try { return consumer.getParsedId(id); } catch { return consumer.scope.getParsedId(id); } } function _throwForUnsnappedLaneReadme(lane) { const readmeComponent = lane.readmeComponent; const isValid = readmeComponent?.head && lane.getComponent(readmeComponent.id) && lane.getComponentHead(readmeComponent.id)?.isEqual(readmeComponent?.head); if (!isValid) { throw new (_bitError().BitError)(`${lane?.name} has a readme component ${readmeComponent.id} that hasn't been snapped on the lane. Please run either snap -a or snap ${readmeComponent.id} to snap the component on the lane before exporting it.`); } } async function updateLanesAfterExport(consumer, lane) { const currentLane = consumer.getCurrentLaneId(); const isCurrentLane = lane.name === currentLane.name; if (!isCurrentLane) { throw new Error(`updateLanesAfterExport should get called only with current lane, got ${lane.name}, current ${currentLane.name}`); } consumer.setCurrentLane(lane.toLaneId(), true); consumer.scope.scopeJson.removeLaneFromNew(lane.name); lane.isNew = false; } function isUserTryingToExportLanes(consumer) { return consumer.isOnLane(); } var _default = exports.default = ExportMain; //# sourceMappingURL=export.main.runtime.js.map
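/*
 * Usage sketch (added for illustration; not part of the compiled output).
 * It assumes you already hold an ExportMain instance, for example the
 * `exportMain` returned by ExportMain.provider() when the aspect is loaded;
 * how you obtain that instance is an assumption here, not something this
 * file defines. The params and the result shape follow export() above:
 * export() clears the workspace cache, optionally ejects the exported
 * components (when `eject: true` is passed), and resolves with the exported
 * ids plus lane and Ripple CI job information from exportComponents().
 *
 *   // hypothetical helper, kept inside this comment so the module's
 *   // runtime behavior is unchanged
 *   async function exportAll(exportMain) {
 *     const results = await exportMain.export({ eject: false });
 *     console.log(`exported ${results.componentsIds.length} component(s)`);
 *     results.rippleJobUrls.forEach((url) => console.log(`Ripple CI: ${url}`));
 *     return results;
 *   }
 */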