mongodb-data-service
Version:
MongoDB Data Service
1,031 lines • 97.4 kB
JavaScript
"use strict";
// TypeScript emit helper: runs decorator-registered initializers against an
// instance. When a third argument is supplied (field-initializer semantics)
// the value is threaded through each initializer and the final value is
// returned; otherwise the initializers are invoked for effect only.
var __runInitializers = (this && this.__runInitializers) || function (thisArg, initializers, value) {
    var hasValue = arguments.length > 2;
    for (var index = 0; index < initializers.length; index++) {
        if (hasValue) {
            value = initializers[index].call(thisArg, value);
        } else {
            initializers[index].call(thisArg);
        }
    }
    return hasValue ? value : void 0;
};
// TypeScript emit helper: applies a list of ES decorators to one class member
// and collects the initializers / extra initializers they register.
var __esDecorate = (this && this.__esDecorate) || function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) {
function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; }
// "getter"/"setter" decorate the get/set slot of the descriptor; every other
// member kind decorates the "value" slot.
var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value";
var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null;
var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {});
var _, done = false;
// Decorators apply innermost-first, i.e. in reverse declaration order.
for (var i = decorators.length - 1; i >= 0; i--) {
// Each decorator receives its own shallow copy of the context (with a
// fresh copy of the `access` object) so one decorator cannot mutate
// what the next one sees.
var context = {};
for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p];
for (var p in contextIn.access) context.access[p] = contextIn.access[p];
// addInitializer is only valid while decoration is in progress.
context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); };
var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context);
if (kind === "accessor") {
// Accessor decorators may return replacement get/set and an `init` function.
if (result === void 0) continue;
if (result === null || typeof result !== "object") throw new TypeError("Object expected");
if (_ = accept(result.get)) descriptor.get = _;
if (_ = accept(result.set)) descriptor.set = _;
if (_ = accept(result.init)) initializers.unshift(_);
}
else if (_ = accept(result)) {
// Field decorators return an initializer; other kinds return a replacement member.
if (kind === "field") initializers.unshift(_);
else descriptor[key] = _;
}
}
if (target) Object.defineProperty(target, contextIn.name, descriptor);
done = true;
};
// TypeScript emit helper: ES-module interop. Genuine ES modules pass through
// untouched; plain CommonJS exports are wrapped as the `default` export.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.DataServiceImpl = void 0;
const events_1 = require("events");
const mongodb_1 = require("mongodb");
const mongodb_2 = require("mongodb");
const mongodb_connection_string_url_1 = __importDefault(require("mongodb-connection-string-url"));
const mongodb_ns_1 = __importDefault(require("mongodb-ns"));
const instance_detail_helper_1 = require("./instance-detail-helper");
const instance_detail_helper_2 = require("./instance-detail-helper");
const redact_1 = require("./redact");
const connect_mongo_client_1 = require("./connect-mongo-client");
const run_command_1 = require("./run-command");
const csfle_collection_tracker_1 = require("./csfle-collection-tracker");
const compass_utils_1 = require("@mongodb-js/compass-utils");
const index_detail_helper_1 = require("./index-detail-helper");
const logger_1 = require("./logger");
const lodash_1 = require("lodash");
/**
 * De-duplicates `values` by the given property. Later entries win (the Map
 * keeps the last value seen for each key) while the result preserves the
 * first-seen position of each key.
 */
function uniqueBy(values, key) {
    const byKey = new Map();
    for (const val of values) {
        byKey.set(val[key], val);
    }
    return [...byKey.values()];
}
/** True when `obj` exposes no own enumerable string-keyed properties. */
function isEmptyObject(obj) {
    const ownKeyCount = Object.keys(obj).length;
    return ownKeyCount === 0;
}
/**
 * Whether the connection string already pins a non-empty `readPreference`
 * query parameter (in which case fallback read preferences must not be applied).
 */
function isReadPreferenceSet(connectionString) {
    const url = new mongodb_connection_string_url_1.default(connectionString);
    return Boolean(url.searchParams.get('readPreference'));
}
/**
 * Rebuilds a read preference with the tag sets dropped: the mode and the
 * remaining options come from the original object, while the explicit
 * `undefined` in the tags position clears the tags.
 */
function readPreferenceWithoutTags(readPreference) {
    const { mode } = readPreference;
    return new mongodb_2.ReadPreference(mode, undefined, readPreference);
}
// see ticket COMPASS-9111 for why this is necessary
/**
 * On mongos, a read preference that carries tag sets must have the tags
 * stripped; returns either `{ readPreference }` with the cleaned preference
 * or an empty object when no override is needed.
 */
function maybeOverrideReadPreference(isMongos, readPreference) {
    if (!isMongos || !readPreference?.tags) {
        return {};
    }
    return { readPreference: readPreferenceWithoutTags(readPreference) };
}
// Monotonically increasing counter; each DataServiceImpl instance takes the
// next value as its `_id` (used to label its log context).
let id = 0;
// Marker symbol — presumably used to tag sessions with the client type
// ('CRUD'/'META') they were opened on; its usage is outside this chunk, confirm.
const kSessionClientType = Symbol('kSessionClientType');
/**
 * Default log-attribute picker for `op`: most data-service methods take the
 * namespace as their first argument, so pick it up for logging when it is a
 * string; otherwise yield no attributes (undefined).
 */
const maybePickNs = ([ns]) => {
    return typeof ns === 'string' ? { ns } : undefined;
};
/**
 * Duck-types a value as a thenable. Uses optional chaining so primitives,
 * `null` and `undefined` are handled safely: the previous `'then' in val`
 * form threw a TypeError whenever a decorated method returned a non-object
 * value (the `op` wrapper explicitly supports synchronous results).
 */
const isPromiseLike = (val) => {
    return typeof val?.then === 'function';
};
/**
 * Translates the error message to something human readable.
 * String errors become `{ message }` objects; object errors missing a
 * `message` get one filled in (in place) from `err` or `errmsg`.
 * @param error - The error.
 * @returns The error with message translated.
 */
const translateErrorMessage = (error) => {
    if (typeof error === 'string') {
        return { message: error };
    }
    if (!error.message) {
        error.message = error.err || error.errmsg;
    }
    return error;
};
/**
 * Decorator to do standard op handling that is applied to every public method
 * of the data service
 *
 * - transform error message before throwing
 * - log method success / failure
 */
function op(logId, pickLogAttrs = maybePickNs) {
    return function (target, context) {
        const opName = String(context.name);
        // Must stay a plain function so `this` is the decorated instance.
        return function (...args) {
            const onFulfilled = (result) => {
                this._logger.info(logId, `Running ${opName}`, pickLogAttrs(args, result));
                return result;
            };
            const onRejected = (error) => {
                const err = translateErrorMessage(error);
                this._logger.error((0, logger_1.mongoLogId)(1_001_000_058), 'Failed to perform data service operation', {
                    op: opName,
                    message: err,
                    ...(pickLogAttrs(args) ?? {}),
                });
                throw err;
            };
            try {
                const outcome = target.call(this, ...args);
                return isPromiseLike(outcome)
                    ? outcome.then(onFulfilled, onRejected)
                    : onFulfilled(outcome);
            }
            catch (error) {
                return onRejected(error);
            }
        };
    };
}
let DataServiceImpl = (() => {
var _a;
let _classSuper = logger_1.WithLogContext;
let _instanceExtraInitializers = [];
let _collectionStats_decorators;
let _collectionInfo_decorators;
let _killOp_decorators;
let __connectionStatus_decorators;
let _listCollections_decorators;
let _listDatabases_decorators;
let _estimatedCount_decorators;
let _count_decorators;
let _createCollection_decorators;
let _createIndex_decorators;
let _deleteOne_decorators;
let _deleteMany_decorators;
let _dropCollection_decorators;
let _renameCollection_decorators;
let _dropDatabase_decorators;
let _dropIndex_decorators;
let _isListSearchIndexesSupported_decorators;
let _getSearchIndexes_decorators;
let _createSearchIndex_decorators;
let _updateSearchIndex_decorators;
let _dropSearchIndex_decorators;
let _aggregateCursor_decorators;
let _aggregate_decorators;
let _find_decorators;
let _findCursor_decorators;
let _findOneAndReplace_decorators;
let _findOneAndUpdate_decorators;
let _updateOne_decorators;
let _replaceOne_decorators;
let _explainFind_decorators;
let _explainAggregate_decorators;
let _indexes_decorators;
let _instance_decorators;
let _insertOne_decorators;
let _insertMany_decorators;
let _updateCollection_decorators;
let _currentOp_decorators;
let _serverStatus_decorators;
let _top_decorators;
let _createView_decorators;
let _databaseStats_decorators;
let _previewUpdate_decorators;
let _createDataKey_decorators;
let _listStreamProcessors_decorators;
let _startStreamProcessor_decorators;
let _stopStreamProcessor_decorators;
let _dropStreamProcessor_decorators;
return _a = class DataServiceImpl extends _classSuper {
_getOptionsWithFallbackReadPreference(options, executionOptions) {
const readPreferencesOverride = isReadPreferenceSet(this._connectionOptions.connectionString)
? undefined
: executionOptions?.fallbackReadPreference;
if (!readPreferencesOverride) {
return options;
}
return {
...options,
readPreference: readPreferencesOverride,
};
}
/**
 * @param connectionOptions - connection configuration (connection string,
 *   optional FLE options and an optional `lookup` callback).
 * @param logger - optional log writer; when omitted, every log call below is
 *   a no-op via the `logger?.` optional calls.
 * @param proxyOptions - optional proxy configuration; defaults to `{}`.
 */
constructor(connectionOptions, logger, proxyOptions) {
super();
// Runs decorator-registered instance initializers (TS decorator emit);
// returns undefined here — the real value is assigned a few lines below.
this._connectionOptions = __runInitializers(this, _instanceExtraInitializers);
this._isConnecting = false;
// When true, CRUD operations use the (possibly CSFLE-enabled) CRUD client.
this._useCRUDClient = true;
this._reauthenticationHandlers = new Set();
/**
 * Stores the most recent topology description from the server's SDAM events:
 * https://github.com/mongodb/specifications/blob/master/source/server-discovery-and-monitoring/server-discovery-and-monitoring-monitoring.rst#events
 */
this._lastSeenTopology = null;
this._isWritable = false;
this._emitter = new events_1.EventEmitter();
// Per-instance id from the module-level counter; used in the log context.
this._id = id++;
this._connectionOptions = connectionOptions;
this._proxyOptions = proxyOptions ?? {};
const logComponent = 'COMPASS-DATA-SERVICE';
const logCtx = `Connection ${this._id}`;
// Level-specific wrappers that pin the component and context for every entry.
this._logger = {
debug: (logId, ...args) => {
return logger?.debug(logComponent, logId, logCtx, ...args);
},
info: (logId, ...args) => {
return logger?.info(logComponent, logId, logCtx, ...args);
},
warn: (logId, ...args) => {
return logger?.warn(logComponent, logId, logCtx, ...args);
},
error: (logId, ...args) => {
return logger?.error(logComponent, logId, logCtx, ...args);
},
fatal: (logId, ...args) => {
return logger?.fatal(logComponent, logId, logCtx, ...args);
},
};
if (logger) {
// Unbound variant (no component/context pinned) used where the raw
// logger is required, e.g. when connecting the mongo client.
this._unboundLogger = Object.assign(logger, { mongoLogId: logger_1.mongoLogId });
}
}
// Numeric id assigned at construction; correlates instances with log entries.
get id() {
return this._id;
}
on(...args) {
this._emitter.on(...args);
return this;
}
off(...args) {
this._emitter.off(...args);
return this;
}
removeListener(...args) {
this._emitter.off(...args);
return this;
}
once(...args) {
this._emitter.once(...args);
return this;
}
getMongoClientConnectionOptions() {
// `notifyDeviceFlow` is a function which cannot be serialized for inclusion
// in the shell, `signal` is an abortSignal, and `allowedFlows` is turned
// into a function by the connection code.
return (0, lodash_1.omit)(this._mongoClientConnectionOptions, 'options.oidc.notifyDeviceFlow', 'options.oidc.signal', 'options.oidc.allowedFlows', 'options.oidc.customFetch', 'options.oidc.customHttpOptions');
}
// Returns the connection options this service was constructed with.
getConnectionOptions() {
return this._connectionOptions;
}
// Returns a fresh, parsed ConnectionString wrapper for the configured URI.
getConnectionString() {
return new mongodb_connection_string_url_1.default(this._connectionOptions.connectionString);
}
setCSFLEEnabled(enabled) {
this._logger.info((0, logger_1.mongoLogId)(1_001_000_117), 'Setting CSFLE mode', {
enabled,
});
this._useCRUDClient = enabled;
}
getCSFLEMode() {
if (this._crudClient && (0, instance_detail_helper_2.checkIsCSFLEConnection)(this._crudClient)) {
if (this._useCRUDClient) {
return 'enabled';
}
else {
return 'disabled';
}
}
else {
return 'unavailable';
}
}
/**
 * Collects storage statistics for a collection via the `$collStats`
 * aggregation stage, summing per-shard numbers into a single document, and
 * adapts them with `_buildCollectionStats`. Namespaces where `$collStats`
 * cannot work (Data Lake, views, unknown db/collection) yield empty stats
 * instead of throwing.
 */
async collectionStats(databaseName, collectionName) {
const ns = `${databaseName}.${collectionName}`;
try {
const coll = this._collection(ns, 'CRUD');
const collStats = await coll
.aggregate([
{ $collStats: { storageStats: {} } },
// Sum/aggregate the per-shard storageStats documents into one.
{
$group: {
_id: null,
capped: { $first: '$storageStats.capped' },
count: { $sum: '$storageStats.count' },
size: { $sum: { $toDouble: '$storageStats.size' } },
storageSize: {
$sum: { $toDouble: '$storageStats.storageSize' },
},
totalIndexSize: {
$sum: { $toDouble: '$storageStats.totalIndexSize' },
},
freeStorageSize: {
$sum: { $toDouble: '$storageStats.freeStorageSize' },
},
unscaledCollSize: {
$sum: {
$multiply: [
{ $toDouble: '$storageStats.avgObjSize' },
{ $toDouble: '$storageStats.count' },
],
},
},
nindexes: { $max: '$storageStats.nindexes' },
},
},
{
$addFields: {
// `avgObjSize` is the average of per-shard `avgObjSize` weighted by `count`
avgObjSize: {
$cond: {
if: { $ne: ['$count', 0] },
then: {
$divide: ['$unscaledCollSize', { $toDouble: '$count' }],
},
else: 0,
},
},
},
},
], { enableUtf8Validation: false })
.toArray();
if (!collStats || collStats[0] === undefined) {
throw new Error(`Error running $collStats aggregation stage on ${ns}`);
}
collStats[0].ns = ns;
return this._buildCollectionStats(databaseName, collectionName, collStats[0]);
}
catch (error) {
const message = error.message;
if (
// We ignore errors for fetching collStats when requesting on an
// unsupported collection type: either a view or a ADF
message.includes('not valid for Data Lake') ||
message.includes('is a view, not a collection') ||
// When trying to fetch collectionStats for a collection whose db
// does not exist or the collection itself does not exist, the
// server throws an error. This happens because we show collections
// to the user from their privileges.
message.includes(`Database [${databaseName}] not found`) ||
message.includes(`Collection [${databaseName}.${collectionName}] not found`)) {
return this._buildCollectionStats(databaseName, collectionName, {});
}
throw error;
}
}
async collectionInfo(dbName, collName) {
const [collInfo] = await this._listCollections(dbName, { name: collName });
return ((0, instance_detail_helper_2.adaptCollectionInfo)({
db: dbName,
...collInfo,
}) ?? null);
}
async killOp(id, comment) {
const db = this._database('admin', 'META');
return (0, run_command_1.runCommand)(db, { killOp: 1, id, comment }, { enableUtf8Validation: false });
}
// Whether the deployment currently accepts writes; initialized to false in
// the constructor and updated by connection listeners (not in this view).
isWritable() {
return this._isWritable;
}
// True when the last observed topology type is 'Sharded' (i.e. mongos).
isMongos() {
return this.getCurrentTopologyType() === 'Sharded';
}
// Topology type from the last SDAM description; 'Unknown' before any event.
getCurrentTopologyType() {
return this.getLastSeenTopology()?.type ?? 'Unknown';
}
async _connectionStatus() {
const adminDb = this._database('admin', 'META');
return await (0, run_command_1.runCommand)(adminDb, {
connectionStatus: 1,
showPrivileges: true,
}, { enableUtf8Validation: false });
}
async _getPrivilegesOrFallback(privileges = null) {
if (privileges) {
return privileges;
}
const { authInfo: { authenticatedUserPrivileges }, } = await this._connectionStatus();
return authenticatedUserPrivileges;
}
async _getRolesOrFallback(roles = null) {
if (roles) {
return roles;
}
const { authInfo: { authenticatedUserRoles }, } = await this._connectionStatus();
return authenticatedUserRoles;
}
/**
 * Lists collections of a database with an optional filter. While streaming
 * results, forwards collection info to the CSFLE collection tracker (skipped
 * for nameOnly requests, which carry no info to track). Resolves to [] on
 * any failure — see the inline note / COMPASS-5275.
 */
async _listCollections(databaseName, filter = {}, { nameOnly } = {}) {
try {
const cursor = this._database(databaseName, 'CRUD').listCollections(filter, {
nameOnly,
// Strip read-preference tags when talking to mongos (COMPASS-9111).
...maybeOverrideReadPreference(this.isMongos(), this._crudClient?.readPreference),
});
// Iterate instead of using .toArray() so we can emit
// collection info update events as they come in.
const results = [];
for await (const result of cursor) {
if (!nameOnly) {
this._csfleCollectionTracker?.updateCollectionInfo(`${databaseName}.${result.name}`, result);
}
results.push(result);
}
return results;
}
catch (err) {
// Currently Compass should not fail if listCollections failed for
// any possible reason to preserve current behavior. We probably
// want this to check at least that what we got back is a server
// error and not a weird runtime issue on our side that can be
// swallowed in this case, ideally we know exactly what server
// errors we want to handle here and only avoid throwing in these
// cases
//
// TODO: https://jira.mongodb.org/browse/COMPASS-5275
this._logger.warn((0, logger_1.mongoLogId)(1_001_000_099), 'Failed to run listCollections', { message: err.message });
return [];
}
}
/**
 * Lists collections by merging two sources: the listCollections command and
 * namespaces inferred from the user's `find` privileges. Entries from
 * listCollections take precedence over privilege-inferred ones (uniqueBy
 * keeps the later entry per name).
 */
async listCollections(databaseName, filter = {}, { nameOnly, fetchNamespacesFromPrivileges = true, privileges = null, } = {}) {
const listCollections = async () => {
const colls = await this._listCollections(databaseName, filter, {
nameOnly,
});
return colls.map((coll) => ({
inferred_from_privileges: false,
...coll,
}));
};
const getCollectionsFromPrivileges = async () => {
if (!fetchNamespacesFromPrivileges) {
return [];
}
const databases = (0, instance_detail_helper_2.getPrivilegesByDatabaseAndCollection)(await this._getPrivilegesOrFallback(privileges), ['find']);
return Object.keys(
// Privileges might not have a database we are looking for
databases[databaseName] || {})
.filter(
// Privileges can have collection name '' that indicates
// privileges on all collections in the database, we don't want
// those registered as "real" collection names
Boolean)
.map((name) => ({ name, inferred_from_privileges: true }));
};
const [listedCollections, collectionsFromPrivileges] = await Promise.all([
listCollections(),
// If the filter is not empty, we can't meaningfully derive collections
// from privileges and filter them as the criteria might include any key
// from the listCollections result object and there is no such info in
// privileges. Because of that we are ignoring privileges completely if
// listCollections was called with a filter.
isEmptyObject(filter) ? getCollectionsFromPrivileges() : [],
]);
const collections = uniqueBy(
// NB: Order is important, we want listed collections to take precedence
// if they were fetched successfully
[...collectionsFromPrivileges, ...listedCollections], 'name').map(({ inferred_from_privileges, ...coll }) => ({
inferred_from_privileges,
...(0, instance_detail_helper_2.adaptCollectionInfo)({ db: databaseName, ...coll }),
}));
return collections;
}
/**
 * Lists databases by merging three sources: the listDatabases command,
 * databases inferred from `find` privileges, and databases derived from
 * well-known built-in roles (Atlas shared tier workaround). Listed
 * databases take precedence, then privilege-derived, then role-derived
 * (uniqueBy keeps the later entry per name).
 */
async listDatabases({ nameOnly, fetchNamespacesFromPrivileges = true, privileges = null, roles = null, } = {}) {
const adminDb = this._database('admin', 'CRUD');
const listDatabases = async () => {
try {
const { databases } = await (0, run_command_1.runCommand)(adminDb, {
listDatabases: 1,
nameOnly,
}, {
enableUtf8Validation: false,
// Strip read-preference tags when talking to mongos (COMPASS-9111).
...maybeOverrideReadPreference(this.isMongos(), this._crudClient?.readPreference),
});
return databases.map((x) => ({
...x,
inferred_from_privileges: false,
}));
}
catch (err) {
// Currently Compass should not fail if listDatabase failed for any
// possible reason to preserve current behavior. We probably want this
// to check at least that what we got back is a server error and not a
// weird runtime issue on our side that can be swallowed in this case,
// ideally we know exactly what server errors we want to handle here
// and only avoid throwing in these cases
//
// TODO: https://jira.mongodb.org/browse/COMPASS-5275
this._logger.warn((0, logger_1.mongoLogId)(1_001_000_098), 'Failed to run listDatabases', { message: err.message });
return [];
}
};
const getDatabasesFromPrivileges = async () => {
if (!fetchNamespacesFromPrivileges) {
return [];
}
const databases = (0, instance_detail_helper_2.getPrivilegesByDatabaseAndCollection)(await this._getPrivilegesOrFallback(privileges), ['find']);
return Object.keys(databases)
.filter(
// For the roles created in admin database, the database name
// can be '' meaning that it applies to all databases. We can't
// meaningfully handle this in the UI so we are filtering these
// out
Boolean)
.map((name) => ({ name, inferred_from_privileges: true }));
};
const getDatabasesFromRoles = async () => {
const databases = (0, instance_detail_helper_2.getDatabasesByRoles)(await this._getRolesOrFallback(roles),
// https://jira.mongodb.org/browse/HELP-32199
// Atlas shared tier MongoDB server version v5+ does not return
// `authenticatedUserPrivileges` as part of the `connectionStatus`.
// As a workaround we show the databases the user has
// certain general built-in roles for.
// This does not cover custom user roles which can
// have custom privileges that we can't currently fetch.
['read', 'readWrite', 'dbAdmin', 'dbOwner']);
return databases.map((name) => ({
name,
inferred_from_privileges: true,
}));
};
const [listedDatabases, databasesFromPrivileges, databasesFromRoles] = await Promise.all([
listDatabases(),
getDatabasesFromPrivileges(),
getDatabasesFromRoles(),
]);
const databases = uniqueBy(
// NB: Order is important, we want listed collections to take precedence
// if they were fetched successfully
[...databasesFromRoles, ...databasesFromPrivileges, ...listedDatabases], 'name').map(({ name, inferred_from_privileges, ...db }) => {
return {
_id: name,
name,
inferred_from_privileges,
...(0, instance_detail_helper_2.adaptDatabaseInfo)(db),
};
});
return databases;
}
// Registers a handler invoked when the driver requests re-authentication
// (handlers are awaited in registration order; see _requestReauthenticationFromUser).
addReauthenticationHandler(handler) {
this._reauthenticationHandlers.add(handler);
}
/**
 * Awaits every registered re-authentication handler in registration order.
 * The completion log records `wantsReauth: !threw`, i.e. whether all
 * handlers finished without throwing; a handler's rejection propagates to
 * the caller after the log entry is written.
 */
async _requestReauthenticationFromUser() {
this._logger.info((0, logger_1.mongoLogId)(1_001_000_194), 'Requesting re-authentication from user');
let threw = true;
try {
for (const handler of this._reauthenticationHandlers)
await handler();
threw = false;
}
finally {
this._logger.info((0, logger_1.mongoLogId)(1_001_000_193), 'Completed re-authentication request', {
wantsReauth: !threw,
});
}
}
/**
 * Establishes the connection. No-op when already connected; guards against
 * concurrent connect calls via _isConnecting. On success, stores the
 * metadata/CRUD clients, tunnel, connection state and resolved options, and
 * wires OIDC secret/auth-failure events onto the emitter. On failure, logs
 * 'Connecting Failed' and rethrows the original error.
 */
async connect({ signal, productName, productDocsLink, } = {}) {
if (this._metadataClient) {
(0, logger_1.debug)('already connected');
return;
}
if (this._isConnecting) {
(0, logger_1.debug)('connect method called more than once');
return;
}
(0, logger_1.debug)('connecting...');
this._isConnecting = true;
const clusterName = this._connectionOptions.lookup?.().clusterName;
this._logger.info((0, logger_1.mongoLogId)(1_001_000_014), 'Connecting Started', {
connectionId: this._id,
url: (0, redact_1.redactConnectionString)(this._connectionOptions.connectionString),
csfle: this._csfleLogInformation(this._connectionOptions.fleOptions),
...(clusterName && { clusterName }),
});
try {
const [metadataClient, crudClient, tunnel, state, connectionOptions] = await (0, connect_mongo_client_1.connectMongoClientDataService)({
connectionOptions: this._connectionOptions,
proxyOptions: this._proxyOptions,
setupListeners: this._setupListeners.bind(this),
signal,
logger: this._unboundLogger,
productName,
productDocsLink,
reauthenticationHandler: this._requestReauthenticationFromUser.bind(this),
});
const attr = {
connectionId: this._id,
isWritable: this.isWritable(),
isMongos: this.isMongos(),
...(clusterName && { clusterName }),
};
this._logger.info((0, logger_1.mongoLogId)(1_001_000_015), 'Connecting Succeeded', attr);
(0, logger_1.debug)('connected!', attr);
// Re-emit OIDC plugin events so consumers can persist refreshed secrets
// and react to auth failures.
state.oidcPlugin.logger.on('mongodb-oidc-plugin:state-updated', () => {
this._emitter.emit('connectionInfoSecretsChanged');
});
state.oidcPlugin.logger.on('mongodb-oidc-plugin:auth-failed', ({ error }) => {
this._emitter.emit('oidcAuthFailed', error);
});
// Only assign instance state once everything connected successfully.
this._metadataClient = metadataClient;
this._crudClient = crudClient;
this._tunnel = tunnel;
this._state = state;
this._mongoClientConnectionOptions = connectionOptions;
this._csfleCollectionTracker = new csfle_collection_tracker_1.CSFLECollectionTrackerImpl(this, this._crudClient);
}
catch (error) {
this._logger.info((0, logger_1.mongoLogId)(1_001_000_359), 'Connecting Failed', {
connectionId: this._id,
error: error && typeof error === 'object' && 'message' in error
? error?.message
: 'unknown error',
...(clusterName && { clusterName }),
});
throw error;
}
finally {
this._isConnecting = false;
}
}
estimatedCount(ns, options = {}, executionOptions) {
const maxTimeMS = options.maxTimeMS ?? 500;
return this._cancellableOperation(async (session) => {
return this._collection(ns, 'CRUD').estimatedDocumentCount({
...options,
maxTimeMS,
session,
});
}, (session) => session.endSession(), executionOptions?.abortSignal);
}
count(ns, filter, options = {}, executionOptions) {
return this._cancellableOperation(async (session) => {
return this._collection(ns, 'CRUD').countDocuments(filter, {
...this._getOptionsWithFallbackReadPreference(options, executionOptions),
session,
});
}, (session) => session.endSession(), executionOptions?.abortSignal);
}
async createCollection(ns, options) {
const collectionName = this._collectionName(ns);
const db = this._database(ns, 'CRUD');
return await db.createCollection(collectionName, options);
}
async createIndex(ns, spec, options) {
const coll = this._collection(ns, 'CRUD');
return await coll.createIndex(spec, options);
}
async deleteOne(ns, filter, options) {
const coll = this._collection(ns, 'CRUD');
return await coll.deleteOne(filter, options);
}
async deleteMany(ns, filter, options) {
const coll = this._collection(ns, 'CRUD');
return await coll.deleteMany(filter, options);
}
async updateMany(ns, filter, update, options) {
const coll = this._collection(ns, 'CRUD');
return await coll.updateMany(filter, update, options);
}
/**
 * Tears the connection down: closes both clients (the CRUD client only when
 * it is distinct from the metadata client), the tunnel and the connection
 * state, in parallel. Each close failure is only debug-logged so the rest
 * still runs; _cleanup always executes in the finally block.
 */
async disconnect() {
this._logger.info((0, logger_1.mongoLogId)(1_001_000_016), 'Disconnecting');
try {
await Promise.all([
this._metadataClient
?.close(true)
.catch((err) => (0, logger_1.debug)('failed to close MongoClient', err)),
this._crudClient !== this._metadataClient &&
this._crudClient
?.close(true)
.catch((err) => (0, logger_1.debug)('failed to close MongoClient', err)),
this._tunnel
?.close()
.catch((err) => (0, logger_1.debug)('failed to close tunnel', err)),
this._state
?.destroy()
.catch((err) => (0, logger_1.debug)('failed to destroy DevtoolsConnectionState', err)),
]);
}
finally {
this._cleanup();
this._logger.info((0, logger_1.mongoLogId)(1_001_000_017), 'Fully closed');
}
}
async dropCollection(ns) {
const db = this._database(ns, 'CRUD');
const collName = this._collectionName(ns);
const coll = db.collection(collName);
let result = null;
try {
[result] = await db
.listCollections({ name: collName }, { nameOnly: false })
.toArray();
}
catch {
// ignore
}
const options = {};
const encryptedFieldsInfo = result?.options?.encryptedFields;
if (encryptedFieldsInfo) {
options.encryptedFields = encryptedFieldsInfo;
}
return await coll.drop(options);
}
renameCollection(ns, newCollectionName) {
const db = this._database(ns, 'META');
return db.renameCollection(this._collectionName(ns), newCollectionName);
}
async dropDatabase(name) {
const db = this._database(name, 'CRUD');
return await db.dropDatabase();
}
async dropIndex(ns, name) {
const coll = this._collection(ns, 'CRUD');
return await coll.dropIndex(name);
}
async isListSearchIndexesSupported(ns) {
try {
await this.getSearchIndexes(ns);
}
catch {
return false;
}
return true;
}
async getSearchIndexes(ns) {
const coll = this._collection(ns, 'CRUD');
const cursor = coll.listSearchIndexes();
const indexes = await cursor.toArray();
void cursor.close();
return indexes;
}
async createSearchIndex(ns, description) {
const coll = this._collection(ns, 'CRUD');
return coll.createSearchIndex(description);
}
async updateSearchIndex(ns, name, definition) {
const coll = this._collection(ns, 'CRUD');
return coll.updateSearchIndex(name, definition);
}
async dropSearchIndex(ns, name) {
const coll = this._collection(ns, 'CRUD');
return coll.dropSearchIndex(name);
}
// Returns the driver's aggregation cursor for the pipeline (not materialized).
aggregateCursor(ns, pipeline, options = {}) {
return this._collection(ns, 'CRUD').aggregate(pipeline, options);
}
// @ts-expect-error generic in the method trips up TS here resulting in
// Promise<unknown> is not assignable to Promise<Document[]>
/**
 * Cancellable aggregation that materializes all results. The cursor is kept
 * in the enclosing scope so the abort cleanup callback can close it.
 */
aggregate(ns, pipeline, options = {}, executionOptions) {
let cursor;
return this._cancellableOperation(async (session) => {
cursor = this._collection(ns, 'CRUD').aggregate(pipeline, {
...options,
session,
});
const results = await cursor.toArray();
void cursor.close();
return results;
}, () => cursor?.close(), executionOptions?.abortSignal);
}
/**
 * Cancellable find that materializes all matching documents. The cursor is
 * kept in the enclosing scope so the abort cleanup callback can close it.
 */
find(ns, filter, options = {}, executionOptions) {
let cursor;
return this._cancellableOperation(async (session) => {
cursor = this._collection(ns, 'CRUD').find(filter, {
...options,
session,
});
const results = await cursor.toArray();
void cursor.close();
return results;
}, () => cursor?.close(), executionOptions?.abortSignal);
}
// Returns the driver's find cursor for the query (not materialized).
findCursor(ns, filter, options = {}) {
return this._collection(ns, 'CRUD').find(filter, options);
}
async findOneAndReplace(ns, filter, replacement, options) {
const coll = this._collection(ns, 'CRUD');
return await coll.findOneAndReplace(filter, replacement, options);
}
async findOneAndUpdate(ns, filter, update, options) {
const coll = this._collection(ns, 'CRUD');
return await coll.findOneAndUpdate(filter, update, options);
}
async updateOne(ns, filter, update, options) {
const coll = this._collection(ns, 'CRUD');
return await coll.updateOne(filter, update, options);
}
async replaceOne(ns, filter, replacement, options) {
const coll = this._collection(ns, 'CRUD');
return await coll.replaceOne(filter, replacement, options);
}
/**
 * Explains a find query; verbosity defaults to `executionStats`.
 * Cancellable — the abort cleanup closes the cursor held in this scope.
 */
explainFind(ns, filter, options = {}, executionOptions) {
const verbosity = executionOptions?.explainVerbosity || mongodb_1.ExplainVerbosity.executionStats;
let cursor;
return this._cancellableOperation(async (session) => {
cursor = this._collection(ns, 'CRUD').find(filter, {
...options,
session,
});
const results = await cursor.explain(verbosity);
void cursor.close();
return results;
}, () => cursor?.close(), executionOptions?.abortSignal);
}
/**
 * Explains an aggregation pipeline; verbosity defaults to `executionStats`.
 * Cancellable — the abort cleanup closes the cursor held in this scope.
 */
explainAggregate(ns, pipeline, options, executionOptions) {
const verbosity = executionOptions?.explainVerbosity || mongodb_1.ExplainVerbosity.executionStats;
let cursor;
return this._cancellableOperation(async (session) => {
cursor = this._collection(ns, 'CRUD').aggregate(pipeline, {
...options,
session,
});
const results = await cursor.explain(verbosity);
void cursor.close();
return results;
}, () => cursor?.close(), executionOptions?.abortSignal);
}
/**
 * Per-index usage statistics from `$indexStats`, keyed by index name.
 * Returns {} when the user is not authorized or the stage is unsupported;
 * other errors propagate.
 */
async _indexStats(ns) {
try {
const stats = await this.aggregate(ns, [
{ $indexStats: {} },
{
$project: {
name: 1,
usageHost: '$host',
usageCount: '$accesses.ops',
usageSince: '$accesses.since',
},
},
]);
return Object.fromEntries(stats.map((index) => {
return [index.name, index];
}));
}
catch (err) {
if ((0, instance_detail_helper_1.isNotAuthorized)(err) || (0, instance_detail_helper_1.isNotSupportedPipelineStage)(err)) {
return {};
}
throw err;
}
}
/**
 * Per-index on-disk sizes from `$collStats`, summed across shards and keyed
 * by index name. Returns {} when the user is not authorized or the stage is
 * unsupported; other errors propagate.
 */
async _indexSizes(ns) {
try {
const coll = this._collection(ns, 'CRUD');
const aggResult = (await coll
.aggregate([
{ $collStats: { storageStats: {} } },
// Turn the indexSizes sub-document into [{ k: name, v: size }, ...].
{
$project: {
indexSizes: { $objectToArray: '$storageStats.indexSizes' },
},
},
{ $unwind: '$indexSizes' },
// Sum each index's size across the per-shard documents.
{
$group: {
_id: '$indexSizes.k',
size: { $sum: { $toDouble: '$indexSizes.v' } },
},
},
])
.toArray());
return Object.fromEntries(aggResult.map(({ _id, size }) => [_id, size]));
}
catch (err) {
if ((0, instance_detail_helper_1.isNotAuthorized)(err) || (0, instance_detail_helper_1.isNotSupportedPipelineStage)(err)) {
return {};
}
throw err;
}
}
/**
 * Build progress (0..1) for in-flight createIndexes operations on the
 * namespace, keyed by index name, derived from `$currentOp`. If the
 * all-users query fails (e.g. insufficient privileges) the same pipeline is
 * retried with `allUsers: false` — note the stage object is mutated in
 * place for the retry. Any remaining failure yields an empty result.
 */
async _indexProgress(ns) {
const currentOp = { $currentOp: { allUsers: true, localOps: false } };
const pipeline = [
// get all ops
currentOp,
{
// filter for createIndexes commands
$match: {
ns,
progress: { $type: 'object' },
'command.createIndexes': { $exists: true },
},
},
{
// explode the "indexes" array for each createIndexes command
$unwind: '$command.indexes',
},
{
// group on index name
$group: {
_id: '$command.indexes.name',
progress: {
$first: {
$cond: {
if: { $gt: ['$progress.total', 0] },
then: { $divide: ['$progress.done', '$progress.total'] },
else: 0,
},
},
},
},
},
];
let currentOps = [];
const db = this._database('admin', 'META');
try {
currentOps = (await db
.aggregate(pipeline)
.toArray());
}
catch {
// Try limiting the permissions needed:
currentOp.$currentOp.allUsers = false;
try {
currentOps = (await db
.aggregate(pipeline)
.toArray());
}
catch {
// ignore errors
}
}
// Prototype-less map avoids collisions with Object.prototype keys.
const indexToProgress = Object.create(null);
for (const { _id, progress } of currentOps) {
indexToProgress[_id] = progress;
}
return indexToProgress;
}
/**
 * Returns index definitions for a namespace. With `full: false` the driver
 * result is iterated via Object.entries as a name -> key-pairs mapping —
 * NOTE(review): confirm this matches the driver version's non-full output
 * shape. With full output, usage stats, sizes and in-progress build info
 * are fetched in parallel and merged per index.
 */
async indexes(ns, options) {
if (options?.full === false) {
const indexes = Object.entries(await this._collection(ns, 'CRUD').indexes({ ...options, full: false }));
return indexes.map((compactIndexEntry) => {
const [name, keys] = compactIndexEntry;
return (0, index_detail_helper_1.createIndexDefinition)(ns, {
name,
key: Object.fromEntries(keys),
});
});
}
const [indexes, indexStats, indexSizes, indexProgress] = await Promise.all([
this._collection(ns, 'CRUD').indexes({ ...options, full: true }),
this._indexStats(ns),
this._indexSizes(ns),
this._indexProgress(ns),
]);
// NOTE(review): when indexSizes is empty, maxSize is -Infinity —
// presumably createIndexDefinition tolerates this; confirm.
const maxSize = Math.max(...Object.values(indexSizes));
return indexes
.filter((index) => {
return !!index.name;
})
.map((index) => {
const name = index.name;
return (0, index_detail_helper_1.createIndexDefinition)(ns, index, indexStats[name], indexSizes[name], maxSize, indexProgress[name]);
});
}
/**
 * Fetches instance details via the META client and augments them with the
 * current CSFLE mode (which reflects the CRUD client's configuration).
 */
async instance() {
return {
...(await (0, instance_detail_helper_2.getInstance)(this._initializedClient('META'), this._connectionOptions.connectionString)),
// Need to get the CSFLE flag from the CRUD client, not the META one
csfleMode: this.getCSFLEMode(),
};
}
async insertOne(ns, doc, options) {
const coll = this._collection(ns, 'CRUD');
return await coll.insertOne(doc, options);
}
async insertMany(ns, docs, options) {
const coll = this._collection(ns, 'CRUD');
return await coll.insertMany(docs, options);
}
/**
 * Runs `collMod` with the provided flags against the namespace's collection
 * and then resets the CSFLE-enabled client so stale collection metadata
 * caches are cleared.
 */
async updateCollection(ns,
// Collection name to update that will be passed to the collMod command will
// be derived from the provided namespace, this is why we are explicitly
// prohibiting to pass collMod flag here
flags = {}) {
const collectionName = this._collectionName(ns);
const db = this._database(ns, 'CRUD');
const result = await (0, run_command_1.runCommand)(db, {
// Order of arguments is important here, collMod is a command name and it
// should always be the first one in the object
collMod: collectionName,
...flags,
});
// Reset the CSFLE-enabled client (if any) to clear any collection
// metadata caches that might still be active.
await this._resetCRUDClient();
return result;
}
// Executes a mixed batch of write operations against the CRUD client.
bulkWrite(ns, operations, options) {
return this._collection(ns, 'CRUD').bulkWrite(operations, options);
}
async currentOp() {
const db = this._database('admin', 'META');
const pipelineWithTruncateOps = [
{
$currentOp: {
allUsers: true,
idleConnections: false,
truncateOps: false,
},
},
];
const currentOp = await db.aggregate(pipelineWithTruncateOps).toArray();
return { inprog: currentOp };
}
// Latest topology description captured from SDAM events (see the
// _lastSeenTopology field in the constructor); null before the first event.
getLastSeenTopology() {
return this._lastSeenTopology;
}
async serverStatus() {
const admin = this._database('admin', 'META');
return await (0, run_command_1.runCommand)(admin, { serverStatus: 1 }, { enableUtf8Validation: false });
}
async top() {
const adminDb = this._database('admin', 'META');
return await (0, run_command_1.runCommand)(adminDb, { top: 1 }, { enableUtf8Validation: f