/*
 * webgme-engine — WebGME server and Client API without a GUI
 * (package listing metadata: Version unspecified; 1,232 lines (1,088 loc) • 81.2 kB; JavaScript)
 */
/*globals define*/
/*eslint-env browser*/
/**
* @author kecso / https://github.com/kecso
* @author pmeijer / https://github.com/pmeijer
*/
define([
'./logger',
'common/storage/browserstorage',
'common/EventDispatcher',
'common/core/coreQ',
'./constants',
'common/util/assert',
'common/core/tasync',
'common/util/guid',
'common/core/users/metarules',
'./gmeNodeGetter',
'./gmeNodeSetter',
'./libraries',
'./gmeServerRequests',
'./stateloghelpers',
'./pluginmanager',
'./websocketRouterAccess',
'superagent'
], function (Logger,
Storage,
EventDispatcher,
Core,
CONSTANTS,
ASSERT,
TASYNC,
GUID,
metaRules,
getNode,
getNodeSetters,
getLibraryFunctions,
getServerRequests,
stateLogHelpers,
PluginManager,
WebsocketRouterAccess,
superagent) {
'use strict';
function Client(gmeConfig) {
var self = this,
logger = Logger.create('gme:client', gmeConfig.client.log),
storage = Storage.getStorage(logger, gmeConfig, true),
state = {
// CONSTANTS.STORAGE. CONNECTED/DISCONNECTED/RECONNECTED/INCOMPATIBLE_CONNECTION/CONNECTION_ERROR
connection: null,
renewingToken: false,
exception: null,
project: null,
projectAccess: null,
projectInfo: null,
core: null,
branchName: null,
branchStatus: null, //CONSTANTS.BRANCH_STATUS. SYNC/AHEAD_SYNC/AHEAD_NOT_SYNC/PULLING/ERROR or null
readOnlyProject: false,
viewer: false, // This means that a specific commit is selected w/o regards to any branch.
users: {},
nodes: {},
loadNodes: {},
rootHash: null,
rootObject: null,
commitHash: null,
undoRedoChain: null, //{commitHash: '#hash', rootHash: '#hash', previous: object, next: object}
transactions: {
opened: 0,
callbacks: []
},
msg: '',
gHash: 0,
loadError: null,
ongoingTerritoryUpdateCounter: 0,
ongoingLoadPatternsCounter: 0,
pendingTerritoryUpdatePatterns: {},
loadingStatus: null,
inLoading: false,
loading: {
rootHash: null,
commitHash: null,
changedNodes: null,
next: null
}
},
monkeyPatchKey,
pluginManager,
nodeSetterFunctions,
coreLibraryFunctions,
serverRequests,
ROOT_PATH = '',
//addOnFunctions = new AddOn(state, storage, logger, gmeConfig),
loadPatternThrottled;
EventDispatcher.call(this);
this.CONSTANTS = CONSTANTS;
// Internal functions
/**
 * Deep-copies a JSON-serializable value via a stringify/parse round-trip.
 * Note: any falsy input (null, undefined, 0, '') yields null, and values
 * JSON cannot represent (functions, undefined members) are dropped.
 * @param {object} object - value to clone.
 * @returns {object|null} a structural copy, or null for falsy input.
 */
function copy(object) {
    return object ? JSON.parse(JSON.stringify(object)) : null;
}
/**
 * Marks every ancestor of each given node path as updated in
 * state.loading.changedNodes.update (the nodes themselves are covered by
 * their load/unload events; their parent chain needs update events).
 * @param {string[]} paths - node paths that were loaded or unloaded.
 */
function addLoadUnloadPathToUpdates(paths) {
    paths.forEach(function (path) {
        var pieces = path.split(CONSTANTS.CORE.PATH_SEP);
        while (pieces.length > 1) {
            state.loading.changedNodes.update[pieces.join(CONSTANTS.CORE.PATH_SEP)] = true;
            pieces.pop();
        }
    });
}
/**
 * Decides whether a node should receive an 'update' event for the commit
 * described by changedNodes.
 * @param {object|null} changedNodes - diff info ({update, partialUpdate});
 *        null (e.g. undo/redo) means "treat every node as updated".
 * @param {object} node - core node to examine.
 * @returns {boolean} true when the node, or any node in its base chain,
 *          was touched by the change set.
 */
function wasNodeUpdated(changedNodes, node) {
    var current;
    // Without diff information we must assume everything changed.
    if (!changedNodes) {
        return true;
    }
    // Collection (relation-target) updates hit only the node itself.
    if (changedNodes.partialUpdate[state.core.getPath(node)] === true) {
        return true;
    }
    // Regular updates propagate down the inheritance (base) chain.
    for (current = node; current; current = state.core.getBase(current)) {
        if (changedNodes.update[state.core.getPath(current)] === true) {
            return true;
        }
    }
    return false;
}
/**
 * Returns the paths of currently loaded nodes (state.nodes) that should get
 * an 'update' event when switching to the freshly loaded newerNodes.
 * Expands state.loading.changedNodes.update in place so that:
 * 1. updates to meta-nodes propagate to every meta-node that is a core
 *    isTypeOf them (mixins accounted for),
 * 2. the root is always updated,
 * 3. parent chains of loaded/unloaded nodes are updated.
 * @param {object} newerNodes - map of path -> {node} for the incoming state.
 * @returns {string[]} paths present in both old and new state that changed.
 */
function getModifiedNodes(newerNodes) {
var modifiedNodes = [],
updatedMetaPaths = [],
metaNodes,
metaPath,
updatePath,
nodePath,
i;
// For the client these rules apply for finding the affected nodes.
// 1. Updates should be triggered to any node that core.isTypeOf (i.e. mixins accounted for).
// 2. Root node should always be triggered.
// 3. loads/unloads should trigger updates for the parent chain.
if (state.loading.changedNodes) {
// 1. Account for mixins - i.e resolve isTypeOf.
// Gather all meta-nodes that had an update.
metaNodes = state.core.getAllMetaNodes(newerNodes[ROOT_PATH].node);
for (updatePath in state.loading.changedNodes.update) {
if (Object.hasOwn(metaNodes, updatePath)) {
updatedMetaPaths.push(updatePath);
}
}
if (updatedMetaPaths.length > 0) {
// There are meta-nodes with updates.
for (metaPath in metaNodes) {
// For all meta nodes..
if (Object.hasOwn(metaNodes, metaPath)) {
for (i = 0; i < updatedMetaPaths.length; i += 1) {
// check if it is a typeOf (includes mixins) any of the updated meta-nodes
if (state.core.isTypeOf(metaNodes[metaPath], metaNodes[updatedMetaPaths[i]]) === true) {
// if so add its path to the update nodes.
state.loading.changedNodes.update[metaPath] = true;
}
}
}
}
}
// 2. Add Root node
state.loading.changedNodes.update[ROOT_PATH] = true;
// 3. Account for loads and unloads.
addLoadUnloadPathToUpdates(Object.keys(state.loading.changedNodes.load));
addLoadUnloadPathToUpdates(Object.keys(state.loading.changedNodes.unload));
}
// Only nodes loaded in BOTH states can be "modified" (others are load/unload).
for (nodePath in state.nodes) {
if (Object.hasOwn(state.nodes, nodePath) && Object.hasOwn(newerNodes, nodePath) &&
wasNodeUpdated(state.loading.changedNodes, newerNodes[nodePath].node)) {
modifiedNodes.push(nodePath);
}
}
return modifiedNodes;
}
//this is just a first brute implementation it needs serious optimization!!!
/**
 * Checks whether the node at path matches the type filter of a territory
 * pattern. A pattern without an items filter accepts every node.
 * @param {string} path - node path to test.
 * @param {object} pattern - territory pattern; pattern.items lists paths of
 *        accepted types.
 * @returns {boolean} true when the node is a type of any listed item.
 */
function fitsInPatternTypes(path, pattern) {
    var typeFilter = pattern.items;
    if (!typeFilter || typeFilter.length === 0) {
        return true;
    }
    return typeFilter.some(function (typePath) {
        return self.isTypeOf(path, typePath);
    });
}
/**
 * Expands a territory pattern rooted at patternId into the set of node paths
 * it covers, recording each as a key (value true) in pathsSoFar.
 * pattern.children acts as a remaining-depth counter, decremented per level.
 * A referenced node missing from state.nodes increments state.loadError
 * instead of raising - callers compare loadError before/after.
 * @param {string} patternId - path of the pattern's root node.
 * @param {object} pattern - territory pattern ({children, items, ...}).
 * @param {object} pathsSoFar - accumulator map of path -> true (mutated).
 */
function patternToPaths(patternId, pattern, pathsSoFar) {
var children,
subPattern,
i;
if (state.nodes[patternId]) {
pathsSoFar[patternId] = true;
if (pattern.children && pattern.children > 0) {
children = state.core.getChildrenPaths(state.nodes[patternId].node);
// Copy so the depth decrement does not mutate the caller's pattern.
subPattern = copy(pattern);
subPattern.children -= 1;
for (i = 0; i < children.length; i += 1) {
if (fitsInPatternTypes(children[i], pattern)) {
patternToPaths(children[i], subPattern, pathsSoFar);
}
}
}
} else {
state.loadError++;
}
}
/**
 * Computes and dispatches territory events (load/unload/update) for a single
 * registered user (state.users[userId]) based on its PATTERNS and the list of
 * modified node paths, then replaces its PATHS with the newly covered set.
 * A technical 'complete' event is always prepended; nothing is sent when a
 * pattern could not be fully resolved (loadError changed).
 * @param {string} userId - territory user guid.
 * @param {string[]} modifiedNodes - paths of nodes updated in this commit.
 */
function userEvents(userId, modifiedNodes) {
var newPaths = {},
startErrorLevel = state.loadError,
loadedOrUnloaded = {},
i,
events = [];
for (i in state.users[userId].PATTERNS) {
if (Object.hasOwn(state.users[userId].PATTERNS, i)) {
if (state.nodes[i]) { //TODO we only check pattern if its root is there...
patternToPaths(i, state.users[userId].PATTERNS[i], newPaths);
}
}
}
if (startErrorLevel !== state.loadError) {
return; //we send events only when everything is there correctly
}
//deleted items
for (i in state.users[userId].PATHS) {
if (!newPaths[i]) {
events.push({etype: 'unload', eid: i});
loadedOrUnloaded[i] = true;
}
}
//added items
for (i in newPaths) {
if (!state.users[userId].PATHS[i]) {
events.push({etype: 'load', eid: i});
loadedOrUnloaded[i] = true;
}
}
//updated items
for (i = 0; i < modifiedNodes.length; i++) {
// Check that there wasn't a load or unload event for the node
if (newPaths[modifiedNodes[i]] && !loadedOrUnloaded[modifiedNodes[i]]) {
events.push({etype: 'update', eid: modifiedNodes[i]});
}
}
state.users[userId].PATHS = newPaths;
//this is how the events should go
// NOTE(review): the 'incomplete' branch below looks unreachable - the early
// return above guarantees state.loadError === startErrorLevel at this point.
if (events.length > 0) {
if (state.loadError > startErrorLevel) {
events.unshift({etype: 'incomplete', eid: null});
} else {
events.unshift({etype: 'complete', eid: null});
}
} else {
events.unshift({etype: 'complete', eid: null});
}
state.users[userId].FN(events); //eslint-disable-line new-cap
}
/**
 * Recursively loads the children of node down to `level` levels deep,
 * recording every visited node in nodesSoFar (path -> {node}).
 * Completion is tracked with a `missing` countdown over the direct children;
 * callback fires exactly once per invocation with the first error (or null).
 * @param {object} core - core instance to load with.
 * @param {object} nodesSoFar - accumulator of loaded nodes (mutated).
 * @param {object} node - parent node whose children are loaded.
 * @param {number} level - remaining depth; 0 stops the recursion.
 * @param {function} callback - callback(error).
 */
function loadChildrenPattern(core, nodesSoFar, node, level, callback) {
var path = core.getPath(node),
childrenPaths = core.getChildrenPaths(node),
childrenRelids = core.getChildrenRelids(node),
missing = childrenPaths.length,
error = null,
i,
// Invoked when a child subtree finished loading.
childrenPatternLoaded = function (err) {
error = error || err;
missing -= 1;
if (missing === 0) {
callback(error);
}
},
// Invoked when a direct child object itself loaded; descends on success.
childLoaded = function (err, child) {
if (err || child === null) {
error = error || err;
missing -= 1;
if (missing === 0) {
callback(error);
}
} else {
loadChildrenPattern(core, nodesSoFar, child, level - 1, childrenPatternLoaded);
}
};
if (!nodesSoFar[path]) {
nodesSoFar[path] = {
node: node
};
}
if (level > 0) {
if (missing > 0) {
for (i = 0; i < childrenPaths.length; i++) {
// Reuse already-loaded children; load the rest by relid.
if (nodesSoFar[childrenPaths[i]]) {
loadChildrenPattern(core,
nodesSoFar,
nodesSoFar[childrenPaths[i]].node,
level - 1, childrenPatternLoaded);
} else {
core.loadChild(node, childrenRelids[i], childLoaded);
}
}
} else {
// No children - nothing more to load at this depth.
callback(error);
}
} else {
callback(error);
}
}
/**
 * Loads the node at path `id` (resolved from the root already present in
 * nodesSoFar) and, if the pattern requests children, loads its subtree
 * pattern.children levels deep. All loaded nodes end up in nodesSoFar.
 * @param {object} core - core instance to load with.
 * @param {string} id - path of the pattern root.
 * @param {object} pattern - territory pattern ({children, ...}).
 * @param {object} nodesSoFar - accumulator of loaded nodes (mutated).
 * @param {function} callback - callback(error).
 */
function loadPattern(core, id, pattern, nodesSoFar, callback) {
var base = null,
baseLoaded = function () {
if (pattern.children && pattern.children > 0) {
var level = pattern.children;
loadChildrenPattern(core, nodesSoFar, base, level, callback);
} else {
callback(null);
}
};
if (nodesSoFar[id]) {
base = nodesSoFar[id].node;
baseLoaded();
} else {
if (!nodesSoFar[ROOT_PATH]) {
logger.error('pattern cannot be loaded if there is no root!!!');
}
base = nodesSoFar[ROOT_PATH].node;
core.loadByPath(base, id, function (err, node) {
var path;
if (!err && node && !core.isEmpty(node)) {
path = core.getPath(node);
if (!nodesSoFar[path]) {
nodesSoFar[path] = {
node: node
};
}
base = node;
baseLoaded();
} else {
callback(err);
}
});
}
}
loadPatternThrottled = TASYNC.throttle(loadPattern, 1); //magic number could be fine-tuned
/**
 * Invokes reLaunch on every registered UI that implements it, e.g. after
 * (re)connecting or after a project/branch switch.
 */
function reLaunchUsers() {
    Object.keys(state.users).forEach(function (id) {
        var ui = state.users[id].UI;
        if (ui && typeof ui === 'object' && typeof ui.reLaunch === 'function') {
            ui.reLaunch();
        }
    });
}
/**
 * Finalizes an updateTerritory call: stores the new patterns for the user
 * (if it still exists) and, on success, emits the resulting events.
 * @param {string} guid - territory user id.
 * @param {object} patterns - the patterns that were loaded.
 * @param {Error|null} error - aggregated load error, if any.
 */
function _updateTerritoryAllDone(guid, patterns, error) {
    var user;
    logger.debug('updateTerritory related loads finished', {
        metadata: {
            userId: guid, patterns: patterns, error: error
        }
    });
    user = state.users[guid];
    if (!user) {
        // The user removed itself while the loads were in flight.
        return;
    }
    user.PATTERNS = copy(patterns);
    if (!error) {
        userEvents(guid, []);
    }
}
/**
 * Whether it is safe to swap the loaded state in: a load must be pending and
 * no territory updates or pattern loads may still be in flight.
 * @returns {boolean}
 */
function canSwitchStates() {
    return Boolean(state.inLoading &&
        state.ongoingTerritoryUpdateCounter === 0 &&
        state.ongoingLoadPatternsCounter === 0);
}
/**
 * Atomically promotes the freshly loaded state into the active one: moves
 * state.loadNodes into state.nodes, adopts the loading root/commit hashes,
 * synchronously emits territory events for every user, and finally resolves
 * the pending loading callback (state.loading.next) with any stored error.
 * Must only run when canSwitchStates() is true.
 */
function switchStates() {
//it is safe now to move the loadNodes into nodes,
// refresh the metaNodes and generate events - all in a synchronous manner!!!
var modifiedPaths,
key;
logger.debug('switching project state [C#' +
state.commitHash + ']->[C#' + state.loading.commitHash + '] : [R#' +
state.rootHash + ']->[R#' + state.loading.rootHash + ']');
// Diff must be computed before state.nodes is replaced below.
modifiedPaths = getModifiedNodes(state.loadNodes);
state.nodes = state.loadNodes;
state.loadNodes = {};
self.getAllMetaNodes(); //This ensures that all language elements can be accessed with getNode
state.inLoading = false;
state.rootHash = state.loading.rootHash;
state.loading.rootHash = null;
state.commitHash = state.loading.commitHash;
state.loading.commitHash = null;
//checkMetaNameCollision(state.core, state.nodes[ROOT_PATH].node);
//checkMixinErrors(state.core, state.nodes[ROOT_PATH].node);
// These are checked by the meta-editor..
for (key in state.users) {
if (Object.hasOwn(state.users, key)) {
userEvents(key, modifiedPaths);
}
}
// Resolve the loading() caller with the first error seen (or null).
if (state.loadingStatus) {
state.loading.next(state.loadingStatus);
} else {
state.loading.next(null);
}
}
/**
 * Per-pattern completion callback during loading(): records the first error,
 * decrements the outstanding-pattern counter, and swaps states when safe.
 * @param {Error|null} err - error from the pattern load, if any.
 */
function loadingPatternFinished(err) {
    // Only the first error across all pattern loads is kept.
    state.loadingStatus = state.loadingStatus || err;
    state.ongoingLoadPatternsCounter -= 1;
    if (!canSwitchStates()) {
        return;
    }
    switchStates();
}
/**
 * Loads the node tree for an incoming commit into state.loadNodes: loads the
 * root, then (throttled) every registered territory pattern, including
 * patterns pending from in-flight updateTerritory calls. The actual swap into
 * state.nodes happens in switchStates() once all counters reach zero.
 * @param {string} newRootHash - hash of the new root object.
 * @param {string} newCommitHash - hash of the commit being loaded.
 * @param {object|null} changedNodes - diff info from storage (null on undo/redo).
 * @param {function} callback - callback(error) resolved after the switch.
 */
function loading(newRootHash, newCommitHash, changedNodes, callback) {
    var i, j,
        userIds,
        patternPaths,
        patternsToLoad = [];

    if (state.ongoingLoadPatternsCounter !== 0) {
        // FIX: error message typo corrected ("bee" -> "be").
        callback(new Error('at the start of loading counter should be zero!!! [' +
            state.ongoingLoadPatternsCounter + ']'));
        return;
    }

    state.loadingStatus = null;
    state.loadNodes = {};
    state.loading.rootHash = newRootHash;
    state.loading.commitHash = newCommitHash;
    state.loading.next = callback;
    state.loading.changedNodes = changedNodes;

    state.core.loadRoot(state.loading.rootHash, function (err, root) {
        if (err) {
            state.loading.next(err);
            return;
        }

        state.inLoading = true;
        state.loadNodes[state.core.getPath(root)] = {
            node: root
        };

        //we first only set the counter of patterns but we also generate a completely separate pattern queue
        //as we cannot be sure if all the users will remain at the point of giving the actual load command!
        userIds = Object.keys(state.users);
        for (i = 0; i < userIds.length; i += 1) {
            state.ongoingLoadPatternsCounter += Object.keys(state.users[userIds[i]].PATTERNS || {}).length;
            patternPaths = Object.keys(state.users[userIds[i]].PATTERNS || {});
            for (j = 0; j < patternPaths.length; j += 1) {
                patternsToLoad.push({
                    id: patternPaths[j],
                    pattern: copy(state.users[userIds[i]].PATTERNS[patternPaths[j]])
                });
            }
        }

        // Also account for territory updates that were requested mid-load.
        userIds = Object.keys(state.pendingTerritoryUpdatePatterns);
        for (i = 0; i < userIds.length; i += 1) {
            state.ongoingLoadPatternsCounter +=
                Object.keys(state.pendingTerritoryUpdatePatterns[userIds[i]] || {}).length;
            patternPaths = Object.keys(state.pendingTerritoryUpdatePatterns[userIds[i]] || {});
            for (j = 0; j < patternPaths.length; j += 1) {
                patternsToLoad.push({
                    id: patternPaths[j],
                    pattern: copy(state.pendingTerritoryUpdatePatterns[userIds[i]][patternPaths[j]])
                });
            }
        }

        //empty load check
        if (state.ongoingLoadPatternsCounter === 0) {
            if (canSwitchStates()) {
                switchStates();
                reLaunchUsers();
            }
            return;
        }

        for (i = 0; i < patternsToLoad.length; i += 1) {
            loadPatternThrottled(state.core,
                patternsToLoad[i].id, patternsToLoad[i].pattern, state.loadNodes, loadingPatternFinished);
        }
    });
}
/**
 * Resets every registered user's territory (PATTERNS/PATHS) and sends an
 * unload event for each previously covered path, prefixed by a technical
 * 'complete' event. Used when closing a project.
 */
function cleanUsersTerritories() {
    //look out as the user can remove itself at any time!!!
    Object.keys(state.users).forEach(function (id) {
        var user = state.users[id],
            events,
            path;
        if (!user) {
            return;
        }
        events = [{eid: null, etype: 'complete'}];
        for (path in user.PATHS) {
            events.push({etype: 'unload', eid: path});
        }
        user.PATTERNS = {};
        user.PATHS = {};
        user.SENDEVENTS = true;
        user.FN(events); //eslint-disable-line new-cap
    });
}
/**
 * Logs a serialized snapshot of the client state, either straight to the
 * console (level === 'console') or through the logger at the given level.
 * @param {string} level - logger method name, or 'console'.
 * @param {string} msg - label describing the point of logging.
 */
function logState(level, msg) {
    var indent = gmeConfig.debug ? 2 : 0,
        stateString = stateLogHelpers.getStateLogString(self, state, gmeConfig.debug, indent);
    if (level === 'console') {
        /*eslint-disable no-console*/
        console.log('state at ' + msg, stateString);
        /*eslint-enable no-console*/
    } else {
        logger[level]('state at ' + msg, stateString);
    }
}
/**
 * Requests a fresh webgme access token from the server. Only one renewal is
 * in flight at a time (guarded by state.renewingToken); concurrent calls are
 * ignored with a debug log and their callback is never invoked.
 * @param {function} callback - callback(err, webgmeToken).
 */
function getNewToken(callback) {
    if (state.renewingToken === false) {
        state.renewingToken = true;
        (new superagent.Request('GET', gmeConfig.client.mountedPath + '/api/user/token'))
            .end(function (err, res) {
                state.renewingToken = false;
                // FIX: on failure `res` may be undefined - dereferencing
                // res.body would throw and mask the real error.
                if (err) {
                    callback(err);
                    return;
                }
                callback(null, res.body.webgmeToken);
            });
    } else {
        logger.debug('Awaiting token renewal..');
    }
}
// Forwarded functions
/**
 * Persists the current root and makes a commit on the open branch, unless a
 * transaction is open (messages are then accumulated into state.msg) or the
 * project is read-only/viewer mode.
 * NOTE(review): in the persisting path the `callback` argument itself is not
 * invoked; completion is delivered to the callbacks queued in
 * state.transactions.callbacks - confirm callers register there.
 * @param {string} msg - commit message (appended to any pending message).
 * @param {function} [callback]
 */
function saveRoot(msg, callback) {
var persisted,
numberOfPersistedObjects,
wrappedCallback,
callbacks;
logger.debug('saveRoot msg', msg);
if (!state.viewer && !state.readOnlyProject && state.nodes[ROOT_PATH]) {
// Accumulate messages across multiple saves within one transaction.
if (state.msg && msg) {
state.msg += '\n' + msg;
} else {
state.msg = msg;
}
if (state.transactions.opened === 0) {
ASSERT(state.project && state.core && state.branchName);
// Take ownership of the queued callbacks before the async commit.
callbacks = state.transactions.callbacks;
state.transactions.callbacks = [];
wrappedCallback = function (err, result) {
if (err) {
logger.error('saveRoot failure', err);
} else {
logger.debug('saveRoot', result);
}
callbacks.forEach(function (cb) {
cb(err, result);
});
};
logger.debug('is NOT in transaction - will persist.');
persisted = state.core.persist(state.nodes[ROOT_PATH].node);
logger.debug('persisted', persisted);
numberOfPersistedObjects = Object.keys(persisted.objects).length;
if (numberOfPersistedObjects === 0) {
logger.warn('No changes after persist will return from saveRoot.');
wrappedCallback(null);
return;
} else if (numberOfPersistedObjects > 200) {
//This is just for debugging
logger.warn('Lots of persisted objects', numberOfPersistedObjects);
}
// Make the commit on the storage (will emit hashUpdated)
storage.makeCommit(
state.project.projectId,
state.branchName,
[state.commitHash],
persisted.rootHash,
persisted.objects,
state.msg,
wrappedCallback
);
state.msg = '';
} else {
logger.debug('is in transaction - will NOT persist.');
}
} else {
//TODO: Why is this set to empty here?
state.msg = '';
callback && callback(null);
}
}
/**
 * Registers a core node in the state.nodes cache (keyed by path) unless it is
 * already cached, and returns its path.
 * @param {object|null} node - core node to store.
 * @returns {string|null} the node's path, or null for a falsy node.
 */
function storeNode(node /*, basic */) {
    var path;
    if (!node) {
        return null;
    }
    path = state.core.getPath(node);
    if (!state.nodes[path]) {
        state.nodes[path] = {
            node: node
        };
        //TODO this only needed when real eventing will be reintroduced
        //_inheritanceHash[path] = getInheritanceChain(node);
    }
    return path;
}
// Plugin Manager - instantiate it and expose its plugin execution and
// management API (context, run, filter, notifications, abort, messaging)
// directly on this client instance.
pluginManager = new PluginManager(self, storage, state, logger, gmeConfig);
this.getCurrentPluginContext = pluginManager.getCurrentPluginContext;
this.runBrowserPlugin = pluginManager.runBrowserPlugin;
this.runServerPlugin = pluginManager.runServerPlugin;
this.filterPlugins = pluginManager.filterPlugins;
this.dispatchPluginNotification = pluginManager.dispatchPluginNotification;
this.getRunningPlugins = pluginManager.getRunningPlugins;
this.abortPlugin = pluginManager.abortPlugin;
this.sendMessageToPlugin = pluginManager.sendMessageToPlugin;
/**
 * Logs a core-usage error and broadcasts it to listeners as an error-severity
 * NOTIFICATION event.
 * @param {Error} error - error raised by faulty core usage.
 */
function printCoreError(error) {
    var notification = {
        type: 'CORE',
        severity: 'error',
        message: error.message
    };
    logger.error('Faulty core usage raised an error', error);
    self.dispatchEvent(CONSTANTS.NOTIFICATION, notification);
}
// Node setters and getters.
// Monkey-patch this client instance with the node-setter, core-library and
// server-request helper functions so they are callable directly on it.
nodeSetterFunctions = getNodeSetters(logger, state, saveRoot, storeNode, printCoreError);
for (monkeyPatchKey in nodeSetterFunctions) {
if (Object.hasOwn(nodeSetterFunctions, monkeyPatchKey)) {
self[monkeyPatchKey] = nodeSetterFunctions[monkeyPatchKey];
}
}
coreLibraryFunctions = getLibraryFunctions(logger, state, storage, saveRoot);
for (monkeyPatchKey in coreLibraryFunctions) {
if (Object.hasOwn(coreLibraryFunctions, monkeyPatchKey)) {
self[monkeyPatchKey] = coreLibraryFunctions[monkeyPatchKey];
}
}
serverRequests = getServerRequests(self, logger, state, storage);
for (monkeyPatchKey in serverRequests) {
if (Object.hasOwn(serverRequests, monkeyPatchKey)) {
self[monkeyPatchKey] = serverRequests[monkeyPatchKey];
}
}
// Main API functions (with helpers) for connecting, selecting project and branches etc.
/**
 * Closes the given project on storage and resets all project-related client
 * state (core, branch, nodes, transactions, territories), then dispatches
 * BRANCH_CLOSED (when a branch was open), BRANCH_CHANGED(null) and
 * PROJECT_CLOSED events.
 * @param {string} projectId
 * @param {function} callback - callback(err) with any storage error.
 */
function closeProject(projectId, callback) {
var prevBranchName = state.branchName;
state.project = null;
state.projectAccess = null;
state.projectInfo = null;
state.readOnlyProject = false;
//TODO what if for some reason we are in transaction?
storage.closeProject(projectId, function (err) {
if (err) {
callback(err);
return;
}
// Reset the remaining state only after storage confirmed the close.
state.core = null;
state.branchName = null;
state.commitHash = null;
state.patterns = {};
state.nodes = {};
state.loadNodes = {};
state.loadError = 0;
state.rootHash = null;
state.transactions.opened = 0;
state.transactions.callbacks = [];
state.msg = '';
cleanUsersTerritories();
if (prevBranchName) {
self.dispatchEvent(CONSTANTS.BRANCH_CLOSED, prevBranchName);
}
self.dispatchEvent(CONSTANTS.BRANCH_CHANGED, null);
self.dispatchEvent(CONSTANTS.PROJECT_CLOSED, projectId);
callback(null);
});
}
/**
* Closes the currently open project.
* @param {function} callback
*/
/**
 * Closes the currently open project, or errors when none is open.
 * @param {function} callback - callback(err).
 */
this.closeProject = function (callback) {
    if (!state.project || !state.project.projectId) {
        callback(new Error('There is no open project.'));
        return;
    }
    closeProject(state.project.projectId, callback);
};
/**
 * Opens the websocket connection to the storage. The callback resolves once
 * on the initial CONNECTED (or on a fatal connection failure); every later
 * connection-state change is reported solely through NETWORK_STATUS_CHANGED
 * events on this dispatcher.
 * @param {function} callback - callback(err).
 */
this.connectToDatabase = function (callback) {
if (self.isConnected()) {
logger.warn('connectToDatabase - already connected');
callback(null);
return;
}
// storage.open invokes this handler on EVERY connection-state change.
storage.open(function (connectionState) {
state.connection = connectionState;
if (connectionState === CONSTANTS.STORAGE.CONNECTED) {
//N.B. this event will only be triggered once.
self.dispatchEvent(CONSTANTS.NETWORK_STATUS_CHANGED, connectionState);
// Route incoming storage notifications to the matching dispatchers.
storage.webSocket.addEventListener(CONSTANTS.STORAGE.NOTIFICATION,
function (emitter, eventData) {
logger.debug('received notification', eventData);
if (eventData.type === CONSTANTS.STORAGE.BRANCH_ROOM_SOCKETS) {
self.dispatchConnectedUsersChanged(eventData);
// If a new socket joined our branch -> emit to the branch room letting
// any newly connected users know that we are in this branch too.
// If a socket with the same userId as us disconnected -> emit to
// make sure we're not deleted from the other users.
self.emitStateNotification();
} else if (eventData.type === CONSTANTS.STORAGE.PLUGIN_NOTIFICATION) {
self.dispatchPluginNotification(eventData);
} else if (eventData.type === CONSTANTS.STORAGE.ADD_ON_NOTIFICATION) {
self.dispatchAddOnNotification(eventData);
} else if (eventData.type === CONSTANTS.STORAGE.CLIENT_STATE_NOTIFICATION) {
self.dispatchConnectedUsersChanged(eventData);
} else {
logger.error('Unknown notification type', eventData.type, eventData);
}
}
);
reLaunchUsers();
callback(null);
} else if (connectionState === CONSTANTS.STORAGE.DISCONNECTED) {
// Suppress the event when we already reported an incompatible connection.
if (state.connection !== CONSTANTS.STORAGE.INCOMPATIBLE_CONNECTION) {
self.dispatchEvent(CONSTANTS.NETWORK_STATUS_CHANGED, connectionState);
}
} else if (connectionState === CONSTANTS.STORAGE.RECONNECTED) {
self.dispatchEvent(CONSTANTS.NETWORK_STATUS_CHANGED, connectionState);
} else if (connectionState === CONSTANTS.STORAGE.INCOMPATIBLE_CONNECTION) {
// Client/server version mismatch - tear down before notifying.
self.disconnectFromDatabase(function (err) {
if (err) {
logger.error(err);
}
self.dispatchEvent(CONSTANTS.NETWORK_STATUS_CHANGED, connectionState);
});
} else if (connectionState === CONSTANTS.STORAGE.JWT_ABOUT_TO_EXPIRE) {
// NOTE(review): log message typo ("about is about") - left as-is here.
logger.warn('Token about is about to expire');
getNewToken(function (err, newToken) {
if (err) {
logger.error('Failed to renew token', err);
// Token will probably expire soon
} else {
storage.setToken(newToken);
}
});
} else if (connectionState === CONSTANTS.STORAGE.JWT_EXPIRED) {
self.disconnectFromDatabase(function (err) {
if (err) {
logger.error(err);
}
self.dispatchEvent(CONSTANTS.NETWORK_STATUS_CHANGED, CONSTANTS.STORAGE.JWT_EXPIRED);
});
} else {
// Unknown state - treat as a connection error and resolve with failure.
logger.error(new Error('Connection failed error ' + connectionState));
self.disconnectFromDatabase(function (err) {
if (err) {
logger.error(err);
}
self.dispatchEvent(CONSTANTS.NETWORK_STATUS_CHANGED, CONSTANTS.STORAGE.CONNECTION_ERROR);
callback(new Error('Connection failed! ' + connectionState));
});
}
});
};
/**
 * Closes any open project, then closes the storage connection. The recorded
 * connection state becomes DISCONNECTED unless a terminal state
 * (INCOMPATIBLE_CONNECTION / CONNECTION_ERROR) is already set.
 * @param {function} callback - callback(err) with the first error raised.
 */
this.disconnectFromDatabase = function (callback) {
    function closeStorage(err) {
        storage.close(function (err2) {
            var terminal = state.connection === CONSTANTS.STORAGE.INCOMPATIBLE_CONNECTION ||
                state.connection === CONSTANTS.STORAGE.CONNECTION_ERROR;
            if (!terminal) {
                state.connection = CONSTANTS.STORAGE.DISCONNECTED;
            }
            callback(err || err2);
        });
    }

    if (state.project) {
        closeProject(state.project.projectId, closeStorage);
    } else {
        closeStorage(null);
    }
};
/**
* If branchName is given and it does not exist, the project will be closed and callback resolved with an error.
* If branchName NOT given it will attempt the following in order and break if successful at any step:
* 1) Select the master if available.
* 2) Select any available branch.
* 3) Select the latest commit.
* 4) Close the project and resolve with error.
* @param {string} projectId
* @param {string} [branchName='master']
* @param {function} callback
*/
this.selectProject = function (projectId, branchName, callback) {
    // Allow selectProject(projectId, callback).
    if (callback === undefined && typeof branchName === 'function') {
        callback = branchName;
        branchName = undefined;
    }

    if (self.isConnected() === false) {
        callback(new Error('There is no open database connection!'));
        // FIX: previously fell through here and attempted to open the project
        // anyway, which could invoke callback a second time (selectBranch and
        // _selectCommitFilteredEvents both return after this guard).
        return;
    }

    var prevProjectId,
        branchToOpen = branchName || 'master';

    logger.debug('selectProject', projectId, branchToOpen);

    // Invoked once storage has opened the project; selects a branch or, as a
    // last resort, the latest commit (see the jsdoc above for the order).
    function projectOpened(err, project, branches, access) {
        if (err) {
            callback(err);
            return;
        }

        project.getProjectInfo(function (err, projectInfo) {
            if (err) {
                callback(err);
                return;
            }

            state.project = project;
            state.projectAccess = access;
            state.projectInfo = projectInfo;
            state.readOnlyProject = access.write === false;
            state.core = new Core(project, {
                globConf: gmeConfig,
                logger: logger.fork('core')
            });
            logState('info', 'projectOpened');
            logger.debug('projectOpened, branches: ', branches);
            self.dispatchEvent(CONSTANTS.PROJECT_OPENED, projectId);

            if (Object.hasOwn(branches, branchToOpen) === false) {
                if (branchName) {
                    // An explicitly requested branch is missing -> close and fail.
                    logger.error('Given branch does not exist "' + branchName + '"');
                    closeProject(projectId, function (err) {
                        if (err) {
                            logger.error('closeProject after missing branch failed with err', err);
                        }
                        callback(new Error('Given branch does not exist "' + branchName + '"'));
                    });
                    return;
                }
                logger.warn('Project "' + projectId + '" did not have branch', branchToOpen);
                branchToOpen = Object.keys(branches)[0] || null;
                logger.debug('Picked "' + branchToOpen + '".');
            }

            if (branchToOpen) {
                self.selectBranch(branchToOpen, null, function (err) {
                    if (err) {
                        callback(err);
                        return;
                    }
                    logState('info', 'selectBranch');
                    reLaunchUsers();
                    callback(null);
                });
            } else {
                // No branches at all - fall back to the latest commit.
                logger.warn('No branches available in project, will attempt to select latest commit.');
                self.getCommits(projectId, Date.now(), 1, function (err, commitObjects) {
                    if (err || commitObjects.length === 0) {
                        logger.error(err);
                        closeProject(projectId, function (err) {
                            if (err) {
                                logger.error('closeProject after missing any commits failed with err', err);
                            }
                            callback(new Error('Project does not have any commits.'));
                        });
                        return;
                    }
                    self.selectCommit(commitObjects[0]._id, function (err) {
                        if (err) {
                            logger.error(err);
                            closeProject(projectId, function (err) {
                                if (err) {
                                    logger.error('closeProject after missing any commits failed with err', err);
                                }
                                callback(new Error('Failed selecting commit when opening project.'));
                            });
                            return;
                        }
                        reLaunchUsers();
                        callback(null);
                    });
                });
            }
        });
    }

    if (state.project) {
        prevProjectId = state.project.projectId;
        logger.debug('A project was open, closing it', prevProjectId);

        if (prevProjectId === projectId) {
            logger.warn('projectId is already opened', projectId);
            callback(null);
            return;
        }

        closeProject(prevProjectId, function (err) {
            if (err) {
                logger.error('problems closing previous project', err);
                callback(err);
                return;
            }
            storage.openProject(projectId, projectOpened);
        });
    } else {
        storage.openProject(projectId, projectOpened);
    }
};
/**
 * Maintains the undo-redo chain. With `clear` set (a foreign, non-local
 * commit) the chain is reset to just this commit; otherwise the commit is
 * appended as the new head - unless it is already present in the chain,
 * which means it originated from an undo or redo and must not be re-added.
 * NOTE(review): the append path dereferences state.undoRedoChain.next and so
 * assumes the chain is non-null when clear is false - presumably guaranteed
 * because the first non-local hash update clears; confirm against callers.
 * @param {object} commitObject - commit with root hash and storage id.
 * @param {boolean} clear - true to reset the chain to this commit.
 */
function addModification(commitObject, clear) {
var newItem,
commitHash = commitObject[CONSTANTS.STORAGE.MONGO_ID],
currItem;
if (clear) {
logger.debug('foreign modification clearing undo-redo chain');
state.undoRedoChain = {
commitHash: commitHash,
rootHash: commitObject.root,
previous: null,
next: null
};
return;
}
// Check if the modification already exist, i.e. commit is from undoing or redoing.
currItem = state.undoRedoChain;
while (currItem) {
if (currItem.commitHash === commitHash) {
return;
}
currItem = currItem.previous;
}
currItem = state.undoRedoChain;
while (currItem) {
if (currItem.commitHash === commitHash) {
return;
}
currItem = currItem.next;
}
// New local commit - link it in as the new head of the chain.
newItem = {
commitHash: commitHash,
rootHash: commitObject.root,
previous: state.undoRedoChain,
next: null
};
state.undoRedoChain.next = newItem;
state.undoRedoChain = newItem;
}
/**
 * Whether an undo is possible: the undo-redo chain must have a previous
 * entry carrying a commit hash.
 * @returns {boolean}
 */
function canUndo() {
    var chain = state.undoRedoChain;
    return Boolean(chain && chain.previous && chain.previous.commitHash);
}
/**
 * Whether a redo is possible: the undo-redo chain must have a next entry.
 * @returns {boolean}
 */
function canRedo() {
    var chain = state.undoRedoChain;
    return Boolean(chain && chain.next);
}
/**
 * Builds the branch-status handler passed to storage.openBranch: records the
 * new status on the state and broadcasts it (with the commit and update
 * queues) as a BRANCH_STATUS_CHANGED event.
 * @returns {function} handler(branchStatus, commitQueue, updateQueue).
 */
function getBranchStatusHandler() {
    return function (branchStatus, commitQueue, updateQueue) {
        var eventData = {
            status: branchStatus,
            commitQueue: commitQueue,
            updateQueue: updateQueue
        };
        logger.debug('branchStatus changed', branchStatus, commitQueue, updateQueue);
        logState('debug', 'branchStatus');
        state.branchStatus = branchStatus;
        self.dispatchEvent(CONSTANTS.BRANCH_STATUS_CHANGED, eventData);
    };
}
/**
 * Builds the hashUpdated handler passed to storage.openBranch. It loads the
 * incoming commit's tree, refuses to proceed while a transaction is open, and
 * on success maintains the undo-redo chain and emits UNDO_AVAILABLE,
 * REDO_AVAILABLE and NEW_COMMIT_STATE events. The handler's callback's second
 * argument tells storage whether to proceed with the update.
 * @returns {function} handler(data, commitQueue, updateQueue, callback).
 */
function getHashUpdateHandler() {
return function (data, commitQueue, updateQueue, callback) {
var commitData = data.commitData,
clearUndoRedo = data.local !== true,
commitHash = commitData.commitObject[CONSTANTS.STORAGE.MONGO_ID];
logger.debug('hashUpdateHandler invoked. project, branch, commitHash',
commitData.projectId, commitData.branchName, commitHash);
if (state.transactions.opened > 0) {
logger.warn('Is in transaction, will not load in changes');
callback(null, false); // proceed: false
return;
}
logger.debug('loading commitHash, local?', commitHash, data.local);
loading(commitData.commitObject.root, commitHash, commitData.changedNodes, function (err, aborted) {
if (err) {
logger.error('hashUpdateHandler invoked loading and it returned error',
commitData.commitObject.root, err);
logState('error', 'hashUpdateHandler');
callback(err, false); // proceed: false
} else if (aborted === true) {
logState('warn', 'hashUpdateHandler');
callback(null, false); // proceed: false
} else {
logger.debug('loading complete for incoming rootHash', commitData.commitObject.root);
logState('debug', 'hashUpdateHandler');
//undo-redo
// Non-local commits reset the chain; local ones extend it.
addModification(commitData.commitObject, clearUndoRedo);
self.dispatchEvent(CONSTANTS.UNDO_AVAILABLE, canUndo());
self.dispatchEvent(CONSTANTS.REDO_AVAILABLE, canRedo());
self.dispatchEvent(CONSTANTS.NEW_COMMIT_STATE, {
data: data,
uiState: typeof self.uiStateGetter === 'function' ? self.uiStateGetter() : null
});
callback(null, true); // proceed: true
}
});
};
}
/**
* Opens the given branch and closes any open branch (even though the same branch is opened)
* @param {string} branchName - name of branch to open.
* @param {function} [branchStatusHandler=getDefaultCommitHandler()] - Handles returned statuses after commits.
* @param callback
*/
this.selectBranch = function (branchName, branchStatusHandler, callback) {
var prevBranchName = state.branchName;
logger.debug('selectBranch', branchName);
// Guard clauses: need an open connection and an open project.
if (self.isConnected() === false) {
callback(new Error('There is no open database connection!'));
return;
}
if (!state.project) {
callback(new Error('selectBranch invoked without an opened project'));
return;
}
if (branchStatusHandler) {
logger.warn('passing branchStatusHandler is deprecated, use addHashUpdateHandler or' +
' addBranchStatusHandler on the branch object instead (getProjectObject().branches[branchName]).');
}
// Opens the requested branch and emits BRANCH_OPENED/BRANCH_CHANGED.
function openBranch() {
logger.debug('openBranch, calling storage openBranch', state.project.projectId, branchName);
storage.openBranch(state.project.projectId, branchName,
getHashUpdateHandler(), getBranchStatusHandler(),
function (err /*, latestCommit*/) {
if (err) {
logger.error('storage.openBranch returned with error', err);
self.dispatchEvent(CONSTANTS.BRANCH_CHANGED, null);
callback(err);
return;
}
state.viewer = false;
state.branchName = branchName;
self.dispatchEvent(CONSTANTS.BRANCH_OPENED, branchName);
self.dispatchEvent(CONSTANTS.BRANCH_CHANGED, branchName);
logState('info', 'openBranch');
callback(null);
}
);
}
// Always close a previously open branch first - even when reopening the
// same branch (see the jsdoc above).
if (prevBranchName !== null) {
logger.debug('Branch was open, closing it first', prevBranchName);
storage.closeBranch(state.project.projectId, prevBranchName, function (err) {
if (err) {
logger.error('Problems closing existing branch', err);
callback(err);
return;
}
state.branchName = null;
self.dispatchEvent(CONSTANTS.BRANCH_CLOSED, prevBranchName);
openBranch();
});
} else {
openBranch();
}
};
/**
 * Selects a specific commit without regard to any branch (viewer mode),
 * emitting events for all nodes (no event filtering).
 * @param {string} commitHash - hash of the commit to select.
 * @param {function} callback - callback(err).
 */
this.selectCommit = function (commitHash, callback) {
self._selectCommitFilteredEvents(commitHash, null, callback);
};
this._selectCommitFilteredEvents = function (commitHash, changedNodes, callback) {
var prevBranchName;
logger.debug('selectCommit', commitHash);
if (self.isConnected() === false) {
callback(new Error('There is no open database connection!'));
return;
}
if (!state.project) {
callback(new Error('selectCommit invoked without open