UNPKG

matrix-js-sdk

Version: (not captured in this extract)
1,206 lines (1,068 loc) 79.9 kB
/*
Copyright 2015 - 2022 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

/*
 * TODO:
 * This class mainly serves to take all the syncing logic out of client.js and
 * into a separate file. It's all very fluid, and this class gut wrenches a lot
 * of MatrixClient props (e.g. http). Given we want to support WebSockets as
 * an alternative syncing API, we may want to have a proper syncing interface
 * for HTTP and WS at some point.
 */

import { Optional } from "matrix-events-sdk";

import type { SyncCryptoCallbacks } from "./common-crypto/CryptoBackend";
import { User, UserEvent } from "./models/user";
import { NotificationCountType, Room, RoomEvent } from "./models/room";
import * as utils from "./utils";
import { IDeferred } from "./utils";
import { Filter } from "./filter";
import { EventTimeline } from "./models/event-timeline";
import { PushProcessor } from "./pushprocessor";
import { logger } from "./logger";
import { InvalidStoreError, InvalidStoreState } from "./errors";
import { ClientEvent, IStoredClientOpts, MatrixClient, PendingEventOrdering, ResetTimelineCallback } from "./client";
import {
    IEphemeral,
    IInvitedRoom,
    IInviteState,
    IJoinedRoom,
    ILeftRoom,
    IMinimalEvent,
    IRoomEvent,
    IStateEvent,
    IStrippedState,
    ISyncResponse,
    ITimeline,
    IToDeviceEvent,
} from "./sync-accumulator";
import { MatrixEvent } from "./models/event";
import { MatrixError, Method } from "./http-api";
import { ISavedSync } from "./store";
import { EventType } from "./@types/event";
import { IPushRules } from "./@types/PushRules";
import { RoomStateEvent, IMarkerFoundOptions } from "./models/room-state";
import { RoomMemberEvent } from "./models/room-member";
import { BeaconEvent } from "./models/beacon";
import { IEventsResponse } from "./@types/requests";
import { UNREAD_THREAD_NOTIFICATIONS } from "./@types/sync";
import { Feature, ServerSupport } from "./feature";
import { Crypto } from "./crypto";

// When true, debuglog() forwards its arguments to logger.log; when false it is a no-op.
const DEBUG = true;

// /sync requests allow you to set a timeout= but the request may continue
// beyond that and wedge forever, so we need to track how long we are willing
// to keep open the connection. This constant is *ADDED* to the timeout= value
// to determine the max time we're willing to wait.
const BUFFER_PERIOD_MS = 80 * 1000;

// Number of consecutive failed syncs that will lead to a syncState of ERROR as opposed
// to RECONNECTING. This is needed to inform the client of server issues when the
// keepAlive is successful but the server /sync fails.
const FAILED_SYNC_ERROR_THRESHOLD = 3;

/**
 * The states the sync loop can report via the client's "sync" event.
 */
export enum SyncState {
    /**
     * Emitted after we try to sync more than `FAILED_SYNC_ERROR_THRESHOLD`
     * times and are still failing. Or when we encounter a hard error like the
     * token being invalid.
     */
    Error = "ERROR",
    /**
     * Emitted after the first sync events are ready (this could even be sync
     * events from the cache)
     */
    Prepared = "PREPARED",
    /** Emitted when the sync loop is no longer running */
    Stopped = "STOPPED",
    /** Emitted after each sync request happens */
    Syncing = "SYNCING",
    /** Emitted after a connectivity error and we're ready to start syncing again */
    Catchup = "CATCHUP",
    /**
     * Emitted for each time we try reconnecting. Will switch to `Error` after
     * we reach the `FAILED_SYNC_ERROR_THRESHOLD`
     */
    Reconnecting = "RECONNECTING",
}

// Room versions where "insertion", "batch", and "marker" events are controlled
// by power-levels. MSC2716 is supported in existing room versions but they
// should only have special meaning when the room creator sends them.
const MSC2716_ROOM_VERSIONS = ["org.matrix.msc2716v3"]; function getFilterName(userId: string, suffix?: string): string { // scope this on the user ID because people may login on many accounts // and they all need to be stored! return `FILTER_SYNC_${userId}` + (suffix ? "_" + suffix : ""); } /* istanbul ignore next */ function debuglog(...params: any[]): void { if (!DEBUG) return; logger.log(...params); } /** * Options passed into the constructor of SyncApi by MatrixClient */ export interface SyncApiOptions { /** * Crypto manager * * @deprecated in favour of cryptoCallbacks */ crypto?: Crypto; /** * If crypto is enabled on our client, callbacks into the crypto module */ cryptoCallbacks?: SyncCryptoCallbacks; /** * A function which is called * with a room ID and returns a boolean. It should return 'true' if the SDK can * SAFELY remove events from this room. It may not be safe to remove events if * there are other references to the timelines for this room. */ canResetEntireTimeline?: ResetTimelineCallback; } interface ISyncOptions { filter?: string; hasSyncedBefore?: boolean; } export interface ISyncStateData { /** * The matrix error if `state=ERROR`. */ error?: Error; /** * The 'since' token passed to /sync. * `null` for the first successful sync since this client was * started. Only present if `state=PREPARED` or * `state=SYNCING`. */ oldSyncToken?: string; /** * The 'next_batch' result from /sync, which * will become the 'since' token for the next call to /sync. Only present if * `state=PREPARED</code> or <code>state=SYNCING`. */ nextSyncToken?: string; /** * True if we are working our way through a * backlog of events after connecting. Only present if `state=SYNCING`. 
*/ catchingUp?: boolean; fromCache?: boolean; } enum SetPresence { Offline = "offline", Online = "online", Unavailable = "unavailable", } interface ISyncParams { filter?: string; timeout: number; since?: string; // eslint-disable-next-line camelcase full_state?: boolean; // eslint-disable-next-line camelcase set_presence?: SetPresence; _cacheBuster?: string | number; // not part of the API itself } type WrappedRoom<T> = T & { room: Room; isBrandNewRoom: boolean; }; /** add default settings to an IStoredClientOpts */ export function defaultClientOpts(opts?: IStoredClientOpts): IStoredClientOpts { return { initialSyncLimit: 8, resolveInvitesToProfiles: false, pollTimeout: 30 * 1000, pendingEventOrdering: PendingEventOrdering.Chronological, experimentalThreadSupport: false, ...opts, }; } export function defaultSyncApiOpts(syncOpts?: SyncApiOptions): SyncApiOptions { return { canResetEntireTimeline: (_roomId): boolean => false, ...syncOpts, }; } export class SyncApi { private readonly opts: IStoredClientOpts; private readonly syncOpts: SyncApiOptions; private _peekRoom: Optional<Room> = null; private currentSyncRequest?: Promise<ISyncResponse>; private abortController?: AbortController; private syncState: SyncState | null = null; private syncStateData?: ISyncStateData; // additional data (eg. error object for failed sync) private catchingUp = false; private running = false; private keepAliveTimer?: ReturnType<typeof setTimeout>; private connectionReturnedDefer?: IDeferred<boolean>; private notifEvents: MatrixEvent[] = []; // accumulator of sync events in the current sync response private failedSyncCount = 0; // Number of consecutive failed /sync requests private storeIsInvalid = false; // flag set if the store needs to be cleared before we can start /** * Construct an entity which is able to sync with a homeserver. * @param client - The matrix client instance to use. 
* @param opts - client config options * @param syncOpts - sync-specific options passed by the client * @internal */ public constructor(private readonly client: MatrixClient, opts?: IStoredClientOpts, syncOpts?: SyncApiOptions) { this.opts = defaultClientOpts(opts); this.syncOpts = defaultSyncApiOpts(syncOpts); if (client.getNotifTimelineSet()) { client.reEmitter.reEmit(client.getNotifTimelineSet()!, [RoomEvent.Timeline, RoomEvent.TimelineReset]); } } public createRoom(roomId: string): Room { const room = _createAndReEmitRoom(this.client, roomId, this.opts); room.on(RoomStateEvent.Marker, (markerEvent, markerFoundOptions) => { this.onMarkerStateEvent(room, markerEvent, markerFoundOptions); }); return room; } /** When we see the marker state change in the room, we know there is some * new historical messages imported by MSC2716 `/batch_send` somewhere in * the room and we need to throw away the timeline to make sure the * historical messages are shown when we paginate `/messages` again. * @param room - The room where the marker event was sent * @param markerEvent - The new marker event * @param setStateOptions - When `timelineWasEmpty` is set * as `true`, the given marker event will be ignored */ private onMarkerStateEvent( room: Room, markerEvent: MatrixEvent, { timelineWasEmpty }: IMarkerFoundOptions = {}, ): void { // We don't need to refresh the timeline if it was empty before the // marker arrived. This could be happen in a variety of cases: // 1. From the initial sync // 2. If it's from the first state we're seeing after joining the room // 3. 
Or whether it's coming from `syncFromCache` if (timelineWasEmpty) { logger.debug( `MarkerState: Ignoring markerEventId=${markerEvent.getId()} in roomId=${room.roomId} ` + `because the timeline was empty before the marker arrived which means there is nothing to refresh.`, ); return; } const isValidMsc2716Event = // Check whether the room version directly supports MSC2716, in // which case, "marker" events are already auth'ed by // power_levels MSC2716_ROOM_VERSIONS.includes(room.getVersion()) || // MSC2716 is also supported in all existing room versions but // special meaning should only be given to "insertion", "batch", // and "marker" events when they come from the room creator markerEvent.getSender() === room.getCreator(); // It would be nice if we could also specifically tell whether the // historical messages actually affected the locally cached client // timeline or not. The problem is we can't see the prev_events of // the base insertion event that the marker was pointing to because // prev_events aren't available in the client API's. In most cases, // the history won't be in people's locally cached timelines in the // client, so we don't need to bother everyone about refreshing // their timeline. This works for a v1 though and there are use // cases like initially bootstrapping your bridged room where people // are likely to encounter the historical messages affecting their // current timeline (think someone signing up for Beeper and // importing their Whatsapp history). if (isValidMsc2716Event) { // Saw new marker event, let's let the clients know they should // refresh the timeline. 
logger.debug( `MarkerState: Timeline needs to be refreshed because ` + `a new markerEventId=${markerEvent.getId()} was sent in roomId=${room.roomId}`, ); room.setTimelineNeedsRefresh(true); room.emit(RoomEvent.HistoryImportedWithinTimeline, markerEvent, room); } else { logger.debug( `MarkerState: Ignoring markerEventId=${markerEvent.getId()} in roomId=${room.roomId} because ` + `MSC2716 is not supported in the room version or for any room version, the marker wasn't sent ` + `by the room creator.`, ); } } /** * Sync rooms the user has left. * @returns Resolved when they've been added to the store. */ public async syncLeftRooms(): Promise<Room[]> { const client = this.client; // grab a filter with limit=1 and include_leave=true const filter = new Filter(this.client.credentials.userId); filter.setTimelineLimit(1); filter.setIncludeLeaveRooms(true); const localTimeoutMs = this.opts.pollTimeout! + BUFFER_PERIOD_MS; const filterId = await client.getOrCreateFilter( getFilterName(client.credentials.userId!, "LEFT_ROOMS"), filter, ); const qps: ISyncParams = { timeout: 0, // don't want to block since this is a single isolated req filter: filterId, }; const data = await client.http.authedRequest<ISyncResponse>(Method.Get, "/sync", qps as any, undefined, { localTimeoutMs, }); let leaveRooms: WrappedRoom<ILeftRoom>[] = []; if (data.rooms?.leave) { leaveRooms = this.mapSyncResponseToRoomArray(data.rooms.leave); } const rooms = await Promise.all( leaveRooms.map(async (leaveObj) => { const room = leaveObj.room; if (!leaveObj.isBrandNewRoom) { // the intention behind syncLeftRooms is to add in rooms which were // *omitted* from the initial /sync. Rooms the user were joined to // but then left whilst the app is running will appear in this list // and we do not want to bother with them since they will have the // current state already (and may get dupe messages if we add // yet more timeline events!), so skip them. 
// NB: When we persist rooms to localStorage this will be more // complicated... return; } leaveObj.timeline = leaveObj.timeline || { prev_batch: null, events: [], }; const events = this.mapSyncEventsFormat(leaveObj.timeline, room); const stateEvents = this.mapSyncEventsFormat(leaveObj.state, room); // set the back-pagination token. Do this *before* adding any // events so that clients can start back-paginating. room.getLiveTimeline().setPaginationToken(leaveObj.timeline.prev_batch, EventTimeline.BACKWARDS); await this.injectRoomEvents(room, stateEvents, events); room.recalculate(); client.store.storeRoom(room); client.emit(ClientEvent.Room, room); this.processEventsForNotifs(room, events); return room; }), ); return rooms.filter(Boolean) as Room[]; } /** * Peek into a room. This will result in the room in question being synced so it * is accessible via getRooms(). Live updates for the room will be provided. * @param roomId - The room ID to peek into. * @returns A promise which resolves once the room has been added to the * store. 
*/ public peek(roomId: string): Promise<Room> { if (this._peekRoom?.roomId === roomId) { return Promise.resolve(this._peekRoom); } const client = this.client; this._peekRoom = this.createRoom(roomId); return this.client.roomInitialSync(roomId, 20).then((response) => { // make sure things are init'd response.messages = response.messages || { chunk: [] }; response.messages.chunk = response.messages.chunk || []; response.state = response.state || []; // FIXME: Mostly duplicated from injectRoomEvents but not entirely // because "state" in this API is at the BEGINNING of the chunk const oldStateEvents = utils.deepCopy(response.state).map(client.getEventMapper()); const stateEvents = response.state.map(client.getEventMapper()); const messages = response.messages.chunk.map(client.getEventMapper()); // XXX: copypasted from /sync until we kill off this minging v1 API stuff) // handle presence events (User objects) if (Array.isArray(response.presence)) { response.presence.map(client.getEventMapper()).forEach(function (presenceEvent) { let user = client.store.getUser(presenceEvent.getContent().user_id); if (user) { user.setPresenceEvent(presenceEvent); } else { user = createNewUser(client, presenceEvent.getContent().user_id); user.setPresenceEvent(presenceEvent); client.store.storeUser(user); } client.emit(ClientEvent.Event, presenceEvent); }); } // set the pagination token before adding the events in case people // fire off pagination requests in response to the Room.timeline // events. if (response.messages.start) { this._peekRoom!.oldState.paginationToken = response.messages.start; } // set the state of the room to as it was after the timeline executes this._peekRoom!.oldState.setStateEvents(oldStateEvents); this._peekRoom!.currentState.setStateEvents(stateEvents); this.resolveInvites(this._peekRoom!); this._peekRoom!.recalculate(); // roll backwards to diverge old state. 
addEventsToTimeline // will overwrite the pagination token, so make sure it overwrites // it with the right thing. this._peekRoom!.addEventsToTimeline( messages.reverse(), true, this._peekRoom!.getLiveTimeline(), response.messages.start, ); client.store.storeRoom(this._peekRoom!); client.emit(ClientEvent.Room, this._peekRoom!); this.peekPoll(this._peekRoom!); return this._peekRoom!; }); } /** * Stop polling for updates in the peeked room. NOPs if there is no room being * peeked. */ public stopPeeking(): void { this._peekRoom = null; } /** * Do a peek room poll. * @param token - from= token */ private peekPoll(peekRoom: Room, token?: string): void { if (this._peekRoom !== peekRoom) { debuglog("Stopped peeking in room %s", peekRoom.roomId); return; } // FIXME: gut wrenching; hard-coded timeout values this.client.http .authedRequest<IEventsResponse>( Method.Get, "/events", { room_id: peekRoom.roomId, timeout: String(30 * 1000), from: token, }, undefined, { localTimeoutMs: 50 * 1000, abortSignal: this.abortController?.signal, }, ) .then( (res) => { if (this._peekRoom !== peekRoom) { debuglog("Stopped peeking in room %s", peekRoom.roomId); return; } // We have a problem that we get presence both from /events and /sync // however, /sync only returns presence for users in rooms // you're actually joined to. // in order to be sure to get presence for all of the users in the // peeked room, we handle presence explicitly here. This may result // in duplicate presence events firing for some users, which is a // performance drain, but such is life. // XXX: copypasted from /sync until we can kill this minging v1 stuff. 
res.chunk .filter(function (e) { return e.type === "m.presence"; }) .map(this.client.getEventMapper()) .forEach((presenceEvent) => { let user = this.client.store.getUser(presenceEvent.getContent().user_id); if (user) { user.setPresenceEvent(presenceEvent); } else { user = createNewUser(this.client, presenceEvent.getContent().user_id); user.setPresenceEvent(presenceEvent); this.client.store.storeUser(user); } this.client.emit(ClientEvent.Event, presenceEvent); }); // strip out events which aren't for the given room_id (e.g presence) // and also ephemeral events (which we're assuming is anything without // and event ID because the /events API doesn't separate them). const events = res.chunk .filter(function (e) { return e.room_id === peekRoom.roomId && e.event_id; }) .map(this.client.getEventMapper()); peekRoom.addLiveEvents(events); this.peekPoll(peekRoom, res.end); }, (err) => { logger.error("[%s] Peek poll failed: %s", peekRoom.roomId, err); setTimeout(() => { this.peekPoll(peekRoom, token); }, 30 * 1000); }, ); } /** * Returns the current state of this sync object * @see MatrixClient#event:"sync" */ public getSyncState(): SyncState | null { return this.syncState; } /** * Returns the additional data object associated with * the current sync state, or null if there is no * such data. * Sync errors, if available, are put in the 'error' key of * this object. */ public getSyncStateData(): ISyncStateData | null { return this.syncStateData ?? null; } public async recoverFromSyncStartupError(savedSyncPromise: Promise<void> | undefined, error: Error): Promise<void> { // Wait for the saved sync to complete - we send the pushrules and filter requests // before the saved sync has finished so they can run in parallel, but only process // the results after the saved sync is done. Equivalently, we wait for it to finish // before reporting failures from these functions. 
await savedSyncPromise; const keepaliveProm = this.startKeepAlives(); this.updateSyncState(SyncState.Error, { error }); await keepaliveProm; } /** * Is the lazy loading option different than in previous session? * @param lazyLoadMembers - current options for lazy loading * @returns whether or not the option has changed compared to the previous session */ private async wasLazyLoadingToggled(lazyLoadMembers = false): Promise<boolean> { // assume it was turned off before // if we don't know any better let lazyLoadMembersBefore = false; const isStoreNewlyCreated = await this.client.store.isNewlyCreated(); if (!isStoreNewlyCreated) { const prevClientOptions = await this.client.store.getClientOptions(); if (prevClientOptions) { lazyLoadMembersBefore = !!prevClientOptions.lazyLoadMembers; } return lazyLoadMembersBefore !== lazyLoadMembers; } return false; } private shouldAbortSync(error: MatrixError): boolean { if (error.errcode === "M_UNKNOWN_TOKEN") { // The logout already happened, we just need to stop. 
logger.warn("Token no longer valid - assuming logout"); this.stop(); this.updateSyncState(SyncState.Error, { error }); return true; } return false; } private getPushRules = async (): Promise<void> => { try { debuglog("Getting push rules..."); const result = await this.client.getPushRules(); debuglog("Got push rules"); this.client.pushRules = result; } catch (err) { logger.error("Getting push rules failed", err); if (this.shouldAbortSync(<MatrixError>err)) return; // wait for saved sync to complete before doing anything else, // otherwise the sync state will end up being incorrect debuglog("Waiting for saved sync before retrying push rules..."); await this.recoverFromSyncStartupError(this.savedSyncPromise, <Error>err); return this.getPushRules(); // try again } }; private buildDefaultFilter = (): Filter => { const filter = new Filter(this.client.credentials.userId); if (this.client.canSupport.get(Feature.ThreadUnreadNotifications) !== ServerSupport.Unsupported) { filter.setUnreadThreadNotifications(true); } return filter; }; private checkLazyLoadStatus = async (): Promise<void> => { debuglog("Checking lazy load status..."); if (this.opts.lazyLoadMembers && this.client.isGuest()) { this.opts.lazyLoadMembers = false; } if (this.opts.lazyLoadMembers) { debuglog("Checking server lazy load support..."); const supported = await this.client.doesServerSupportLazyLoading(); if (supported) { debuglog("Enabling lazy load on sync filter..."); if (!this.opts.filter) { this.opts.filter = this.buildDefaultFilter(); } this.opts.filter.setLazyLoadMembers(true); } else { debuglog("LL: lazy loading requested but not supported " + "by server, so disabling"); this.opts.lazyLoadMembers = false; } } // need to vape the store when enabling LL and wasn't enabled before debuglog("Checking whether lazy loading has changed in store..."); const shouldClear = await this.wasLazyLoadingToggled(this.opts.lazyLoadMembers); if (shouldClear) { this.storeIsInvalid = true; const error = new 
InvalidStoreError(InvalidStoreState.ToggledLazyLoading, !!this.opts.lazyLoadMembers); this.updateSyncState(SyncState.Error, { error }); // bail out of the sync loop now: the app needs to respond to this error. // we leave the state as 'ERROR' which isn't great since this normally means // we're retrying. The client must be stopped before clearing the stores anyway // so the app should stop the client, clear the store and start it again. logger.warn("InvalidStoreError: store is not usable: stopping sync."); return; } if (this.opts.lazyLoadMembers) { this.syncOpts.crypto?.enableLazyLoading(); } try { debuglog("Storing client options..."); await this.client.storeClientOptions(); debuglog("Stored client options"); } catch (err) { logger.error("Storing client options failed", err); throw err; } }; private getFilter = async (): Promise<{ filterId?: string; filter?: Filter; }> => { debuglog("Getting filter..."); let filter: Filter; if (this.opts.filter) { filter = this.opts.filter; } else { filter = this.buildDefaultFilter(); } let filterId: string; try { filterId = await this.client.getOrCreateFilter(getFilterName(this.client.credentials.userId!), filter); } catch (err) { logger.error("Getting filter failed", err); if (this.shouldAbortSync(<MatrixError>err)) return {}; // wait for saved sync to complete before doing anything else, // otherwise the sync state will end up being incorrect debuglog("Waiting for saved sync before retrying filter..."); await this.recoverFromSyncStartupError(this.savedSyncPromise, <Error>err); return this.getFilter(); // try again } return { filter, filterId }; }; private savedSyncPromise?: Promise<void>; /** * Main entry point */ public async sync(): Promise<void> { this.running = true; this.abortController = new AbortController(); global.window?.addEventListener?.("online", this.onOnline, false); if (this.client.isGuest()) { // no push rules for guests, no access to POST filter for guests. 
return this.doSync({}); } // Pull the saved sync token out first, before the worker starts sending // all the sync data which could take a while. This will let us send our // first incremental sync request before we've processed our saved data. debuglog("Getting saved sync token..."); const savedSyncTokenPromise = this.client.store.getSavedSyncToken().then((tok) => { debuglog("Got saved sync token"); return tok; }); this.savedSyncPromise = this.client.store .getSavedSync() .then((savedSync) => { debuglog(`Got reply from saved sync, exists? ${!!savedSync}`); if (savedSync) { return this.syncFromCache(savedSync); } }) .catch((err) => { logger.error("Getting saved sync failed", err); }); // We need to do one-off checks before we can begin the /sync loop. // These are: // 1) We need to get push rules so we can check if events should bing as we get // them from /sync. // 2) We need to get/create a filter which we can use for /sync. // 3) We need to check the lazy loading option matches what was used in the // stored sync. If it doesn't, we can't use the stored sync. // Now start the first incremental sync request: this can also // take a while so if we set it going now, we can wait for it // to finish while we process our saved sync data. await this.getPushRules(); await this.checkLazyLoadStatus(); const { filterId, filter } = await this.getFilter(); if (!filter) return; // bail, getFilter failed // reset the notifications timeline to prepare it to paginate from // the current point in time. // The right solution would be to tie /sync pagination tokens into // /notifications API somehow. 
this.client.resetNotifTimelineSet(); if (!this.currentSyncRequest) { let firstSyncFilter = filterId; const savedSyncToken = await savedSyncTokenPromise; if (savedSyncToken) { debuglog("Sending first sync request..."); } else { debuglog("Sending initial sync request..."); const initialFilter = this.buildDefaultFilter(); initialFilter.setDefinition(filter.getDefinition()); initialFilter.setTimelineLimit(this.opts.initialSyncLimit!); // Use an inline filter, no point uploading it for a single usage firstSyncFilter = JSON.stringify(initialFilter.getDefinition()); } // Send this first sync request here so we can then wait for the saved // sync data to finish processing before we process the results of this one. this.currentSyncRequest = this.doSyncRequest({ filter: firstSyncFilter }, savedSyncToken); } // Now wait for the saved sync to finish... debuglog("Waiting for saved sync before starting sync processing..."); await this.savedSyncPromise; // process the first sync request and continue syncing with the normal filterId return this.doSync({ filter: filterId }); } /** * Stops the sync object from syncing. */ public stop(): void { debuglog("SyncApi.stop"); // It is necessary to check for the existance of // global.window AND global.window.removeEventListener. // Some platforms (e.g. React Native) register global.window, // but do not have global.window.removeEventListener. global.window?.removeEventListener?.("online", this.onOnline, false); this.running = false; this.abortController?.abort(); if (this.keepAliveTimer) { clearTimeout(this.keepAliveTimer); this.keepAliveTimer = undefined; } } /** * Retry a backed off syncing request immediately. This should only be used when * the user <b>explicitly</b> attempts to retry their lost connection. * @returns True if this resulted in a request being retried. 
*/ public retryImmediately(): boolean { if (!this.connectionReturnedDefer) { return false; } this.startKeepAlives(0); return true; } /** * Process a single set of cached sync data. * @param savedSync - a saved sync that was persisted by a store. This * should have been acquired via client.store.getSavedSync(). */ private async syncFromCache(savedSync: ISavedSync): Promise<void> { debuglog("sync(): not doing HTTP hit, instead returning stored /sync data"); const nextSyncToken = savedSync.nextBatch; // Set sync token for future incremental syncing this.client.store.setSyncToken(nextSyncToken); // No previous sync, set old token to null const syncEventData: ISyncStateData = { nextSyncToken, catchingUp: false, fromCache: true, }; const data: ISyncResponse = { next_batch: nextSyncToken, rooms: savedSync.roomsData, account_data: { events: savedSync.accountData, }, }; try { await this.processSyncResponse(syncEventData, data); } catch (e) { logger.error("Error processing cached sync", e); } // Don't emit a prepared if we've bailed because the store is invalid: // in this case the client will not be usable until stopped & restarted // so this would be useless and misleading. if (!this.storeIsInvalid) { this.updateSyncState(SyncState.Prepared, syncEventData); } } /** * Invoke me to do /sync calls */ private async doSync(syncOptions: ISyncOptions): Promise<void> { while (this.running) { const syncToken = this.client.store.getSyncToken(); let data: ISyncResponse; try { if (!this.currentSyncRequest) { this.currentSyncRequest = this.doSyncRequest(syncOptions, syncToken); } data = await this.currentSyncRequest; } catch (e) { const abort = await this.onSyncError(<MatrixError>e); if (abort) return; continue; } finally { this.currentSyncRequest = undefined; } // set the sync token NOW *before* processing the events. We do this so // if something barfs on an event we can skip it rather than constantly // polling with the same token. 
            // -- success path of the sync iteration: persist the token and sync
            // data, let the crypto layer pre-process, apply the response to the
            // model objects, then emit the resulting sync-state transitions. --
            this.client.store.setSyncToken(data.next_batch);

            // Reset after a successful sync
            this.failedSyncCount = 0;

            await this.client.store.setSyncData(data);

            // Token pair describing this sync step; handed to the crypto hooks
            // and emitted alongside every sync-state update below.
            const syncEventData = {
                oldSyncToken: syncToken ?? undefined,
                nextSyncToken: data.next_batch,
                catchingUp: this.catchingUp,
            };

            if (this.syncOpts.crypto) {
                // tell the crypto module we're about to process a sync
                // response
                await this.syncOpts.crypto.onSyncWillProcess(syncEventData);
            }

            try {
                await this.processSyncResponse(syncEventData, data);
            } catch (e) {
                // log the exception with stack if we have it, else fall back
                // to the plain description
                logger.error("Caught /sync error", e);

                // Emit the exception for client handling
                this.client.emit(ClientEvent.SyncUnexpectedError, <Error>e);
            }

            // update this as it may have changed
            syncEventData.catchingUp = this.catchingUp;

            // emit synced events
            if (!syncOptions.hasSyncedBefore) {
                this.updateSyncState(SyncState.Prepared, syncEventData);
                syncOptions.hasSyncedBefore = true;
            }

            // tell the crypto module to do its processing. It may block (to do a
            // /keys/changes request).
            if (this.syncOpts.cryptoCallbacks) {
                await this.syncOpts.cryptoCallbacks.onSyncCompleted(syncEventData);
            }

            // keep emitting SYNCING -> SYNCING for clients who want to do bulk updates
            this.updateSyncState(SyncState.Syncing, syncEventData);

            if (this.client.store.wantsSave()) {
                // We always save the device list (if it's dirty) before saving the sync data:
                // this means we know the saved device list data is at least as fresh as the
                // stored sync data which means we don't have to worry that we may have missed
                // device changes. We can also skip the delay since we're not calling this very
                // frequently (and we don't really want to delay the sync for it).
                if (this.syncOpts.crypto) {
                    await this.syncOpts.crypto.saveDeviceList(0);
                }

                // tell databases that everything is now in a consistent state and can be saved.
                this.client.store.save();
            }
        }

        // NOTE(review): the block closed above begins before this excerpt —
        // presumably the main sync loop, which exits once `running` is cleared.
        if (!this.running) {
            debuglog("Sync no longer running: exiting.");
            if (this.connectionReturnedDefer) {
                this.connectionReturnedDefer.reject();
                this.connectionReturnedDefer = undefined;
            }
            this.updateSyncState(SyncState.Stopped);
        }
    }

    /**
     * Issue a single `GET /sync` request against the homeserver.
     *
     * @param syncOptions - options (e.g. the filter) used to build the query parameters
     * @param syncToken - the `since` token from the previous sync, or null for an
     *     initial sync
     * @returns a promise for the raw /sync response. The local HTTP timeout is set to
     *     the server-side long-poll timeout plus BUFFER_PERIOD_MS, so the request is
     *     not aborted locally before the server has had a chance to respond.
     */
    private doSyncRequest(syncOptions: ISyncOptions, syncToken: string | null): Promise<ISyncResponse> {
        const qps = this.getSyncParams(syncOptions, syncToken);
        return this.client.http.authedRequest<ISyncResponse>(Method.Get, "/sync", qps as any, undefined, {
            localTimeoutMs: qps.timeout + BUFFER_PERIOD_MS,
            abortSignal: this.abortController?.signal,
        });
    }

    /**
     * Build the query parameters for a /sync request.
     *
     * @param syncOptions - options carrying the filter to apply
     * @param syncToken - the `since` token, or null for an initial sync
     * @returns the query parameters: `filter`, `timeout`, and optionally
     *     `set_presence`, `since` or a cache-buster.
     */
    private getSyncParams(syncOptions: ISyncOptions, syncToken: string | null): ISyncParams {
        let timeout = this.opts.pollTimeout!;

        if (this.getSyncState() !== SyncState.Syncing || this.catchingUp) {
            // unless we are happily syncing already, we want the server to return
            // as quickly as possible, even if there are no events queued. This
            // serves two purposes:
            //
            // * When the connection dies, we want to know asap when it comes back,
            //   so that we can hide the error from the user. (We don't want to
            //   have to wait for an event or a timeout).
            //
            // * We want to know if the server has any to_device messages queued up
            //   for us. We do that by calling it with a zero timeout until it
            //   doesn't give us any more to_device messages.
            this.catchingUp = true;
            timeout = 0;
        }

        let filter = syncOptions.filter;
        if (this.client.isGuest() && !filter) {
            // guests are restricted to a narrower filter
            filter = this.getGuestFilter();
        }

        const qps: ISyncParams = { filter, timeout };

        if (this.opts.disablePresence) {
            qps.set_presence = SetPresence.Offline;
        }

        if (syncToken) {
            qps.since = syncToken;
        } else {
            // use a cachebuster for initialsyncs, to make sure that
            // we don't get a stale sync
            // (https://github.com/vector-im/vector-web/issues/1354)
            qps._cacheBuster = Date.now();
        }

        if ([SyncState.Reconnecting, SyncState.Error].includes(this.getSyncState()!)) {
            // we think the connection is dead. If it comes back up, we won't know
            // about it till /sync returns. If the timeout= is high, this could
            // be a long time. Set it to 0 when doing retries so we don't have to wait
            // for an event or a timeout before emitting the SYNCING event.
            qps.timeout = 0;
        }

        return qps;
    }

    /**
     * Handle a failed /sync request: log it, start keep-alive pokes, and drive the
     * sync state machine through RECONNECTING / ERROR (and possibly CATCHUP).
     *
     * @param err - the error the /sync request rejected with
     * @returns true if the sync loop should be aborted, false if it should retry
     */
    private async onSyncError(err: MatrixError): Promise<boolean> {
        if (!this.running) {
            debuglog("Sync no longer running: exiting");
            if (this.connectionReturnedDefer) {
                this.connectionReturnedDefer.reject();
                this.connectionReturnedDefer = undefined;
            }
            this.updateSyncState(SyncState.Stopped);
            return true; // abort
        }

        logger.error("/sync error %s", err);

        if (this.shouldAbortSync(err)) {
            return true; // abort
        }

        this.failedSyncCount++;
        logger.log("Number of consecutive failed sync requests:", this.failedSyncCount);

        debuglog("Starting keep-alive");
        // Note that we do *not* mark the sync connection as
        // lost yet: we only do this if a keepalive poke
        // fails, since long lived HTTP connections will
        // go away sometimes and we shouldn't treat this as
        // erroneous. We set the state to 'reconnecting'
        // instead, so that clients can observe this state
        // if they wish.
        const keepAlivePromise = this.startKeepAlives();

        this.currentSyncRequest = undefined;
        // Transition from RECONNECTING to ERROR after a given number of failed syncs
        this.updateSyncState(
            this.failedSyncCount >= FAILED_SYNC_ERROR_THRESHOLD ? SyncState.Error : SyncState.Reconnecting,
            { error: err },
        );

        const connDidFail = await keepAlivePromise;

        // Only emit CATCHUP if we detected a connectivity error: if we didn't,
        // it's quite likely the sync will fail again for the same reason and we
        // want to stay in ERROR rather than keep flip-flopping between ERROR
        // and CATCHUP.
        if (connDidFail && this.getSyncState() === SyncState.Error) {
            this.updateSyncState(SyncState.Catchup, {
                catchingUp: true,
            });
        }
        return false;
    }

    /**
     * Process data returned from a sync response and propagate it
     * into the model objects
     *
     * @param syncEventData - Object containing sync tokens associated with this sync
     * @param data - The response from /sync
     */
    private async processSyncResponse(syncEventData: ISyncStateData, data: ISyncResponse): Promise<void> {
        const client = this.client;

        // data looks like:
        // {
        //    next_batch: $token,
        //    presence: { events: [] },
        //    account_data: { events: [] },
        //    device_lists: { changed: ["@user:server", ... ]},
        //    to_device: { events: [] },
        //    device_one_time_keys_count: { signed_curve25519: 42 },
        //    rooms: {
        //      invite: {
        //        $roomid: {
        //          invite_state: { events: [] }
        //        }
        //      },
        //      join: {
        //        $roomid: {
        //          state: { events: [] },
        //          timeline: { events: [], prev_batch: $token, limited: true },
        //          ephemeral: { events: [] },
        //          summary: {
        //             m.heroes: [ $user_id ],
        //             m.joined_member_count: $count,
        //             m.invited_member_count: $count
        //          },
        //          account_data: { events: [] },
        //          unread_notifications: {
        //              highlight_count: 0,
        //              notification_count: 0,
        //          }
        //        }
        //      },
        //      leave: {
        //        $roomid: {
        //          state: { events: [] },
        //          timeline: { events: [], prev_batch: $token }
        //        }
        //      }
        //    }
        // }

        // TODO-arch:
        // - Each event we pass through needs to be emitted via 'event', can we
        //   do this in one place?
        // - The isBrandNewRoom boilerplate is boilerplatey.

        // handle presence events (User objects)
        if (Array.isArray(data.presence?.events)) {
            data.presence!.events.map(client.getEventMapper()).forEach(function (presenceEvent) {
                // update the existing User, or create and store a new one
                let user = client.store.getUser(presenceEvent.getSender()!);
                if (user) {
                    user.setPresenceEvent(presenceEvent);
                } else {
                    user = createNewUser(client, presenceEvent.getSender()!);
                    user.setPresenceEvent(presenceEvent);
                    client.store.storeUser(user);
                }
                client.emit(ClientEvent.Event, presenceEvent);
            });
        }

        // handle non-room account_data
        if (Array.isArray(data.account_data?.events)) {
            const events = data.account_data.events.map(client.getEventMapper());
            // snapshot the previous value of each account-data type *before*
            // storing, so the AccountData event can carry the old event too
            const prevEventsMap = events.reduce<Record<string, MatrixEvent | undefined>>((m, c) => {
                m[c.getType()!] = client.store.getAccountData(c.getType());
                return m;
            }, {});
            client.store.storeAccountDataEvents(events);
            events.forEach(function (accountDataEvent) {
                // Honour push rules that come down the sync stream but also
                // honour push rules that were previously cached. Base rules
                // will be updated when we receive push rules via getPushRules
                // (see sync) before syncing over the network.
                if (accountDataEvent.getType() === EventType.PushRules) {
                    const rules = accountDataEvent.getContent<IPushRules>();
                    client.pushRules = PushProcessor.rewriteDefaultRules(rules);
                }
                const prevEvent = prevEventsMap[accountDataEvent.getType()!];
                client.emit(ClientEvent.AccountData, accountDataEvent, prevEvent);
                return accountDataEvent;
            });
        }

        // handle to-device events
        if (data.to_device && Array.isArray(data.to_device.events) && data.to_device.events.length > 0) {
            let toDeviceMessages: IToDeviceEvent[] = data.to_device.events;

            if (this.syncOpts.cryptoCallbacks) {
                toDeviceMessages = await this.syncOpts.cryptoCallbacks.preprocessToDeviceMessages(toDeviceMessages);
            }

            const cancelledKeyVerificationTxns: string[] = [];
            toDeviceMessages
                .map(client.getEventMapper({ toDevice: true }))
                .map((toDeviceEvent) => {
                    // map is a cheap inline forEach
                    // We want to flag m.key.verification.start events as cancelled
                    // if there's an accompanying m.key.verification.cancel event, so
                    // we pull out the transaction IDs from the cancellation events
                    // so we can flag the verification events as cancelled in the loop
                    // below.
                    if (toDeviceEvent.getType() === "m.key.verification.cancel") {
                        const txnId: string = toDeviceEvent.getContent()["transaction_id"];
                        if (txnId) {
                            cancelledKeyVerificationTxns.push(txnId);
                        }
                    }

                    // as mentioned above, .map is a cheap inline forEach, so return
                    // the unmodified event.
                    return toDeviceEvent;
                })
                .forEach(function (toDeviceEvent) {
                    const content = toDeviceEvent.getContent();
                    if (toDeviceEvent.getType() == "m.room.message" && content.msgtype == "m.bad.encrypted") {
                        // the mapper already logged a warning.