@graffy/server

Node.js library for building an API for a Graffy store.

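The exported function takes a Graffy store and returns a handler for WebSocket upgrade requests. A minimal wiring sketch follows; it assumes the store comes from @graffy/core and that the returned handler is attached to a plain Node.js HTTP server's 'upgrade' event (the latter matches the (request, socket, head) signature used in the source below).

// Usage sketch — @graffy/core and the provider setup are assumptions;
// only server(store) and the upgrade handler come from this package.
const http = require('http');
const Graffy = require('@graffy/core'); // assumed companion package
const server = require('@graffy/server');

const store = new Graffy();
// store.use(...) — attach data providers here (omitted).

const handleUpgrade = server(store); // async (request, socket, head) => ...

const httpServer = http.createServer();
httpServer.on('upgrade', handleUpgrade); // delegate WebSocket upgrades to Graffy
httpServer.listen(8443);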
"use strict"; var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); exports.__esModule = true; exports["default"] = server; var _forEach = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/for-each")); var _setInterval2 = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/set-interval")); var _regenerator = _interopRequireDefault(require("@babel/runtime-corejs3/regenerator")); var _extends2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/extends")); var _stringify = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/json/stringify")); var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/asyncToGenerator")); var _asyncIterator2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/asyncIterator")); var _ws = _interopRequireDefault(require("ws")); var PING_INTERVAL = 30000; function server(store) { if (!store) throw new Error('server.store_undef'); var wss = new _ws["default"].Server({ noServer: true }); wss.on('connection', function connection(ws) { ws.graffyStreams = {}; // We use this to keep track of streams to close. ws.on('message', /*#__PURE__*/ function () { var _message = (0, _asyncToGenerator2["default"])( /*#__PURE__*/ _regenerator["default"].mark(function _callee(msg) { var _JSON$parse, id, op, payload, options, result, stream, _iteratorNormalCompletion, _didIteratorError, _iteratorError, _iterator, _step, _value, value; return _regenerator["default"].wrap(function _callee$(_context) { while (1) { switch (_context.prev = _context.next) { case 0: _context.prev = 0; _JSON$parse = JSON.parse(msg), id = _JSON$parse[0], op = _JSON$parse[1], payload = _JSON$parse[2], options = _JSON$parse[3]; if (!(id === ':pong')) { _context.next = 5; break; } ws.pingPending = false; return _context.abrupt("return"); case 5: _context.t0 = op; _context.next = _context.t0 === 'read' ? 8 : _context.t0 === 'write' ? 8 : _context.t0 === 'watch' ? 19 : _context.t0 === 'unwatch' ? 
61 : 66; break; case 8: _context.prev = 8; _context.next = 11; return store.call(op, payload, options); case 11: result = _context.sent; ws.send((0, _stringify["default"])([id, null, result])); _context.next = 18; break; case 15: _context.prev = 15; _context.t1 = _context["catch"](8); ws.send((0, _stringify["default"])([id, _context.t1.message])); case 18: return _context.abrupt("break", 66); case 19: _context.prev = 19; stream = store.call('watch', payload, (0, _extends2["default"])({}, options, { raw: true })); ws.graffyStreams[id] = stream; _iteratorNormalCompletion = true; _didIteratorError = false; _context.prev = 24; _iterator = (0, _asyncIterator2["default"])(stream); case 26: _context.next = 28; return _iterator.next(); case 28: _step = _context.sent; _iteratorNormalCompletion = _step.done; _context.next = 32; return _step.value; case 32: _value = _context.sent; if (_iteratorNormalCompletion) { _context.next = 39; break; } value = _value; ws.send((0, _stringify["default"])([id, null, value])); case 36: _iteratorNormalCompletion = true; _context.next = 26; break; case 39: _context.next = 45; break; case 41: _context.prev = 41; _context.t2 = _context["catch"](24); _didIteratorError = true; _iteratorError = _context.t2; case 45: _context.prev = 45; _context.prev = 46; if (!(!_iteratorNormalCompletion && _iterator["return"] != null)) { _context.next = 50; break; } _context.next = 50; return _iterator["return"](); case 50: _context.prev = 50; if (!_didIteratorError) { _context.next = 53; break; } throw _iteratorError; case 53: return _context.finish(50); case 54: return _context.finish(45); case 55: _context.next = 60; break; case 57: _context.prev = 57; _context.t3 = _context["catch"](19); ws.send((0, _stringify["default"])([id, _context.t3.message])); case 60: return _context.abrupt("break", 66); case 61: if (ws.graffyStreams[id]) { _context.next = 63; break; } return _context.abrupt("break", 66); case 63: ws.graffyStreams[id]["return"](); delete ws.graffyStreams[id]; return _context.abrupt("break", 66); case 66: _context.next = 71; break; case 68: _context.prev = 68; _context.t4 = _context["catch"](0); ws.close(); case 71: case "end": return _context.stop(); } } }, _callee, null, [[0, 68], [8, 15], [19, 57], [24, 41, 45, 55], [46,, 50, 54]]); })); function message(_x) { return _message.apply(this, arguments); } return message; }()); ws.on('close', function () { for (var id in ws.graffyStreams) { ws.graffyStreams[id]["return"](); delete ws.graffyStreams[id]; } }); }); (0, _setInterval2["default"])(function ping() { var _context2; (0, _forEach["default"])(_context2 = wss.clients).call(_context2, function each(ws) { if (ws.pingPending) return ws.terminate(); ws.pingPending = true; ws.send((0, _stringify["default"])([':ping'])); }); }, PING_INTERVAL); return ( /*#__PURE__*/ function () { var _ref = (0, _asyncToGenerator2["default"])( /*#__PURE__*/ _regenerator["default"].mark(function _callee2(request, socket, head) { return _regenerator["default"].wrap(function _callee2$(_context3) { while (1) { switch (_context3.prev = _context3.next) { case 0: wss.handleUpgrade(request, socket, head, function done(ws) { wss.emit('connection', ws, request); }); case 1: case "end": return _context3.stop(); } } }, _callee2); })); return function (_x2, _x3, _x4) { return _ref.apply(this, arguments); }; }() ); } module.exports = exports.default;