tedious
Version:
A TDS driver for connecting to Microsoft SQL Server databases.
121 lines (92 loc) • 3.83 kB
JavaScript
// Generated by CoffeeScript 1.7.1
// Forward declarations plus CoffeeScript-generated inheritance helpers:
// __extends copies own static properties from parent onto child and wires
// child.prototype to inherit from parent.prototype, recording the parent as
// child.__super__ (classic prototypal subclassing).
var EventEmitter, Parser, ReadableTrackingBuffer, TYPE, tokenParsers,
__hasProp = {}.hasOwnProperty,
__extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; };
// Buffer wrapper that tracks a read position; used below for incremental
// token parsing (its reads throw with code 'oob' on underrun — see nextToken).
ReadableTrackingBuffer = require('../tracking-buffer/tracking-buffer').ReadableTrackingBuffer;
EventEmitter = require('events').EventEmitter;
// TDS token type codes — one identifying byte per token on the wire.
TYPE = require('./token').TYPE;
// Dispatch table: token type byte -> function that parses that token's
// payload out of the tracking buffer.
tokenParsers = {};
tokenParsers[TYPE.COLMETADATA] = require('./colmetadata-token-parser');
tokenParsers[TYPE.DONE] = require('./done-token-parser').doneParser;
tokenParsers[TYPE.DONEINPROC] = require('./done-token-parser').doneInProcParser;
tokenParsers[TYPE.DONEPROC] = require('./done-token-parser').doneProcParser;
tokenParsers[TYPE.ENVCHANGE] = require('./env-change-token-parser');
tokenParsers[TYPE.ERROR] = require('./infoerror-token-parser').errorParser;
tokenParsers[TYPE.INFO] = require('./infoerror-token-parser').infoParser;
tokenParsers[TYPE.LOGINACK] = require('./loginack-token-parser');
tokenParsers[TYPE.ORDER] = require('./order-token-parser');
tokenParsers[TYPE.RETURNSTATUS] = require('./returnstatus-token-parser');
tokenParsers[TYPE.RETURNVALUE] = require('./returnvalue-token-parser');
tokenParsers[TYPE.ROW] = require('./row-token-parser');
tokenParsers[TYPE.NBCROW] = require('./nbcrow-token-parser');
tokenParsers[TYPE.SSPI] = require('./sspi-token-parser');
/*
Buffers are thrown at the parser (by calling addBuffer).
Tokens are parsed from the buffer until there are no more tokens in
the buffer, or there is just a partial token left.
If there is a partial token left over, then it is kept until another
buffer is added, which should contain the remainder of the partial
token, along with (perhaps) more tokens.
The partial token and the new buffer are concatenated, and the token
parsing resumes.
*/
Parser = (function(_super) {
  __extends(Parser, _super);

  /*
  Incremental TDS token-stream parser. Feed raw packet buffers in via
  addBuffer; complete tokens are emitted as events (token.event) on this
  EventEmitter. A trailing partial token is kept in the buffer until the
  next addBuffer call supplies the rest of its bytes.
  */
  function Parser(debug, colMetadata, options) {
    this.debug = debug;
    // Column metadata from the most recent COLMETADATA token; row parsers
    // need it to decode ROW/NBCROW payloads.
    this.colMetadata = colMetadata;
    this.options = options;
    // Accumulated unparsed bytes; starts empty.
    this.buffer = new ReadableTrackingBuffer(new Buffer(0), 'ucs2');
    // Offset of the last fully-parsed token boundary within this.buffer.
    this.position = 0;
  }

  // Append a buffer and parse as many complete tokens as possible.
  // Afterwards the tracking buffer is rewound to the last complete-token
  // boundary so a partial token is re-read when more data arrives.
  Parser.prototype.addBuffer = function(buffer) {
    this.buffer.add(buffer);
    this.position = this.buffer.position;
    var parsedOne = this.nextToken();
    while (parsedOne) {
      parsedOne = this.nextToken();
    }
    return this.buffer.position = this.position;
  };

  // True when every byte added so far has been consumed.
  Parser.prototype.isEnd = function() {
    return this.buffer.empty();
  };

  // Attempt to parse one token. Returns true when a complete token was
  // parsed (and emitted), false when out of data, the token is incomplete,
  // or the stream is unrecognizable.
  Parser.prototype.nextToken = function() {
    var tokenType, token, err;
    try {
      // No unread bytes left — nothing to do.
      if (this.buffer.buffer.length <= this.buffer.position) {
        return false;
      }
      tokenType = this.buffer.readUInt8();
      if (!tokenParsers[tokenType]) {
        // Unknown token type: the stream cannot be parsed any further.
        this.emit('tokenStreamError', "Unrecognized token " + tokenType + " at offset " + this.buffer.position);
        return false;
      }
      token = tokenParsers[tokenType](this.buffer, this.colMetadata, this.options);
      if (!token) {
        // Parser could not produce a token from the available bytes.
        return false;
      }
      this.debug.token(token);
      // Remember this boundary so addBuffer can rewind past a later
      // partial token.
      this.position = this.buffer.position;
      if (token.event) {
        this.emit(token.event, token);
      }
      if (token.name === 'COLMETADATA') {
        // Subsequent row tokens are decoded against these columns.
        this.colMetadata = token.columns;
      }
      return true;
    } catch (_err) {
      err = _err;
      if ((err != null ? err.code : void 0) === 'oob') {
        // Ran out of bytes mid-token; wait for the next buffer.
        return false;
      }
      this.emit('tokenStreamError', err);
      return false;
    }
  };

  return Parser;

})(EventEmitter);
// Public API: the token-stream Parser class.
exports.Parser = Parser;