jsforce

Salesforce API Library for JavaScript

/**
 * @file Represents stream that handles Salesforce record as stream data
 * @author Shinichi Tomita <shinichi.tomita@gmail.com>
 */
'use strict';

var events = require('events'),
    stream = require('readable-stream'),
    Duplex = stream.Duplex,
    Transform = stream.Transform,
    PassThrough = stream.PassThrough,
    inherits = require('inherits'),
    _ = require('lodash/core'),
    CSV = require('./csv');

/**
 * Class for Record Stream
 *
 * @class
 * @constructor
 * @extends stream.Transform
 */
var RecordStream = module.exports = function() {
  RecordStream.super_.call(this, { objectMode: true });
};

inherits(RecordStream, Transform);

/*
 * @override
 */
RecordStream.prototype._transform = function(record, enc, callback) {
  this.emit('record', record);
  this.push(record);
  callback();
};

/**
 * Get record stream of queried records, applying the given mapping function
 *
 * @param {RecordMapFunction} fn - Record mapping function
 * @returns {RecordStream}
 */
RecordStream.prototype.map = function(fn) {
  return this.pipe(RecordStream.map(fn));
};

/**
 * Get record stream of queried records, applying the given filter function
 *
 * @param {RecordFilterFunction} fn - Record filtering function
 * @returns {RecordStream}
 */
RecordStream.prototype.filter = function(fn) {
  return this.pipe(RecordStream.filter(fn));
};

/**
 * @class RecordStream.Serializable
 * @extends {RecordStream}
 */
var Serializable = RecordStream.Serializable = function() {
  Serializable.super_.call(this);
  this._dataStream = null;
};

inherits(Serializable, RecordStream);

/**
 * Create readable data stream which emits serialized record data
 *
 * @param {String} [type] - Type of outgoing data format. Currently 'csv' is the default and the only format supported.
 * @param {Object} [options] - Options passed to the converter
 * @returns {stream.Readable}
 */
Serializable.prototype.stream = function(type, options) {
  type = type || 'csv';
  var converter = DataStreamConverters[type];
  if (!converter) {
    throw new Error('Converting [' + type + '] data stream is not supported.');
  }
  if (!this._dataStream) {
    this._dataStream = new PassThrough();
    this.pipe(converter.serialize(options))
        .pipe(this._dataStream);
  }
  return this._dataStream;
};
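/*
 * Usage sketch (illustrative; not part of the upstream jsforce source):
 * record objects written into a Serializable stream come out of
 * `.stream('csv')` as serialized CSV data. The require path, file path and
 * record values below are hypothetical.
 *
 *   var fs = require('fs');
 *   var RecordStream = require('./record-stream');
 *
 *   var serializable = new RecordStream.Serializable();
 *   serializable.stream('csv').pipe(fs.createWriteStream('./accounts.csv'));
 *
 *   serializable.write({ Id: '001xx000003DGb0AAG', Name: 'Acme' });
 *   serializable.write({ Id: '001xx000003DGb1AAG', Name: 'Global Media' });
 *   serializable.end();
 */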
/**
 * @class RecordStream.Parsable
 * @extends {RecordStream}
 */
var Parsable = RecordStream.Parsable = function() {
  Parsable.super_.call(this);
  this._dataStream = null;
};

inherits(Parsable, RecordStream);

/**
 * Create writable data stream which accepts serialized record data
 *
 * @param {String} [type] - Type of incoming data format. Currently 'csv' is the default and the only format supported.
 * @param {Object} [options] - Options passed to the converter
 * @returns {stream.Writable}
 */
Parsable.prototype.stream = function(type, options) {
  type = type || 'csv';
  var converter = DataStreamConverters[type];
  var self = this;
  if (!converter) {
    throw new Error('Converting [' + type + '] data stream is not supported.');
  }
  if (!this._dataStream) {
    this._dataStream = new PassThrough();
    this._parserStream = converter.parse(options).on('error', function(error) {
      // Re-emit parser errors on the record stream itself, not on the parser,
      // so consumers listening on this stream can handle them.
      self.emit('error', error);
    });
    this._parserStream.pipe(this).pipe(new PassThrough({
      objectMode: true,
      highWaterMark: ( 500 * 1000 )
    }));
  }
  return this._dataStream;
};

/* @override */
Parsable.prototype.on = function(ev, fn) {
  if (ev === 'readable' || ev === 'record') {
    // Defer feeding the parser until a consumer actually starts listening
    this._dataStream.pipe(this._parserStream);
  }
  return Parsable.super_.prototype.on.call(this, ev, fn);
};

/* @override */
Parsable.prototype.addListener = Parsable.prototype.on;

/* --------------------------------------------------- */

/**
 * @callback RecordMapFunction
 * @param {Record} record - Source record to map
 * @returns {Record}
 */

/**
 * Create a record stream which maps records and passes them downstream
 *
 * @param {RecordMapFunction} fn - Record mapping function
 * @returns {RecordStream.Serializable}
 */
RecordStream.map = function(fn) {
  var mapStream = new RecordStream.Serializable();
  mapStream._transform = function(record, enc, callback) {
    var rec = fn(record) || record; // if the function returns nothing, pass the record through unchanged
    this.push(rec);
    callback();
  };
  return mapStream;
};

/**
 * Create mapping stream using given record template
 *
 * @param {Record} record - Mapping record object. In a mapping field value, template notation can be used to refer to a field value in the source record, unless the noeval param is true.
 * @param {Boolean} [noeval] - Disable template evaluation in mapping record.
 * @returns {RecordStream.Serializable}
 */
RecordStream.recordMapStream = function(record, noeval) {
  return RecordStream.map(function(rec) {
    var mapped = { Id: rec.Id };
    for (var prop in record) {
      mapped[prop] = noeval ? record[prop] : evalMapping(record[prop], rec);
    }
    return mapped;
  });

  function evalMapping(value, mapping) {
    if (_.isString(value)) {
      var m = /^\$\{(\w+)\}$/.exec(value);
      if (m) { return mapping[m[1]]; }
      return value.replace(/\$\{(\w+)\}/g, function($0, prop) {
        var v = mapping[prop];
        return _.isNull(v) || _.isUndefined(v) ? "" : String(v);
      });
    } else {
      return value;
    }
  }
};

/**
 * @callback RecordFilterFunction
 * @param {Record} record - Source record to filter
 * @returns {Boolean}
 */

/**
 * Create a record stream which filters records and passes them downstream
 *
 * @param {RecordFilterFunction} fn - Record filtering function
 * @returns {RecordStream.Serializable}
 */
RecordStream.filter = function(fn) {
  var filterStream = new RecordStream.Serializable();
  filterStream._transform = function(record, enc, callback) {
    if (fn(record)) { this.push(record); }
    callback();
  };
  return filterStream;
};
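/*
 * Usage sketch (illustrative; not part of the upstream jsforce source):
 * a Parsable stream accepts serialized CSV data on the writable stream
 * returned by `.stream('csv')` and emits parsed record objects. Call
 * `.stream()` before attaching a 'record' listener, since the on() override
 * above pipes this._dataStream into the parser at that point. The require
 * path and file path are hypothetical.
 *
 *   var fs = require('fs');
 *   var RecordStream = require('./record-stream');
 *
 *   var parsable = new RecordStream.Parsable();
 *   fs.createReadStream('./accounts.csv').pipe(parsable.stream('csv'));
 *
 *   parsable.on('record', function(record) {
 *     console.log(record); // e.g. { Id: '001xx000003DGb0AAG', Name: 'Acme' }
 *   });
 *   parsable.on('end', function() { console.log('done'); });
 */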
/**
 * @private
 */
function convertRecordForSerialization(record, options) {
  return Object.keys(record).reduce(function(rec, key) {
    var value = rec[key];
    var t = typeof value;
    var urec = {};
    if (key === 'attributes') { // 'attributes' prop will be ignored
      rec = _.extend({}, rec);
      delete rec[key];
    } else if (options.nullValue && value === null) {
      urec[key] = options.nullValue;
      rec = _.extend({}, rec, urec);
    } else if (value !== null && typeof value === 'object') {
      var precord = convertRecordForSerialization(value, options);
      rec = Object.keys(precord).reduce(function(prec, pkey) {
        prec[key + '.' + pkey] = precord[pkey];
        return prec;
      }, _.extend({}, rec));
    }
    return rec;
  }, record);
}

/**
 * Glue two streams together so that data written into the returned stream is
 * routed through s1 into s2, and piping the returned stream reads from s2.
 * @private
 */
function createPipelineStream(s1, s2) {
  var pipeline = new PassThrough();
  pipeline.on('pipe', function(source) {
    source.unpipe(pipeline);
    source.pipe(s1).pipe(s2);
  });
  pipeline.pipe = function(dest, options) {
    return s2.pipe(dest, options);
  };
  return pipeline;
}

/** ---------------------------------------------------------------------- **/

/**
 * @private
 */
var CSVStreamConverter = {
  serialize: function(options) {
    options = options || {};
    return createPipelineStream(
      RecordStream.map(function(record) {
        return convertRecordForSerialization(record, options);
      }),
      CSV.serializeCSVStream(options)
    );
  },
  parse: function(options) {
    return CSV.parseCSVStream(options);
  }
};

/**
 * @private
 */
var DataStreamConverters = RecordStream.DataStreamConverters = {
  csv: CSVStreamConverter
};
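/*
 * Usage sketch (illustrative; not part of the upstream jsforce source):
 * before CSV serialization, each record is flattened by
 * convertRecordForSerialization(): the 'attributes' property is dropped,
 * nested objects become dot-notation keys, and null fields are replaced by
 * options.nullValue when it is set. How the resulting keys turn into CSV
 * headers is up to CSV.serializeCSVStream in ./csv. Values below are made up.
 *
 *   var serializable = new RecordStream.Serializable();
 *   serializable.stream('csv', { nullValue: '#N/A' }).pipe(process.stdout);
 *
 *   serializable.write({
 *     attributes: { type: 'Contact' },   // dropped
 *     Id: '003xx0000000001',
 *     Account: { Name: 'Acme' },         // becomes key "Account.Name"
 *     Phone: null                        // serialized as '#N/A'
 *   });
 *   serializable.end();
 */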