imago-azure-storage
Version:
An opinionated async wrapper around the `azure-storage` package for working with Azure Storage services such as tables and queues.
75 lines (63 loc) • 1.9 kB
JavaScript
const { Readable } = require('stream');
/**
 * Collects all data emitted by a readable stream.
 *
 * NOTE(review): despite the name, this resolves with a Buffer, not a string
 * (kept as-is for backward compatibility); call `.toString()` on the result
 * if a string is needed.
 *
 * @param {Readable} stream - Any readable stream, e.g. a MemoryStream.
 * @returns {Promise<Buffer>} Resolves with the concatenated stream contents;
 *   rejects with the stream's 'error'.
 */
function readStreamAsString(stream) {
  const bufferParts = [];
  // TODO FIXME - if the stream never emits 'end' or 'error', the promise
  // never settles. Should we have a timeout?
  return new Promise((resolve, reject) => {
    stream.on('data', (chunk) => {
      // Streams with an encoding set emit strings; Buffer.concat would
      // throw on those, so normalize every chunk to a Buffer.
      bufferParts.push(typeof chunk === 'string' ? Buffer.from(chunk) : chunk);
    });
    stream.on('end', () => {
      resolve(Buffer.concat(bufferParts));
    });
    stream.on('error', reject);
  });
}
/**
 * Converts a buffer or string into a readable stream.
 * @param {string|Buffer} contentStringOrBuffer - Content to wrap.
 * @returns {[Readable, number]} The stream and the content length in bytes
 *   (the length Azure needs when uploading).
 */
function stringToStream(contentStringOrBuffer) {
  // A Readable must implement _read(); a no-op suffices here because all
  // the data is pushed up front (avoids ERR_METHOD_NOT_IMPLEMENTED if the
  // stream machinery ever asks for more data).
  const dataStream = new Readable({ read() {} });
  // For strings, `content.length` counts UTF-16 code units, not bytes, so
  // multi-byte characters would yield a too-small length and Azure would
  // create an incomplete file. Converting to a Buffer gives the byte length.
  const contentBuffer = Buffer.from(contentStringOrBuffer);
  dataStream.push(contentBuffer);
  dataStream.push(null); // end the stream
  return [dataStream, contentBuffer.length];
}
/**
 * Converts the array of objects, each of which is in Azure Table's internal
 * format, into an array of normal JavaScript objects where each key is a
 * column name.
 * @param {object[]} data - Rows in Azure Table internal format.
 * @returns {Promise<object[]>} One plain object per input row.
 */
async function azureTablesArrayToObject(data) {
  // Delegate per-row conversion; async is kept so callers using .then()
  // keep working.
  return data.map((row) => azureTablesRowToObject(row));
}
/**
 * Converts a single row from Azure Table's internal format (each column is
 * an object whose `_` property holds the value) into a plain object keyed
 * by column name. The `.metadata` entry is dropped.
 * @param {object} row - A row in Azure Table internal format.
 * @returns {object} Plain object mapping column names to values.
 */
function azureTablesRowToObject(row) {
  const entries = Object.entries(row)
    .filter(([column]) => column !== '.metadata')
    .map(([column, value]) => [column, value._]);
  return Object.fromEntries(entries);
}
// Public API: Azure Table row converters plus stream/buffer helpers.
module.exports = {
  azureTablesArrayToObject,
  azureTablesRowToObject,
  readStreamAsString,
  stringToStream,
};