// molstar — a comprehensive macromolecular library (compiled JavaScript output).
/**
* Copyright (c) 2018-2020 mol* contributors, licensed under MIT, See LICENSE file for more info.
*
* @author David Sehnal <david.sehnal@gmail.com>
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.StructureCache = exports.StructureSourceType = void 0;
exports.createStructureWrapperFromJobEntry = createStructureWrapperFromJobEntry;
exports.readDataAndFrame = readDataAndFrame;
exports.readStructureWrapper = readStructureWrapper;
exports.resolveStructure = resolveStructure;
exports.resolveStructures = resolveStructures;
const tslib_1 = require("tslib");
const structure_1 = require("../../../mol-model/structure");
const performance_monitor_1 = require("../../../mol-util/performance-monitor");
const cache_1 = require("./cache");
const config_1 = require("../config");
const cif_1 = require("../../../mol-io/reader/cif");
const util = tslib_1.__importStar(require("util"));
const fs = tslib_1.__importStar(require("fs"));
const zlib = tslib_1.__importStar(require("zlib"));
const console_logger_1 = require("../../../mol-util/console-logger");
const mmcif_1 = require("../../../mol-model-formats/structure/mmcif");
const fetch_retry_1 = require("../utils/fetch-retry");
const mol_task_1 = require("../../../mol-task");
require('util.promisify').shim();
// Discriminates how a structure wrapper was obtained. This is the standard
// TypeScript-compiled numeric enum pattern: each member gets a numeric value
// plus a reverse mapping (value -> name) on the same object.
var StructureSourceType;
(function (StructureSourceType) {
    StructureSourceType[StructureSourceType["File"] = 0] = "File";
    StructureSourceType[StructureSourceType["Cache"] = 1] = "Cache";
})(StructureSourceType || (exports.StructureSourceType = StructureSourceType = {}));
/**
 * Returns a structure wrapper for the given job entry, consulting the
 * in-memory StructureCache first when caching is allowed and enabled
 * (cacheMaxSizeInBytes > 0 in the server config). Freshly read wrappers
 * are added back to the cache under the same conditions.
 */
async function createStructureWrapperFromJobEntry(entry, propertyProvider, allowCache = true) {
    const cacheEnabled = allowCache && config_1.ModelServerConfig.cacheMaxSizeInBytes > 0;
    if (cacheEnabled) {
        const cached = exports.StructureCache.get(entry.key);
        if (cached)
            return cached;
    }
    const wrapper = await readStructureWrapper(entry.key, entry.sourceId, entry.entryId, entry.job.id, propertyProvider);
    if (cacheEnabled) {
        exports.StructureCache.add(wrapper);
    }
    return wrapper;
}
// Module-level cache of structure wrappers (see ./cache), keyed by entry key
// and sized by each wrapper's approximate byte size.
exports.StructureCache = new cache_1.Cache(s => s.key, s => s.approximateSize);
// Shared timer used to record read/parse/createModel durations for info blocks.
const perf = new performance_monitor_1.PerformanceMonitor();
// Promisified Node callback APIs for async file reads and gzip inflation.
const readFileAsync = util.promisify(fs.readFile);
const unzipAsync = util.promisify(zlib.unzip);
/**
 * Reads a structure file from disk, transparently inflating `.gz` inputs.
 * Files whose name contains `.bcif` are treated as BinaryCIF and returned
 * as a Uint8Array; everything else is returned as a UTF-8 string.
 * @returns {{ data: Uint8Array | string, isBinary: boolean }}
 */
async function readFile(filename) {
    const isGz = /\.gz$/i.test(filename);
    if (filename.match(/\.bcif/)) {
        let input = await readFileAsync(filename);
        if (isGz)
            input = await unzipAsync(input);
        // Copy the Buffer into a standalone Uint8Array (the typed-array
        // constructor copies elements), replacing the original manual
        // byte-by-byte loop; a Buffer may be a view into a shared pool.
        const data = new Uint8Array(input);
        return { data, isBinary: true };
    }
    else {
        if (isGz) {
            const data = await unzipAsync(await readFileAsync(filename));
            return { data: data.toString('utf8'), isBinary: false };
        }
        return { data: await readFileAsync(filename, 'utf8'), isBinary: false };
    }
}
/**
 * Runs the CIF parser on text or binary data and returns the parse result.
 * @throws the parser's result object when parsing fails.
 */
async function parseCif(data) {
    const parsed = await cif_1.CIF.parse(data).run();
    if (parsed.isError)
        throw parsed;
    return parsed.result;
}
/**
 * Reads a structure file from disk and parses its first CIF block, recording
 * read/parse timings on the module-level performance monitor.
 * @throws Error (after logging) when the file cannot be read.
 */
async function readDataAndFrame(filename, key) {
    perf.start('read');
    let fileContent;
    try {
        fileContent = await readFile(filename);
    }
    catch (e) {
        console_logger_1.ConsoleLogger.error(key || filename, '' + e);
        throw new Error(`Could not read the file for '${key || filename}' from disk.`);
    }
    perf.end('read');
    const { data, isBinary } = fileContent;
    perf.start('parse');
    const frame = (await parseCif(data)).blocks[0];
    perf.end('parse');
    return { data, frame, isBinary };
}
/**
 * Fetches a structure over HTTP (with retry), optionally gunzips it, and
 * parses the first CIF block. Formats starting with 'bcif' are treated as
 * binary; formats ending in '.gz' are inflated first.
 * @throws Error (after logging) when the fetch or decompression fails.
 */
async function fetchDataAndFrame(jobId, uri, format, key) {
    perf.start('read');
    const isBinary = format.startsWith('bcif');
    let data;
    try {
        console_logger_1.ConsoleLogger.logId(jobId, 'Fetch', `${uri}`);
        const response = await (0, fetch_retry_1.fetchRetry)(uri, 500, 3, () => console_logger_1.ConsoleLogger.logId(jobId, 'Fetch', `Retrying to fetch '${uri}'`));
        if (format.endsWith('.gz')) {
            const input = await unzipAsync(await response.arrayBuffer());
            // Copy the inflated Buffer into a standalone Uint8Array (the
            // typed-array constructor copies elements), replacing the
            // original manual byte-by-byte copy loop.
            data = isBinary ? new Uint8Array(input) : input.toString('utf8');
        }
        else {
            data = isBinary ? new Uint8Array(await response.arrayBuffer()) : await response.text();
        }
    }
    catch (e) {
        console_logger_1.ConsoleLogger.error(key || uri, '' + e);
        throw new Error(`Could not fetch the file for '${key || uri}'.`);
    }
    perf.end('read');
    perf.start('parse');
    const frame = (await parseCif(data)).blocks[0];
    perf.end('parse');
    return { data, frame, isBinary };
}
/**
 * Resolves the data source for an entry and dispatches to a remote fetch
 * (http/https/ftp/gs URIs) or a local disk read. The '_local_' source id
 * means entryId is already a filesystem path.
 * @throws Error when the source cannot be mapped or the local file is missing.
 */
function readOrFetch(jobId, key, sourceId, entryId) {
    const mapped = sourceId === '_local_' ? [entryId] : (0, config_1.mapSourceAndIdToFilename)(sourceId, entryId);
    if (!mapped)
        throw new Error(`Could not map '${key}' for a resource.`); // fixed typo: was "Cound not map"
    const uri = mapped[0].toLowerCase();
    if (uri.startsWith('http://') || uri.startsWith('https://') || uri.startsWith('ftp://') || uri.startsWith('gs://')) {
        return fetchDataAndFrame(jobId, mapped[0], (mapped[1] || 'cif').toLowerCase(), key);
    }
    if (!fs.existsSync(mapped[0]))
        throw new Error(`Could not find source file for '${key}'.`);
    return readDataAndFrame(mapped[0], key);
}
/**
 * Reads (or fetches) the entry, builds a Model for every trajectory frame,
 * and packages them together with timing info into a structure wrapper.
 * The wrapper starts with an empty structureModelMap and a fresh
 * prototype-less property cache.
 */
async function readStructureWrapper(key, sourceId, entryId, jobId, propertyProvider) {
    const { data, frame, isBinary } = await readOrFetch(jobId || '', key, sourceId, entryId);
    perf.start('createModel');
    const trajectory = await (0, mmcif_1.trajectoryFromMmCIF)(frame).run();
    perf.end('createModel');
    const models = [];
    const modelMap = new Map();
    for (let idx = 0; idx < trajectory.frameCount; idx++) {
        const model = await mol_task_1.Task.resolveInContext(trajectory.getFrameAtIndex(idx));
        models.push(model);
        modelMap.set(model.modelNum, model);
    }
    // 2 bytes per char for JS strings (UTF-16), 1 byte per element for binary.
    const approximateSize = typeof data === 'string' ? 2 * data.length : data.length;
    return {
        info: {
            sourceType: StructureSourceType.File,
            readTime: perf.time('read'),
            parseTime: perf.time('parse'),
            createModelTime: perf.time('createModel'),
            attachPropsTime: 0, // perf.time('attachProps'),
            sourceId,
            entryId
        },
        isBinary,
        key,
        approximateSize,
        models,
        modelMap,
        structureModelMap: new Map(),
        cifFrame: frame,
        propertyProvider,
        cache: Object.create(null)
    };
}
/**
 * Creates (or returns a memoized) Structure for the given model number.
 * Defaults to the wrapper's first model when modelNum is omitted; returns
 * undefined when the model number is unknown. Custom properties from the
 * wrapper's propertyProvider are attached best-effort (see tryAttach).
 */
async function resolveStructure(wrapper, modelNum) {
    if (typeof modelNum === 'undefined')
        modelNum = wrapper.models[0].modelNum;
    if (wrapper.structureModelMap.has(modelNum))
        return wrapper.structureModelMap.get(modelNum);
    if (!wrapper.modelMap.has(modelNum)) {
        return void 0;
    }
    const model = wrapper.modelMap.get(modelNum);
    const structure = structure_1.Structure.ofModel(model);
    if (wrapper.propertyProvider) {
        const modelProps = wrapper.propertyProvider(model, wrapper.cache);
        for (const p of modelProps) {
            await tryAttach(wrapper.key, p);
        }
    }
    // Fix: memoize the result. structureModelMap was consulted above but
    // never populated anywhere, so every call rebuilt the structure and
    // re-attached custom properties.
    wrapper.structureModelMap.set(modelNum, structure);
    return structure;
}
/**
 * Resolves structures for the requested model numbers (all of the wrapper's
 * models when modelNums is omitted), skipping any that do not resolve.
 */
async function resolveStructures(wrapper, modelNums) {
    const requested = modelNums || wrapper.models.map(m => m.modelNum);
    const structures = [];
    for (const modelNum of requested) {
        const structure = await resolveStructure(wrapper, modelNum);
        if (structure)
            structures.push(structure);
    }
    return structures;
}
/**
 * Awaits a custom-property attachment promise, logging (never rethrowing)
 * any failure so a single bad property provider cannot break resolution.
 */
async function tryAttach(key, promise) {
    try {
        await promise;
    }
    catch (err) {
        console_logger_1.ConsoleLogger.errorId(key, 'Custom prop:' + err);
    }
}