UNPKG

molstar

Version:

A comprehensive macromolecular library.

114 lines 5.29 kB
"use strict";
/**
 * Copyright (c) 2020 mol* contributors, licensed under MIT, See LICENSE file for more info.
 *
 * Adapted from NGL.
 *
 * @author David Sehnal <david.sehnal@gmail.com>
 * @author Alexander Rose <alexander.rose@weirdbyte.de>
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.parseCube = void 0;
var tslib_1 = require("tslib");
var linear_algebra_1 = require("../../../mol-math/linear-algebra");
var tokenizer_1 = require("../common/text/tokenizer");
var db_1 = require("../../../mol-data/db");
var mol_task_1 = require("../../../mol-task");
var result_1 = require("../result");
var number_parser_1 = require("../common/text/number-parser");
// Cube files store coordinates in Bohr; convert to Angstrom on read.
var bohrToAngstromFactor = 0.529177210859;
/**
 * Parse the 6-line cube header plus the atom table that follows it.
 * Header layout: two comment lines, then four "count x y z" lines
 * (origin + three voxel axes). A negative atom count marks an
 * orbital cube, in which case an extra line lists the data-set ids.
 */
function readHeader(tokenizer) {
    var lines = tokenizer_1.Tokenizer.readLines(tokenizer, 6);
    // Numeric field `col` of header line `row`; malformed fields read as 0.
    var num = function (row, col) {
        var v = +lines[row].trim().split(/\s+/g)[col];
        return Number.isNaN(v) ? 0 : v;
    };
    // Axis record i (0 = origin, 1..3 = voxel axes): returns
    // [absolute count, scaled vector, raw signed count].
    var axis = function (i) {
        var n = num(i + 2, 0);
        var s = bohrToAngstromFactor;
        return [Math.abs(n), linear_algebra_1.Vec3.create(num(i + 2, 1) * s, num(i + 2, 2) * s, num(i + 2, 3) * s), n];
    };
    var comment1 = lines[0].trim();
    var comment2 = lines[1].trim();
    var originRec = axis(0), atomCount = originRec[0], origin = originRec[1], rawAtomCount = originRec[2];
    var xRec = axis(1), NVX = xRec[0], basisX = xRec[1];
    var yRec = axis(2), NVY = yRec[0], basisY = yRec[1];
    var zRec = axis(3), NVZ = zRec[0], basisZ = zRec[1];
    var atoms = readAtoms(tokenizer, atomCount, bohrToAngstromFactor);
    var dataSetIds = [];
    if (rawAtomCount >= 0) {
        // Density cube: NVal (field 4 of the origin line) data sets, default 1.
        var nVal = num(2, 4);
        if (nVal === 0)
            nVal = 1;
        for (var i = 0; i < nVal; i++)
            dataSetIds.push(i);
    }
    else {
        // Orbital cube: an extra line holds the id count followed by the ids.
        var counts = tokenizer_1.Tokenizer.readLine(tokenizer).trim().split(/\s+/g);
        for (var i = 0, _i = +counts[0]; i < _i; i++)
            dataSetIds.push(+counts[i + 1]);
    }
    var header = {
        orbitals: rawAtomCount < 0,
        comment1: comment1,
        comment2: comment2,
        atomCount: atomCount,
        origin: origin,
        dim: linear_algebra_1.Vec3.create(NVX, NVY, NVZ),
        basisX: basisX,
        basisY: basisY,
        basisZ: basisZ,
        dataSetIds: dataSetIds
    };
    return { header: header, atoms: atoms };
}
/**
 * Read `count` atom rows ("number charge x y z"), scaling the
 * coordinates by `scaleFactor` (Bohr -> Angstrom), into columns.
 */
function readAtoms(tokenizer, count, scaleFactor) {
    var number = new Int32Array(count);
    var value = new Float64Array(count);
    var x = new Float32Array(count);
    var y = new Float32Array(count);
    var z = new Float32Array(count);
    for (var i = 0; i < count; i++) {
        var fields = tokenizer_1.Tokenizer.readLine(tokenizer).trim().split(/\s+/g);
        number[i] = +fields[0];
        value[i] = +fields[1];
        x[i] = +fields[2] * scaleFactor;
        y[i] = +fields[3] * scaleFactor;
        z[i] = +fields[4] * scaleFactor;
    }
    return {
        count: count,
        number: db_1.Column.ofArray({ array: number, schema: db_1.Column.Schema.int }),
        nuclearCharge: db_1.Column.ofArray({ array: value, schema: db_1.Column.Schema.float }),
        x: db_1.Column.ofArray({ array: x, schema: db_1.Column.Schema.float }),
        y: db_1.Column.ofArray({ array: y, schema: db_1.Column.Schema.float }),
        z: db_1.Column.ofArray({ array: z, schema: db_1.Column.Schema.float })
    };
}
/**
 * Read the whitespace-separated grid values (dim.x * dim.y * dim.z
 * per data set) in chunks so the task can report progress.
 */
function readValues(ctx, tokenizer, header) {
    var total = header.dim[0] * header.dim[1] * header.dim[2] * header.dataSetIds.length;
    var chunkSize = 100 * 100 * 100;
    var values = new Float64Array(total);
    var offset = 0;
    return (0, mol_task_1.chunkedSubtask)(ctx, chunkSize, values, function (count, values) {
        var end = Math.min(total, offset + count);
        for (var i = offset; i < end; i++) {
            tokenizer_1.Tokenizer.skipWhitespace(tokenizer);
            tokenizer.tokenStart = tokenizer.position;
            tokenizer_1.Tokenizer.eatValue(tokenizer);
            values[i] = (0, number_parser_1.parseFloat)(tokenizer.data, tokenizer.tokenStart, tokenizer.tokenEnd);
        }
        offset = end;
        // Returning 0 signals completion to chunkedSubtask.
        return end === total ? 0 : chunkSize;
    }, function (ctx, _, i) { return ctx.update({ current: Math.min(i, total), max: total }); });
}
/**
 * Create a Task that parses Gaussian cube text into
 * { header, atoms, values, name } wrapped in a ReaderResult.
 */
function parseCube(data, name) {
    var _this = this;
    return mol_task_1.Task.create('Parse Cube', function (taskCtx) {
        return (0, tslib_1.__awaiter)(_this, void 0, void 0, function () {
            var tokenizer, parsed, header, atoms, values;
            return (0, tslib_1.__generator)(this, function (_s) {
                switch (_s.label) {
                    case 0:
                        return [4 /*yield*/, taskCtx.update('Reading header...')];
                    case 1:
                        _s.sent();
                        tokenizer = (0, tokenizer_1.Tokenizer)(data);
                        parsed = readHeader(tokenizer), header = parsed.header, atoms = parsed.atoms;
                        return [4 /*yield*/, readValues(taskCtx, tokenizer, header)];
                    case 2:
                        values = _s.sent();
                        return [2 /*return*/, result_1.ReaderResult.success({ header: header, atoms: atoms, values: values, name: name })];
                }
            });
        });
    });
}
exports.parseCube = parseCube;
//# sourceMappingURL=parser.js.map