// @itwin/core-frontend — iTwin.js frontend components
// Version: (unspecified)
// 977 lines • 103 kB — JavaScript
/*---------------------------------------------------------------------------------------------
* Copyright (c) Bentley Systems, Incorporated. All rights reserved.
* See LICENSE.md in the project root for license terms and full copyright notice.
*--------------------------------------------------------------------------------------------*/
/** @packageDocumentation
* @module Tiles
*/
import { assert, ByteStream, compareBooleans, compareNumbers, compareStrings, Dictionary, JsonUtils, Logger, utf8ToString, } from "@itwin/core-bentley";
import { Angle, IndexedPolyface, Matrix3d, Point2d, Point3d, Point4d, Range2d, Range3d, Transform, Vector3d, } from "@itwin/core-geometry";
import { BatchType, ColorDef, Feature, FeatureIndex, FeatureIndexType, FeatureTable, FillFlags, GlbHeader, ImageSource, LinePixels, MeshEdge, MeshEdges, MeshPolyline, OctEncodedNormal, OctEncodedNormalPair, PackedFeatureTable, QParams2d, QParams3d, QPoint2dList, QPoint3dList, Quantization, RenderMode, RenderTexture, TextureMapping, TextureTransparency, TileFormat, TileReadStatus, } from "@itwin/core-common";
import { IModelApp } from "../IModelApp";
import { RealityMeshParams } from "../render/RealityMeshParams";
import { Mesh } from "../common/internal/render/MeshPrimitives";
import { Triangle } from "../common/internal/render/Primitives";
import { decodeMeshoptBuffer } from "./internal";
import { DisplayParams } from "../common/internal/render/DisplayParams";
import { FrontendLoggerCategory } from "../common/FrontendLoggerCategory";
import { getImageSourceFormatForMimeType, imageBitmapFromImageSource, imageElementFromImageSource, tryImageElementFromUrl } from "../common/ImageUtil";
import { MeshPrimitiveType } from "../common/internal/render/MeshPrimitive";
import { getGltfNodeMeshIds, GltfDataType, gltfDictionaryIterator, GltfMeshMode, GltfTechniqueState, GltfWrapMode, isGltf1Material, traverseGltfNodes, } from "../common/gltf/GltfSchema";
import { createGraphicTemplate } from "../internal/render/GraphicTemplateImpl";
import { compactEdgeIterator } from "../common/imdl/CompactEdges";
/**
* A chunk of binary data exposed as a typed array.
* The count member indicates how many elements exist. This may be less than this.buffer.length due to padding added to the
* binary stream to ensure correct alignment.
* @internal
*/
export class GltfBufferData {
  buffer;
  count;

  constructor(buffer, count) {
    this.buffer = buffer;
    this.count = count;
  }

  /**
   * Create a GltfBufferData of the desired type. The actual type may differ from the desired type -
   * for example, small 32-bit integers may be represented as 8-bit or 16-bit integers instead.
   * Returns undefined if the actual data type is not convertible to the desired type.
   */
  static create(bytes, actualType, expectedType, count) {
    if (expectedType !== actualType) {
      // Some data is stored in smaller data types to save space if no values exceed the maximum of the smaller type.
      if (GltfDataType.Float === expectedType || GltfDataType.UnsignedByte === expectedType)
        return undefined;
      if (GltfDataType.UnsignedShort === expectedType && GltfDataType.UnsignedByte !== actualType)
        return undefined;
      if (GltfDataType.UInt32 === expectedType && GltfDataType.UnsignedByte !== actualType && GltfDataType.UnsignedShort !== actualType)
        return undefined;
    }
    const data = this.createDataBuffer(bytes, actualType);
    return undefined === data ? undefined : new GltfBufferData(data, count);
  }

  /** Reinterpret the bytes as a typed array matching actualType, or undefined for unsupported types.
   * NB: Endianness of typed array data is determined by the 'platform byte order'. Actual data is always little-endian.
   * We are assuming a little-endian platform; a big-endian platform would require a DataView instead.
   */
  static createDataBuffer(bytes, actualType) {
    if (GltfDataType.UnsignedByte === actualType)
      return bytes;
    if (GltfDataType.UnsignedShort === actualType)
      return new Uint16Array(bytes.buffer, bytes.byteOffset, bytes.byteLength / 2);
    if (GltfDataType.UInt32 === actualType)
      return new Uint32Array(bytes.buffer, bytes.byteOffset, bytes.byteLength / 4);
    if (GltfDataType.Float === actualType)
      return new Float32Array(bytes.buffer, bytes.byteOffset, bytes.byteLength / 4);
    return undefined;
  }
}
/**
* A view of a chunk of glTF binary data containing an array of elements of a specific data type.
* The count member indicates how many elements exist; this may be smaller than this.data.length.
* The count member may also indicate the number of elements of a type containing more than one value of the
* underlying type. For example, a buffer of 4 32-bit floating point 'vec2' elements will have a count of 4,
* but its data member will contain 8 32-bit floating point values (2 per vec2).
* The accessor member may contain additional JSON data specific to a particular buffer.
* @internal
*/
class GltfBufferView {
  data;
  count;
  type;
  accessor;
  stride;

  /** Number of bytes in the underlying data chunk. */
  get byteLength() {
    return this.data.length;
  }

  constructor(data, count, type, accessor, stride) {
    // Record the view's raw bytes, element count, component type, source accessor JSON, and element stride.
    Object.assign(this, { data, count, type, accessor, stride });
  }

  /** Expose the view's bytes as a typed array of the desired component type, if convertible. */
  toBufferData(desiredType) {
    return GltfBufferData.create(this.data, this.type, desiredType, this.count);
  }
}
/** Convert a template-producing read result into a graphic-producing one.
 * The `template` property is removed from the result and, if present, instantiated
 * into a RenderGraphic via the supplied render system.
 * @param result the read result; its `template` property (if any) is consumed.
 * @param system render system used to create the graphic from the template.
 * @returns the same result properties (minus `template`) plus a `graphic` property.
 */
function templateToGraphicResult(result, system) {
  // Split the template off without mutating the caller's object; the previous
  // implementation used `delete result.template`, which mutated the input.
  const { template, ...rest } = result;
  return {
    ...rest,
    graphic: template ? system.createGraphicFromTemplate({ template }) : undefined,
  };
}
/** Data required for creating a [[GltfReader]] capable of deserializing [glTF](https://www.khronos.org/gltf/).
* @internal
*/
export class GltfReaderProps {
// glTF specification major version (1 or 2).
version;
// The parsed and normalized glTF JSON document.
glTF;
// True if the source data is Y-up and must be rotated to Z-up when read.
yAxisUp;
// The binary chunk of a .glb container, if present.
binaryData;
// Base URL used to resolve relative URIs, if known.
baseUrl;
constructor(glTF, version, yAxisUp, binaryData, baseUrl) {
this.version = version;
this.glTF = glTF;
this.binaryData = binaryData;
this.yAxisUp = yAxisUp;
this.baseUrl = baseUrl;
}
/** Attempt to construct a new GltfReaderProps from the binary data beginning at the supplied stream's current read position.
 * @param source Either raw bytes (a .glb container or UTF-8-encoded glTF JSON) or an already-parsed glTF JSON object.
 * @param yAxisUp Whether the glTF data is Y-up (default false).
 * @param baseUrl Base URL for resolving relative URIs, if known.
 * @returns the props, or undefined if the source cannot be parsed as glTF containing at least one mesh.
 */
static create(source, yAxisUp = false, baseUrl) {
let version;
let json;
let binaryData;
if (source instanceof Uint8Array) {
// It may be JSON - check for magic indicating glb.
const buffer = ByteStream.fromUint8Array(source);
if (TileFormat.Gltf !== buffer.readUint32()) {
// Not a glb container - try to interpret the bytes as UTF-8 glTF JSON.
try {
const utf8Json = utf8ToString(source);
if (!utf8Json)
return undefined;
json = JSON.parse(utf8Json);
version = 2;
}
catch {
return undefined;
}
}
else {
// glb container: re-read the full header from the start, then extract the JSON (and optional binary) chunks.
buffer.reset();
const header = new GlbHeader(buffer);
if (!header.isValid)
return undefined;
version = header.version;
if (header.binaryChunk)
binaryData = new Uint8Array(source.buffer, source.byteOffset + header.binaryChunk.offset, header.binaryChunk.length);
try {
const jsonBytes = new Uint8Array(source.buffer, source.byteOffset + header.jsonChunk.offset, header.jsonChunk.length);
const jsonStr = utf8ToString(jsonBytes);
if (undefined === jsonStr)
return undefined;
json = JSON.parse(jsonStr);
}
catch {
return undefined;
}
}
}
else {
// Already-parsed JSON object.
version = 2; // ###TODO verify against source.asset?.version
json = source;
}
// asset is required in glTF 2, optional in glTF 1
const asset = JsonUtils.asObject(json.asset);
if (version === 2 && !asset)
return undefined;
// Normalize the top-level glTF properties, discarding any that are not of the expected JSON type.
const glTF = {
asset,
scene: JsonUtils.asString(json.scene),
extensions: JsonUtils.asObject(json.extensions),
extensionsUsed: JsonUtils.asArray(json.extensionsUsed),
extensionsRequired: JsonUtils.asArray(json.extensionsRequired),
accessors: JsonUtils.asObject(json.accessors),
buffers: JsonUtils.asObject(json.buffers),
bufferViews: JsonUtils.asObject(json.bufferViews),
images: JsonUtils.asObject(json.images),
materials: JsonUtils.asObject(json.materials),
meshes: JsonUtils.asObject(json.meshes),
nodes: JsonUtils.asObject(json.nodes),
samplers: JsonUtils.asObject(json.samplers),
scenes: JsonUtils.asObject(json.scenes),
textures: JsonUtils.asObject(json.textures),
techniques: JsonUtils.asObject(json.techniques),
};
// A glTF without meshes cannot produce any graphics - treat it as unreadable.
return glTF.meshes ? new GltfReaderProps(glTF, version, yAxisUp, binaryData, baseUrl) : undefined;
}
}
/** The GltfMeshData contains the raw GLTF mesh data. If the data is suitable to create a [[RealityMesh]] directly, basically in the quantized format produced by
* ContextCapture, then a RealityMesh is created directly from this data. Otherwise, the mesh primitive is populated from the raw data and a MeshPrimitive
* is generated. The MeshPrimitve path is much less efficient but should be rarely used.
*
* @internal
*/
export class GltfMeshData {
primitive; // Populated with vertex and indices only if the mesh cannot be represented as [[RealityMesh]]
// Quantization parameters for the points, used to unquantize when building a polyface.
pointQParams;
// Raw vertex positions, 3 components per point (quantized per pointQParams when present).
points;
// Range of the point data; used when populating the Mesh primitive from typed arrays.
pointRange;
// Oct-encoded normals, one value per vertex.
normals;
// Quantization parameters for the uv coordinates.
uvQParams;
// Raw uv coordinates, 2 components per vertex (quantized per uvQParams when present).
uvs;
uvRange;
// Vertex indices, consumed in triples when triangles/facets are built.
indices;
// Discriminant distinguishing mesh data from point-cloud data (see GltfReader.geometryFromMeshData).
type = "mesh";
constructor(props) {
this.primitive = props;
}
}
// Shared fallback object returned by the GltfReader dictionary getters when the glTF omits a property.
const emptyDict = {};
/** Convert a glTF [r, g, b, a] color (normalized components) to a ColorDef,
 * scaling to 0-255 and inverting alpha into transparency.
 */
function colorFromJson(values) {
  const [r, g, b, a] = values;
  return ColorDef.from(r * 255, g * 255, b * 255, (1.0 - a) * 255);
}
/** Determine the base color of a glTF material, defaulting to white.
 * Checks, in order: glTF 1 material values, the KHR_techniques_webgl extension's u_color,
 * and glTF 2 PBR metallic-roughness baseColorFactor.
 */
function colorFromMaterial(material, isTransparent) {
  let rgba;
  if (isGltf1Material(material)) {
    if (material.values?.color && Array.isArray(material.values.color))
      rgba = material.values.color;
  } else if (material.extensions?.KHR_techniques_webgl?.values?.u_color) {
    rgba = material.extensions.KHR_techniques_webgl.values.u_color;
  } else if (material.pbrMetallicRoughness?.baseColorFactor) {
    rgba = material.pbrMetallicRoughness.baseColorFactor;
  }
  let color = rgba ? colorFromJson(rgba) : ColorDef.white;
  // SPEC: Opaque materials ignore any alpha channel.
  if (!isTransparent)
    color = color.withTransparency(0);
  return color;
}
/** Compose a node's local transform from optional translation, rotation (quaternion), and scale.
 * SPEC: To compose the local transformation matrix, TRS properties MUST be converted to matrices and
 * postmultiplied in the T * R * S order; first the scale is applied to the vertices, then the rotation,
 * and then the translation.
 */
function trsMatrix(translation, rotation, scale, result) {
  const scaleTransform = Transform.createRefs(undefined, scale ? Matrix3d.createScale(scale[0], scale[1], scale[2]) : Matrix3d.identity);
  const rotationTransform = Transform.createRefs(undefined, rotation ? Matrix3d.createFromQuaternion(Point4d.create(rotation[0], rotation[1], rotation[2], rotation[3])) : Matrix3d.identity);
  rotationTransform.matrix.transposeInPlace(); // See comment on Matrix3d.createFromQuaternion
  const translationTransform = Transform.createTranslation(translation ? new Point3d(translation[0], translation[1], translation[2]) : Point3d.createZero());
  // Compose R * S (into `result` if supplied), then apply T on the left.
  const composed = rotationTransform.multiplyTransformTransform(scaleTransform, result);
  translationTransform.multiplyTransformTransform(composed, composed);
  return composed;
}
/** Maintains the cumulative transform while walking the glTF node hierarchy.
 * Each pushed node's local transform is composed with the current top of the stack.
 */
class TransformStack {
  _stack = [];

  constructor(transform) {
    if (transform)
      this._stack.push(transform);
  }

  /** The cumulative transform at the current depth, or undefined if the stack is empty. */
  get transform() {
    const depth = this._stack.length;
    return depth > 0 ? this._stack[depth - 1] : undefined;
  }

  get isEmpty() {
    return 0 === this._stack.length;
  }

  /** Compose the node's local transform (matrix, or TRS properties) with the current top and push the result. */
  push(node) {
    let nodeTransform;
    if (node.matrix) {
      // glTF matrices are column-major; extract the origin and the 3x3 rotation/scale part.
      const m = node.matrix;
      const origin = Point3d.create(m[12], m[13], m[14]);
      const matrix = Matrix3d.createRowValues(m[0], m[4], m[8], m[1], m[5], m[9], m[2], m[6], m[10]);
      nodeTransform = Transform.createOriginAndMatrix(origin, matrix);
    } else if (node.rotation || node.scale || node.translation) {
      nodeTransform = trsMatrix(node.translation, node.rotation, node.scale);
    }

    const parent = this.transform;
    if (!parent)
      this._stack.push(nodeTransform);
    else
      this._stack.push(nodeTransform ? parent.multiplyTransformTransform(nodeTransform) : parent);
  }

  pop() {
    assert(this._stack.length > 0);
    this._stack.pop();
  }
}
/** Ordering for texture cache keys: first by transparency flag, then by id (string or numeric). */
function compareTextureKeys(lhs, rhs) {
  const transparencyDiff = compareBooleans(lhs.isTransparent, rhs.isTransparent);
  if (transparencyDiff !== 0)
    return transparencyDiff;

  // Both ids are expected to be of the same type.
  assert(typeof lhs.id === typeof rhs.id);
  if (typeof lhs.id === "string") {
    assert(typeof rhs.id === "string");
    return compareStrings(lhs.id, rhs.id);
  }

  assert(typeof lhs.id === "number" && typeof rhs.id === "number");
  return compareNumbers(lhs.id, rhs.id);
}
;
;
/** Resolve a mesh's effective list of primitives, honoring the EXT_mesh_primitive_restart extension.
 * Each primitive group in the extension collapses its member primitives into a single primitive using
 * the group's combined index buffer.
 * @internal exported strictly for testing
 */
export function getMeshPrimitives(mesh) {
  const ext = mesh?.extensions?.EXT_mesh_primitive_restart;
  const meshPrimitives = mesh?.primitives;
  if (!meshPrimitives || meshPrimitives.length === 0 || !ext?.primitiveGroups || ext.primitiveGroups.length === 0) {
    return meshPrimitives;
  }
  // Note: per the spec, any violation of the extension's specification should cause us to fall back to mesh.primitives, if detecting the violation is feasible.
  // Start with a copy of mesh.primitives. For each group, replace the first primitive in the group with a primitive representing the entire group,
  // and set the rest of the primitives in the group to `undefined`.
  // This allows us to identify which remaining primitives do not use primitive restart, and any errors involving a primitive appearing in more than one group.
  const primitives = [...meshPrimitives];
  for (const group of ext.primitiveGroups) {
    // Spec: the group must list at least one valid array index into mesh.primitives.
    // (Previously a group with a missing/non-array `primitives` property threw a TypeError; fall back instead.)
    if (!Array.isArray(group.primitives) || group.primitives.length === 0) {
      return meshPrimitives;
    }
    const firstPrimitiveIndex = group.primitives[0];
    if (undefined === firstPrimitiveIndex || !meshPrimitives[firstPrimitiveIndex]) {
      return meshPrimitives;
    }
    const primitive = { ...meshPrimitives[firstPrimitiveIndex], indices: group.indices };
    // Spec: primitive restart only supported for these topologies.
    switch (primitive.mode) {
      case GltfMeshMode.TriangleFan:
      case GltfMeshMode.TriangleStrip:
      case GltfMeshMode.LineStrip:
      case GltfMeshMode.LineLoop:
        break;
      default:
        return meshPrimitives;
    }
    for (const primitiveIndex of group.primitives) {
      const thisPrimitive = primitives[primitiveIndex];
      // Spec: all primitives must use indexed geometry and a given primitive may appear in at most one group.
      // Spec: all primitives must have same topology.
      if (undefined === thisPrimitive?.indices || thisPrimitive.mode !== primitive.mode) {
        return meshPrimitives;
      }
      primitives[primitiveIndex] = undefined;
    }
    primitives[firstPrimitiveIndex] = primitive;
  }
  return primitives.filter((x) => x !== undefined);
}
/** Deserializes [glTF](https://www.khronos.org/gltf/).
* @internal
*/
export class GltfReader {
// The parsed glTF document being read.
_glTF;
// glTF specification version (1 or 2), from the reader props.
_version;
_iModel;
_is3d;
// Render system used to create geometry and graphics.
_system;
// Translation from the CESIUM_RTC extension, if present and non-zero.
_returnToCenter;
// If true, a rotation is applied to convert the glTF's Y-up frame to Z-up (see getTileTransform).
_yAxisUp;
// Base URL for resolving relative URIs, if known.
_baseUrl;
// BatchType of the tile content (e.g., Primary or VolumeClassifier).
_type;
_deduplicateVertices;
// If true, meshes are never converted to RealityMeshParams (see geometryFromMeshData).
_vertexTableRequired;
// Optional callback polled via _isCanceled to abort reading.
_canceled;
// Ids of the nodes comprising the scene to be read.
_sceneNodes;
// Range accumulated while reading meshes when the caller did not supply a content range.
_computedContentRange;
// Cache of resolved textures, keyed by (id, isTransparent).
_resolvedTextures = new Dictionary((lhs, rhs) => compareTextureKeys(lhs, rhs));
_dracoMeshes = new Map();
// Set to true if any point-cloud primitive is encountered while reading.
_containsPointCloud = false;
// Features accumulated from EXT_instance_features, in feature-index order.
_instanceFeatures = [];
_meshFeatures = [];
// Maps an element id back to its feature index, so duplicate element ids reuse one feature.
_instanceElementIdToFeatureId = new Map();
_meshElementIdToFeatureIndex = new Map();
// Property tables used to resolve instance features -- presumably from a structural-metadata extension; TODO confirm.
_structuralMetadata;
// Maps instance property bags to batch/element ids (see readInstanceAttributes).
_idMap;
_tileData;
// The dictionary getters below fall back to a shared empty object when the glTF omits the property.
get _nodes() { return this._glTF.nodes ?? emptyDict; }
get _meshes() { return this._glTF.meshes ?? emptyDict; }
get _accessors() { return this._glTF.accessors ?? emptyDict; }
get _bufferViews() { return this._glTF.bufferViews ?? emptyDict; }
get _materials() { return this._glTF.materials ?? emptyDict; }
get _samplers() { return this._glTF.samplers ?? emptyDict; }
get _textures() { return this._glTF.textures ?? emptyDict; }
get _images() { return this._glTF.images ?? emptyDict; }
get _buffers() { return this._glTF.buffers ?? emptyDict; }
// True if the caller-supplied abort callback reports cancellation.
get _isCanceled() { return undefined !== this._canceled && this._canceled(this); }
get _isVolumeClassifier() { return BatchType.VolumeClassifier === this._type; }
/** Traverse the nodes specified by their Ids, recursing into their child nodes.
 * @param nodeIds The Ids of the nodes to traverse.
 * @throws Error if a node appears more than once during traversal
 */
traverseNodes(nodeIds) {
return traverseGltfNodes(nodeIds, this._nodes, new Set());
}
/** Traverse the nodes specified by their scene, recursing into their child nodes.
 * @throws Error if a node appears more than once during traversal
 */
traverseScene() {
return this.traverseNodes(this._sceneNodes);
}
/** View flag overrides applied to the graphics branch in readGltfAndCreateTemplate; the base implementation supplies none. */
get viewFlagOverrides() {
return undefined;
}
/** Compute the transform applied to the tile's graphics.
 * Composes (in order) the RTC translation or pseudo-RTC bias, an optional Y-up-to-Z-up rotation,
 * and the supplied transform to the tileset root. Returns undefined if no adjustment is needed.
 */
getTileTransform(transformToRoot, pseudoRtcBias) {
let transform;
if (this._returnToCenter || pseudoRtcBias || this._yAxisUp || transformToRoot) {
// The CESIUM_RTC center takes precedence over the ad hoc bias; both yield a pure translation.
if (this._returnToCenter)
transform = Transform.createTranslation(this._returnToCenter.clone());
else if (pseudoRtcBias)
transform = Transform.createTranslationXYZ(pseudoRtcBias.x, pseudoRtcBias.y, pseudoRtcBias.z);
else
transform = Transform.createIdentity();
// Rotate 90 degrees about X to convert the glTF's Y-up frame to Z-up.
if (this._yAxisUp)
transform = transform.multiplyTransformMatrix3d(Matrix3d.createRotationAroundVector(Vector3d.create(1.0, 0.0, 0.0), Angle.createRadians(Angle.piOver2Radians)));
if (transformToRoot)
transform = transformToRoot.multiplyTransformTransform(transform);
}
return transform;
}
readGltfAndCreateGraphics(isLeaf, featureTable, contentRange, transformToRoot, pseudoRtcBias, instances) {
const result = this.readGltfAndCreateTemplate(isLeaf, featureTable, contentRange, true, transformToRoot, pseudoRtcBias, instances);
return templateToGraphicResult(result, this._system);
}
/** Read the glTF scene and assemble a GraphicTemplate along with ranges and metadata.
 * @param isLeaf Whether the tile is a leaf; echoed into the result.
 * @param featureTable Pre-populated feature table, if any; may be replaced if instance or mesh features are read.
 * @param contentRange Content range in tileset space; if omitted, computed (in tile space) while reading.
 * @param noDispose Passed through to the created template.
 * @returns the read status plus, on success, the template, ranges, copyright, and point-cloud flag.
 */
readGltfAndCreateTemplate(isLeaf, featureTable, contentRange, noDispose, transformToRoot, pseudoRtcBias, instances) {
if (this._isCanceled)
return { readStatus: TileReadStatus.Canceled, isLeaf };
// If contentRange was not supplied, we will compute it as we read the meshes.
if (!contentRange)
this._computedContentRange = contentRange = Range3d.createNull();
else
this._computedContentRange = undefined;
// Save feature table model id in case we need to recreate it after reading instances
const featureTableModelId = featureTable?.modelId;
// Flush feature table if instance features are used
if (this._structuralMetadata && this._glTF.extensionsUsed?.includes("EXT_instance_features") && this._idMap) {
featureTable = undefined;
}
// ###TODO this looks like a hack? Why does it assume the first node's transform is special, or that the transform will be specified as a matrix instead of translation+rot+scale?
if (this._returnToCenter || this._nodes[0]?.matrix || (pseudoRtcBias && pseudoRtcBias.magnitude() < 1.0E5))
pseudoRtcBias = undefined;
const transformStack = new TransformStack();
const templateNodes = [];
let readStatus = TileReadStatus.InvalidTileData;
// Read every node in the scene; any failure aborts the whole read.
for (const nodeKey of this._sceneNodes) {
assert(transformStack.isEmpty);
const node = this._nodes[nodeKey];
if (node && TileReadStatus.Success !== (readStatus = this.readTemplateNodes(templateNodes, node, featureTable, transformStack, instances, pseudoRtcBias)))
return { readStatus, isLeaf };
}
// Creates a feature table based on instance features
// The table must be created after reading instances, since the maximum number of features is not known until all instances have been read.
if (this._instanceFeatures.length > 0 && this._idMap) {
featureTable = new FeatureTable(this._instanceFeatures.length, featureTableModelId);
for (let instanceFeatureId = 0; instanceFeatureId < this._instanceFeatures.length; instanceFeatureId++) {
featureTable.insertWithIndex(this._instanceFeatures[instanceFeatureId], instanceFeatureId);
}
}
// NOTE(review): if both instance and mesh features were collected, this replaces the instance-feature
// table built just above -- confirm the two cases are mutually exclusive.
if (this._meshFeatures.length > 0 && this._idMap) {
featureTable = new FeatureTable(this._meshFeatures.length, featureTableModelId);
for (let meshFeatureId = 0; meshFeatureId < this._meshFeatures.length; meshFeatureId++) {
featureTable.insertWithIndex(this._meshFeatures[meshFeatureId], meshFeatureId);
}
}
if (0 === templateNodes.length)
return { readStatus: TileReadStatus.InvalidTileData, isLeaf };
// Compute range in tileset/world space.
let range = contentRange;
const transform = this.getTileTransform(transformToRoot, pseudoRtcBias);
const invTransform = transform?.inverse();
if (invTransform)
range = invTransform.multiplyRange(contentRange);
// The batch range needs to be in tile coordinate space.
// If we computed the content range ourselves, it's already in tile space.
// If the content range was supplied by the caller, it's in tileset space and needs to be transformed to tile space.
const batch = !featureTable ? undefined : {
featureTable: PackedFeatureTable.pack(featureTable),
range: this._computedContentRange ? contentRange : range,
};
const viewFlagOverrides = this.viewFlagOverrides;
const branch = transform || viewFlagOverrides ? { transform, viewFlagOverrides } : undefined;
return {
readStatus,
isLeaf,
contentRange,
range,
copyright: this._glTF.asset?.copyright,
containsPointCloud: this._containsPointCloud,
template: createGraphicTemplate({
nodes: templateNodes,
batch,
branch,
noDispose,
}),
};
}
readGltfAndCreateGeometry(transformToRoot, needNormals = false, needParams = false) {
const transformStack = new TransformStack(this.getTileTransform(transformToRoot));
const polyfaces = [];
for (const nodeKey of this._sceneNodes) {
const node = this._nodes[nodeKey];
if (node)
this.readNodeAndCreatePolyfaces(polyfaces, node, transformStack, needNormals, needParams);
}
return { polyfaces };
}
/** Create render geometry from glTF mesh data.
 * Point clouds go straight to the render system. For meshes, a (more efficient) reality mesh is
 * attempted first unless a vertex table is required or the geometry is instanced; otherwise the
 * Mesh primitive is populated from the raw data and used instead.
 * @param gltfMesh the mesh (or point cloud) data read from the glTF.
 * @param isInstanced true if the geometry will be instanced, which precludes the reality-mesh path.
 */
geometryFromMeshData(gltfMesh, isInstanced) {
if ("pointcloud" === gltfMesh.type)
return this._system.createPointCloudGeometry(gltfMesh);
// Without raw points and their range we cannot build a reality mesh; use the mesh primitive as-is.
if (!gltfMesh.points || !gltfMesh.pointRange)
return this._system.createGeometryFromMesh(gltfMesh.primitive, undefined);
let realityMeshPrimitive = (this._vertexTableRequired || isInstanced) ? undefined : RealityMeshParams.fromGltfMesh(gltfMesh);
if (realityMeshPrimitive && this._tileData) {
realityMeshPrimitive = {
...realityMeshPrimitive,
tileData: this._tileData,
};
}
if (realityMeshPrimitive) {
const realityMesh = this._system.createRealityMeshGeometry(realityMeshPrimitive);
if (realityMesh)
return realityMesh;
}
// Reality-mesh path unavailable; populate the Mesh primitive from the raw typed arrays instead.
const mesh = gltfMesh.primitive;
const pointCount = gltfMesh.points.length / 3;
assert(mesh.points instanceof QPoint3dList);
mesh.points.fromTypedArray(gltfMesh.pointRange, gltfMesh.points);
if (mesh.triangles && gltfMesh.indices)
mesh.triangles.addFromTypedArray(gltfMesh.indices);
if (gltfMesh.uvs && gltfMesh.uvRange && gltfMesh.uvQParams) {
/** This is ugly and inefficient... unnecessary if Mesh stored uvs as QPoint2dList */
for (let i = 0, j = 0; i < pointCount; i++)
mesh.uvParams.push(gltfMesh.uvQParams.unquantize(gltfMesh.uvs[j++], gltfMesh.uvs[j++]));
}
if (gltfMesh.normals)
for (const normal of gltfMesh.normals)
mesh.normals.push(new OctEncodedNormal(normal));
return this._system.createGeometryFromMesh(mesh, undefined, this._tileData);
}
/** Read a node's EXT_mesh_gpu_instancing attributes, producing per-instance transforms and (optionally) feature ids.
 * Translations are re-centered about the midpoint of the TRANSLATION accessor's min/max; the center is
 * returned as transformCenter so callers can reapply it.
 * @returns undefined if the node has no instancing extension or the attributes are inconsistent; otherwise
 * { count, transforms (3x4 matrices, row-major, packed per instance), transformCenter, featureIds }.
 */
readInstanceAttributes(node, featureTable) {
const ext = node.extensions?.EXT_mesh_gpu_instancing;
if (!ext || !ext.attributes) {
return undefined;
}
const translationsView = this.getBufferView(ext.attributes, "TRANSLATION");
const translations = translationsView?.toBufferData(GltfDataType.Float);
const rotations = this.getBufferView(ext.attributes, "ROTATION")?.toBufferData(GltfDataType.Float);
const scales = this.getBufferView(ext.attributes, "SCALE")?.toBufferData(GltfDataType.Float);
// All attributes must specify the same count, count must be greater than zero, and at least one attribute must be specified.
const count = translations?.count ?? rotations?.count ?? scales?.count;
if (!count || (rotations && rotations.count !== count) || (scales && scales.count !== count)) {
return undefined;
}
// Midpoint of the translation accessor's min/max; subtracted from every translation below.
const transformCenter = new Point3d();
const trMin = translationsView?.accessor.min;
const trMax = translationsView?.accessor.max;
if (trMin && trMax) {
const half = (idx) => trMin[idx] + (trMax[idx] - trMin[idx]) / 2;
transformCenter.set(half(0), half(1), half(2));
}
// The accessors below return the TRS components for instance `index`, or undefined if the attribute is absent.
const getTranslation = (index) => {
if (!translations) {
return undefined;
}
index *= 3;
return [
translations.buffer[index + 0] - transformCenter.x,
translations.buffer[index + 1] - transformCenter.y,
translations.buffer[index + 2] - transformCenter.z,
];
};
const getRotation = (index) => {
index *= 4;
return rotations ? [rotations.buffer[index], rotations.buffer[index + 1], rotations.buffer[index + 2], rotations.buffer[index + 3]] : undefined;
};
const getScale = (index) => {
index *= 3;
return scales ? [scales.buffer[index], scales.buffer[index + 1], scales.buffer[index + 2]] : undefined;
};
// Pack each instance's transform as 12 floats: three matrix rows, each followed by the origin component.
const transforms = new Float32Array(3 * 4 * count);
const transform = Transform.createIdentity();
for (let i = 0; i < count; i++) {
const tf = trsMatrix(getTranslation(i), getRotation(i), getScale(i), transform);
const idx = i * 3 * 4;
transforms[idx + 0] = tf.matrix.coffs[0];
transforms[idx + 1] = tf.matrix.coffs[1];
transforms[idx + 2] = tf.matrix.coffs[2];
transforms[idx + 3] = tf.origin.x;
transforms[idx + 4] = tf.matrix.coffs[3];
transforms[idx + 5] = tf.matrix.coffs[4];
transforms[idx + 6] = tf.matrix.coffs[5];
transforms[idx + 7] = tf.origin.y;
transforms[idx + 8] = tf.matrix.coffs[6];
transforms[idx + 9] = tf.matrix.coffs[7];
transforms[idx + 10] = tf.matrix.coffs[8];
transforms[idx + 11] = tf.origin.z;
}
// 3 bytes per instance: the feature id is encoded below as 24 bits, little-endian.
let featureIds = ((featureTable && featureTable.isUniform)) ? new Uint8Array(3 * count) : undefined;
// Resolve instance features if the EXT_instance_features extension is present
const instanceFeaturesExt = node.extensions?.EXT_instance_features;
if (this._structuralMetadata && instanceFeaturesExt && this._idMap) {
if (!featureIds)
featureIds = new Uint8Array(3 * count);
// Resolve feature buffers before creating instance table
const featureBuffers = new Map();
for (const featureIdDesc of instanceFeaturesExt.featureIds) {
if (featureIdDesc.attribute !== undefined) {
const bufferView = this.getBufferView(ext.attributes, `_FEATURE_ID_${featureIdDesc.attribute}`);
if (bufferView) {
const bufferData = bufferView.toBufferData(bufferView.type)?.buffer;
if (bufferData) {
featureBuffers.set(featureIdDesc.attribute, bufferData);
}
}
}
}
// Collect each instance's property values from the metadata tables, then map them to an element id.
for (let localInstanceId = 0; localInstanceId < count; localInstanceId++) {
const instanceProps = {};
for (const featureIdDesc of instanceFeaturesExt.featureIds) {
const table = this._structuralMetadata.tables[featureIdDesc.propertyTable];
instanceProps[table.name] = {};
// If the attribute is not defined, then the feature id corresponds to the instance id
if (featureIdDesc.attribute === undefined) {
for (const entries of table.entries) {
if (entries.values[localInstanceId] !== undefined) {
instanceProps[table.name][entries.name] = entries.values[localInstanceId];
}
}
}
else if (featureBuffers.has(featureIdDesc.attribute)) {
const featureBuffer = featureBuffers.get(featureIdDesc.attribute);
if (!featureBuffer) {
continue;
}
const featureId = featureBuffer[localInstanceId];
// nullFeatureId marks instances that carry no feature; skip them.
if (featureIdDesc.nullFeatureId !== undefined && featureId === featureIdDesc.nullFeatureId) {
continue;
}
for (const entries of table.entries) {
if (entries.values[featureId] !== undefined) {
instanceProps[table.name][entries.name] = entries.values[featureId];
}
}
}
}
const instanceElementId = this._idMap.getBatchId(instanceProps);
// If the element id is already assigned to a previous instance,
// reuse the previous feature id to avoid collision in the feature table
if (!this._instanceElementIdToFeatureId.has(instanceElementId)) {
this._instanceElementIdToFeatureId.set(instanceElementId, this._instanceFeatures.length);
this._instanceFeatures.push(new Feature(instanceElementId));
}
const instanceFeatureId = this._instanceElementIdToFeatureId.get(instanceElementId);
// Encode the feature id as 24 bits, little-endian.
featureIds[localInstanceId * 3 + 0] = instanceFeatureId & 0xFF;
featureIds[localInstanceId * 3 + 1] = (instanceFeatureId >> 8) & 0xFF;
featureIds[localInstanceId * 3 + 2] = (instanceFeatureId >> 16) & 0xFF;
}
}
return { count, transforms, transformCenter, featureIds };
}
/** Recursively convert a glTF node (and its children) into template nodes.
 * @param templateNodes accumulator for the produced nodes.
 * @param batchInstances instancing supplied by the caller; when absent, per-node
 * EXT_mesh_gpu_instancing attributes are read instead.
 * @param pseudoRtcBias ad hoc RTC bias applied to quantization origins (see comment below).
 * @returns Success, or InvalidTileData for an undefined node.
 */
readTemplateNodes(templateNodes, node, featureTable, transformStack, batchInstances, pseudoRtcBias) {
if (undefined === node)
return TileReadStatus.InvalidTileData;
// IMPORTANT: Do not return without popping this node from the stack.
transformStack.push(node);
const thisTransform = transformStack.transform;
// Node-level instancing is ignored when batch-level instances were supplied by the caller.
const nodeInstances = !batchInstances && undefined !== node.mesh ? this.readInstanceAttributes(node, featureTable) : undefined;
/**
 * This is a workaround for tiles generated by
 * context capture which have a large offset from the tileset origin that exceeds the
 * capacity of 32 bit integers. It is essentially an ad hoc RTC applied at read time only if the tile is far from the
 * origin and there is no RTC supplied either with the B3DM of the GLTF.
 * as the vertices are supplied in a quantized format, applying the RTC bias to
 * quantization origin will make these tiles work correctly.
 */
let thisBias;
if (undefined !== pseudoRtcBias)
thisBias = (undefined === thisTransform) ? pseudoRtcBias : thisTransform.matrix.multiplyInverse(pseudoRtcBias);
for (const meshKey of getGltfNodeMeshIds(node)) {
const nodeMesh = this._meshes[meshKey];
if (nodeMesh?.primitives) {
const meshes = this.readMeshPrimitives(node, featureTable, thisTransform, thisBias, nodeInstances);
if (0 !== meshes.length) {
const thisList = [];
for (const mesh of meshes) {
const geometry = this.geometryFromMeshData(mesh, !!batchInstances || !!nodeInstances);
if (undefined !== geometry)
thisList.push(geometry);
}
// Only emit a template node if at least one primitive produced geometry.
if (0 !== thisList.length) {
templateNodes.push({
geometry: thisList,
transform: thisTransform && !thisTransform.isIdentity ? thisTransform : undefined,
instances: batchInstances ?? nodeInstances,
});
}
}
}
}
if (node.children) {
for (const childId of node.children) {
const child = this._nodes[childId];
if (child)
// NOTE(review): pseudoRtcBias is not propagated to children here -- confirm that is deliberate.
this.readTemplateNodes(templateNodes, child, featureTable, transformStack, batchInstances ?? nodeInstances);
}
}
transformStack.pop();
return TileReadStatus.Success;
}
readNodeAndCreatePolyfaces(polyfaces, node, transformStack, needNormals, needParams) {
// IMPORTANT: Do not return without popping this node from the stack.
transformStack.push(node);
const meshes = this.readMeshPrimitives(node);
for (const mesh of meshes) {
if (mesh.type === "mesh") {
const polyface = this.polyfaceFromGltfMesh(mesh, transformStack.transform, needNormals, needParams);
if (polyface)
polyfaces.push(polyface);
}
}
if (node.children) {
for (const childId of node.children) {
const child = this._nodes[childId];
if (child)
this.readNodeAndCreatePolyfaces(polyfaces, child, transformStack, needNormals, needParams);
}
}
}
polyfaceFromGltfMesh(mesh, transform, needNormals, needParams) {
if (!mesh.pointQParams || !mesh.points || !mesh.indices)
return undefined;
const { points, pointQParams, normals, uvs, uvQParams, indices } = mesh;
const includeNormals = needNormals && undefined !== normals;
const includeParams = needParams && undefined !== uvQParams && undefined !== uvs;
const polyface = IndexedPolyface.create(includeNormals, includeParams);
for (let i = 0; i < points.length;) {
const point = pointQParams.unquantize(points[i++], points[i++], points[i++]);
if (transform)
transform.multiplyPoint3d(point, point);
polyface.addPoint(point);
}
if (includeNormals && normals)
for (let i = 0; i < normals.length;)
polyface.addNormal(OctEncodedNormal.decodeValue(normals[i++]));
if (includeParams && uvs && uvQParams)
for (let i = 0; i < uvs.length;)
polyface.addParam(uvQParams.unquantize(uvs[i++], uvs[i++]));
let j = 0;
for (const index of indices) {
polyface.addPointIndex(index);
if (includeNormals)
polyface.addNormalIndex(index);
if (includeParams)
polyface.addParamIndex(index);
if (0 === (++j % 3))
polyface.terminateFacet();
}
return polyface;
}
// ###TODO what is the actual type of `json`?
/** Resolve the accessor named `accessorName` in `json` to a GltfBufferView over its backing bytes.
 * Returns undefined (never throws) if the accessor, buffer view, or backing buffer is missing,
 * or if the accessor's component type is unsupported.
 */
getBufferView(json, accessorName) {
try {
const accessorValue = JsonUtils.asString(json[accessorName]);
const accessor = accessorValue ? this._accessors[accessorValue] : undefined;
if (!accessor)
return undefined;
const bufferViewAccessorValue = accessor.bufferView;
const bufferView = undefined !== bufferViewAccessorValue ? this._bufferViews[bufferViewAccessorValue] : undefined;
if (!bufferView)
return undefined;
// Prefer bytes resolved directly on the view; otherwise resolve via the referenced buffer.
let bufferData = bufferView.resolvedBuffer;
if (!bufferData) {
if (undefined === bufferView.buffer)
return undefined;
const buffer = this._buffers[bufferView.buffer];
bufferData = buffer?.resolvedBuffer;
}
if (!bufferData)
return undefined;
// Byte size of a single component of the accessor's componentType.
const type = accessor.componentType;
let dataSize = 0;
switch (type) {
case GltfDataType.UnsignedByte:
dataSize = 1;
break;
case GltfDataType.UnsignedShort:
dataSize = 2;
break;
case GltfDataType.UInt32:
case GltfDataType.Float:
dataSize = 4;
break;
default:
return undefined;
}
// Number of components per element (SCALAR and anything unrecognized = 1).
let componentCount = 1;
switch (accessor.type) {
case "VEC4":
componentCount = 4;
break;
case "VEC3":
componentCount = 3;
break;
case "VEC2":
componentCount = 2;
break;
}
// Tightly-packed data has no explicit byteStride.
const byteStride = bufferView.byteStride ? bufferView.byteStride : componentCount * dataSize;
const offset = ((bufferView && bufferView.byteOffset) ? bufferView.byteOffset : 0) + (accessor.byteOffset ? accessor.byteOffset : 0);
const length = byteStride * accessor.count;
// If the data is misaligned (Scalable mesh tile publisher) use slice to copy -- else use subarray.
const aligned = 0 === (bufferData.byteOffset + offset) % dataSize;
const bytes = aligned ? bufferData.subarray(offset, offset + length) : bufferData.slice(offset, offset + length);
return new GltfBufferView(bytes, accessor.count, type, accessor, byteStride / dataSize);
}
catch {
return undefined;
}
}
readBufferData32(json, accessorName) { return this.readBufferData(json, accessorName, GltfDataType.UInt32); }
readBufferData16(json, accessorName) { return this.readBufferData(json, accessorName, GltfDataType.UnsignedShort); }
readBufferData8(json, accessorName) { return this.readBufferData(json, accessorName, GltfDataType.UnsignedByte); }
readBufferDataFloat(json, accessorName) { return this.readBufferData(json, accessorName, GltfDataType.Float); }
constructor(args) {
this._tileData = args.tileData;
this._glTF = args.props.glTF;
this._version = args.props.version;
this._yAxisUp = args.props.yAxisUp;
this._baseUrl = args.props.baseUrl;
const rtcCenter = args.props.glTF.extensions?.CESIUM_RTC?.center;
if (rtcCenter && 3 === rtcCenter.length)
if (0 !== rtcCenter[0] || 0 !== rtcCenter[1] || 0 !== rtcCenter[2])
this._returnToCenter = Point3d.fromJSON(rtcCenter);
this._iModel = args.iModel;
this._is3d = true !== args.is2d;
this._system = args.system ?? IModelApp.renderSystem;
this._type = args.type ?? BatchType.Primary;
this._canceled = args.shouldAbort;
this._deduplicateVertices = args.deduplicateVertices ?? false;
this._vertexTableRequired = args.vertexTableRequired ?? false;
const binaryData = args.props.binaryData;
if (binaryData) {
const buffer = this._buffers[this._version === 2 ? 0 : "binary_glTF"];
if (buffer && undefined === buffer.uri)
buffer.resolvedBuffer = binaryData;
}
// The original implementation of GltfReader would process and produce graphics for every node in glTF.nodes.
// What it's *supposed* to do is process the nodes in glTF.scenes[glTF.scene].nodes
// Some nodes may not be referenced by the configured scene, or only indirectly via GltfNode.children.
// Perhaps some faulty tiles existed that didn't define their scenes properly?
let sceneNodes;
if (this._glTF.scenes && undefined !== this._glTF.scene)
sceneNodes = this._glTF.scenes[this._glTF.scene]?.nodes;
if (!sceneNodes)
sceneNodes = Object.keys(this._nodes);
this._sceneNodes = sceneNodes;
this._idMap = args.idMap;
}
readBufferData(json, accessorName, type) {
const view = this.getBufferView(json, accessorName);
return undefined !== view ? view.toBufferData(type) : undefined;
}
readFeatureIndices(_json) { return undefined; }
extractId(value) {
switch (typeof value) {
case "string":
return value;
case "number":
return value.toString();
default:
return undefined;
}
}
extractTextureId(material) {
if (typeof material !== "object")
return undefined;
// Bimium's shader value...almost certainly obsolete at this point.
if (isGltf1Material(material))
return material.diffuse ?? this.extractId(material.values?.tex);
// KHR_techniques_webgl extension
const techniques = this._glTF.extensions?.KHR_techniques_webgl?.techniques;
const ext = Array.isArray(techniques) ? material.extensions?.KHR_techniques_webgl : undefined;
if (techniques && undefined !== ext && typeof (ext.values) === "object") {
const uniforms = typeof ext.technique === "number" ? techniques[ext.technique].uniforms : undefined;
if (typeof uniforms === "object") {
for (const uniformName of Object.keys(uniforms)) {
const uniform = uniforms[uniformName];
if (typeof uniform === "object" && uniform.type === GltfDataType.Sampler2d)
return this.extractId(ext.values[uniformName]?.index);
}
}
}
const id = this.extractId(material.pbrMetallicRoughness?.baseColorTexture?.index);
return id ?? this.extractId(material.emissiveTexture?.index);
}
extractNormalMapId(material) {
if (typeof material !== "object")
return undefined;
if (isGltf1Material(material))
return undefined;
return this.extractId(material.normalTexture?.index);
}
isMaterialTransparent(material) {
if (isGltf1Material(material)) {
if (this._glTF.techniques && undefined !== material.technique) {
const technique = this._glTF.techniques[material.technique];
if (technique?.states?.enable?.some((state) => state === GltfTechniqueState.Blend))
return true;
}
return false;
}
else {
// Default: OPAQUE.
// ###TODO support MASK. For now treat as opaque.
return "BLEND" === material.alphaMode;
}
}
createDisplayParams(material, hasBakedLighting) {
const isTransparent = this.isMaterialTransparent(material);
const textureId = this.extractTextureId(material);
const normalMapId = this.extractNormalMapId(material);
let textureMapping = (undefined !== textureId || undefined !== normalMapId) ? this.findTextureMapping(textureId, isTransparent, normalMapId) : undefined;
const color = colorFromMaterial(material, isTransparent);
let renderMaterial;
if (undefined !== textureMapping && undefined !== textureMapping.normalMapParams) {
const args = { diffuse: { color }, specular: { color: ColorDef.white }, textureMapping };
renderMaterial = IModelApp.renderSystem.createRenderMaterial(args);
// DisplayParams doesn't want a separate texture mapping if the material already has one.
textureMapping = undefined;
}
return new DisplayParams(DisplayParams.Type.Mesh, color, color, 1, LinePixels.Solid, FillFlags.None, renderMaterial, undefined, hasBakedLighting, textureMapping);
}
readMeshPrimitives(node, featureTable, thisTransform, thisBias, instances) {
const meshes = [];
for (const meshKey of getGltfNodeMeshIds(node)) {
const nodeMesh = this._meshes[meshKey];
const primitives = getMeshPrimitives(nodeMesh);
if (primitives) {
for (const primitive of primitives) {
const mesh = this.readMeshPrimitive(primitive, featureTable, thisBias);
if (mesh) {
meshes.push(mesh);
if (this._computedContentRange && mesh.pointRange) {
const meshRange = thisTransform ? thisTransform.multiplyRange(mesh.pointRange) : mesh.pointRange;
if (!instances) {
this._computedContentRange.extendRange(meshRange);
}
else {
const tfs = instances.transforms;
const nodeRange = new Range3d();
const extendTransformedRange = (i, x, y, z) => {
nodeRange.extendXYZ(tfs[i + 3] + tfs[i + 0] * x + tfs[i + 1] * y + tfs[i + 2] * z, tfs[i + 7] + tfs[i + 4] * x + tfs[i + 5] * y + tfs[i + 6] * z, tfs[i + 11] + tfs[i + 8] * x + tfs[i + 9] * y + tfs[i + 10] * z);
};
for (let i = 0; i < tfs.length; i += 3 * 4) {
extendTransformedRange(i, meshRange.low.x, meshRange.low.y, meshRange.low.z);
extendTransformedRange(i, meshRange.low.x, meshRange.low.y, meshRange.high.z);
extendTransformedRange(i, meshRange.low.x, meshRange.high.y, meshRange.low.z);
extendTransformedRange(i, meshRange.low.x, meshRange.high.y, meshRange.high.z);
extendTransformedRange(i, meshRange.high.x, meshRange.low.y, meshRange.low.z);
extendTransformedRange(i, meshRange.high.x, meshRange.low.y, meshRange.high.z);
extendTransformedRange(i, meshRange.high.x, meshRange.high.y, meshRange.low.z);
extendTransformedRange(i, meshRange.high.x, meshRange.high.y, meshRange.high.z);
}
nodeRange.low.addInPlace(instances.transformCenter);
nodeRange.high.addInPlace(instances.transformCenter);
this._computedContentRange.extendRange(nodeRange);
}
}
}
}
}
}
return meshes;
}
readMeshPrimitive(primitive, featureTable, pseudoRtcBias) {
const meshMode = JsonUtils.asInt(primitive.mode, GltfMeshMode.Triangles);
if (meshMode === GltfMeshMode.Points /* && !this._vertexTableRequired */) {
const pointCloud = this.readPointCloud2(primitive, undefined !== featureTable);
if (pointCloud)
return pointCloud;
}
const materialName = JsonUtils.asString(primitive.material);
const material = 0 < materialName.length ? this._materials[materialName] : {};
if (!material)
return undefined;
const hasBakedLighting = undefined === primitive.attributes.NORMAL || undefined !== material.extensions?.KHR_materials_unlit;
const displayPara