// nanogl-gltf — Gltf file representation (compiled JavaScript output)
// Version: unspecified — 238 lines (237 loc), 8.79 kB
// NOTE(review): a truncated triple-slash directive ("/// <") followed the
// header; its reference path was lost and should be restored from the source.
import ExtensionsRegistry from './extensions/Registry';
import NanoCamera from 'nanogl-camera';
import NanoglNode from 'nanogl-node';
import { DefaultSemantics } from './Semantics';
import GltfTypes from './types/GltfTypes';
import { DefaultRenderConfig } from './IRenderConfig';
import DepthPass from 'nanogl-pbr/DepthPass';
import { AbortError, AbortSignal } from '@azure/abort-controller';
/**
* Collection of elements of a given type
*/
/**
 * Collection of elements of a given type.
 *
 * Keeps two views over the same elements: `indexed`, where each element sits
 * at an explicitly supplied slot, and `list`, ordered by insertion.
 */
export class ElementCollection {
    constructor() {
        // Elements stored at their explicitly supplied index.
        this.indexed = [];
        // Elements in the order they were added.
        this.list = [];
    }
    /**
     * Add an element to the collection.
     * @param element Element to add
     * @param index Slot in the `indexed` array; when -1 (or omitted) the
     *              element is only appended to `list`
     */
    addElement(element, index = -1) {
        const hasExplicitIndex = index > -1;
        if (hasExplicitIndex) {
            this.indexed[index] = element;
        }
        this.list.push(element);
    }
}
/**
* Gltf file representation
*/
/**
 * Gltf file representation
 */
export default class Gltf {
    constructor() {
        /**
         * Root node of this Gltf
         */
        this.root = new NanoglNode();
        /**
         * Extras property of this Gltf (custom additional data that can be added to every gltf property)
         */
        this.extras = {};
        // One ElementCollection per element type; each keeps both an
        // index-ordered view and an insertion-ordered view of its elements.
        this._collections = new Map([
            [GltfTypes.ACCESSOR, new ElementCollection()],
            [GltfTypes.ACCESSOR_SPARSE, new ElementCollection()],
            [GltfTypes.ACCESSOR_SPARSE_INDICES, new ElementCollection()],
            [GltfTypes.ACCESSOR_SPARSE_VALUES, new ElementCollection()],
            [GltfTypes.ANIMATION, new ElementCollection()],
            [GltfTypes.ANIMATION_SAMPLER, new ElementCollection()],
            [GltfTypes.ANIMATION_CHANNEL, new ElementCollection()],
            [GltfTypes.ASSET, new ElementCollection()],
            [GltfTypes.BUFFER, new ElementCollection()],
            [GltfTypes.BUFFERVIEW, new ElementCollection()],
            [GltfTypes.CAMERA, new ElementCollection()],
            [GltfTypes.IMAGE, new ElementCollection()],
            [GltfTypes.MATERIAL, new ElementCollection()],
            [GltfTypes.MESH, new ElementCollection()],
            [GltfTypes.NODE, new ElementCollection()],
            [GltfTypes.NORMAL_TEXTURE_INFO, new ElementCollection()],
            [GltfTypes.OCCLUSION_TEXTURE_INFO, new ElementCollection()],
            [GltfTypes.PRIMITIVE, new ElementCollection()],
            [GltfTypes.SAMPLER, new ElementCollection()],
            [GltfTypes.SCENE, new ElementCollection()],
            [GltfTypes.SKIN, new ElementCollection()],
            [GltfTypes.TEXTURE, new ElementCollection()],
            [GltfTypes.TEXTURE_INFO, new ElementCollection()],
        ]);
        // Flat, unordered list of every element added to this Gltf.
        this._elements = [];
    }
    /**
     * Add an extension to the static Gltf's extensions registry
     * @param ext Extension to add
     */
    static addExtension(ext) {
        Gltf._extensionsRegistry.addExtension(ext);
    }
    /**
     * Get the static Gltf's semantics resolver
     */
    static getSemantics() {
        return this._semantics;
    }
    /**
     * Set a custom static Gltf's semantics resolver
     * @param semantics Custom semantics resolver; ignored when null or undefined
     */
    static setSemantics(semantics) {
        // Bug fix: the previous code (`semantics ?? (this._semantics = semantics)`)
        // only assigned when `semantics` was nullish, so a valid resolver was
        // ignored and a nullish one clobbered the current resolver. Assign only
        // when a resolver is actually provided.
        if (semantics !== null && semantics !== undefined) {
            this._semantics = semantics;
        }
    }
    /**
     * Get the static Gltf's render config
     */
    static getRenderConfig() {
        return this._renderConfig;
    }
    /**
     * Get the static Gltf's extensions registry
     */
    static getExtensionsRegistry() {
        return this._extensionsRegistry;
    }
    /**
     * Get the Gltf ready to be used in a WebGL context.
     * Generally the first method called after loading the Gltf.
     * This will allocate all the needed textures to the GPU, create the renderables, cameras, initialize buffers for primitives,
     * and create Cameras from nanogl-camera.
     * @param gl GL context
     * @param abortSignal Abort signal if you want to be able to cancel the request at any time
     * @throws AbortError when the signal is aborted between texture allocations
     */
    async allocate(gl, abortSignal = AbortSignal.none) {
        this.gl = gl;
        this.depthPass = new DepthPass(gl);
        this.depthPass.depthFormat.set("D_RGB");
        // Textures are allocated sequentially so the abort signal can be
        // honored between each (potentially heavy) GPU upload.
        for (const tex of this.textures) {
            await tex.allocateGl(gl);
            if (abortSignal.aborted)
                throw new AbortError('Aborted');
        }
        this.primitives.forEach(p => p.allocateGl(gl));
        this.nodes.forEach(n => n.allocateGl(this));
        this.renderables = this.nodes
            .map(n => n.renderable)
            .filter(n => n !== undefined);
        // Re-parent every orphan node under this Gltf's root node.
        for (const node of this.nodes) {
            if (!node._parent) {
                this.root.add(node);
            }
        }
        this.createCameras();
    }
    /**
     * Filter all gltf Nodes to get only the ones that are cameras, and create a Camera from nanogl-camera for each of them,
     * added as child of the Node, storing them in cameraInstances
     */
    createCameras() {
        this.cameraInstances = this.nodes
            .filter(n => n.camera !== undefined)
            .map(n => {
                const cam = new NanoCamera(n.camera.lens);
                n.add(cam);
                return cam;
            });
    }
    get buffers() { return this._getCollection(GltfTypes.BUFFER).list; }
    get bufferViews() { return this._getCollection(GltfTypes.BUFFERVIEW).list; }
    get accessors() { return this._getCollection(GltfTypes.ACCESSOR).list; }
    get animations() { return this._getCollection(GltfTypes.ANIMATION).list; }
    get meshes() { return this._getCollection(GltfTypes.MESH).list; }
    get nodes() { return this._getCollection(GltfTypes.NODE).list; }
    get materials() { return this._getCollection(GltfTypes.MATERIAL).list; }
    get cameras() { return this._getCollection(GltfTypes.CAMERA).list; }
    get skins() { return this._getCollection(GltfTypes.SKIN).list; }
    get primitives() { return this._getCollection(GltfTypes.PRIMITIVE).list; }
    get textures() { return this._getCollection(GltfTypes.TEXTURE).list; }
    get scenes() { return this._getCollection(GltfTypes.SCENE).list; }
    /**
     * Get all elements of this Gltf, unordered and unfiltered
     */
    getAllElements() {
        return this._elements;
    }
    /**
     * Get an element by its type and index
     * @param type Element's type
     * @param index Element's index
     */
    getElement(type, index) {
        return this._getCollection(type).indexed[index];
    }
    /**
     * Get an element by its type and name
     * @param type Element's type
     * @param name Element's name
     * @returns the first matching element, or null when none matches
     */
    getElementByName(type, name) {
        const list = this._getCollection(type).list;
        for (const el of list) {
            if (el.name === name)
                return el;
        }
        return null;
    }
    /**
     * Get all elements of a specific type with a specific name
     * @param type Elements' type
     * @param name Elements' name
     */
    getElementsByName(type, name) {
        const list = this._getCollection(type).list;
        const output = [];
        for (const el of list) {
            if (el.name === name)
                output.push(el);
        }
        return output;
    }
    getNode(name) { return this.getElementByName(GltfTypes.NODE, name); }
    getMesh(name) { return this.getElementByName(GltfTypes.MESH, name); }
    getMaterial(name) { return this.getElementByName(GltfTypes.MATERIAL, name); }
    getAnimation(name) { return this.getElementByName(GltfTypes.ANIMATION, name); }
    getScene(name) { return this.getElementByName(GltfTypes.SCENE, name); }
    /**
     * Get full collection of a specific type
     * @param type Type of targeted collection
     */
    _getCollection(type) {
        return this._collections.get(type);
    }
    /**
     * Add an element to the Gltf
     * @param element Element to add
     * @param index Index of the element, if -1 or not specified, will not be pushed to indexed array (only the unordered one)
     */
    addElement(element, index = -1) {
        const collection = this._getCollection(element.gltftype);
        collection.addElement(element, index);
        this._elements.push(element);
    }
}
// ---------------------------------------------------------------------------
// Static state shared by every Gltf instance.
// ---------------------------------------------------------------------------

/** GLSL semantics resolver used by all Gltf instances. */
Gltf._semantics = new DefaultSemantics();

/** Render config applied to all Gltf instances. */
Gltf._renderConfig = DefaultRenderConfig();

/** Registry of activated extensions for all Gltf instances. */
Gltf._extensionsRegistry = new ExtensionsRegistry();