import {
BufferAttribute,
ClampToEdgeWrapping,
Color,
DoubleSide,
InterpolateDiscrete,
InterpolateLinear,
NoColorSpace,
LinearFilter,
LinearMipmapLinearFilter,
LinearMipmapNearestFilter,
MathUtils,
Matrix4,
MirroredRepeatWrapping,
NearestFilter,
NearestMipmapLinearFilter,
NearestMipmapNearestFilter,
PropertyBinding,
RGBAFormat,
RepeatWrapping,
Scene,
Source,
SRGBColorSpace,
CompressedTexture,
Vector3,
Quaternion,
REVISION,
ImageUtils
} from 'three';
/**
* The KHR_mesh_quantization extension allows these extra attribute component types
*
* @see https://github.com/KhronosGroup/glTF/blob/main/extensions/2.0/Khronos/KHR_mesh_quantization/README.md#extending-mesh-attributes
*/
const KHR_mesh_quantization_ExtraAttrTypes = {
POSITION: [
'byte',
'byte normalized',
'unsigned byte',
'unsigned byte normalized',
'short',
'short normalized',
'unsigned short',
'unsigned short normalized',
],
NORMAL: [
'byte normalized',
'short normalized',
],
TANGENT: [
'byte normalized',
'short normalized',
],
TEXCOORD: [
'byte',
'byte normalized',
'unsigned byte',
'short',
'short normalized',
'unsigned short',
],
};
/**
* An exporter for `glTF` 2.0.
*
* glTF (GL Transmission Format) is an [open format specification]{@link https://github.com/KhronosGroup/glTF/tree/master/specification/2.0}
* for efficient delivery and loading of 3D content. Assets may be provided either in JSON (.gltf)
* or binary (.glb) format. External files store textures (.jpg, .png) and additional binary
* data (.bin). A glTF asset may deliver one or more scenes, including meshes, materials,
* textures, skins, skeletons, morph targets, animations, lights, and/or cameras.
*
* GLTFExporter supports the [glTF 2.0 extensions]{@link https://github.com/KhronosGroup/glTF/tree/master/extensions/}:
*
* - KHR_lights_punctual
* - KHR_materials_clearcoat
* - KHR_materials_dispersion
* - KHR_materials_emissive_strength
* - KHR_materials_ior
* - KHR_materials_iridescence
* - KHR_materials_specular
* - KHR_materials_sheen
* - KHR_materials_transmission
* - KHR_materials_unlit
* - KHR_materials_volume
* - KHR_mesh_quantization
* - KHR_texture_transform
* - EXT_materials_bump
* - EXT_mesh_gpu_instancing
*
* The following glTF 2.0 extension is supported by an external user plugin:
*
* - [KHR_materials_variants]{@link https://github.com/takahirox/three-gltf-extensions}
*
* ```js
* const exporter = new GLTFExporter();
* const data = await exporter.parseAsync( scene, options );
* ```
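*
* The shape of the result depends on the `binary` option: with `binary: true` the promise
* resolves with an ArrayBuffer holding the `.glb` payload, otherwise with a plain JSON object.
* A minimal sketch, assuming hypothetical `saveArrayBuffer()` and `saveString()` download helpers:
*
* ```js
* const exporter = new GLTFExporter();
*
* // Binary export (.glb).
* const glb = await exporter.parseAsync( scene, { binary: true } );
* saveArrayBuffer( glb, 'scene.glb' ); // hypothetical helper
*
* // JSON export (.gltf).
* const gltf = await exporter.parseAsync( scene, { binary: false } );
* saveString( JSON.stringify( gltf, null, 2 ), 'scene.gltf' ); // hypothetical helper
* ```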
*
* @three_import import { GLTFExporter } from 'three/addons/exporters/GLTFExporter.js';
*/
class GLTFExporter {
/**
* Constructs a new glTF exporter.
*/
constructor() {
/**
* A reference to a texture utils module.
*
* @type {?(WebGLTextureUtils|WebGPUTextureUtils)}
* @default null
*/
this.textureUtils = null;
this.pluginCallbacks = [];
this.register( function ( writer ) {
return new GLTFLightExtension( writer );
} );
this.register( function ( writer ) {
return new GLTFMaterialsUnlitExtension( writer );
} );
this.register( function ( writer ) {
return new GLTFMaterialsTransmissionExtension( writer );
} );
this.register( function ( writer ) {
return new GLTFMaterialsVolumeExtension( writer );
} );
this.register( function ( writer ) {
return new GLTFMaterialsIorExtension( writer );
} );
this.register( function ( writer ) {
return new GLTFMaterialsSpecularExtension( writer );
} );
this.register( function ( writer ) {
return new GLTFMaterialsClearcoatExtension( writer );
} );
this.register( function ( writer ) {
return new GLTFMaterialsDispersionExtension( writer );
} );
this.register( function ( writer ) {
return new GLTFMaterialsIridescenceExtension( writer );
} );
this.register( function ( writer ) {
return new GLTFMaterialsSheenExtension( writer );
} );
this.register( function ( writer ) {
return new GLTFMaterialsAnisotropyExtension( writer );
} );
this.register( function ( writer ) {
return new GLTFMaterialsEmissiveStrengthExtension( writer );
} );
this.register( function ( writer ) {
return new GLTFMaterialsBumpExtension( writer );
} );
this.register( function ( writer ) {
return new GLTFMeshGpuInstancing( writer );
} );
}
/**
* Registers a plugin callback. This API is internally used to implement the various
* glTF extensions but can also be used by third-party code to add additional logic
* to the exporter.
*
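* A minimal sketch of a user plugin, assuming a hypothetical `EXT_example` extension. The
* returned object can implement hooks such as `writeMesh()` or `writeMaterialAsync()`, which
* the writer invokes while processing the corresponding objects:
*
* ```js
* exporter.register( function ( writer ) {
*
* 	return {
*
* 		writeMesh: function ( mesh, meshDef ) {
*
* 			// Attach hypothetical extension data to the exported mesh definition.
* 			meshDef.extensions = meshDef.extensions || {};
* 			meshDef.extensions.EXT_example = { note: mesh.name };
* 			writer.extensionsUsed.EXT_example = true;
*
* 		}
*
* 	};
*
* } );
* ```
*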
* @param {function(writer:GLTFWriter)} callback - The callback function to register.
* @return {GLTFExporter} A reference to this exporter.
*/
register( callback ) {
if ( this.pluginCallbacks.indexOf( callback ) === - 1 ) {
this.pluginCallbacks.push( callback );
}
return this;
}
/**
* Unregisters a plugin callback.
*
* @param {Function} callback - The callback function to unregister.
* @return {GLTFExporter} A reference to this exporter.
*/
unregister( callback ) {
if ( this.pluginCallbacks.indexOf( callback ) !== - 1 ) {
this.pluginCallbacks.splice( this.pluginCallbacks.indexOf( callback ), 1 );
}
return this;
}
/**
* Sets the texture utils for this exporter. Only relevant when compressed textures have to be exported.
*
* Depending on whether you use {@link WebGLRenderer} or {@link WebGPURenderer}, you must inject the
* corresponding texture utils {@link WebGLTextureUtils} or {@link WebGPUTextureUtils}.
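*
* A minimal sketch for a WebGL setup; the addon path of the texture utils module is an
* assumption and may differ between three.js versions:
*
* ```js
* // Hypothetical import path - verify against your three.js version.
* import * as TextureUtils from 'three/addons/utils/WebGLTextureUtils.js';
*
* const exporter = new GLTFExporter();
* exporter.setTextureUtils( TextureUtils );
* ```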
*
* @param {WebGLTextureUtils|WebGPUTextureUtils} utils - The texture utils.
* @return {GLTFExporter} A reference to this exporter.
*/
setTextureUtils( utils ) {
this.textureUtils = utils;
return this;
}
/**
* Parses the given scenes and generates the glTF output.
*
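* A minimal callback-based sketch; {@link GLTFExporter#parseAsync} wraps this method in a Promise:
*
* ```js
* exporter.parse(
* 	scene,
* 	( result ) => console.log( 'Export finished.', result ),
* 	( error ) => console.error( 'Export failed.', error ),
* 	{ binary: true }
* );
* ```
*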
* @param {Scene|Array<Scene>} input - A scene or an array of scenes.
* @param {GLTFExporter~OnDone} onDone - A callback function that is executed when the export has finished.
* @param {GLTFExporter~OnError} onError - A callback function that is executed when an error happens.
* @param {GLTFExporter~Options} options - The export options.
*/
parse( input, onDone, onError, options ) {
const writer = new GLTFWriter();
const plugins = [];
for ( let i = 0, il = this.pluginCallbacks.length; i < il; i ++ ) {
plugins.push( this.pluginCallbacks[ i ]( writer ) );
}
writer.setPlugins( plugins );
writer.setTextureUtils( this.textureUtils );
writer.writeAsync( input, onDone, options ).catch( onError );
}
/**
* Async version of {@link GLTFExporter#parse}.
*
* @param {Scene|Array<Scene>} input - A scene or an array of scenes.
* @param {GLTFExporter~Options} options - The export options.
* @return {Promise<ArrayBuffer|Object>} A Promise that resolves with the exported glTF data (an ArrayBuffer for binary exports, otherwise a JSON object).
*/
parseAsync( input, options ) {
const scope = this;
return new Promise( function ( resolve, reject ) {
scope.parse( input, resolve, reject, options );
} );
}
}
//------------------------------------------------------------------------------
// Constants
//------------------------------------------------------------------------------
const WEBGL_CONSTANTS = {
POINTS: 0x0000,
LINES: 0x0001,
LINE_LOOP: 0x0002,
LINE_STRIP: 0x0003,
TRIANGLES: 0x0004,
TRIANGLE_STRIP: 0x0005,
TRIANGLE_FAN: 0x0006,
BYTE: 0x1400,
UNSIGNED_BYTE: 0x1401,
SHORT: 0x1402,
UNSIGNED_SHORT: 0x1403,
INT: 0x1404,
UNSIGNED_INT: 0x1405,
FLOAT: 0x1406,
ARRAY_BUFFER: 0x8892,
ELEMENT_ARRAY_BUFFER: 0x8893,
NEAREST: 0x2600,
LINEAR: 0x2601,
NEAREST_MIPMAP_NEAREST: 0x2700,
LINEAR_MIPMAP_NEAREST: 0x2701,
NEAREST_MIPMAP_LINEAR: 0x2702,
LINEAR_MIPMAP_LINEAR: 0x2703,
CLAMP_TO_EDGE: 33071,
MIRRORED_REPEAT: 33648,
REPEAT: 10497
};
const KHR_MESH_QUANTIZATION = 'KHR_mesh_quantization';
const THREE_TO_WEBGL = {};
THREE_TO_WEBGL[ NearestFilter ] = WEBGL_CONSTANTS.NEAREST;
THREE_TO_WEBGL[ NearestMipmapNearestFilter ] = WEBGL_CONSTANTS.NEAREST_MIPMAP_NEAREST;
THREE_TO_WEBGL[ NearestMipmapLinearFilter ] = WEBGL_CONSTANTS.NEAREST_MIPMAP_LINEAR;
THREE_TO_WEBGL[ LinearFilter ] = WEBGL_CONSTANTS.LINEAR;
THREE_TO_WEBGL[ LinearMipmapNearestFilter ] = WEBGL_CONSTANTS.LINEAR_MIPMAP_NEAREST;
THREE_TO_WEBGL[ LinearMipmapLinearFilter ] = WEBGL_CONSTANTS.LINEAR_MIPMAP_LINEAR;
THREE_TO_WEBGL[ ClampToEdgeWrapping ] = WEBGL_CONSTANTS.CLAMP_TO_EDGE;
THREE_TO_WEBGL[ RepeatWrapping ] = WEBGL_CONSTANTS.REPEAT;
THREE_TO_WEBGL[ MirroredRepeatWrapping ] = WEBGL_CONSTANTS.MIRRORED_REPEAT;
const PATH_PROPERTIES = {
scale: 'scale',
position: 'translation',
quaternion: 'rotation',
morphTargetInfluences: 'weights'
};
const DEFAULT_SPECULAR_COLOR = new Color();
// GLB constants
// https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#glb-file-format-specification
const GLB_HEADER_BYTES = 12;
const GLB_HEADER_MAGIC = 0x46546C67;
const GLB_VERSION = 2;
const GLB_CHUNK_PREFIX_BYTES = 8;
const GLB_CHUNK_TYPE_JSON = 0x4E4F534A;
const GLB_CHUNK_TYPE_BIN = 0x004E4942;
//------------------------------------------------------------------------------
// Utility functions
//------------------------------------------------------------------------------
/**
* Compare two arrays
*
* @private
* @param {Array} array1 Array 1 to compare
* @param {Array} array2 Array 2 to compare
* @return {boolean} Returns true if both arrays are equal
*/
function equalArray( array1, array2 ) {
return ( array1.length === array2.length ) && array1.every( function ( element, index ) {
return element === array2[ index ];
} );
}
/**
* Converts a string to an ArrayBuffer.
*
* @private
* @param {string} text
* @return {ArrayBuffer}
*/
function stringToArrayBuffer( text ) {
return new TextEncoder().encode( text ).buffer;
}
/**
* Is identity matrix
*
* @private
* @param {Matrix4} matrix
* @returns {boolean} Returns true if the given matrix is the identity matrix
*/
function isIdentityMatrix( matrix ) {
return equalArray( matrix.elements, [ 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1 ] );
}
/**
* Get the min and max vectors from the given attribute
*
* @private
* @param {BufferAttribute} attribute Attribute to find the min/max in range from start to start + count
* @param {number} start Start index
* @param {number} count Range to cover
* @return {Object} Object containing the `min` and `max` values (as arrays of attribute.itemSize components)
*/
function getMinMax( attribute, start, count ) {
const output = {
min: new Array( attribute.itemSize ).fill( Number.POSITIVE_INFINITY ),
max: new Array( attribute.itemSize ).fill( Number.NEGATIVE_INFINITY )
};
for ( let i = start; i < start + count; i ++ ) {
for ( let a = 0; a < attribute.itemSize; a ++ ) {
let value;
if ( attribute.itemSize > 4 ) {
// no support for interleaved data for itemSize > 4
value = attribute.array[ i * attribute.itemSize + a ];
} else {
if ( a === 0 ) value = attribute.getX( i );
else if ( a === 1 ) value = attribute.getY( i );
else if ( a === 2 ) value = attribute.getZ( i );
else if ( a === 3 ) value = attribute.getW( i );
if ( attribute.normalized === true ) {
value = MathUtils.normalize( value, attribute.array );
}
}
output.min[ a ] = Math.min( output.min[ a ], value );
output.max[ a ] = Math.max( output.max[ a ], value );
}
}
return output;
}
/**
* Get the required size + padding for a buffer, rounded to the next 4-byte boundary.
* https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#data-alignment
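* For example, a 13-byte buffer is padded to 16 bytes, while a 16-byte buffer stays at 16 bytes.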
*
* @private
* @param {number} bufferSize The size of the original buffer. Should be an integer.
* @returns {number} The new buffer size with the required padding, as an integer.
*
*/
function getPaddedBufferSize( bufferSize ) {
return Math.ceil( bufferSize / 4 ) * 4;
}
/**
* Returns a buffer aligned to 4-byte boundary.
*
* @private
* @param {ArrayBuffer} arrayBuffer Buffer to pad
* @param {number} [paddingByte=0] Should be an integer
* @returns {ArrayBuffer} The same buffer if it's already aligned to 4-byte boundary or a new buffer
*/
function getPaddedArrayBuffer( arrayBuffer, paddingByte = 0 ) {
const paddedLength = getPaddedBufferSize( arrayBuffer.byteLength );
if ( paddedLength !== arrayBuffer.byteLength ) {
const array = new Uint8Array( paddedLength );
array.set( new Uint8Array( arrayBuffer ) );
if ( paddingByte !== 0 ) {
for ( let i = arrayBuffer.byteLength; i < paddedLength; i ++ ) {
array[ i ] = paddingByte;
}
}
return array.buffer;
}
return arrayBuffer;
}
function getCanvas() {
if ( typeof document === 'undefined' && typeof OffscreenCanvas !== 'undefined' ) {
return new OffscreenCanvas( 1, 1 );
}
return document.createElement( 'canvas' );
}
function getToBlobPromise( canvas, mimeType ) {
if ( canvas.toBlob !== undefined ) {
return new Promise( ( resolve ) => canvas.toBlob( resolve, mimeType ) );
}
let quality;
// Blink's implementation of convertToBlob seems to default to a quality level of 100%
// Use the Blink default quality levels of toBlob instead so that file sizes are comparable.
if ( mimeType === 'image/jpeg' ) {
quality = 0.92;
} else if ( mimeType === 'image/webp' ) {
quality = 0.8;
}
return canvas.convertToBlob( {
type: mimeType,
quality: quality
} );
}
/**
* Writer
*
* @private
*/
class GLTFWriter {
constructor() {
this.plugins = [];
this.options = {};
this.pending = [];
this.buffers = [];
this.byteOffset = 0;
this.nodeMap = new Map();
this.skins = [];
this.extensionsUsed = {};
this.extensionsRequired = {};
this.uids = new Map();
this.uid = 0;
this.json = {
asset: {
version: '2.0',
generator: 'THREE.GLTFExporter r' + REVISION
}
};
this.cache = {
meshes: new Map(),
attributes: new Map(),
attributesNormalized: new Map(),
materials: new Map(),
textures: new Map(),
images: new Map()
};
this.textureUtils = null;
}
setPlugins( plugins ) {
this.plugins = plugins;
}
setTextureUtils( utils ) {
this.textureUtils = utils;
}
/**
* Parse scenes and generate glTF output.
*
* @param {Scene|Array<Scene>} input Scene or Array of THREE.Scenes
* @param {Function} onDone Callback invoked when the export has completed
* @param {Object} options Export options
*/
async writeAsync( input, onDone, options = {} ) {
this.options = Object.assign( {
// default options
binary: false,
trs: false,
onlyVisible: true,
maxTextureSize: Infinity,
animations: [],
includeCustomExtensions: false
}, options );
if ( this.options.animations.length > 0 ) {
// Only TRS properties, and not matrices, may be targeted by animation.
this.options.trs = true;
}
await this.processInputAsync( input );
await Promise.all( this.pending );
const writer = this;
const buffers = writer.buffers;
const json = writer.json;
options = writer.options;
const extensionsUsed = writer.extensionsUsed;
const extensionsRequired = writer.extensionsRequired;
// Merge buffers.
const blob = new Blob( buffers, { type: 'application/octet-stream' } );
// Declare extensions.
const extensionsUsedList = Object.keys( extensionsUsed );
const extensionsRequiredList = Object.keys( extensionsRequired );
if ( extensionsUsedList.length > 0 ) json.extensionsUsed = extensionsUsedList;
if ( extensionsRequiredList.length > 0 ) json.extensionsRequired = extensionsRequiredList;
// Update bytelength of the single buffer.
if ( json.buffers && json.buffers.length > 0 ) json.buffers[ 0 ].byteLength = blob.size;
if ( options.binary === true ) {
// https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#glb-file-format-specification
const reader = new FileReader();
reader.readAsArrayBuffer( blob );
reader.onloadend = function () {
// Binary chunk.
const binaryChunk = getPaddedArrayBuffer( reader.result );
const binaryChunkPrefix = new DataView( new ArrayBuffer( GLB_CHUNK_PREFIX_BYTES ) );
binaryChunkPrefix.setUint32( 0, binaryChunk.byteLength, true );
binaryChunkPrefix.setUint32( 4, GLB_CHUNK_TYPE_BIN, true );
// JSON chunk.
const jsonChunk = getPaddedArrayBuffer( stringToArrayBuffer( JSON.stringify( json ) ), 0x20 );
const jsonChunkPrefix = new DataView( new ArrayBuffer( GLB_CHUNK_PREFIX_BYTES ) );
jsonChunkPrefix.setUint32( 0, jsonChunk.byteLength, true );
jsonChunkPrefix.setUint32( 4, GLB_CHUNK_TYPE_JSON, true );
// GLB header.
const header = new ArrayBuffer( GLB_HEADER_BYTES );
const headerView = new DataView( header );
headerView.setUint32( 0, GLB_HEADER_MAGIC, true );
headerView.setUint32( 4, GLB_VERSION, true );
const totalByteLength = GLB_HEADER_BYTES
+ jsonChunkPrefix.byteLength + jsonChunk.byteLength
+ binaryChunkPrefix.byteLength + binaryChunk.byteLength;
headerView.setUint32( 8, totalByteLength, true );
const glbBlob = new Blob( [
header,
jsonChunkPrefix,
jsonChunk,
binaryChunkPrefix,
binaryChunk
], { type: 'application/octet-stream' } );
const glbReader = new FileReader();
glbReader.readAsArrayBuffer( glbBlob );
glbReader.onloadend = function () {
onDone( glbReader.result );
};
};
} else {
if ( json.buffers && json.buffers.length > 0 ) {
const reader = new FileReader();
reader.readAsDataURL( blob );
reader.onloadend = function () {
const base64data = reader.result;
json.buffers[ 0 ].uri = base64data;
onDone( json );
};
} else {
onDone( json );
}
}
}
/**
* Serializes an object's userData into the given glTF definition.
*
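* When the `includeCustomExtensions` option is enabled, entries under `userData.gltfExtensions`
* are written as glTF extensions on the object definition instead of extras. A minimal sketch,
* assuming a hypothetical `EXT_example` extension:
*
* ```js
* mesh.userData.gltfExtensions = { EXT_example: { note: 'hello' } };
* const gltf = await exporter.parseAsync( scene, { includeCustomExtensions: true } );
* ```
*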
* @param {THREE.Object3D|THREE.Material} object
* @param {Object} objectDef
*/
serializeUserData( object, objectDef ) {
if ( Object.keys( object.userData ).length === 0 ) return;
const options = this.options;
const extensionsUsed = this.extensionsUsed;
try {
const json = JSON.parse( JSON.stringify( object.userData ) );
if ( options.includeCustomExtensions && json.gltfExtensions ) {
if ( objectDef.extensions === undefined ) objectDef.extensions = {};
for ( const extensionName in json.gltfExtensions ) {
objectDef.extensions[ extensionName ] = json.gltfExtensions[ extensionName ];
extensionsUsed[ extensionName ] = true;
}
delete json.gltfExtensions;
}
if ( Object.keys( json ).length > 0 ) objectDef.extras = json;
} catch ( error ) {
console.warn( 'THREE.GLTFExporter: userData of \'' + object.name + '\' ' +
'won\'t be serialized because of JSON.stringify error - ' + error.message );
}
}
/**
* Returns ids for buffer attributes.
*
* @param {Object} attribute
* @param {boolean} [isRelativeCopy=false]
* @return {number} An integer
*/
getUID( attribute, isRelativeCopy = false ) {
if ( this.uids.has( attribute ) === false ) {
const uids = new Map();
uids.set( true, this.uid ++ );
uids.set( false, this.uid ++ );
this.uids.set( attribute, uids );
}
const uids = this.uids.get( attribute );
return uids.get( isRelativeCopy );
}
/**
* Checks if normal attribute values are normalized.
*
* @param {BufferAttribute} normal
* @returns {boolean}
*/
isNormalizedNormalAttribute( normal ) {
const cache = this.cache;
if ( cache.attributesNormalized.has( normal ) ) return false;
const v = new Vector3();
for ( let i = 0, il = normal.count; i < il; i ++ ) {
// 0.0005 is from glTF-validator
if ( Math.abs( v.fromBufferAttribute( normal, i ).length() - 1.0 ) > 0.0005 ) return false;
}
return true;
}
/**
* Creates normalized normal buffer attribute.
*
* @param {BufferAttribute} normal
* @returns {BufferAttribute}
*
*/
createNormalizedNormalAttribute( normal ) {
const cache = this.cache;
if ( cache.attributesNormalized.has( normal ) ) return cache.attributesNormalized.get( normal );
const attribute = normal.clone();
const v = new Vector3();
for ( let i = 0, il = attribute.count; i < il; i ++ ) {
v.fromBufferAttribute( attribute, i );
if ( v.x === 0 && v.y === 0 && v.z === 0 ) {
// if values can't be normalized set (1, 0, 0)
v.setX( 1.0 );
} else {
v.normalize();
}
attribute.setXYZ( i, v.x, v.y, v.z );
}
cache.attributesNormalized.set( normal, attribute );
return attribute;
}
/**
* Applies a texture transform, if present, to the map definition. Requires
* the KHR_texture_transform extension.
*
* @param {Object} mapDef
* @param {THREE.Texture} texture
*/
applyTextureTransform( mapDef, texture ) {
let didTransform = false;
const transformDef = {};
if ( texture.offset.x !== 0 || texture.offset.y !== 0 ) {
transformDef.offset = texture.offset.toArray();
didTransform = true;
}
if ( texture.rotation !== 0 ) {
transformDef.rotation = texture.rotation;
didTransform = true;
}
if ( texture.repeat.x !== 1 || texture.repeat.y !== 1 ) {
transformDef.scale = texture.repeat.toArray();
didTransform = true;
}
if ( didTransform ) {
mapDef.extensions = mapDef.extensions || {};
mapDef.extensions[ 'KHR_texture_transform' ] = transformDef;
this.extensionsUsed[ 'KHR_texture_transform' ] = true;
}
}
async buildMetalRoughTextureAsync( metalnessMap, roughnessMap ) {
if ( metalnessMap === roughnessMap ) return metalnessMap;
function getEncodingConversion( map ) {
if ( map.colorSpace === SRGBColorSpace ) {
return function SRGBToLinear( c ) {
return ( c < 0.04045 ) ? c * 0.0773993808 : Math.pow( c * 0.9478672986 + 0.0521327014, 2.4 );
};
}
return function LinearToLinear( c ) {
return c;
};
}
if ( metalnessMap instanceof CompressedTexture ) {
metalnessMap = await this.decompressTextureAsync( metalnessMap );
}
if ( roughnessMap instanceof CompressedTexture ) {
roughnessMap = await this.decompressTextureAsync( roughnessMap );
}
const metalness = metalnessMap ? metalnessMap.image : null;
const roughness = roughnessMap ? roughnessMap.image : null;
const width = Math.max( metalness ? metalness.width : 0, roughness ? roughness.width : 0 );
const height = Math.max( metalness ? metalness.height : 0, roughness ? roughness.height : 0 );
const canvas = getCanvas();
canvas.width = width;
canvas.height = height;
const context = canvas.getContext( '2d', {
willReadFrequently: true,
} );
context.fillStyle = '#00ffff';
context.fillRect( 0, 0, width, height );
const composite = context.getImageData( 0, 0, width, height );
if ( metalness ) {
context.drawImage( metalness, 0, 0, width, height );
const convert = getEncodingConversion( metalnessMap );
const data = context.getImageData( 0, 0, width, height ).data;
for ( let i = 2; i < data.length; i += 4 ) {
composite.data[ i ] = convert( data[ i ] / 256 ) * 256;
}
}
if ( roughness ) {
context.drawImage( roughness, 0, 0, width, height );
const convert = getEncodingConversion( roughnessMap );
const data = context.getImageData( 0, 0, width, height ).data;
for ( let i = 1; i < data.length; i += 4 ) {
composite.data[ i ] = convert( data[ i ] / 256 ) * 256;
}
}
context.putImageData( composite, 0, 0 );
//
const reference = metalnessMap || roughnessMap;
const texture = reference.clone();
texture.source = new Source( canvas );
texture.colorSpace = NoColorSpace;
texture.channel = ( metalnessMap || roughnessMap ).channel;
if ( metalnessMap && roughnessMap && metalnessMap.channel !== roughnessMap.channel ) {
console.warn( 'THREE.GLTFExporter: UV channels for metalnessMap and roughnessMap textures must match.' );
}
console.warn( 'THREE.GLTFExporter: Merged metalnessMap and roughnessMap textures.' );
return texture;
}
async decompressTextureAsync( texture, maxTextureSize = Infinity ) {
if ( this.textureUtils === null ) {
throw new Error( 'THREE.GLTFExporter: setTextureUtils() must be called to process compressed textures.' );
}
return await this.textureUtils.decompress( texture, maxTextureSize );
}
/**
* Process a buffer to append to the default one.
* @param {ArrayBuffer} buffer
* @return {number} Index of the buffer (always 0, since all buffers are merged into a single buffer before export)
*/
processBuffer( buffer ) {
const json = this.json;
const buffers = this.buffers;
if ( ! json.buffers ) json.buffers = [ { byteLength: 0 } ];
// All buffers are merged before export.
buffers.push( buffer );
return 0;
}
/**
* Process and generate a BufferView
* @param {BufferAttribute} attribute
* @param {number} componentType
* @param {number} start
* @param {number} count
* @param {number} [target] Target usage of the BufferView
* @return {Object}
*/
processBufferView( attribute, componentType, start, count, target ) {
const json = this.json;
if ( ! json.bufferViews ) json.bufferViews = [];
// Create a new dataview and dump the attribute's array into it
let componentSize;
switch ( componentType ) {
case WEBGL_CONSTANTS.BYTE:
case WEBGL_CONSTANTS.UNSIGNED_BYTE:
componentSize = 1;
break;
case WEBGL_CONSTANTS.SHORT:
case WEBGL_CONSTANTS.UNSIGNED_SHORT:
componentSize = 2;
break;
default:
componentSize = 4;
}
let byteStride = attribute.itemSize * componentSize;
if ( target === WEBGL_CONSTANTS.ARRAY_BUFFER ) {
// Each element of a vertex attribute MUST be aligned to 4-byte boundaries
// inside a bufferView
byteStride = Math.ceil( byteStride / 4 ) * 4;
}
const byteLength = getPaddedBufferSize( count * byteStride );
const dataView = new DataView( new ArrayBuffer( byteLength ) );
let offset = 0;
for ( let i = start; i < start + count; i ++ ) {
for ( let a = 0; a < attribute.itemSize; a ++ ) {
let value;
if ( attribute.itemSize > 4 ) {
// no support for interleaved data for itemSize > 4
value = attribute.array[ i * attribute.itemSize + a ];
} else {
if ( a === 0 ) value = attribute.getX( i );
else if ( a === 1 ) value = attribute.getY( i );
else if ( a === 2 ) value = attribute.getZ( i );
else if ( a === 3 ) value = attribute.getW( i );
if ( attribute.normalized === true ) {
value = MathUtils.normalize( value, attribute.array );
}
}
if ( componentType === WEBGL_CONSTANTS.FLOAT ) {
dataView.setFloat32( offset, value, true );
} else if ( componentType === WEBGL_CONSTANTS.INT ) {
dataView.setInt32( offset, value, true );
} else if ( componentType === WEBGL_CONSTANTS.UNSIGNED_INT ) {
dataView.setUint32( offset, value, true );
} else if ( componentType === WEBGL_CONSTANTS.SHORT ) {
dataView.setInt16( offset, value, true );
} else if ( componentType === WEBGL_CONSTANTS.UNSIGNED_SHORT ) {
dataView.setUint16( offset, value, true );
} else if ( componentType === WEBGL_CONSTANTS.BYTE ) {
dataView.setInt8( offset, value );
} else if ( componentType === WEBGL_CONSTANTS.UNSIGNED_BYTE ) {
dataView.setUint8( offset, value );
}
offset += componentSize;
}
if ( ( offset % byteStride ) !== 0 ) {
offset += byteStride - ( offset % byteStride );
}
}
const bufferViewDef = {
buffer: this.processBuffer( dataView.buffer ),
byteOffset: this.byteOffset,
byteLength: byteLength
};
if ( target !== undefined ) bufferViewDef.target = target;
if ( target === WEBGL_CONSTANTS.ARRAY_BUFFER ) {
// Only define byteStride for vertex attributes.
bufferViewDef.byteStride = byteStride;
}
this.byteOffset += byteLength;
json.bufferViews.push( bufferViewDef );
// @TODO Merge bufferViews where possible.
const output = {
id: json.bufferViews.length - 1,
byteLength: 0
};
return output;
}
/**
* Process and generate a BufferView from an image Blob.
* @param {Blob} blob
* @return {Promise<number>} Index of the new bufferView
*/
processBufferViewImage( blob ) {
const writer = this;
const json = writer.json;
if ( ! json.bufferViews ) json.bufferViews = [];
return new Promise( function ( resolve ) {
const reader = new FileReader();
reader.readAsArrayBuffer( blob );
reader.onloadend = function () {
const buffer = getPaddedArrayBuffer( reader.result );
const bufferViewDef = {
buffer: writer.processBuffer( buffer ),
byteOffset: writer.byteOffset,
byteLength: buffer.byteLength
};
writer.byteOffset += buffer.byteLength;
resolve( json.bufferViews.push( bufferViewDef ) - 1 );
};
} );
}
/**
* Process attribute to generate an accessor
* @param {BufferAttribute} attribute Attribute to process
* @param {?BufferGeometry} [geometry] Geometry used for truncated draw range
* @param {number} [start=0]
* @param {number} [count=Infinity]
* @return {?number} Index of the processed accessor on the "accessors" array
*/
processAccessor( attribute, geometry, start, count ) {
const json = this.json;
const types = {
1: 'SCALAR',
2: 'VEC2',
3: 'VEC3',
4: 'VEC4',
9: 'MAT3',
16: 'MAT4'
};
let componentType;
// Detect the component type of the attribute array
if ( attribute.array.constructor === Float32Array ) {
componentType = WEBGL_CONSTANTS.FLOAT;
} else if ( attribute.array.constructor === Int32Array ) {
componentType = WEBGL_CONSTANTS.INT;
} else if ( attribute.array.constructor === Uint32Array ) {
componentType = WEBGL_CONSTANTS.UNSIGNED_INT;
} else if ( attribute.array.constructor === Int16Array ) {
componentType = WEBGL_CONSTANTS.SHORT;
} else if ( attribute.array.constructor === Uint16Array ) {
componentType = WEBGL_CONSTANTS.UNSIGNED_SHORT;
} else if ( attribute.array.constructor === Int8Array ) {
componentType = WEBGL_CONSTANTS.BYTE;
} else if ( attribute.array.constructor === Uint8Array ) {
componentType = WEBGL_CONSTANTS.UNSIGNED_BYTE;
} else {
throw new Error( 'THREE.GLTFExporter: Unsupported bufferAttribute component type: ' + attribute.array.constructor.name );
}
if ( start === undefined ) start = 0;
if ( count === undefined || count === Infinity ) count = attribute.count;
// Skip creating an accessor if the attribute doesn't have data to export
if ( count === 0 ) return null;
const minMax = getMinMax( attribute, start, count );
let bufferViewTarget;
// If geometry isn't provided, don't infer the target usage of the bufferView. For
// animation samplers, target must not be set.
if ( geometry !== undefined ) {
bufferViewTarget = attribute === geometry.index ? WEBGL_CONSTANTS.ELEMENT_ARRAY_BUFFER : WEBGL_CONSTANTS.ARRAY_BUFFER;
}
const bufferView = this.processBufferView( attribute, componentType, start, count, bufferViewTarget );
const accessorDef = {
bufferView: bufferView.id,
byteOffset: bufferView.byteOffset,
componentType: componentType,
count: count,
max: minMax.max,
min: minMax.min,
type: types[ attribute.itemSize ]
};
if ( attribute.normalized === true ) accessorDef.normalized = true;
if ( ! json.accessors ) json.accessors = [];
return json.accessors.push( accessorDef ) - 1;
}
/**
* Process image
* @param {Image} image Image to process
* @param {number} format Identifier of the format (RGBAFormat)
* @param {boolean} flipY Whether to flip the image vertically before writing it out
* @param {string} mimeType MIME type of the export format
* @return {number} Index of the processed image in the "images" array
*/
processImage( image, format, flipY, mimeType = 'image/png' ) {
if ( image !== null ) {
const writer = this;
const cache = writer.cache;
const json = writer.json;
const options = writer.options;
const pending = writer.pending;
if ( ! cache.images.has( image ) ) cache.images.set( image, {} );
const cachedImages = cache.images.get( image );
const key = mimeType + ':flipY/' + flipY.toString();
if ( cachedImages[ key ] !== undefined ) return cachedImages[ key ];
if ( ! json.images ) json.images = [];
const imageDef = { mimeType: mimeType };
const canvas = getCanvas();
canvas.width = Math.min( image.width, options.maxTextureSize );
canvas.height = Math.min( image.height, options.maxTextureSize );
const ctx = canvas.getContext( '2d', {
willReadFrequently: true,
} );
if ( flipY === true ) {
ctx.translate( 0, canvas.height );
ctx.scale( 1, - 1 );
}
if ( image.data !== undefined ) { // THREE.DataTexture
if ( format !== RGBAFormat ) {
console.error( 'GLTFExporter: Only RGBAFormat is supported.', format );
}
if ( image.width > options.maxTextureSize || image.height > options.maxTextureSize ) {
console.warn( 'GLTFExporter: Image size is bigger than maxTextureSize', image );
}
const data = new Uint8ClampedArray( image.height * image.width * 4 );
for ( let i = 0; i < data.length; i += 4 ) {
data[ i + 0 ] = image.data[ i + 0 ];
data[ i + 1 ] = image.data[ i + 1 ];
data[ i + 2 ] = image.data[ i + 2 ];
data[ i + 3 ] = image.data[ i + 3 ];
}
ctx.putImageData( new ImageData( data, image.width, image.height ), 0, 0 );
} else {
if ( ( typeof HTMLImageElement !== 'undefined' && image instanceof HTMLImageElement ) ||
( typeof HTMLCanvasElement !== 'undefined' && image instanceof HTMLCanvasElement ) ||
( typeof ImageBitmap !== 'undefined' && image instanceof ImageBitmap ) ||
( typeof OffscreenCanvas !== 'undefined' && image instanceof OffscreenCanvas ) ) {
ctx.drawImage( image, 0, 0, canvas.width, canvas.height );
} else {
throw new Error( 'THREE.GLTFExporter: Invalid image type. Use HTMLImageElement, HTMLCanvasElement, ImageBitmap or OffscreenCanvas.' );
}
}
if ( options.binary === true ) {
pending.push(
getToBlobPromise( canvas, mimeType )
.then( blob => writer.processBufferViewImage( blob ) )
.then( bufferViewIndex => {
imageDef.bufferView = bufferViewIndex;
} )
);
} else {
imageDef.uri = ImageUtils.getDataURL( canvas, mimeType );
}
const index = json.images.push( imageDef ) - 1;
cachedImages[ key ] = index;
return index;
} else {
throw new Error( 'THREE.GLTFExporter: No valid image data found. Unable to process texture.' );
}
}
/**
* Process sampler
* @param {Texture} map Texture to process
* @return {number} Index of the processed sampler in the "samplers" array
*/
processSampler( map ) {
const json = this.json;
if ( ! json.samplers ) json.samplers = [];
const samplerDef = {
magFilter: THREE_TO_WEBGL[ map.magFilter ],
minFilter: THREE_TO_WEBGL[ map.minFilter ],
wrapS: THREE_TO_WEBGL[ map.wrapS ],
wrapT: THREE_TO_WEBGL[ map.wrapT ]
};
return json.samplers.push( samplerDef ) - 1;
}
/**
* Process texture
* @param {Texture} map Map to process
* @return {Promise<number>} Index of the processed texture in the "textures" array
*/
async processTextureAsync( map ) {
const writer = this;
const options = writer.options;
const cache = this.cache;
const json = this.json;
if ( cache.textures.has( map ) ) return cache.textures.get( map );
if ( ! json.textures ) json.textures = [];
// make non-readable textures (e.g. CompressedTexture) readable by blitting them into a new texture
if ( map instanceof CompressedTexture ) {
map = await this.decompressTextureAsync( map, options.maxTextureSize );
}
let mimeType = map.userData.mimeType;
if ( mimeType === 'image/webp' ) mimeType = 'image/png';
const textureDef = {
sampler: this.processSampler( map ),
source: this.processImage( map.image, map.format, map.flipY, mimeType )
};
if ( map.name ) textureDef.name = map.name;
await this._invokeAllAsync( async function ( ext ) {
ext.writeTexture && await ext.writeTexture( map, textureDef );
} );
const index = json.textures.push( textureDef ) - 1;
cache.textures.set( map, index );
return index;
}
/**
* Process material
* @param {THREE.Material} material Material to process
* @return {Promise<number|null>} Index of the processed material in the "materials" array
*/
async processMaterialAsync( material ) {
const cache = this.cache;
const json = this.json;
if ( cache.materials.has( material ) ) return cache.materials.get( material );
if ( material.isShaderMaterial ) {
console.warn( 'GLTFExporter: THREE.ShaderMaterial not supported.' );
return null;
}
if ( ! json.materials ) json.materials = [];
// @QUESTION Should we avoid including any attribute that has the default value?
const materialDef = { pbrMetallicRoughness: {} };
if ( material.isMeshStandardMaterial !== true && material.isMeshBasicMaterial !== true ) {
console.warn( 'GLTFExporter: Use MeshStandardMaterial or MeshBasicMaterial for best results.' );
}
// pbrMetallicRoughness.baseColorFactor
const color = material.color.toArray().concat( [ material.opacity ] );
if ( ! equalArray( color, [ 1, 1, 1, 1 ] ) ) {
materialDef.pbrMetallicRoughness.baseColorFactor = color;
}
if ( material.isMeshStandardMaterial ) {
materialDef.pbrMetallicRoughness.metallicFactor = material.metalness;
materialDef.pbrMetallicRoughness.roughnessFactor = material.roughness;
} else {
materialDef.pbrMetallicRoughness.metallicFactor = 0;
materialDef.pbrMetallicRoughness.roughnessFactor = 1;
}
// pbrMetallicRoughness.metallicRoughnessTexture
if ( material.metalnessMap || material.roughnessMap ) {
const metalRoughTexture = await this.buildMetalRoughTextureAsync( material.metalnessMap, material.roughnessMap );
const metalRoughMapDef = {
index: await this.processTextureAsync( metalRoughTexture ),
texCoord: metalRoughTexture.channel
};
this.applyTextureTransform( metalRoughMapDef, metalRoughTexture );
materialDef.pbrMetallicRoughness.metallicRoughnessTexture = metalRoughMapDef;
}
// pbrMetallicRoughness.baseColorTexture
if ( material.map ) {
const baseColorMapDef = {
index: await this.processTextureAsync( material.map ),
texCoord: material.map.channel
};
this.applyTextureTransform( baseColorMapDef, material.map );
materialDef.pbrMetallicRoughness.baseColorTexture = baseColorMapDef;
}
if ( material.emissive ) {
const emissive = material.emissive;
const maxEmissiveComponent = Math.max( emissive.r, emissive.g, emissive.b );
if ( maxEmissiveComponent > 0 ) {
materialDef.emissiveFactor = material.emissive.toArray();
}
// emissiveTexture
if ( material.emissiveMap ) {
const emissiveMapDef = {
index: await this.processTextureAsync( material.emissiveMap ),
texCoord: material.emissiveMap.channel
};
this.applyTextureTransform( emissiveMapDef, material.emissiveMap );
materialDef.emissiveTexture = emissiveMapDef;
}
}
// normalTexture
if ( material.normalMap ) {
const normalMapDef = {
index: await this.processTextureAsync( material.normalMap ),
texCoord: material.normalMap.channel
};
if ( material.normalScale && material.normalScale.x !== 1 ) {
// glTF normal scale is univariate. Ignore `y`, which may be flipped.
// Context: https://github.com/mrdoob/three.js/issues/11438#issuecomment-507003995
normalMapDef.scale = material.normalScale.x;
}
this.applyTextureTransform( normalMapDef, material.normalMap );
materialDef.normalTexture = normalMapDef;
}
// occlusionTexture
if ( material.aoMap ) {
const occlusionMapDef = {
index: await this.processTextureAsync( material.aoMap ),
texCoord: material.aoMap.channel
};
if ( material.aoMapIntensity !== 1.0 ) {
occlusionMapDef.strength = material.aoMapIntensity;
}
this.applyTextureTransform( occlusionMapDef, material.aoMap );
materialDef.occlusionTexture = occlusionMapDef;
}
// alphaMode
if ( material.transparent ) {
materialDef.alphaMode = 'BLEND';
} else {
if ( material.alphaTest > 0.0 ) {
materialDef.alphaMode = 'MASK';
materialDef.alphaCutoff = material.alphaTest;
}
}
// doubleSided
if ( material.side === DoubleSide ) materialDef.doubleSided = true;
if ( material.name !== '' ) materialDef.name = material.name;
this.serializeUserData( material, materialDef );
await this._invokeAllAsync( async function ( ext ) {
ext.writeMaterialAsync && await ext.writeMaterialAsync( material, materialDef );
} );
const index = json.materials.push( materialDef ) - 1;
cache.materials.set( material, index );
return index;
}
/**
* Process mesh
* @param {THREE.Mesh} mesh Mesh to process
* @return {Promise<number|null>} Index of the processed mesh in the "meshes" array
*/
async processMeshAsync( mesh ) {
const cache = this.cache;
const json = this.json;
const meshCacheKeyParts = [ mesh.geometry.uuid ];
if ( Array.isArray( mesh.material ) ) {
for ( let i = 0, l = mesh.material.length; i < l; i ++ ) {
meshCacheKeyParts.push( mesh.material[ i ].uuid );
}
} else {
meshCacheKeyParts.push( mesh.material.uuid );
}
const meshCacheKey = meshCacheKeyParts.join( ':' );
if ( cache.meshes.has( meshCacheKey ) ) return cache.meshes.get( meshCacheKey );
const geometry = mesh.geometry;
let mode;
// Use the correct mode
if ( mesh.isLineSegments ) {
mode = WEBGL_CONSTANTS.LINES;
} else if ( mesh.isLineLoop ) {
mode = WEBGL_CONSTANTS.LINE_LOOP;
} else if ( mesh.isLine ) {
mode = WEBGL_CONSTANTS.LINE_STRIP;
} else if ( mesh.isPoints ) {
mode = WEBGL_CONSTANTS.POINTS;
} else {
mode = mesh.material.wireframe ? WEBGL_CONSTANTS.LINES : WEBGL_CONSTANTS.TRIANGLES;
}
const meshDef = {};
const attributes = {};
const primitives = [];
const targets = [];
// Conversion between attribute names in three.js and the glTF spec
const nameConversion = {
uv: 'TEXCOORD_0',
uv1: 'TEXCOORD_1',
uv2: 'TEXCOORD_2',
uv3: 'TEXCOORD_3',
color: 'COLOR_0',
skinWeight: 'WEIGHTS_0',
skinIndex: 'JOINTS_0'
};
const originalNormal = geometry.getAttribute( 'normal' );
if ( originalNormal !== undefined && ! this.isNormalizedNormalAttribute( originalNormal ) ) {
console.warn( 'THREE.GLTFExporter: Creating normalized normal attribute from the non-normalized one.' );
geometry.setAttribute( 'normal', this.createNormalizedNormalAttribute( originalNormal ) );
}
// @QUESTION Detect if .vertexColors = true?
// For every attribute create an accessor
let modifiedAttribute = null;
for ( let attributeName in geometry.attributes ) {
// Ignore morph target attributes, which are exported later.
if ( attributeName.slice( 0, 5 ) === 'morph' ) continue;
const attribute = geometry.attributes[ attributeName ];
attributeName = nameConversion[ attributeName ] || attributeName.toUpperCase();
// Prefix all geometry attributes except the ones specifically
// listed in the spec; non-spec attributes are considered custom.
const validVertexAttributes =
/^(POSITION|NORMAL|TANGENT|TEXCOORD_\d+|COLOR_\d+|JOINTS_\d+|WEIGHTS_\d+)$/;
if ( ! validVertexAttributes.test( attributeName ) ) attributeName = '_' + attributeName;
if ( cache.attributes.has( this.getUID( attribute ) ) ) {
attributes[ attributeName ] = cache.attributes.get( this.getUID( attribute ) );
continue;
}
// Enforce glTF vertex attribute requirements:
// - JOINTS_0 must be UNSIGNED_BYTE or UNSIGNED_SHORT
// - Only custom attributes may be INT or UNSIGNED_INT
modifiedAttribute = null;
const array = attribute.array;
if ( attributeName === 'JOINTS_0' &&
! ( array instanceof Uint16Array ) &&
! ( array instanceof Uint8Array ) ) {
console.warn( 'GLTFExporter: Attribute "skinIndex" converted to type UNSIGNED_SHORT.' );
modifiedAttribute = new BufferAttribute( new Uint16Array( array ), attribute.itemSize, attribute.normalized );
} else if ( ( array instanceof Uint32Array || array instanceof Int32Array ) && ! attributeName.startsWith( '_' ) ) {
console.warn( `GLTFExporter: Attribute "${ attributeName }" converted to type FLOAT.` );
modifiedAttribute = GLTFExporter.Utils.toFloat32BufferAttribute( attribute );
}
const accessor = this.processAccessor( modifiedAttribute || attribute, geometry );
if ( accessor !== null ) {
if ( ! attributeName.startsWith( '_' ) ) {
this.detectMeshQuantization( attributeName, attribute );
}
attributes[ attributeName ] = accessor;
cache.attributes.set( this.getUID( attribute ), accessor );
}
}
if ( originalNormal !== undefined ) geometry.setAttribute( 'normal', originalNormal );
// Skip if no exportable attributes found
if ( Object.keys( attributes ).length === 0 ) return null;
// Morph targets
if ( mesh.morphTargetInfluences !== undefined && mesh.morphTargetInfluences.length > 0 ) {
const weights = [];
const targetNames = [];
const reverseDictionary = {};
if ( mesh.morphTargetDictionary !== undefined ) {
for ( const key in mesh.morphTargetDictionary ) {
reverseDictionary[ mesh.morphTargetDictionary[ key ] ] = key;
}
}
for ( let i = 0; i < mesh.morphTargetInfluences.length; ++ i ) {
const target = {};
let warned = false;
for ( const attributeName in geometry.morphAttributes ) {
// glTF 2.0 morph supports only POSITION/NORMAL/TANGENT.
// Three.js doesn't support TANGENT yet.
if ( attributeName !== 'position' && attributeName !== 'normal' ) {
if ( ! warned ) {
console.warn( 'GLTFExporter: Only POSITION and NORMAL morph are supported.' );
warned = true;
}
continue;
}
const attribute = geometry.morphAttributes[ attributeName ][ i ];
const gltfAttributeName = attributeName.toUpperCase();
// Three.js morph attributes hold absolute values while glTF morph targets hold relative values.
//
// glTF 2.0 Specification:
// https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#morph-targets
const baseAttribute = geometry.attributes[ attributeName ];
if ( cache.attributes.has( this.getUID( attribute, true ) ) ) {
target[ gltfAttributeName ] = cache.attributes.get( this.getUID( attribute, true ) );
continue;
}
// Clone the attribute to avoid modifying the original
const relativeAttribute = attribute.clone();
if ( ! geometry.morphTargetsRelative ) {
for ( let j = 0, jl = attribute.count; j < jl; j ++ ) {
for ( let a = 0; a < attribute.itemSize; a ++ ) {
if ( a === 0 ) relativeAttribute.setX( j, attribute.getX( j ) - baseAttribute.getX( j ) );
if ( a === 1 ) relativeAttribute.setY( j, attribute.getY( j ) - baseAttribute.getY( j ) );
if ( a === 2 ) relativeAttribute.setZ( j, attribute.getZ( j ) - baseAttribute.getZ( j ) );
if ( a === 3 ) relativeAttribute.setW( j, attribute.getW( j ) - baseAttribute.getW( j ) );
}
}
}
target[ gltfAttributeName ] = this.processAccessor( relativeAttribute, geometry );
cache.attributes.set( this.getUID( baseAttribute, true ), target[ gltfAttributeName ] );
}
targets.push( target );
weights.push( mesh.morphTargetInfluences[ i ] );
if ( mesh.morphTargetDictionary !== undefined ) targetNames.push( reverseDictionary[ i ] );
}
meshDef.weights = weights;
if ( targetNames.length > 0 ) {
meshDef.extras = {};
meshDef.extras.targetNames = targetNames;
}
}
const isMultiMaterial = Array.isArray( mesh.material );
if ( isMultiMaterial && geometry.groups.length === 0 ) return null;
let didForceIndices = false;
if ( isMultiMaterial && geometry.index === null ) {
const indices = [];
for ( let i = 0, il = geometry.attributes.position.count; i < il; i ++ ) {
indices[ i ] = i;
}
geometry.setIndex( indices );
didForceIndices = true;
}
const materials = isMultiMaterial ? mesh.material : [ mesh.material ];
const groups = isMultiMaterial ? geometry.groups : [ { materialIndex: 0, start: undefined, count: undefined } ];
for ( let i = 0, il = groups.length; i < il; i ++ ) {
const primitive = {
mode: mode,
attributes: attributes,
};
this.serializeUserData( geometry, primitive );
if ( targets.length > 0 ) primitive.targets = targets;
if ( geometry.index !== null ) {
let cacheKey = this.getUID( geometry.index );
if ( groups[ i ].start !== undefined || groups[ i ].count !== undefined ) {
cacheKey += ':' + groups[ i ].start + ':' + groups[ i ].count;
}
if ( cache.attributes.has( cacheKey ) ) {
primitive.indices = cache.attributes.get( cacheKey );
} else {
primitive.indices = this.processAccessor( geometry.index, geometry, groups[ i ].start, groups[ i ].count );
cache.attributes.set( cacheKey, primitive.indices );
}
if ( primitive.indices === null ) delete primitive.indices;
}
const material = await this.processMaterialAsync( materials[ groups[ i ].materialIndex ] );
if ( material !== null ) primitive.material = material;
primitives.push( primitive );
}
if ( didForceIndices === true ) {
geometry.setIndex( null );
}
meshDef.primitives = primitives;
if ( ! json.meshes ) json.meshes = [];
await this._invokeAllAsync( function ( ext ) {
ext.writeMesh && ext.writeMesh( mesh, meshDef );
} );
const index = json.meshes.push( meshDef ) - 1;
cache.meshes.set( meshCacheKey, index );
return index;
}
/**
* If a vertex attribute with a
* [non-standard data type](https://registry.khronos.org/glTF/specs/2.0/glTF-2.0.html#meshes-overview)
* is used, it is checked whether it is a valid data type according to the
* [KHR_mesh_quantization](https://github.co