import {
AmbientLight,
AnimationClip,
Bone,
BufferGeometry,
ClampToEdgeWrapping,
Color,
ColorManagement,
DirectionalLight,
DoubleSide,
FileLoader,
Float32BufferAttribute,
FrontSide,
Group,
Line,
LineBasicMaterial,
LineSegments,
Loader,
LoaderUtils,
MathUtils,
Matrix4,
Mesh,
MeshBasicMaterial,
MeshLambertMaterial,
MeshPhongMaterial,
OrthographicCamera,
PerspectiveCamera,
PointLight,
Quaternion,
QuaternionKeyframeTrack,
RepeatWrapping,
Scene,
Skeleton,
SkinnedMesh,
SpotLight,
TextureLoader,
Vector2,
Vector3,
VectorKeyframeTrack,
SRGBColorSpace
} from 'three';
import { TGALoader } from '../loaders/TGALoader.js';
/**
* A loader for the Collada format.
*
* The Collada format is very complex, so this loader only supports a subset of what
* is defined in the [official specification]{@link https://www.khronos.org/files/collada_spec_1_5.pdf}.
*
* Assets with a Z-UP coordinate system are transformed into Y-UP by a simple rotation.
* The vertex data are not converted.
*
* ```js
* const loader = new ColladaLoader();
*
* const result = await loader.loadAsync( './models/collada/elf/elf.dae' );
* scene.add( result.scene );
* ```
*
* @augments Loader
* @three_import import { ColladaLoader } from 'three/addons/loaders/ColladaLoader.js';
*/
class ColladaLoader extends Loader {
/**
* Starts loading from the given URL and passes the loaded Collada asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function({scene:Group,animations:Array<AnimationClip>,kinematics:Object})} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
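*
* ```js
* // A minimal callback-based sketch (assumes an existing `scene` and the elf sample asset):
* const loader = new ColladaLoader();
* loader.load( './models/collada/elf/elf.dae', function ( collada ) {
* 	scene.add( collada.scene );
* }, undefined, function ( error ) {
* 	console.error( error );
* } );
* ```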
*/
load( url, onLoad, onProgress, onError ) {
const scope = this;
const path = ( scope.path === '' ) ? LoaderUtils.extractUrlBase( url ) : scope.path;
const loader = new FileLoader( scope.manager );
loader.setPath( scope.path );
loader.setRequestHeader( scope.requestHeader );
loader.setWithCredentials( scope.withCredentials );
loader.load( url, function ( text ) {
try {
onLoad( scope.parse( text, path ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Parses the given Collada data and returns a result object holding the parsed scene,
* an array of animation clips and kinematics.
*
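* ```js
* // A minimal sketch (assumes `text` already holds the raw .dae markup, e.g. loaded via FileLoader):
* const collada = loader.parse( text, './models/collada/elf/' );
* scene.add( collada.scene );
* ```
*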
* @param {string} text - The raw Collada data as a string.
* @param {string} path - The asset path.
* @return {{scene:Group,animations:Array<AnimationClip>,kinematics:Object}} An object representing the parsed asset.
*/
parse( text, path ) {
function getElementsByTagName( xml, name ) {
// Non-recursive variant of xml.getElementsByTagName() ...
const array = [];
const childNodes = xml.childNodes;
for ( let i = 0, l = childNodes.length; i < l; i ++ ) {
const child = childNodes[ i ];
if ( child.nodeName === name ) {
array.push( child );
}
}
return array;
}
function parseStrings( text ) {
if ( text.length === 0 ) return [];
const parts = text.trim().split( /\s+/ );
const array = new Array( parts.length );
for ( let i = 0, l = parts.length; i < l; i ++ ) {
array[ i ] = parts[ i ];
}
return array;
}
function parseFloats( text ) {
if ( text.length === 0 ) return [];
const parts = text.trim().split( /\s+/ );
const array = new Array( parts.length );
for ( let i = 0, l = parts.length; i < l; i ++ ) {
array[ i ] = parseFloat( parts[ i ] );
}
return array;
}
function parseInts( text ) {
if ( text.length === 0 ) return [];
const parts = text.trim().split( /\s+/ );
const array = new Array( parts.length );
for ( let i = 0, l = parts.length; i < l; i ++ ) {
array[ i ] = parseInt( parts[ i ] );
}
return array;
}
function parseId( text ) {
return text.substring( 1 );
}
function generateId() {
return 'three_default_' + ( count ++ );
}
function isEmpty( object ) {
return Object.keys( object ).length === 0;
}
// asset
function parseAsset( xml ) {
return {
unit: parseAssetUnit( getElementsByTagName( xml, 'unit' )[ 0 ] ),
upAxis: parseAssetUpAxis( getElementsByTagName( xml, 'up_axis' )[ 0 ] )
};
}
function parseAssetUnit( xml ) {
if ( ( xml !== undefined ) && ( xml.hasAttribute( 'meter' ) === true ) ) {
return parseFloat( xml.getAttribute( 'meter' ) );
} else {
return 1; // default 1 meter
}
}
function parseAssetUpAxis( xml ) {
return xml !== undefined ? xml.textContent : 'Y_UP';
}
// library
function parseLibrary( xml, libraryName, nodeName, parser ) {
const library = getElementsByTagName( xml, libraryName )[ 0 ];
if ( library !== undefined ) {
const elements = getElementsByTagName( library, nodeName );
for ( let i = 0; i < elements.length; i ++ ) {
parser( elements[ i ] );
}
}
}
function buildLibrary( data, builder ) {
for ( const name in data ) {
const object = data[ name ];
object.build = builder( data[ name ] );
}
}
// get
function getBuild( data, builder ) {
if ( data.build !== undefined ) return data.build;
data.build = builder( data );
return data.build;
}
// animation
function parseAnimation( xml ) {
const data = {
sources: {},
samplers: {},
channels: {}
};
let hasChildren = false;
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
let id;
switch ( child.nodeName ) {
case 'source':
id = child.getAttribute( 'id' );
data.sources[ id ] = parseSource( child );
break;
case 'sampler':
id = child.getAttribute( 'id' );
data.samplers[ id ] = parseAnimationSampler( child );
break;
case 'channel':
id = child.getAttribute( 'target' );
data.channels[ id ] = parseAnimationChannel( child );
break;
case 'animation':
// hierarchy of related animations
parseAnimation( child );
hasChildren = true;
break;
default:
console.log( child );
}
}
if ( hasChildren === false ) {
// since 'id' attributes can be optional, it's necessary to generate a UUID for unique assignment
library.animations[ xml.getAttribute( 'id' ) || MathUtils.generateUUID() ] = data;
}
}
function parseAnimationSampler( xml ) {
const data = {
inputs: {},
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'input':
const id = parseId( child.getAttribute( 'source' ) );
const semantic = child.getAttribute( 'semantic' );
data.inputs[ semantic ] = id;
break;
}
}
return data;
}
function parseAnimationChannel( xml ) {
const data = {};
const target = xml.getAttribute( 'target' );
// parsing SID Addressing Syntax
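// e.g. a target of 'Node/translate' (plain SID), 'Node/transform.X' (member selection)
// or 'Node/transform(3)(2)' (array access); the element names here are purely illustrative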
let parts = target.split( '/' );
const id = parts.shift();
let sid = parts.shift();
// check selection syntax
const arraySyntax = ( sid.indexOf( '(' ) !== - 1 );
const memberSyntax = ( sid.indexOf( '.' ) !== - 1 );
if ( memberSyntax ) {
// member selection access
parts = sid.split( '.' );
sid = parts.shift();
data.member = parts.shift();
} else if ( arraySyntax ) {
// array-access syntax. can be used to express fields in one-dimensional vectors or two-dimensional matrices.
const indices = sid.split( '(' );
sid = indices.shift();
for ( let i = 0; i < indices.length; i ++ ) {
indices[ i ] = parseInt( indices[ i ].replace( /\)/, '' ) );
}
data.indices = indices;
}
data.id = id;
data.sid = sid;
data.arraySyntax = arraySyntax;
data.memberSyntax = memberSyntax;
data.sampler = parseId( xml.getAttribute( 'source' ) );
return data;
}
function buildAnimation( data ) {
const tracks = [];
const channels = data.channels;
const samplers = data.samplers;
const sources = data.sources;
for ( const target in channels ) {
if ( channels.hasOwnProperty( target ) ) {
const channel = channels[ target ];
const sampler = samplers[ channel.sampler ];
const inputId = sampler.inputs.INPUT;
const outputId = sampler.inputs.OUTPUT;
const inputSource = sources[ inputId ];
const outputSource = sources[ outputId ];
const animation = buildAnimationChannel( channel, inputSource, outputSource );
createKeyframeTracks( animation, tracks );
}
}
return tracks;
}
function getAnimation( id ) {
return getBuild( library.animations[ id ], buildAnimation );
}
function buildAnimationChannel( channel, inputSource, outputSource ) {
const node = library.nodes[ channel.id ];
const object3D = getNode( node.id );
const transform = node.transforms[ channel.sid ];
const defaultMatrix = node.matrix.clone().transpose();
let time, stride;
let i, il, j, jl;
const data = {};
// the collada spec allows the animation of data in various ways.
// depending on the transform type (matrix, translate, rotate, scale), we execute different logic
switch ( transform ) {
case 'matrix':
for ( i = 0, il = inputSource.array.length; i < il; i ++ ) {
time = inputSource.array[ i ];
stride = i * outputSource.stride;
if ( data[ time ] === undefined ) data[ time ] = {};
if ( channel.arraySyntax === true ) {
const value = outputSource.array[ stride ];
const index = channel.indices[ 0 ] + 4 * channel.indices[ 1 ];
data[ time ][ index ] = value;
} else {
for ( j = 0, jl = outputSource.stride; j < jl; j ++ ) {
data[ time ][ j ] = outputSource.array[ stride + j ];
}
}
}
break;
case 'translate':
console.warn( 'THREE.ColladaLoader: Animation transform type "%s" not yet implemented.', transform );
break;
case 'rotate':
console.warn( 'THREE.ColladaLoader: Animation transform type "%s" not yet implemented.', transform );
break;
case 'scale':
console.warn( 'THREE.ColladaLoader: Animation transform type "%s" not yet implemented.', transform );
break;
}
const keyframes = prepareAnimationData( data, defaultMatrix );
const animation = {
name: object3D.uuid,
keyframes: keyframes
};
return animation;
}
function prepareAnimationData( data, defaultMatrix ) {
const keyframes = [];
// transfer data into a sortable array
for ( const time in data ) {
keyframes.push( { time: parseFloat( time ), value: data[ time ] } );
}
// ensure keyframes are sorted by time
keyframes.sort( ascending );
// now we clean up all animation data so we can use it for keyframe tracks
for ( let i = 0; i < 16; i ++ ) {
transformAnimationData( keyframes, i, defaultMatrix.elements[ i ] );
}
return keyframes;
// array sort function
function ascending( a, b ) {
return a.time - b.time;
}
}
const position = new Vector3();
const scale = new Vector3();
const quaternion = new Quaternion();
function createKeyframeTracks( animation, tracks ) {
const keyframes = animation.keyframes;
const name = animation.name;
const times = [];
const positionData = [];
const quaternionData = [];
const scaleData = [];
for ( let i = 0, l = keyframes.length; i < l; i ++ ) {
const keyframe = keyframes[ i ];
const time = keyframe.time;
const value = keyframe.value;
matrix.fromArray( value ).transpose();
matrix.decompose( position, quaternion, scale );
times.push( time );
positionData.push( position.x, position.y, position.z );
quaternionData.push( quaternion.x, quaternion.y, quaternion.z, quaternion.w );
scaleData.push( scale.x, scale.y, scale.z );
}
if ( positionData.length > 0 ) tracks.push( new VectorKeyframeTrack( name + '.position', times, positionData ) );
if ( quaternionData.length > 0 ) tracks.push( new QuaternionKeyframeTrack( name + '.quaternion', times, quaternionData ) );
if ( scaleData.length > 0 ) tracks.push( new VectorKeyframeTrack( name + '.scale', times, scaleData ) );
return tracks;
}
function transformAnimationData( keyframes, property, defaultValue ) {
let keyframe;
let empty = true;
let i, l;
// check, if values of a property are missing in our keyframes
for ( i = 0, l = keyframes.length; i < l; i ++ ) {
keyframe = keyframes[ i ];
if ( keyframe.value[ property ] === undefined ) {
keyframe.value[ property ] = null; // mark as missing
} else {
empty = false;
}
}
if ( empty === true ) {
// no values at all, so we set a default value
for ( i = 0, l = keyframes.length; i < l; i ++ ) {
keyframe = keyframes[ i ];
keyframe.value[ property ] = defaultValue;
}
} else {
// filling gaps
createMissingKeyframes( keyframes, property );
}
}
function createMissingKeyframes( keyframes, property ) {
let prev, next;
for ( let i = 0, l = keyframes.length; i < l; i ++ ) {
const keyframe = keyframes[ i ];
if ( keyframe.value[ property ] === null ) {
prev = getPrev( keyframes, i, property );
next = getNext( keyframes, i, property );
if ( prev === null ) {
keyframe.value[ property ] = next.value[ property ];
continue;
}
if ( next === null ) {
keyframe.value[ property ] = prev.value[ property ];
continue;
}
interpolate( keyframe, prev, next, property );
}
}
}
function getPrev( keyframes, i, property ) {
while ( i >= 0 ) {
const keyframe = keyframes[ i ];
if ( keyframe.value[ property ] !== null ) return keyframe;
i --;
}
return null;
}
function getNext( keyframes, i, property ) {
while ( i < keyframes.length ) {
const keyframe = keyframes[ i ];
if ( keyframe.value[ property ] !== null ) return keyframe;
i ++;
}
return null;
}
function interpolate( key, prev, next, property ) {
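// linearly interpolate the missing property value between the surrounding keyframes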
if ( ( next.time - prev.time ) === 0 ) {
key.value[ property ] = prev.value[ property ];
return;
}
key.value[ property ] = ( ( key.time - prev.time ) * ( next.value[ property ] - prev.value[ property ] ) / ( next.time - prev.time ) ) + prev.value[ property ];
}
// animation clips
function parseAnimationClip( xml ) {
const data = {
name: xml.getAttribute( 'id' ) || 'default',
start: parseFloat( xml.getAttribute( 'start' ) || 0 ),
end: parseFloat( xml.getAttribute( 'end' ) || 0 ),
animations: []
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'instance_animation':
data.animations.push( parseId( child.getAttribute( 'url' ) ) );
break;
}
}
library.clips[ xml.getAttribute( 'id' ) ] = data;
}
function buildAnimationClip( data ) {
const tracks = [];
const name = data.name;
const duration = ( data.end - data.start ) || - 1;
const animations = data.animations;
for ( let i = 0, il = animations.length; i < il; i ++ ) {
const animationTracks = getAnimation( animations[ i ] );
for ( let j = 0, jl = animationTracks.length; j < jl; j ++ ) {
tracks.push( animationTracks[ j ] );
}
}
return new AnimationClip( name, duration, tracks );
}
function getAnimationClip( id ) {
return getBuild( library.clips[ id ], buildAnimationClip );
}
// controller
function parseController( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'skin':
// there is exactly one skin per controller
data.id = parseId( child.getAttribute( 'source' ) );
data.skin = parseSkin( child );
break;
case 'morph':
data.id = parseId( child.getAttribute( 'source' ) );
console.warn( 'THREE.ColladaLoader: Morph target animation not supported yet.' );
break;
}
}
library.controllers[ xml.getAttribute( 'id' ) ] = data;
}
function parseSkin( xml ) {
const data = {
sources: {}
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'bind_shape_matrix':
data.bindShapeMatrix = parseFloats( child.textContent );
break;
case 'source':
const id = child.getAttribute( 'id' );
data.sources[ id ] = parseSource( child );
break;
case 'joints':
data.joints = parseJoints( child );
break;
case 'vertex_weights':
data.vertexWeights = parseVertexWeights( child );
break;
}
}
return data;
}
function parseJoints( xml ) {
const data = {
inputs: {}
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'input':
const semantic = child.getAttribute( 'semantic' );
const id = parseId( child.getAttribute( 'source' ) );
data.inputs[ semantic ] = id;
break;
}
}
return data;
}
function parseVertexWeights( xml ) {
const data = {
inputs: {}
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'input':
const semantic = child.getAttribute( 'semantic' );
const id = parseId( child.getAttribute( 'source' ) );
const offset = parseInt( child.getAttribute( 'offset' ) );
data.inputs[ semantic ] = { id: id, offset: offset };
break;
case 'vcount':
data.vcount = parseInts( child.textContent );
break;
case 'v':
data.v = parseInts( child.textContent );
break;
}
}
return data;
}
function buildController( data ) {
const build = {
id: data.id
};
const geometry = library.geometries[ build.id ];
if ( data.skin !== undefined ) {
build.skin = buildSkin( data.skin );
// we enhance the 'sources' property of the corresponding geometry with our skin data
geometry.sources.skinIndices = build.skin.indices;
geometry.sources.skinWeights = build.skin.weights;
}
return build;
}
function buildSkin( data ) {
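// skin data are padded or truncated to four influences per vertex, matching the
// stride of the skinIndex/skinWeight attributes built later in buildGeometryType()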
const BONE_LIMIT = 4;
const build = {
joints: [], // this must be an array to preserve the joint order
indices: {
array: [],
stride: BONE_LIMIT
},
weights: {
array: [],
stride: BONE_LIMIT
}
};
const sources = data.sources;
const vertexWeights = data.vertexWeights;
const vcount = vertexWeights.vcount;
const v = vertexWeights.v;
const jointOffset = vertexWeights.inputs.JOINT.offset;
const weightOffset = vertexWeights.inputs.WEIGHT.offset;
const jointSource = data.sources[ data.joints.inputs.JOINT ];
const inverseSource = data.sources[ data.joints.inputs.INV_BIND_MATRIX ];
const weights = sources[ vertexWeights.inputs.WEIGHT.id ].array;
let stride = 0;
let i, j, l;
// process skin data for each vertex
for ( i = 0, l = vcount.length; i < l; i ++ ) {
const jointCount = vcount[ i ]; // this is the number of joints that affect a single vertex
const vertexSkinData = [];
for ( j = 0; j < jointCount; j ++ ) {
const skinIndex = v[ stride + jointOffset ];
const weightId = v[ stride + weightOffset ];
const skinWeight = weights[ weightId ];
vertexSkinData.push( { index: skinIndex, weight: skinWeight } );
stride += 2;
}
// we sort the joints in descending order based on the weights.
// this ensures we only process the most important joints of the vertex
vertexSkinData.sort( descending );
// now we provide for each vertex a set of four index and weight values.
// the order of the skin data matches the order of vertices
for ( j = 0; j < BONE_LIMIT; j ++ ) {
const d = vertexSkinData[ j ];
if ( d !== undefined ) {
build.indices.array.push( d.index );
build.weights.array.push( d.weight );
} else {
build.indices.array.push( 0 );
build.weights.array.push( 0 );
}
}
}
// setup bind matrix
if ( data.bindShapeMatrix ) {
build.bindMatrix = new Matrix4().fromArray( data.bindShapeMatrix ).transpose();
} else {
build.bindMatrix = new Matrix4().identity();
}
// process bones and inverse bind matrix data
for ( i = 0, l = jointSource.array.length; i < l; i ++ ) {
const name = jointSource.array[ i ];
const boneInverse = new Matrix4().fromArray( inverseSource.array, i * inverseSource.stride ).transpose();
build.joints.push( { name: name, boneInverse: boneInverse } );
}
return build;
// array sort function
function descending( a, b ) {
return b.weight - a.weight;
}
}
function getController( id ) {
return getBuild( library.controllers[ id ], buildController );
}
// image
function parseImage( xml ) {
const data = {
init_from: getElementsByTagName( xml, 'init_from' )[ 0 ].textContent
};
library.images[ xml.getAttribute( 'id' ) ] = data;
}
function buildImage( data ) {
if ( data.build !== undefined ) return data.build;
return data.init_from;
}
function getImage( id ) {
const data = library.images[ id ];
if ( data !== undefined ) {
return getBuild( data, buildImage );
}
console.warn( 'THREE.ColladaLoader: Couldn\'t find image with ID:', id );
return null;
}
// effect
function parseEffect( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'profile_COMMON':
data.profile = parseEffectProfileCOMMON( child );
break;
}
}
library.effects[ xml.getAttribute( 'id' ) ] = data;
}
function parseEffectProfileCOMMON( xml ) {
const data = {
surfaces: {},
samplers: {}
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'newparam':
parseEffectNewparam( child, data );
break;
case 'technique':
data.technique = parseEffectTechnique( child );
break;
case 'extra':
data.extra = parseEffectExtra( child );
break;
}
}
return data;
}
function parseEffectNewparam( xml, data ) {
const sid = xml.getAttribute( 'sid' );
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'surface':
data.surfaces[ sid ] = parseEffectSurface( child );
break;
case 'sampler2D':
data.samplers[ sid ] = parseEffectSampler( child );
break;
}
}
}
function parseEffectSurface( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'init_from':
data.init_from = child.textContent;
break;
}
}
return data;
}
function parseEffectSampler( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'source':
data.source = child.textContent;
break;
}
}
return data;
}
function parseEffectTechnique( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'constant':
case 'lambert':
case 'blinn':
case 'phong':
data.type = child.nodeName;
data.parameters = parseEffectParameters( child );
break;
case 'extra':
data.extra = parseEffectExtra( child );
break;
}
}
return data;
}
function parseEffectParameters( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'emission':
case 'diffuse':
case 'specular':
case 'bump':
case 'ambient':
case 'shininess':
case 'transparency':
data[ child.nodeName ] = parseEffectParameter( child );
break;
case 'transparent':
data[ child.nodeName ] = {
opaque: child.hasAttribute( 'opaque' ) ? child.getAttribute( 'opaque' ) : 'A_ONE',
data: parseEffectParameter( child )
};
break;
}
}
return data;
}
function parseEffectParameter( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'color':
data[ child.nodeName ] = parseFloats( child.textContent );
break;
case 'float':
data[ child.nodeName ] = parseFloat( child.textContent );
break;
case 'texture':
data[ child.nodeName ] = { id: child.getAttribute( 'texture' ), extra: parseEffectParameterTexture( child ) };
break;
}
}
return data;
}
function parseEffectParameterTexture( xml ) {
const data = {
technique: {}
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'extra':
parseEffectParameterTextureExtra( child, data );
break;
}
}
return data;
}
function parseEffectParameterTextureExtra( xml, data ) {
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'technique':
parseEffectParameterTextureExtraTechnique( child, data );
break;
}
}
}
function parseEffectParameterTextureExtraTechnique( xml, data ) {
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'repeatU':
case 'repeatV':
case 'offsetU':
case 'offsetV':
data.technique[ child.nodeName ] = parseFloat( child.textContent );
break;
case 'wrapU':
case 'wrapV':
// some files have values for wrapU/wrapV which become NaN via parseInt
if ( child.textContent.toUpperCase() === 'TRUE' ) {
data.technique[ child.nodeName ] = 1;
} else if ( child.textContent.toUpperCase() === 'FALSE' ) {
data.technique[ child.nodeName ] = 0;
} else {
data.technique[ child.nodeName ] = parseInt( child.textContent );
}
break;
case 'bump':
data[ child.nodeName ] = parseEffectExtraTechniqueBump( child );
break;
}
}
}
function parseEffectExtra( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'technique':
data.technique = parseEffectExtraTechnique( child );
break;
}
}
return data;
}
function parseEffectExtraTechnique( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'double_sided':
data[ child.nodeName ] = parseInt( child.textContent );
break;
case 'bump':
data[ child.nodeName ] = parseEffectExtraTechniqueBump( child );
break;
}
}
return data;
}
function parseEffectExtraTechniqueBump( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'texture':
data[ child.nodeName ] = { id: child.getAttribute( 'texture' ), texcoord: child.getAttribute( 'texcoord' ), extra: parseEffectParameterTexture( child ) };
break;
}
}
return data;
}
function buildEffect( data ) {
return data;
}
function getEffect( id ) {
return getBuild( library.effects[ id ], buildEffect );
}
// material
function parseMaterial( xml ) {
const data = {
name: xml.getAttribute( 'name' )
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'instance_effect':
data.url = parseId( child.getAttribute( 'url' ) );
break;
}
}
library.materials[ xml.getAttribute( 'id' ) ] = data;
}
function getTextureLoader( image ) {
let loader;
let extension = image.slice( ( image.lastIndexOf( '.' ) - 1 >>> 0 ) + 2 ); // http://www.jstips.co/en/javascript/get-file-extension/
extension = extension.toLowerCase();
switch ( extension ) {
case 'tga':
loader = tgaLoader;
break;
default:
loader = textureLoader;
}
return loader;
}
function buildMaterial( data ) {
const effect = getEffect( data.url );
const technique = effect.profile.technique;
let material;
switch ( technique.type ) {
case 'phong':
case 'blinn':
material = new MeshPhongMaterial();
break;
case 'lambert':
material = new MeshLambertMaterial();
break;
default:
material = new MeshBasicMaterial();
break;
}
material.name = data.name || '';
function getTexture( textureObject, colorSpace = null ) {
const sampler = effect.profile.samplers[ textureObject.id ];
let image = null;
// get image
if ( sampler !== undefined ) {
const surface = effect.profile.surfaces[ sampler.source ];
image = getImage( surface.init_from );
} else {
console.warn( 'THREE.ColladaLoader: Undefined sampler. Access image directly (see #12530).' );
image = getImage( textureObject.id );
}
// create texture if image is available
if ( image !== null ) {
const loader = getTextureLoader( image );
if ( loader !== undefined ) {
const texture = loader.load( image );
const extra = textureObject.extra;
if ( extra !== undefined && extra.technique !== undefined && isEmpty( extra.technique ) === false ) {
const technique = extra.technique;
texture.wrapS = technique.wrapU ? RepeatWrapping : ClampToEdgeWrapping;
texture.wrapT = technique.wrapV ? RepeatWrapping : ClampToEdgeWrapping;
texture.offset.set( technique.offsetU || 0, technique.offsetV || 0 );
texture.repeat.set( technique.repeatU || 1, technique.repeatV || 1 );
} else {
texture.wrapS = RepeatWrapping;
texture.wrapT = RepeatWrapping;
}
if ( colorSpace !== null ) {
texture.colorSpace = colorSpace;
}
return texture;
} else {
console.warn( 'THREE.ColladaLoader: Loader for texture %s not found.', image );
return null;
}
} else {
console.warn( 'THREE.ColladaLoader: Couldn\'t create texture with ID:', textureObject.id );
return null;
}
}
const parameters = technique.parameters;
for ( const key in parameters ) {
const parameter = parameters[ key ];
switch ( key ) {
case 'diffuse':
if ( parameter.color ) material.color.fromArray( parameter.color );
if ( parameter.texture ) material.map = getTexture( parameter.texture, SRGBColorSpace );
break;
case 'specular':
if ( parameter.color && material.specular ) material.specular.fromArray( parameter.color );
if ( parameter.texture ) material.specularMap = getTexture( parameter.texture );
break;
case 'bump':
if ( parameter.texture ) material.normalMap = getTexture( parameter.texture );
break;
case 'ambient':
if ( parameter.texture ) material.lightMap = getTexture( parameter.texture, SRGBColorSpace );
break;
case 'shininess':
if ( parameter.float && material.shininess ) material.shininess = parameter.float;
break;
case 'emission':
if ( parameter.color && material.emissive ) material.emissive.fromArray( parameter.color );
if ( parameter.texture ) material.emissiveMap = getTexture( parameter.texture, SRGBColorSpace );
break;
}
}
ColorManagement.toWorkingColorSpace( material.color, SRGBColorSpace );
if ( material.specular ) ColorManagement.toWorkingColorSpace( material.specular, SRGBColorSpace );
if ( material.emissive ) ColorManagement.toWorkingColorSpace( material.emissive, SRGBColorSpace );
//
let transparent = parameters[ 'transparent' ];
let transparency = parameters[ 'transparency' ];
// <transparency> does not exist but <transparent> does
if ( transparency === undefined && transparent ) {
transparency = {
float: 1
};
}
// <transparent> does not exist but <transparency> does
if ( transparent === undefined && transparency ) {
transparent = {
opaque: 'A_ONE',
data: {
color: [ 1, 1, 1, 1 ]
} };
}
if ( transparent && transparency ) {
// handle the case where a texture exists but no color is defined
if ( transparent.data.texture ) {
// we do not set an alpha map (see #13792)
material.transparent = true;
} else {
const color = transparent.data.color;
switch ( transparent.opaque ) {
case 'A_ONE':
material.opacity = color[ 3 ] * transparency.float;
break;
case 'RGB_ZERO':
material.opacity = 1 - ( color[ 0 ] * transparency.float );
break;
case 'A_ZERO':
material.opacity = 1 - ( color[ 3 ] * transparency.float );
break;
case 'RGB_ONE':
material.opacity = color[ 0 ] * transparency.float;
break;
default:
console.warn( 'THREE.ColladaLoader: Invalid opaque type "%s" of transparent tag.', transparent.opaque );
}
if ( material.opacity < 1 ) material.transparent = true;
}
}
//
if ( technique.extra !== undefined && technique.extra.technique !== undefined ) {
const techniques = technique.extra.technique;
for ( const k in techniques ) {
const v = techniques[ k ];
switch ( k ) {
case 'double_sided':
material.side = ( v === 1 ? DoubleSide : FrontSide );
break;
case 'bump':
material.normalMap = getTexture( v.texture );
material.normalScale = new Vector2( 1, 1 );
break;
}
}
}
return material;
}
function getMaterial( id ) {
return getBuild( library.materials[ id ], buildMaterial );
}
// camera
function parseCamera( xml ) {
const data = {
name: xml.getAttribute( 'name' )
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'optics':
data.optics = parseCameraOptics( child );
break;
}
}
library.cameras[ xml.getAttribute( 'id' ) ] = data;
}
function parseCameraOptics( xml ) {
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
switch ( child.nodeName ) {
case 'technique_common':
return parseCameraTechnique( child );
}
}
return {};
}
function parseCameraTechnique( xml ) {
const data = {};
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
switch ( child.nodeName ) {
case 'perspective':
case 'orthographic':
data.technique = child.nodeName;
data.parameters = parseCameraParameters( child );
break;
}
}
return data;
}
function parseCameraParameters( xml ) {
const data = {};
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
switch ( child.nodeName ) {
case 'xfov':
case 'yfov':
case 'xmag':
case 'ymag':
case 'znear':
case 'zfar':
case 'aspect_ratio':
data[ child.nodeName ] = parseFloat( child.textContent );
break;
}
}
return data;
}
function buildCamera( data ) {
let camera;
switch ( data.optics.technique ) {
case 'perspective':
camera = new PerspectiveCamera(
data.optics.parameters.yfov,
data.optics.parameters.aspect_ratio,
data.optics.parameters.znear,
data.optics.parameters.zfar
);
break;
case 'orthographic':
let ymag = data.optics.parameters.ymag;
let xmag = data.optics.parameters.xmag;
const aspectRatio = data.optics.parameters.aspect_ratio;
xmag = ( xmag === undefined ) ? ( ymag * aspectRatio ) : xmag;
ymag = ( ymag === undefined ) ? ( xmag / aspectRatio ) : ymag;
xmag *= 0.5;
ymag *= 0.5;
camera = new OrthographicCamera(
- xmag, xmag, ymag, - ymag, // left, right, top, bottom
data.optics.parameters.znear,
data.optics.parameters.zfar
);
break;
default:
camera = new PerspectiveCamera();
break;
}
camera.name = data.name || '';
return camera;
}
function getCamera( id ) {
const data = library.cameras[ id ];
if ( data !== undefined ) {
return getBuild( data, buildCamera );
}
console.warn( 'THREE.ColladaLoader: Couldn\'t find camera with ID:', id );
return null;
}
// light
function parseLight( xml ) {
let data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'technique_common':
data = parseLightTechnique( child );
break;
}
}
library.lights[ xml.getAttribute( 'id' ) ] = data;
}
function parseLightTechnique( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'directional':
case 'point':
case 'spot':
case 'ambient':
data.technique = child.nodeName;
data.parameters = parseLightParameters( child );
}
}
return data;
}
function parseLightParameters( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'color':
const array = parseFloats( child.textContent );
data.color = new Color().fromArray( array );
ColorManagement.toWorkingColorSpace( data.color, SRGBColorSpace );
break;
case 'falloff_angle':
data.falloffAngle = parseFloat( child.textContent );
break;
case 'quadratic_attenuation':
const f = parseFloat( child.textContent );
data.distance = f ? Math.sqrt( 1 / f ) : 0;
break;
}
}
return data;
}
function buildLight( data ) {
let light;
switch ( data.technique ) {
case 'directional':
light = new DirectionalLight();
break;
case 'point':
light = new PointLight();
break;
case 'spot':
light = new SpotLight();
break;
case 'ambient':
light = new AmbientLight();
break;
}
if ( data.parameters.color ) light.color.copy( data.parameters.color );
if ( data.parameters.distance ) light.distance = data.parameters.distance;
return light;
}
function getLight( id ) {
const data = library.lights[ id ];
if ( data !== undefined ) {
return getBuild( data, buildLight );
}
console.warn( 'THREE.ColladaLoader: Couldn\'t find light with ID:', id );
return null;
}
// geometry
function parseGeometry( xml ) {
const data = {
name: xml.getAttribute( 'name' ),
sources: {},
vertices: {},
primitives: []
};
const mesh = getElementsByTagName( xml, 'mesh' )[ 0 ];
// the following tags inside geometry are not supported yet (see https://github.com/mrdoob/three.js/pull/12606): convex_mesh, spline, brep
if ( mesh === undefined ) return;
for ( let i = 0; i < mesh.childNodes.length; i ++ ) {
const child = mesh.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
const id = child.getAttribute( 'id' );
switch ( child.nodeName ) {
case 'source':
data.sources[ id ] = parseSource( child );
break;
case 'vertices':
// data.sources[ id ] = data.sources[ parseId( getElementsByTagName( child, 'input' )[ 0 ].getAttribute( 'source' ) ) ];
data.vertices = parseGeometryVertices( child );
break;
case 'polygons':
console.warn( 'THREE.ColladaLoader: Unsupported primitive type: ', child.nodeName );
break;
case 'lines':
case 'linestrips':
case 'polylist':
case 'triangles':
data.primitives.push( parseGeometryPrimitive( child ) );
break;
default:
console.log( child );
}
}
library.geometries[ xml.getAttribute( 'id' ) ] = data;
}
function parseSource( xml ) {
const data = {
array: [],
stride: 3
};
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'float_array':
data.array = parseFloats( child.textContent );
break;
case 'Name_array':
data.array = parseStrings( child.textContent );
break;
case 'technique_common':
const accessor = getElementsByTagName( child, 'accessor' )[ 0 ];
if ( accessor !== undefined ) {
data.stride = parseInt( accessor.getAttribute( 'stride' ) );
}
break;
}
}
return data;
}
function parseGeometryVertices( xml ) {
const data = {};
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
data[ child.getAttribute( 'semantic' ) ] = parseId( child.getAttribute( 'source' ) );
}
return data;
}
function parseGeometryPrimitive( xml ) {
const primitive = {
type: xml.nodeName,
material: xml.getAttribute( 'material' ),
count: parseInt( xml.getAttribute( 'count' ) ),
inputs: {},
stride: 0,
hasUV: false
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'input':
const id = parseId( child.getAttribute( 'source' ) );
const semantic = child.getAttribute( 'semantic' );
const offset = parseInt( child.getAttribute( 'offset' ) );
const set = parseInt( child.getAttribute( 'set' ) );
const inputname = ( set > 0 ? semantic + set : semantic );
primitive.inputs[ inputname ] = { id: id, offset: offset };
primitive.stride = Math.max( primitive.stride, offset + 1 );
if ( semantic === 'TEXCOORD' ) primitive.hasUV = true;
break;
case 'vcount':
primitive.vcount = parseInts( child.textContent );
break;
case 'p':
primitive.p = parseInts( child.textContent );
break;
}
}
return primitive;
}
function groupPrimitives( primitives ) {
const build = {};
for ( let i = 0; i < primitives.length; i ++ ) {
const primitive = primitives[ i ];
if ( build[ primitive.type ] === undefined ) build[ primitive.type ] = [];
build[ primitive.type ].push( primitive );
}
return build;
}
function checkUVCoordinates( primitives ) {
let count = 0;
for ( let i = 0, l = primitives.length; i < l; i ++ ) {
const primitive = primitives[ i ];
if ( primitive.hasUV === true ) {
count ++;
}
}
if ( count > 0 && count < primitives.length ) {
primitives.uvsNeedsFix = true;
}
}
function buildGeometry( data ) {
const build = {};
const sources = data.sources;
const vertices = data.vertices;
const primitives = data.primitives;
if ( primitives.length === 0 ) return {};
// our goal is to create one buffer geometry for a single type of primitives
// first, we group all primitives by their type
const groupedPrimitives = groupPrimitives( primitives );
for ( const type in groupedPrimitives ) {
const primitiveType = groupedPrimitives[ type ];
// second, ensure consistent uv coordinates for each type of primitives (polylist, triangles or lines)
checkUVCoordinates( primitiveType );
// third, create a buffer geometry for each type of primitives
build[ type ] = buildGeometryType( primitiveType, sources, vertices );
}
return build;
}
function buildGeometryType( primitives, sources, vertices ) {
const build = {};
const position = { array: [], stride: 0 };
const normal = { array: [], stride: 0 };
const uv = { array: [], stride: 0 };
const uv1 = { array: [], stride: 0 };
const color = { array: [], stride: 0 };
const skinIndex = { array: [], stride: 4 };
const skinWeight = { array: [], stride: 4 };
const geometry = new BufferGeometry();
const materialKeys = [];
let start = 0;
for ( let p = 0; p < primitives.length; p ++ ) {
const primitive = primitives[ p ];
const inputs = primitive.inputs;
// groups
let count = 0;
switch ( primitive.type ) {
case 'lines':
case 'linestrips':
count = primitive.count * 2;
break;
case 'triangles':
count = primitive.count * 3;
break;
case 'polylist':
for ( let g = 0; g < primitive.count; g ++ ) {
const vc = primitive.vcount[ g ];
switch ( vc ) {
case 3:
count += 3; // single triangle
break;
case 4:
count += 6; // quad, subdivided into two triangles
break;
default:
count += ( vc - 2 ) * 3; // polylist with more than four vertices
break;
}
}
break;
default:
console.warn( 'THREE.ColladaLoader: Unknown primitive type:', primitive.type );
}
geometry.addGroup( start, count, p );
start += count;
// material
if ( primitive.material ) {
materialKeys.push( primitive.material );
}
// geometry data
for ( const name in inputs ) {
const input = inputs[ name ];
switch ( name ) {
case 'VERTEX':
for ( const key in vertices ) {
const id = vertices[ key ];
switch ( key ) {
case 'POSITION':
const prevLength = position.array.length;
buildGeometryData( primitive, sources[ id ], input.offset, position.array );
position.stride = sources[ id ].stride;
if ( sources.skinWeights && sources.skinIndices ) {
buildGeometryData( primitive, sources.skinIndices, input.offset, skinIndex.array );
buildGeometryData( primitive, sources.skinWeights, input.offset, skinWeight.array );
}
// see #3803
if ( primitive.hasUV === false && primitives.uvsNeedsFix === true ) {
const count = ( position.array.length - prevLength ) / position.stride;
for ( let i = 0; i < count; i ++ ) {
// fill missing uv coordinates
uv.array.push( 0, 0 );
}
}
break;
case 'NORMAL':
buildGeometryData( primitive, sources[ id ], input.offset, normal.array );
normal.stride = sources[ id ].stride;
break;
case 'COLOR':
buildGeometryData( primitive, sources[ id ], input.offset, color.array );
color.stride = sources[ id ].stride;
break;
case 'TEXCOORD':
buildGeometryData( primitive, sources[ id ], input.offset, uv.array );
uv.stride = sources[ id ].stride;
break;
case 'TEXCOORD1':
buildGeometryData( primitive, sources[ id ], input.offset, uv1.array );
uv1.stride = sources[ id ].stride;
break;
default:
console.warn( 'THREE.ColladaLoader: Semantic "%s" not handled in geometry build process.', key );
}
}
break;
case 'NORMAL':
buildGeometryData( primitive, sources[ input.id ], input.offset, normal.array );
normal.stride = sources[ input.id ].stride;