@egjs/view3d
Version: 2.10.1
Fast & customizable glTF 3D model viewer, packed full of features!
1,487 lines (1,295 loc) • 512 kB
JavaScript
/*
Copyright (c) NAVER Corp.
name: @egjs/view3d
license: MIT
author: NAVER Corp.
repository: https://github.com/naver/egjs-view3d
version: 2.10.1
*/
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('three'), require('@egjs/component')) :
typeof define === 'function' && define.amd ? define(['three', '@egjs/component'], factory) :
(global = global || self, global.View3D = factory(global.THREE, global.Component));
}(this, (function (THREE, Component) { 'use strict';
/*
* Copyright (c) 2020 NAVER Corp.
* egjs projects are licensed under the MIT license
*/
/**
 * Error thrown by View3D
 */
class View3DError extends Error {
  /**
   * Create new instance of View3DError
   * @param {string} message Human-readable description of what went wrong
   * @param {number} code Error code, see {@link ERROR_CODES}
   */
  constructor(message, code) {
    super(message);
    // Re-attach the prototype chain explicitly; required for `instanceof`
    // to keep working when classes are transpiled down to ES5.
    Object.setPrototypeOf(this, View3DError.prototype);
    this.code = code;
    this.name = "View3DError";
  }
}
/*
* Copyright (c) 2020 NAVER Corp.
* egjs projects are licensed under the MIT license
*/
/**
 * Error codes of {@link View3DError}
 * @type object
 * @property {0} WRONG_TYPE The given value's type is not expected
 * @property {1} ELEMENT_NOT_FOUND The element with given CSS selector does not exist
 * @property {2} CANVAS_NOT_FOUND The element given is not a \<canvas\> element
 * @property {3} WEBGL_NOT_SUPPORTED The browser does not support WebGL
 * @property {4} PROVIDE_SRC_FIRST `init()` is called before setting `src`
 * @property {5} FILE_NOT_SUPPORTED The given file is not supported
 * @property {6} NOT_INITIALIZED The action is called before the component is initialized
 * @property {7} MODEL_FAIL_TO_LOAD The 3D model failed to load
 */
const ERROR_CODES = {
WRONG_TYPE: 0,
ELEMENT_NOT_FOUND: 1,
CANVAS_NOT_FOUND: 2,
WEBGL_NOT_SUPPORTED: 3,
PROVIDE_SRC_FIRST: 4,
FILE_NOT_SUPPORTED: 5,
NOT_INITIALIZED: 6,
MODEL_FAIL_TO_LOAD: 7
};
// Human-readable messages keyed by the same names as ERROR_CODES.
// Entries are either fixed strings or builder functions that embed context
// (the offending value, selector, or URL) into the message text.
const MESSAGES = {
WRONG_TYPE: (val, types) => `${typeof val} is not a ${types.map(type => `"${type}"`).join(" or ")}.`,
ELEMENT_NOT_FOUND: query => `Element with selector "${query}" not found.`,
CANVAS_NOT_FOUND: "The canvas element was not found inside the given root element.",
WEBGL_NOT_SUPPORTED: "WebGL is not supported on this browser.",
PROVIDE_SRC_FIRST: "\"src\" should be provided before initialization.",
FILE_NOT_SUPPORTED: src => `Given file "${src}" is not supported.`,
NOT_INITIALIZED: "View3D is not initialized yet.",
MODEL_FAIL_TO_LOAD: url => `Failed to load/parse the 3D model with the given url: "${url}". Check "loadError" event for actual error instance.`
};
// Aggregated error namespace used when throwing View3DError across the library.
var ERROR = {
CODES: ERROR_CODES,
MESSAGES
};
/*
* Copyright (c) 2020 NAVER Corp.
* egjs projects are licensed under the MIT license
*/
// Primitive type predicates used throughout option validation
const isNumber = (val) => typeof val === "number";
const isString = (val) => typeof val === "string";
// True only for DOM elements (nodeType === Node.ELEMENT_NODE)
const isElement = (val) => !!val && val.nodeType === Node.ELEMENT_NODE;
/**
 * Resolve an element from either a CSS selector string or an element reference.
 * @param {string | HTMLElement | null} el CSS selector or element
 * @param {HTMLElement} [parent] Search root for selector queries; defaults to `document`
 * @returns {Element | null} The resolved element, or `null` when not found / not a valid type
 */
const getNullableElement = (el, parent) => {
  if (isString(el)) {
    const searchRoot = parent || document;
    return searchRoot.querySelector(el);
  }
  return isElement(el) ? el : null;
};
/**
 * Resolve an element from a CSS selector or element reference, throwing on failure.
 * @param {string | HTMLElement} el CSS selector or element
 * @param {HTMLElement} [parent] Search root for selector queries
 * @returns {Element} The resolved element
 * @throws {View3DError} ELEMENT_NOT_FOUND when a selector matches nothing,
 * WRONG_TYPE when `el` is neither a string nor an element
 */
const getElement = (el, parent) => {
  const resolved = getNullableElement(el, parent);
  if (resolved) return resolved;
  if (isString(el)) {
    throw new View3DError(ERROR.MESSAGES.ELEMENT_NOT_FOUND(el), ERROR.CODES.ELEMENT_NOT_FOUND);
  }
  throw new View3DError(ERROR.MESSAGES.WRONG_TYPE(el, ["HTMLElement", "string"]), ERROR.CODES.WRONG_TYPE);
};
/**
 * Find the canvas element matching `selector` under `root`.
 * @throws {View3DError} CANVAS_NOT_FOUND when no match exists
 */
const findCanvas = (root, selector) => {
  const canvasEl = root.querySelector(selector);
  if (canvasEl) {
    return canvasEl;
  }
  throw new View3DError(ERROR.MESSAGES.CANVAS_NOT_FOUND, ERROR.CODES.CANVAS_NOT_FOUND);
};
// Check whether `val` is a string that parses as a valid CSS selector.
// Probes a detached DocumentFragment so the live DOM is never touched.
const isCSSSelector = val => {
  if (!isString(val)) return false;
  const fragment = document.createDocumentFragment();
  try {
    fragment.querySelector(val);
    return true;
  } catch (_err) {
    return false;
  }
};
/**
 * Create an array of consecutive integers [0, 1, ..., end - 1].
 * @param {number} end Length of the range; falsy or non-positive values yield []
 * @returns {number[]} Array of integers starting at 0
 */
const range = end => {
  if (!end || end <= 0) {
    return [];
  }
  // Array.from replaces the archaic `Array.apply(0, Array(end))` trick and,
  // unlike `Array(end)`, also tolerates fractional lengths (truncated).
  return Array.from({ length: end }, (_, idx) => idx);
};
// Degree <-> radian conversions
const toRadian = deg => deg * Math.PI / 180;
const toDegree = rad => rad * 180 / Math.PI;
// Clamp `x` into the closed interval [min, max]
const clamp = (x, min, max) => Math.max(Math.min(x, max), min);
// Linear interpolation between a and b by parameter t (t=0 -> a, t=1 -> b)
const lerp = (a, b, t) => a * (1 - t) + b * t;
/**
 * Wrap `val` into the closed range [min, max], wrapping overshoot around
 * by the size of the range.
 */
const circulate = (val, min, max) => {
  const size = Math.abs(max - min);
  if (val < min) {
    return max - (min - val) % size;
  }
  if (val > max) {
    return min + (val - max) % size;
  }
  return val;
};
// Shallow-merge `srcs` into `target` (mutates and returns `target`).
// When both sides hold an array for the same key the arrays are
// concatenated instead of replaced; all other values overwrite.
// eslint-disable-next-line @typescript-eslint/ban-types
const merge = (target, ...srcs) => {
  for (const src of srcs) {
    for (const key of Object.keys(src)) {
      const value = src[key];
      if (Array.isArray(target[key]) && Array.isArray(value)) {
        target[key] = [...target[key], ...value];
      } else {
        target[key] = value;
      }
    }
  }
  return target;
};
// Return the 8 corner points of an axis-aligned box (THREE.Box3),
// ordered from the min corner to the max corner.
const getBoxPoints = box => {
  const { min, max } = box;
  return [
    min.clone(),
    new THREE.Vector3(min.x, min.y, max.z),
    new THREE.Vector3(min.x, max.y, min.z),
    new THREE.Vector3(min.x, max.y, max.z),
    new THREE.Vector3(max.x, min.y, min.z),
    new THREE.Vector3(max.x, min.y, max.z),
    new THREE.Vector3(max.x, max.y, min.z),
    max.clone()
  ];
};
// Smallest power of two that is >= val (returns 1 for val <= 1).
const toPowerOfTwo = val => {
  let pow = 1;
  while (pow < val) {
    pow = pow * 2;
  }
  return pow;
};
/**
 * Index of the basis axis most parallel to `viewDir` (largest |dot product|).
 * @param {THREE.Vector3[]} basis Candidate axis vectors
 * @param {THREE.Vector3} viewDir View direction
 * @returns {number} Index into `basis`; 0 when basis is empty or all dots are 0
 */
const getPrimaryAxisIndex = (basis, viewDir) => {
  let bestIdx = 0;
  let bestDot = 0;
  basis.forEach((axis, idx) => {
    const absDot = Math.abs(viewDir.dot(axis));
    if (absDot > bestDot) {
      bestDot = absDot;
      bestIdx = idx;
    }
  });
  return bestIdx;
};
// Signed rotation angle (in radians) from v1 to v2 around `center`,
// choosing whichever of the two equivalent rotations (clockwise vs
// counter-clockwise) has the smaller magnitude.
const getRotationAngle = (center, v1, v2) => {
  const dir1 = new THREE.Vector2().subVectors(v1, center).normalize();
  const dir2 = new THREE.Vector2().subVectors(v2, center).normalize();
  // Angle difference with the model's NDC coordinates as the center
  const deg = dir2.angle() - dir1.angle();
  // Complementary angle going the other way around the circle
  const compDeg = -Math.sign(deg) * (2 * Math.PI - Math.abs(deg));
  // Take the smaller of the two
  return Math.abs(deg) < Math.abs(compDeg) ? deg : compDeg;
};
/**
 * Normalize an option that accepts either a boolean-ish flag or an options object.
 * @param {unknown} val Raw option value
 * @returns {object} `val` itself when it is a non-null object, an empty object otherwise
 */
// FIX: `typeof null === "object"`, so the original returned `null` itself for a
// null option instead of a usable empty options object.
const getObjectOption = val => typeof val === "object" && val !== null ? val : {};
// Stringify a boolean-ish value for use in HTML attribute values
const toBooleanString = val => val ? "true" : "false";
// Spherical -> Cartesian conversion: position at `distance` from the origin
// for the given yaw/pitch (both in degrees).
const getRotatedPosition = (distance, yawDeg, pitchDeg) => {
  const yaw = toRadian(yawDeg);
  const pitch = toRadian(pitchDeg);
  const pos = new THREE.Vector3(0, 0, 0);
  // Length of the position's projection onto the XZ plane
  const xzLength = distance * Math.cos(pitch);
  pos.y = distance * Math.sin(pitch);
  pos.x = xzLength * Math.sin(-yaw);
  pos.z = xzLength * Math.cos(-yaw);
  return pos;
};
// Convert a direction vector to { yaw, pitch }, both in radians.
// Yaw is measured on the XZ plane and forced to 0 near the poles
// (|y| > 0.99), where the XZ projection becomes too small to be stable.
const directionToYawPitch = direction => {
  const origin = new THREE.Vector2();
  const xzProjection = new THREE.Vector2(direction.x, direction.z);
  const pitch = Math.atan2(direction.y, xzProjection.distanceTo(origin));
  const yaw = Math.abs(direction.y) <= 0.99
    ? getRotationAngle(origin, new THREE.Vector2(0, 1), xzProjection)
    : 0;
  return {
    yaw,
    pitch
  };
};
/**
 * Create a fresh loading-progress context and register it on the View3D instance.
 * @param {View3D} view3D Owning View3D instance (its `loadingContext` array is appended to)
 * @param {string} src Source URL being loaded
 * @returns {object} The newly registered context
 */
const createLoadingContext = (view3D, src) => {
  const newContext = {
    src,
    loaded: 0,
    total: 0,
    lengthComputable: false,
    initialized: false
  };
  view3D.loadingContext.push(newContext);
  return newContext;
};
// Scale factor that de-normalizes a normalized integer BufferAttribute:
// unsigned types map to [0, 1] (1 / (2^bits - 1)), signed types to roughly
// [-1, 1] (hence the extra * 2). Float / non-normalized attributes get 1.
const getAttributeScale = attrib => {
  if (!attrib.normalized || !ArrayBuffer.isView(attrib.array)) {
    return 1;
  }
  const buffer = attrib.array;
  const scale = 1 / (Math.pow(2, 8 * buffer.BYTES_PER_ELEMENT) - 1);
  return isSignedArrayBuffer(buffer) ? scale * 2 : scale;
};
/**
 * Compute the world-space position of one vertex of a skinned mesh by applying
 * linear-blend skinning manually on the CPU.
 * @param {number} posIdx Vertex index into the position attribute
 * @param {THREE.SkinnedMesh} mesh Skinned mesh owning the vertex
 * @param {number} positionScale De-normalization scale for the position attribute
 * @param {number} skinWeightScale De-normalization scale for the skinWeight attribute
 * @returns {THREE.Vector3} Vertex position in world space
 */
const getSkinnedVertex = (posIdx, mesh, positionScale, skinWeightScale) => {
const geometry = mesh.geometry;
const positions = geometry.attributes.position;
const skinIndicies = geometry.attributes.skinIndex;
const skinWeights = geometry.attributes.skinWeight;
const skeleton = mesh.skeleton;
const boneMatricies = skeleton.boneMatrices;
// De-normalized vertex position in local space
const pos = new THREE.Vector3().fromBufferAttribute(positions, posIdx).multiplyScalar(positionScale);
const skinned = new THREE.Vector4(0, 0, 0, 0);
// Move into bind (skin) space before applying bone matrices (w defaults to 1)
const skinVertex = new THREE.Vector4(pos.x, pos.y, pos.z).applyMatrix4(mesh.bindMatrix);
const weights = [skinWeights.getX(posIdx), skinWeights.getY(posIdx), skinWeights.getZ(posIdx), skinWeights.getW(posIdx)].map(weight => weight * skinWeightScale);
const indicies = [skinIndicies.getX(posIdx), skinIndicies.getY(posIdx), skinIndicies.getZ(posIdx), skinIndicies.getW(posIdx)];
// Linear-blend skinning: weighted sum over the (up to) 4 influencing bones;
// each bone matrix occupies 16 consecutive floats in boneMatrices.
weights.forEach((weight, index) => {
const boneMatrix = new THREE.Matrix4().fromArray(boneMatricies, indicies[index] * 16);
skinned.add(skinVertex.clone().applyMatrix4(boneMatrix).multiplyScalar(weight));
});
// Back from bind space to local space, then into world space
const transformed = new THREE.Vector3().fromArray(skinned.applyMatrix4(mesh.bindMatrixInverse).toArray());
transformed.applyMatrix4(mesh.matrixWorld);
return transformed;
};
// Detect whether a typed array's element type is signed by probing with -1;
// unsigned element types wrap -1 around to a large positive value.
const isSignedArrayBuffer = buffer => {
  const probe = new buffer.constructor(1);
  probe[0] = -1;
  return probe[0] < 0;
};
/**
 * Check whether half-float (16-bit) textures are usable with the given renderer.
 * WebGL2 supports them natively; on WebGL1 this probes "OES_texture_half_float"
 * by actually uploading a 1x1 texture, since the extension can be reported as
 * present but still fail at upload time on some devices.
 * @param {THREE.WebGLRenderer} renderer Renderer to test
 * @returns {boolean} `true` when half-float textures can be created
 */
const checkHalfFloatAvailable = renderer => {
if (renderer.capabilities.isWebGL2) {
return true;
} else {
const gl = renderer.getContext();
const texture = gl.createTexture();
let available = true;
try {
const data = new Uint16Array(4);
const ext = gl.getExtension("OES_texture_half_float");
if (!ext) {
available = false;
} else {
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, ext.HALF_FLOAT_OES, data);
// The upload succeeded only if no GL error was raised
const err = gl.getError();
available = err === gl.NO_ERROR;
}
} catch (err) {
available = false;
}
// Always release the probe texture, whatever the outcome
gl.deleteTexture(texture);
return available;
}
};
/**
 * Vertex positions (local space) of one face of an indexed mesh.
 * @param {Model} model Model containing the meshes
 * @param {number} meshIndex Index of the mesh inside the model
 * @param {number} faceIndex Index of the triangle face
 * @returns {THREE.Vector3[] | null} Three vertices, or null when the mesh /
 * index buffer / face does not exist
 */
const getFaceVertices = (model, meshIndex, faceIndex) => {
  if (!model || meshIndex < 0 || faceIndex < 0) return null;
  const mesh = model.meshes[meshIndex];
  if (!mesh) return null;
  const indexAttrib = mesh.geometry.index;
  if (!indexAttrib || !indexAttrib.array) return null;
  const indexes = indexAttrib.array;
  const face = [indexes[3 * faceIndex], indexes[3 * faceIndex + 1], indexes[3 * faceIndex + 2]];
  if (face.some(val => val == null)) return null;
  const position = mesh.geometry.getAttribute("position");
  return face.map(vertIdx => new THREE.Vector3().fromBufferAttribute(position, vertIdx));
};
/**
 * World-space vertex positions of one face, with bone deformation applied
 * when the mesh is skinned.
 * @returns {THREE.Vector3[] | null} Three transformed vertices, or null when
 * the face cannot be resolved
 */
const getAnimatedFace = (model, meshIndex, faceIndex) => {
  const vertices = getFaceVertices(model, meshIndex, faceIndex);
  if (!vertices) return null;
  const mesh = model.meshes[meshIndex];
  const faceIndexes = mesh.geometry.getIndex().array.slice(3 * faceIndex, 3 * faceIndex + 3);
  if (mesh.isSkinnedMesh) {
    const attributes = mesh.geometry.attributes;
    // De-normalization scales for integer-typed attributes
    const positionScale = getAttributeScale(attributes.position);
    const skinWeightScale = getAttributeScale(attributes.skinWeight);
    // Replace each vertex with its CPU-skinned world-space position
    vertices.forEach((vertex, idx) => {
      vertex.copy(getSkinnedVertex(faceIndexes[idx], mesh, positionScale, skinWeightScale));
    });
  } else {
    // Static mesh: the local -> world transform is enough
    vertices.forEach(vertex => {
      vertex.applyMatrix4(mesh.matrixWorld);
    });
  }
  return vertices;
};
/**
 * Extract a sub-clip of `sourceClip` covering [startTime, endTime), in seconds.
 * One extra keyframe is kept on each side of the range so interpolation at the
 * clip boundaries stays correct. The source clip is cloned, not mutated.
 * @param {THREE.AnimationClip} sourceClip Clip to slice
 * @param {string} name Name for the new clip
 * @param {number} startTime Start time in seconds
 * @param {number} endTime End time in seconds
 * @returns {THREE.AnimationClip} New clip re-based to start at t = 0
 */
const subclip = (sourceClip, name, startTime, endTime) => {
const clip = sourceClip.clone();
clip.name = name;
const tracks = [];
clip.tracks.forEach(track => {
const valueSize = track.getValueSize();
const times = [];
const values = [];
for (let timeIdx = 0; timeIdx < track.times.length; ++timeIdx) {
const time = track.times[timeIdx];
const nextTime = track.times[timeIdx + 1];
const prevTime = track.times[timeIdx - 1];
// Keep the last keyframe before the range and the first one after it,
// in addition to every keyframe inside [startTime, endTime).
const isPrevFrame = nextTime && time < startTime && nextTime > startTime;
const isMiddleFrame = time >= startTime && time < endTime;
const isNextFrame = prevTime && time >= endTime && prevTime < endTime;
if (!isPrevFrame && !isMiddleFrame && !isNextFrame) continue;
times.push(time);
// Copy the full value tuple (valueSize floats) for this keyframe
for (let k = 0; k < valueSize; ++k) {
values.push(track.values[timeIdx * valueSize + k]);
}
}
// Drop tracks that end up with no keyframes at all
if (times.length === 0) return;
track.times = convertArray(times, track.times.constructor);
track.values = convertArray(values, track.values.constructor);
tracks.push(track);
});
clip.tracks = tracks;
// Shift all kept tracks so the extracted range starts at time 0
for (let i = 0; i < clip.tracks.length; ++i) {
clip.tracks[i].shift(-startTime);
}
clip.duration = endTime - startTime;
return clip;
};
// From three.js AnimationUtils
// https://github.com/mrdoob/three.js/blob/68daccedef9c9c325cc5f4c929fcaf05229aa1b3/src/animation/AnimationUtils.js#L20
// The MIT License
// Copyright © 2010-2022 three.js authors
// Convert `array` to the given `type` (a typed-array constructor or Array).
// Returns the input untouched when it is null/undefined, or when it is
// already of the requested type and `forceClone` is false.
const convertArray = (array, type, forceClone = false) => {
  if (!array || (!forceClone && array.constructor === type)) {
    return array;
  }
  if (typeof type.BYTES_PER_ELEMENT === "number") {
    // Typed-array constructor: copy into a new typed array
    return new type(array);
  }
  // Plain Array requested: shallow copy
  return Array.prototype.slice.call(array);
};
// Parse an [x, y, z] tuple where string entries are percentages relative to
// the bounding box extents (e.g. "50" -> min + 0.5 * size on that axis);
// numeric entries are taken as-is.
const parseAsBboxRatio = (arr, bbox) => {
  const minArr = bbox.min.toArray();
  const sizeArr = new THREE.Vector3().subVectors(bbox.max, bbox.min).toArray();
  const components = arr.map((val, idx) => {
    if (!isString(val)) return val;
    const ratio = parseFloat(val) * 0.01;
    return minArr[idx] + ratio * sizeArr[idx];
  });
  return new THREE.Vector3().fromArray(components);
};
/*! *****************************************************************************
Copyright (c) Microsoft Corporation.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
***************************************************************************** */
// tslib helper backing object-rest destructuring: copy every own enumerable
// property of `s` (string keys and symbols) whose key is not listed in `e`.
function __rest(s, e) {
  const t = {};
  for (const key in s) {
    if (Object.prototype.hasOwnProperty.call(s, key) && e.indexOf(key) < 0) {
      t[key] = s[key];
    }
  }
  if (s != null && typeof Object.getOwnPropertySymbols === "function") {
    for (const sym of Object.getOwnPropertySymbols(s)) {
      if (e.indexOf(sym) < 0 && Object.prototype.propertyIsEnumerable.call(s, sym)) {
        t[sym] = s[sym];
      }
    }
  }
  return t;
}
/**
 * tslib async/await runtime helper.
 * Drives `generator` (a transpiled async function body) to completion by
 * stepping it and chaining each yielded value as a promise; resolves the
 * returned promise with the generator's return value, rejects on any throw.
 */
function __awaiter(thisArg, _arguments, P, generator) {
// Wrap non-promise yielded values so `.then` is always available
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
}
/*
 * Copyright (c) 2020 NAVER Corp.
 * egjs projects are licensed under the MIT license
 */
// Browser related constants
// iOS detection; the "MacIntel" + maxTouchPoints check additionally catches
// iPads on iPadOS 13+, which report a desktop-Mac user agent string.
const IS_IOS = () => /iPad|iPhone|iPod/.test(navigator.userAgent) || navigator.platform === "MacIntel" && navigator.maxTouchPoints > 1;
const IS_ANDROID = () => /android/i.test(navigator.userAgent);
// Native DOM / browser event names used by input handlers and the renderer
const EVENTS = {
MOUSE_DOWN: "mousedown",
MOUSE_MOVE: "mousemove",
MOUSE_UP: "mouseup",
TOUCH_START: "touchstart",
TOUCH_MOVE: "touchmove",
TOUCH_END: "touchend",
WHEEL: "wheel",
RESIZE: "resize",
CONTEXT_MENU: "contextmenu",
MOUSE_ENTER: "mouseenter",
MOUSE_LEAVE: "mouseleave",
POINTER_DOWN: "pointerdown",
POINTER_MOVE: "pointermove",
POINTER_UP: "pointerup",
POINTER_ENTER: "pointerenter",
POINTER_LEAVE: "pointerleave",
LOAD: "load",
ERROR: "error",
CLICK: "click",
DOUBLE_CLICK: "dblclick",
CONTEXT_LOST: "webglcontextlost",
CONTEXT_RESTORED: "webglcontextrestored"
};
// CSS cursor values applied to the canvas while (not) dragging
const CURSOR = {
GRAB: "grab",
GRABBING: "grabbing",
NONE: ""
};
// Mouse button codes, matching MouseEvent.button values
// https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent.button
var MOUSE_BUTTON;
(function (MOUSE_BUTTON) {
MOUSE_BUTTON[MOUSE_BUTTON["LEFT"] = 0] = "LEFT";
MOUSE_BUTTON[MOUSE_BUTTON["MIDDLE"] = 1] = "MIDDLE";
MOUSE_BUTTON[MOUSE_BUTTON["RIGHT"] = 2] = "RIGHT";
})(MOUSE_BUTTON || (MOUSE_BUTTON = {}));
// crossOrigin value used for loading external resources
const ANONYMOUS = "anonymous";
// Tag names for dynamically created DOM elements
const EL_DIV = "div";
const EL_BUTTON = "button";
/*
 * Copyright (c) 2020 NAVER Corp.
 * egjs projects are licensed under the MIT license
 */
/**
 * "auto"
 * @type {"auto"}
 */
const AUTO = "auto";
/**
 * Event type object with event name strings of {@link View3D}
 * @type {object}
 * @property {"ready"} READY {@link /docs/events/ready Ready event}
 * @property {"loadStart"} LOAD_START {@link /docs/events/loadStart Load start event}
 * @property {"load"} LOAD {@link /docs/events/load Load event}
 * @property {"loadError"} LOAD_ERROR {@link /docs/events/loadError Load error event}
 * @property {"loadFinish"} LOAD_FINISH {@link /docs/events/loadFinish Load finish event}
 * @property {"modelChange"} MODEL_CHANGE {@link /docs/events/modelChange Model change event}
 * @property {"resize"} RESIZE {@link /docs/events/resize Resize event}
 * @property {"beforeRender"} BEFORE_RENDER {@link /docs/events/beforeRender Before render event}
 * @property {"render"} RENDER {@link /docs/events/render Render event}
 * @property {"progress"} PROGRESS {@link /docs/events/progress Progress event}
 * @property {"inputStart"} INPUT_START {@link /docs/events/inputStart Input start event}
 * @property {"inputEnd"} INPUT_END {@link /docs/events/inputEnd Input end event}
 * @property {"cameraChange"} CAMERA_CHANGE {@link /docs/events/cameraChange Camera change event}
 * @property {"animationStart"} ANIMATION_START {@link /docs/events/animationStart Animation start event}
 * @property {"animationLoop"} ANIMATION_LOOP {@link /docs/events/animationLoop Animation loop event}
 * @property {"animationFinished"} ANIMATION_FINISHED {@link /docs/events/animationFinished Animation finished event}
 * @property {"annotationFocus"} ANNOTATION_FOCUS {@link /docs/events/annotationFocus Annotation focus event}
 * @property {"annotationUnfocus"} ANNOTATION_UNFOCUS {@link /docs/events/annotationUnfocus Annotation unfocus event}
 * @property {"quickLookTap"} QUICK_LOOK_TAP {@link /docs/events/quickLookTap Quick Look Tap event}
 * @property {"arStart"} AR_START {@link /docs/events/arStart AR start event}
 * @property {"arEnd"} AR_END {@link /docs/events/arEnd AR end event}
 * @property {"arModelPlaced"} AR_MODEL_PLACED {@link /docs/events/arModelPlaced AR model placed event}
 * @example
 * ```ts
 * import { EVENTS } from "@egjs/view3d";
 * EVENTS.RESIZE; // "resize"
 * ```
 */
const EVENTS$1 = {
READY: "ready",
LOAD_START: "loadStart",
LOAD: "load",
LOAD_ERROR: "loadError",
LOAD_FINISH: "loadFinish",
MODEL_CHANGE: "modelChange",
RESIZE: "resize",
BEFORE_RENDER: "beforeRender",
RENDER: "render",
PROGRESS: "progress",
INPUT_START: "inputStart",
INPUT_END: "inputEnd",
CAMERA_CHANGE: "cameraChange",
ANIMATION_START: "animationStart",
ANIMATION_LOOP: "animationLoop",
ANIMATION_FINISHED: "animationFinished",
ANNOTATION_FOCUS: "annotationFocus",
ANNOTATION_UNFOCUS: "annotationUnfocus",
AR_START: "arStart",
AR_END: "arEnd",
AR_MODEL_PLACED: "arModelPlaced",
QUICK_LOOK_TAP: "quickLookTap"
};
/**
 * Collection of predefined easing functions
 * @type {object}
 * @property {function} SINE_WAVE One full sine cycle over [0, 1], range [-1, 1]
 * @property {function} EASE_OUT_CUBIC Cubic ease-out
 * @property {function} EASE_OUT_BOUNCE Bounce ease-out
 * @example
 * ```ts
 * import View3D, { EASING } from "@egjs/view3d";
 *
 * new RotateControl({
 *   easing: EASING.EASE_OUT_CUBIC,
 * });
 * ```
 */
const EASING = {
  SINE_WAVE: x => Math.sin(x * Math.PI * 2),
  EASE_OUT_CUBIC: x => 1 - Math.pow(1 - x, 3),
  // Standard piecewise bounce curve (four parabolic segments)
  EASE_OUT_BOUNCE: x => {
    const n1 = 7.5625;
    const d1 = 2.75;
    if (x < 1 / d1) {
      return n1 * x * x;
    }
    if (x < 2 / d1) {
      const t = x - 1.5 / d1;
      return n1 * t * t + 0.75;
    }
    if (x < 2.5 / d1) {
      const t = x - 2.25 / d1;
      return n1 * t * t + 0.9375;
    }
    const t = x - 2.625 / d1;
    return n1 * t * t + 0.984375;
  }
};
/**
 * Default class names that View3D uses
 * @type {object}
 * @property {"view3d-wrapper"} WRAPPER A class name for wrapper element
 * @property {"view3d-canvas"} CANVAS A class name for canvas element
 * @property {"view3d-poster"} POSTER A class name for poster element
 * @property {"view3d-ar-overlay"} AR_OVERLAY A class name for AR overlay element
 * @property {"view3d-annotation-wrapper"} ANNOTATION_WRAPPER A class name for annotation wrapper element
 * @property {"view3d-annotation"} ANNOTATION A class name for annotation element
 * @property {"view3d-annotation-tooltip"} ANNOTATION_TOOLTIP A class name for annotation tooltip element
 * @property {"default"} ANNOTATION_DEFAULT A class name for default style annotation element
 * @property {"selected"} ANNOTATION_SELECTED A class name for selected annotation element
 * @property {"hidden"} ANNOTATION_HIDDEN A class name for hidden annotation element
 * @property {"flip-x"} ANNOTATION_FLIP_X A class name for annotation element which has tooltip on the left side
 * @property {"flip-y"} ANNOTATION_FLIP_Y A class name for annotation element which has tooltip on the bottom side
 * @property {"ctx-lost"} CTX_LOST A class name for canvas element which will be added on context lost
 */
const DEFAULT_CLASS = {
WRAPPER: "view3d-wrapper",
CANVAS: "view3d-canvas",
POSTER: "view3d-poster",
AR_OVERLAY: "view3d-ar-overlay",
ANNOTATION_WRAPPER: "view3d-annotation-wrapper",
ANNOTATION: "view3d-annotation",
ANNOTATION_TOOLTIP: "view3d-annotation-tooltip",
ANNOTATION_DEFAULT: "default",
ANNOTATION_SELECTED: "selected",
ANNOTATION_HIDDEN: "hidden",
ANNOTATION_FLIP_X: "flip-x",
ANNOTATION_FLIP_Y: "flip-y",
CTX_LOST: "ctx-lost"
};
/**
 * Possible values for the toneMapping option.
 * This is used to approximate the appearance of high dynamic range (HDR) on the low dynamic range medium of a standard computer monitor or mobile device's screen.
 * @type {object}
 * @property {THREE.LinearToneMapping} LINEAR
 * @property {THREE.ReinhardToneMapping} REINHARD
 * @property {THREE.CineonToneMapping} CINEON
 * @property {THREE.ACESFilmicToneMapping} ACES_FILMIC
 */
const TONE_MAPPING = {
LINEAR: THREE.LinearToneMapping,
REINHARD: THREE.ReinhardToneMapping,
CINEON: THREE.CineonToneMapping,
ACES_FILMIC: THREE.ACESFilmicToneMapping
};
/**
 * Types of zoom control
 * @type {object}
 * @property {"fov"} FOV Zoom by changing fov(field-of-view). This will prevent camera from going inside the model.
 * @property {"distance"} DISTANCE Zoom by changing camera distance from the model.
 */
const ZOOM_TYPE = {
FOV: "fov",
DISTANCE: "distance"
};
/**
 * Available AR session types
 * @type {object}
 * @property {"WebXR"} WEBXR An AR session based on {@link https://developer.mozilla.org/en-US/docs/Web/API/WebXR_Device_API WebXR Device API}
 * @property {"SceneViewer"} SCENE_VIEWER An AR session based on {@link https://developers.google.com/ar/develop/java/scene-viewer Google SceneViewer}, which is only available in Android
 * @property {"QuickLook"} QUICK_LOOK An AR session based on Apple {@link https://developer.apple.com/augmented-reality/quick-look/ AR Quick Look}, which is only available in iOS
 */
const AR_SESSION_TYPE = {
WEBXR: "webAR",
SCENE_VIEWER: "sceneViewer",
QUICK_LOOK: "quickLook"
};
/**
 * Display modes for Google SceneViewer
 * @type {object}
 * @property {"ar_only"} ONLY_AR
 * @property {"3d_only"} ONLY_3D
 * @property {"ar_preferred"} PREFER_AR
 * @property {"3d_preferred"} PREFER_3D
 */
const SCENE_VIEWER_MODE = {
ONLY_AR: "ar_only",
ONLY_3D: "3d_only",
PREFER_AR: "ar_preferred",
PREFER_3D: "3d_preferred"
};
/**
 * Types of the call-to-action button shown in Apple AR Quick Look
 * <img src="https://docs-assets.developer.apple.com/published/b122cc68df/10cb0534-e1f6-42ed-aadb-5390c55ad3ff.png" />
 * @type {object}
 * @see https://developer.apple.com/documentation/arkit/adding_an_apple_pay_button_or_a_custom_action_in_ar_quick_look
 * @property {"plain"} PLAIN
 * @property {"pay"} PAY
 * @property {"buy"} BUY
 * @property {"check-out"} CHECK_OUT
 * @property {"book"} BOOK
 * @property {"donate"} DONATE
 * @property {"subscribe"} SUBSCRIBE
 */
const QUICK_LOOK_APPLE_PAY_BUTTON_TYPE = {
PLAIN: "plain",
PAY: "pay",
BUY: "buy",
CHECK_OUT: "check-out",
BOOK: "book",
DONATE: "donate",
SUBSCRIBE: "subscribe"
};
/**
 * Available size of the custom banner
 * @type {object}
 * @property {"small"} SMALL 81pt
 * @property {"medium"} MEDIUM 121pt
 * @property {"large"} LARGE 161pt
 */
const QUICK_LOOK_CUSTOM_BANNER_SIZE = {
SMALL: "small",
MEDIUM: "medium",
LARGE: "large"
};
/**
 * Input types
 * @type {object}
 * @property {0} ROTATE Rotate input
 * @property {1} TRANSLATE Translate input
 * @property {2} ZOOM Zoom input
 */
const INPUT_TYPE = {
ROTATE: 0,
TRANSLATE: 1,
ZOOM: 2
};
/**
 * Animation repeat modes
 * @type {object}
 * @property {"one"} ONE Repeat single animation
 * @property {"none"} NONE Pause on animation's last frame
 * @property {"all"} ALL Repeat all animations
 */
const ANIMATION_REPEAT_MODE = {
ONE: "one",
NONE: "none",
ALL: "all"
};
/*
 * Copyright (c) 2020 NAVER Corp.
 * egjs projects are licensed under the MIT license
 */
/**
 * Renderer that renders View3D's Scene
 */
class Renderer {
/**
 * Create new Renderer instance
 * @param {View3D} view3D An instance of View3D
 */
constructor(view3D) {
// Default per-frame callback: render only while something (animation,
// user control, or auto-player) is actually animating; otherwise skip.
this._defaultRenderLoop = delta => {
const view3D = this._view3D;
const {
control,
autoPlayer,
animator
} = view3D;
if (!animator.animating && !control.animating && !autoPlayer.animating) return;
this._renderFrame(delta);
};
// Mark the canvas with a class on WebGL context loss so styles can react
this._onContextLost = () => {
const canvas = this._canvas;
canvas.classList.add(DEFAULT_CLASS.CTX_LOST);
};
// On restore: clear the marker class, re-upload textures, render one frame
this._onContextRestore = () => {
const canvas = this._canvas;
const scene = this._view3D.scene;
canvas.classList.remove(DEFAULT_CLASS.CTX_LOST);
scene.initTextures();
this.renderSingleFrame();
};
const canvas = findCanvas(view3D.rootEl, view3D.canvasSelector);
this._canvas = canvas;
this._view3D = view3D;
this._renderQueued = false;
const renderer = new THREE.WebGLRenderer({
canvas,
alpha: true,
antialias: true,
preserveDrawingBuffer: true
});
renderer.toneMapping = view3D.toneMapping;
renderer.toneMappingExposure = view3D.exposure;
renderer.outputEncoding = THREE.sRGBEncoding;
// Transparent clear color so the page background shows through
renderer.setClearColor(0x000000, 0);
this._halfFloatAvailable = checkHalfFloatAvailable(renderer);
this._renderer = renderer;
// Clock is created stopped; started when an animation loop is set
this._clock = new THREE.Clock(false);
this._canvasSize = new THREE.Vector2();
canvas.addEventListener(EVENTS.CONTEXT_LOST, this._onContextLost);
canvas.addEventListener(EVENTS.CONTEXT_RESTORED, this._onContextRestore);
}
/**
 * {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement HTMLCanvasElement} given when creating View3D instance
 * @type HTMLCanvasElement
 * @readonly
 */
get canvas() {
return this._canvas;
}
/**
 * Current {@link https://developer.mozilla.org/en-US/docs/Web/API/WebGLRenderingContext WebGLRenderingContext}
 * @type WebGLRenderingContext
 * @readonly
 */
get context() {
return this._renderer.getContext();
}
/**
 * Three.js {@link https://threejs.org/docs/#api/en/renderers/WebGLRenderer WebGLRenderer} instance
 * @type THREE.WebGLRenderer
 * @readonly
 */
get threeRenderer() {
return this._renderer;
}
/**
 * Default render loop of View3D
 * @type {function}
 * @readonly
 */
get defaultRenderLoop() {
return this._defaultRenderLoop;
}
/**
 * The rendering width and height of the canvas
 * @type {object}
 * @param {number} width Width of the canvas
 * @param {number} height Height of the canvas
 * @readonly
 */
get size() {
const renderingSize = this._renderer.getSize(new THREE.Vector2());
return {
width: renderingSize.width,
// NOTE(review): `.y` is used here while `.width` is used above;
// presumably equivalent on THREE.Vector2 — confirm and unify to `.height`.
height: renderingSize.y
};
}
/**
 * Canvas element's actual size
 * @type THREE.Vector2
 * @readonly
 */
get canvasSize() {
return this._canvasSize;
}
/**
 * An object containing details about the capabilities of the current RenderingContext.
 * Merged with three.js WebGLRenderer's capabilities.
 */
get capabilities() {
const renderer = this._renderer;
return Object.assign(Object.assign({}, renderer.capabilities), {
halfFloat: this._halfFloatAvailable
});
}
/**
 * Destroy the renderer and stop active animation loop
 */
destroy() {
const canvas = this._canvas;
this.stopAnimationLoop();
this._renderer.dispose();
canvas.removeEventListener(EVENTS.CONTEXT_LOST, this._onContextLost);
canvas.removeEventListener(EVENTS.CONTEXT_RESTORED, this._onContextRestore);
}
/**
 * Resize the renderer based on current canvas width / height
 * @returns {void}
 */
resize() {
const renderer = this._renderer;
const canvas = this._canvas;
// While an XR session is presenting, the session owns the framebuffer size
if (renderer.xr.isPresenting) return;
// Fall back to 1px to avoid a zero-sized framebuffer
const width = canvas.clientWidth || 1;
const height = canvas.clientHeight || 1;
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(width, height, false);
this._canvasSize.set(width, height);
}
/**
 * Start an animation loop, passing each frame's clamped delta (seconds)
 * and the optional XRFrame to `callback`.
 * @param {function} callback Called every frame with (delta, frame)
 */
setAnimationLoop(callback) {
const view3D = this._view3D;
const clock = this._clock;
clock.start();
this._renderer.setAnimationLoop((timestamp, frame) => {
// Clamp delta so a long stall (e.g. background tab) can't produce a huge step
const delta = Math.min(clock.getDelta(), view3D.maxDeltaTime);
callback(delta, frame);
});
}
/**
 * Stop the currently running animation loop and the internal clock.
 */
stopAnimationLoop() {
this._clock.stop();
// See https://threejs.org/docs/#api/en/renderers/WebGLRenderer.setAnimationLoop
this._renderer.setAnimationLoop(null);
}
/**
 * Render one frame outside of the animation loop.
 * @param {boolean} [immediate=false] Render synchronously instead of queueing
 * on the next requestAnimationFrame; repeated non-immediate calls within one
 * frame are coalesced via the `_renderQueued` flag.
 */
renderSingleFrame(immediate = false) {
const renderer = this._renderer;
if (!renderer.xr.isPresenting) {
if (immediate) {
this._renderFrame(0);
} else if (!this._renderQueued) {
requestAnimationFrame(() => {
this._renderFrame(0);
});
this._renderQueued = true;
}
}
}
// Core frame routine: updates animator/control/auto-player, fires
// BEFORE_RENDER, renders the scene and annotations, then fires RENDER.
// `delta` is in seconds; subsystems receive milliseconds.
_renderFrame(delta) {
const view3D = this._view3D;
const threeRenderer = this._renderer;
const {
scene,
camera,
control,
autoPlayer,
animator,
annotation
} = view3D;
// Nothing can be drawn while the WebGL context is lost
if (threeRenderer.getContext().isContextLost()) return;
const deltaMiliSec = delta * 1000;
this._renderQueued = false;
animator.update(delta);
control.update(deltaMiliSec);
autoPlayer.update(deltaMiliSec);
view3D.trigger(EVENTS$1.BEFORE_RENDER, {
type: EVENTS$1.BEFORE_RENDER,
target: view3D,
delta: deltaMiliSec
});
camera.updatePosition();
scene.shadowPlane.render();
threeRenderer.render(scene.root, camera.threeCamera);
// Render annotations
annotation.render();
view3D.trigger(EVENTS$1.RENDER, {
type: EVENTS$1.RENDER,
target: view3D,
delta: deltaMiliSec
});
}
}
// https://github.com/mrdoob/three.js/issues/5552
// http://en.wikipedia.org/wiki/RGBE_image_format
class RGBELoader extends THREE.DataTextureLoader {
constructor( manager ) {
super( manager );
this.type = THREE.HalfFloatType;
}
// adapted from http://www.graphics.cornell.edu/~bjw/rgbe.html
parse( buffer ) {
const
/* return codes for rgbe routines */
//RGBE_RETURN_SUCCESS = 0,
RGBE_RETURN_FAILURE = - 1,
/* default error routine. change this to change error handling */
rgbe_read_error = 1,
rgbe_write_error = 2,
rgbe_format_error = 3,
rgbe_memory_error = 4,
rgbe_error = function ( rgbe_error_code, msg ) {
switch ( rgbe_error_code ) {
case rgbe_read_error: console.error( 'THREE.RGBELoader Read Error: ' + ( msg || '' ) );
break;
case rgbe_write_error: console.error( 'THREE.RGBELoader Write Error: ' + ( msg || '' ) );
break;
case rgbe_format_error: console.error( 'THREE.RGBELoader Bad File Format: ' + ( msg || '' ) );
break;
default:
case rgbe_memory_error: console.error( 'THREE.RGBELoader: Error: ' + ( msg || '' ) );
}
return RGBE_RETURN_FAILURE;
},
/* offsets to red, green, and blue components in a data (float) pixel */
//RGBE_DATA_RED = 0,
//RGBE_DATA_GREEN = 1,
//RGBE_DATA_BLUE = 2,
/* number of floats per pixel, use 4 since stored in rgba image format */
//RGBE_DATA_SIZE = 4,
/* flags indicating which fields in an rgbe_header_info are valid */
RGBE_VALID_PROGRAMTYPE = 1,
RGBE_VALID_FORMAT = 2,
RGBE_VALID_DIMENSIONS = 4,
NEWLINE = '\n',
fgets = function ( buffer, lineLimit, consume ) {
const chunkSize = 128;
lineLimit = ! lineLimit ? 1024 : lineLimit;
let p = buffer.pos,
i = - 1, len = 0, s = '',
chunk = String.fromCharCode.apply( null, new Uint16Array( buffer.subarray( p, p + chunkSize ) ) );
while ( ( 0 > ( i = chunk.indexOf( NEWLINE ) ) ) && ( len < lineLimit ) && ( p < buffer.byteLength ) ) {
s += chunk; len += chunk.length;
p += chunkSize;
chunk += String.fromCharCode.apply( null, new Uint16Array( buffer.subarray( p, p + chunkSize ) ) );
}
if ( - 1 < i ) {
/*for (i=l-1; i>=0; i--) {
byteCode = m.charCodeAt(i);
if (byteCode > 0x7f && byteCode <= 0x7ff) byteLen++;
else if (byteCode > 0x7ff && byteCode <= 0xffff) byteLen += 2;
if (byteCode >= 0xDC00 && byteCode <= 0xDFFF) i--; //trail surrogate
}*/
if ( false !== consume ) buffer.pos += len + i + 1;
return s + chunk.slice( 0, i );
}
return false;
},
/* minimal header reading. modify if you want to parse more information */
RGBE_ReadHeader = function ( buffer ) {
// regexes to parse header info fields
const magic_token_re = /^#\?(\S+)/,
gamma_re = /^\s*GAMMA\s*=\s*(\d+(\.\d+)?)\s*$/,
exposure_re = /^\s*EXPOSURE\s*=\s*(\d+(\.\d+)?)\s*$/,
format_re = /^\s*FORMAT=(\S+)\s*$/,
dimensions_re = /^\s*\-Y\s+(\d+)\s+\+X\s+(\d+)\s*$/,
// RGBE format header struct
header = {
valid: 0, /* indicate which fields are valid */
string: '', /* the actual header string */
comments: '', /* comments found in header */
programtype: 'RGBE', /* listed at beginning of file to identify it after "#?". defaults to "RGBE" */
format: '', /* RGBE format, default 32-bit_rle_rgbe */
gamma: 1.0, /* image has already been gamma corrected with given gamma. defaults to 1.0 (no correction) */
exposure: 1.0, /* a value of 1.0 in an image corresponds to <exposure> watts/steradian/m^2. defaults to 1.0 */
width: 0, height: 0 /* image dimensions, width/height */
};
let line, match;
if ( buffer.pos >= buffer.byteLength || ! ( line = fgets( buffer ) ) ) {
return rgbe_error( rgbe_read_error, 'no header found' );
}
/* if you want to require the magic token then uncomment the next line */
if ( ! ( match = line.match( magic_token_re ) ) ) {
return rgbe_error( rgbe_format_error, 'bad initial token' );
}
header.valid |= RGBE_VALID_PROGRAMTYPE;
header.programtype = match[ 1 ];
header.string += line + '\n';
while ( true ) {
line = fgets( buffer );
if ( false === line ) break;
header.string += line + '\n';
if ( '#' === line.charAt( 0 ) ) {
header.comments += line + '\n';
continue; // comment line
}
if ( match = line.match( gamma_re ) ) {
header.gamma = parseFloat( match[ 1 ], 10 );
}
if ( match = line.match( exposure_re ) ) {
header.exposure = parseFloat( match[ 1 ], 10 );
}
if ( match = line.match( format_re ) ) {
header.valid |= RGBE_VALID_FORMAT;
header.format = match[ 1 ];//'32-bit_rle_rgbe';
}
if ( match = line.match( dimensions_re ) ) {
header.valid |= RGBE_VALID_DIMENSIONS;
header.height = parseInt( match[ 1 ], 10 );
header.width = parseInt( match[ 2 ], 10 );
}
if ( ( header.valid & RGBE_VALID_FORMAT ) && ( header.valid & RGBE_VALID_DIMENSIONS ) ) break;
}
if ( ! ( header.valid & RGBE_VALID_FORMAT ) ) {
return rgbe_error( rgbe_format_error, 'missing format specifier' );
}
if ( ! ( header.valid & RGBE_VALID_DIMENSIONS ) ) {
return rgbe_error( rgbe_format_error, 'missing image size specifier' );
}
return header;
},
/* Decode the pixel payload into a flat RGBA byte array (4 bytes per pixel,
   still RGBE-encoded). Handles both new-style RLE scanlines and flat data;
   returns RGBE_RETURN_FAILURE (via rgbe_error) on malformed input. */
RGBE_ReadPixels_RLE = function ( buffer, w, h ) {
const scanline_width = w;
if (
// run length encoding is not allowed so read flat
( ( scanline_width < 8 ) || ( scanline_width > 0x7fff ) ) ||
// this file is not run length encoded
// (RLE scanlines start with bytes 2, 2 and a 15-bit width)
( ( 2 !== buffer[ 0 ] ) || ( 2 !== buffer[ 1 ] ) || ( buffer[ 2 ] & 0x80 ) )
) {
// return the flat buffer
return new Uint8Array( buffer );
}
// Width encoded in the first scanline header must match the header dimensions.
if ( scanline_width !== ( ( buffer[ 2 ] << 8 ) | buffer[ 3 ] ) ) {
return rgbe_error( rgbe_format_error, 'wrong scanline width' );
}
const data_rgba = new Uint8Array( 4 * w * h );
if ( ! data_rgba.length ) {
return rgbe_error( rgbe_memory_error, 'unable to allocate buffer space' );
}
// offset: write position in data_rgba; pos: read position in buffer.
let offset = 0, pos = 0;
const ptr_end = 4 * scanline_width;
const rgbeStart = new Uint8Array( 4 );
// Scratch buffer holding one decoded scanline in planar order:
// [R0..Rw-1, G0..Gw-1, B0..Bw-1, E0..Ew-1].
const scanline_buffer = new Uint8Array( ptr_end );
let num_scanlines = h;
// read in each successive scanline
while ( ( num_scanlines > 0 ) && ( pos < buffer.byteLength ) ) {
if ( pos + 4 > buffer.byteLength ) {
return rgbe_error( rgbe_read_error );
}
// Every RLE scanline begins with a 4-byte header: 2, 2, width-hi, width-lo.
rgbeStart[ 0 ] = buffer[ pos ++ ];
rgbeStart[ 1 ] = buffer[ pos ++ ];
rgbeStart[ 2 ] = buffer[ pos ++ ];
rgbeStart[ 3 ] = buffer[ pos ++ ];
if ( ( 2 != rgbeStart[ 0 ] ) || ( 2 != rgbeStart[ 1 ] ) || ( ( ( rgbeStart[ 2 ] << 8 ) | rgbeStart[ 3 ] ) != scanline_width ) ) {
return rgbe_error( rgbe_format_error, 'bad rgbe scanline format' );
}
// read each of the four channels for the scanline into the buffer
// first red, then green, then blue, then exponent
let ptr = 0, count;
while ( ( ptr < ptr_end ) && ( pos < buffer.byteLength ) ) {
count = buffer[ pos ++ ];
// A count byte > 128 marks a run of (count - 128) identical bytes;
// <= 128 marks a literal run of `count` distinct bytes.
const isEncodedRun = count > 128;
if ( isEncodedRun ) count -= 128;
if ( ( 0 === count ) || ( ptr + count > ptr_end ) ) {
return rgbe_error( rgbe_format_error, 'bad scanline data' );
}
if ( isEncodedRun ) {
// a (encoded) run of the same value
const byteValue = buffer[ pos ++ ];
for ( let i = 0; i < count; i ++ ) {
scanline_buffer[ ptr ++ ] = byteValue;
}
//ptr += count;
} else {
// a literal-run
scanline_buffer.set( buffer.subarray( pos, pos + count ), ptr );
ptr += count; pos += count;
}
}
// now convert data from buffer into rgba
// first red, then green, then blue, then exponent (alpha)
// i.e. transpose the planar scanline into interleaved RGBE pixels.
const l = scanline_width; //scanline_buffer.byteLength;
for ( let i = 0; i < l; i ++ ) {
let off = 0;
data_rgba[ offset ] = scanline_buffer[ i + off ];
off += scanline_width; //1;
data_rgba[ offset + 1 ] = scanline_buffer[ i + off ];
off += scanline_width; //1;
data_rgba[ offset + 2 ] = scanline_buffer[ i + off ];
off += scanline_width; //1;
data_rgba[ offset + 3 ] = scanline_buffer[ i + off ];
offset += 4;
}
num_scanlines --;
}
return data_rgba;
};
// Decode one RGBE pixel (3 mantissa bytes + 1 shared-exponent byte, read
// from sourceArray at sourceOffset) into three linear 32-bit floats written
// to destArray at destOffset.
const RGBEByteToRGBFloat = function ( sourceArray, sourceOffset, destArray, destOffset ) {
	const exponent = sourceArray[ sourceOffset + 3 ];
	const scale = Math.pow( 2.0, exponent - 128.0 ) / 255.0;
	for ( let channel = 0; channel < 3; channel ++ ) {
		destArray[ destOffset + channel ] = sourceArray[ sourceOffset + channel ] * scale;
	}
};
// Decode one RGBE pixel into three 16-bit half floats written to destArray.
// Each channel is clamped to 65504, the largest finite binary16 value,
// before conversion with THREE.DataUtils.toHalfFloat.
const RGBEByteToRGBHalf = function ( sourceArray, sourceOffset, destArray, destOffset ) {
	const exponent = sourceArray[ sourceOffset + 3 ];
	const scale = Math.pow( 2.0, exponent - 128.0 ) / 255.0;
	const FLOAT16_MAX = 65504;
	for ( let channel = 0; channel < 3; channel ++ ) {
		const linearValue = Math.min( sourceArray[ sourceOffset + channel ] * scale, FLOAT16_MAX );
		destArray[ destOffset + channel ] = THREE.DataUtils.toHalfFloat( linearValue );
	}
};
// Wrap the input ArrayBuffer in a byte view carrying a `pos` read cursor
// that fgets()/RGBE_ReadHeader() advance while consuming the text header.
const byteArray = new Uint8Array( buffer );
byteArray.pos = 0;
const rgbe_header_info = RGBE_ReadHeader( byteArray );
if ( RGBE_RETURN_FAILURE !== rgbe_header_info ) {
const w = rgbe_header_info.width,
h = rgbe_header_info.height,
// Decode the pixel payload that follows the header (RLE or flat).
image_rgba_data = RGBE_ReadPixels_RLE( byteArray.subarray( byteArray.pos ), w, h );
if ( RGBE_RETURN_FAILURE !== image_rgba_data ) {
let data, format, type;
let numElements;
// Convert the raw RGBE bytes into the output type requested via this.type.
switch ( this.type ) {
case THREE.UnsignedByteType:
// Keep the RGBE bytes as-is; decoding happens in the shader.
data = image_rgba_data;
format = THREE.RGBEFormat; // handled as THREE.RGBAFormat in shaders
type = THREE.UnsignedByteType;
break;
case THREE.FloatType:
// Expand each 4-byte RGBE pixel into three 32-bit linear floats.
numElements = image_rgba_data.length / 4;
const floatArray = new Float32Array( numElements * 3 );
for ( let j = 0; j < numElements; j ++ ) {
RGBEByteToRGBFloat( image_rgba_data, j * 4, floatArray, j * 3 );
}
data = floatArray;
format = THREE.RGBFormat;
type = THREE.FloatType;
break;
case THREE.HalfFloatType:
// Expand each 4-byte RGBE pixel into three 16-bit half floats.
numElements = image_rgba_data.length / 4;
const halfArray = new Uint16Array( numElements * 3 );
for ( let j = 0; j < numElements; j ++ ) {
RGBEByteToRGBHalf( image_rgba_data, j * 4, halfArray, j * 3 );
}
data = halfArray;
format = THREE.RGBFormat;
type = THREE.HalfFloatType;
break;
default:
// NOTE(review): for an unsupported type the error is only logged and the
// returned data/format/type fields are undefined.
console.error( 'THREE.RGBELoader: unsupported type: ', this.type );
break;
}
return {
width: w, height: h,
data: data,
header: rgbe_header_info.string,
gamma: rgbe_header_info.gamma,
exposure: rgbe_header_info.exposure,
format: format,
type: type
};
}
}
// Header or pixel decode failed; rgbe_error already logged the cause.
return null;
}
/**
 * Set the texture data type the loader decodes into (e.g. THREE.UnsignedByteType,
 * THREE.FloatType or THREE.HalfFloatType, consumed by parse()).
 * Returns `this` so calls can be chained.
 */
setDataType( value ) {
this.type = value;
return this;
}
load( url, onLoad, onProgress, onError ) {
function onLoadCallback( texture, texData ) {
switch ( texture.type ) {
case THREE.UnsignedByteType:
texture.encoding = THREE.RGBEEncoding;
texture.minFilter = THREE.NearestFilter;
texture.magFilter = THREE.NearestFilter;
texture.generateMipmaps = false;
texture.flipY = true;
break;
case THREE.FloatType:
texture.encoding = THREE.LinearEncoding;
texture.minFilter = THREE.LinearFilter;
texture.magFilter = THREE.LinearFilter;
texture.generateMipmaps = false;
texture.flipY = true;
break;
case THREE.HalfFloatType:
texture.encoding = THREE.LinearEncoding;
texture.minFilter = THREE.LinearFilter;
texture.magFilter = THREE.LinearFilter;
texture.generateMipmaps = false;
texture.flipY = true;
break;
}
if ( onLoad ) onLoad( texture, texData );
}
return super.load( url, onLoadCallback, onProgress, onError );
}
}
/**
 * Base class for all loaders that View3D uses.
 * Holds the owning View3D instance and a shared progress handler
 * that subclasses pass to the underlying three.js loaders.
 */
class Loader {
  /**
   * @param {View3D} view3D An instance of View3D that owns this loader
   */
  constructor(view3D) {
    this._view3D = view3D;
    // Mirrors the ProgressEvent into the per-resource loading context and
    // re-emits it as a View3D "progress" event.
    this._onLoadingProgress = (evt, src, context) => {
      const owner = this._view3D;
      context.initialized = true;
      context.lengthComputable = evt.lengthComputable;
      context.loaded = evt.loaded;
      context.total = evt.total;
      owner.trigger(EVENTS$1.PROGRESS, {
        type: EVENTS$1.PROGRESS,
        target: owner,
        src,
        lengthComputable: evt.lengthComputable,
        loaded: evt.loaded,
        total: evt.total
      });
    };
  }
}
/*
* Copyright (c) 2020 NAVER Corp.
* egjs projects are licensed under the MIT license
*/
/**
 * Texture loader
 */
class TextureLoader extends Loader {
  /**
   * Create new TextureLoader instance
   * @param {View3D} view3D An instance of View3D
   */
  constructor(view3D) {
    super(view3D);
  }
  /**
   * Create new {@link https://threejs.org/docs/index.html#api/en/textures/Texture Texture} with given url
   * Texture's {@link https://threejs.org/docs/index.html#api/en/textures/Texture.flipY flipY} property is `true` by Three.js's policy, so be careful when using it as a map texture.
   * @param url url to fetch image
   */
  load(url) {
    return new Promise((resolve, reject) => {
      const loadingContext = createLoadingContext(this._view3D, url);
      const handleProgress = evt => this._onLoadingProgress(evt, url, loadingContext);
      const handleError = err => {
        // Mark the context initialized so progress tracking stops waiting on it.
        loadingContext.initialized = true;
        reject(err);
      };
      const loader = new THREE.TextureLoader();
      loader.setCrossOrigin(ANONYMOUS);
      loader.load(url, resolve, handleProgress, handleError);
    });
  }
  /**
   * Create new texture with given HDR(RGBE) image url
   * @param url image url
   */
  loadHDRTexture(url) {
    return new Promise((resolve, reject) => {
      const loadingContext = createLoadingContext(this._view3D, url);
      const handleLoad = texture => {
        texture.mapping = THREE.EquirectangularReflectionMapping;
        resolve(texture);
      };
      const handleProgress = evt => this._onLoadingProgress(evt, url, loadingContext);
      const handleError = err => {
        loadingContext.initialized = true;
        reject(err);
      };
      const loader = new RGBELoader();
      // Fall back to full floats when the renderer cannot sample half floats.
      if (!this._view3D.renderer.capabilities.halfFloat) {
        loader.type = THREE.FloatType;
      }
      loader.setCrossOrigin(ANONYMOUS);
      loader.load(url, handleLoad, handleProgress, handleError);
    });
  }
}
/*
* Copyright (c) 2020 NAVER Corp.
* egjs projects are licensed under the MIT license
*/
// Constants that are used internally
// Texture map slot names present on THREE#MeshStandardMaterial (and its
// physical-material extensions) that View3D iterates over when touching maps.
const STANDARD_MAPS = ["alphaMap", "aoMap", "bumpMap", "displacementMap", "emissiveMap", "envMap", "lightMap", "map", "metalnessMap", "normalMap", "roughnessMap", "sheenColorMap", "sheenRoughnessMap", "specularColorMap", "specularIntensityMap", "transmissionMap", "clearcoatMap", "clearcoatNormalMap"];
// Internal control lifecycle event names.
const CONTROL_EVENTS = {
HOLD: "hold",
RELEASE: "release",
ENABLE: "enable",
DISABLE: "disable"
};
var GESTURE;
(function (GESTURE) {
GESTURE[GESTURE["NONE"] = 0] = "NONE";
GESTURE[GESTURE["ONE_FINGER_HORIZONTAL"] = 1] = "ONE_FINGER_HORIZONTAL";
GESTURE[GESTURE["ONE_FINGER_VERTICAL"] = 2] = "ONE_FINGER_VERTICAL";
GESTURE[GESTURE["ONE_FINGER"] = 3] = "ONE_FI