@needle-tools/engine
Version:
Needle Engine is a web-based runtime for 3D apps. It runs on your machine for development, with great integrations into editors like Unity or Blender — and can be deployed onto any device! It is flexible and extensible, with networking and XR built in.
165 lines (164 loc) • 8.12 kB
TypeScript
import '../../../engine/engine_shims.js';
import { AnimationClip, BufferGeometry, Material, Matrix4, MeshBasicMaterial, MeshStandardMaterial, Object3D, OrthographicCamera, PerspectiveCamera, Quaternion, SkinnedMesh, Texture, Vector3, Vector4, WebGLRenderer } from 'three';
import * as fflate from 'three/examples/jsm/libs/fflate.module.js';
import type { OffscreenCanvasExt } from '../../../engine/engine_shims.js';
import type { IUSDExporterExtension } from './Extension.js';
/** Type-only `import()` alias: keeps `three/src/materials/nodes` as a compile-time dependency without emitting a runtime import. */
type MeshPhysicalNodeMaterial = import("three/src/materials/nodes/MeshPhysicalNodeMaterial.js").default;
/** Sanitizes an arbitrary string into a USD-safe identifier (exported as `makeNameSafeForUSD`). NOTE(review): exact sanitization rules are not visible in this declaration — confirm in the implementation. */
declare function makeNameSafe(str: any): any;
/** Returns the set of structural nodes within the given bone hierarchy — presumably the ancestor objects required to keep skeleton paths intact during export; verify against the implementation. */
declare function findStructuralNodesInBoneHierarchy(bones: Array<Object3D>): Set<Object3D<import("three").Object3DEventMap>>;
/** Decomposed TRS transform of a USD prim. Each channel may be null — presumably meaning "not authored"; see also the `needsTranslate`/`needsOrient`/`needsScale` flags on `USDObject`. */
declare type USDObjectTransform = {
position: Vector3 | null;
quaternion: Quaternion | null;
scale: Vector3 | null;
};
/**
 * Event names accepted by `USDObject.addEventListener`.
 * Uses the "LiteralUnion" pattern: `"serialize"` gets editor autocomplete while
 * any other string remains assignable. This requires a UNION with `({} & string)`;
 * the original intersection (`"serialize" & ({} & string)`) collapses to just
 * `"serialize"`, making the `({} & string)` branch dead code.
 */
type USDObjectEventType = "serialize" | ({} & string);
/**
 * One node in the USD hierarchy assembled during export. May carry a three.js
 * geometry, material, camera and/or skinned mesh, and forms a tree via
 * `parent`/`children`. Supports a "serialize" event so exporter extensions can
 * append custom USD text while this prim is written (see `addEventListener`).
 */
declare class USDObject {
// Running counter, presumably used to generate unique export ids — usage not visible in this declaration.
static USDObject_export_id: number;
uuid: string;
name: string;
/** If no type is provided, type is chosen automatically (Xform or Mesh) */
type?: string;
/** MaterialBindingAPI and SkelBindingAPI are handled automatically, extra schemas can be added here */
extraSchemas: string[];
/** Optional human-readable name authored alongside the prim name (presumably USD `displayName` metadata — confirm). */
displayName?: string;
/** USD visibility attribute: "inherited" follows the parent, "invisible" hides the prim. */
visibility?: "inherited" | "invisible";
/** Returns the local transform as a matrix — presumably composed from `transform`; confirm in implementation. */
getMatrix(): Matrix4;
/** Sets the local transform from a matrix. */
setMatrix(value: any): void;
/** @deprecated Use `transform`, or `getMatrix()` if you really need the matrix */
get matrix(): Matrix4;
/** @deprecated Use `transform`, or `setMatrix()` if you really need the matrix */
set matrix(value: Matrix4);
/** Decomposed local transform; null presumably means identity / nothing to author — TODO confirm. */
transform: USDObjectTransform | null;
private _isDynamic;
/** NOTE(review): looks like this marks animated/time-sampled prims — setter is private, so it is derived internally; confirm. */
get isDynamic(): boolean;
private set isDynamic(value);
geometry: BufferGeometry | null;
material: MeshStandardMaterial | MeshBasicMaterial | Material | MeshPhysicalNodeMaterial | null;
camera: PerspectiveCamera | OrthographicCamera | null;
parent: USDObject | null;
skinnedMesh: SkinnedMesh | null;
children: Array<USDObject | null>;
animations: AnimationClip[] | null;
// Listeners keyed by event type; "serialize" is the known key (see USDObjectEventType).
_eventListeners: Record<USDObjectEventType, Function[]>;
// Flags presumably controlling which xformOp channels are authored for this prim — TODO confirm.
needsTranslate: boolean;
needsOrient: boolean;
needsScale: boolean;
/** Creates an empty node and inserts it as the parent of `object` — presumably re-parenting `object` under it; verify against implementation. */
static createEmptyParent(object: USDObject): USDObject;
/** Creates an empty node with no geometry, material or camera. */
static createEmpty(): USDObject;
constructor(id: any, name: any, transform?: USDObjectTransform | null, mesh?: BufferGeometry | null, material?: MeshStandardMaterial | MeshBasicMaterial | MeshPhysicalNodeMaterial | Material | null, camera?: PerspectiveCamera | OrthographicCamera | null, skinnedMesh?: SkinnedMesh | null, animations?: AnimationClip[] | null);
/** Identity check against another object. NOTE(review): comparison rule (uuid vs. reference) not visible here — confirm. */
is(obj: any): boolean;
/** True when this node carries no payload — exact rule not visible here; confirm in implementation. */
isEmpty(): boolean;
/** Copy of this node; whether children are included is not visible here — see `deepClone` for the recursive variant. */
clone(): USDObject;
/** Recursive copy of this node and (presumably) its children — confirm depth semantics in implementation. */
deepClone(): USDObject;
/** Builds the USD prim path for this node — presumably by walking the `parent` chain; confirm. */
getPath(): string;
add(child: any): void;
remove(child: any): void;
/** Registers a callback invoked while this prim is serialized, receiving the active writer and export context. */
addEventListener(evt: USDObjectEventType, listener: (writer: USDWriter, context: USDZExporterContext) => void): void;
removeEventListener(evt: any, listener: (writer: USDWriter, context: USDZExporterContext) => void): void;
/** Dispatches the "serialize" listeners for this prim (presumably) while it is written. */
onSerialize(writer: any, context: any): void;
}
/**
 * Root node of the USD stage being exported. Adds whole-tree traversal and
 * lookup on top of `USDObject`, plus the stage header.
 */
declare class USDDocument extends USDObject {
// Length of the stage — units (frames vs. seconds) not visible here; TODO confirm.
stageLength: number;
/** Identifies this node as the document root. */
get isDocumentRoot(): boolean;
get isDynamic(): boolean;
constructor();
add(child: USDObject): void;
remove(child: USDObject): void;
/** Visits every node in the tree, starting at `current` (presumably defaulting to the document itself). */
traverse(callback: (object: USDObject) => void, current?: USDObject | null): void;
/** Finds a node by its uuid anywhere in the tree, or undefined when absent. */
findById(uuid: string): USDObject | undefined;
/** Produces the usda header text for this stage. */
buildHeader(_context: USDZExporterContext): string;
}
/**
 * Minimal string builder for USD text output: accumulates lines into `str`
 * while tracking an indentation level for nested blocks and arrays.
 */
declare class USDWriter {
/** Accumulated output text. */
str: string;
/** Current indentation depth — presumably in levels, not spaces; confirm in `applyIndent`. */
indent: number;
constructor();
/** Resets the buffer (and presumably the indent) for reuse. */
clear(): void;
/** Opens a block, optionally preceded by `str`; `char` presumably selects the opening character and `createNewLine` whether a newline is emitted — confirm semantics in implementation. */
beginBlock(str?: string | undefined, char?: string, createNewLine?: boolean): void;
/** Closes the most recent block; `char` presumably selects the closing character. */
closeBlock(char?: string): void;
beginArray(str: any): void;
closeArray(): void;
/** Appends a (possibly empty) line at the current indent. */
appendLine(str?: string): void;
/** Returns the accumulated text. */
toString(): string;
/** Prefixes `str` with the current indentation. */
applyIndent(str: any): string;
}
/** Textures collected for export, keyed by name. `scale` optionally carries a per-channel RGBA multiplier — see the `colorScale` parameter of `decompressGpuTexture`. */
declare type TextureMap = {
[name: string]: {
texture: Texture;
scale?: Vector4;
};
};
/**
 * Per-export state shared between the exporter and its extensions: the USD
 * document being built, collected materials/textures, the output file map for
 * the usdz archive, and the options the export was started with.
 */
declare class USDZExporterContext {
/** Root three.js object the export started from, when one was provided. */
root?: Object3D;
exporter: USDZExporter;
extensions: Array<IUSDExporterExtension>;
/** When true, output presumably stays compatible with Apple QuickLook quirks — confirm in implementation. */
quickLookCompatible: boolean;
/** When true, invisible objects are exported as well. */
exportInvisible: boolean;
// Materials collected during export — map key (uuid vs. generated name) not visible here; TODO confirm.
materials: Map<string, Material>;
textures: TextureMap;
/** Files to pack into the final archive: path → raw bytes, optionally paired with fflate zip options. */
files: {
[path: string]: Uint8Array | [Uint8Array, fflate.ZipOptions] | null | any;
};
document: USDDocument;
/** Serialized USD text output. */
output: string;
animations: AnimationClip[];
constructor(root: Object3D | null | undefined, exporter: USDZExporter, options: {
extensions?: Array<IUSDExporterExtension>;
quickLookCompatible: boolean;
exportInvisible: boolean;
});
/** Sanitizes a string into a USD-safe identifier — presumably delegating to the module-level `makeNameSafe`; confirm. */
makeNameSafe(str: any): any;
}
/** AR anchoring type used when the usdz is viewed in QuickLook: attach to a plane, a detected image, a face, or nothing. [documentation](https://developer.apple.com/documentation/arkit/usdz_schemas_for_ar/preliminary_anchoringapi/preliminary_anchoring_type) */
export type Anchoring = "plane" | "image" | "face" | "none";
/** Plane alignment constraint for plane anchoring (horizontal, vertical, or either). [documentation](https://developer.apple.com/documentation/arkit/usdz_schemas_for_ar/preliminary_anchoringapi/preliminary_planeanchoring_alignment) */
export type Alignment = "horizontal" | "vertical" | "any";
/** Options accepted by `USDZExporter.parse` (and stored as `sceneAnchoringOptions`). */
type USDZExporterOptions = {
/** AR anchoring configuration written into the usdz (see `Anchoring` / `Alignment`). */
ar: {
anchoring: {
type: Anchoring;
};
planeAnchoring: {
alignment: Alignment;
};
};
/** Keep output compatible with Apple QuickLook — presumably mirrors `USDZExporterContext.quickLookCompatible`. */
quickLookCompatible: boolean;
extensions: Array<IUSDExporterExtension>;
/** Upper bound for exported texture dimensions — presumably forwarded to the texture readback helpers; confirm. */
maxTextureSize: number;
/** Export invisible objects as well. */
exportInvisible: boolean;
};
/**
 * Exports a three.js scene to a USDZ archive. Configure `sceneAnchoringOptions`,
 * `extensions` and the optional hooks, then call `parse`.
 */
declare class USDZExporter {
/** Enables verbose export logging — presumably; confirm in implementation. */
debug: boolean;
/** When true, nodes that contribute nothing to the output are pruned — exact rule not visible here; TODO confirm. */
pruneUnusedNodes: boolean;
sceneAnchoringOptions: USDZExporterOptions;
extensions: Array<IUSDExporterExtension>;
/** Optional filter called per object; NOTE(review): presumably returning false excludes the object from export — confirm. */
keepObject?: (object: Object3D) => boolean;
/** Optional hook invoked before the USD document is written. */
beforeWritingDocument?: () => void;
constructor();
/** Runs the export and resolves with the bytes of the finished .usdz archive. */
parse(scene: Object3D | null | undefined, options?: USDZExporterOptions): Promise<Uint8Array<ArrayBufferLike>>;
}
/** Result of a GPU texture readback: raw pixel data, plus an ImageBitmap when one was produced (see `decompressGpuTexture`). */
declare type ImageReadbackResult = {
imageData: ImageData;
imageBitmap?: ImageBitmap;
};
/** Reads back a texture from the GPU (can be compressed, a render texture, or anything), optionally applies RGBA colorScale to it, and returns CPU data for further usage.
 * Note that there are WebGL / WebGPU rules preventing some use of data between WebGL contexts.
 * @param texture the three.js texture to read back
 * @param maxTextureSize upper bound for the returned image's dimensions — presumably triggers downscaling; confirm
 * @param renderer renderer whose context performs the readback; behavior for null is not visible here — confirm
 * @param colorScale optional per-channel RGBA multiplier applied to the pixels
 * @returns CPU-side pixel data (and optionally an ImageBitmap)
 */
declare function decompressGpuTexture(texture: any, maxTextureSize?: number, renderer?: WebGLRenderer | null, colorScale?: Vector4 | undefined): Promise<ImageReadbackResult>;
/** This method uses a 'bitmaprenderer' context and doesn't do any pixel manipulation.
 * This way, we can keep the alpha channel as it was, but we're losing the ability to do pixel manipulations or resize operations.
 * @param image source image; must expose width/height
 * @param maxTextureSize upper size bound — NOTE(review): since this path cannot resize, how the bound is enforced is not visible here; confirm
 */
declare function imageToCanvasUnpremultiplied(image: ImageBitmapSource & {
width: number;
height: number;
}, maxTextureSize?: number): Promise<OffscreenCanvasExt>;
/** This method uses a '2d' canvas context for pixel manipulation, and can apply a color scale or Y flip to the given image.
 * Unfortunately, canvas always uses premultiplied data, and thus images with low alpha values (or multiplying by a=0) will result in black pixels.
 * @param image source image to draw
 * @param color optional per-channel RGBA multiplier
 * @param flipY flips the image vertically when true
 * @param maxTextureSize upper bound for the output canvas size — presumably downscales larger inputs; confirm
 */
declare function imageToCanvas(image: HTMLImageElement | HTMLCanvasElement | OffscreenCanvas | ImageBitmap, color?: Vector4 | undefined, flipY?: boolean, maxTextureSize?: number): Promise<OffscreenCanvasExt>;
/** Derives the name used for a bone in USD output — presumably sanitized via `makeNameSafe`; confirm in implementation. */
declare function getBoneName(bone: Object3D): any;
/** Derives the USD-safe name for a material (re-exported as `getMaterialNameForUSD`). Uniqueness guarantees are not visible here — confirm in implementation. */
declare function getMaterialName(material: Material): string;
/** Builds the path from `bone` toward the skeleton under `assumedRoot` — presumably a USD prim path string; confirm return shape in implementation. */
declare function getPathToSkeleton(bone: Object3D, assumedRoot: Object3D): any;
/** Writes the Xform prim for `model` into `writer` using the export `context` — presumably recursing into children; confirm in implementation. */
export declare function buildXform(model: USDObject | null, writer: USDWriter, context: USDZExporterContext): void;
/** Formats a number for USD text output (re-exported as `usdNumberFormatting`). Precision/rounding rules are not visible here — confirm in implementation. */
declare function fn(num: number): string;
/** Serializes a 4x4 matrix into its USD text representation — element ordering (row- vs. column-major) not visible here; confirm in implementation. */
declare function buildMatrix(matrix: any): string;
/** three.js texture-format constants that carry an alpha channel — presumably used to decide alpha handling during texture export; confirm usage in implementation. */
declare const formatsWithAlphaChannel: number[];
export { buildMatrix, decompressGpuTexture, findStructuralNodesInBoneHierarchy, formatsWithAlphaChannel, getBoneName, getMaterialName as getMaterialNameForUSD, getPathToSkeleton, imageToCanvas, imageToCanvasUnpremultiplied, makeNameSafe as makeNameSafeForUSD, USDDocument, fn as usdNumberFormatting, USDObject, USDWriter, USDZExporter, USDZExporterContext, };