@plotvideo/diffusionstudio
Version:
Build bleeding edge video processing applications
1,614 lines (1,492 loc) • 99.7 kB
TypeScript
import { CanvasTextMetrics } from 'pixi.js';
import { Container } from 'pixi.js';
import { ContainerChild } from 'pixi.js';
import { Filter } from 'pixi.js';
import { Graphics } from 'pixi.js';
import { Muxer } from 'mp4-muxer';
import { Renderer } from 'pixi.js';
import { Sprite } from 'pixi.js';
import { StreamTarget } from 'mp4-muxer';
import { Text as Text_2 } from 'pixi.js';
import { TextStyle } from 'pixi.js';
import { Texture } from 'pixi.js';
/**
 * Defines the position of the anchor as
 * a ratio of the width and height
 */
export declare type Anchor = {
    x: float;
    y: float;
};
/**
 * Declaration-merged builder exposing one animation function
 * per animatable display property
 */
declare interface AnimationBuilder extends AnimationBuilder_2 {
    height: AnimationFunction<number, this>;
    width: AnimationFunction<number, this>;
    x: AnimationFunction<number, this>;
    y: AnimationFunction<number, this>;
    translateX: AnimationFunction<number, this>;
    translateY: AnimationFunction<number, this>;
    rotation: AnimationFunction<number, this>;
    alpha: AnimationFunction<number, this>;
    scale: AnimationFunction<number, this>;
}
declare class AnimationBuilder extends AnimationBuilder_2 {
}
declare interface AnimationBuilder_2 {
    // Append a keyframe value at the given relative frame
    to(value: number, relframe: number): this;
}
declare class AnimationBuilder_2 {
    private target;
    animation: Keyframe_2<string | number> | undefined;
    constructor(target: any);
    init(property: string | symbol, value: number | string, delay?: number, easing?: EasingFunction): void;
}
/**
 * Builder method that starts an animation of a property with
 * an optional delay and easing
 */
declare type AnimationFunction<V extends number | string, T> = (value: V, delay?: number, easing?: EasingFunction) => T;
/**
 * Extracts the parameter types of a function type as a tuple
 */
export declare type ArgumentTypes<F extends Function> = F extends (...args: infer A) => any ? A : never;
/**
 * Move an element inside the provided array
 */
export declare function arraymove(arr: any[], fromIndex: number, toIndex: number): void;
/**
 * `assert` replacement for the browser
 * @example assert(true == false)
 */
export declare function assert(condition: any): void;
/**
 * Converts the specified AudioBuffer to a Blob.
 *
 * Note that changing the MIME type does not change the actual file format.
 * The output is a WAVE in any case
 */
export declare function audioBufferToWav(buffer: AudioBuffer, type?: string): Blob;
export declare class AudioClip extends MediaClip<AudioClipProps> {
    readonly type = "audio";
    track?: Track<AudioClip>;
    source: AudioSource<{}>;
    /**
     * Access to the HTML5 audio element
     */
    readonly element: HTMLAudioElement;
    constructor(source?: File | AudioSource, props?: AudioClipProps);
    init(): Promise<void>;
    update(): void | Promise<void>;
    exit(): void;
    copy(): AudioClip;
}
export declare interface AudioClipProps extends MediaClipProps {
}
/**
 * Supported audio MIME types
 */
export declare type AudioMimeType = keyof (typeof SUPPORTED_MIME_TYPES)['AUDIO'];
declare type AudioSettings = {
    sampleRate: number;
    numberOfChannels: number;
    bitrate: number;
};
/**
 * A start/stop range within an audio clip
 */
export declare type AudioSlice = {
    start: Timestamp;
    stop: Timestamp;
};
export declare class AudioSource<T extends Object = {}> extends Source<T> {
    readonly type: ClipType;
    private decoding;
    private _silences?;
    transcript?: Transcript;
    audioBuffer?: AudioBuffer;
    /**
     * Decode the source into an AudioBuffer, optionally
     * caching the result
     */
    decode(numberOfChannels?: number, sampleRate?: number, cache?: boolean): Promise<AudioBuffer>;
    /**
     * @deprecated Use fastsampler instead.
     */
    samples(numberOfSamples?: number, windowSize?: number, min?: number): Promise<number[]>;
    /**
     * Fast sampler that uses a window size to calculate the max value of the samples in the window.
     * @param options - Sampling options.
     * @returns An array of the max values of the samples in the window.
     */
    fastsampler({ length, start, stop, logarithmic, }?: FastSamplerOptions): Promise<Float32Array>;
    /**
     * Render a waveform thumbnail element; forwards the
     * arguments of `samples`
     */
    thumbnail(...args: ArgumentTypes<this['samples']>): Promise<HTMLElement>;
    /**
     * Find silences in the audio clip. Results are cached.
     *
     * uses default sample rate of 3000
     * @param options - Silences options.
     * @returns An array of the silences (in ms) in the clip.
     */
    silences(options?: SilenceDetectionOptions): Promise<AudioSlice[]>;
}
export declare class AudioTrack extends MediaTrack<AudioClip> {
    readonly type = "audio";
}
/**
 * Styling of the box rendered behind a text clip
 */
export declare type Background = {
    /**
     * @default #000000
     */
    fill?: string;
    /**
     * @default 1
     */
    alpha?: number;
    /**
     * @default 20
     */
    borderRadius?: number;
    /**
     * @default { x: 40, y: 10 }
     */
    padding?: {
        x: int;
        y: int;
    };
};
declare type BaseClass = {
    view: Container;
} & Serializer;
/**
 * Copyright (c) 2024 The Diffusion Studio Authors
 *
 * This Source Code Form is subject to the terms of the Mozilla
 * Public License, v. 2.0 that can be found in the LICENSE file.
 */
export declare class BaseError extends Error {
    readonly message: string;
    readonly code: string;
    constructor({ message, code }: {
        message?: string | undefined;
        code?: string | undefined;
    }, ...args: any[]);
}
declare type BaseEvents<E = {}> = {
    '*': any;
    error: Error;
} & E;
/**
 * Merges the channels of the audio blob into a mono AudioBuffer
 */
export declare function blobToMonoBuffer(blob: Blob, sampleRate?: number, scalingFactor?: number): Promise<AudioBuffer>;
/**
 * Convert an audio buffer into a planar float 32 array
 */
export declare function bufferToF32Planar(input: AudioBuffer): Float32Array;
/**
 * Convert an audio buffer into an interleaved int 16 array
 */
export declare function bufferToI16Interleaved(audioBuffer: AudioBuffer): Int16Array;
/**
 * Generic encoder that allows you to encode
 * a canvas frame by frame
 */
export declare class CanvasEncoder implements Required<EncoderInit> {
    private canvas;
    private muxer?;
    private videoEncoder?;
    // current encoding position, incremented by `encodeVideo`
    frame: frame;
    sampleRate: number;
    numberOfChannels: number;
    videoBitrate: number;
    gpuBatchSize: number;
    fps: number;
    height: number;
    width: number;
    audio: boolean;
    /**
     * Create a new Webcodecs encoder
     * @param canvas - The canvas to encode
     * @param init - Configure the output
     * @example
     * ```
     * const encoder = new CanvasEncoder(canvas, { fps: 60 });
     * ```
     */
    constructor(canvas: HTMLCanvasElement | OffscreenCanvas, init?: EncoderInit);
    /**
     * Initiate the encoders and muxers
     * @returns {Promise<void>} - A promise that resolves when initialization is complete
     */
    private init;
    /**
     * Encode the next video frame, the current time will be incremented thereafter
     * @param canvas - Optionally provide a canvas to encode
     * @returns {Promise<void>} - A promise that resolves when the frame has been encoded
     */
    encodeVideo(canvas?: HTMLCanvasElement | OffscreenCanvas): Promise<void>;
    /**
     * Encode an audio buffer using the encoder configuration added in the constructor
     * @param buffer - The audio buffer to encode
     * @returns {Promise<void>} - A promise that resolves when the audio has been added to the encoder queue
     */
    encodeAudio(buffer: AudioBuffer): Promise<void>;
    /**
     * Finalizes the rendering process and creates a blob
     * @returns {Promise<Blob>} - The rendered video as a Blob
     */
    blob(): Promise<Blob>;
    /**
     * @deprecated use `blob` instead
     */
    export(): Promise<Blob>;
}
/**
 * Upper-case the first character of the given string
 */
export declare function capitalize(str: string): string;
/**
 * Recreates a caption preset strategy from its serialized form
 */
export declare class CaptionPresetDeserializer {
    static fromJSON<K extends {
        type?: CaptionPresetType;
    }>(data: K extends string ? never : K): CaptionPresetStrategy;
}
export declare interface CaptionPresetStrategy {
    /**
     * Defines the type of strategy
     */
    type: CaptionPresetType;
    /**
     * This function applies the settings to the track
     */
    applyTo(track: CaptionTrack): Promise<void>;
}
export declare type CaptionPresetType = 'CLASSIC' | 'SPOTLIGHT' | 'CASCADE' | 'GUINEA' | 'SOLAR' | 'WHISPER' | 'VERDANT' | string;
/**
 * Defines the captions transport format
 */
export declare type Captions = {
    /**
     * Defines the word or token
     * currently spoken
     */
    token: string;
    /**
     * Defines the time when the token
     * will be spoken in **milliseconds**
     */
    start: number;
    /**
     * Defines the time when the token
     * has been spoken in **milliseconds**
     */
    stop: number;
}[][];
export declare class CaptionTrack extends Track<TextClip> {
    /**
     * Defines the media clip that will be
     * used for creating the captions
     */
    clip?: MediaClip;
    readonly type = "caption";
    /**
     * The currently active captioning strategy
     */
    preset: CaptionPresetStrategy;
    /**
     * Defines the media resource from which the
     * captions will be created. It must contain
     * a `Transcript`
     */
    from(value: MediaClip | undefined): this;
    /**
     * If a transcript has been added to the resource
     * you can generate captions with this function
     * @param strategy The caption strategy to use
     * @default ClassicCaptionPreset
     */
    generate(strategy?: CaptionPresetStrategy | (new () => CaptionPresetStrategy)): Promise<this>;
}
export declare class CascadeCaptionPreset extends Serializer implements CaptionPresetStrategy {
    generatorOptions: GeneratorOptions;
    readonly type = "CASCADE";
    position: Position;
    constructor(config?: Partial<DefaultCaptionPresetConfig>);
    applyTo(track: CaptionTrack): Promise<void>;
}
/**
 * A circular mask of a given radius
 */
export declare class CircleMask extends Mask {
    private _radius;
    constructor(props: CircleMaskProps);
}
declare interface CircleMaskProps extends MaskProps {
    radius: number;
}
export declare class ClassicCaptionPreset extends Serializer implements CaptionPresetStrategy {
    generatorOptions: GeneratorOptions;
    readonly type: CaptionPresetType;
    position: Position;
    constructor(config?: Partial<DefaultCaptionPresetConfig>);
    applyTo(track: CaptionTrack): Promise<void>;
}
export declare class Clip<Props extends ClipProps = ClipProps> extends Clip_base {
    _name: undefined | string;
    _start: Timestamp;
    _stop: Timestamp;
    /**
     * Defines the type of the clip
     */
    readonly type: ClipType;
    /**
     * Defines the source of the clip with a
     * one-to-many (1:n) relationship
     */
    source?: Source;
    /**
     * The view that contains the render related information
     */
    readonly view: Container<ContainerChild>;
    /**
     * Timestamp when the clip has been created
     */
    readonly createdAt: Date;
    /**
     * Controls the visibility of the clip
     */
    disabled: boolean;
    /**
     * Defines the readiness state of the clip
     */
    state: ClipState;
    /**
     * Access the parent track
     */
    track?: Track<Clip>;
    /**
     * Human readable identifier of the clip
     */
    get name(): string | undefined;
    set name(name: string);
    /**
     * Get the first visible frame
     */
    get start(): Timestamp;
    /**
     * Get the last visible frame
     */
    get stop(): Timestamp;
    constructor(props?: ClipProps);
    /**
     * Method for connecting the track with the clip
     */
    connect(track: Track<Clip>): Promise<void>;
    /**
     * Set the time at which the clip becomes
     * visible. Can be negative
     */
    set start(time: frame | Timestamp);
    /**
     * Set the last visible time that the
     * clip is visible
     */
    set stop(time: frame | Timestamp);
    /**
     * Offsets the clip by a given frame number
     */
    offsetBy(time: frame | Timestamp): this;
    /**
     * Triggered when the clip is
     * added to the composition
     */
    init(): Promise<void>;
    /**
     * Triggered when the clip enters the scene
     */
    enter(): void;
    /**
     * Triggered for each redraw of the scene.
     * Can return a promise which will be awaited
     * during export.
     * @param time the current time to render
     */
    update(time: Timestamp): void | Promise<void>;
    /**
     * Triggered when the clip exits the scene
     */
    exit(): void;
    /**
     * Remove the clip from the track
     */
    detach(): this;
    /**
     * Split the clip into two clips at the specified time
     * @param time split position; will use the current frame
     * of the composition as a fallback
     * @returns The clip that was created by performing this action
     */
    split(time?: frame | Timestamp): Promise<this>;
    /**
     * Create a copy of the clip
     */
    copy(): Clip;
    /**
     * Modify the properties of the clip and
     * trigger an update afterwards
     */
    set(props?: Props): this;
}
/**
 * Base of `Clip`: the `Serializer` class extended with a
 * typed event emitter for `ClipEvents`
 */
declare const Clip_base: {
    new (...args: any[]): {
        _handlers: {
            '*'?: {
                [x: string]: (event: EmittedEvent<any, any>) => void;
            } | undefined;
            error?: {
                [x: string]: (event: EmittedEvent<Error, any>) => void;
            } | undefined;
            offsetBy?: {
                [x: string]: (event: EmittedEvent<Timestamp, any>) => void;
            } | undefined;
            update?: {
                [x: string]: (event: EmittedEvent<any, any>) => void;
            } | undefined;
            frame?: {
                [x: string]: (event: EmittedEvent<number | undefined, any>) => void;
            } | undefined;
            attach?: {
                [x: string]: (event: EmittedEvent<undefined, any>) => void;
            } | undefined;
            detach?: {
                [x: string]: (event: EmittedEvent<undefined, any>) => void;
            } | undefined;
            load?: {
                [x: string]: (event: EmittedEvent<undefined, any>) => void;
            } | undefined;
        };
        on<T extends "*" | "error" | keyof ClipEvents>(eventType: T, callback: (event: EmittedEvent<BaseEvents<ClipEvents>[T], any>) => void): string;
        off(id?: string | "*", ...ids: string[]): void;
        trigger<T extends "*" | "error" | keyof ClipEvents>(eventType: T, detail: BaseEvents<ClipEvents>[T]): void;
        bubble(target: any): string;
        resolve(eventType: "*" | "error" | keyof ClipEvents): (resolve: (value: unknown) => void, reject: (reason?: any) => void) => void;
    };
} & typeof Serializer;
/**
 * Recreates clips from their serialized type or source
 */
export declare class ClipDeserializer {
    static fromType(data: {
        type: ClipType;
    }): Clip;
    static fromSource(data: Source): VideoClip | AudioClip | HtmlClip | ImageClip | undefined;
}
/**
 * Defines the type of events emitted by a clip
 */
export declare type ClipEvents = {
    offsetBy: Timestamp;
    update: any;
    frame: number | undefined;
    attach: undefined;
    detach: undefined;
    load: undefined;
};
export declare interface ClipProps {
    disabled?: boolean;
    name?: string;
    start?: frame | Timestamp;
    stop?: frame | Timestamp;
}
export declare type ClipState = 'IDLE' | 'LOADING' | 'ATTACHED' | 'READY' | 'ERROR';
export declare type ClipType = 'image' | 'audio' | 'text' | 'video' | 'base' | 'html' | 'complex_text';
export declare class ComplexTextClip extends TextClip<ComplexTextClipProps> {
    readonly type = "complex_text";
    track?: Track<ComplexTextClip>;
    private _maxWidth?;
    private _textAlign;
    private _textBaseline;
    /**
     * Access to the container that contains
     * all text objects
     */
    model: Container<Text_2>;
    segments: types_2.TextSegment[];
    metrics: TextMetrics_2;
    background?: types_2.Background;
    styles?: types_2.StyleOption[];
    constructor(props?: string | ComplexTextClipProps);
    /**
     * Set the copy for the text object. To split a line you can use '\n'.
     */
    get text(): string;
    set text(value: string);
    /**
     * The width at which text will wrap
     */
    get maxWidth(): number | undefined;
    set maxWidth(value: number | undefined);
    /**
     * Alignment for multiline text, does not affect single line text.
     */
    get textAlign(): types_2.TextAlign;
    set textAlign(value: types_2.TextAlign);
    /**
     * The baseline of the text that is rendered.
     */
    get textBaseline(): types_2.TextBaseline;
    set textBaseline(value: types_2.TextBaseline);
    copy(): ComplexTextClip;
    private createRenderSplits;
    private createTextMetrics;
    private createTextStyles;
    private drawBackground;
    protected reflectUpdate(): void;
}
export declare interface ComplexTextClipProps extends TextClipProps {
    segments?: TextSegment[];
    background?: Background;
    styles?: StyleOption[];
}
export declare class Composition extends Composition_base {
    private _duration;
    /**
     * Access to the underlying pixijs renderer
     */
    renderer?: Renderer;
    /**
     * The root container of the composition
     */
    stage: Container<ContainerChild>;
    /**
     * Settings of the composition
     */
    settings: CompositionSettings;
    /**
     * Tracks attached to the composition
     */
    tracks: Track<Clip>[];
    /**
     * The current frame that the playback is set to
     */
    frame: frame;
    /**
     * User defined fixed duration, use the duration
     * property to set this value
     */
    fixedDuration?: Timestamp;
    /**
     * Defines the current state of the composition
     */
    state: CompositionState;
    /**
     * Defines the fps used for rendering.
     */
    fps: float;
    /**
     * Get the canvas element that has been
     * added to the dom
     */
    canvas?: HTMLCanvasElement;
    /**
     * Defines the context of the external
     * canvas element
     */
    private context?;
    constructor({ height, width, background, backend, }?: Partial<CompositionSettings>);
    /**
     * The realtime playback has started
     */
    get playing(): boolean;
    /**
     * Composition is rendering in
     * non realtime
     */
    get rendering(): boolean;
    /**
     * Get the current width of the canvas
     */
    get width(): number;
    /**
     * Get the current height of the canvas
     */
    get height(): number;
    /**
     * This is where the playback stops playing
     */
    get duration(): Timestamp;
    /**
     * Limit the total duration of the composition
     */
    set duration(time: frame | Timestamp | undefined);
    /**
     * Resize the composition
     */
    resize(width: number, height: number): void;
    /**
     * Set the player as a child of the given html div element
     */
    attachPlayer(element: HTMLElement): void;
    /**
     * Remove the player from the dom
     */
    detachPlayer(element: HTMLElement): void;
    /**
     * Append a new track, it will be inserted at
     * index 0 and rendered last (top layer)
     */
    shiftTrack<L extends Track<Clip>>(Track: (new () => L) | L): L;
    /**
     * Create a track with the given type
     * @param type the desired type of the track
     * @returns A new track
     */
    createTrack<T extends TrackType>(type: T): TrackMap[T];
    /**
     * Convenience function for appending a track
     * as well as the clip to the composition
     */
    add<L extends Clip>(clip: L): Promise<L>;
    /**
     * Remove a given clip from the composition
     * @returns `Clip` when it has been successfully removed `undefined` otherwise
     */
    remove<L extends Clip>(clip: L): L | undefined;
    /**
     * Remove all tracks that are of the specified type
     * @param track type to be removed
     */
    removeTracks(Track: new (composition: Composition) => Track<Clip>): Track<Clip>[];
    /**
     * Find tracks that match the provided parameters
     */
    findTracks<T extends Track<Clip>>(predicate: ((value: Track<Clip>) => boolean) | (new () => T)): T[];
    /**
     * Find clips that match the provided parameters
     */
    findClips<T extends Clip>(predicate: ((value: Clip) => boolean) | (new () => T)): T[];
    /**
     * Compute the currently active frame
     */
    computeFrame(): void;
    /**
     * Take a screenshot of the still frame
     */
    screenshot(format?: ScreenshotImageFormat, quality?: number): string;
    /**
     * Set the playback position to a specific time
     * @param value new playback time
     */
    seek(value: frame | Timestamp): Promise<void>;
    /**
     * Play the composition
     */
    play(): Promise<void>;
    /**
     * Pause the composition
     */
    pause(): Promise<void>;
    /**
     * Get the composed audio as a single AudioBuffer
     */
    audio(numberOfChannels?: number, sampleRate?: number): Promise<AudioBuffer>;
    /**
     * Get the current playback time and composition
     * duration formatted as `00:00 / 00:00` by default.
     * if **hours** is set the format is `HH:mm:ss` whereas
     * **milliseconds** will return `mm:ss.SSS`
     */
    time(precision?: {
        hours?: boolean;
        milliseconds?: boolean;
    }): string;
    /**
     * Remove a given track from the composition
     * @returns `Track` when it has been successfully removed `undefined` otherwise
     */
    removeTrack<T extends Track<Clip>>(track: T): T | undefined;
    private ticker;
    /**
     * Updates the state of the composition
     */
    private update;
}
/**
 * Base of `Composition`: the `Serializer` class extended with
 * a typed event emitter for `CompositionEvents`
 */
declare const Composition_base: {
    new (...args: any[]): {
        _handlers: {
            '*'?: {
                [x: string]: (event: EmittedEvent<any, any>) => void;
            } | undefined;
            error?: {
                [x: string]: (event: EmittedEvent<Error, any>) => void;
            } | undefined;
            play?: {
                [x: string]: (event: EmittedEvent<frame, any>) => void;
            } | undefined;
            pause?: {
                [x: string]: (event: EmittedEvent<frame, any>) => void;
            } | undefined;
            init?: {
                [x: string]: (event: EmittedEvent<undefined, any>) => void;
            } | undefined;
            currentframe?: {
                [x: string]: (event: EmittedEvent<frame, any>) => void;
            } | undefined;
            update?: {
                [x: string]: (event: EmittedEvent<any, any>) => void;
            } | undefined;
            frame?: {
                [x: string]: (event: EmittedEvent<number | undefined, any>) => void;
            } | undefined;
            attach?: {
                [x: string]: (event: EmittedEvent<undefined, any>) => void;
            } | undefined;
            detach?: {
                [x: string]: (event: EmittedEvent<undefined, any>) => void;
            } | undefined;
            load?: {
                [x: string]: (event: EmittedEvent<undefined, any>) => void;
            } | undefined;
            resize?: {
                [x: string]: (event: EmittedEvent<undefined, any>) => void;
            } | undefined;
        };
        on<T extends "*" | "error" | keyof CompositionEvents>(eventType: T, callback: (event: EmittedEvent<BaseEvents<CompositionEvents>[T], any>) => void): string;
        off(id?: string | "*", ...ids: string[]): void;
        trigger<T extends "*" | "error" | keyof CompositionEvents>(eventType: T, detail: BaseEvents<CompositionEvents>[T]): void;
        bubble(target: any): string;
        resolve(eventType: "*" | "error" | keyof CompositionEvents): (resolve: (value: unknown) => void, reject: (reason?: any) => void) => void;
    };
} & typeof Serializer;
/**
 * Defines the type of events emitted by the
 * composition
 */
export declare type CompositionEvents = {
    play: frame;
    pause: frame;
    init: undefined;
    currentframe: frame;
    update: any;
    frame: number | undefined;
    attach: undefined;
    detach: undefined;
    load: undefined;
    resize: undefined;
};
export declare type CompositionSettings = {
    /**
     * Height of the composition
     *
     * @default 1080
     */
    height: int;
    /**
     * Width of the composition
     *
     * @default 1920
     */
    width: int;
    /**
     * Background color of the composition
     *
     * @default #000000
     */
    background: hex;
    /**
     * Overwrite the backend auto detection.
     * *While webgpu is faster than webgl
     * it might not be available in your
     * browser yet.*
     */
    backend: 'webgpu' | 'webgl';
};
export declare type CompositionState = 'IDLE' | 'RENDER' | 'PLAY';
/**
 * Defines the constructor required by mixins
 */
export declare type Constructor<T = {}> = new (...args: any[]) => T;
/**
 * Limit the number of times a function can be called
 * per interval, timeout is in milliseconds
 */
export declare function debounce(func: Function, timeout?: number): (...args: any[]) => void;
export declare type DefaultCaptionPresetConfig = {
    generatorOptions: GeneratorOptions;
    position: Position;
};
/**
 * Copyright (c) 2024 The Diffusion Studio Authors
 *
 * This Source Code Form is subject to the terms of the Mozilla
 * Public License, v. 2.0 that can be found in the LICENSE file.
 */
export declare type Deserializer<T> = (data: any) => Promise<T> | T;
/**
 * Copyright (c) 2024 The Diffusion Studio Authors
 *
 * This Source Code Form is subject to the terms of the Mozilla
 * Public License, v. 2.0 that can be found in the LICENSE file.
 */
/**
 * This utility creates an anchor tag and clicks on it
 * @param source Blob url or base64 encoded svg
 * @param name File name suggestion
 */
export declare function downloadObject(source: string | Blob, name?: string): Promise<void>;
export declare type EasingFunction = keyof EasingFunctions;
export declare type EasingFunctions = typeof easingFunctions;
declare const easingFunctions: {
    linear: (t: number) => number;
    easeIn: (t: number) => number;
    easeOut: (t: number) => number;
    easeInOut: (t: number) => number;
};
/**
 * An elliptical mask with separate x and y radii
 */
export declare class EllipseMask extends Mask {
    private _radius;
    constructor(props: EllipseMaskProps);
}
declare interface EllipseMaskProps extends MaskProps {
    radius: {
        x: number;
        y: number;
    };
}
declare type EmittedEvent<K, T extends {}> = OverrideValues<CustomEvent<K>, {
    target: T;
}>;
/**
 * A single encoded Opus audio chunk
 */
export declare type EncodedOpusChunk = {
    data: Uint8Array;
    timestamp: number;
    type: 'key' | 'delta';
    duration: number;
};
export declare type EncodedOpusChunkOutputCallback = (output: EncodedOpusChunk, metadata: EncodedAudioChunkMetadata) => void;
export declare class Encoder extends WebcodecsVideoEncoder {
    private audioEncoder?;
    /**
     * Create a new audio and video encoder and multiplex the result
     * using a mp4 container
     * @param composition The composition to render
     * @param options Configure the output
     * @example
     * ```
     * new Encoder(composition, { resolution: 2 }).render() // will render at 4K
     * ```
     */
    constructor(composition: Composition, init?: VideoEncoderInit_2);
    /**
     * Export the specified composition
     * @throws DOMException if the export has been aborted
     */
    render(target?: FileSystemFileHandle | string, signal?: AbortSignal): Promise<void>;
    /**
     * Check which configurations are supported and select the best
     * @returns A supported audio and video configuration
     */
    private getConfigs;
    /**
     * @deprecated please replace with `render`
     */
    export(target?: FileSystemFileHandle | string): Promise<void>;
}
export declare class EncoderError extends BaseError {
}
/**
 * Copyright (c) 2024 The Diffusion Studio Authors
 *
 * This Source Code Form is subject to the terms of the Mozilla
 * Public License, v. 2.0 that can be found in the LICENSE file.
 */
declare type EncoderEvents = {
    render: {
        /**
         * Defines how many frames have been
         * rendered so far
         */
        progress: number;
        /**
         * Defines the total number of frames
         * to be rendered
         */
        total: number;
        /**
         * Defines the estimated remaining
         * render time
         */
        remaining: Date;
    };
};
declare interface EncoderInit {
    /**
     * A floating point number indicating the audio context's sample rate, in samples per second.
     *
     * @default 48000
     */
    sampleRate?: number;
    /**
     * Defines the number of channels
     * of the composed audio
     *
     * @default 2
     */
    numberOfChannels?: number;
    /**
     * Defines the bitrate at which the video
     * should be rendered at
     * @default 10e6
     */
    videoBitrate?: number;
    /**
     * Defines the maximum size of the video
     * encoding queue, increasing this number
     * will put a higher pressure on the gpu.
     * It's restricted to a value between 1 and 100
     * @default 5
     */
    gpuBatchSize?: number;
    /**
     * Defines the fps at which the composition
     * will be rendered
     * @default 30
     */
    fps?: number;
    /**
     * Defines if the audio should be encoded
     */
    audio?: boolean;
}
/**
 * Error message structure
 */
export declare type ErrorEventDetail = {
    msg: string;
    code: string;
    params?: any;
};
/**
 * Copyright (c) 2024 The Diffusion Studio Authors
 *
 * This Source Code Form is subject to the terms of the Mozilla
 * Public License, v. 2.0 that can be found in the LICENSE file.
 */
/**
 * Creates a standalone class with typed event emitter members
 */
export declare function EventEmitter<Events = {}>(): {
    new (...args: any[]): {
        _handlers: { [T in keyof BaseEvents<Events>]?: {
            [x: string]: (event: EmittedEvent<BaseEvents<Events>[T], any>) => void;
        } | undefined; };
        on<T_1 extends "*" | "error" | keyof Events>(eventType: T_1, callback: (event: EmittedEvent<BaseEvents<Events>[T_1], any>) => void): string;
        off(id?: string | "*", ...ids: string[]): void;
        trigger<T_1 extends "*" | "error" | keyof Events>(eventType: T_1, detail: BaseEvents<Events>[T_1]): void;
        bubble(target: {
            _handlers: { [T in keyof BaseEvents<Events>]?: {
                [x: string]: (event: EmittedEvent<BaseEvents<Events>[T], any>) => void;
            } | undefined; };
            on<T_1 extends "*" | "error" | keyof Events>(eventType: T_1, callback: (event: EmittedEvent<BaseEvents<Events>[T_1], any>) => void): string;
            off(id?: string | "*", ...ids: string[]): void;
            trigger<T_1 extends "*" | "error" | keyof Events>(eventType: T_1, detail: BaseEvents<Events>[T_1]): void;
            bubble(target: any): string;
            resolve(eventType: "*" | "error" | keyof Events): (resolve: (value: unknown) => void, reject: (reason?: any) => void) => void;
        }): string;
        resolve(eventType: "*" | "error" | keyof Events): (resolve: (value: unknown) => void, reject: (reason?: any) => void) => void;
    };
};
/**
 * Mixin that extends the given base class with typed event emitter members
 */
export declare function EventEmitterMixin<Events = {}, T extends Constructor = Constructor>(Base: T): {
    new (...args: any[]): {
        _handlers: { [T_1 in keyof BaseEvents<Events>]?: {
            [x: string]: (event: EmittedEvent<BaseEvents<Events>[T_1], any>) => void;
        }; };
        on<T_1 extends keyof BaseEvents<Events>>(eventType: T_1, callback: (event: EmittedEvent<BaseEvents<Events>[T_1], any>) => void): string;
        off(id?: string | "*", ...ids: string[]): void;
        trigger<T_1 extends keyof BaseEvents<Events>>(eventType: T_1, detail: BaseEvents<Events>[T_1]): void;
        bubble(target: any): string;
        resolve(eventType: keyof BaseEvents<Events>): (resolve: (value: unknown) => void, reject: (reason?: any) => void) => void;
    };
} & T;
declare type EventListener_2 = (event: MessageEvent<any>['data']) => void;
declare type Events = {
    load: undefined;
    update: undefined;
};
declare type Events_2 = {
    update: any;
    frame: number | undefined;
    attach: undefined;
    detach: undefined;
};
declare type Events_3 = {
    load: undefined;
};
declare type Events_4 = {
    update: any;
};
/**
 * @deprecated please replace with `EncoderError`
 */
export declare class ExportError extends BaseError {
}
/**
 * Fast sampler options.
 */
export declare type FastSamplerOptions = {
    /**
     * The number of samples to return.
     */
    length?: number;
    /**
     * The start time in **milliseconds** relative to the beginning of the clip.
     */
    start?: Timestamp | number;
    /**
     * The stop time in **milliseconds** relative to the beginning of the clip.
     */
    stop?: Timestamp | number;
    /**
     * Whether to use a logarithmic scale.
     */
    logarithmic?: boolean;
};
/**
 * Defines a floating point number
 */
export declare type float = (number & {
    _float: void;
}) | number;
/**
 * Converts a Float32Array to 16-bit PCM.
 */
export declare function floatTo16BitPCM(dataview: DataView, buffer: Float32Array, offset: number): DataView;
export declare class Font extends Font_base {
    /**
     * Defines if the font has been loaded yet
     */
    loaded: boolean;
    constructor(config?: types.FontSource);
    get name(): string;
    /**
     * Defines the family of the font
     * @example 'Montserrat'
     */
    family: string;
    /**
     * Defines the weight of the font
     * @example '500'
     */
    weight?: string;
    /**
     * Defines the font face source
     * @example 'url(https://mywebfont.ttf)'
     */
    source: string | undefined;
    /**
     * Defines the font style
     * @example 'italic'
     */
    style: string | undefined;
    /**
     * Load the font that has been initiated via the constructor
     */
    load(): Promise<this>;
    copy(): Font;
    /**
     * Get all available local fonts, requires the
     * **Local Font Access API**
     */
    static localFonts(): Promise<types.FontSources[]>;
    /**
     * Get common web fonts
     */
    static webFonts(): types.FontSources[];
    /**
     * Create a font by font family
     */
    static fromFamily<T extends keyof typeof WebFonts>({ family, weight, }: types.WebfontProperties<T>): Font;
}
/**
 * Base of `Font`: the `Serializer` class extended with a
 * typed event emitter for the `load` event
 */
declare const Font_base: {
    new (...args: any[]): {
        _handlers: {
            '*'?: {
                [x: string]: (event: EmittedEvent<any, any>) => void;
            } | undefined;
            error?: {
                [x: string]: (event: EmittedEvent<Error, any>) => void;
            } | undefined;
            load?: {
                [x: string]: (event: EmittedEvent<undefined, any>) => void;
            } | undefined;
        };
        on<T extends "*" | "error" | "load">(eventType: T, callback: (event: EmittedEvent<BaseEvents<Events_3>[T], any>) => void): string;
        off(id?: string | "*", ...ids: string[]): void;
        trigger<T extends "*" | "error" | "load">(eventType: T, detail: BaseEvents<Events_3>[T]): void;
        bubble(target: any): string;
        resolve(eventType: "*" | "error" | "load"): (resolve: (value: unknown) => void, reject: (reason?: any) => void) => void;
    };
} & typeof Serializer;
/**
 * Maps numeric font weights to their
 * human readable names
 */
declare const FONT_WEIGHTS: {
    readonly '100': "Thin";
    readonly '200': "Extra Light";
    readonly '300': "Light";
    readonly '400': "Normal";
    readonly '500': "Medium";
    readonly '600': "Semi Bold";
    readonly '700': "Bold";
    readonly '800': "Extra Bold";
    readonly '900': "Black";
};
/**
 * Defines all available font families
 */
export declare type FontFamily = keyof typeof WebFonts | string;
/**
 * Defines the properties that are required
 * to load a new font
 */
export declare type FontSource = {
    /**
     * Name of the Family
     * @example 'Arial'
     */
    family: string;
    /**
     * Source of the Variant
     * @example url(arial.ttf)
     */
    source: string;
    /**
     * Defines the font style
     * @example 'italic'
     */
    style?: string;
    /**
     * The weight of the font
     * @example '400'
     */
    weight?: string;
};
/**
 * Defines a single font that has one or
 * more variants
 */
export declare type FontSources = {
    family: string;
    variants: FontSource[];
};
/**
 * Defines the style of the font
 */
export declare type FontStyle = 'normal' | 'italic' | 'oblique';
/**
 * Defines all available font subsets which
 * limit the number of characters
 */
export declare type FontSubset = 'latin' | 'latin-ext' | 'vietnamese' | 'cyrillic' | 'cyrillic-ext';
/**
 * Defines the source where the font is coming from
 */
export declare type FontType = 'local' | 'web';
/**
 * Defines all available font weights
 */
export declare type FontWeight = keyof typeof FONT_WEIGHTS;
/**
 * Defines the thickness/weight of the font
 */
export declare type fontWeight = 'normal' | 'bold' | 'bolder' | 'lighter' | '100' | '200' | '300' | '400' | '500' | '600' | '700' | '800' | '900';
/**
 * Copyright (c) 2024 The Diffusion Studio Authors
 *
 * This Source Code Form is subject to the terms of the Mozilla
 * Public License, v. 2.0 that can be found in the LICENSE file.
 */
/**
 * Default frames-per-second used when no explicit fps is provided.
 */
export declare const FPS_DEFAULT = 30;
/**
 * Defines an integer that corresponds
 * to a point in time (a branded number; plain
 * numbers are also accepted)
 */
export declare type frame = (number & {
_frame: void;
}) | number;
/**
 * Convert frames to milliseconds
 * @param frames number of frames to convert
 * @param fps frame rate to use — presumably defaults to FPS_DEFAULT (30); confirm in implementation
 */
export declare function framesToMillis(frames: frame, fps?: number): number;
/**
 * Convert frames into seconds
 * @param frames number of frames to convert
 * @param fps frame rate to use — presumably defaults to FPS_DEFAULT (30); confirm in implementation
 */
export declare function framesToSeconds(frames: frame, fps?: number): number;
/**
 * Options controlling how items are grouped during iteration.
 * Each option is a `[min, max?]` range tuple; when `max` is omitted
 * the group size is presumably fixed at `min` — confirm against the
 * generator implementation.
 */
export declare type GeneratorOptions = {
/**
* Iterates by word count
*/
count?: [number, number?];
/**
* Iterates by group duration
*/
duration?: [number, number?];
/**
* Iterates by number of characters within the group
*/
length?: [number, number?];
};
/**
 * Function for retrieving supported audio encoder configurations
 * @param settings desired audio settings to probe support for
 * @returns all audio encoder configurations supported by the current environment
 */
export declare function getAudioEncoderConfigs(settings: AudioSettings): Promise<AudioEncoderConfig[]>;
/**
 * Function for retrieving the best supported audio
 * and video profiles
 * @returns a tuple of `[videoConfig, audioConfig]`; the audio config
 * may be `undefined` (e.g. when no supported audio profile is found)
 */
export declare function getSupportedEncoderConfigs(settings: {
audio: AudioSettings;
video: VideoSettings;
}): Promise<[VideoEncoderConfig, AudioEncoderConfig | undefined]>;
/**
 * Function for retrieving supported video encoder
 * configurations
 * @param settings desired video settings to probe support for
 * @returns all video encoder configurations supported by the current environment
 */
export declare function getVideoEncoderConfigs(settings: VideoSettings): Promise<VideoEncoderConfig[]>;
/**
 * Group an array of objects by the specified key
 * @param arr objects to group
 * @param key property name whose value becomes the group key
 * @returns a record mapping each distinct key value to its members
 * @example groupBy([{a: 1}, {a: 1}, {a: 2}], 'a') // { 1: [...], 2: [...] }
 */
export declare function groupBy<T extends {}, K extends keyof T>(arr: T[], key: K): Record<T[K], T[]>;
/**
 * Caption preset strategy that styles caption tracks using a
 * multi-color configuration (see `MultiColorCaptionPresetConfig`).
 */
export declare class GuineaCaptionPreset extends Serializer implements CaptionPresetStrategy {
readonly type: CaptionPresetType;
// Color palette used by the preset
colors: hex[];
// Placement of the rendered captions
position: Position;
constructor(config?: Partial<MultiColorCaptionPresetConfig>);
/**
 * Applies this preset to the given caption track
 */
applyTo(track: CaptionTrack): Promise<void>;
/**
 * Splits a word group into segments and their words —
 * presumably for per-segment rendering; confirm in implementation
 */
protected splitSequence(sequence: WordGroup): {
segments: string[];
words: Word[][];
};
}
/**
 * Defines a color hex value
 * @example '#FF0000'
 */
export declare type hex = `#${string}`;
/**
 * Clip that renders an HTML document onto the composition.
 * The document is rasterized via an image element into a canvas,
 * which is then displayed through a Pixi sprite.
 */
export declare class HtmlClip extends HtmlClip_base {
readonly type = "html";
track?: Track<HtmlClip>;
source: HtmlSource<{}>;
/**
* Access to the html document that
* will be rendered to the canvas
*/
readonly element: HTMLImageElement;
readonly canvas: HTMLCanvasElement;
readonly context: CanvasRenderingContext2D;
/**
* Access to the sprite containing the canvas
*/
readonly sprite: Sprite;
constructor(source?: File | HtmlSource, props?: HtmlClipProps);
// Asynchronous initialization (e.g. loading the source)
init(): Promise<void>;
// Called with the current timestamp to refresh the rendered state
update(_: Timestamp): void | Promise<void>;
/**
 * Creates a copy of this clip
 */
copy(): HtmlClip;
}
/**
 * Generated mixin base for `HtmlClip`: combines the visual
 * properties (position, scale, rotation, alpha, translation,
 * masking, anchoring, animation) with the `Clip` constructor
 * and `Serializer`'s `fromJSON`.
 */
declare const HtmlClip_base: {
new (...args: any[]): {
filters?: Filter | Filter[];
_height?: int | Keyframe_2<int> | Percent | NumberCallback;
_width?: int | Keyframe_2<int> | Percent | NumberCallback;
_position: Position;
_scale?: Scale;
rotation: number | Keyframe_2<number> | NumberCallback;
alpha: number | Keyframe_2<number> | NumberCallback;
translate: Translate2D;
get position(): Position;
set position(value: Position | "center");
get scale(): Scale;
set scale(value: Scale | float | Keyframe_2<number> | NumberCallback);
x: int | `${number}%` | Keyframe_2<int> | NumberCallback;
y: int | `${number}%` | Keyframe_2<int> | NumberCallback;
translateX: int | Keyframe_2<int> | NumberCallback;
translateY: int | Keyframe_2<int> | NumberCallback;
height: int | `${number}%` | Keyframe_2<int> | NumberCallback;
width: int | `${number}%` | Keyframe_2<int> | NumberCallback;
mask: Graphics | undefined;
get anchor(): Anchor;
set anchor(value: Anchor | float);
enter(): void;
exit(): void;
animate(): AnimationBuilder;
view: Container;
id: `${string}-${string}-${string}-${string}-${string}`;
toJSON(): any;
};
} & {
new (props?: ClipProps): Clip<HtmlClipProps>;
fromJSON<T extends Serializer, K = {}>(this: new () => T, obj: K extends string ? never : K): T;
};
/**
 * Construction properties accepted by `HtmlClip`
 */
export declare interface HtmlClipProps extends ClipProps, VisualMixinProps {
}
/**
 * Source backed by an HTML document. The document is hosted in a
 * hidden iframe so its dimensions can be measured and its contents
 * manipulated before rendering.
 */
export declare class HtmlSource<T extends Object = {}> extends Source<T> {
readonly type: ClipType;
/**
* Access to the iframe that is required
* for extracting the html's dimensions
*/
readonly iframe: HTMLIFrameElement;
constructor();
/**
* Access to the html document as loaded
* within the iframe. Can be manipulated with
* javascript
*/
get document(): Document | undefined;
createObjectURL(): Promise<string>;
protected loadUrl(url: string | URL | Request, init?: RequestInit): Promise<void>;
protected loadFile(file: File): Promise<void>;
/**
* Update the object url using the current
* contents of the iframe document
*/
update(): void;
// Generates a preview image of the source
thumbnail(): Promise<HTMLImageElement>;
}
/**
 * Track that holds `HtmlClip`s
 */
export declare class HtmlTrack extends Track<HtmlClip> {
readonly type = "html";
}
/**
 * Clip that renders a static image onto the composition
 * via a Pixi sprite.
 */
export declare class ImageClip extends ImageClip_base {
readonly type = "image";
track?: Track<ImageClip>;
// Underlying image element holding the decoded image
readonly element: HTMLImageElement;
source: ImageSource<{}>;
/**
* Access to the sprite containing the image texture
*/
readonly sprite: Sprite;
constructor(source?: File | ImageSource, props?: ImageClipProps);
// Asynchronous initialization (e.g. loading the source)
init(): Promise<void>;
// Called with the current timestamp to refresh the rendered state
update(_: Timestamp): void | Promise<void>;
/**
 * Creates a copy of this clip
 */
copy(): ImageClip;
}
/**
 * Generated mixin base for `ImageClip`: combines the visual
 * properties (position, scale, rotation, alpha, translation,
 * masking, anchoring, animation) with the `Clip` constructor
 * and `Serializer`'s `fromJSON`.
 */
declare const ImageClip_base: {
new (...args: any[]): {
filters?: Filter | Filter[];
_height?: int | Keyframe_2<int> | Percent | NumberCallback;
_width?: int | Keyframe_2<int> | Percent | NumberCallback;
_position: Position;
_scale?: Scale;
rotation: number | Keyframe_2<number> | NumberCallback;
alpha: number | Keyframe_2<number> | NumberCallback;
translate: Translate2D;
get position(): Position;
set position(value: Position | "center");
get scale(): Scale;
set scale(value: Scale | float | Keyframe_2<number> | NumberCallback);
x: int | `${number}%` | Keyframe_2<int> | NumberCallback;
y: int | `${number}%` | Keyframe_2<int> | NumberCallback;
translateX: int | Keyframe_2<int> | NumberCallback;
translateY: int | Keyframe_2<int> | NumberCallback;
height: int | `${number}%` | Keyframe_2<int> | NumberCallback;
width: int | `${number}%` | Keyframe_2<int> | NumberCallback;
mask: Graphics | undefined;
get anchor(): Anchor;
set anchor(value: Anchor | float);
enter(): void;
exit(): void;
animate(): AnimationBuilder;
view: Container;
id: `${string}-${string}-${string}-${string}-${string}`;
toJSON(): any;
};
} & {
new (props?: ClipProps): Clip<ImageClipProps>;
fromJSON<T extends Serializer, K = {}>(this: new () => T, obj: K extends string ? never : K): T;
};
/**
 * Construction properties accepted by `ImageClip`
 */
export declare interface ImageClipProps extends ClipProps, VisualMixinProps {
}
/**
 * Defines all supported image mime types
 */
export declare type ImageMimeType = keyof (typeof SUPPORTED_MIME_TYPES)['IMAGE'];
/**
 * Source backed by an image file
 */
export declare class ImageSource<T extends Object = {}> extends Source<T> {
readonly type: ClipType;
// Generates a preview image of the source
thumbnail(): Promise<HTMLImageElement>;
}
/**
 * Track that holds `ImageClip`s
 */
declare class ImageTrack_2 extends Track<ImageClip> {
readonly type = "image";
}
export { ImageTrack_2 as ImageTrack }
/**
 * Defines how clips are inserted into a track
 * ('DEFAULT' or 'STACK')
 */
export declare type InsertMode = (typeof insertModes)[number];
/**
 * Copyright (c) 2024 The Diffusion Studio Authors
 *
 * This Source Code Form is subject to the terms of the Mozilla
 * Public License, v. 2.0 that can be found in the LICENSE file.
 */
declare const insertModes: readonly ["DEFAULT", "STACK"];
/**
 * Strategy interface implementing the behavior of a
 * particular `InsertMode` for adding, updating and
 * offsetting clips within a track.
 */
declare interface InsertStrategy<T extends InsertMode> {
readonly mode: T;
add(clip: Clip, track: Track<Clip>, index?: number): void;
update(clip: Clip, track: Track<Clip>): void;
offset(time: Timestamp, track: Track<Clip>): void;
}
/**
 * Defines a number without decimal places
 * (a branded number; plain numbers are also accepted)
 */
export declare type int = (number & {
_int: void;
}) | number;
/**
 * Copyright (c) 2024 The Diffusion Studio Authors
 *
 * This Source Code Form is subject to the terms of the Mozilla
 * Public License, v. 2.0 that can be found in the LICENSE file.
 */
/**
 * Converts an AudioBuffer to a Float32Array.
 * For 2 channels it will result in something like:
 * [L[0], R[0], L[1], R[1], ... , L[n], R[n]]
 * @param input the audio buffer whose channels will be interleaved
 * @returns a single interleaved Float32Array
 */
export declare function interleave(input: AudioBuffer): Float32Array;
/**
 * Error type for input/output related failures
 * (name-based; confirm exact usage in implementation)
 */
export declare class IOError extends BaseError {
}
/**
 * Check whether a given value is a class
 * @example isClass(class Foo {}) // true
 * @example isClass(() => {}) // false — presumably; confirm in implementation
 */
export declare function isClass(value: any): boolean;
/**
 * Maps a range of input times (milliseconds) to a range of output
 * values (numbers or color strings) with configurable easing and
 * extrapolation. Exported publicly as `Keyframe`.
 */
declare class Keyframe_2<T extends number | string> implements Omit<Serializer, 'id'> {
/**
* Defines the range of the input values
* in milliseconds
*/
input: number[];
/**
* Defines the range of the output values
*/
output: T[];
/**
* Defines the required options that
* control the behaviour of the keyframe
*/
options: Required<KeyframeOptions>;
/**
* Constructs a Keyframe object.
* @param inputRange - The range of input values (e.g., frame numbers).
* @param outputRange - The range of output values (e.g., opacity, degrees, colors).
* @param options - Additional options for extrapolation, type, and easing.
*/
constructor(inputRange: frame[], outputRange: T[], options?: KeyframeOptions);
/**
* Normalizes the frame number to a value between 0 and 1 based on the input range.
* @param frame - The current frame number.
* @returns The normalized value.
*/
private normalize;
/**
* Interpolates the output value based on the normalized frame value.
* @param t - The normalized frame value (between 0 and 1).
* @param segment - The current segment index.
* @returns The interpolated output value.
*/
private interpolate;
/**
* Evaluates the interpolated value for a given milliseconds number.
* @param time - The current time in milliseconds or as a timestamp
* @returns The interpolated output value.
*/
value(time: number | Timestamp): T;
/**
* Add a new keyframe to the animation
* @param input - time of the keyframe
* @param output - value of the keyframe
* @returns this keyframe, for chaining
*/
push(input: frame, output: T): this;
toJSON(): this;
static fromJSON<T extends number | string>(obj: ReturnType<Keyframe_2<T>['toJSON']>): Keyframe_2<T>;
}
export { Keyframe_2 as Keyframe }
/**
 * Options for configuring a Keyframe instance.
 */
export declare type KeyframeOptions = {
/**
* Defines the extrapolation behavior outside the input range.
* - "clamp": Clamps the value to the nearest endpoint within the range.
* - "extend": Allows values to extend beyond the range.
* @default "clamp"
*/
extrapolate?: "clamp" | "extend";
/**
* Specifies the type of output values.
* - "number": Output values are numbers.
* - "color": Output values are colors in hex format.
* @default "number"
*/
type?: "number" | "color";
/**
* An optional easing function to apply to the interpolation.
* Easing functions can modify the interpolation to be non-linear.
* @default "linear"
*/
easing?: EasingFunction;
};
/**
* Copyright (c) 2024 The Diffusion Studio Authors
*
* This Source Code Form is subject to the terms of the Mozilla
* Public License, v. 2.0 that can be found in the LICENSE file.
*/
export declare enum Language {