@diffusionstudio/core
Version:
2D motion graphics and video rendering engine
1,655 lines (1,546 loc) • 126 kB
TypeScript
import { BaseEvents } from '../mixins/event/types';
import { BaseEvents as BaseEvents_2 } from '../../mixins/event/types';
import { BlendMode as BlendMode_2 } from '../..';
import { ClipAnimationOptions as ClipAnimationOptions_2 } from '..';
import { ClipType as ClipType_2 } from '../..';
import { EmittedEvent } from '../mixins/event/types';
import { EmittedEvent as EmittedEvent_2 } from '../../mixins/event/types';
import { frame as frame_2 } from '..';
import { int as int_2 } from '../..';
import { Mask as Mask_2 } from '../..';
import { MediaInput as MediaInput_2 } from '../..';
import { Percent as Percent_2 } from '../..';
import { Point as Point_2 } from '../..';
import { RelativePoint as RelativePoint_2 } from '../..';
import { Size as Size_2 } from '../..';
import { SourceInitOptions as SourceInitOptions_2 } from '..';
import { SourceInitOptions as SourceInitOptions_3 } from './base';
import { Timestamp as Timestamp_2 } from '../..';
import { Token } from '../../nodes/text/token';
import { WrappedCanvas } from '../../lib/metamuxer';
/**
 * Resolves a number or `Percent` value to an absolute number.
 * Presumably scales percent values against `max` — TODO confirm against implementation.
 */
export declare function abs(value: number | Percent, max: number): number;
/**
 * List of all track types.
 * @public
 */
declare const ALL_TRACK_TYPES: readonly ["video", "audio", "subtitle"];
/**
 * Move an element inside the provided array (mutates the array in place)
 * @param arr - The array to modify
 * @param fromIndex - Index of the element to move
 * @param toIndex - Destination index
 */
export declare function arraymove(arr: any[], fromIndex: number, toIndex: number): void;
/**
 * Browser-friendly assert replacement; throws when the condition is falsy
 * @example assert(true == false)
 */
export declare function assert(condition: any, message?: string): asserts condition;
/**
 * Exhaustiveness helper — accepts `never` so the compiler flags unhandled cases.
 */
export declare function assertNever(_: never): void;
/**
 * List of known audio codecs, ordered by encoding preference.
 * @public
 */
declare const AUDIO_CODECS: readonly ["aac", "opus", "mp3", "vorbis", "flac", "pcm-s16", "pcm-s16be", "pcm-s24", "pcm-s24be", "pcm-s32", "pcm-s32be", "pcm-f32", "pcm-f32be", "pcm-u8", "pcm-s8", "ulaw", "alaw"];
/**
 * Audio scheduling look-ahead window (presumably in seconds — TODO confirm)
 */
export declare const AUDIO_LOOK_AHEAD = 0.2;
/**
 * Converts the specified AudioBuffer to a Blob.
 *
 * Note that changing the MIME type does not change the actual file format;
 * the output is a WAVE file in any case.
 * @param buffer - The audio buffer to convert
 * @param type - Optional MIME type to attach to the resulting Blob
 */
export declare function audioBufferToWav(buffer: AudioBuffer, type?: string): Blob;
/**
 * Clip that plays back audio (standalone or as part of a video)
 */
export declare class AudioClip extends Clip {
private _muted;
private _volume;
private _transcript?;
protected gainNode?: GainNode;
readonly type: ClipType;
layer?: Layer<AudioClip>;
source?: AudioSource;
playing: boolean;
/**
 * Defines the start and stop values of the clip
 * relative to the delay (which part of the media should be played)
 */
range: [Timestamp, Timestamp];
/**
 * Defines the playback element
 */
element?: HTMLAudioElement | HTMLVideoElement;
constructor(input?: MediaInput | AudioSource, props?: AudioClipProps);
init(audio: AudioRenderer): Promise<void>;
play(audio: AudioRenderer): Promise<void>;
pause(): Promise<void>;
enter(audio: AudioRenderer): Promise<void>;
update(audio: AudioRenderer): Promise<void>;
exit(): Promise<void>;
/**
 * Defines the transcript of the video/audio.
 * Will be trimmed to the clip range.
 * If the duration is not set, the full transcript will be returned.
 */
get transcript(): Transcript | undefined;
set transcript(transcript: Transcript | undefined);
cleanup(): void;
get start(): Timestamp;
get stop(): Timestamp;
get duration(): Timestamp;
set duration(time: Time);
/**
 * Number between 0 and 1 defining the volume of the media
 * @default 1
 */
get volume(): number;
set volume(value: number);
get muted(): boolean;
set muted(value: boolean);
trim(start?: Time, stop?: Time): this;
/**
 * Returns a slice of a media clip with trimmed start and stop
 */
subclip(start?: Time, stop?: Time): this;
split(time?: Time): Promise<this>;
/**
 * Removes silences from the clip and returns an array of clips with the silences removed.
 * If the clip has been added to a layer it will remove the silences within the layer.
 * @param options - Options for silence detection
 */
removeSilences(options?: SilenceRemoveOptions): Promise<AudioClip[]>;
/**
 * Get the range of the clip in **seconds**
 */
protected getBufferRange(audio: AudioRenderer): [number, number];
}
/**
 * Constructor properties accepted by {@link AudioClip}
 */
export declare interface AudioClipProps extends ClipProps {
playing?: boolean;
transcript?: Transcript;
volume?: number;
muted?: boolean;
}
/**
 * Union type of known audio codecs.
 * @public
 */
declare type AudioCodec = typeof AUDIO_CODECS[number];
/**
 * Audio encoding configuration used by the encoder
 */
declare interface AudioConfig {
/**
 * Enable audio encoding
 * @default true
 */
enabled?: boolean;
/**
 * A floating point number indicating the audio context's sample rate, in samples per second.
 * @default 48000
 */
sampleRate?: number;
/**
 * Defines the number of channels of the composed audio
 * @default 2
 */
numberOfChannels?: number;
/**
 * Defines the bitrate at which the audio should be rendered at
 * @default 128e3
 */
bitrate?: number;
/**
 * Defines the codec to use for the audio
 * @default 'aac'
 */
codec?: AudioCodec;
}
/**
 * Web Audio API based audio renderer; also acts as the
 * playback ticker that drives composition updates
 */
export declare class AudioRenderer {
readonly context: AudioContext | OfflineAudioContext;
/**
 * Offset in **seconds** relative to the hardware time when the playback started
 */
hardwareOffset: number;
/**
 * Offset in **seconds** relative to 0 when the playback started
 */
playbackOffset: number;
/**
 * Defines the fps used for rendering.
 */
playbackFps: float;
/**
 * The fps used when the ticker is inactive (not playing)
 */
inactiveFps: number;
/**
 * Defines the current state of the ticker
 */
playing: boolean;
/**
 * Defines if the ticker is active
 */
stopped: boolean;
/**
 * User defined fixed duration
 *
 * @deprecated Use markers.stop instead
 */
duration?: Timestamp;
/**
 * The function to call when the ticker is updated
 */
private callback;
/**
 * The last time the timer was updated
 */
private lastFrameTime;
/**
 * Creates a new ticker
 * @param options - Ticker configuration: fps, update callback and audio context
 */
constructor(options?: AudioRendererInit);
/**
 * The current time of the hardware in seconds
 */
get hardwareTime(): number;
/**
 * The current time of the playback in **seconds** relative to 0
 */
get playbackTime(): number;
get playbackTimestamp(): Timestamp;
/**
 * The current frame that the playback is set to
 */
get playbackFrame(): frame;
/**
 * Starts the animation loop
 */
start(): void;
/**
 * Stops the animation loop
 */
stop(): void;
/**
 * Starts the frame incrementation
 */
play(): Promise<void>;
/**
 * Pauses the frame incrementation
 */
pause(): Promise<void>;
/**
 * The animation loop
 */
private timer;
private resumeAudioContext;
}
/**
 * Options accepted by the {@link AudioRenderer} constructor
 */
declare type AudioRendererInit = {
fps?: number;
callback?(): Promise<void>;
context?: AudioContext | OfflineAudioContext;
};
/**
 * A start/stop range within an audio source
 */
export declare type AudioSlice = {
start: Timestamp;
stop: Timestamp;
};
/**
 * Source backing one or more audio clips; handles decoding and analysis
 */
export declare class AudioSource extends BaseSource {
readonly type: ClipType;
element: HTMLAudioElement | HTMLVideoElement;
decoder: WebAudioDecoder;
duration?: Timestamp;
demuxer?: Promise<Input>;
transcript?: Transcript;
constructor(options: SourceOptions);
init(options?: SourceInitOptions): Promise<void>;
decode(numberOfChannels?: number, sampleRate?: number, cache?: boolean): Promise<AudioBuffer>;
/**
 * Find silences in the audio clip. Results are cached.
 *
 * Uses a default sample rate of 3000.
 * @param options - Silences options.
 * @returns An array of the silences (in ms) in the clip.
 */
silences(options?: SilenceDetectionOptions): Promise<AudioSlice[]>;
/**
 * Sampler that uses a window size to calculate the max value of the samples in the window.
 * @param options - Sampling options.
 * @returns An array of the max values of the samples in the window.
 */
sample({ length, start, stop, logarithmic, }?: SamplerOptions): Promise<Float32Array>;
thumbnail(options?: SamplerOptions): Promise<HTMLElement>;
}
/**
 * Styling of the box rendered behind text
 */
export declare type Background = {
/**
 * @default #000000
 */
fill?: hex;
/**
 * @default 100
 */
opacity?: number;
/**
 * @default 20
 */
borderRadius?: number;
/**
 * @default { x: 30, y: 20 }
 */
padding?: {
x: int;
y: int;
};
};
/**
 * Base error carrying a machine-readable error code
 */
export declare class BaseError extends Error {
readonly message: string;
readonly code: string;
constructor({ message, code }: {
message?: string | undefined;
code?: string | undefined;
}, ...args: any[]);
}
/**
 * Base event map: wildcard and error events plus user-supplied events
 */
declare type BaseEvents_3<E = {}> = {
'*': any;
error: Error;
} & E;
/**
 * Base class for all media sources; a source can be shared by multiple clips
 */
export declare class BaseSource extends Serializer {
/**
 * Unique identifier of the source
 */
id: string;
/**
 * Data associated with the source
 */
data: Record<string, unknown>;
readonly type: ClipType;
mimeType: string;
input: MediaInput;
name: string;
createdAt: Date;
constructor(options: SourceOptions);
init(options?: SourceInitOptions): Promise<void>;
/**
 * Get the source as an array buffer
 */
arrayBuffer(): Promise<ArrayBuffer>;
/**
 * Downloads the file
 */
download(): Promise<void>;
/**
 * Get a visualization of the source
 * as an html element
 * @deprecated
 */
thumbnail(): Promise<HTMLElement>;
/**
 * Create a checkpoint of the source. May include Blob or FileSystemFileHandle.
 * @returns A serialized representation of the source
 */
createCheckpoint(): Promise<unknown>;
}
/**
 * Defines the blend mode to use (mirrors the Canvas 2D globalCompositeOperation values)
 */
export declare type BlendMode = 'source-over' | 'source-in' | 'source-out' | 'source-atop' | 'destination-over' | 'destination-in' | 'destination-out' | 'destination-atop' | 'lighter' | 'copy' | 'xor' | 'multiply' | 'screen' | 'overlay' | 'darken' | 'lighten' | 'color-dodge' | 'color-burn' | 'hard-light' | 'soft-light' | 'difference' | 'exclusion' | 'hue' | 'saturation' | 'color' | 'luminosity';
/**
 * Merges the channels of the audio blob into a mono AudioBuffer
 */
export declare function blobToMonoBuffer(blob: Blob, sampleRate?: number, scalingFactor?: number): Promise<AudioBuffer>;
/**
 * Convert an audio buffer into a planar float 32 array
 */
export declare function bufferToF32Planar(input: AudioBuffer): Float32Array;
/**
 * Convert an audio buffer into an interleaved int 16 array
 */
export declare function bufferToI16Interleaved(audioBuffer: AudioBuffer): Int16Array;
/**
 * Uppercases the first character of the given string
 */
export declare function capitalize(str: string): string;
/**
 * Base strategy for generating caption clips from a transcript
 */
export declare class CaptionPreset {
/**
 * The position of the captions
 */
position: RelativePoint;
constructor(config?: CaptionPresetConfig);
/**
 * This function syncs the timestamp of the captions to the audio clip
 * @param layer - The layer to sync
 * @param clip - The audio clip to sync to
 */
sync(layer: Layer, clip: AudioClip): this;
/**
 * This function creates the captions
 * @param layer - The layer to apply the settings to
 * @param transcript - The transcript to apply the settings to
 * @param offset - The offset of the captions
 */
apply(layer: Layer, transcript: Transcript, offset: Timestamp): Promise<void>;
}
/**
 * Configuration accepted by {@link CaptionPreset}
 */
export declare interface CaptionPresetConfig {
position?: RelativePoint;
}
/**
 * Defines the captions transport format
 */
export declare type Captions = {
/**
 * Defines the word or token
 * currently spoken
 */
token: string;
/**
 * Defines the time when the token
 * will be spoken in **milliseconds**
 */
start: number;
/**
 * Defines the time when the token
 * has been spoken in **milliseconds**
 */
stop: number;
}[][];
/**
 * Caption preset that reveals tokens in a cascading manner
 */
export declare class CascadeCaptionPreset extends CaptionPreset {
generatorOptions: GeneratorOptions;
constructor(config?: DefaultCaptionPresetConfig);
apply(layer: Layer, transcript: Transcript, delay: Timestamp): Promise<void>;
}
/**
 * Defines the properties of a circle
 */
export declare interface Circle {
/**
 * The x coordinate of the circle
 */
cx: number | Percent;
/**
 * The y coordinate of the circle
 */
cy: number | Percent;
/**
 * The radius of the circle
 */
radius: number | Percent;
}
/**
 * Shape clip that renders a circle
 */
export declare class CircleClip extends ShapeClip {
_keepAspectRatio: boolean;
readonly type = "circle";
layer?: Layer<CircleClip>;
animations: CircleClipAnimationOptions;
constructor(props?: CircleClipProps);
get radius(): number;
set radius(value: number | Percent);
get name(): string;
/**
 * Access to the html document that
 * will be rendered to the canvas
 */
render(renderer: VideoRenderer): void;
}
/**
 * Keyframe animation options supported by {@link CircleClip}
 */
export declare type CircleClipAnimationOptions = (KeyframeOptions<'x' | 'y' | 'translateX' | 'translateY' | 'width' | 'height', number | Percent> | KeyframeOptions<'opacity' | 'rotation' | 'scale' | 'scaleX' | 'scaleY' | 'radius', number> | KeyframeOptions<'fill', string>)[];
/**
 * Constructor properties accepted by {@link CircleClip}
 */
export declare interface CircleClipProps extends ShapeClipProps {
radius?: number | Percent;
animations?: CircleClipAnimationOptions;
}
/**
 * Mask with a circular shape
 */
export declare class CircleMask extends Mask {
readonly type: MaskType;
/**
 * @deprecated Use `x` instead
 */
cx: number | Percent;
/**
 * @deprecated Use `y` instead
 */
cy: number | Percent;
animations: CircleMaskAnimationOptions;
constructor({ cx, cy, x, y, radius, animations, ...props }?: CircleMaskProps);
set x(value: number | Percent);
set y(value: number | Percent);
get x(): number | Percent;
get y(): number | Percent;
get radius(): number;
set radius(value: number | Percent);
draw(renderer: VideoRenderer): Path2D;
get bounds(): [Point, Point, Point, Point];
}
/**
 * Keyframe animation options supported by {@link CircleMask}
 */
export declare type CircleMaskAnimationOptions = KeyframeOptions<'x' | 'y' | 'cx' | 'cy' | 'radius' | 'width' | 'height', number | Percent>[];
/**
 * Constructor properties accepted by {@link CircleMask}
 */
export declare interface CircleMaskProps extends MaskProps {
cx?: number | Percent;
cy?: number | Percent;
x?: number | Percent;
y?: number | Percent;
radius?: number | Percent;
animations?: CircleMaskAnimationOptions;
}
/**
 * Clamps `t` to the inclusive range [a, b]
 */
export declare function clamp(t: number, a: number, b: number): number;
/**
 * Caption preset with a classic single-block layout
 */
export declare class ClassicCaptionPreset extends CaptionPreset {
generatorOptions: GeneratorOptions;
constructor(config?: DefaultCaptionPresetConfig);
apply(layer: Layer, transcript: Transcript, delay: Timestamp): Promise<void>;
}
/**
 * Base class for all clips that can be placed on a layer
 */
export declare class Clip extends Clip_base {
/**
 * Unique identifier of the clip
 */
id: string;
_name: undefined | string;
_delay: Timestamp;
_duration: Timestamp;
/**
 * Data associated with the clip
 */
data: Record<string, unknown>;
/**
 * Flag to check if the clip has been initialized
 */
initialized: boolean;
/**
 * Defines the type of the clip
 */
readonly type: ClipType;
/**
 * Defines the source of the clip which can be
 * shared with other clips for more efficient
 * memory usage
 */
source?: BaseSource;
/**
 * Timestamp when the clip has been created
 */
readonly createdAt: Date;
/**
 * Controls the visibility of the clip
 */
disabled: boolean;
/**
 * Animation properties for the clip
 */
animations: ClipAnimationOptions;
/**
 * Access the parent layer
 */
layer?: Layer;
/**
 * The input that was used to create the clip
 */
input?: MediaInput;
/**
 * Stores the current transition configuration; how this clip transitions to the next clip.
 */
transition?: TransitionConfig;
/**
 * Human readable identifier of the clip
 */
get name(): string | undefined;
set name(name: string);
/**
 * Get the first visible frame
 */
get start(): Timestamp;
/**
 * Get the last visible frame
 */
get stop(): Timestamp;
/**
 * Get the delay of the clip
 */
get delay(): Timestamp;
/**
 * Get the duration of the clip
 */
get duration(): Timestamp;
constructor(props?: ClipProps);
/**
 * Set the animation time of the clip
 * and interpolate the values
 * @param time the current absolute time to render
 */
animate(time: Timestamp): this;
/**
 * Method for connecting the layer with the clip
 */
connect(layer: Layer<Clip>): Promise<void>;
/**
 * Set the clip's offset relative to zero in seconds. Can be negative
 */
set delay(time: Time);
/**
 * Set the duration of the clip, needs to be positive
 */
set duration(time: Time);
/**
 * Offsets the clip by a given frame number
 */
offset(time: Time): this;
/**
 * Triggered when the clip is
 * added to the composition
 */
init(audio: AudioRenderer): Promise<void>;
/**
 * Triggered when the clip enters the scene
 */
enter(audio: AudioRenderer): Promise<void>;
/**
 * Triggered for each redraw of the scene.
 */
update(audio: AudioRenderer): Promise<void>;
/**
 * Triggered after the clip was updated
 */
render(video: VideoRenderer): void;
/**
 * Triggered when the clip exits the scene
 */
exit(audio: AudioRenderer): Promise<void>;
/**
 * Seek the clip to a specific absolute time
 */
seek(audio: AudioRenderer): Promise<void>;
/**
 * Play the clip
 */
play(audio: AudioRenderer): Promise<void>;
/**
 * Pause the clip
 */
pause(audio: AudioRenderer): Promise<void>;
/**
 * Remove the clip from the layer
 */
detach(): this;
/**
 * Cleanup the clip after it has been removed from the layer
 */
cleanup(): void;
/**
 * Trim the clip to the specified start and stop
 */
trim(start?: Time, stop?: Time): this;
/**
 * Split the clip into two clips at the specified time
 * @param time - The time to split at; the current frame of the
 * composition is used as a fallback
 * @returns The clip that was created by performing this action
 */
split(time?: Time): Promise<this>;
/**
 * Create a copy of the clip. The copy receives a new id
 * but shares the same source
 */
copy(): this;
/**
 * Create a checkpoint of the clip. May include Blob or FileSystemFileHandle.
 * @returns A serialized representation of the clip
 */
createCheckpoint(): Promise<unknown>;
/**
 * Restore a checkpoint of the clip.
 * @param checkpoint The checkpoint to restore
 * @param sources The sources to use for the restoration
 * @returns The restored clip
 */
restoreCheckpoint(checkpoint: unknown, sources?: BaseSource[]): Promise<this>;
}
/**
 * Minimal time range with an optional mask (internal helper type)
 */
declare type Clip_2 = {
start: Timestamp;
stop: Timestamp;
mask?: Mask;
};
/**
 * Generated event-emitter mixin base for {@link Clip} (kept verbatim;
 * produced by the EventEmitterMixin over ClipEvents)
 */
declare const Clip_base: {
new (...args: any[]): {
_handlers: {
'*'?: {
[x: string]: (event: EmittedEvent_2<any, any>) => void;
} | undefined;
error?: {
[x: string]: (event: EmittedEvent_2<Error, any>) => void;
} | undefined;
offset?: {
[x: string]: (event: EmittedEvent_2<Timestamp, any>) => void;
} | undefined;
frame?: {
[x: string]: (event: EmittedEvent_2<number | undefined, any>) => void;
} | undefined;
attach?: {
[x: string]: (event: EmittedEvent_2<undefined, any>) => void;
} | undefined;
detach?: {
[x: string]: (event: EmittedEvent_2<undefined, any>) => void;
} | undefined;
update?: {
[x: string]: (event: EmittedEvent_2<any, any>) => void;
} | undefined;
};
on<T extends "*" | "error" | keyof ClipEvents>(eventType: T, callback: (event: EmittedEvent_2<BaseEvents_2<ClipEvents>[T], /*elided*/ any>) => void): string;
off(id?: string | "*", ...ids: string[]): void;
emit<T extends "*" | "error" | keyof ClipEvents>(eventType: T, detail: BaseEvents_2<ClipEvents>[T]): void;
bubble(target: /*elided*/ any): string;
resolve(eventType: "*" | "error" | keyof ClipEvents): (resolve: (value: unknown) => void, reject: (reason?: any) => void) => void;
};
} & typeof Serializer;
/**
 * Keyframe animation options accepted by any clip
 */
export declare type ClipAnimationOptions = KeyframeOptions<any, number | string | hex | Percent>[];
/**
 * Factory helpers that reconstruct clips from serialized data
 */
export declare class ClipDeserializer {
static fromType(data: {
type: ClipType;
}): clips.Clip;
static fromSource(data: BaseSource): clips.AudioClip | clips.HtmlClip | clips.ImageClip | undefined;
}
/**
 * Events emitted by a clip
 */
export declare type ClipEvents = {
offset: Timestamp;
frame: number | undefined;
attach: undefined;
detach: undefined;
update: any;
};
/**
 * Constructor properties accepted by {@link Clip}
 */
export declare interface ClipProps {
disabled?: boolean;
name?: string;
duration?: Time;
delay?: Time;
animations?: ClipAnimationOptions;
transition?: TransitionConfig;
}
/**
 * Namespace re-exporting all clip classes and their related types
 */
declare namespace clips {
export {
Clip,
ClipType,
ClipEvents,
ClipAnimationOptions,
ClipDeserializer,
ClipProps,
VisualMixin,
VisualMixinAnimationOptions,
VisualMixinProps,
ImageClip,
ImageClipAnimationOptions,
ImageClipProps,
TextClip,
RichTextClip,
RichTextClipProps,
Background,
TextClipAnimationOptions,
TextClipProps,
VideoClip,
VideoClipAnimationOptions,
VideoClipProps,
AudioClip,
AudioClipProps,
SilenceRemoveOptions,
HtmlClip,
HtmlClipAnimationOptions,
HtmlClipProps,
ShapeClip,
ShapeClipProps,
CircleClip,
CircleClipAnimationOptions,
CircleClipProps,
RectangleClip,
RectangleClipAnimationOptions,
RectangleClipProps,
WaveformClip,
WaveformBar,
WaveformClipAnimationOptions,
WaveformClipProps
}
}
/**
 * Union of all supported clip type discriminators
 */
export declare type ClipType = 'image' | 'audio' | 'text' | 'video' | 'base' | 'html' | 'shape' | 'circle' | 'rectangle' | 'waveform';
/**
 * Top-level container that combines layers, clips and renderers
 * into a playable and exportable scene
 */
export declare class Composition extends Composition_base {
/**
 * Unique identifier of the composition
 */
id: string;
/**
 * Access the video renderer
 */
video: VideoRenderer;
/**
 * Access the audio renderer. Will drive the playback of the composition
 */
audio: AudioRenderer;
/**
 * Layers attached to the composition
 */
layers: Layer[];
/**
 * Behavior when playback reaches the end of the composition
 */
playbackEndBehavior: PlaybackEndBehavior;
/**
 * Defines special timestamps of the composition
 */
markers: Markers;
/**
 * Timestamp when the composition has been created
 */
readonly createdAt: Date;
/**
 * Data associated with the composition
 */
data: Record<string, unknown>;
constructor({ height, width, background, playbackEndBehavior, }?: CompositionSettings);
/**
 * Settings of the composition
 */
get settings(): Required<CompositionSettings>;
set settings(settings: Required<CompositionSettings>);
/**
 * Get the current playback state of the composition
 */
get playing(): boolean;
/**
 * Get the current width of the canvas
 */
get width(): number;
/**
 * Get the current height of the canvas
 */
get height(): number;
/**
 * This is where the playback stops playing
 */
get duration(): Timestamp;
/**
 * Limit the total duration of the composition
 */
set duration(time: Time | undefined);
/**
 * Get the currently rendered time of the playback
 */
get playhead(): Timestamp;
/**
 * Set the currently rendered time of the playback
 */
set playhead(time: Time);
/**
 * Get all clips in the composition
 */
get clips(): Clip[];
/**
 * Resize the renderer
 */
resize(width: number, height: number): void;
/**
 * Add the renderer to the dom.
 * This will start the ticker
 */
mount(element: HTMLElement): void;
/**
 * Remove the renderer from the dom.
 * This will stop the ticker
 */
unmount(): void;
/**
 * Insert a new layer at the specified index (defaults to 0)
 * @param layer The layer to insert
 * @param index The index to insert at (0 = top layer, default: 0)
 */
insertLayer<L extends Layer>(layer: L, index?: number): Promise<L>;
/**
 * Create a layer with the given type
 * @param type the desired type of the layer
 * @returns A new layer
 */
createLayer<L extends Clip>(index?: number): Layer<L>;
/**
 * Create captions for the composition
 * @param source The source to create captions for
 * @param strategy The strategy to use for creating captions
 * @returns A new layer with captions
 */
createCaptions(source: AudioClip | Transcript, strategy?: CaptionPreset | (new () => CaptionPreset)): Promise<Layer<TextClip>>;
/**
 * Convenience function for appending a layer
 * as well as the clip to the composition
 */
add<L extends Clip | Clip[]>(clip: L): Promise<L>;
/**
 * Remove a given clip from the composition
 * @returns `Clip` when it has been successfully removed `undefined` otherwise
 */
remove<L extends Clip>(clip: L): L | undefined;
/**
 * Compute the currently active frame
 */
update(): Promise<void>;
/**
 * Convenience function to take a screenshot of the current frame
 */
screenshot(format?: ScreenshotImageFormat, quality?: number): string;
/**
 * Set the playback position to a specific time
 * @param time new playback time (defaults to 0)
 */
seek(time?: Time): Promise<void>;
/**
 * Play the composition
 * @param time The time to start playing from
 */
play(time?: Time): Promise<void>;
/**
 * Pause the composition
 */
pause(): Promise<void>;
/**
 * Remove all layers and clips from the composition
 */
clear(): void;
/**
 * Get the current playback time and composition
 * duration formatted as `00:00 / 00:00` by default.
 * if **hours** is set the format is `HH:mm:ss` whereas
 * **milliseconds** will return `mm:ss.SSS`
 */
time(precision?: {
hours?: boolean;
milliseconds?: boolean;
}): string;
/**
 * Remove a given layer from the composition
 * @returns `Layer` when it has been successfully removed `undefined` otherwise
 */
removeLayer(layer: Layer): Layer | undefined;
/**
 * Remove multiple layers from the composition
 * @returns `Layer[]` all removed layers
 */
removeLayers(...layers: Layer[]): Layer[];
/**
 * Handle the playback end behavior
 */
private handlePlaybackEnd;
/**
 * Create a checkpoint of the composition. May include Blobs.
 * @returns A serialized representation of the composition
 */
createCheckpoint(): Promise<unknown>;
/**
 * Restore a checkpoint of the composition.
 * @param checkpoint The checkpoint to restore
 * @param sources The sources to use for the restoration
 * @returns The restored composition
 */
restoreCheckpoint(checkpoint: unknown, sources?: BaseSource[]): Promise<this>;
}
/**
 * Generated event-emitter mixin base for {@link Composition} (kept verbatim;
 * produced by the EventEmitterMixin over CompositionEvents)
 */
declare const Composition_base: {
new (...args: any[]): {
_handlers: {
'*'?: {
[x: string]: (event: EmittedEvent<any, any>) => void;
} | undefined;
error?: {
[x: string]: (event: EmittedEvent<Error, any>) => void;
} | undefined;
play?: {
[x: string]: (event: EmittedEvent<frame_2, any>) => void;
} | undefined;
pause?: {
[x: string]: (event: EmittedEvent<frame_2, any>) => void;
} | undefined;
attach?: {
[x: string]: (event: EmittedEvent<undefined, any>) => void;
} | undefined;
detach?: {
[x: string]: (event: EmittedEvent<undefined, any>) => void;
} | undefined;
resize?: {
[x: string]: (event: EmittedEvent<undefined, any>) => void;
} | undefined;
mount?: {
[x: string]: (event: EmittedEvent<undefined, any>) => void;
} | undefined;
unmount?: {
[x: string]: (event: EmittedEvent<undefined, any>) => void;
} | undefined;
frame?: {
[x: string]: (event: EmittedEvent<number | undefined, any>) => void;
} | undefined;
currentframe?: {
[x: string]: (event: EmittedEvent<frame_2, any>) => void;
} | undefined;
init?: {
[x: string]: (event: EmittedEvent<undefined, any>) => void;
} | undefined;
update?: {
[x: string]: (event: EmittedEvent<any, any>) => void;
} | undefined;
load?: {
[x: string]: (event: EmittedEvent<undefined, any>) => void;
} | undefined;
};
on<T extends "*" | "error" | keyof CompositionEvents>(eventType: T, callback: (event: EmittedEvent<BaseEvents<CompositionEvents>[T], /*elided*/ any>) => void): string;
off(id?: string | "*", ...ids: string[]): void;
emit<T extends "*" | "error" | keyof CompositionEvents>(eventType: T, detail: BaseEvents<CompositionEvents>[T]): void;
bubble(target: /*elided*/ any): string;
resolve(eventType: "*" | "error" | keyof CompositionEvents): (resolve: (value: unknown) => void, reject: (reason?: any) => void) => void;
};
} & typeof Serializer;
/**
 * Defines the type of events emitted by the
 * composition
 */
export declare type CompositionEvents = {
play: frame;
pause: frame;
attach: undefined;
detach: undefined;
resize: undefined;
mount: undefined;
unmount: undefined;
frame: number | undefined;
currentframe: frame;
init: undefined;
update: any;
load: undefined;
};
/**
 * Settings accepted by the {@link Composition} constructor
 */
export declare type CompositionSettings = {
/**
 * Height of the composition
 *
 * @default 1080
 */
height?: int;
/**
 * Width of the composition
 *
 * @default 1920
 */
width?: int;
/**
 * Background color of the composition
 *
 * @default #000000
 */
background?: hex | 'transparent';
/**
 * Behavior when playback reaches the end of the composition
 * - 'stop': Pause at the end
 * - 'loop': Continue playing from the beginning
 * - 'reset': Jump to frame 0 and pause
 *
 * @default 'stop'
 */
playbackEndBehavior?: PlaybackEndBehavior;
};
/**
 * Defines the constructor required by mixins
 */
export declare type Constructor<T = {}> = new (...args: any[]) => T;
/**
 * Supported output container formats
 */
declare type ContainerFormat = 'mp4' | 'webm' | 'ogg';
/**
 * Deserializer that revives Date objects from their string representation
 */
export declare const DateDeserializer: {
fromJSON: (date: string) => Date;
};
/**
 * Limit the number of times a function can be called
 * per interval, timeout is in milliseconds
 */
export declare function debounce(func: Function, timeout?: number): (...args: any[]) => void;
/**
 * Error raised while decoding media
 */
export declare class DecoderError extends BaseError {
}
/**
 * Caption preset configuration including token generator options
 */
export declare interface DefaultCaptionPresetConfig extends CaptionPresetConfig {
generatorOptions?: GeneratorOptions;
}
/**
 * Function that revives a value of type `T` from serialized data
 */
export declare type Deserializer<T> = (data: any) => Promise<T> | T;
declare interface Deserializer_2 {
fromJSON?: (obj: any) => any;
}
/**
 * Detects the content (MIME) type of the given media input
 */
export declare function detectContentType(input: MediaInput): Promise<string>;
/**
 * This utility creates an anchor tag and clicks on it
 * @param source Blob url or base64 encoded svg
 * @param name File name suggestion
 */
export declare function downloadObject(source: MediaInput, name?: string): Promise<void>;
/**
 * An optional easing function to apply to the interpolation.
 * Easing functions can modify the interpolation to be non-linear.
 * @default "linear"
 */
export declare type Easing = 'linear' | 'ease-in' | 'ease-out' | 'ease-in-out' | 'ease-out-in';
/**
 * Defines an ellipse with a width and height
 */
export declare interface Ellipse {
/**
 * The x coordinate of the ellipse
 */
cx: number | Percent;
/**
 * The y coordinate of the ellipse
 */
cy: number | Percent;
/**
 * The width of the ellipse
 */
width: number | Percent;
/**
 * The height of the ellipse
 */
height: number | Percent;
}
/**
 * Custom event whose target is overridden with the emitting instance type
 */
declare type EmittedEvent_3<K, T extends {}> = OverrideValues<CustomEvent<K>, {
target: T;
}>;
/**
 * A single encoded Opus chunk with timing information
 */
export declare type EncodedOpusChunk = {
data: Uint8Array;
timestamp: number;
type: 'key' | 'delta';
duration: number;
};
/**
 * Callback invoked for each encoded Opus chunk
 */
export declare type EncodedOpusChunkOutputCallback = (output: EncodedOpusChunk, metadata: EncodedAudioChunkMetadata) => void;
/**
 * Renders a composition to an encoded, multiplexed media file
 */
export declare class Encoder extends Encoder_base {
private composition;
private config;
/**
 * Create a new audio and video encoder and multiplex the result
 * using a mp4 container
 * @param composition The composition to render
 * @param config Configure the output
 * @example
 * ```
 * const blob = await new Encoder(composition).render();
 * ```
 */
constructor(composition: Composition, config?: EncoderConfig);
/**
 * Export the specified composition
 * @throws DOMException if the export has been aborted
 */
render(target?: WriteStreamCallback | FileSystemFileHandle | WritableStream | string, signal?: AbortSignal): Promise<undefined | Blob>;
audioCodecs(): Promise<("opus" | "aac" | "mp3" | "vorbis" | "flac" | "pcm-s16" | "pcm-s16be" | "pcm-s24" | "pcm-s24be" | "pcm-s32" | "pcm-s32be" | "pcm-f32" | "pcm-f32be" | "pcm-u8" | "pcm-s8" | "ulaw" | "alaw")[]>;
videoCodecs(): Promise<("avc" | "hevc" | "vp9" | "av1" | "vp8")[]>;
private log;
}
/**
 * Generated event-emitter mixin base for {@link Encoder} (kept verbatim;
 * produced by the EventEmitterMixin over EncoderEvents)
 */
declare const Encoder_base: {
new (...args: any[]): {
_handlers: {
'*'?: {
[x: string]: (event: EmittedEvent<any, any>) => void;
} | undefined;
error?: {
[x: string]: (event: EmittedEvent<Error, any>) => void;
} | undefined;
render?: {
[x: string]: (event: EmittedEvent< {
progress: number;
total: number;
remaining: Date;
}, any>) => void;
} | undefined;
};
on<T extends "*" | "error" | "render">(eventType: T, callback: (event: EmittedEvent<BaseEvents<EncoderEvents>[T], /*elided*/ any>) => void): string;
off(id?: string | "*", ...ids: string[]): void;
emit<T extends "*" | "error" | "render">(eventType: T, detail: BaseEvents<EncoderEvents>[T]): void;
bubble(target: {
_handlers: {
'*'?: {
[x: string]: (event: EmittedEvent<any, any>) => void;
} | undefined;
error?: {
[x: string]: (event: EmittedEvent<Error, any>) => void;
} | undefined;
render?: {
[x: string]: (event: EmittedEvent< {
progress: number;
total: number;
remaining: Date;
}, any>) => void;
} | undefined;
};
on<T extends "*" | "error" | "render">(eventType: T, callback: (event: EmittedEvent<BaseEvents<EncoderEvents>[T], /*elided*/ any>) => void): string;
off(id?: string | "*", ...ids: string[]): void;
emit<T extends "*" | "error" | "render">(eventType: T, detail: BaseEvents<EncoderEvents>[T]): void;
bubble(target: /*elided*/ any): string;
resolve(eventType: "*" | "error" | "render"): (resolve: (value: unknown) => void, reject: (reason?: any) => void) => void;
}): string;
resolve(eventType: "*" | "error" | "render"): (resolve: (value: unknown) => void, reject: (reason?: any) => void) => void;
};
};
declare interface EncoderConfig {
/**
* Video encoding configuration
*/
video?: VideoConfig;
/**
* Audio encoding configuration
*/
audio?: AudioConfig;
/**
* Defines if the performance should be logged
* @default false
*/
debug?: boolean;
/**
* Defines the watermark to add to the video.
* Needs to be more than 5 characters long
*/
watermark?: string;
/**
* Defines the output format of the encoded file
* @default 'mp4'
*/
format?: ContainerFormat;
}
/**
* Error type thrown when encoding fails.
*/
export declare class EncoderError extends BaseError {
}
/**
* Events emitted by the encoder while rendering.
*/
declare type EncoderEvents = {
render: {
/**
* Defines how many frames have been rendered so far
*/
progress: number;
/**
* Defines the total number of frames
* to be rendered
*/
total: number;
/**
* Defines the estimated remaining
* render time
*/
remaining: Date;
};
};
/**
* Error message structure
*/
export declare type ErrorEventDetail = {
/**
* The error message text
*/
msg: string;
/**
* The error code identifying the failure
*/
code: string;
/**
* Optional additional context attached to the error
*/
params?: any;
};
/**
* Factory returning an anonymous base class that implements a typed
* event emitter for the given event map: `on`/`off` to manage handlers,
* `emit` to dispatch, `bubble` to forward events to another emitter,
* and `resolve` to adapt an event into a Promise executor.
*/
export declare function EventEmitter<Events = {}>(): {
new (...args: any[]): {
_handlers: { [T in keyof BaseEvents<Events>]?: {
[x: string]: (event: EmittedEvent<BaseEvents<Events>[T], any>) => void;
} | undefined; };
on<T_1 extends "*" | "error" | keyof Events>(eventType: T_1, callback: (event: EmittedEvent<BaseEvents<Events>[T_1], /*elided*/ any>) => void): string;
off(id?: string | "*", ...ids: string[]): void;
emit<T_1 extends "*" | "error" | keyof Events>(eventType: T_1, detail: BaseEvents<Events>[T_1]): void;
bubble(target: {
_handlers: { [T in keyof BaseEvents<Events>]?: {
[x: string]: (event: EmittedEvent<BaseEvents<Events>[T], any>) => void;
} | undefined; };
on<T_1 extends "*" | "error" | keyof Events>(eventType: T_1, callback: (event: EmittedEvent<BaseEvents<Events>[T_1], /*elided*/ any>) => void): string;
off(id?: string | "*", ...ids: string[]): void;
emit<T_1 extends "*" | "error" | keyof Events>(eventType: T_1, detail: BaseEvents<Events>[T_1]): void;
bubble(target: /*elided*/ any): string;
resolve(eventType: "*" | "error" | keyof Events): (resolve: (value: unknown) => void, reject: (reason?: any) => void) => void;
}): string;
resolve(eventType: "*" | "error" | keyof Events): (resolve: (value: unknown) => void, reject: (reason?: any) => void) => void;
};
};
/**
* Mixin that augments an arbitrary base class with the typed
* event emitter interface (`on`/`off`/`emit`/`bubble`/`resolve`)
* for the given event map, preserving the base constructor type.
*/
export declare function EventEmitterMixin<Events = {}, T extends Constructor = Constructor>(Base: T): {
new (...args: any[]): {
_handlers: { [T_1 in keyof BaseEvents_3<Events>]?: {
[x: string]: (event: EmittedEvent_3<BaseEvents_3<Events>[T_1], any>) => void;
}; };
on<T_1 extends keyof BaseEvents_3<Events>>(eventType: T_1, callback: (event: EmittedEvent_3<BaseEvents_3<Events>[T_1], /*elided*/ any>) => void): string;
off(id?: string | "*", ...ids: string[]): void;
emit<T_1 extends keyof BaseEvents_3<Events>>(eventType: T_1, detail: BaseEvents_3<Events>[T_1]): void;
bubble(target: /*elided*/ any): string;
resolve(eventType: keyof BaseEvents_3<Events>): (resolve: (value: unknown) => void, reject: (reason?: any) => void) => void;
};
} & T;
/**
* Internal event map: frame changes, attach/detach
* lifecycle notifications and generic updates.
*/
declare type Events = {
frame: number | undefined;
attach: undefined;
detach: undefined;
update: any;
};
/**
* Internal event map that only signals generic updates.
*/
declare type Events_2 = {
update: any;
};
/**
* Defines the extrapolation behavior outside the input range.
* - "clamp": Clamps the value to the nearest endpoint within the range.
* - "extend": Allows values to extend beyond the range.
* @default "clamp"
*/
export declare type Extrapolate = 'clamp' | 'extend';
/**
* Defines the fill properties that
* can be applied to a shape
*/
export declare interface FillOptions {
/**
* The color of the fill; a solid hex value,
* a gradient, or a pattern
*/
color: hex | Gradient | Pattern;
/**
* The opacity of the fill
*/
opacity?: number;
}
/**
* Defines the fill rule of the mask
*/
export declare type FillRule = 'nonzero' | 'evenodd';
/**
* Defines a floating point number.
* Branded alias that still accepts any plain number.
*/
export declare type float = (number & {
_float: void;
}) | number;
/**
* Converts a Float32Array to 16-bit PCM, writing samples into the
* provided DataView starting at the given byte offset.
* @returns The same DataView, populated with the converted samples
*/
export declare function floatTo16BitPCM(dataview: DataView, buffer: Float32Array, offset: number): DataView;
/**
* Defines the properties of a font
*/
export declare interface Font {
/**
* The size of the font
*/
size: number;
/**
* The family of the font
*/
family: string;
/**
* The weight of the font
*/
weight?: FontWeight;
/**
* The style of the font
*/
style?: FontStyle;
}
/**
* Maps numeric CSS font weights to their human-readable names.
*/
export declare const FONT_WEIGHTS: {
readonly '100': "Thin";
readonly '200': "Extra Light";
readonly '300': "Light";
readonly '400': "Normal";
readonly '500': "Medium";
readonly '600': "Semi Bold";
readonly '700': "Bold";
readonly '800': "Extra Bold";
readonly '900': "Black";
};
/**
* Defines all available font families; a known web font
* key or any custom family name
*/
export declare type FontFamily = keyof typeof WebFonts | string;
/**
* Manages loading and tracking fonts from web and local sources.
* Serializable, with checkpoint-based save/restore of its state.
*/
export declare class FontManager extends Serializer {
/**
* The fonts that have been loaded
*/
loadedFonts: types.FontSource[];
/**
* Load the font that has been initiated via the constructor
*/
load<T extends keyof typeof WebFonts>(options: types.FontSource | types.WebfontProperties<T>): Promise<Font>;
/**
* Reload all fonts
* @deprecated Use restoreCheckpoint instead
*/
reload(): Promise<void>;
/**
* Get all available local fonts, requires the
* **Local Font Access API**
*/
static localFonts(): Promise<types.FontSources[]>;
/**
* Get common web fonts
*/
static webFonts(): types.FontSources[];
/**
* Static variant of {@link load}; loads a font without
* requiring a FontManager instance
*/
static load<T extends keyof typeof WebFonts>(options: types.FontSource | types.WebfontProperties<T>): Promise<Font>;
/**
* Create a copy of this font manager
*/
copy(): FontManager;
/**
* Create a checkpoint of the current font manager state
*/
createCheckpoint(): Promise<unknown>;
/**
* Restore the font manager state from a checkpoint
* @param checkpoint - The checkpoint to restore from
*/
restoreCheckpoint(checkpoint: unknown): Promise<void>;
}
/**
* Defines the properties that are required
* to load a new font
*/
export declare type FontSource = {
/**
* Name of the Family
* @example 'Arial'
*/
family: string;
/**
* Source of the Variant
* @example url(arial.ttf)
*/
source: string;
/**
* Defines the font style
* @example 'italic'
*/
style?: FontStyle;
/**
* The weight of the font
* @example '400'
*/
weight?: FontWeight;
/**
* The size of the font
* @example 16
*/
size?: number;
};
/**
* Defines a single font that has one or
* more variants
*/
export declare type FontSources = {
/**
* Name of the font family
*/
family: string;
/**
* All known variants of the family
*/
variants: FontSource[];
};
/**
* Defines the style of the font
*/
export declare type FontStyle = 'normal' | 'italic' | 'oblique';
/**
* Defines all available font subsets which
* limit the number of characters
*/
export declare type FontSubset = 'latin' | 'latin-ext' | 'vietnamese' | 'cyrillic' | 'cyrillic-ext';
/**
* Defines the source where the font is coming from
*/
export declare type FontType = 'local' | 'web';
/**
* Defines the thickness/weight of the font,
* mirroring the CSS font-weight values
*/
export declare type FontWeight = 'normal' | 'bold' | '100' | '200' | '300' | '400' | '500' | '600' | '700' | '800' | '900';
/**
* Default frames per second.
*/
export declare const FPS_DEFAULT = 30;
/**
* Reduced frame rate used while inactive
* (per the constant name) — confirm exact usage.
*/
export declare const FPS_INACTIVE = 1;
/**
* Defines an integer that corresponds
* to a point in time
*/
export declare type frame = (number & {
_frame: void;
}) | number;
/**
* Convert frames to milliseconds
* @param fps - Frames per second used for the conversion
*/
export declare function framesToMillis(frames: frame, fps?: number): number;
/**
* Convert frames into seconds
* @param fps - Frames per second used for the conversion
*/
export declare function framesToSeconds(frames: frame, fps?: number): number;
/**
* Options controlling how content is iterated into groups.
* Each range is a [min, max?] tuple.
*/
export declare type GeneratorOptions = {
/**
* Iterates by word count
*/
count?: [number, number?];
/**
* Iterates by group duration
*/
duration?: [number, number?];
/**
* Iterates by number of characters within the group
*/
length?: [number, number?];
};
/**
* Function for retrieving the best supported AVC (H.264) profile
* @param settings - Video settings
* @returns Supported AVC profile string, or undefined when none is supported
*/
export declare function getBestSupportedAvcProfile(settings: VideoSettings): Promise<string | undefined>;
/**
* Defines the properties of a glow
*/
export declare interface Glow {
/**
* The color of the glow
*/
color?: hex;
/**
* The radius of the glow
*/
radius?: number;
/**
* The intensity of the glow
*/
intensity?: number;
/**
* The opacity of the glow
*/
opacity?: number;
}
/**
* Defines the properties of a gradient
*/
export declare interface Gradient {
/**
* The type of gradient to use
*/
type: GradientType;
/**
* The stops of the gradient
*/
stops: GradientStop[];
}
/**
* Defines the properties of a gradient stop
*/
export declare interface GradientStop {
/**
* The offset of the gradient stop along the gradient
* (presumably in the 0-1 range — confirm against the renderer)
*/
offset: number;
/**
* The color of the gradient stop
*/
color: string;
}
/**
* Defines the type of gradient to use
*/
export declare type GradientType = 'linear' | 'radial';
/**
* Group an array of objects by the specified key
* @param arr - Items to group
* @param key - Property whose value becomes the group key
* @returns Record mapping each distinct key value to its items
*/
export declare function groupBy<T extends {}, K extends keyof T>(arr: T[], key: K): Record<T[K], T[]>;
/**
* Caption preset that renders captions in multiple colors.
* NOTE(review): colors are presumably cycled across
* words/segments — confirm against the implementation.
*/
export declare class GuineaCaptionPreset extends CaptionPreset {
/**
* The colors available to this preset
*/
colors: hex[];
constructor(config?: MultiColorCaptionPresetConfig);
/**
* Apply the preset to the given layer using the transcript,
* offset in time by the provided delay
*/
apply(layer: Layer, transcript: Transcript, delay: Timestamp): Promise<void>;
/**
* Split a word group into text segments and the
* corresponding words per segment
*/
protected splitSequence(sequence: WordGroup): {
segments: string[];
words: Word[][];
};
}
/**
* Defines a color hex value
* @example '#FF0000'
*/
export declare type hex = `#${string}`;
/**
* Combine a hex color with an opacity into a single hex value.
* NOTE(review): presumably encodes the opacity as an alpha
* channel suffix — confirm against the implementation.
*/
export declare function hexWithOpacity(color?: hex, opacity?: number): hex;
/**
* Color in HSL representation (hue, saturation, lightness).
*/
export declare type HSL = {
h: number;
s: number;
l: number;
};
/**
* Clip that renders an html document to the canvas
* through an image element.
*/
export declare class HtmlClip extends HtmlClip_base {
/**
* Whether the clip keeps its source aspect ratio —
* TODO confirm consumer semantics.
*/
_keepAspectRatio: boolean;
/**
* Discriminator identifying this clip type
*/
readonly type = "html";
/**
* The layer this clip is attached to, if any
*/
layer?: Layer<HtmlClip>;
/**
* The html source backing this clip
*/
source: HtmlSource;
/**
* Animation options applied to this clip
*/
animations: HtmlClipAnimationOptions;
/**
* Access to the html document that
* will be rendered to the canvas
*/
readonly element: HTMLImageElement;
constructor(input?: MediaInput | HtmlSource, props?: HtmlClipProps);
/**
* Perform asynchronous setup before rendering
*/
init(): Promise<void>;
/**
* Render the clip using the given video renderer
*/
render(renderer: VideoRenderer): void;
/**
* Refresh the clip; returns this for chaining
*/
refresh(): this;
}
declare const HtmlClip_base: {
new (...args: any[]): {
source?: {
height: number;
width: number;
aspectRatio: number;
} | undefined;
_height?: number