@coderline/alphatab

alphaTab is a music notation and guitar tablature rendering library

/** * Lists all types of note accentuations */ declare enum AccentuationType { /** * No accentuation */ None = 0, /** * Normal accentuation */ Normal = 1, /** * Heavy accentuation */ Heavy = 2, /** * Tenuto accentuation */ Tenuto = 3 } /** * Defines all possible accidentals for notes. */ declare enum AccidentalType { /** * No accidental */ None = 0, /** * Naturalize */ Natural = 1, /** * Sharp */ Sharp = 2, /** * Flat */ Flat = 3, /** * Natural for smear bends */ NaturalQuarterNoteUp = 4, /** * Sharp for smear bends */ SharpQuarterNoteUp = 5, /** * Flat for smear bends */ FlatQuarterNoteUp = 6, /** * Double Sharp, indicated by an 'x' */ DoubleSharp = 7, /** * Double Flat, indicated by 'bb' */ DoubleFlat = 8 } /** * Represents the information related to the beats actively being played now. */ declare class ActiveBeatsChangedEventArgs { /** * The currently active beats across all tracks and voices. */ activeBeats: Beat[]; constructor(activeBeats: Beat[]); } /** * This is the main synthesizer component which can be used to * play a {@link MidiFile} via a {@link ISynthOutput}. */ declare class AlphaSynth extends AlphaSynthBase { /** * Initializes a new instance of the {@link AlphaSynth} class. * @param output The output to use for playing the generated samples. */ constructor(output: ISynthOutput, bufferTimeInMilliseconds: number); /** * Creates a new audio exporter, initialized with the given data. * @param options The export options to use. The track volume and transposition pitch lists must be filled with midi channels. * @param midi The midi file to use. * @param syncPoints The sync points to use. * @param mainTranspositionPitches The initial transposition pitches to apply. */ exportAudio(options: AudioExportOptions, midi: MidiFile, syncPoints: BackingTrackSyncPoint[], mainTranspositionPitches: Map<number, number>): IAlphaSynthAudioExporter; } /** * This class implements a HTML5 Web Audio API based audio output device * for alphaSynth. It can be controlled via a JS API. * @target web */ declare class AlphaSynthAudioWorkletOutput extends AlphaSynthWebAudioOutputBase { private _worklet; private _bufferTimeInMilliseconds; private readonly _settings; constructor(settings: Settings); open(bufferTimeInMilliseconds: number): void; play(): void; private handleMessage; pause(): void; addSamples(f: Float32Array): void; resetSamples(): void; } /** * This is the base class for synthesizer components which can be used to * play a {@link MidiFile} via a {@link ISynthOutput}.
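 *
 * The example below is an illustrative sketch (added by the editor, not part of the upstream documentation) showing how the
 * members declared on this class are typically driven from application code. It assumes a high-level `AlphaTabApi` instance
 * whose {@link AlphaTabApiBase.player} property exposes the underlying synthesizer, and that the event emitters provide the
 * `on` method used in the other examples of this file.
 * @example
 * JavaScript
 * ```js
 * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab'));
 * const synth = api.player;            // IAlphaSynth instance backed by this class
 * if (synth) {
 *   synth.masterVolume = 0.8;          // volume range is 0-1
 *   synth.playbackSpeed = 0.75;        // play back at 75% speed
 *   synth.isLooping = true;
 *   synth.finished.on(() => console.log('playback finished'));
 *   synth.playPause();
 * }
 * ```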
*/ declare class AlphaSynthBase implements IAlphaSynth { protected sequencer: MidiFileSequencer; protected synthesizer: IAudioSampleSynthesizer; protected isSoundFontLoaded: boolean; private _isMidiLoaded; private _tickPosition; private _timePosition; private _metronomeVolume; private _countInVolume; protected _playedEventsQueue: Queue<SynthEvent>; protected _midiEventsPlayedFilter: Set<MidiEventType>; private _notPlayedSamples; private _synthStopping; private _output; get output(): ISynthOutput; isReady: boolean; get isReadyForPlayback(): boolean; state: PlayerState; get logLevel(): LogLevel; set logLevel(value: LogLevel); get masterVolume(): number; set masterVolume(value: number); protected updateMasterVolume(value: number): void; get metronomeVolume(): number; set metronomeVolume(value: number); get countInVolume(): number; set countInVolume(value: number); get midiEventsPlayedFilter(): MidiEventType[]; set midiEventsPlayedFilter(value: MidiEventType[]); get playbackSpeed(): number; set playbackSpeed(value: number); protected updatePlaybackSpeed(value: number): void; get tickPosition(): number; set tickPosition(value: number); get timePosition(): number; set timePosition(value: number); get playbackRange(): PlaybackRange | null; set playbackRange(value: PlaybackRange | null); get isLooping(): boolean; set isLooping(value: boolean); destroy(): void; /** * Initializes a new instance of the {@link AlphaSynthBase} class. * @param output The output to use for playing the generated samples. */ constructor(output: ISynthOutput, synthesizer: IAudioSampleSynthesizer, bufferTimeInMilliseconds: number); protected onSampleRequest(): void; play(): boolean; private playInternal; pause(): void; playPause(): void; stop(): void; playOneTimeMidiFile(midi: MidiFile): void; resetSoundFonts(): void; private _loadedSoundFonts; loadSoundFont(data: Uint8Array, append: boolean): void; private checkReadyForPlayback; /** * Loads the given midi file for playback. * @param midi The midi file to load */ loadMidiFile(midi: MidiFile): void; applyTranspositionPitches(transpositionPitches: Map<number, number>): void; setChannelTranspositionPitch(channel: number, semitones: number): void; setChannelMute(channel: number, mute: boolean): void; resetChannelStates(): void; setChannelSolo(channel: number, solo: boolean): void; setChannelVolume(channel: number, volume: number): void; private onSamplesPlayed; protected checkForFinish(): void; private stopOneTimeMidi; protected updateTimePosition(timePosition: number, isSeek: boolean): void; readonly ready: IEventEmitter; readonly readyForPlayback: IEventEmitter; readonly finished: IEventEmitter; readonly soundFontLoaded: IEventEmitter; readonly soundFontLoadFailed: IEventEmitterOfT<Error>; readonly midiLoaded: IEventEmitterOfT<PositionChangedEventArgs>; readonly midiLoadFailed: IEventEmitterOfT<Error>; readonly stateChanged: IEventEmitterOfT<PlayerStateChangedEventArgs>; readonly positionChanged: IEventEmitterOfT<PositionChangedEventArgs>; readonly midiEventsPlayed: IEventEmitterOfT<MidiEventsPlayedEventArgs>; readonly playbackRangeChanged: IEventEmitterOfT<PlaybackRangeChangedEventArgs>; /* Excluded from this release type: hasSamplesForProgram */ /* Excluded from this release type: hasSamplesForPercussion */ loadBackingTrack(_score: Score): void; updateSyncPoints(_syncPoints: BackingTrackSyncPoint[]): void; } /** * This implementation of the {@link IMidiFileHandler} * generates a {@link MidiFile} object which can be used in AlphaSynth for playback. 
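 *
 * The following is an illustrative sketch (added by the editor, not from the upstream documentation) of filling a
 * {@link MidiFile} through this handler and playing it once. The `alphaTab.midi` namespace paths and the assumption of
 * 960 midi ticks per quarter note are not confirmed by this file and may differ.
 * @example
 * JavaScript
 * ```js
 * const midi = new alphaTab.midi.MidiFile();
 * const handler = new alphaTab.midi.AlphaSynthMidiFileHandler(midi);
 * handler.addTimeSignature(0, 4, 4);
 * handler.addTempo(0, 120);
 * handler.addProgramChange(0, 0, 0, 25);  // track 0, tick 0, channel 0, steel guitar
 * handler.addNote(0, 0, 960, 64, 127, 0); // one quarter note (E4) at full velocity on channel 0
 * handler.finishTrack(0, 960);
 * api.player?.playOneTimeMidiFile(midi);  // playOneTimeMidiFile is declared on the synthesizer classes above
 * ```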
*/ declare class AlphaSynthMidiFileHandler implements IMidiFileHandler { private _midiFile; private _smf1Mode; /** * Initializes a new instance of the {@link AlphaSynthMidiFileHandler} class. * @param midiFile The midi file. * @param smf1Mode Whether to generate a SMF1 compatible midi file. This might break multi note bends. */ constructor(midiFile: MidiFile, smf1Mode?: boolean); addTimeSignature(tick: number, timeSignatureNumerator: number, timeSignatureDenominator: number): void; addRest(track: number, tick: number, channel: number): void; addNote(track: number, start: number, length: number, key: number, velocity: number, channel: number): void; private static fixValue; addControlChange(track: number, tick: number, channel: number, controller: ControllerType, value: number): void; addProgramChange(track: number, tick: number, channel: number, program: number): void; addTempo(tick: number, tempo: number): void; addBend(track: number, tick: number, channel: number, value: number): void; addNoteBend(track: number, tick: number, channel: number, key: number, value: number): void; finishTrack(track: number, tick: number): void; } /** * This class implements a HTML5 Web Audio API based audio output device * for alphaSynth using the legacy ScriptProcessor node. * @target web */ declare class AlphaSynthScriptProcessorOutput extends AlphaSynthWebAudioOutputBase { private _audioNode; private _circularBuffer; private _bufferCount; private _requestedBufferCount; open(bufferTimeInMilliseconds: number): void; play(): void; pause(): void; addSamples(f: Float32Array): void; resetSamples(): void; private requestBuffers; private _outputBuffer; private generateSound; } /** * @target web */ declare abstract class AlphaSynthWebAudioOutputBase implements ISynthOutput { protected static readonly BufferSize: number; protected static readonly PreferredSampleRate: number; protected _context: AudioContext | null; protected _buffer: AudioBuffer | null; protected _source: AudioBufferSourceNode | null; private _resumeHandler?; get sampleRate(): number; activate(resumedCallback?: () => void): void; private patchIosSampleRate; open(bufferTimeInMilliseconds: number): void; private registerResumeHandler; private unregisterResumeHandler; play(): void; pause(): void; destroy(): void; abstract addSamples(f: Float32Array): void; abstract resetSamples(): void; readonly ready: IEventEmitter; readonly samplesPlayed: IEventEmitterOfT<number>; readonly sampleRequest: IEventEmitter; protected onSamplesPlayed(numberOfSamples: number): void; protected onSampleRequest(): void; protected onReady(): void; enumerateOutputDevices(): Promise<ISynthOutputDevice[]>; setOutputDevice(device: ISynthOutputDevice | null): Promise<void>; getOutputDevice(): Promise<ISynthOutputDevice | null>; } /** * a WebWorker based alphaSynth which uses the given player as output. 
* @target web */ declare class AlphaSynthWebWorkerApi implements IAlphaSynth { private _synth; private _output; private _workerIsReadyForPlayback; private _workerIsReady; private _outputIsReady; private _state; private _masterVolume; private _metronomeVolume; private _countInVolume; private _playbackSpeed; private _tickPosition; private _timePosition; private _isLooping; private _playbackRange; private _midiEventsPlayedFilter; get output(): ISynthOutput; get isReady(): boolean; get isReadyForPlayback(): boolean; get state(): PlayerState; get logLevel(): LogLevel; get worker(): Worker; set logLevel(value: LogLevel); get masterVolume(): number; set masterVolume(value: number); get metronomeVolume(): number; set metronomeVolume(value: number); get countInVolume(): number; set countInVolume(value: number); get midiEventsPlayedFilter(): MidiEventType[]; set midiEventsPlayedFilter(value: MidiEventType[]); get playbackSpeed(): number; set playbackSpeed(value: number); get tickPosition(): number; set tickPosition(value: number); get timePosition(): number; set timePosition(value: number); get isLooping(): boolean; set isLooping(value: boolean); get playbackRange(): PlaybackRange | null; set playbackRange(value: PlaybackRange | null); constructor(player: ISynthOutput, settings: Settings); destroy(): void; play(): boolean; pause(): void; playPause(): void; stop(): void; playOneTimeMidiFile(midi: MidiFile): void; loadSoundFont(data: Uint8Array, append: boolean): void; resetSoundFonts(): void; loadMidiFile(midi: MidiFile): void; applyTranspositionPitches(transpositionPitches: Map<number, number>): void; setChannelTranspositionPitch(channel: number, semitones: number): void; setChannelMute(channel: number, mute: boolean): void; resetChannelStates(): void; setChannelSolo(channel: number, solo: boolean): void; setChannelVolume(channel: number, volume: number): void; handleWorkerMessage(e: MessageEvent): void; private checkReady; private checkReadyForPlayback; readonly ready: IEventEmitter; readonly readyForPlayback: IEventEmitter; readonly finished: IEventEmitter; readonly soundFontLoaded: IEventEmitter; readonly soundFontLoadFailed: IEventEmitterOfT<Error>; readonly midiLoaded: IEventEmitterOfT<PositionChangedEventArgs>; readonly midiLoadFailed: IEventEmitterOfT<Error>; readonly stateChanged: IEventEmitterOfT<PlayerStateChangedEventArgs>; readonly positionChanged: IEventEmitterOfT<PositionChangedEventArgs>; readonly midiEventsPlayed: IEventEmitterOfT<MidiEventsPlayedEventArgs>; readonly playbackRangeChanged: IEventEmitterOfT<PlaybackRangeChangedEventArgs>; onOutputSampleRequest(): void; onOutputSamplesPlayed(samples: number): void; private onOutputReady; loadBackingTrack(_score: Score): void; updateSyncPoints(_syncPoints: BackingTrackSyncPoint[]): void; } /** * @target web */ export declare class AlphaTabApi extends AlphaTabApiBase<SettingsJson | Settings> { /** * Initializes a new instance of the {@link AlphaTabApi} class. * @param element The HTML element into which alphaTab should be initialized. * @param settings The settings to use. * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab'), { display: { scale: 1.2 }}); * ``` */ constructor(element: HTMLElement, options: SettingsJson | Settings); /** * @inheritdoc */ tex(tex: string, tracks?: number[]): void; /** * Opens a popup window with the rendered music notation for printing. * @param width An optional custom width as CSS width that should be used. 
Best is to use a CSS width that is suitable for your preferred page size. * @param additionalSettings An optional parameter to specify additional setting values which should be respected during printing ({@since 1.2.0}) * @remarks * Opens a popup window with the rendered music notation for printing. The default display of alphaTab in the browser is not very * suitable for printing. The items are lazy loaded, the width can be dynamic, and the scale might be better suitable for screens. * This function opens a popup window which is filled with a by-default A4 optimized view of the rendered score: * * * Lazy loading is disabled * * The scale is reduced to 0.8 * * The stretch force is reduced to 0.8 * * The width is optimized to A4. Portrait if the page-layout is used, landscape if the horizontal-layout is used. * * @category Methods - Core * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.print(); * api.print(undefined, { display: { barsPerRow: 5 } }); * ``` */ print(width?: string, additionalSettings?: unknown): void; /** * Generates an SMF1.0 file and downloads it * @remarks * Generates a SMF1.0 compliant MIDI file of the currently loaded song and starts the download of it. * Please be aware that SMF1.0 does not support bends per note which might result in wrong bend effects * in case multiple bends are applied on the same beat (e.g. two notes bending or vibrato + bends). * * @category Methods - Core * @since 1.3.0 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.downloadMidi(); * ``` */ downloadMidi(format?: MidiFileFormat): void; /** * @inheritdoc */ changeTrackMute(tracks: Track[], mute: boolean): void; /** * @inheritdoc */ changeTrackSolo(tracks: Track[], solo: boolean): void; /** * @inheritdoc */ changeTrackVolume(tracks: Track[], volume: number): void; private trackIndexesToTracks; /** * This event is fired when the SoundFont is being loaded. * @remarks * This event is fired when the SoundFont is being loaded and reports the progress accordingly. * * @eventProperty * @category Events - Player * @since 0.9.4 * * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.soundFontLoad.on((e) => { * updateProgress(e.loaded, e.total); * }); * ``` */ soundFontLoad: IEventEmitterOfT<ProgressEventArgs>; /** * Triggers a load of the soundfont from the given URL. * @param url The URL from which to load the soundfont * @param append Whether to fully replace or append the data from the given soundfont. * @category Methods - Player * @since 0.9.4 */ loadSoundFontFromUrl(url: string, append: boolean): void; } /** * This class represents the public API of alphaTab and provides all logic to display * a music sheet in any UI using the given {@link IUiFacade} * @param <TSettings> The UI object holding the settings. * @csharp_public */ export declare class AlphaTabApiBase<TSettings> { private _startTime; private _trackIndexes; private _trackIndexLookup; private _isDestroyed; private _score; private _tracks; private _actualPlayerMode; private _player; private _renderer; /** * The actual player mode which is currently active. * @remarks * Allows determining whether a backing track or the synthesizer is active in case automatic detection is enabled. 
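 *
 * A minimal sketch (added by the editor for illustration; the concrete {@link PlayerMode} member names used here are an
 * assumption and should be checked against the PlayerMode enum):
 * @example
 * JavaScript
 * ```js
 * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab'));
 * console.log('resolved player mode', api.actualPlayerMode);
 * if (api.actualPlayerMode === alphaTab.PlayerMode.EnabledBackingTrack) {
 *   console.log('the embedded backing track audio is used for playback');
 * }
 * ```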
* @category Properties - Player * @since 1.6.0 */ get actualPlayerMode(): PlayerMode; /** * The UI facade used for interacting with the user interface (like the browser). * @remarks * The implementation depends on the platform alphaTab is running in (e.g. the web version in the browser, WPF in .net etc.) * @category Properties - Core * @since 0.9.4 */ readonly uiFacade: IUiFacade<TSettings>; /** * The UI container that holds the whole alphaTab control. * @remarks * Gets the UI container that represents the element on which alphaTab was initialized. Note that this is not the raw instance, but a UI framework specific wrapper for alphaTab. * @category Properties - Core * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * const container = api.container; * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * var container = api.Container; * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * val container = api.container; * ``` */ readonly container: IContainer; /** * The score renderer used for rendering the music sheet. * @remarks * This is the low-level API responsible for the actual rendering engine. * Gets access to the underling {@link IScoreRenderer} that is used for the rendering. * * @category Properties - Core * @since 0.9.4 */ get renderer(): IScoreRenderer; /** * The score holding all information about the song being rendered * @category Properties - Core * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * updateScoreInfo(api.score); * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * UpdateScoreInfo(api.Score); * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * updateScoreInfo(api.score) * ``` */ get score(): Score | null; /** * The settings that are used for rendering the music notation. * @remarks * Gets access to the underling {@link Settings} object that is currently used by alphaTab. * * @category Properties - Core * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * showSettingsModal(api.settings); * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * ShowSettingsDialog(api.Settings); * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * showSettingsDialog(api.settings) * ``` */ settings: Settings; /** * The list of the tracks that are currently rendered. * * @category Properties - Core * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * highlightCurrentTracksInTrackSelector(api.tracks); * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * HighlightCurrentTracksInTrackSelector(api.Tracks); * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * highlightCurrentTracksInTrackSelector(api.tracks) * ``` */ get tracks(): Track[]; /** * The UI container that will hold all rendered results. * @since 0.9.4 * @category Properties - Core */ readonly canvasElement: IContainer; /** * Initializes a new instance of the {@link AlphaTabApiBase} class. * @param uiFacade The UI facade to use for interacting with the user interface. * @param settings The UI settings object to use for loading the settings. 
*/ constructor(uiFacade: IUiFacade<TSettings>, settings: TSettings); private setupPlayerWrapper; /** * Destroys the alphaTab control and restores the initial state of the UI. * @remarks * This function destroys the alphaTab control and tries to restore the initial state of the UI. This might be useful if * your website is quite dynamic and you need to uninitialize alphaTab from an element again. After destroying alphaTab * it cannot be used anymore. Any further usage leads to unexpected behavior. * * @category Methods - Core * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.destroy(); * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * api.Destroy(); * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * api.destroy() * ``` */ destroy(): void; /** * Applies any changes that were done to the settings object. * @remarks * It also informs the {@link renderer} about any new values to consider. * By default alphaTab will not trigger any re-rendering or settings update just because the settings object itself was changed. This method must be called * to trigger an update of the settings in all components. Then a re-rendering can be initiated using the {@link render} method. * * @category Methods - Core * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.settings.display.scale = 2.0; * api.updateSettings(); * api.render(); * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * * api.Settings.Display.Scale = 2.0; * api.UpdateSettings(); * api.Render(); * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * * api.settings.display.scale = 2.0 * api.updateSettings() * api.render() * ``` */ updateSettings(): void; private updateRenderer; /** * Initiates a load of the score using the given data. * @returns true if the data object is supported and a load was initiated, otherwise false * @param scoreData The data container supported by {@link IUiFacade}. The supported types depend on the platform: * * * An `alphaTab.model.Score` instance (all platforms) * * An `ArrayBuffer` or `Uint8Array` containing one of the supported file formats (all platforms, native byte array or input streams on other platforms) * * A URL from which to download the binary data of one of the supported file formats (browser only) * * @param trackIndexes The indexes of the tracks from the song that should be rendered. If not provided, the first track of the * song will be shown. * @category Methods - Player * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.load('/assets/MyFile.gp'); * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * api.Load(System.IO.File.OpenRead("MyFile.gp")); * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * contentResolver.openInputStream(uri).use { * api.load(it) * } * ``` */ load(scoreData: unknown, trackIndexes?: number[]): boolean; /** * Initiates a rendering of the given score. * @param score The score containing the tracks to be rendered. * @param trackIndexes The indexes of the tracks from the song that should be rendered. If not provided, the first track of the * song will be shown.
* * @category Methods - Core * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.renderScore(generateScore(), [2, 3]); * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * api.RenderScore(GenerateScore(), new double[] { 2, 3 }); * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * api.renderScore(generateScore(), alphaTab.collections.DoubleList(2, 3)); * ``` */ renderScore(score: Score, trackIndexes?: number[]): void; /** * Renders the given list of tracks. * @param tracks The tracks to render. They must all belong to the same score. * * @category Methods - Core * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.renderTracks([api.score.tracks[0], api.score.tracks[1]]); * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * api.RenderTracks(new []{ * api.Score.Tracks[2], * api.Score.Tracks[3] * }); * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * api.renderTracks(alphaTab.collections.List( * api.score.tracks[2], * api.score.tracks[3] * )) * ``` */ renderTracks(tracks: Track[]): void; private internalRenderTracks; /* Excluded from this release type: triggerResize */ private appendRenderResult; private updateRenderResult; /** * Tells alphaTab to render the given alphaTex. * @param tex The alphaTex code to render. * @param tracks If set, the given tracks will be rendered, otherwise the first track only will be rendered. * @category Methods - Core * @since 0.9.4 * * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.tex("\\title 'Test' . 3.3.4"); * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * api.Tex("\\title 'Test' . 3.3.4"); * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * api.tex("\\title 'Test' . 3.3.4"); * ``` */ tex(tex: string, tracks?: number[]): void; /** * Triggers a load of the soundfont from the given data. * @remarks * AlphaTab only supports SoundFont2 and SoundFont3 {@since 1.4.0} encoded soundfonts for loading. To load a soundfont the player must be enabled in advance. * * @param data The data object to decode. The supported data types depend on the platform. * * * An `ArrayBuffer` or `Uint8Array` (all platforms, native byte array or input streams on other platforms) * * A URL from which to download the binary data of one of the supported file formats (browser only) * * @param append Whether to fully replace or append the data from the given soundfont. * @returns `true` if the passed in object is a supported format and loading was initiated, otherwise `false`. * * @category Methods - Player * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.loadSoundFont('/assets/MyFile.sf2'); * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * api.LoadSoundFont(System.IO.File.OpenRead("MyFile.sf2")); * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * contentResolver.openInputStream(uri).use { * api.loadSoundFont(it) * } * ``` */ loadSoundFont(data: unknown, append?: boolean): boolean; /** * Unloads all presets from previously loaded SoundFonts. * @remarks * This function resets the player internally to not have any SoundFont loaded anymore.
This allows you to reduce the memory usage of the page * if multiple partial SoundFonts are loaded via `loadSoundFont(..., true)`. Depending on the workflow you might also just want to use `loadSoundFont(..., false)` once * instead of unloading the previous SoundFonts. * * @category Methods - Player * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.loadSoundFont('/assets/guitars.sf2', true); * api.loadSoundFont('/assets/pianos.sf2', true); * // .. * api.resetSoundFonts(); * api.loadSoundFont('/assets/synths.sf2', true); * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * api.LoadSoundFont(System.IO.File.OpenRead("guitars.sf2"), true); * api.LoadSoundFont(System.IO.File.OpenRead("pianos.sf2"), true); * ... * api.ResetSoundFonts(); * api.LoadSoundFont(System.IO.File.OpenRead("synths.sf2"), true); * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * api.loadSoundFont(readResource("guitars.sf2"), true) * api.loadSoundFont(readResource("pianos.sf2"), true) * ... * api.resetSoundFonts() * api.loadSoundFont(readResource("synths.sf2"), true) * ``` */ resetSoundFonts(): void; /** * Initiates a re-rendering of the current setup. * @remarks * If rendering is not yet possible, it will be deferred until the UI changes to be ready for rendering. * * @category Methods - Core * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.render(); * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * api.Render(); * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * api.render() * ``` */ render(): void; private _tickCache; /** * The tick cache allowing lookup of midi ticks to beats. * @remarks * Gets the tick cache allowing lookup of midi ticks to beats. If the player is enabled, a midi file will be generated * for the loaded {@link Score} for later playback. During this generation this tick cache is filled with the * exact midi ticks when beats are played. * * The {@link MidiTickLookup.findBeat} method allows a lookup of the beat related to a given input midi tick. * * @category Properties - Player * @since 1.2.3 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * const lookupResult = api.tickCache.findBeat(new Set([0, 1]), 100); * const currentBeat = lookupResult?.currentBeat; * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * var lookupResult = api.TickCache.FindBeat(new AlphaTab.Core.EcmaScript.Set(0, 1), 100); * var currentBeat = lookupResult?.CurrentBeat; * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * val lookupResult = api.tickCache.findBeat(alphaTab.core.ecmaScript.Set(0, 1), 100) * val currentBeat = lookupResult?.currentBeat * ``` */ get tickCache(): MidiTickLookup | null; /** * The bounds lookup providing access to the position and size of the rendered notation elements. * @remarks * In older versions of alphaTab the `boundsLookup` was accessed via {@link IScoreRenderer.boundsLookup} on {@link renderer}. * * After the rendering completed alphaTab exposes via this lookup the location of the individual * notation elements. The lookup provides fast access to the bars and beats at a given location. * If the {@link CoreSettings.includeNoteBounds} option was activated, the location of the individual notes can also be obtained.
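 *
 * The snippet below is an illustrative sketch (added by the editor, not from the upstream documentation); it assumes rendering
 * has already completed and that the property names of the hierarchy described below (`staffSystems`, `visualBounds` with
 * `x`/`y` coordinates) match the runtime `BoundsLookup` object.
 * @example
 * JavaScript
 * ```js
 * const lookup = api.boundsLookup;
 * if (lookup) {
 *   for (const system of lookup.staffSystems) {
 *     console.log('staff system at', system.visualBounds.x, system.visualBounds.y);
 *   }
 * }
 * ```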
* * The property contains a `BoundsLookup` instance which follows a hierarchical structure that represents * the tree of rendered elements. * * The hierarchy is: `staffSystems > bars(1) > bars(2) > beats > notes` * * * `staffSystems` - Represent the bounds of the individual systems ("rows") where staves are contained. * * `bars(1)` - Represent the bounds of all bars for a particular master bar across all tracks. * * `bars(2)` - Represent the bounds of an individual bar of a track. The bounds on y-axis span the region of the staff and notes might exceed these bounds. * * `beats` - Represent the bounds of the individual beats within a track. The bounds on y-axis are equal to the bar bounds. * * `notes` - Represent the bounds of the individual note heads/numbers within a track. * * Each bounds hierarchy has a `visualBounds` and `realBounds`. * * * `visualBounds` - Represents the area covering all visible elements * * `realBounds` - Represents the actual bounds of the elements in this beat including whitespace areas. * * `noteHeadBounds` (only on `notes` level) - Represents the area of the note heads or number based on the staff * * You can check out the individual sizes and regions. * @category Properties - Core * @since 1.5.0 */ get boundsLookup(): BoundsLookup | null; /** * The alphaSynth player used for playback. * @remarks * This is the low-level API to the Midi synthesizer used for playback. * Gets access to the underlying {@link IAlphaSynth} that is used for the audio playback. * @category Properties - Player * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * setupPlayerEvents(api.player); * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * SetupPlayerEvents(api.Player); * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * setupPlayerEvents(api.player) * ``` */ get player(): IAlphaSynth | null; /** * Whether the player is ready for starting the playback. * @remarks * Gets whether the synthesizer is ready for playback. The player is ready for playback when * all background workers are started, the audio output is initialized, a soundfont is loaded, and a song was loaded into the player as a midi file. * @category Properties - Player * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * if(api.isReadyForPlayback) api.play(); * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * if(api.IsReadyForPlayback) api.Play(); * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * if (api.isReadyForPlayback) api.play() * ``` */ get isReadyForPlayback(): boolean; /** * The current player state. * @remarks * Gets the current player state, meaning whether it is paused or playing. * @category Properties - Player * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * if(api.playerState != alphaTab.synth.PlayerState.Playing) api.play(); * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * if(api.PlayerState != PlayerState.Playing) api.Play(); * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * if (api.playerState != PlayerState.Playing) api.play() * ``` */ get playerState(): PlayerState; /** * The current master volume as percentage (0-1). * @remarks * Gets or sets the master volume of the overall audio being played.
The volume is given as a percentage where 1.0 is the normal volume and 0.5 is only 50%. * @category Properties - Player * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.masterVolume = 0.5; * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * api.MasterVolume = 0.5; * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * api.masterVolume = 0.5 * ``` */ get masterVolume(): number; set masterVolume(value: number); /** * The metronome volume as percentage (0-1). * @remarks * Gets or sets the volume of the metronome. By default the metronome is disabled but can be enabled by setting the volume to a value greater than zero. * @category Properties - Player * @defaultValue `0` * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.metronomeVolume = 0.5; * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * api.MetronomeVolume = 0.5; * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * api.metronomeVolume = 0.5 * ``` */ get metronomeVolume(): number; set metronomeVolume(value: number); /** * The volume of the count-in metronome ticks. * @remarks * Gets or sets the volume of the metronome during the count-in of the song. By default the count-in is disabled but can be enabled by setting the volume to a value greater than zero. * @category Properties - Player * @since 1.1.0 * @defaultValue `0` * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.countInVolume = 0.5; * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * api.CountInVolume = 0.5; * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * api.countInVolume = 0.5 * ``` */ get countInVolume(): number; set countInVolume(value: number); /** * The midi events which will trigger the `midiEventsPlayed` event. * @remarks * Gets or sets the midi events which will trigger the `midiEventsPlayed` event. With this filter set, alphaTab will signal the matching midi events as they are played by the synthesizer. This allows reacting to various low-level * audio playback elements like notes/rests played or metronome ticks. * * Refer to the [related guide](https://alphatab.net/docs/guides/handling-midi-events) to learn more about this feature.
* @defaultValue `[]` * @category Properties - Player * @since 1.2.0 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.midiEventsPlayedFilter = [alphaTab.midi.MidiEventType.AlphaTabMetronome]; * api.midiEventsPlayed.on(function(e) { * for(const midi of e.events) { * if(midi.isMetronome) { * console.log('Metronome tick ' + midi.metronomeNumerator); * } * } * }); * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * api.MidiEventsPlayedFilter = new MidiEventType[] { AlphaTab.Midi.MidiEventType.AlphaTabMetronome }; * api.MidiEventsPlayed.On(e => * { * foreach(var midi of e.events) * { * if(midi is AlphaTab.Midi.AlphaTabMetronomeEvent metronome) * { * Console.WriteLine("Metronome tick " + metronome.MetronomeNumerator); * } * } * }); * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...); * api.midiEventsPlayedFilter = alphaTab.collections.List<alphaTab.midi.MidiEventType>( alphaTab.midi.MidiEventType.AlphaTabMetronome ) * api.midiEventsPlayed.on { e -> * for (midi in e.events) { * if(midi instanceof alphaTab.midi.AlphaTabMetronomeEvent && midi.isMetronome) { * println("Metronome tick " + midi.tick); * } * } * } * ``` */ get midiEventsPlayedFilter(): MidiEventType[]; set midiEventsPlayedFilter(value: MidiEventType[]); /** * The position within the song in midi ticks. * @category Properties - Player * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.tickPosition = 4000; * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * api.TickPosition = 4000; * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * api.tickPosition = 4000 * ``` */ get tickPosition(): number; set tickPosition(value: number); /** * The position within the song in milliseconds * @category Properties - Player * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.timePosition = 4000; * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * api.TimePosition = 4000; * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * api.timePosition = 4000 * ``` */ get timePosition(): number; set timePosition(value: number); /** * The range of the song that should be played. * @remarks * Gets or sets the range of the song that should be played. The range is defined in midi ticks or the whole song is played if the range is set to null * @category Properties - Player * @defaultValue `null` * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.playbackRange = { startTick: 1000, endTick: 50000 }; * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * api.PlaybackRange = new PlaybackRange { StartTick = 1000, EndTick = 50000 }; * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * api.playbackRange = PlaybackRange.apply { * startTick = 1000 * endTick = 50000 * } * ``` */ get playbackRange(): PlaybackRange | null; set playbackRange(value: PlaybackRange | null); /** * The current playback speed as percentage * @remarks * Controls the current playback speed as percentual value. Normal speed is 1.0 (100%) and 0.5 would be 50%. 
* @category Properties - Player * @defaultValue `1` * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.playbackSpeed = 0.5; * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * api.PlaybackSpeed = 0.5; * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * api.playbackSpeed = 0.5 * ``` */ get playbackSpeed(): number; set playbackSpeed(value: number); /** * Whether the playback should automatically restart after it finished. * @remarks * This setting controls whether the playback should automatically restart after it has finished, creating a playback loop. * @category Properties - Player * @defaultValue `false` * @since 0.9.4 * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.isLooping = true; * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * api.IsLooping = true; * ``` * * @example * Android * ```kotlin * val api = AlphaTabApi<MyControl>(...) * api.isLooping = true * ``` */ get isLooping(): boolean; set isLooping(value: boolean); private destroyPlayer; /** * * @returns true if a new player was created, false if no player was created (includes destroy & reuse of the current one) */ private setupOrDestroyPlayer; /** * Re-creates the midi for the current score and loads it. * @remarks * This will cause the player to stop playback. Some setting changes require re-generation of the midi song. * @category Methods - Player * @since 1.6.0 */ loadMidiForScore(): void; /** * Triggers an update of the sync points for the current score after modification within the data model. * @category Methods - Player * @since 1.6.0 */ updateSyncPoints(): void; /** * Changes the volume of the given tracks. * @param tracks The tracks for which the volume should be changed. * @param volume The volume to set for all tracks in percent (0-1) * * @remarks * This will result in a volume change of the primary and secondary midi channel that the track uses for playback. * If the track shares the channels with another track, all related tracks will be changed as they cannot be distinguished. * @category Methods - Player * @since 0.9.4 * * @example * JavaScript * ```js * const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab')); * api.changeTrackVolume([api.score.tracks[0], api.score.tracks[1]], 1.5); * api.changeTrackVolume([api.score.tracks[2]], 0.5); * ``` * * @example * C# * ```cs * var api = new AlphaTabApi<MyControl>(...); * api.ChangeTrackVolume(new Track[] { api.Score.Tracks[0], api.Score.Tracks[1] }, 1.5); * api.ChangeTrackVolume(new Track[] {