// federer: Experiments in asynchronous federated learning and decentralized learning
import { LearningRateScheduleOptions } from "../common";
/**
 * Represents a learning rate schedule.
 *
 * TensorFlow.js doesn't have built-in support for learning rate schedules, so
 * we implement our own.
 *
 * It is the role of the {@link ClientWorker} to build and use a learning rate
 * schedule, based on the {@link ClientStartOptions} it is given.
 */
export interface LearningRateSchedule {
    /**
     * Compute the decayed learning rate that should be used for round `round`.
     *
     * Note that TensorFlow usually uses the step number to compute the decayed
     * learning rate, but we use the round number. Doing so makes it possible to
     * decay as a function of global progress, not just of local progress.
     * Additionally, this syncs up the learning rates of all clients.
     */
    decayedLearningRate(round: number): number;
}
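/*
 * Example (illustrative only, not part of the declared API): application code
 * can satisfy the interface with any object exposing `decayedLearningRate`,
 * e.g. a hypothetical constant schedule used when no decay is wanted:
 *
 *   const constantSchedule: LearningRateSchedule = {
 *     // Same learning rate regardless of the round number.
 *     decayedLearningRate: (_round: number) => 0.01,
 *   };
 */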
export declare const LearningRateSchedule: {
    /** Get the schedule that corresponds to the given options. */
    get: (options: LearningRateScheduleOptions) => LearningRateSchedule | undefined;
};
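/*
 * Usage sketch (hypothetical calling code, e.g. inside a ClientWorker): the
 * factory may return `undefined` when the options request no schedule, so a
 * caller would typically fall back to a fixed learning rate. The `round`
 * value and the 0.01 fallback below are illustrative assumptions.
 *
 *   declare const options: LearningRateScheduleOptions;
 *   const schedule = LearningRateSchedule.get(options);
 *   const round = 5;
 *   const learningRate = schedule?.decayedLearningRate(round) ?? 0.01;
 */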
/**
 * Exponential learning rate decay.
 *
 * @see {@link ExponentialDecayOptions}
 */
export declare class ExponentialDecay {
    readonly initialLearningRate: number;
    readonly decayRounds: number;
    readonly decayRate: number;
    readonly stairCase: boolean;
    constructor(initialLearningRate: number, decayRounds?: number, decayRate?: number, stairCase?: boolean);
    decayedLearningRate(round: number): number;
}
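/*
 * The declaration does not spell out the decay formula; assuming it mirrors
 * TensorFlow's standard exponential decay, the rate at a given round would be
 *
 *   lr(round) = initialLearningRate * decayRate ^ (round / decayRounds)
 *
 * with `round / decayRounds` floored first when `stairCase` is true, so the
 * rate drops in discrete steps instead of continuously. A plain-TypeScript
 * sketch of that computation:
 *
 *   function exponentialDecay(
 *     initialLearningRate: number,
 *     decayRounds: number,
 *     decayRate: number,
 *     stairCase: boolean,
 *     round: number,
 *   ): number {
 *     let exponent = round / decayRounds;
 *     if (stairCase) exponent = Math.floor(exponent);
 *     return initialLearningRate * Math.pow(decayRate, exponent);
 *   }
 */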
//# sourceMappingURL=decay.d.ts.map