@astermind/astermind-elm
JavaScript Extreme Learning Machine (ELM) library for browser and Node.js.
Type declarations (TypeScript):
export type ActivationName = 'relu' | 'leakyrelu' | 'leaky-relu' | 'sigmoid' | 'tanh' | 'linear' | 'identity' | 'none' | 'gelu';
export declare class Activations {
    /** Rectified Linear Unit */
    static relu(x: number): number;
    /** Leaky ReLU with configurable slope for x < 0 (default 0.01) */
    static leakyRelu(x: number, alpha?: number): number;
    /** Logistic sigmoid */
    static sigmoid(x: number): number;
    /** Hyperbolic tangent */
    static tanh(x: number): number;
    /** Linear / identity activation */
    static linear(x: number): number;
    /**
     * GELU (Gaussian Error Linear Unit), tanh approximation:
     * 0.5 * x * (1 + tanh(√(2/π) * (x + 0.044715 * x^3)))
     */
    static gelu(x: number): number;
    /**
     * Softmax with numerical stability and optional temperature.
     * @param arr logits
     * @param temperature > 0; higher = flatter distribution
     */
    static softmax(arr: number[], temperature?: number): number[];
    /** d/dx ReLU */
    static dRelu(x: number): number;
    /** d/dx Leaky ReLU */
    static dLeakyRelu(x: number, alpha?: number): number;
    /** d/dx sigmoid = s(x) * (1 - s(x)), where s is the sigmoid */
    static dSigmoid(x: number): number;
    /** d/dx tanh = 1 - tanh(x)^2 */
    static dTanh(x: number): number;
    /** d/dx linear = 1 */
    static dLinear(_: number): number;
    /**
     * d/dx GELU (tanh approximation):
     * 0.5 * (1 + tanh(u)) + 0.5 * x * (1 - tanh(u)^2) * du/dx,
     * where u = k * (x + 0.044715 * x^3), du/dx = k * (1 + 0.134145 * x^2), k = √(2/π)
     */
    static dGelu(x: number): number;
    /** Apply an elementwise activation across a 2D matrix, returning a new matrix. */
    static apply(matrix: number[][], fn: (x: number) => number): number[][];
    /** Apply an elementwise derivative across a 2D matrix, returning a new matrix. */
    static applyDerivative(matrix: number[][], dfn: (x: number) => number): number[][];
    /**
     * Get an activation function by name. Case-insensitive.
     * For leaky ReLU, you can pass { alpha } to override the negative slope.
     */
    static get(name: string, opts?: {
        alpha?: number;
    }): (x: number) => number;
    /** Get a derivative function by name (mirrors get). */
    static getDerivative(name: string, opts?: {
        alpha?: number;
    }): (x: number) => number;
    /** Get both the forward and derivative functions together. */
    static getPair(name: string, opts?: {
        alpha?: number;
    }): {
        f: (x: number) => number;
        df: (x: number) => number;
    };
    /**
     * Given softmax probabilities p, returns the Jacobian J = diag(p) - p·pᵀ.
     * (Useful for analysis; not typically needed for ELM.)
     */
    static softmaxJacobian(p: number[]): number[][];
}
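
A minimal usage sketch, assuming Activations is importable from the package root exactly as declared above; the input values are illustrative:

import { Activations } from '@astermind/astermind-elm';

// Name lookup is case-insensitive; both 'leakyrelu' and 'leaky-relu' appear in ActivationName.
const leaky = Activations.get('leaky-relu', { alpha: 0.05 });
leaky(-2); // -0.1  (negative side scaled by alpha)
leaky(3);  // 3

// Forward and derivative under a single lookup.
const { f: gelu, df: dGelu } = Activations.getPair('gelu');
gelu(1);  // ≈ 0.841 under the tanh approximation
dGelu(1); // ≈ 1.083

// Elementwise application over a 2D matrix; a new matrix is returned.
const H = Activations.apply([[-1, 0], [2, -3]], Activations.relu);
// H = [[0, 0], [2, 0]]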
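
The temperature behaviour is easiest to see on a small example. The numbers below assume the common convention of dividing logits by the temperature before exponentiating, and a default temperature of 1:

const logits = [2, 1, 0];
Activations.softmax(logits);       // ≈ [0.665, 0.245, 0.090] at the default temperature
Activations.softmax(logits, 4);    // flatter: ≈ [0.419, 0.327, 0.254], drifting toward uniform
Activations.softmax(logits, 0.25); // sharper: ≈ [0.982, 0.018, 0.000], mass piles onto the max logit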
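
Since the comments above pin down both the GELU approximation and its derivative, a central finite difference makes a cheap consistency check; a sketch with an arbitrary test point and tolerance:

const x = 0.7;
const h = 1e-5;
const numeric = (Activations.gelu(x + h) - Activations.gelu(x - h)) / (2 * h);
console.log(Math.abs(numeric - Activations.dGelu(x)) < 1e-6); // expected: true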
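
Entrywise, J = diag(p) - p·pᵀ reads J[i][j] = p[i] * (δ[i][j] - p[j]), and every row sums to zero because the probabilities must keep summing to one; a sketch:

const p = Activations.softmax([1, 2, 3]);
const J = Activations.softmaxJacobian(p);
// Entrywise: J[i][j] === p[i] * ((i === j ? 1 : 0) - p[j])
const rowSum = J[0].reduce((a, b) => a + b, 0); // ≈ 0 up to floating-point error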