// tinygrad — TypeScript declaration file (.d.cts)
// A JavaScript/TypeScript autograd engine with operator overloading, inspired by micrograd
/**
 * Declaration surface of the `engine` module (re-exported as `engine` at the
 * bottom of this file). Exposes only the scalar autograd {@link Value} type.
 */
declare namespace engine_d_exports {
  export { Value };
}
/**
 * Stores a single scalar value and its gradient for automatic differentiation.
 *
 * Arithmetic methods accept either another `Value` or a plain `number` and
 * return a new `Value`, building the computation graph as a side effect.
 */
declare class Value {
  #private;
  /** The scalar payload of this node. */
  data: number;
  /**
   * Label of the operation that produced this node.
   * NOTE(review): exact labels (e.g. '+', '*') are not visible in this
   * declaration file — confirm against the engine implementation.
   */
  _op: string;
  /** Gradient of the final output w.r.t. this value, filled in by {@link backward}. */
  grad: number;
  /**
   * @param data     - Initial scalar value.
   * @param children - Graph predecessors of this node (defaults presumably to none).
   * @param _op      - Operation label for debugging/printing.
   */
  constructor(data: number, children?: Value[] | Set<Value>, _op?: string);
  /** Addition: returns a new `Value` for `this + other`. */
  add(other: Value | number): Value;
  /** Multiplication: returns a new `Value` for `this * other`. */
  mul(other: Value | number): Value;
  /** Exponentiation by a constant: returns a new `Value` for `this ** other`. Note the exponent must be a plain number, not a `Value`. */
  pow(other: number): Value;
  /** Negation: returns a new `Value` for `-this`. */
  neg(): Value;
  /** Subtraction: returns a new `Value` for `this - other`. */
  sub(other: Value | number): Value;
  /** Division: returns a new `Value` for `this / other`. */
  div(other: Value | number): Value;
  /** Rectified linear unit: returns a new `Value` for `max(0, this)`. */
  relu(): Value;
  /** Runs backpropagation from this node, populating `grad` on this node and its ancestors. */
  backward(): void;
  /** Human-readable representation of this node. */
  toString(): string;
}
/**
 * Declaration surface of the `nn` module (re-exported as `nn` at the bottom
 * of this file): micrograd-style neural-network building blocks.
 *
 * NOTE(review): the abstract base class `Module` is declared below but not
 * exported here, so consumers cannot name the common supertype of
 * `Neuron`/`Layer`/`MLP` — verify whether that omission is intentional in
 * the generating source.
 */
declare namespace nn_d_exports {
  export { Layer, MLP, Neuron };
}
/**
 * Abstract base for all neural-network components. Subclasses enumerate
 * their trainable scalars via {@link parameters}.
 */
declare abstract class Module {
  /** Resets `grad` to zero on every parameter returned by {@link parameters}. */
  zeroGrad(): void;
  /** All trainable `Value`s owned by this module (flattened). */
  abstract parameters(): Value[];
}
/**
 * A single neuron: weighted sum of inputs plus bias, with an optional
 * nonlinearity (presumably ReLU, per `Value.relu` — confirm in source).
 */
declare class Neuron extends Module {
  /** Input weights, one per input dimension (`nin` of them). */
  w: Value[];
  /** Bias term. */
  b: Value;
  /** Whether the nonlinearity is applied to the output. */
  nonlin: boolean;
  /**
   * @param nin    - Number of inputs this neuron accepts.
   * @param nonlin - Apply the nonlinearity (default not visible here — TODO confirm).
   */
  constructor(nin: number, nonlin?: boolean);
  /** Forward pass: expects `x.length === nin`; returns the neuron's scalar output. */
  call(x: Value[]): Value;
  /** The weights followed by the bias. */
  parameters(): Value[];
  /** Human-readable description of this neuron. */
  toString(): string;
}
/**
 * A fully-connected layer of {@link Neuron}s sharing the same input.
 */
declare class Layer extends Module {
  /** The layer's neurons (`nout` of them, each taking `nin` inputs). */
  neurons: Neuron[];
  /**
   * @param nin    - Number of inputs per neuron.
   * @param nout   - Number of neurons (output width).
   * @param nonlin - Forwarded to each neuron's nonlinearity flag.
   */
  constructor(nin: number, nout: number, nonlin?: boolean);
  /**
   * Forward pass over all neurons. The union return suggests a single
   * `Value` when the layer has one output and an array otherwise —
   * NOTE(review): confirm against the implementation.
   */
  call(x: Value[]): Value | Value[];
  /** Parameters of all neurons, flattened. */
  parameters(): Value[];
  /** Human-readable description of this layer. */
  toString(): string;
}
/**
 * Multi-layer perceptron: a stack of {@link Layer}s applied in sequence.
 */
declare class MLP extends Module {
  /** Layers in forward order. */
  layers: Layer[];
  /**
   * @param nin   - Width of the network's input.
   * @param nouts - Output width of each successive layer.
   */
  constructor(nin: number, nouts: number[]);
  /**
   * Forward pass through all layers; returns a single `Value` or an array
   * depending on the final layer's width (see {@link Layer.call}).
   */
  call(x: Value[]): Value | Value[];
  /** Parameters of all layers, flattened. */
  parameters(): Value[];
  /** Human-readable description of this network. */
  toString(): string;
}
//#endregion
export { engine_d_exports as engine, nn_d_exports as nn };
//# sourceMappingURL=index.d.cts.map