scalar-autograd
Scalar-based reverse-mode automatic differentiation in TypeScript.
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ValueActivation = void 0;
const Value_1 = require("./Value");
class ValueActivation {
    // relu(x) = max(0, x); local derivative is 1 where x > 0, else 0.
    static relu(x) {
        const r = Math.max(0, x.data);
        return Value_1.Value.make(r, x, null, (out) => () => {
            if (x.requiresGrad)
                x.grad += (x.data > 0 ? 1 : 0) * out.grad;
        }, `relu(${x.label})`);
    }
    // softplus(x) = log(1 + e^x); local derivative is sigmoid(x).
    static softplus(x) {
        const s = Math.log(1 + Math.exp(x.data));
        return Value_1.Value.make(s, x, null, (out) => () => {
            if (x.requiresGrad)
                x.grad += 1 / (1 + Math.exp(-x.data)) * out.grad;
        }, `softplus(${x.label})`);
    }
    // tanh(x); local derivative is 1 - tanh(x)^2.
    static tanh(x) {
        const t = Math.tanh(x.data);
        return Value_1.Value.make(t, x, null, (out) => () => {
            if (x.requiresGrad)
                x.grad += (1 - t ** 2) * out.grad;
        }, `tanh(${x.label})`);
    }
    // sigmoid(x) = 1 / (1 + e^-x); local derivative is s * (1 - s).
    static sigmoid(x) {
        const s = 1 / (1 + Math.exp(-x.data));
        return Value_1.Value.make(s, x, null, (out) => () => {
            if (x.requiresGrad)
                x.grad += s * (1 - s) * out.grad;
        }, `sigmoid(${x.label})`);
    }
}
exports.ValueActivation = ValueActivation;
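A minimal usage sketch, assuming the package's Value class exposes a plain constructor (new Value(data)) and a backward() method that runs the reverse-mode accumulation. Only the data, grad, requiresGrad, and label fields are confirmed by the code above; the constructor, backward(), and the "./ValueActivation" module path are assumptions for illustration.

    // Hypothetical usage sketch: `new Value(...)`, `y.backward()`, and the
    // module paths are assumed, not confirmed by the file above.
    const { Value } = require("./Value");
    const { ValueActivation } = require("./ValueActivation"); // assumed path

    const x = new Value(0.5);               // scalar input (assumed constructor)
    const y = ValueActivation.sigmoid(x);   // forward pass: y.data ≈ 0.6225

    y.backward();                           // assumed reverse-mode entry point
    console.log(x.grad);                    // sigmoid'(0.5) = s * (1 - s) ≈ 0.2350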