// ml-regression — Regression algorithms (bundled CommonJS build, JavaScript).
'use strict';
var mlRegressionBase = require('ml-regression-base');
var mlRegressionPolynomial = require('ml-regression-polynomial');
var mlRegressionSimpleLinear = require('ml-regression-simple-linear');
var mlRegressionExponential = require('ml-regression-exponential');
var mlRegressionPower = require('ml-regression-power');
var mlRegressionMultivariateLinear = require('ml-regression-multivariate-linear');
var Kernel = require('ml-kernel');
var mlMatrix = require('ml-matrix');
var mlRegressionTheilSen = require('ml-regression-theil-sen');
var mlRegressionRobustPolynomial = require('ml-regression-robust-polynomial');
/*
 * Function that calculates the potential fit in the form f(x) = A*x^M
 * with a given M and returns the A coefficient.
 *
 * @param {Vector} X - Vector of the x positions of the points.
 * @param {Vector} Y - Vector of the y positions of the points.
 * @param {Number} M - The exponent of the potential fit.
 * @return {Number} A - The A coefficient of the potential fit.
 */
class PotentialRegression extends mlRegressionBase.BaseRegression {
  /**
   * Potential (power-law) fit of the form f(x) = A * x^M for a fixed M.
   * @class
   * @param x - Independent variable values, or `true` when reloading a model.
   * @param y - Dependent variable values, or the serialized model when reloading.
   * @param M - Fixed exponent of the fit.
   */
  constructor(x, y, M) {
    super();
    if (x === true) {
      // Restoring a serialized model: `y` carries the JSON payload.
      this.A = y.A;
      this.M = y.M;
    } else {
      const n = x.length;
      if (n !== y.length) {
        throw new RangeError("input and output array have a different length");
      }
      // Fitting the single term x^M reduces to a one-coefficient
      // polynomial regression.
      const fit = new mlRegressionPolynomial.PolynomialRegression(x, y, [M]);
      this.A = fit.coefficients[0];
      this.M = M;
    }
  }

  _predict(x) {
    return this.A * x ** this.M;
  }

  toJSON() {
    return {
      name: "potentialRegression",
      A: this.A,
      M: this.M,
    };
  }

  toString(precision) {
    const a = mlRegressionBase.maybeToPrecision(this.A, precision);
    return `f(x) = ${a} * x^${this.M}`;
  }

  toLaTeX(precision) {
    const a = mlRegressionBase.maybeToPrecision(this.A, precision);
    // Negative exponents render as a fraction for readability.
    if (this.M >= 0) {
      return `f(x) = ${a}x^{${this.M}}`;
    }
    return `f(x) = \\frac{${a}}{x^{${-this.M}}}`;
  }

  static load(json) {
    if (json.name !== "potentialRegression") {
      throw new TypeError("not a potential regression model");
    }
    return new PotentialRegression(true, json);
  }
}
// Default options for KernelRidgeRegression (see class below).
const defaultOptions$1 = {
  lambda: 0.1, // ridge regularization strength
  kernelType: "gaussian",
  kernelOptions: {},
  computeCoefficient: false,
};
// Implements the Kernel ridge regression algorithm.
// http://www.ics.uci.edu/~welling/classnotes/papers_class/Kernel-Ridge.pdf
class KernelRidgeRegression extends mlRegressionBase.BaseRegression {
  /**
   * Kernel ridge regression: solves (K + lambda*I) * alpha = y, where K is
   * the kernel matrix of the training inputs.
   * @param inputs - Training inputs matrix, or `true` when reloading a model.
   * @param outputs - Training outputs, or the serialized model when reloading.
   * @param options - { lambda, kernelType, kernelOptions, computeCoefficient }.
   */
  constructor(inputs, outputs, options) {
    super();
    if (inputs === true) {
      // Restoring a serialized model: `outputs` carries the JSON payload.
      this.alpha = outputs.alpha;
      this.inputs = outputs.inputs;
      this.kernelType = outputs.kernelType;
      this.kernelOptions = outputs.kernelOptions;
      this.kernel = new Kernel(outputs.kernelType, outputs.kernelOptions);
      return;
    }
    inputs = mlMatrix.Matrix.checkMatrix(inputs);
    const opts = { ...defaultOptions$1, ...options };
    const kernel = new Kernel(opts.kernelType, opts.kernelOptions);
    // Regularized Gram matrix: K + lambda * I.
    const K = kernel.compute(inputs);
    const n = inputs.rows;
    K.add(mlMatrix.Matrix.eye(n, n).mul(opts.lambda));
    // Dual coefficients: alpha = (K + lambda*I)^-1 * y.
    this.alpha = mlMatrix.solve(K, outputs);
    this.inputs = inputs;
    this.kernelType = opts.kernelType;
    this.kernelOptions = opts.kernelOptions;
    this.kernel = kernel;
  }

  _predict(newInputs) {
    // Prediction is k(newInputs, trainingInputs) * alpha.
    const k = this.kernel.compute([newInputs], this.inputs);
    return k.mmul(this.alpha).getRow(0);
  }

  toJSON() {
    return {
      name: "kernelRidgeRegression",
      alpha: this.alpha,
      inputs: this.inputs,
      kernelType: this.kernelType,
      kernelOptions: this.kernelOptions,
    };
  }

  static load(json) {
    if (json.name !== "kernelRidgeRegression") {
      throw new TypeError("not a KRR model");
    }
    return new KernelRidgeRegression(true, json);
  }
}
// Default options for PolynomialFitRegression2D: total polynomial degree.
const defaultOptions = {
  order: 2,
};
// Implements a two-variable polynomial least-squares fit via an SVD-based
// pseudo-inverse. (The original bundle's comment here was a copy-paste of the
// kernel ridge regression comment above.)
class PolynomialFitRegression2D extends mlRegressionBase.BaseRegression {
  /**
   * Two-variable polynomial least-squares fit of total degree `order`:
   * f(x1, x2) = sum over i + j <= order of c[i][j] * x1^i * x2^j.
   * @param inputs - Matrix with n rows and 2 columns, or `true` when reloading.
   * @param outputs - Vector of n target values, or the serialized model.
   * @param options - { order }: total degree of the polynomial (default 2).
   */
  constructor(inputs, outputs, options = {}) {
    super();
    if (inputs === true) {
      // Restoring a serialized model: `outputs` carries the JSON payload.
      this.coefficients = mlMatrix.Matrix.columnVector(outputs.coefficients);
      this.order = outputs.order;
      if (outputs.r) {
        this.r = outputs.r;
        this.r2 = outputs.r2;
      }
      if (outputs.chi2) {
        this.chi2 = outputs.chi2;
      }
    } else {
      options = { ...defaultOptions, ...options };
      this.order = options.order;
      this.coefficients = [];
      this.X = inputs;
      this.y = outputs;
      this.train(this.X, this.y, options);
    }
  }
  /**
   * Fits the model given the data (X) and target values (y), using an
   * SVD-based pseudo-inverse on a scaled design matrix for numerical
   * stability. The fitted coefficients are stored in `this.coefficients`.
   * @param {Matrix} X - A matrix with n rows and 2 columns.
   * @param {Matrix} y - A column vector of the n target values.
   * @throws {RangeError} If X does not have 2 columns or X/y sizes disagree.
   * @throws {Error} If there are fewer points than coefficients to fit.
   */
  train(X, y) {
    if (!mlMatrix.Matrix.isMatrix(X)) X = new mlMatrix.Matrix(X);
    if (!mlMatrix.Matrix.isMatrix(y)) y = mlMatrix.Matrix.columnVector(y);
    if (y.rows !== X.rows) {
      y = y.transpose();
    }
    if (X.columns !== 2) {
      throw new RangeError(
        `You give X with ${X.columns} columns and it must be 2`,
      );
    }
    if (X.rows !== y.rows) {
      throw new RangeError("X and y must have the same rows");
    }
    let examples = X.rows;
    // Number of coefficients of a 2-variable polynomial of total degree
    // `order`: (order + 1)(order + 2) / 2.
    let coefficients = ((this.order + 2) * (this.order + 1)) / 2;
    if (examples < coefficients) {
      throw new Error(
        "Insufficient number of points to create regression model.",
      );
    }
    this.coefficients = new Array(coefficients);
    let x1 = X.getColumnVector(0);
    let x2 = X.getColumnVector(1);
    // Scale each variable into [-1, 1] to improve the conditioning of the
    // design matrix; the scaling is undone on the coefficients below.
    let scaleX1 = 1 / x1.clone().abs().max();
    let scaleX2 = 1 / x2.clone().abs().max();
    let scaleY = 1 / y.clone().abs().max();
    x1.mulColumn(0, scaleX1);
    x2.mulColumn(0, scaleX2);
    // Scale a copy of y so the caller's vector (and this.y) is not mutated
    // in place; the previous code scaled y itself as a side effect.
    y = y.clone();
    y.mulColumn(0, scaleY);
    // Design matrix: one column per monomial x1^i * x2^j with i + j <= order.
    let A = new mlMatrix.Matrix(examples, coefficients);
    let col = 0;
    for (let i = 0; i <= this.order; ++i) {
      let limit = this.order - i;
      for (let j = 0; j <= limit; ++j) {
        let result = powColVector(x1, i).mulColumnVector(powColVector(x2, j));
        A.setColumn(col, result);
        col++;
      }
    }
    // Least-squares solution via the SVD pseudo-inverse of A.
    let svd = new mlMatrix.SVD(A.transpose(), {
      computeLeftSingularVectors: true,
      computeRightSingularVectors: true,
      autoTranspose: false,
    });
    let qqs = mlMatrix.Matrix.rowVector(svd.diagonal);
    qqs = qqs.apply((i, j) => {
      // Invert singular values, dropping numerically-zero ones.
      if (qqs.get(i, j) >= 1e-15) qqs.set(i, j, 1 / qqs.get(i, j));
      else qqs.set(i, j, 0);
    });
    let qqs1 = mlMatrix.Matrix.zeros(examples, coefficients);
    for (let i = 0; i < coefficients; ++i) {
      qqs1.set(i, i, qqs.get(0, i));
    }
    qqs = qqs1;
    let U = svd.rightSingularVectors;
    let V = svd.leftSingularVectors;
    this.coefficients = V.mmul(qqs.transpose()).mmul(U.transpose()).mmul(y);
    // Undo the input/output scaling on the coefficients. The loop bound is
    // this.order so it mirrors the construction loop above (the previous
    // `i <= coefficients` bound was harmless — `limit` goes negative past
    // this.order — but misleading).
    col = 0;
    for (let i = 0; i <= this.order; ++i) {
      let limit = this.order - i;
      for (let j = 0; j <= limit; ++j) {
        this.coefficients.set(
          col,
          0,
          (this.coefficients.get(col, 0) * scaleX1 ** i * scaleX2 ** j) /
            scaleY,
        );
        col++;
      }
    }
  }
  /**
   * Evaluates the fitted polynomial at a single point.
   * @param {Array} newInputs - Two-element array [x1, x2].
   * @returns {number} The predicted value.
   */
  _predict(newInputs) {
    let x1 = newInputs[0];
    let x2 = newInputs[1];
    let y = 0;
    let column = 0;
    for (let i = 0; i <= this.order; i++) {
      for (let j = 0; j <= this.order - i; j++) {
        y += x1 ** i * x2 ** j * this.coefficients.get(column, 0);
        column++;
      }
    }
    return y;
  }
  toJSON() {
    return {
      name: "polyfit2D",
      order: this.order,
      coefficients: this.coefficients,
    };
  }
  static load(json) {
    if (json.name !== "polyfit2D") {
      throw new TypeError("not a polyfit2D model");
    }
    return new PolynomialFitRegression2D(true, json);
  }
}
/**
 * Raises every entry of a column vector to the given power, element-wise.
 * The input vector is left untouched; a new vector is returned.
 * @param x - Column vector (anything exposing clone/rows/get/set).
 * @param power - Exponent to apply to each entry.
 * @returns {Matrix} New column vector whose entries are x[i]^power.
 */
function powColVector(x, power) {
  const out = x.clone();
  const rows = x.rows;
  for (let row = 0; row < rows; row++) {
    const value = out.get(row, 0);
    out.set(row, 0, value ** power);
  }
  return out;
}
// Grouping of the non-linear regression models, exported below as NLR and
// NonLinearRegression.
const NLR = {
  PotentialRegression,
};
// Re-export the regression implementations from their standalone packages
// through getters, and export the classes defined in this bundle under both
// their full names and historical aliases (SLR, KRR, NLR, and the
// misspelled-but-kept PolinomialFitting2D).
Object.defineProperty(exports, "PolynomialRegression", {
  enumerable: true,
  get: function () { return mlRegressionPolynomial.PolynomialRegression; }
});
// SLR is a shorthand alias for SimpleLinearRegression.
Object.defineProperty(exports, "SLR", {
  enumerable: true,
  get: function () { return mlRegressionSimpleLinear.SimpleLinearRegression; }
});
Object.defineProperty(exports, "SimpleLinearRegression", {
  enumerable: true,
  get: function () { return mlRegressionSimpleLinear.SimpleLinearRegression; }
});
Object.defineProperty(exports, "ExponentialRegression", {
  enumerable: true,
  get: function () { return mlRegressionExponential.ExponentialRegression; }
});
Object.defineProperty(exports, "PowerRegression", {
  enumerable: true,
  get: function () { return mlRegressionPower.PowerRegression; }
});
// NOTE(review): this exports the module object itself, not a named class —
// presumably ml-regression-multivariate-linear exports the class directly.
exports.MultivariateLinearRegression = mlRegressionMultivariateLinear;
Object.defineProperty(exports, "TheilSenRegression", {
  enumerable: true,
  get: function () { return mlRegressionTheilSen.TheilSenRegression; }
});
Object.defineProperty(exports, "RobustPolynomialRegression", {
  enumerable: true,
  get: function () { return mlRegressionRobustPolynomial.RobustPolynomialRegression; }
});
// KRR is a shorthand alias for KernelRidgeRegression.
exports.KRR = KernelRidgeRegression;
exports.KernelRidgeRegression = KernelRidgeRegression;
exports.NLR = NLR;
exports.NonLinearRegression = NLR;
exports.PolinomialFitting2D = PolynomialFitRegression2D;