@tensorflow-models/coco-ssd
Object detection model (coco-ssd) in TensorFlow.js
"use strict";
/**
* @license
* Copyright 2018 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
Object.defineProperty(exports, "__esModule", { value: true });
var gradients_1 = require("../gradients");
var tensor_util_env_1 = require("../tensor_util_env");
var operation_1 = require("./operation");
/**
* Computes the softmax normalized vector given the logits.
*
* ```js
* const a = tf.tensor1d([1, 2, 3]);
*
* a.softmax().print(); // or tf.softmax(a)
* ```
*
* ```js
* const a = tf.tensor2d([2, 4, 6, 1, 2, 3], [2, 3]);
*
* a.softmax().print(); // or tf.softmax(a)
* ```
*
* @param logits The logits array.
* @param dim The dimension softmax would be performed on. Defaults to `-1`
* which indicates the last dimension.
*/
/** @doc {heading: 'Operations', subheading: 'Normalization'} */
function softmax_(logits, dim) {
if (dim === void 0) { dim = -1; }
var $logits = tensor_util_env_1.convertToTensor(logits, 'logits', 'softmax');
if (dim === -1) {
dim = $logits.rank - 1;
}
if (dim !== $logits.rank - 1) {
throw Error('Softmax along a non-last dimension is not yet supported. ' +
("Logits was rank " + $logits.rank + " and dim was " + dim));
}
var customOp = gradients_1.customGrad(function (logits) {
// Do it in log space for numerical stability.
// exp(X - logSumExp(X))
var keepDims = true;
var lse = logits.logSumExp([dim], keepDims);
var logResult = logits.toFloat().sub(lse);
var y = logResult.exp();
        var gradFunc = function (dy) {
            // VJP of softmax: dx = y * dy - y * sum(y * dy, dim), where y is
            // the forward-pass softmax output captured in the closure.
            var dyTimesY = dy.mul(y);
            var keepDims = true;
            return dyTimesY.sub(dyTimesY.sum([dim], keepDims).mul(y));
        };
return { value: y, gradFunc: gradFunc };
});
return customOp($logits);
}
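/**
 * A minimal sketch of the numerical-stability argument behind the log-space
 * formulation above, using plain numbers. Illustrative only: `naive`,
 * `stable`, and this standalone `logSumExp` are hypothetical helpers, not
 * tfjs APIs.
 *
 * ```js
 * // Naive softmax: Math.exp(1000) === Infinity, so every entry becomes
 * // Infinity / Infinity === NaN.
 * const naive = xs => {
 *   const es = xs.map(Math.exp);
 *   const s = es.reduce((a, b) => a + b, 0);
 *   return es.map(e => e / s);
 * };
 * naive([1000, 1001, 1002]); // [NaN, NaN, NaN]
 *
 * // Log-space softmax: exp(x - logSumExp(x)) never exponentiates a large
 * // value, because logSumExp shifts by the max before exponentiating.
 * const logSumExp = xs => {
 *   const m = Math.max(...xs);
 *   return m + Math.log(xs.reduce((a, x) => a + Math.exp(x - m), 0));
 * };
 * const stable = xs => xs.map(x => Math.exp(x - logSumExp(xs)));
 * stable([1000, 1001, 1002]); // ~[0.0900, 0.2447, 0.6652]
 * ```
 */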
/**
* Computes the log softmax.
*
* ```js
* const a = tf.tensor1d([1, 2, 3]);
*
* a.logSoftmax().print(); // or tf.logSoftmax(a)
* ```
*
* ```js
* const a = tf.tensor2d([2, 4, 6, 1, 2, 3], [2, 3]);
*
* a.logSoftmax().print(); // or tf.logSoftmax(a)
* ```
*
* @param logits The logits array.
 * @param axis The dimension log softmax would be performed on. Defaults to
 * `-1` which indicates the last dimension.
*/
/** @doc {heading: 'Operations', subheading: 'Normalization'} */
function logSoftmax_(logits, axis) {
if (axis === void 0) { axis = -1; }
var $logits = tensor_util_env_1.convertToTensor(logits, 'logits', 'logSoftmax');
if (axis === -1) {
axis = $logits.rank - 1;
}
if (axis !== $logits.rank - 1) {
throw Error('Log Softmax along a non-last dimension is not yet supported. ' +
("Logits was rank " + $logits.rank + " and axis was " + axis));
}
var customOp = gradients_1.customGrad(function (logits) {
var keepDims = true;
var xMax = logits.max(axis, true);
var shifted = logits.sub(xMax);
var value = shifted.toFloat().sub(shifted.exp().sum(axis, keepDims).log());
        var gradFunc = function (dy) {
            // VJP of log softmax: dx = dy - sum(dy, axis) * softmax(x),
            // recovering softmax(x) as exp(logSoftmax(x)).
            var softmax = value.exp();
            return dy.sub(dy.sum(axis, keepDims).mul(softmax));
        };
return { value: value, gradFunc: gradFunc };
});
return customOp($logits);
}
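/**
 * A minimal sketch of the gradient identity used above, illustrative only:
 * for `f(x) = logSoftmax(x)`, the vector-Jacobian product with upstream `dy`
 * is `dy - sum(dy, axis) * softmax(x)`. It can be spot-checked through the
 * public API:
 *
 * ```js
 * const x = tf.tensor1d([2, 4, 6]);
 * // Summing the output makes the upstream gradient dy all ones, so the
 * // result should equal 1 - 3 * softmax(x).
 * const g = tf.grad(t => tf.logSoftmax(t).sum());
 * g(x).print();
 * ```
 */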
exports.softmax = operation_1.op({ softmax_: softmax_ });
exports.logSoftmax = operation_1.op({ logSoftmax_: logSoftmax_ });
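// Illustrative usage through the public bundle (assuming these ops are
// consumed via `@tensorflow/tfjs`, which re-exports them):
//
//   const tf = require('@tensorflow/tfjs');
//   tf.softmax(tf.tensor1d([1, 2, 3])).print();    // values sum to 1
//   tf.logSoftmax(tf.tensor1d([1, 2, 3])).print(); // log of the line above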
//# sourceMappingURL=softmax.js.map