/**
* @license
* Copyright 2023 Google LLC.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
/**
* TFJS-based multi-head attention layer.
*/
/* Original source: keras/layers/attention/multi_head_attention.py */
import { einsum, linalg, logicalAnd, mul, ones, serialization, tidy, util } from '@tensorflow/tfjs-core';
import { cast, expandDims } from '../../backend/tfjs_backend';
import { getConstraint, serializeConstraint } from '../../constraints';
import { Layer } from '../../engine/topology';
import { ValueError } from '../../errors';
import { getInitializer, serializeInitializer } from '../../initializers';
import { getRegularizer, serializeRegularizer } from '../../regularizers';
import { Softmax } from '../advanced_activations';
import { Dropout } from '../core';
import { EinsumDense } from './einsum_dense';
const _CHR_IDX = 'abcdefghijklmnopqrstuvwxyz'.split('');
/**
* Builds einsum equations for the attention computation.
*
* Query, key, value inputs after projection are expected to have the shape as:
* `(bs, <non-attention dims>, <attention dims>, numHeads, channels)`.
* `bs` and `<non-attention dims>` are treated as `<batch dims>`.
*
* The attention operations can be generalized:
* (1) Query-key dot product:
* `(<batch dims>, <query attention dims>, numHeads, channels), (<batch dims>,
* <key attention dims>, numHeads, channels) -> (<batch dims>,
* numHeads, <query attention dims>, <key attention dims>)`
* (2) Combination:
* `(<batch dims>, numHeads, <query attention dims>, <key attention dims>),
* (<batch dims>, <value attention dims>, numHeads, channels) -> (<batch
* dims>, <query attention dims>, numHeads, channels)`
*
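* For example, for rank-4 projected tensors `(B, T, N, H)` with attention
* applied over axis 1, the generated equations work out to (an illustrative
* trace, not an official example):
*
* ```js
* const [dot, combine, scoresRank] = buildAttentionEquation(4, [1]);
* console.log(dot);        // 'aecd,abcd->acbe' (key, query -> [B, N, T, S])
* console.log(combine);    // 'acbe,aecd->abcd' (scores, value -> [B, T, N, H])
* console.log(scoresRank); // 4
* ```
*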
* @param rank Rank of query, key, value tensors.
* @param attnAxes Array of axes, `[-1, rank)`,
* that attention will be applied to.
* @returns Einsum equations.
*/
function buildAttentionEquation(rank, attnAxes) {
const targetNotationArr = _CHR_IDX.slice(0, rank);
// `batchDims` includes the head dim.
const excludeIndices = [...attnAxes, rank - 1];
const batchDims = [];
for (const e of Array(rank).keys()) {
if (!excludeIndices.includes(e)) {
batchDims.push(e);
}
}
let letterOffset = rank;
let sourceNotation = '';
for (let i = 0; i < rank; i++) {
if (batchDims.includes(i) || i === rank - 1) {
sourceNotation += targetNotationArr[i];
}
else {
sourceNotation += _CHR_IDX[letterOffset];
letterOffset++;
}
}
const productNotation = batchDims.map(i => targetNotationArr[i]).concat(attnAxes.map(i => targetNotationArr[i]), attnAxes.map(i => sourceNotation[i])).join('');
const targetNotation = targetNotationArr.join('');
const dotProductEquation = `${sourceNotation},${targetNotation}->${productNotation}`;
const attnScoresRank = productNotation.length;
const combineEquation = `${productNotation},${sourceNotation}->${targetNotation}`;
return [dotProductEquation, combineEquation, attnScoresRank];
}
/**
* Builds an einsum equation for projections inside multi-head attention.
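*
* For example, projecting a rank-3 `(B, T, E)` input to `(B, T, N, H)` uses
* `freeDims=2`, `boundDims=1`, `outputDims=2` (an illustrative trace):
*
* ```js
* const [equation, biasAxes, rank] = buildProjectionEquation(2, 1, 2);
* console.log(equation); // 'abc,cde->abde' ((B,T,E) x (E,N,H) -> (B,T,N,H))
* console.log(biasAxes); // 'de'
* console.log(rank);     // 4
* ```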
*/
function buildProjectionEquation(freeDims, boundDims, outputDims) {
let inputStr = '';
let kernelStr = '';
let outputStr = '';
let biasAxes = '';
let letterOffset = 0;
for (let i = 0; i < freeDims; i++) {
const char = _CHR_IDX[i + letterOffset];
inputStr += char;
outputStr += char;
}
letterOffset += freeDims;
for (let i = 0; i < boundDims; i++) {
const char = _CHR_IDX[i + letterOffset];
inputStr += char;
kernelStr += char;
}
letterOffset += boundDims;
for (let i = 0; i < outputDims; i++) {
const char = _CHR_IDX[i + letterOffset];
kernelStr += char;
outputStr += char;
biasAxes += char;
}
const equation = `${inputStr},${kernelStr}->${outputStr}`;
return [equation, biasAxes, outputStr.length];
}
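/**
 * Creates an output shape of rank `outputRank` whose leading dimensions are
 * unknown (`null`) and whose trailing dimensions are `knownLastDims`.
 * For example, `getOutputShape(4, [8, 16])` returns `[null, null, 8, 16]`.
 */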
function getOutputShape(outputRank, knownLastDims) {
const outputShape = Array(outputRank - knownLastDims.length).fill(null).concat(knownLastDims);
return outputShape;
}
/**
* MultiHeadAttention layer.
*
* This is an implementation of multi-headed attention as described in the
* paper "Attention is all you Need" (Vaswani et al., 2017).
* If `query`, `key,` `value` are the same, then
* this is self-attention. Each timestep in `query` attends to the
* corresponding sequence in `key`, and returns a fixed-width vector.
*
* This layer first projects `query`, `key` and `value`. These are
* (effectively) a list of tensors of length `numHeads`, where the
* corresponding shapes are `(batchSize, <query dimensions>, keyDim)`,
* `(batchSize, <key/value dimensions>, keyDim)`,
* `(batchSize, <key/value dimensions>, valueDim)`.
*
* Then, the query and key tensors are dot-producted and scaled. These are
* softmaxed to obtain attention probabilities. The value tensors are then
* interpolated by these probabilities, then concatenated back to a single
* tensor.
*
* Finally, the result tensor with the last dimension as `valueDim` can take
* a linear projection and be returned.
*
* When using `MultiHeadAttention` inside a custom layer, the custom layer must
* implement its own `build()` method and call `MultiHeadAttention`'s
* `buildFromSignature()` there.
* This enables weights to be restored correctly when the model is loaded.
*
* Examples:
*
* Performs 1D cross-attention over two sequence inputs with an attention mask.
* Returns the additional attention weights over heads.
*
* ```js
* const layer = new MultiHeadAttention({numHeads: 2, keyDim: 2});
* const target = tf.input({shape: [8, 16]});
* const source = tf.input({shape: [4, 16]});
* const [outputTensor, weights] = layer.callAndReturnAttentionScores(
* target, {value: source});
* console.log(outputTensor.shape); // [null, 8, 16]
* console.log(weights.shape); // [null, 2, 8, 4]
* ```
*
* Performs 2D self-attention over a 5D input tensor on axes 2 and 3.
*
* ```js
* const layer = new MultiHeadAttention({
* numHeads: 2, keyDim: 2, attentionAxes: [2, 3]});
* const inputTensor = tf.input({shape: [5, 3, 4, 16]});
* const outputTensor = layer.call(inputTensor, {value: inputTensor});
* console.log(outputTensor.shape); // [null, 5, 3, 4, 16]
* ```
*
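* Wraps the layer in a custom layer, calling `buildFromSignature` from the
* wrapper's `build()` as described above (a minimal sketch; `MyBlock` is a
* hypothetical name, not part of this library):
*
* ```js
* class MyBlock extends tf.layers.Layer {
*   constructor() {
*     super({});
*     this.attention = new MultiHeadAttention({numHeads: 2, keyDim: 2});
*   }
*   build(inputShape) {
*     // Build the attention weights now so they can be restored on load.
*     this.attention.buildFromSignature(inputShape, inputShape);
*     super.build(inputShape);
*   }
*   call(inputs) {
*     return this.attention.call(inputs, {value: inputs});
*   }
* }
* ```
*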
* Returns:
* attentionOutput: The result of the computation, of shape `(B, T, E)`,
* where `T` is for target sequence shapes and `E` is the query input
* last dimension if `outputShape` is `null`. Otherwise, the
* multi-head outputs are projected to the shape specified by
* `outputShape`.
* attentionScores: multi-head attention coefficients over attention axes.
*/
class MultiHeadAttention extends Layer {
constructor(args) {
super(args);
this.supportsMasking = true;
this.numHeads = args.numHeads;
this.keyDim = args.keyDim;
this.valueDim = args.valueDim ?? args.keyDim;
this.dropout = args.dropout ?? 0;
this.useBias = args.useBias ?? true;
this._outputShape = args.outputShape;
this.kernelInitializer = getInitializer(args.kernelInitializer ?? 'glorotUniform');
this.biasInitializer = getInitializer(args.biasInitializer ?? 'zeros');
this.kernelRegularizer = getRegularizer(args.kernelRegularizer);
this.biasRegularizer = getRegularizer(args.biasRegularizer);
this.activityRegularizer = getRegularizer(args.activityRegularizer);
this.kernelConstraint = getConstraint(args.kernelConstraint);
this.biasConstraint = getConstraint(args.biasConstraint);
if (args.attentionAxes != null && !Array.isArray(args.attentionAxes)) {
this.attentionAxes = [args.attentionAxes];
}
else {
this.attentionAxes = args.attentionAxes;
}
this.builtFromSignature = false;
this.queryShape = null;
this.keyShape = null;
this.valueShape = null;
}
/**
* Should be used for testing purposes only.
*/
get _queryDense() {
return this.queryDense;
}
/**
* Should be used for testing purposes only.
*/
get _keyDense() {
return this.keyDense;
}
/**
* Should be used for testing purposes only.
*/
get _valueDense() {
return this.valueDense;
}
/**
* Should be used for testing purposes only.
*/
get _outputDense() {
return this.outputDense;
}
getConfig() {
const config = {
numHeads: this.numHeads,
keyDim: this.keyDim,
valueDim: this.valueDim,
dropout: this.dropout,
useBias: this.useBias,
outputShape: this._outputShape,
attentionAxes: this.attentionAxes,
kernelInitializer: serializeInitializer(this.kernelInitializer),
biasInitializer: serializeInitializer(this.biasInitializer),
kernelRegularizer: serializeRegularizer(this.kernelRegularizer),
biasRegularizer: serializeRegularizer(this.biasRegularizer),
activityRegularizer: serializeRegularizer(this.activityRegularizer),
kernelConstraint: serializeConstraint(this.kernelConstraint),
biasConstraint: serializeConstraint(this.biasConstraint),
queryShape: this.queryShape,
keyShape: this.keyShape,
valueShape: this.valueShape,
};
const baseConfig = super.getConfig();
Object.assign(config, baseConfig);
return config;
}
static fromConfig(cls, config) {
// If the layer has a different build() function from the default,
// we need to trigger the customized build to create weights.
const queryShape = config['queryShape'];
const keyShape = config['keyShape'];
const valueShape = config['valueShape'];
delete config['queryShape'];
delete config['keyShape'];
delete config['valueShape'];
const layer = new cls(config);
if ([queryShape, keyShape, valueShape].includes(null)) {
console.warn('One of the dimensions of the input shape is missing. It ' +
'should have been memorized when the layer was serialized. ' +
`${cls.toString()} is created without weights.`);
}
else {
layer.buildFromSignature(queryShape, valueShape, keyShape);
}
return layer;
}
/**
* Builds layers and variables.
*
* Once the method is called, this.builtFromSignature will be set to true.
*/
buildFromSignature(queryShape, valueShape, keyShape) {
this.builtFromSignature = true;
if (keyShape == null) {
keyShape = valueShape;
}
this.queryShape = queryShape;
this.valueShape = valueShape;
this.keyShape = keyShape;
// Not using SymbolicTensors since tf.input() adds a batch dimension to the
// given shape, therefore giving the tensor the wrong rank.
const queryRank = queryShape.length;
const valueRank = valueShape.length;
const keyRank = keyShape.length;
const freeDims = queryRank - 1;
let [einsumEquation, biasAxes, outputRank] = buildProjectionEquation(freeDims, 1, 2);
this.queryDense = new EinsumDense({ equation: einsumEquation, outputShape: getOutputShape(outputRank - 1, [this.numHeads, this.keyDim]), biasAxes: this.useBias ? biasAxes : null, name: 'query', ...this.getCommonKwargsForSublayer() });
[einsumEquation, biasAxes, outputRank] =
buildProjectionEquation(keyRank - 1, 1, 2);
this.keyDense = new EinsumDense({ equation: einsumEquation, outputShape: getOutputShape(outputRank - 1, [this.numHeads, this.keyDim]), biasAxes: this.useBias ? biasAxes : null, name: 'key', ...this.getCommonKwargsForSublayer() });
[einsumEquation, biasAxes, outputRank] =
buildProjectionEquation(valueRank - 1, 1, 2);
this.valueDense = new EinsumDense({ equation: einsumEquation, outputShape: getOutputShape(outputRank - 1, [this.numHeads, this.valueDim]), biasAxes: this.useBias ? biasAxes : null, name: 'value', ...this.getCommonKwargsForSublayer() });
// Builds the attention computations for multi-head dot product attention.
this.buildAttention(outputRank);
this.outputDense = this.makeOutputDense(freeDims, this.getCommonKwargsForSublayer(), 'attentionOutput');
}
getCommonKwargsForSublayer() {
// Create a new clone of the kernel/bias initializer, so that we don't
// reuse the initializer instance, which could lead to the same init
// value since the initializer is stateless.
const kernelInitializer = getInitializer({
className: this.kernelInitializer.getClassName(),
config: this.kernelInitializer.getConfig(),
});
const biasInitializer = getInitializer({
className: this.biasInitializer.getClassName(),
config: this.biasInitializer.getConfig(),
});
const commonKwargs = {
kernelInitializer,
biasInitializer,
kernelRegularizer: this.kernelRegularizer,
biasRegularizer: this.biasRegularizer,
activityRegularizer: this.activityRegularizer,
kernelConstraint: this.kernelConstraint,
biasConstraint: this.biasConstraint,
};
return commonKwargs;
}
/**
* Builds the output projection matrix.
*
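* For a rank-3 query `(B, T, E)` and the default output shape, the einsum
* equation built below works out to (an illustrative trace):
*
* ```js
* const [eq, bias] = buildProjectionEquation(2, 2, 1);
* console.log(eq);   // 'abcd,cde->abe' ((B,T,N,H) x (N,H,E) -> (B,T,E))
* console.log(bias); // 'e'
* ```
*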
* @param freeDims Number of free dimensions for einsum equation building.
* @param commonKwargs Common keyword arguments for einsum layer.
* @param name Name for the projection layer.
* @returns Projection layer.
*/
makeOutputDense(freeDims, commonKwargs, name) {
let outputShape;
if (this._outputShape) {
if (!Array.isArray(this._outputShape)) {
outputShape = [this._outputShape];
}
else {
outputShape = this._outputShape;
}
}
else {
outputShape = [this.queryShape[this.queryShape.length - 1]];
}
const [einsumEquation, biasAxes, outputRank] = buildProjectionEquation(freeDims, 2, outputShape.length);
return new EinsumDense({ equation: einsumEquation, outputShape: getOutputShape(outputRank - 1, outputShape), biasAxes: this.useBias ? biasAxes : null, name, ...commonKwargs });
}
/**
* Builds multi-head dot-product attention computations.
*
* This function builds the attributes necessary for `computeAttention`.
* Subclasses can override `computeAttention` to customize the computation
* and replace the default dot-product attention.
*
* @param rank The rank of query, key, value tensors.
*/
buildAttention(rank) {
if (this.attentionAxes == null) {
this.attentionAxes = [];
for (let i = 1; i < rank - 2; i++) {
this.attentionAxes.push(i);
}
}
else {
this.attentionAxes = [...this.attentionAxes];
}
const [dotProductEquation, combineEquation, attnScoresRank] = buildAttentionEquation(rank, this.attentionAxes);
this.dotProductEquation = dotProductEquation;
this.combineEquation = combineEquation;
const normAxes = [];
const startIdx = attnScoresRank - this.attentionAxes.length;
for (let i = startIdx; i < attnScoresRank; i++) {
normAxes.push(i);
}
this.softmax = new Softmax({ axis: normAxes });
this.dropoutLayer = new Dropout({ rate: this.dropout });
}
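/**
 * Applies softmax over the attention axes, first expanding `attentionMask`
 * to the rank of the scores so it broadcasts over the heads dimension
 * (e.g. a `[B, T, S]` mask becomes `[B, 1, T, S]` for `[B, N, T, S]` scores).
 */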
maskedSoftmax(attentionScores, attentionMask) {
return tidy(() => {
// Normalize the attention scores to probabilities.
// `attentionScores` = [B, N, T, S]
if (attentionMask != null) {
// The expand dim happens starting from the `numHeads` dimension,
// (<batchDims>, numHeads, <queryAttentionDims, keyAttentionDims>)
const maskExpansionAxis = -this.attentionAxes.length * 2 - 1;
const endIdx = attentionScores.shape.length - attentionMask.shape.length;
for (let _ = 0; _ < endIdx; _++) {
attentionMask = expandDims(attentionMask, maskExpansionAxis);
}
}
return this.softmax.apply(attentionScores, { mask: attentionMask });
});
}
/**
* Applies dot-product attention with the query, key, value tensors.
*
* This function defines the computation inside `call` with projected
* multi-head Q, K, V inputs. Users can override this function for a
* customized attention implementation.
*
* @param query Projected query `Tensor` of shape `(B, T, N, keyDim)`.
* @param key Projected key `Tensor` of shape `(B, S, N, keyDim)`.
* @param value Projected value `Tensor` of shape `(B, S, N, valueDim)`.
* @param attentionMask A boolean mask of shape `(B, T, S)`, that prevents
* attention to certain positions. It is generally not needed if
* the `query` and `value` (and/or `key`) are masked.
* @param training Boolean indicating whether the layer should behave
* in training mode (adding dropout) or in inference mode (doing
* nothing).
* @returns attentionOutput: Multi-headed outputs of attention computation.
* @returns attentionScores: Multi-headed attention weights.
*/
computeAttention(query, key, value, attentionMask, training) {
return tidy(() => {
// Note: Applying scalar multiply at the smaller end of einsum improves
// XLA performance, but may introduce slight numeric differences in
// the Transformer attention head.
query = mul(query, 1.0 / Math.sqrt(this.keyDim));
// Take the dot product between "query" and "key" to get the raw
// attention scores.
let attentionScores = einsum(this.dotProductEquation, key, query);
attentionScores = this.maskedSoftmax(attentionScores, attentionMask);
// This is actually dropping out entire tokens to attend to, which might
// seem a bit unusual, but is taken from the original Transformer paper.
const attentionScoresDropout = this.dropoutLayer.apply(attentionScores, { training });
// `attentionOutput` = [B, T, N, H]
const attentionOutput = einsum(this.combineEquation, attentionScoresDropout, value);
return [attentionOutput, attentionScores];
});
}
apply(inputs, kwargs) {
if (!kwargs || !kwargs['value']) {
throw new ValueError('Must pass in `value` argument in `kwargs`.');
}
const newInputs = [inputs, kwargs['value']].concat(kwargs['key'] ?? []);
// TODO(pforderique): Support mask propagation.
return super.apply(newInputs, kwargs);
}
call(query, kwargs) {
return tidy(() => {
return this.callAndReturnAttentionScores(query, kwargs)[0];
});
}
/**
* Exactly like `call` except also returns the attention scores.
*/
callAndReturnAttentionScores(query, { value, key, useCausalMask, attentionMask, training }) {
return tidy(() => {
if (!this.builtFromSignature) {
this.buildFromSignature(query.shape, value.shape, key ? key.shape : null);
}
if (key == null) {
key = value;
}
// TODO(pforderique): Support RaggedTensor inputs.
attentionMask = this.computeAttentionMask(query, value, attentionMask, useCausalMask);
// N = `numAttentionHeads`
// H = `sizePerHead`
// `query` = [B, T, N, H]
query = this.queryDense.apply(query);
// `key` = [B, S, N, H]
key = this.keyDense.apply(key);
// `value` = [B, S, N, H]
value = this.valueDense.apply(value);
const [attentionOutputPreDense, attentionScores] = this.computeAttention(query, key, value, attentionMask, training);
const attentionOutput = this.outputDense.apply(attentionOutputPreDense);
return [attentionOutput, attentionScores];
});
}
/**
* Computes the attention mask.
*
* * The `query`'s mask is reshaped from [B, T] to [B, T, 1].
* * The `value`'s mask is reshaped from [B, S] to [B, 1, S].
* * The `key`'s mask is reshaped from [B, S] to [B, 1, S]. The `key`'s
* mask is ignored if `key` is `null` or if `key === value`.
* * If `useCausalMask=true`, then the causal mask is computed. Its shape
* is [1, T, S].
*
* All defined masks are merged using a logical AND operation (`&`).
*
* In general, if the `query` and `value` are masked, then there is no need
* to define the `attentionMask`.
*
* @param query Query `Tensor` of shape `(B, T, ...)`.
* @param value Value `Tensor` of shape `(B, S, ...)`.
* @param attentionMask A boolean mask of shape `(B, T, S)`, that prevents
* attention to certain positions.
* @param useCausalMask A boolean to indicate whether to apply a causal
* mask to prevent tokens from attending to future tokens (e.g.,
* used in a decoder Transformer).
* @returns attentionMask: A boolean mask of shape `(B, T, S)`, that prevents
* attention to certain positions, based on the Keras masks of the
* `query`, `key`, `value`, and `attentionMask` tensors, and the
* causal mask if `useCausalMask=true`.
*/
computeAttentionMask(query, value, attentionMask, useCausalMask = false) {
return tidy(() => {
let autoMask;
const queryMask = query.kerasMask;
const valueMask = value.kerasMask;
if (queryMask != null) {
autoMask = queryMask.expandDims(2); // Shape is [B, T, 1]
}
if (valueMask != null) {
const mask = valueMask.expandDims(1); // Shape is [B, 1, S]
autoMask = autoMask ? logicalAnd(autoMask, mask) : mask;
}
if (useCausalMask) {
// the shape of the causal mask is [1, T, S]
const mask = this.computeCausalMask(query, value);
autoMask = autoMask ? logicalAnd(autoMask, mask) : mask;
}
if (autoMask != null) {
// Merge attentionMask & automatic mask, to shape [B, T, S]
attentionMask = attentionMask ?
cast(attentionMask, 'bool').logicalAnd(autoMask) : autoMask;
}
return attentionMask;
});
}
/**
* Computes a causal mask (e.g., for masked self-attention layers).
*
* For example, if query and value both contain sequences of length 4,
* this function returns a boolean `Tensor` equal to:
*
* ```
* [[[true, false, false, false],
* [true, true, false, false],
* [true, true, true, false],
* [true, true, true, true]]]
* ```
*
* @param query Query `Tensor` of shape `(B, T, ...)`.
* @param value Value `Tensor` of shape `(B, S, ...)` (defaults to `query`).
* @returns mask: A boolean `Tensor` of shape [1, T, S] containing a lower
* triangular matrix of shape [T, S].
*/
computeCausalMask(query, value) {
return tidy(() => {
const qSeqLength = query.shape[1];
const vSeqLength = value ? value.shape[1] : qSeqLength;
// Create a lower triangular matrix.
return linalg.bandPart(ones([1, qSeqLength, vSeqLength], 'bool'), -1, 0);
});
}
/**
* Computes the layer's output shape.
*
* @param inputShapes A list of [queryShape, valueShape] or
* [queryShape, valueShape, keyShape]. If no keyShape is provided,
* valueShape is used as the keyShape.
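*
* For example (an illustrative trace): with `queryShape = [null, 8, 16]`,
* `valueShape = [null, 4, 16]`, and no `outputShape` configured, this
* returns `[null, 8, 16]`.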
*/
computeOutputShape(inputShapes) {
const [queryShape, valueShape, maybeKeyShape] = inputShapes;
const keyShape = maybeKeyShape ?? valueShape;
if (queryShape.slice(-1)[0] !== valueShape.slice(-1)[0]) {
throw new ValueError(`The last dimension of 'queryShape' and 'valueShape' must be equal, ` +
`but are ${queryShape.slice(-1)[0]}, ${valueShape.slice(-1)[0]}. ` +
`Received: queryShape=${queryShape}, valueShape=${valueShape}`);
}
if (!util.arraysEqual(valueShape.slice(1, -1), keyShape.slice(1, -1))) {
throw new ValueError(`All dimensions of 'value' and 'key', except the last one, must be ` +
`equal. Received ${valueShape} and ${keyShape}`);
}
if (this._outputShape) {
return queryShape.slice(0, -1).concat(this._outputShape);
}
return queryShape;
}
}
/** @nocollapse */
MultiHeadAttention.className = 'MultiHeadAttention';
export { MultiHeadAttention };
serialization.registerClass(MultiHeadAttention);