/**
* @license
* Copyright 2018 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* =============================================================================
*/
import * as tf from '@tensorflow/tfjs-core';
import * as seedrandom from 'seedrandom';
import { deepClone } from '../util/deep_clone';
import { deepMapAndAwaitAll, deepZip, zipToList } from '../util/deep_map';
import { GrowingRingBuffer } from '../util/growing_ring_buffer';
import { RingBuffer } from '../util/ring_buffer';
// Here we implement a simple asynchronous iterator.
// This lets us avoid using either third-party stream libraries or
// recent TypeScript language support requiring polyfills.
/**
* Create a `LazyIterator` from an array of items.
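*
* For example (a usage sketch; `iteratorFromItems` is the module-level
* export defined just below):
*
* ```js
* const iter = iteratorFromItems([1, 2, 3]);
* await iter.forEachAsync(e => console.log(e));  // 1, 2, 3
* ```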
*/
export function iteratorFromItems(items) {
return new ArrayIterator(items);
}
/**
* Create a `LazyIterator` of incrementing integers.
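*
* A usage sketch (the stream is unbounded, so limit it with `take`):
*
* ```js
* const iter = iteratorFromIncrementing(0).take(3);
* await iter.forEachAsync(e => console.log(e));  // 0, 1, 2
* ```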
*/
export function iteratorFromIncrementing(start) {
let i = start;
return iteratorFromFunction(() => ({ value: i++, done: false }));
}
/**
* Create a `LazyIterator` from a function.
*
* ```js
* let i = -1;
* const func = () =>
* ++i < 5 ? {value: i, done: false} : {value: null, done: true};
* const iter = tf.data.iteratorFromFunction(func);
* await iter.forEachAsync(e => console.log(e));
* ```
*
* @param func A function that produces data on each call.
*/
export function iteratorFromFunction(func) {
return new FunctionCallIterator(func);
}
/**
* Create a `LazyIterator` by concatenating underlying streams, which are
* themselves provided as a stream.
*
* This can also be thought of as a "stream flatten" operation.
*
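* For example, a sketch that flattens two in-memory streams (built with
* this module's `iteratorFromItems`):
*
* ```js
* const a = iteratorFromItems([1, 2]);
* const b = iteratorFromItems([3, 4]);
* const flat = iteratorFromConcatenated(iteratorFromItems([a, b]));
* console.log(await flat.toArray());  // [1, 2, 3, 4]
* ```
*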
* @param baseIterators A stream of streams to be concatenated.
* @param baseErrorHandler An optional function that can intercept `Error`s
* raised during a `next()` call on the base stream. This function can decide
* whether the error should be propagated, whether the error should be
* ignored, or whether the base stream should be terminated.
*/
export function iteratorFromConcatenated(baseIterators, baseErrorHandler) {
return new ChainedIterator(baseIterators, baseErrorHandler);
}
/**
* Create a `LazyIterator` by concatenating streams produced by calling a
* stream-generating function a given number of times.
*
* Since a `LazyIterator` is read-once, it cannot be repeated, but this
* function can be used to achieve a similar effect:
*
* iteratorFromConcatenatedFunction(() => new MyIterator(), 6);
*
* @param iteratorFunc: A function that produces a new stream on each call.
* @param count: The number of times to call the function.
* @param baseErrorHandler An optional function that can intercept `Error`s
* raised during a `next()` call on the base stream. This function can decide
* whether the error should be propagated, whether the error should be
* ignored, or whether the base stream should be terminated.
*/
export function iteratorFromConcatenatedFunction(iteratorFunc, count, baseErrorHandler) {
return iteratorFromConcatenated(iteratorFromFunction(iteratorFunc).take(count), baseErrorHandler);
}
/**
* Create a `LazyIterator` by zipping together an array, dict, or nested
* structure of `LazyIterator`s (and perhaps additional constants).
*
* The underlying streams must provide elements in a consistent order such
* that they correspond.
*
* Typically, the underlying streams should have the same number of
* elements. If they do not, the behavior is determined by the
* `mismatchMode` argument.
*
* The nested structure of the `iterators` argument determines the
* structure of elements in the resulting iterator.
*
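* For example (a sketch zipping two equal-length streams into a dict
* structure):
*
* ```js
* const xs = iteratorFromItems([1, 2, 3]);
* const ys = iteratorFromItems(['a', 'b', 'c']);
* const zipped = iteratorFromZipped({x: xs, y: ys});
* console.log(await zipped.toArray());
* // [{x: 1, y: 'a'}, {x: 2, y: 'b'}, {x: 3, y: 'c'}]
* ```
*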
* @param iterators: An array or object containing LazyIterators at the
* leaves.
* @param mismatchMode: Determines what to do when one underlying iterator
* is exhausted before the others. `ZipMismatchMode.FAIL` (the default)
* causes an error to be thrown in this case. `ZipMismatchMode.SHORTEST`
* causes the zipped iterator to terminate with the first underlying
* stream, so elements remaining on the longer streams are ignored.
* `ZipMismatchMode.LONGEST` causes the zipped stream to continue, filling
* in nulls for the exhausted streams, until all streams are exhausted.
*/
export function iteratorFromZipped(iterators, mismatchMode = ZipMismatchMode.FAIL) {
return new ZipIterator(iterators, mismatchMode);
}
/**
* An asynchronous iterator, providing lazy access to a potentially
* unbounded stream of elements.
*
* Iterator can be obtained from a dataset:
* `const iter = await dataset.iterator();`
*/
export class LazyIterator {
/**
* Collect all remaining elements of a bounded stream into an array.
* Obviously this will succeed only for small streams that fit in memory.
* Useful for testing.
*
* @returns A Promise for an array of stream elements, which will resolve
* when the stream is exhausted.
*/
async toArray() {
const result = [];
let x = await this.next();
while (!x.done) {
result.push(x.value);
x = await this.next();
}
return result;
}
/**
* Collect all elements of this stream into an array, prefetching 100
* elements. This is useful for testing, because the prefetch changes the
* order in which the Promises are resolved along the processing pipeline.
* This may help expose bugs where results are dependent on the order of
* Promise resolution rather than on the logical order of the stream (i.e.,
* due to hidden mutable state).
*
* @returns A Promise for an array of stream elements, which will resolve
* when the stream is exhausted.
*/
async toArrayForTest() {
const stream = this.prefetch(100);
const result = [];
let x = await stream.next();
while (!x.done) {
result.push(x.value);
x = await stream.next();
}
return result;
}
/**
* Draw items from the stream until it is exhausted.
*
* This can be useful when the stream has side effects but no output. In
* that case, calling this function guarantees that the stream will be
* fully processed.
*/
async resolveFully() {
let x = await this.next();
while (!x.done) {
x = await this.next();
}
}
/**
* Draw items from the stream until it is exhausted, or a predicate fails.
*
* This can be useful when the stream has side effects but no output. In
* that case, calling this function processes the stream until the
* predicate returns false or the stream is exhausted.
*/
async resolveWhile(predicate) {
let x = await this.next();
let shouldContinue = predicate(x.value);
while ((!x.done) && shouldContinue) {
x = await this.next();
shouldContinue = predicate(x.value);
}
}
/**
* Handles errors thrown on this stream using a provided handler function.
*
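* For example (a sketch in which the handler drops failing calls and
* continues):
*
* ```js
* let i = 0;
* const iter = iteratorFromFunction(() => {
*   if (++i === 2) { throw new Error('bad element'); }
*   return i <= 3 ? {value: i, done: false} : {value: null, done: true};
* }).handleErrors(e => true);
* console.log(await iter.toArray());  // [1, 3]
* ```
*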
* @param handler A function that handles any `Error` thrown during a `next()`
* call and returns true if the stream should continue (dropping the failed
* call) or false if the stream should quietly terminate. If the handler
* itself throws (or rethrows) an `Error`, that will be propagated.
*
* @returns A `LazyIterator` of elements passed through from upstream,
* possibly filtering or terminating on upstream `next()` calls that
* throw an `Error`.
*/
handleErrors(handler) {
return new ErrorHandlingLazyIterator(this, handler);
}
// TODO(soergel): Implement reduce() etc.
/**
* Filters this stream according to `predicate`.
*
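* For example (a sketch):
*
* ```js
* const evens = iteratorFromItems([1, 2, 3, 4]).filter(x => x % 2 === 0);
* console.log(await evens.toArray());  // [2, 4]
* ```
*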
* @param predicate A function mapping a stream element to a boolean or a
* `Promise` for one.
*
* @returns A `LazyIterator` of elements for which the predicate was true.
*/
filter(predicate) {
return new FilterIterator(this, predicate);
}
/**
* Maps this stream through a 1-to-1 transform.
*
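* For example (a sketch):
*
* ```js
* const doubled = iteratorFromItems([1, 2, 3]).map(x => x * 2);
* console.log(await doubled.toArray());  // [2, 4, 6]
* ```
*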
* @param transform A function mapping a stream element to a transformed
* element.
*
* @returns A `LazyIterator` of transformed elements.
*/
map(transform) {
return new MapIterator(this, transform);
}
/**
* Maps this stream through an async 1-to-1 transform.
*
* @param transform A function mapping a stream element to a `Promise` for a
* transformed stream element.
*
* @returns A `LazyIterator` of transformed elements.
*/
mapAsync(transform) {
return new AsyncMapIterator(this, transform);
}
/**
* Maps this stream through a 1-to-1 transform, forcing serial execution.
*
* @param transform A function mapping a stream element to a transformed
* element.
*
* @returns A `LazyIterator` of transformed elements.
*/
serialMapAsync(transform) {
return new AsyncMapIterator(this, transform).serial();
}
/**
* Maps this stream through a 1-to-many transform.
*
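* For example (a sketch where each element maps to two output elements):
*
* ```js
* const flat = iteratorFromItems([1, 2]).flatmap(x => [x, x * 10]);
* console.log(await flat.toArray());  // [1, 10, 2, 20]
* ```
*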
* @param transform A function mapping a stream element to an array of
* transformed elements.
*
* @returns A `LazyIterator` of transformed elements.
*/
flatmap(transform) {
return new FlatmapIterator(this, transform);
}
/**
* Apply a function to every element of the stream.
*
* @param f A function to apply to each stream element.
*/
async forEachAsync(f) {
return this.map(f).resolveFully();
}
/**
* Apply a function to every element of the stream, forcing serial execution.
*
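* For example (a sketch; returning false stops the stream):
*
* ```js
* await iteratorFromIncrementing(0).serialForEach(async x => {
*   console.log(x);  // 0, 1, 2, 3
*   return x < 3;
* });
* ```
*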
* @param f A function to apply to each stream element. Should return 'true'
* to indicate that the stream should continue, or 'false' to cause it to
* terminate.
*/
async serialForEach(f) {
return this.serialMapAsync(f).resolveWhile(x => (x === true));
}
/**
* Groups elements into batches, represented as arrays of elements.
*
* We can think of the elements of this iterator as 'rows' (even if they are
* nested structures). By the same token, consecutive values for a given
* key within the elements form a 'column'. This matches the usual sense of
* 'row' and 'column' when processing tabular data (e.g., parsing a CSV).
*
* Thus, "Row-major" means that the resulting batch is simply a collection of
* rows: `[row1, row2, row3, ...]`. This is in contrast to the column-major
* form, which is needed for vectorized computation.
*
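* For example (a sketch with a small last batch):
*
* ```js
* const batches = iteratorFromItems([1, 2, 3, 4, 5]).rowMajorBatch(2);
* console.log(await batches.toArray());  // [[1, 2], [3, 4], [5]]
* ```
*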
* @param batchSize The number of elements desired per batch.
* @param smallLastBatch Whether to emit the final batch when it has fewer
* than batchSize elements. Default true.
* @returns A `LazyIterator` of batches of elements, represented as arrays
* of the original element type.
*/
rowMajorBatch(batchSize, smallLastBatch = true) {
return new RowMajorBatchIterator(this, batchSize, smallLastBatch);
}
/**
* Groups elements into batches, represented in column-major form.
*
* We can think of the elements of this iterator as 'rows' (even if they are
* nested structures). By the same token, consecutive values for a given
* key within the elements form a 'column'. This matches the usual sense of
* 'row' and 'column' when processing tabular data (e.g., parsing a CSV).
*
* Thus, "column-major" means that the resulting batch is a (potentially
* nested) structure representing the columns. Each column entry, then,
* contains a collection of the values found in that column for a range of
* input elements. This representation allows for vectorized computation, in
* contrast to the row-major form.
*
* The inputs should all have the same nested structure (i.e., of arrays and
* dicts). The result is a single object with the same nested structure,
* where the leaves are arrays collecting the values of the inputs at that
* location (or, optionally, the result of a custom function applied to those
* arrays).
*
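* For example (a sketch using the default `zipFn`, which collects leaf
* values into arrays):
*
* ```js
* const rows = iteratorFromItems([{a: 1, b: 10}, {a: 2, b: 20}]);
* const cols = rows.columnMajorBatch(2);
* console.log(await cols.toArray());  // [{a: [1, 2], b: [10, 20]}]
* ```
*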
* @param batchSize The number of elements desired per batch.
* @param smallLastBatch Whether to emit the final batch when it has fewer
* than batchSize elements. Default true.
* @param zipFn: (optional) A function that expects an array of elements at a
* single node of the object tree, and returns a `DeepMapResult`. The
* `DeepMapResult` either provides a result value for that node (i.e.,
* representing the subtree), or indicates that the node should be processed
* recursively. The default zipFn recurses as far as possible and places
* arrays at the leaves.
* @returns A `LazyIterator` of batches of elements, represented as an object
* with collections at the leaves.
*/
columnMajorBatch(batchSize, smallLastBatch = true,
// tslint:disable-next-line:no-any
zipFn = zipToList) {
// First collect the desired number of input elements as a row-major batch.
const rowBatches = this.rowMajorBatch(batchSize, smallLastBatch);
// Now 'rotate' or 'pivot' the data, collecting all values from each column
// in the batch (i.e., for each key within the elements) into an array.
return rowBatches.map(x => deepZip(x, zipFn));
}
/**
* Concatenate this `LazyIterator` with another.
*
* @param iterator A `LazyIterator` to be concatenated onto this one.
* @param baseErrorHandler An optional function that can intercept `Error`s
* raised during a `next()` call on the base stream. This function can
* decide whether the error should be propagated, whether the error should
* be ignored, or whether the base stream should be terminated.
* @returns A `LazyIterator`.
*/
concatenate(iterator, baseErrorHandler) {
return new ChainedIterator(iteratorFromItems([this, iterator]), baseErrorHandler);
}
/**
* Limits this stream to return at most `count` items.
*
* @param count The maximum number of items to provide from the stream. If
* a negative or undefined value is given, the entire stream is returned
* unaltered.
*/
take(count) {
if (count < 0 || count == null) {
return this;
}
return new TakeIterator(this, count);
}
/**
* Skips the first `count` items in this stream.
*
* @param count The number of items to skip. If a negative or undefined
* value is given, the entire stream is returned unaltered.
*/
skip(count) {
if (count < 0 || count == null) {
return this;
}
return new SkipIterator(this, count);
}
/**
* Prefetch the first `bufferSize` items in this stream.
*
* Note this prefetches Promises, but makes no guarantees about when those
* Promises resolve.
*
* @param bufferSize: An integer specifying the number of elements to be
* prefetched.
*/
prefetch(bufferSize) {
return new PrefetchIterator(this, bufferSize);
}
// TODO(soergel): deep sharded shuffle, where supported
/**
* Randomly shuffles the elements of this stream.
*
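* For example (a sketch; the output order is pseudorandom, seeded here for
* repeatability):
*
* ```js
* const shuffled = iteratorFromItems([1, 2, 3, 4, 5]).shuffle(3, 'a-seed');
* console.log(await shuffled.toArray());  // the same five items, in
*                                         // pseudorandom order
* ```
*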
* @param windowSize: An integer specifying the number of elements from
* this stream from which the new stream will sample.
* @param seed: (Optional.) A seed string that will be used to create the
* random distribution.
*/
shuffle(windowSize, seed) {
return new ShuffleIterator(this, windowSize, seed);
}
/**
* Force an iterator to execute serially: each next() call will await the
* prior one, so that they cannot execute concurrently.
*/
serial() {
return new SerialIterator(this);
}
}
// ============================================================================
// The following private classes serve to implement the chainable methods
// on LazyIterator. Unfortunately they can't be placed in separate files,
// due to resulting trouble with circular imports.
// ============================================================================
// Iterators that just extend LazyIterator directly
// ============================================================================
class ArrayIterator extends LazyIterator {
constructor(items) {
super();
this.items = items;
this.trav = 0;
}
summary() {
return `Array of ${this.items.length} items`;
}
async next() {
if (this.trav >= this.items.length) {
return { value: null, done: true };
}
const item = this.items[this.trav];
this.trav++;
return { value: deepClone(item), done: false };
}
}
class FunctionCallIterator extends LazyIterator {
constructor(nextFn) {
super();
this.nextFn = nextFn;
}
summary() {
return `Function call`;
}
async next() {
try {
return this.nextFn();
}
catch (e) {
// Modify the error message but leave the stack trace intact
e.message =
`Error thrown while iterating through a dataset: ${e.message}`;
throw e;
}
}
}
class SerialIterator extends LazyIterator {
constructor(upstream) {
super();
this.upstream = upstream;
this.lastRead = Promise.resolve({ value: null, done: false });
}
summary() {
return `${this.upstream.summary()} -> Serial`;
}
async next() {
// This sets this.lastRead to a new Promise right away, as opposed to
// saying `await this.lastRead; this.lastRead = this.serialNext();` which
// would not work because this.lastRead would be updated only after the
// promise resolves.
this.lastRead = this.lastRead.then(() => this.serialNext());
return this.lastRead;
}
async serialNext() {
return this.upstream.next();
}
}
class SkipIterator extends LazyIterator {
constructor(upstream, maxCount) {
super();
this.upstream = upstream;
this.maxCount = maxCount;
// Local state that should not be clobbered by out-of-order execution.
this.count = 0;
this.lastRead = Promise.resolve({ value: null, done: false });
}
summary() {
return `${this.upstream.summary()} -> Skip`;
}
async next() {
// This sets this.lastRead to a new Promise right away, as opposed to
// saying `await this.lastRead; this.lastRead = this.serialNext();` which
// would not work because this.lastRead would be updated only after the
// promise resolves.
this.lastRead = this.lastRead.then(() => this.serialNext());
return this.lastRead;
}
async serialNext() {
// TODO(soergel): consider tradeoffs of reading in parallel, e.g.
// collecting next() promises in an Array and then waiting for
// Promise.all() of those. Benefit: pseudo-parallel execution. Drawback:
// maybe delayed GC.
while (this.count++ < this.maxCount) {
const skipped = await this.upstream.next();
// short-circuit if upstream is already empty
if (skipped.done) {
return skipped;
}
tf.dispose(skipped.value);
}
return this.upstream.next();
}
}
class TakeIterator extends LazyIterator {
constructor(upstream, maxCount) {
super();
this.upstream = upstream;
this.maxCount = maxCount;
this.count = 0;
}
summary() {
return `${this.upstream.summary()} -> Take`;
}
async next() {
if (this.count++ >= this.maxCount) {
return { value: null, done: true };
}
return this.upstream.next();
}
}
// Note this batch just groups items into row-wise element arrays.
// Rotating these to a column-wise representation happens only at the dataset
// level.
class RowMajorBatchIterator extends LazyIterator {
constructor(upstream, batchSize, enableSmallLastBatch = true) {
super();
this.upstream = upstream;
this.batchSize = batchSize;
this.enableSmallLastBatch = enableSmallLastBatch;
this.lastRead = Promise.resolve({ value: null, done: false });
}
summary() {
return `${this.upstream.summary()} -> RowMajorBatch`;
}
async next() {
// This sets this.lastRead to a new Promise right away, as opposed to
// saying `await this.lastRead; this.lastRead = this.serialNext();` which
// would not work because this.lastRead would be updated only after the
// promise resolves.
this.lastRead = this.lastRead.then(() => this.serialNext());
return this.lastRead;
}
async serialNext() {
const batch = [];
while (batch.length < this.batchSize) {
const item = await this.upstream.next();
if (item.done) {
if (this.enableSmallLastBatch && batch.length > 0) {
return { value: batch, done: false };
}
return { value: null, done: true };
}
batch.push(item.value);
}
return { value: batch, done: false };
}
}
class FilterIterator extends LazyIterator {
constructor(upstream, predicate) {
super();
this.upstream = upstream;
this.predicate = predicate;
this.lastRead = Promise.resolve({ value: null, done: false });
}
summary() {
return `${this.upstream.summary()} -> Filter`;
}
async next() {
// This sets this.lastRead to a new Promise right away, as opposed to
// saying `await this.lastRead; this.lastRead = this.serialNext();` which
// would not work because this.lastRead would be updated only after the
// promise resolves.
this.lastRead = this.lastRead.then(() => this.serialNext());
return this.lastRead;
}
async serialNext() {
while (true) {
const item = await this.upstream.next();
if (item.done || this.predicate(item.value)) {
return item;
}
tf.dispose(item.value);
}
}
}
class MapIterator extends LazyIterator {
constructor(upstream, transform) {
super();
this.upstream = upstream;
this.transform = transform;
}
summary() {
return `${this.upstream.summary()} -> Map`;
}
async next() {
const item = await this.upstream.next();
if (item.done) {
return { value: null, done: true };
}
const inputTensors = tf.tensor_util.getTensorsInContainer(item.value);
// Careful: the transform may mutate the item in place.
// That's why we have to remember the input Tensors above, and then
// below dispose only those that were not passed through to the output.
// Note too that the transform function is responsible for tidying
// any intermediate Tensors. Here we are concerned only about the
// inputs.
const mapped = this.transform(item.value);
const outputTensors = tf.tensor_util.getTensorsInContainer(mapped);
// TODO(soergel) faster intersection
// TODO(soergel) move to tf.disposeExcept(in, out)?
for (const t of inputTensors) {
if (!tf.tensor_util.isTensorInList(t, outputTensors)) {
t.dispose();
}
}
return { value: mapped, done: false };
}
}
class ErrorHandlingLazyIterator extends LazyIterator {
constructor(upstream, handler) {
super();
this.upstream = upstream;
this.handler = handler;
this.count = 0;
this.lastRead = Promise.resolve({ value: null, done: false });
}
summary() {
return `${this.upstream.summary()} -> handleErrors`;
}
async next() {
// This sets this.lastRead to a new Promise right away, as opposed to
// saying `await this.lastRead; this.lastRead = this.serialNext();` which
// would not work because this.lastRead would be updated only after the
// promise resolves.
this.lastRead = this.lastRead.then(() => this.serialNext());
return this.lastRead;
}
async serialNext() {
while (true) {
try {
return await this.upstream.next();
}
catch (e) {
if (!this.handler(e)) {
return { value: null, done: true };
}
// If the handler returns true, loop and fetch the next upstream item.
// If the upstream iterator throws an endless stream of errors, and if
// the handler says to ignore them, then we loop forever here. That is
// the correct behavior-- it's up to the handler to decide when to stop.
}
}
}
}
class AsyncMapIterator extends LazyIterator {
constructor(upstream, transform) {
super();
this.upstream = upstream;
this.transform = transform;
}
summary() {
return `${this.upstream.summary()} -> AsyncMap`;
}
async next() {
const item = await this.upstream.next();
if (item.done) {
return { value: null, done: true };
}
const inputTensors = tf.tensor_util.getTensorsInContainer(item.value);
// Careful: the transform may mutate the item in place.
// That's why we have to remember the input Tensors above, and then
// below dispose only those that were not passed through to the output.
// Note too that the transform function is responsible for tidying
// any intermediate Tensors. Here we are concerned only about the
// inputs.
const mapped = await this.transform(item.value);
const outputTensors = tf.tensor_util.getTensorsInContainer(mapped);
// TODO(soergel) faster intersection
// TODO(soergel) move to tf.disposeExcept(in, out)?
for (const t of inputTensors) {
if (!tf.tensor_util.isTensorInList(t, outputTensors)) {
t.dispose();
}
}
return { value: mapped, done: false };
}
}
// Iterators that maintain a queue of pending items
// ============================================================================
/**
* A base class for transforming streams that operate by maintaining an
* output queue of elements that are ready to return via next(). This is
* commonly required when the transformation is 1-to-many: A call to next()
* may trigger a call to the underlying stream, which will produce many
* mapped elements of this stream-- of which we need to return only one, so
* we have to queue the rest.
*/
export class OneToManyIterator extends LazyIterator {
constructor() {
super();
this.outputQueue = new GrowingRingBuffer();
this.lastRead = Promise.resolve({ value: null, done: false });
}
async next() {
// This sets this.lastRead to a new Promise right away, as opposed to
// saying `await this.lastRead; this.lastRead = this.serialNext();` which
// would not work because this.lastRead would be updated only after the
// promise resolves.
this.lastRead = this.lastRead.then(() => this.serialNext());
return this.lastRead;
}
async serialNext() {
// Fetch so that the queue contains at least one item if possible.
// If the upstream source is exhausted, AND there are no items left in
// the output queue, then this stream is also exhausted.
while (this.outputQueue.length() === 0) {
// TODO(soergel): consider parallel reads.
if (!await this.pump()) {
return { value: null, done: true };
}
}
return { value: this.outputQueue.shift(), done: false };
}
}
class FlatmapIterator extends OneToManyIterator {
constructor(upstream, transform) {
super();
this.upstream = upstream;
this.transform = transform;
}
summary() {
return `${this.upstream.summary()} -> Flatmap`;
}
async pump() {
const item = await this.upstream.next();
if (item.done) {
return false;
}
const inputTensors = tf.tensor_util.getTensorsInContainer(item.value);
// Careful: the transform may mutate the item in place.
// That's why we have to remember the input Tensors above, and then
// below dispose only those that were not passed through to the output.
// Note too that the transform function is responsible for tidying any
// intermediate Tensors. Here we are concerned only about the inputs.
const mappedArray = this.transform(item.value);
const outputTensors = tf.tensor_util.getTensorsInContainer(mappedArray);
this.outputQueue.pushAll(mappedArray);
// TODO(soergel) faster intersection, and deduplicate outputTensors
// TODO(soergel) move to tf.disposeExcept(in, out)?
for (const t of inputTensors) {
if (!tf.tensor_util.isTensorInList(t, outputTensors)) {
t.dispose();
}
}
return true;
}
}
/**
* Provides a `LazyIterator` that concatenates a stream of underlying
* streams.
*
* Doing this in a concurrency-safe way requires some trickery. In
* particular, we want this stream to return the elements from the
* underlying streams in the correct order according to when next() was
* called, even if the resulting Promises resolve in a different order.
*/
export class ChainedIterator extends LazyIterator {
constructor(iterators, baseErrorHandler) {
super();
this.baseErrorHandler = baseErrorHandler;
// Strict Promise execution order:
// a next() call may not even begin until the previous one completes.
this.lastRead = null;
// Local state that should not be clobbered by out-of-order execution.
this.iterator = null;
this.moreIterators = iterators;
}
summary() {
const upstreamSummaries = 'TODO: fill in upstream of chained summaries';
return `${upstreamSummaries} -> Chained`;
}
async next() {
this.lastRead = this.readFromChain(this.lastRead);
return this.lastRead;
}
async readFromChain(lastRead) {
// Must await on the previous read since the previous read may have advanced
// the stream of streams, from which we need to read.
// This is unfortunate since we can't parallelize reads, which means
// prefetching of chained streams is a no-op.
// One solution is to prefetch immediately upstream of this.
await lastRead;
if (this.iterator == null) {
const iteratorResult = await this.moreIterators.next();
if (iteratorResult.done) {
// No more streams to stream from.
return { value: null, done: true };
}
this.iterator = iteratorResult.value;
if (this.baseErrorHandler != null) {
this.iterator = this.iterator.handleErrors(this.baseErrorHandler);
}
}
const itemResult = await this.iterator.next();
if (itemResult.done) {
this.iterator = null;
return this.readFromChain(lastRead);
}
return itemResult;
}
}
export var ZipMismatchMode;
(function (ZipMismatchMode) {
ZipMismatchMode[ZipMismatchMode["FAIL"] = 0] = "FAIL";
ZipMismatchMode[ZipMismatchMode["SHORTEST"] = 1] = "SHORTEST";
ZipMismatchMode[ZipMismatchMode["LONGEST"] = 2] = "LONGEST"; // use nulls for exhausted streams; use up the longest stream.
})(ZipMismatchMode || (ZipMismatchMode = {}));
/**
* Provides a `LazyIterator` that zips together an array, dict, or nested
* structure of `LazyIterator`s (and perhaps additional constants).
*
* The underlying streams must provide elements in a consistent order such
* that they correspond.
*
* Typically, the underlying streams should have the same number of
* elements. If they do not, the behavior is determined by the
* `mismatchMode` argument.
*
* The nested structure of the `iterators` argument determines the
* structure of elements in the resulting iterator.
*
* Doing this in a concurrency-safe way requires some trickery. In
* particular, we want this stream to return the elements from the
* underlying streams in the correct order according to when next() was
* called, even if the resulting Promises resolve in a different order.
*
* @param iterators: An array or object containing LazyIterators at the
* leaves.
* @param mismatchMode: Determines what to do when one underlying iterator
* is exhausted before the others. `ZipMismatchMode.FAIL` (the default)
* causes an error to be thrown in this case. `ZipMismatchMode.SHORTEST`
* causes the zipped iterator to terminate with the first underlying
* stream, so elements remaining on the longer streams are ignored.
* `ZipMismatchMode.LONGEST` causes the zipped stream to continue, filling
* in nulls for the exhausted streams, until all streams are exhausted.
*/
class ZipIterator extends LazyIterator {
constructor(iterators, mismatchMode = ZipMismatchMode.FAIL) {
super();
this.iterators = iterators;
this.mismatchMode = mismatchMode;
this.count = 0;
this.currentPromise = null;
}
summary() {
const upstreamSummaries = 'TODO: fill in upstream of zip summaries';
return `{${upstreamSummaries}} -> Zip`;
}
async nextState(afterState) {
// This chaining ensures that the underlying next() calls are not even
// made before the previous ones have resolved.
await afterState;
// Collect underlying iterator "done" signals as a side effect in
// getNext()
let numIterators = 0;
let iteratorsDone = 0;
function getNext(container) {
if (container instanceof LazyIterator) {
const result = container.next();
return {
value: result.then(x => {
numIterators++;
if (x.done) {
iteratorsDone++;
}
return x.value;
}),
recurse: false
};
}
else {
return { value: null, recurse: true };
}
}
const mapped = await deepMapAndAwaitAll(this.iterators, getNext);
if (numIterators === iteratorsDone) {
// The streams have all ended.
return { value: null, done: true };
}
if (iteratorsDone > 0) {
switch (this.mismatchMode) {
case ZipMismatchMode.FAIL:
throw new Error('Zipped streams should have the same length. ' +
`Mismatched at element ${this.count}.`);
case ZipMismatchMode.SHORTEST:
return { value: null, done: true };
case ZipMismatchMode.LONGEST:
default:
// Continue. The exhausted streams already produced value: null.
}
}
this.count++;
return { value: mapped, done: false };
}
async next() {
this.currentPromise = this.nextState(this.currentPromise);
return this.currentPromise;
}
}
// Iterators that maintain a ring buffer of pending promises
// ============================================================================
/**
* A stream that prefetches a given number of items from an upstream source,
* returning them in FIFO order.
*
* Note this prefetches Promises, but makes no guarantees about when those
* Promises resolve.
*/
export class PrefetchIterator extends LazyIterator {
constructor(upstream, bufferSize) {
super();
this.upstream = upstream;
this.bufferSize = bufferSize;
this.buffer = new RingBuffer(bufferSize);
}
summary() {
return `${this.upstream.summary()} -> Prefetch`;
}
/**
* Refill the prefetch buffer. Returns only after the buffer is full, or
* the upstream source is exhausted.
*/
refill() {
while (!this.buffer.isFull()) {
const v = this.upstream.next();
this.buffer.push(v);
}
}
next() {
this.refill();
// This shift will never throw an error because the buffer is always
// full after a refill. If the stream is exhausted, the buffer will be
// full of Promises that will resolve to the end-of-stream signal.
return this.buffer.shift();
}
}
/**
* A stream that performs a sliding-window random shuffle on an upstream
* source. This is like a `PrefetchIterator` except that the items are
* returned in randomized order. Mixing naturally improves as the buffer
* size increases.
*/
export class ShuffleIterator extends PrefetchIterator {
constructor(upstream, windowSize, seed) {
super(upstream, windowSize);
this.upstream = upstream;
this.windowSize = windowSize;
// Local state that should not be clobbered by out-of-order execution.
this.upstreamExhausted = false;
this.random = seedrandom.alea(seed || tf.util.now().toString());
this.lastRead = Promise.resolve({ value: null, done: false });
}
async next() {
// This sets this.lastRead to a new Promise right away, as opposed to
// saying `await this.lastRead; this.lastRead = this.serialNext();` which
// would not work because this.lastRead would be updated only after the
// promise resolves.
this.lastRead = this.lastRead.then(() => this.serialNext());
return this.lastRead;
}
randomInt(max) {
return Math.floor(this.random() * max);
}
chooseIndex() {
return this.randomInt(this.buffer.length());
}
async serialNext() {
// TODO(soergel): consider performance
if (!this.upstreamExhausted) {
this.refill();
}
while (!this.buffer.isEmpty()) {
const chosenIndex = this.chooseIndex();
const result = await this.buffer.shuffleExcise(chosenIndex);
if (result.done) {
this.upstreamExhausted = true;
}
else {
this.refill();
return result;
}
}
return { value: null, done: true };
}
}