/**
 * effect — The missing standard library for TypeScript, for writing
 * production-grade software.
 */
import * as groupBy_ from "./internal/groupBy.js";
import * as internal from "./internal/stream.js";
/**
* @since 2.0.0
* @category symbols
*/
export const StreamTypeId = internal.StreamTypeId;
/**
* The default chunk size used by the various combinators and constructors of
* `Stream`.
*
* @since 2.0.0
* @category constants
*/
export const DefaultChunkSize = internal.DefaultChunkSize;
/**
* Collects each underlying Chunk of the stream into a new chunk, and emits it
* on each pull.
*
* @since 2.0.0
* @category utils
*/
export const accumulate = internal.accumulate;
/**
* Re-chunks the elements of the stream by accumulating each underlying chunk.
*
* @since 2.0.0
* @category utils
*/
export const accumulateChunks = internal.accumulateChunks;
/**
 * Creates a stream from a single value that will get cleaned up after the
 * stream is consumed.
 *
 * @example
 * ```ts
 * import { Console, Effect, Stream } from "effect"
 *
 * // Simulating File operations
 * const open = (filename: string) =>
 *   Effect.gen(function*() {
 *     yield* Console.log(`Opening ${filename}`)
 *     return {
 *       getLines: Effect.succeed(["Line 1", "Line 2", "Line 3"]),
 *       close: Console.log(`Closing ${filename}`)
 *     }
 *   })
 *
 * const stream = Stream.acquireRelease(
 *   open("file.txt"),
 *   (file) => file.close
 * ).pipe(Stream.flatMap((file) => file.getLines))
 *
 * // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
 * // Opening file.txt
 * // Closing file.txt
 * // { _id: 'Chunk', values: [ [ 'Line 1', 'Line 2', 'Line 3' ] ] }
 * ```
 *
 * @since 2.0.0
 * @category constructors
 */
export const acquireRelease = internal.acquireRelease;
/**
* Aggregates elements of this stream using the provided sink for as long as
* the downstream operators on the stream are busy.
*
* This operator divides the stream into two asynchronous "islands". Operators
* upstream of this operator run on one fiber, while downstream operators run
* on another. Whenever the downstream fiber is busy processing elements, the
* upstream fiber will feed elements into the sink until it signals
* completion.
*
* Any sink can be used here, but see `Sink.foldWeightedEffect` and
* `Sink.foldUntilEffect` for sinks that cover the common usecases.
*
* @since 2.0.0
* @category utils
*/
export const aggregate = internal.aggregate;
/**
* Like `aggregateWithinEither`, but only returns the `Right` results.
*
* @param sink A `Sink` used to perform the aggregation.
* @param schedule A `Schedule` used to signal when to stop the aggregation.
* @since 2.0.0
* @category utils
*/
export const aggregateWithin = internal.aggregateWithin;
/**
* Aggregates elements using the provided sink until it completes, or until
* the delay signalled by the schedule has passed.
*
* This operator divides the stream into two asynchronous islands. Operators
* upstream of this operator run on one fiber, while downstream operators run
* on another. Elements will be aggregated by the sink until the downstream
* fiber pulls the aggregated value, or until the schedule's delay has passed.
*
* Aggregated elements will be fed into the schedule to determine the delays
* between pulls.
*
* @param sink A `Sink` used to perform the aggregation.
* @param schedule A `Schedule` used to signal when to stop the aggregation.
* @since 2.0.0
* @category utils
*/
export const aggregateWithinEither = internal.aggregateWithinEither;
/**
* Maps the success values of this stream to the specified constant value.
*
* @example
* ```ts
* import { Effect, Stream } from "effect"
*
* const stream = Stream.range(1, 5).pipe(Stream.as(null))
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // { _id: 'Chunk', values: [ null, null, null, null, null ] }
* ```
*
* @since 2.0.0
* @category mapping
*/
export const as = internal.as;
const _async = internal._async;
export {
/**
 * Creates a stream from an asynchronous callback that can be called multiple
 * times. The optionality of the error type `E` in `Emit` can be used to
 * signal the end of the stream by setting it to `None`.
 *
 * The registration function can optionally return an `Effect`, which will be
 * executed if the `Fiber` executing this Effect is interrupted.
 *
 * @example
 * ```ts
 * import type { StreamEmit } from "effect"
 * import { Chunk, Effect, Option, Stream } from "effect"
 *
 * const events = [1, 2, 3, 4]
 *
 * const stream = Stream.async(
 *   (emit: StreamEmit.Emit<never, never, number, void>) => {
 *     events.forEach((n) => {
 *       setTimeout(() => {
 *         if (n === 3) {
 *           emit(Effect.fail(Option.none())) // Terminate the stream
 *         } else {
 *           emit(Effect.succeed(Chunk.of(n))) // Add the current item to the stream
 *         }
 *       }, 100 * n)
 *     })
 *   }
 * )
 *
 * // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
 * // { _id: 'Chunk', values: [ 1, 2 ] }
 * ```
 *
 * @since 2.0.0
 * @category constructors
 */
_async as async };
/**
 * Creates a stream from an asynchronous callback that can be called multiple
 * times. The registration of the callback itself returns an effect. The
 * optionality of the error type `E` can be used to signal the end of the
 * stream, by setting it to `None`.
*
* @since 2.0.0
* @category constructors
*/
export const asyncEffect = internal.asyncEffect;
/**
* Creates a stream from an external push-based resource.
*
* You can use the `emit` helper to emit values to the stream. The `emit` helper
* returns a boolean indicating whether the value was emitted or not.
*
* You can also use the `emit` helper to signal the end of the stream by
* using apis such as `emit.end` or `emit.fail`.
*
* By default it uses an "unbounded" buffer size.
* You can customize the buffer size and strategy by passing an object as the
* second argument with the `bufferSize` and `strategy` fields.
*
* @example
* ```ts
* import { Effect, Stream } from "effect"
*
 * Stream.asyncPush<string>((emit) =>
 *   Effect.acquireRelease(
 *     Effect.gen(function*() {
 *       yield* Effect.log("subscribing")
 *       return setInterval(() => emit.single("tick"), 1000)
 *     }),
 *     (handle) =>
 *       Effect.gen(function*() {
 *         yield* Effect.log("unsubscribing")
 *         clearInterval(handle)
 *       })
 *   ), { bufferSize: 16, strategy: "dropping" })
* ```
*
* @since 3.6.0
* @category constructors
*/
export const asyncPush = internal.asyncPush;
/**
 * Creates a stream from an asynchronous callback that can be called multiple
 * times. The registration of the callback itself returns a scoped
 * resource. The optionality of the error type `E` can be used to signal the
 * end of the stream, by setting it to `None`.
*
* @since 2.0.0
* @category constructors
*/
export const asyncScoped = internal.asyncScoped;
/**
* Returns a `Stream` that first collects `n` elements from the input `Stream`,
* and then creates a new `Stream` using the specified function, and sends all
* the following elements through that.
*
* @since 2.0.0
* @category sequencing
*/
export const branchAfter = internal.branchAfter;
/**
* Fan out the stream, producing a list of streams that have the same elements
* as this stream. The driver stream will only ever advance the `maximumLag`
* chunks before the slowest downstream stream.
*
* @example
* ```ts
* import { Console, Effect, Fiber, Schedule, Stream } from "effect"
*
 * const numbers = Effect.scoped(
 *   Stream.range(1, 20).pipe(
 *     Stream.tap((n) => Console.log(`Emit ${n} element before broadcasting`)),
 *     Stream.broadcast(2, 5),
 *     Stream.flatMap(([first, second]) =>
 *       Effect.gen(function*() {
 *         const fiber1 = yield* Stream.runFold(first, 0, (acc, e) => Math.max(acc, e)).pipe(
 *           Effect.andThen((max) => Console.log(`Maximum: ${max}`)),
 *           Effect.fork
 *         )
 *         const fiber2 = yield* second.pipe(
 *           Stream.schedule(Schedule.spaced("1 second")),
 *           Stream.runForEach((n) => Console.log(`Logging to the Console: ${n}`)),
 *           Effect.fork
 *         )
 *         yield* Fiber.join(fiber1).pipe(
 *           Effect.zip(Fiber.join(fiber2), { concurrent: true })
 *         )
 *       })
 *     ),
 *     Stream.runCollect
 *   )
 * )
*
* // Effect.runPromise(numbers).then(console.log)
* // Emit 1 element before broadcasting
* // Emit 2 element before broadcasting
* // Emit 3 element before broadcasting
* // Emit 4 element before broadcasting
* // Emit 5 element before broadcasting
* // Emit 6 element before broadcasting
* // Emit 7 element before broadcasting
* // Emit 8 element before broadcasting
* // Emit 9 element before broadcasting
* // Emit 10 element before broadcasting
* // Emit 11 element before broadcasting
* // Logging to the Console: 1
* // Logging to the Console: 2
* // Logging to the Console: 3
* // Logging to the Console: 4
* // Logging to the Console: 5
* // Emit 12 element before broadcasting
* // Emit 13 element before broadcasting
* // Emit 14 element before broadcasting
* // Emit 15 element before broadcasting
* // Emit 16 element before broadcasting
* // Logging to the Console: 6
* // Logging to the Console: 7
* // Logging to the Console: 8
* // Logging to the Console: 9
* // Logging to the Console: 10
* // Emit 17 element before broadcasting
* // Emit 18 element before broadcasting
* // Emit 19 element before broadcasting
* // Emit 20 element before broadcasting
* // Logging to the Console: 11
* // Logging to the Console: 12
* // Logging to the Console: 13
* // Logging to the Console: 14
* // Logging to the Console: 15
* // Maximum: 20
* // Logging to the Console: 16
* // Logging to the Console: 17
* // Logging to the Console: 18
* // Logging to the Console: 19
* // Logging to the Console: 20
* // { _id: 'Chunk', values: [ undefined ] }
* ```
*
* @since 2.0.0
* @category utils
*/
export const broadcast = internal.broadcast;
/**
* Returns a new Stream that multicasts the original Stream, subscribing to it as soon as the first consumer subscribes.
* As long as there is at least one consumer, the upstream will continue running and emitting data.
* When all consumers have exited, the upstream will be finalized.
*
* @since 3.8.0
* @category utils
*/
export const share = internal.share;
/**
* Fan out the stream, producing a dynamic number of streams that have the
* same elements as this stream. The driver stream will only ever advance the
* `maximumLag` chunks before the slowest downstream stream.
*
* @since 2.0.0
* @category utils
*/
export const broadcastDynamic = internal.broadcastDynamic;
/**
* Converts the stream to a scoped list of queues. Every value will be
* replicated to every queue with the slowest queue being allowed to buffer
* `maximumLag` chunks before the driver is back pressured.
*
* Queues can unsubscribe from upstream by shutting down.
*
* @since 2.0.0
* @category utils
*/
export const broadcastedQueues = internal.broadcastedQueues;
/**
* Converts the stream to a scoped dynamic amount of queues. Every chunk will
* be replicated to every queue with the slowest queue being allowed to buffer
* `maximumLag` chunks before the driver is back pressured.
*
* Queues can unsubscribe from upstream by shutting down.
*
* @since 2.0.0
* @category utils
*/
export const broadcastedQueuesDynamic = internal.broadcastedQueuesDynamic;
/**
* Allows a faster producer to progress independently of a slower consumer by
* buffering up to `capacity` elements in a queue.
*
* Note: This combinator destroys the chunking structure. It's recommended to
* use rechunk afterwards. Additionally, prefer capacities that are powers
* of 2 for better performance.
*
* @example
* ```ts
* import { Console, Effect, Schedule, Stream } from "effect"
*
 * const stream = Stream.range(1, 10).pipe(
 *   Stream.tap((n) => Console.log(`before buffering: ${n}`)),
 *   Stream.buffer({ capacity: 4 }),
 *   Stream.tap((n) => Console.log(`after buffering: ${n}`)),
 *   Stream.schedule(Schedule.spaced("5 seconds"))
 * )
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // before buffering: 1
* // before buffering: 2
* // before buffering: 3
* // before buffering: 4
* // before buffering: 5
* // before buffering: 6
* // after buffering: 1
* // after buffering: 2
* // before buffering: 7
* // after buffering: 3
* // before buffering: 8
* // after buffering: 4
* // before buffering: 9
* // after buffering: 5
* // before buffering: 10
* // ...
* ```
*
* @since 2.0.0
* @category utils
*/
export const buffer = internal.buffer;
/**
* Allows a faster producer to progress independently of a slower consumer by
* buffering up to `capacity` chunks in a queue.
*
* @note Prefer capacities that are powers of 2 for better performance.
* @since 2.0.0
* @category utils
*/
export const bufferChunks = internal.bufferChunks;
/**
* Switches over to the stream produced by the provided function in case this
* one fails with a typed error.
*
* @since 2.0.0
* @category error handling
*/
export const catchAll = internal.catchAll;
/**
* Switches over to the stream produced by the provided function in case this
* one fails. Allows recovery from all causes of failure, including
* interruption if the stream is uninterruptible.
*
* @since 2.0.0
* @category error handling
*/
export const catchAllCause = internal.catchAllCause;
/**
* Switches over to the stream produced by the provided function in case this
* one fails with some typed error.
*
* @since 2.0.0
* @category error handling
*/
export const catchSome = internal.catchSome;
/**
* Switches over to the stream produced by the provided function in case this
* one fails with an error matching the given `_tag`.
*
* @since 2.0.0
* @category error handling
*/
export const catchTag = internal.catchTag;
/**
* Switches over to the stream produced by one of the provided functions, in
* case this one fails with an error matching one of the given `_tag`'s.
*
* @since 2.0.0
* @category error handling
*/
export const catchTags = internal.catchTags;
/**
* Switches over to the stream produced by the provided function in case this
* one fails with some errors. Allows recovery from all causes of failure,
* including interruption if the stream is uninterruptible.
*
* @since 2.0.0
* @category error handling
*/
export const catchSomeCause = internal.catchSomeCause;
/**
* Returns a new stream that only emits elements that are not equal to the
* previous element emitted, using natural equality to determine whether two
* elements are equal.
*
* @example
* ```ts
* import { Effect, Stream } from "effect"
*
* const stream = Stream.make(1, 1, 1, 2, 2, 3, 4).pipe(Stream.changes)
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // { _id: 'Chunk', values: [ 1, 2, 3, 4 ] }
* ```
*
* @since 2.0.0
* @category utils
*/
export const changes = internal.changes;
/**
* Returns a new stream that only emits elements that are not equal to the
* previous element emitted, using the specified function to determine whether
* two elements are equal.
*
* @since 2.0.0
* @category utils
*/
export const changesWith = internal.changesWith;
/**
* Returns a new stream that only emits elements that are not equal to the
* previous element emitted, using the specified effectual function to
* determine whether two elements are equal.
*
* @since 2.0.0
* @category utils
*/
export const changesWithEffect = internal.changesWithEffect;
/**
* Exposes the underlying chunks of the stream as a stream of chunks of
* elements.
*
* @since 2.0.0
* @category utils
*/
export const chunks = internal.chunks;
/**
* Performs the specified stream transformation with the chunk structure of
* the stream exposed.
*
* @since 2.0.0
* @category utils
*/
export const chunksWith = internal.chunksWith;
/**
* Combines the elements from this stream and the specified stream by
* repeatedly applying the function `f` to extract an element using both sides
* and conceptually "offer" it to the destination stream. `f` can maintain
* some internal state to control the combining process, with the initial
* state being specified by `s`.
*
* Where possible, prefer `Stream.combineChunks` for a more efficient
* implementation.
*
* @since 2.0.0
* @category utils
*/
export const combine = internal.combine;
/**
* Combines the chunks from this stream and the specified stream by repeatedly
* applying the function `f` to extract a chunk using both sides and
* conceptually "offer" it to the destination stream. `f` can maintain some
* internal state to control the combining process, with the initial state
* being specified by `s`.
*
* @since 2.0.0
* @category utils
*/
export const combineChunks = internal.combineChunks;
/**
* Concatenates the specified stream with this stream, resulting in a stream
* that emits the elements from this stream and then the elements from the
* specified stream.
*
* @example
* ```ts
* import { Effect, Stream } from "effect"
*
* const s1 = Stream.make(1, 2, 3)
* const s2 = Stream.make(4, 5)
*
* const stream = Stream.concat(s1, s2)
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // { _id: 'Chunk', values: [ 1, 2, 3, 4, 5 ] }
* ```
*
* @since 2.0.0
* @category utils
*/
export const concat = internal.concat;
/**
* Concatenates all of the streams in the chunk to one stream.
*
* @example
* ```ts
* import { Chunk, Effect, Stream } from "effect"
*
* const s1 = Stream.make(1, 2, 3)
* const s2 = Stream.make(4, 5)
* const s3 = Stream.make(6, 7, 8)
*
* const stream = Stream.concatAll(Chunk.make(s1, s2, s3))
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // {
* // _id: 'Chunk',
* // values: [
* // 1, 2, 3, 4,
* // 5, 6, 7, 8
* // ]
* // }
* ```
*
* @since 2.0.0
* @category constructors
*/
export const concatAll = internal.concatAll;
/**
* Composes this stream with the specified stream to create a cartesian
* product of elements. The `right` stream would be run multiple times, for
* every element in the `left` stream.
*
* See also `Stream.zip` for the more common point-wise variant.
*
* @example
* ```ts
* import { Effect, Stream } from "effect"
*
* const s1 = Stream.make(1, 2, 3)
* const s2 = Stream.make("a", "b")
*
* const product = Stream.cross(s1, s2)
*
* // Effect.runPromise(Stream.runCollect(product)).then(console.log)
* // {
* // _id: "Chunk",
* // values: [
* // [ 1, "a" ], [ 1, "b" ], [ 2, "a" ], [ 2, "b" ], [ 3, "a" ], [ 3, "b" ]
* // ]
* // }
* ```
*
* @since 2.0.0
* @category utils
*/
export const cross = internal.cross;
/**
* Composes this stream with the specified stream to create a cartesian
* product of elements, but keeps only elements from `left` stream. The `right`
* stream would be run multiple times, for every element in the `left` stream.
*
* See also `Stream.zipLeft` for the more common point-wise variant.
*
* @since 2.0.0
* @category utils
*/
export const crossLeft = internal.crossLeft;
/**
* Composes this stream with the specified stream to create a cartesian
* product of elements, but keeps only elements from the `right` stream. The
* `left` stream would be run multiple times, for every element in the `right`
* stream.
*
* See also `Stream.zipRight` for the more common point-wise variant.
*
* @since 2.0.0
* @category utils
*/
export const crossRight = internal.crossRight;
/**
* Composes this stream with the specified stream to create a cartesian
* product of elements with a specified function. The `right` stream would be
* run multiple times, for every element in the `left` stream.
*
* See also `Stream.zipWith` for the more common point-wise variant.
*
* @since 2.0.0
* @category utils
*/
export const crossWith = internal.crossWith;
/**
* Delays the emission of values by holding new values for a set duration. If
* no new values arrive during that time the value is emitted, however if a
* new value is received during the holding period the previous value is
* discarded and the process is repeated with the new value.
*
* This operator is useful if you have a stream of "bursty" events which
* eventually settle down and you only need the final event of the burst. For
* example, a search engine may only want to initiate a search after a user
* has paused typing so as to not prematurely recommend results.
*
* @example
* ```ts
* import { Effect, Stream } from "effect"
*
 * let last = Date.now()
 * const log = (message: string) =>
 *   Effect.sync(() => {
 *     const end = Date.now()
 *     console.log(`${message} after ${end - last}ms`)
 *     last = end
 *   })
 *
 * const stream = Stream.make(1, 2, 3).pipe(
 *   Stream.concat(
 *     Stream.fromEffect(Effect.sleep("200 millis").pipe(Effect.as(4))) // Emit 4 after 200 ms
 *   ),
 *   Stream.concat(Stream.make(5, 6)), // Continue with more rapid values
 *   Stream.concat(
 *     Stream.fromEffect(Effect.sleep("150 millis").pipe(Effect.as(7))) // Emit 7 after 150 ms
 *   ),
 *   Stream.concat(Stream.make(8)),
 *   Stream.tap((n) => log(`Received ${n}`)),
 *   Stream.debounce("100 millis"), // Only emit values after a pause of at least 100 milliseconds
 *   Stream.tap((n) => log(`> Emitted ${n}`))
 * )
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // Received 1 after 5ms
* // Received 2 after 2ms
* // Received 3 after 0ms
* // > Emitted 3 after 104ms
* // Received 4 after 99ms
* // Received 5 after 1ms
* // Received 6 after 0ms
* // > Emitted 6 after 101ms
* // Received 7 after 50ms
* // Received 8 after 1ms
* // > Emitted 8 after 101ms
* // { _id: 'Chunk', values: [ 3, 6, 8 ] }
* ```
*
* @since 2.0.0
* @category utils
*/
export const debounce = internal.debounce;
/**
* The stream that dies with the specified defect.
*
* @since 2.0.0
* @category constructors
*/
export const die = internal.die;
/**
* The stream that dies with the specified lazily evaluated defect.
*
* @since 2.0.0
* @category constructors
*/
export const dieSync = internal.dieSync;
/**
* The stream that dies with an exception described by `message`.
*
* @since 2.0.0
* @category constructors
*/
export const dieMessage = internal.dieMessage;
/**
* More powerful version of `Stream.broadcast`. Allows to provide a function
* that determines what queues should receive which elements. The decide
* function will receive the indices of the queues in the resulting list.
*
* @since 2.0.0
* @category utils
*/
export const distributedWith = internal.distributedWith;
/**
* More powerful version of `Stream.distributedWith`. This returns a function
* that will produce new queues and corresponding indices. You can also
* provide a function that will be executed after the final events are
* enqueued in all queues. Shutdown of the queues is handled by the driver.
* Downstream users can also shutdown queues manually. In this case the driver
* will continue but no longer backpressure on them.
*
* @since 2.0.0
* @category utils
*/
export const distributedWithDynamic = internal.distributedWithDynamic;
/**
* Converts this stream to a stream that executes its effects but emits no
* elements. Useful for sequencing effects using streams:
*
* @example
* ```ts
* import { Effect, Stream } from "effect"
*
* // We create a stream and immediately drain it.
* const stream = Stream.range(1, 6).pipe(Stream.drain)
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // { _id: 'Chunk', values: [] }
* ```
*
* @since 2.0.0
* @category utils
*/
export const drain = internal.drain;
/**
* Drains the provided stream in the background for as long as this stream is
* running. If this stream ends before `other`, `other` will be interrupted.
* If `other` fails, this stream will fail with that error.
*
* @since 2.0.0
* @category utils
*/
export const drainFork = internal.drainFork;
/**
* Drops the specified number of elements from this stream.
*
* @since 2.0.0
* @category utils
*/
export const drop = internal.drop;
/**
* Drops the last specified number of elements from this stream.
*
* @note This combinator keeps `n` elements in memory. Be careful with big
* numbers.
* @since 2.0.0
* @category utils
*/
export const dropRight = internal.dropRight;
/**
* Drops all elements of the stream until the specified predicate evaluates to
* `true`.
*
* @since 2.0.0
* @category utils
*/
export const dropUntil = internal.dropUntil;
/**
* Drops all elements of the stream until the specified effectful predicate
* evaluates to `true`.
*
* @since 2.0.0
* @category utils
*/
export const dropUntilEffect = internal.dropUntilEffect;
/**
* Drops all elements of the stream for as long as the specified predicate
* evaluates to `true`.
*
* @since 2.0.0
* @category utils
*/
export const dropWhile = internal.dropWhile;
/**
 * Drops all elements of the stream for as long as the specified predicate
 * produces an effect that evaluates to `true`.
*
* @since 2.0.0
* @category utils
*/
export const dropWhileEffect = internal.dropWhileEffect;
/**
* Returns a stream whose failures and successes have been lifted into an
* `Either`. The resulting stream cannot fail, because the failures have been
* exposed as part of the `Either` success case.
*
* @note The stream will end as soon as the first error occurs.
*
* @since 2.0.0
* @category utils
*/
export const either = internal.either;
/**
* The empty stream.
*
* @example
* ```ts
* import { Effect, Stream } from "effect"
*
* const stream = Stream.empty
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // { _id: 'Chunk', values: [] }
* ```
*
* @since 2.0.0
* @category constructors
*/
export const empty = internal.empty;
/**
* Executes the provided finalizer after this stream's finalizers run.
*
* @example
* ```ts
* import { Console, Effect, Stream } from "effect"
*
 * const program = Stream.fromEffect(Console.log("Application Logic.")).pipe(
 *   Stream.concat(Stream.finalizer(Console.log("Finalizing the stream"))),
 *   Stream.ensuring(
 *     Console.log("Doing some other works after stream's finalization")
 *   )
 * )
*
* // Effect.runPromise(Stream.runCollect(program)).then(console.log)
* // Application Logic.
* // Finalizing the stream
* // Doing some other works after stream's finalization
* // { _id: 'Chunk', values: [ undefined, undefined ] }
* ```
*
* @since 2.0.0
* @category utils
*/
export const ensuring = internal.ensuring;
/**
* Executes the provided finalizer after this stream's finalizers run.
*
* @since 2.0.0
* @category utils
*/
export const ensuringWith = internal.ensuringWith;
/**
* Accesses the whole context of the stream.
*
* @since 2.0.0
* @category context
*/
export const context = internal.context;
/**
* Accesses the context of the stream.
*
* @since 2.0.0
* @category context
*/
export const contextWith = internal.contextWith;
/**
* Accesses the context of the stream in the context of an effect.
*
* @since 2.0.0
* @category context
*/
export const contextWithEffect = internal.contextWithEffect;
/**
* Accesses the context of the stream in the context of a stream.
*
* @since 2.0.0
* @category context
*/
export const contextWithStream = internal.contextWithStream;
/**
* Creates a stream that executes the specified effect but emits no elements.
*
* @since 2.0.0
* @category constructors
*/
export const execute = internal.execute;
/**
* Terminates with the specified error.
*
* @example
* ```ts
* import { Effect, Stream } from "effect"
*
* const stream = Stream.fail("Uh oh!")
*
 * // Effect.runPromiseExit(Stream.runCollect(stream)).then(console.log)
* // {
* // _id: 'Exit',
* // _tag: 'Failure',
* // cause: { _id: 'Cause', _tag: 'Fail', failure: 'Uh oh!' }
* // }
* ```
*
* @since 2.0.0
* @category constructors
*/
export const fail = internal.fail;
/**
* Terminates with the specified lazily evaluated error.
*
* @since 2.0.0
* @category constructors
*/
export const failSync = internal.failSync;
/**
* The stream that always fails with the specified `Cause`.
*
* @since 2.0.0
* @category constructors
*/
export const failCause = internal.failCause;
/**
* The stream that always fails with the specified lazily evaluated `Cause`.
*
* @since 2.0.0
* @category constructors
*/
export const failCauseSync = internal.failCauseSync;
/**
* Filters the elements emitted by this stream using the provided function.
*
* @example
* ```ts
* import { Effect, Stream } from "effect"
*
* const stream = Stream.range(1, 11).pipe(Stream.filter((n) => n % 2 === 0))
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // { _id: 'Chunk', values: [ 2, 4, 6, 8, 10 ] }
* ```
*
* @since 2.0.0
* @category filtering
*/
export const filter = internal.filter;
/**
* Effectfully filters the elements emitted by this stream.
*
* @since 2.0.0
* @category filtering
*/
export const filterEffect = internal.filterEffect;
/**
* Performs a filter and map in a single step.
*
* @since 2.0.0
* @category utils
*/
export const filterMap = internal.filterMap;
/**
* Performs an effectful filter and map in a single step.
*
* @since 2.0.0
* @category utils
*/
export const filterMapEffect = internal.filterMapEffect;
/**
* Transforms all elements of the stream for as long as the specified partial
* function is defined.
*
* @since 2.0.0
* @category utils
*/
export const filterMapWhile = internal.filterMapWhile;
/**
* Effectfully transforms all elements of the stream for as long as the
* specified partial function is defined.
*
* @since 2.0.0
* @category utils
*/
export const filterMapWhileEffect = internal.filterMapWhileEffect;
/**
* Creates a one-element stream that never fails and executes the finalizer
* when it ends.
*
* @example
* ```ts
* import { Console, Effect, Stream } from "effect"
*
* const application = Stream.fromEffect(Console.log("Application Logic."))
*
* const deleteDir = (dir: string) => Console.log(`Deleting dir: ${dir}`)
*
 * const program = application.pipe(
 *   Stream.concat(
 *     Stream.finalizer(
 *       deleteDir("tmp").pipe(
 *         Effect.andThen(Console.log("Temporary directory was deleted."))
 *       )
 *     )
 *   )
 * )
*
* // Effect.runPromise(Stream.runCollect(program)).then(console.log)
* // Application Logic.
* // Deleting dir: tmp
* // Temporary directory was deleted.
* // { _id: 'Chunk', values: [ undefined, undefined ] }
* ```
*
* @since 2.0.0
* @category constructors
*/
export const finalizer = internal.finalizer;
/**
* Finds the first element emitted by this stream that satisfies the provided
* predicate.
*
* @since 2.0.0
* @category elements
*/
export const find = internal.find;
/**
* Finds the first element emitted by this stream that satisfies the provided
* effectful predicate.
*
* @since 2.0.0
* @category elements
*/
export const findEffect = internal.findEffect;
/**
* Returns a stream made of the concatenation in strict order of all the
* streams produced by passing each element of this stream to `f0`
*
* @since 2.0.0
* @category sequencing
*/
export const flatMap = internal.flatMap;
/**
* Flattens this stream-of-streams into a stream made of the concatenation in
* strict order of all the streams.
*
* @since 2.0.0
* @category sequencing
*/
export const flatten = internal.flatten;
/**
* Submerges the chunks carried by this stream into the stream's structure,
* while still preserving them.
*
* @since 2.0.0
* @category sequencing
*/
export const flattenChunks = internal.flattenChunks;
/**
* Flattens `Effect` values into the stream's structure, preserving all
* information about the effect.
*
* @since 2.0.0
* @category sequencing
*/
export const flattenEffect = internal.flattenEffect;
/**
* Unwraps `Exit` values that also signify end-of-stream by failing with `None`.
*
 * For `Exit` values that do not signal end-of-stream, prefer (an `Exit` is
 * itself an `Effect`, so it can be flattened with `mapEffect`):
 *
 * ```ts
 * stream.pipe(Stream.mapEffect((exit) => exit))
 * ```
*
* @since 2.0.0
* @category sequencing
*/
export const flattenExitOption = internal.flattenExitOption;
/**
* Submerges the iterables carried by this stream into the stream's structure,
* while still preserving them.
*
* @since 2.0.0
* @category sequencing
*/
export const flattenIterables = internal.flattenIterables;
/**
* Unwraps `Exit` values and flatten chunks that also signify end-of-stream
* by failing with `None`.
*
* @since 2.0.0
* @category sequencing
*/
export const flattenTake = internal.flattenTake;
/**
* Repeats this stream forever.
*
* @since 2.0.0
* @category utils
*/
export const forever = internal.forever;
/**
* Creates a stream from an `AsyncIterable`.
*
* @example
* ```ts
* import { Effect, Stream } from "effect"
*
 * const myAsyncIterable = async function*() {
 *   yield 1
 *   yield 2
 * }
 *
 * const stream = Stream.fromAsyncIterable(
 *   myAsyncIterable(),
 *   (e) => new Error(String(e)) // Error Handling
 * )
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // { _id: 'Chunk', values: [ 1, 2 ] }
* ```
*
* @since 2.0.0
* @category constructors
*/
export const fromAsyncIterable = internal.fromAsyncIterable;
/**
* Creates a stream from a `Channel`.
*
* @since 2.0.0
* @category constructors
*/
export const fromChannel = internal.fromChannel;
/**
* Creates a channel from a `Stream`.
*
* @since 2.0.0
* @category constructors
*/
export const toChannel = internal.toChannel;
/**
* Creates a stream from a `Chunk` of values.
*
* @example
* ```ts
* import { Chunk, Effect, Stream } from "effect"
*
* // Creating a stream with values from a single Chunk
* const stream = Stream.fromChunk(Chunk.make(1, 2, 3))
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // { _id: 'Chunk', values: [ 1, 2, 3 ] }
* ```
*
* @since 2.0.0
* @category constructors
*/
export const fromChunk = internal.fromChunk;
/**
* Creates a stream from a subscription to a `PubSub`.
*
* @param shutdown If `true`, the `PubSub` will be shutdown after the stream is evaluated (defaults to `false`)
* @since 2.0.0
* @category constructors
*/
export const fromChunkPubSub = internal.fromChunkPubSub;
/**
* Creates a stream from a `Queue` of values.
*
* @param shutdown If `true`, the queue will be shutdown after the stream is evaluated (defaults to `false`)
* @since 2.0.0
* @category constructors
*/
export const fromChunkQueue = internal.fromChunkQueue;
/**
* Creates a stream from an arbitrary number of chunks.
*
* @example
* ```ts
* import { Chunk, Effect, Stream } from "effect"
*
* // Creating a stream with values from multiple Chunks
* const stream = Stream.fromChunks(Chunk.make(1, 2, 3), Chunk.make(4, 5, 6))
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // { _id: 'Chunk', values: [ 1, 2, 3, 4, 5, 6 ] }
* ```
*
* @since 2.0.0
* @category constructors
*/
export const fromChunks = internal.fromChunks;
/**
* Either emits the success value of this effect or terminates the stream
* with the failure value of this effect.
*
* @example
* ```ts
* import { Effect, Random, Stream } from "effect"
*
* const stream = Stream.fromEffect(Random.nextInt)
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // Example Output: { _id: 'Chunk', values: [ 922694024 ] }
* ```
*
* @since 2.0.0
* @category constructors
*/
export const fromEffect = internal.fromEffect;
/**
* Creates a stream from an effect producing a value of type `A` or an empty
* `Stream`.
*
* @since 2.0.0
* @category constructors
*/
export const fromEffectOption = internal.fromEffectOption;
/**
* Creates a stream from a subscription to a `PubSub`.
*
* @param shutdown If `true`, the `PubSub` will be shut down after the stream is evaluated (defaults to `false`)
* @since 2.0.0
* @category constructors
*/
export const fromPubSub = internal.fromPubSub;
/**
* Creates a stream from a subscription to a `TPubSub`.
*
* @since 3.10.0
* @category constructors
*/
export const fromTPubSub = internal.fromTPubSub;
/**
* Creates a new `Stream` from an iterable collection of values.
*
* @example
* ```ts
* import { Effect, Stream } from "effect"
*
* const numbers = [1, 2, 3]
*
* const stream = Stream.fromIterable(numbers)
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // { _id: 'Chunk', values: [ 1, 2, 3 ] }
* ```
*
* @since 2.0.0
* @category constructors
*/
export const fromIterable = internal.fromIterable;
/**
* Creates a stream from an effect producing a value of type `Iterable<A>`.
*
* @example
* ```ts
* import { Context, Effect, Stream } from "effect"
*
* class Database extends Context.Tag("Database")<
* Database,
* { readonly getUsers: Effect.Effect<Array<string>> }
* >() {}
*
* const getUsers = Database.pipe(Effect.andThen((_) => _.getUsers))
*
* const stream = Stream.fromIterableEffect(getUsers)
*
* // Effect.runPromise(
* // Stream.runCollect(stream.pipe(Stream.provideService(Database, { getUsers: Effect.succeed(["user1", "user2"]) })))
* // ).then(console.log)
* // { _id: 'Chunk', values: [ 'user1', 'user2' ] }
* ```
*
* @since 2.0.0
* @category constructors
*/
export const fromIterableEffect = internal.fromIterableEffect;
/**
* Creates a stream from an iterator.
*
* @since 2.0.0
* @category constructors
*/
export const fromIteratorSucceed = internal.fromIteratorSucceed;
/**
* Creates a stream from an effect that pulls elements from another stream.
*
* See `Stream.toPull` for reference.
*
* @since 2.0.0
* @category constructors
*/
export const fromPull = internal.fromPull;
/**
* Creates a stream from a queue of values
*
* @param maxChunkSize The maximum number of queued elements to put in one chunk in the stream
* @param shutdown If `true`, the queue will be shut down after the stream is evaluated (defaults to `false`)
* @since 2.0.0
* @category constructors
*/
export const fromQueue = internal.fromQueue;
/**
* Creates a stream from a TQueue of values
*
* @since 3.10.0
* @category constructors
*/
export const fromTQueue = internal.fromTQueue;
/**
* Creates a stream from a `ReadableStream`.
*
* See https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream.
*
* @since 2.0.0
* @category constructors
*/
export const fromReadableStream = internal.fromReadableStream;
/**
* Creates a stream from a `ReadableStreamBYOBReader`.
*
* See https://developer.mozilla.org/en-US/docs/Web/API/ReadableStreamBYOBReader.
*
* @param allocSize Controls the size of the underlying `ArrayBuffer` (defaults to `4096`).
* @since 2.0.0
* @category constructors
*/
export const fromReadableStreamByob = internal.fromReadableStreamByob;
/**
* Creates a stream from a `Schedule` that does not require any further
* input. The stream will emit an element for each value output from the
* schedule, continuing for as long as the schedule continues.
*
* @example
* ```ts
* import { Effect, Schedule, Stream } from "effect"
*
* // Emits values every 1 second for a total of 5 emissions
* const schedule = Schedule.spaced("1 second").pipe(
* Schedule.compose(Schedule.recurs(5))
* )
*
* const stream = Stream.fromSchedule(schedule)
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // { _id: 'Chunk', values: [ 0, 1, 2, 3, 4 ] }
* ```
*
* @since 2.0.0
* @category constructors
*/
export const fromSchedule = internal.fromSchedule;
/**
* Creates a pipeline that groups on adjacent keys, calculated by the
* specified function.
*
* @since 2.0.0
* @category grouping
*/
export const groupAdjacentBy = internal.groupAdjacentBy;
/**
* More powerful version of `Stream.groupByKey`.
*
* @example
* ```ts
* import { Chunk, Effect, GroupBy, Stream } from "effect"
*
* const groupByKeyResult = Stream.fromIterable([
* "Mary",
* "James",
* "Robert",
* "Patricia",
* "John",
* "Jennifer",
* "Rebecca",
* "Peter"
* ]).pipe(
* Stream.groupBy((name) => Effect.succeed([name.substring(0, 1), name]))
* )
*
* const stream = GroupBy.evaluate(groupByKeyResult, (key, stream) =>
* Stream.fromEffect(
* Stream.runCollect(stream).pipe(
* Effect.andThen((chunk) => [key, Chunk.size(chunk)] as const)
* )
* ))
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // {
* // _id: 'Chunk',
* // values: [ [ 'M', 1 ], [ 'J', 3 ], [ 'R', 2 ], [ 'P', 2 ] ]
* // }
* ```
*
* @since 2.0.0
* @category grouping
*/
export const groupBy = groupBy_.groupBy;
/**
* Partition a stream using a function and process each stream individually.
* This returns a data structure that can be used to further filter down which
* groups shall be processed.
*
* After calling apply on the GroupBy object, the remaining groups will be
* processed in parallel and the resulting streams merged in a
* nondeterministic fashion.
*
* Up to `buffer` elements may be buffered in any group stream before the
* producer is backpressured. Take care to consume from all streams in order
* to prevent deadlocks.
*
* For example, to collect the first 2 words for every starting letter from a
* stream of words:
*
* ```ts
* import { GroupBy, Stream, pipe } from "effect"
*
* pipe(
* Stream.fromIterable(["hello", "world", "hi", "holla"]),
* Stream.groupByKey((word) => word[0]),
* GroupBy.evaluate((key, stream) =>
* pipe(
* stream,
* Stream.take(2),
* Stream.map((words) => [key, words] as const)
* )
* )
* )
* ```
*
* @since 2.0.0
* @category grouping
*/
export const groupByKey = groupBy_.groupByKey;
/**
* Partitions the stream with the specified `chunkSize`.
*
* @example
* ```ts
* import { Effect, Stream } from "effect"
*
* const stream = Stream.range(0, 8).pipe(Stream.grouped(3))
*
* // Effect.runPromise(Stream.runCollect(stream)).then((chunks) => console.log("%o", chunks))
* // {
* // _id: 'Chunk',
* // values: [
* // { _id: 'Chunk', values: [ 0, 1, 2, [length]: 3 ] },
* // { _id: 'Chunk', values: [ 3, 4, 5, [length]: 3 ] },
* // { _id: 'Chunk', values: [ 6, 7, 8, [length]: 3 ] },
* // [length]: 3
* // ]
* // }
* ```
*
* @since 2.0.0
* @category grouping
*/
export const grouped = internal.grouped;
/**
* Partitions the stream with the specified `chunkSize` or until the specified
* `duration` has passed, whichever is satisfied first.
*
* @example
* ```ts
* import { Chunk, Effect, Schedule, Stream } from "effect"
*
* const stream = Stream.range(0, 9).pipe(
* Stream.repeat(Schedule.spaced("1 second")),
* Stream.groupedWithin(18, "1.5 seconds"),
* Stream.take(3)
* )
*
* // Effect.runPromise(Stream.runCollect(stream)).then((chunks) => console.log(Chunk.toArray(chunks)))
* // [
* // {
* // _id: 'Chunk',
* // values: [
* // 0, 1, 2, 3, 4, 5, 6,
* // 7, 8, 9, 0, 1, 2, 3,
* // 4, 5, 6, 7
* // ]
* // },
* // {
* // _id: 'Chunk',
* // values: [
* // 8, 9, 0, 1, 2,
* // 3, 4, 5, 6, 7,
* // 8, 9
* // ]
* // },
* // {
* // _id: 'Chunk',
* // values: [
* // 0, 1, 2, 3, 4, 5, 6,
* // 7, 8, 9, 0, 1, 2, 3,
* // 4, 5, 6, 7
* // ]
* // }
* // ]
* ```
*
* @since 2.0.0
* @category grouping
*/
export const groupedWithin = internal.groupedWithin;
/**
* Specialized version of haltWhen which halts the evaluation of this stream
* after the given duration.
*
* An element in the process of being pulled will not be interrupted when the
* given duration completes. See `interruptAfter` for this behavior.
*
* @since 2.0.0
* @category utils
*/
export const haltAfter = internal.haltAfter;
/**
* Halts the evaluation of this stream when the provided effect completes. The
* given effect will be forked as part of the returned stream, and its success
* will be discarded.
*
* An element in the process of being pulled will not be interrupted when the
* effect completes. See `interruptWhen` for this behavior.
*
* If the effect completes with a failure, the stream will emit that failure.
*
* @since 2.0.0
* @category utils
*/
export const haltWhen = internal.haltWhen;
/**
* Halts the evaluation of this stream when the provided `Deferred` is
* completed.
*
* If the `Deferred` is completed with a failure, the stream will emit that
* failure.
*
* @since 2.0.0
* @category utils
*/
export const haltWhenDeferred = internal.haltWhenDeferred;
/**
* The identity pipeline, which does not modify streams in any way.
*
* @since 2.0.0
* @category utils
*/
export const identity = internal.identityStream;
/**
* Interleaves this stream and the specified stream deterministically by
* alternating pulling values from this stream and the specified stream. When
* one stream is exhausted all remaining values in the other stream will be
* pulled.
*
* @example
* ```ts
* import { Effect, Stream } from "effect"
*
* const s1 = Stream.make(1, 2, 3)
* const s2 = Stream.make(4, 5, 6)
*
* const stream = Stream.interleave(s1, s2)
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // { _id: 'Chunk', values: [ 1, 4, 2, 5, 3, 6 ] }
* ```
*
* @since 2.0.0
* @category utils
*/
export const interleave = internal.interleave;
/**
* Combines this stream and the specified stream deterministically using the
* stream of boolean values `pull` to control which stream to pull from next.
* A value of `true` indicates to pull from this stream and a value of `false`
* indicates to pull from the specified stream. Only consumes as many elements
* as requested by the `pull` stream. If either this stream or the specified
* stream are exhausted further requests for values from that stream will be
* ignored.
*
* @example
* ```ts
* import { Effect, Stream } from "effect"
*
* const s1 = Stream.make(1, 3, 5, 7, 9)
* const s2 = Stream.make(2, 4, 6, 8, 10)
*
* const booleanStream = Stream.make(true, false, false).pipe(Stream.forever)
*
* const stream = Stream.interleaveWith(s1, s2, booleanStream)
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // {
* // _id: 'Chunk',
* // values: [
* // 1, 2, 4, 3, 6,
* // 8, 5, 10, 7, 9
* // ]
* // }
* ```
*
* @since 2.0.0
* @category utils
*/
export const interleaveWith = internal.interleaveWith;
/**
* Intersperse stream with provided `element`.
*
* @example
* ```ts
* import { Effect, Stream } from "effect"
*
* const stream = Stream.make(1, 2, 3, 4, 5).pipe(Stream.intersperse(0))
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // {
* // _id: 'Chunk',
* // values: [
* // 1, 0, 2, 0, 3,
* // 0, 4, 0, 5
* // ]
* // }
* ```
*
* @since 2.0.0
* @category utils
*/
export const intersperse = internal.intersperse;
/**
* Intersperse the specified element, also adding a prefix and a suffix.
*
* @example
* ```ts
* import { Effect, Stream } from "effect"
*
* const stream = Stream.make(1, 2, 3, 4, 5).pipe(
* Stream.intersperseAffixes({
* start: "[",
* middle: "-",
* end: "]"
* })
* )
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // {
* // _id: 'Chunk',
* // values: [
* // '[', 1, '-', 2, '-',
* // 3, '-', 4, '-', 5,
* // ']'
* // ]
* // }
* ```
*
* @since 2.0.0
* @category utils
*/
export const intersperseAffixes = internal.intersperseAffixes;
/**
* Specialized version of `Stream.interruptWhen` which interrupts the
* evaluation of this stream after the given `Duration`.
*
* @since 2.0.0
* @category utils
*/
export const interruptAfter = internal.interruptAfter;
/**
* Interrupts the evaluation of this stream when the provided effect
* completes. The given effect will be forked as part of this stream, and its
* success will be discarded. This combinator will also interrupt any
* in-progress element being pulled from upstream.
*
* If the effect completes with a failure before the stream completes, the
* returned stream will emit that failure.
*
* @since 2.0.0
* @category utils
*/
export const interruptWhen = internal.interruptWhen;
/**
* Interrupts the evaluation of this stream when the provided `Deferred` is
* completed. This combinator will also interrupt any in-progress element
* being pulled from upstream.
*
* If the `Deferred` is completed with a failure, the stream will emit that
* failure.
*
* @since 2.0.0
* @category utils
*/
export const interruptWhenDeferred = internal.interruptWhenDeferred;
/**
* The infinite stream of iterative function application: a, f(a), f(f(a)),
* f(f(f(a))), ...
*
* @example
* ```ts
* import { Effect, Stream } from "effect"
*
* // An infinite Stream of numbers starting from 1 and incrementing
* const stream = Stream.iterate(1, (n) => n + 1)
*
* // Effect.runPromise(Stream.runCollect(stream.pipe(Stream.take(10)))).then(console.log)
* // { _id: 'Chunk', values: [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ] }
* ```
*
* @since 2.0.0
* @category constructors
*/
export const iterate = internal.iterate;
/**
* Creates a stream from a sequence of values.
*
* @example
* ```ts
* import { Effect, Stream } from "effect"
*
* const stream = Stream.make(1, 2, 3)
*
* // Effect.runPromise(Stream.runCollect(stream)).then(console.log)
* // { _id: 'Chunk', values: [ 1, 2, 3 ] }
* ```
*
* @since 2.0.0
* @category constructors
*/
export const make = internal.make;
/**
* Transforms the elements of this stream using the supplied function.