remove duplicate descriptions

Jerry Kurian 2019-09-09 15:58:35 -04:00
parent 7aeea4815a
commit 83ef6e9734
23 changed files with 21 additions and 129 deletions

View File

@@ -1,11 +1,6 @@
 import { Transform } from "stream";
 import { TransformOptions } from "./baseDefinitions";
-/**
- * Stores chunks of data internally in array and batches when batchSize is reached.
- *
- * @param batchSize Size of the batches
- * @param maxBatchAge Max lifetime of a batch in seconds
- */
 export function batch(
     batchSize: number = 1000,
     maxBatchAge: number = 500,
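
A minimal usage sketch (not part of the commit; the module path and flush-on-end behavior are assumptions):

import { Readable } from "stream";
import { batch } from "./batch";

Readable.from([1, 2, 3, 4, 5])
    .pipe(batch(2))
    .on("data", (group: number[]) => console.log(group)); // expected: [1, 2], [3, 4], then [5]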

View File

@@ -1,9 +1,6 @@
 import { ChildProcess } from "child_process";
 import { duplex } from "./baseFunctions";
-/**
- * Return a Duplex stream from a child process' stdin and stdout
- * @param childProcess Child process from which to create duplex stream
- */
 export function child(childProcess: ChildProcess) {
     if (childProcess.stdin === null) {
         throw new Error("childProcess.stdin is null");
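
A hypothetical round-trip through `cat`, assuming the module resolves as "./child":

import { spawn } from "child_process";
import { child } from "./child";

const proc = spawn("cat");
const through = child(proc); // writes go to cat's stdin, reads come from its stdout
through.on("data", chunk => console.log(chunk.toString()));
through.write("hello\n");
through.end();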

View File

@@ -1,10 +1,6 @@
 import { Transform } from "stream";
 import { ThroughOptions } from "./baseDefinitions";
-/**
- * Return a ReadWrite stream that collects streamed chunks into an array or buffer
- * @param options
- * @param options.objectMode Whether this stream should behave as a stream of objects
- */
 export function collect(
     options: ThroughOptions = { objectMode: false },
 ): Transform {
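
Sketch of collecting an object-mode stream into a single array (import path assumed):

import { Readable } from "stream";
import { collect } from "./collect";

Readable.from(["a", "b", "c"])
    .pipe(collect({ objectMode: true }))
    .on("data", (all: string[]) => console.log(all)); // expected: ["a", "b", "c"]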

View File

@@ -1,9 +1,5 @@
 import { pipeline, Duplex, DuplexOptions } from "stream";
-/**
- * Return a Readable stream of readable streams concatenated together
- * @param streams Readable streams to concatenate
- */
 export function compose(
     streams: Array<
         NodeJS.ReadableStream | NodeJS.ReadWriteStream | NodeJS.WritableStream
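
Per the re-exported doc comment later in this commit, writing occurs on the first stream and piping from the last. A sketch under that assumption (option shapes taken from the doc comments removed elsewhere in this commit):

import { Readable } from "stream";
import { compose } from "./compose";
import { filter } from "./filter";
import { map } from "./map";

// Fuse a filter and a map into a single ReadWrite stream.
const evensDoubled = compose([
    filter((n: number) => n % 2 === 0, { objectMode: true }),
    map((n: number) => n * 2),
]);
Readable.from([1, 2, 3, 4])
    .pipe(evensDoubled)
    .on("data", n => console.log(n)); // expected: 4, 8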

View File

@@ -1,8 +1,5 @@
 import { Readable } from "stream";
-/**
- * Return a Readable stream of readable streams concatenated together
- * @param streams Readable streams to concatenate
- */
 export function concat(...streams: NodeJS.ReadableStream[]): Readable {
     let isStarted = false;
     let currentStreamIndex = 0;
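
Sketch (import path assumed): sources are drained one after another, preserving order:

import { Readable } from "stream";
import { concat } from "./concat";

const first = Readable.from(["a", "b"]);
const second = Readable.from(["c"]);
concat(first, second).on("data", chunk => console.log(chunk)); // expected: a, b, c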

View File

@@ -22,11 +22,6 @@ const eventsTarget = {
     unpipe: EventSubscription.Unhandled,
 };
-/**
- * Return a Duplex stream that is pushed data from multiple sources
- * @param streams Source streams to multiplex
- * @param options Duplex stream options
- */
 export function demux(
     construct: () => NodeJS.WritableStream | NodeJS.ReadWriteStream,
     demuxBy: { key?: string; keyBy?: (chunk: any) => string },
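
A routing sketch based only on the signature above (construct builds one sink per key; all names here are illustrative):

import { demux } from "./demux";
import { map } from "./map";

const sink = demux(
    () => map((chunk: { key: string; val: number }) => chunk.val),
    { key: "key" }, // route chunks by their `key` field
);
sink.write({ key: "a", val: 1 }); // goes to the sink constructed for "a"
sink.write({ key: "b", val: 2 }); // goes to a separate sink for "b"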

View File

@@ -1,10 +1,5 @@
 import { Duplex } from "stream";
-/**
- * Return a Duplex stream from a writable stream that is assumed to somehow, when written to,
- * cause the given readable stream to yield chunks
- * @param writable Writable stream assumed to cause the readable stream to yield chunks when written to
- * @param readable Readable stream assumed to yield chunks when the writable stream is written to
- */
 export function duplex(
     writable: NodeJS.WritableStream,
     readable: NodeJS.ReadableStream,
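
Sketch pairing a child process's stdio into one stream (import path assumed; `sort` only emits after stdin closes):

import { spawn } from "child_process";
import { duplex } from "./duplex";

const proc = spawn("sort");
const sorted = duplex(proc.stdin!, proc.stdout!);
sorted.on("data", chunk => process.stdout.write(chunk));
sorted.write("b\na\n");
sorted.end(); // expected output: a, then b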

View File

@@ -1,10 +1,5 @@
 import { Transform, TransformOptions } from "stream";
-/**
- * Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold
- * @param predicate Predicate with which to filter scream chunks
- * @param options
- * @param options.objectMode Whether this stream should behave as a stream of objects
- */
 export function filter<T>(
     predicate:
         | ((chunk: T, encoding: string) => boolean)
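
Sketch (import path and option shape assumed from the removed comment):

import { Readable } from "stream";
import { filter } from "./filter";

Readable.from([1, 2, 3, 4])
    .pipe(filter((n: number) => n % 2 === 0, { objectMode: true }))
    .on("data", n => console.log(n)); // expected: 2, 4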

View File

@@ -1,12 +1,6 @@
 import { Transform } from "stream";
 import { TransformOptions } from "./baseDefinitions";
-/**
- * Return a ReadWrite stream that flat maps streamed chunks
- * @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such)
- * @param options
- * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects
- * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects
- */
 export function flatMap<T, R>(
     mapper:
         | ((chunk: T, encoding: string) => R[])
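
Sketch: one input chunk fans out into several output chunks (import path assumed):

import { Readable } from "stream";
import { flatMap } from "./flatMap";

Readable.from(["a b", "c"])
    .pipe(flatMap((line: string) => line.split(" ")))
    .on("data", word => console.log(word)); // expected: a, b, c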

View File

@@ -1,8 +1,5 @@
 import { Readable } from "stream";
-/**
- * Convert an array into a Readable stream of its elements
- * @param array Array of elements to stream
- */
 export function fromArray(array: any[]): Readable {
     let cursor = 0;
     return new Readable({
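
Sketch (import path assumed):

import { fromArray } from "./fromArray";

fromArray(["x", "y", "z"]).on("data", chunk => console.log(chunk)); // expected: x, y, z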

View File

@@ -1,14 +1,6 @@
-import { Readable, Writable, Transform, Duplex } from "stream";
-import { ChildProcess } from "child_process";
+import { Transform } from "stream";
 import * as baseFunctions from "./baseFunctions";
-import {
-    ThroughOptions,
-    TransformOptions,
-    WithEncoding,
-    JsonParseOptions,
-} from "./baseDefinitions";
 /**
  * Convert an array into a Readable stream of its elements
  * @param array Array of elements to stream
@@ -206,7 +198,7 @@ export const accumulatorBy = baseFunctions.accumulatorBy;
  * Composes multiple streams together. Writing occurs on first stream, piping occurs from last stream.
  * @param streams Array of streams to compose. Minimum of two.
  * @param options Transform stream options
-**/
+ */
 export const compose = baseFunctions.compose;
 /**
@@ -216,5 +208,5 @@ export const compose = baseFunctions.compose;
  * @param demuxBy.key? Key to fetch value from source chunks to demultiplex source.
  * @param demuxBy.keyBy? Function to fetch value from source chunks to demultiplex source.
  * @param options Writable stream options
-**/
+ */
 export const demux = baseFunctions.demux;

View File

@@ -1,12 +1,7 @@
 import { Transform } from "stream";
 import { StringDecoder } from "string_decoder";
 import { WithEncoding } from "./baseDefinitions";
-/**
- * Return a ReadWrite stream that joins streamed chunks using the given separator
- * @param separator Separator to join with
- * @param options
- * @param options.encoding Encoding written chunks are assumed to use
- */
 export function join(
     separator: string,
     options: WithEncoding = { encoding: "utf8" },
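
Sketch (import path assumed): downstream sees the chunks with the separator between them:

import { Readable } from "stream";
import { join } from "./join";

Readable.from(["a", "b", "c"])
    .pipe(join(","))
    .on("data", (part: string) => process.stdout.write(part)); // expected output: a,b,c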

View File

@@ -1,8 +1,3 @@
-/**
- * Return a Promise resolving to the last streamed chunk of the given readable stream, after it has
- * ended
- * @param readable Readable stream to wait on
- */
 export function last<T>(readable: NodeJS.ReadableStream): Promise<T | null> {
     let lastChunk: T | null = null;
     return new Promise((resolve, _) => {
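
Sketch (import path assumed):

import { Readable } from "stream";
import { last } from "./last";

async function main() {
    const final = await last<string>(Readable.from(["a", "b", "c"]));
    console.log(final); // expected: "c"
}
main();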

View File

@@ -1,12 +1,6 @@
 import { Transform } from "stream";
 import { TransformOptions } from "./baseDefinitions";
-/**
- * Return a ReadWrite stream that maps streamed chunks
- * @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such)
- * @param options
- * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects
- * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects
- */
 export function map<T, R>(
     mapper: (chunk: T, encoding: string) => R,
     options: TransformOptions = {
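
Sketch (import path assumed; the defaults cut off above are assumed to enable object mode):

import { Readable } from "stream";
import { map } from "./map";

Readable.from([1, 2, 3])
    .pipe(map((n: number) => n * 10))
    .on("data", n => console.log(n)); // expected: 10, 20, 30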

View File

@@ -1,8 +1,5 @@
 import { Readable } from "stream";
-/**
- * Return a Readable stream of readable streams merged together in chunk arrival order
- * @param streams Readable streams to merge
- */
 export function merge(...streams: Readable[]): Readable {
     let isStarted = false;
     let streamEndedCount = 0;
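
Sketch (import path assumed): chunks interleave in whatever order the sources produce them:

import { Readable } from "stream";
import { merge } from "./merge";

const left = Readable.from(["l1", "l2"]);
const right = Readable.from(["r1"]);
merge(left, right).on("data", chunk => console.log(chunk)); // arrival order, not source order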

View File

@@ -1,12 +1,7 @@
 import { Transform } from "stream";
 import { sleep } from "../helpers";
 import { TransformOptions } from "./baseDefinitions";
-/**
- * Limits number of parallel processes in flight.
- * @param parallel Max number of parallel processes.
- * @param func Function to execute on each data chunk
- * @param pause Amount of time to pause processing when max number of parallel processes are executing.
- */
 export function parallelMap<T, R>(
     mapper: (data: T) => R,
     parallel: number = 10,
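
Sketch based on the signature above (async mappers and object-mode defaults are assumptions):

import { Readable } from "stream";
import { parallelMap } from "./parallelMap";

// At most 2 mapper invocations in flight; completion order may differ from input order.
Readable.from([1, 2, 3, 4])
    .pipe(parallelMap(async (n: number) => n * 2, 2))
    .on("data", n => console.log(n));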

View File

@@ -1,11 +1,7 @@
 import { Transform } from "stream";
 import { StringDecoder } from "string_decoder";
 import { SerializationFormats } from "./baseDefinitions";
-/**
- * Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk
- * must be a fully defined JSON string.
- * @param format Format of serialized data, only utf8 supported.
- */
 export function parse(
     format: SerializationFormats = SerializationFormats.utf8,
 ): Transform {
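
Sketch (import path assumed): each chunk must be one complete JSON document:

import { Readable } from "stream";
import { parse } from "./parse";

Readable.from(['{"a":1}', '{"a":2}'])
    .pipe(parse())
    .on("data", obj => console.log(obj.a)); // expected: 1, 2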

View File

@@ -2,11 +2,7 @@ import { Transform } from "stream";
 import { performance } from "perf_hooks";
 import { sleep } from "../helpers";
 import { TransformOptions } from "./baseDefinitions";
-/**
- * Limits date of data transferred into stream.
- * @param targetRate Desired rate in ms
- * @param period Period to sleep for when rate is above or equal to targetRate
- */
 export function rate(
     targetRate: number = 50,
     period: number = 1,
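
Sketch based on the signature above (the rate's units are taken on faith from the removed comment):

import { Readable } from "stream";
import { rate } from "./rate";

Readable.from([1, 2, 3])
    .pipe(rate(10)) // throttle throughput toward the target rate
    .on("data", n => console.log(n));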

View File

@@ -1,14 +1,6 @@
 import { Transform } from "stream";
 import { TransformOptions } from "./baseDefinitions";
-/**
- * Return a ReadWrite stream that reduces streamed chunks down to a single value and yield that
- * value
- * @param iteratee Reducer function to apply on each streamed chunk
- * @param initialValue Initial value
- * @param options
- * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects
- * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects
- */
 export function reduce<T, R>(
     iteratee:
         | ((previousValue: R, chunk: T, encoding: string) => R)
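
Sketch (import path assumed; the single reduced value is expected once the input ends):

import { Readable } from "stream";
import { reduce } from "./reduce";

Readable.from([1, 2, 3])
    .pipe(reduce((sum: number, n: number) => sum + n, 0))
    .on("data", total => console.log(total)); // expected: 6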

View File

@@ -1,14 +1,7 @@
 import { Transform } from "stream";
 import { StringDecoder } from "string_decoder";
 import { WithEncoding } from "./baseDefinitions";
-/**
- * Return a ReadWrite stream that replaces occurrences of the given string or regular expression in
- * the streamed chunks with the specified replacement string
- * @param searchValue Search string to use
- * @param replaceValue Replacement string to use
- * @param options
- * @param options.encoding Encoding written chunks are assumed to use
- */
 export function replace(
     searchValue: string | RegExp,
     replaceValue: string,
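
Sketch (import path assumed):

import { Readable } from "stream";
import { replace } from "./replace";

Readable.from(["hello world"])
    .pipe(replace("world", "stream"))
    .on("data", (s: string) => console.log(s)); // expected: "hello stream"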

View File

@@ -1,12 +1,7 @@
 import { Transform } from "stream";
 import { StringDecoder } from "string_decoder";
 import { WithEncoding } from "./baseDefinitions";
-/**
- * Return a ReadWrite stream that splits streamed chunks using the given separator
- * @param separator Separator to split by, defaulting to "\n"
- * @param options
- * @param options.encoding Encoding written chunks are assumed to use
- */
 export function split(
     separator: string | RegExp = "\n",
     options: WithEncoding = { encoding: "utf8" },
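
Sketch (import path assumed):

import { Readable } from "stream";
import { split } from "./split";

Readable.from(["line1\nline2\nline3"])
    .pipe(split("\n"))
    .on("data", (line: string) => console.log(line)); // expected: line1, line2, line3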

View File

@@ -1,9 +1,6 @@
 import { Transform } from "stream";
 import { JsonValue, JsonParseOptions } from "./baseDefinitions";
-/**
- * Return a ReadWrite stream that stringifies the streamed chunks to JSON
- */
 export function stringify(
     options: JsonParseOptions = { pretty: false },
 ): Transform {
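
Sketch (import path assumed):

import { Readable } from "stream";
import { stringify } from "./stringify";

Readable.from([{ a: 1 }, { a: 2 }])
    .pipe(stringify())
    .on("data", (json: string) => console.log(json)); // expected: {"a":1} then {"a":2}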

View File

@@ -1,8 +1,6 @@
 import { Transform } from "stream";
 import { TransformOptions } from "./baseDefinitions";
-/**
- * Unbatches and sends individual chunks of data
- */
 export function unbatch(
     options: TransformOptions = {
         readableObjectMode: true,
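
Sketch (import path assumed): arrays in, individual elements out:

import { Readable } from "stream";
import { unbatch } from "./unbatch";

Readable.from([[1, 2], [3]])
    .pipe(unbatch())
    .on("data", n => console.log(n)); // expected: 1, 2, 3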