baseDefinitions

Jerry Kurian 2019-08-15 15:54:53 -04:00
parent 5a9fcc94a6
commit d6d974ee0d
16 changed files with 20 additions and 23 deletions

View File

@@ -1,15 +1,19 @@
import { Transform } from "stream";
import { AccumulatorByIteratee, FlushStrategy } from "./definitions";
import { TransformOptions } from "../baseDefinitions";
import { batch } from "../../index";
function _accumulator<T>(
    accumulateBy: (data: T, buffer: T[], stream: Transform) => void,
    shouldFlush: boolean = true,
    options: TransformOptions = {
        readableObjectMode: true,
        writableObjectMode: true,
    },
) {
    const buffer: T[] = [];
    return new Transform({
        readableObjectMode: true,
        writableObjectMode: true,
        ...options,
        transform(data: any, encoding, callback) {
            try {
                accumulateBy(data, buffer, this);
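
A minimal sketch of the accumulation pattern shown in this hunk, built only on Node's stream.Transform; the helper name simpleAccumulator and the end-of-stream flush are illustrative, not the module's actual internals.

import { Transform } from "stream";

// Illustrative accumulator: gathers object-mode chunks into a buffer via a
// caller-supplied callback and emits the buffered array when the input ends.
function simpleAccumulator<T>(
    accumulateBy: (data: T, buffer: T[], stream: Transform) => void,
): Transform {
    const buffer: T[] = [];
    return new Transform({
        readableObjectMode: true,
        writableObjectMode: true,
        transform(data: any, _encoding, callback) {
            try {
                accumulateBy(data as T, buffer, this);
                callback();
            } catch (err) {
                callback(err as Error);
            }
        },
        flush(callback) {
            // Push whatever has accumulated before the stream ended.
            callback(null, buffer);
        },
    });
}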

View File

@@ -21,10 +21,3 @@ export type JsonValue = JsonPrimitive | JsonPrimitive[];
export interface JsonParseOptions {
    pretty: boolean;
}
export enum FlushStrategy {
    rolling = "rolling",
    sliding = "sliding",
}
export type AccumulatorByIteratee<T> = (event: T, bufferChunk: T) => boolean;
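
A hypothetical iteratee matching the AccumulatorByIteratee<T> shape above; the TimedEvent type and the one-second window are made up for illustration, and the exact rolling vs. sliding flush semantics are decided by the accumulator that consumes the iteratee.

// Local copy of the exported type, so the snippet stands alone.
type AccumulatorByIteratee<T> = (event: T, bufferChunk: T) => boolean;

interface TimedEvent {
    ts: number; // epoch milliseconds (illustrative field)
}

// Returns true while the incoming event is within one second of the compared chunk.
const withinOneSecond: AccumulatorByIteratee<TimedEvent> = (event, bufferChunk) =>
    Math.abs(event.ts - bufferChunk.ts) <= 1000;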

View File

@@ -1,5 +1,5 @@
import { Transform } from "stream";
import { TransformOptions } from "../definitions";
import { TransformOptions } from "../baseDefinitions";
/**
* Stores chunks of data internally in an array and batches them when batchSize is reached.
*
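
A minimal sketch of the batching behaviour described in the comment, written directly against stream.Transform; batchSketch and its parameter are illustrative, not the module's implementation.

import { Transform } from "stream";

// Buffer object-mode chunks and push them as an array once batchSize is
// reached, flushing any remainder when the input ends.
function batchSketch(batchSize: number): Transform {
    let buffer: any[] = [];
    return new Transform({
        objectMode: true,
        transform(chunk, _encoding, callback) {
            buffer.push(chunk);
            if (buffer.length >= batchSize) {
                this.push(buffer);
                buffer = [];
            }
            callback();
        },
        flush(callback) {
            if (buffer.length > 0) {
                this.push(buffer);
            }
            callback();
        },
    });
}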

View File

@@ -1,5 +1,5 @@
import { Transform } from "stream";
import { ThroughOptions } from "../definitions";
import { ThroughOptions } from "../baseDefinitions";
/**
* Return a ReadWrite stream that collects streamed chunks into an array or buffer
* @param options
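
A sketch of the collect behaviour under the assumption that object-mode input is gathered into an array and binary input into a single Buffer; the name collectSketch is illustrative.

import { Transform } from "stream";

// Gather every chunk and emit the combined result once the input ends.
function collectSketch(options: { objectMode?: boolean } = {}): Transform {
    const collected: any[] = [];
    return new Transform({
        readableObjectMode: true,
        writableObjectMode: options.objectMode,
        transform(chunk, _encoding, callback) {
            collected.push(chunk);
            callback();
        },
        flush(callback) {
            // Array for object mode, concatenated Buffer otherwise.
            callback(null, options.objectMode ? collected : Buffer.concat(collected));
        },
    });
}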

View File

@@ -1,5 +1,5 @@
import { Transform } from "stream";
import { ThroughOptions } from "../definitions";
import { ThroughOptions } from "../baseDefinitions";
/**
* Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold
* @param predicate Predicate with which to filter stream chunks
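
A minimal filter sketch built on stream.Transform: chunks for which the predicate returns false are simply not pushed downstream. The name filterSketch is illustrative, not the library's API.

import { Transform } from "stream";

// Forward only the chunks that satisfy the predicate.
function filterSketch<T>(predicate: (chunk: T) => boolean): Transform {
    return new Transform({
        objectMode: true,
        transform(chunk: T, _encoding, callback) {
            if (predicate(chunk)) {
                this.push(chunk);
            }
            callback();
        },
    });
}

// Usage: keep only even numbers in an object-mode pipeline.
// source.pipe(filterSketch<number>(n => n % 2 === 0)).pipe(sink);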

View File

@@ -1,5 +1,5 @@
import { Transform } from "stream";
import { TransformOptions } from "../definitions";
import { TransformOptions } from "../baseDefinitions";
/**
* Return a ReadWrite stream that flat maps streamed chunks
* @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such)
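
A sketch of the flat-map behaviour: each chunk maps to zero or more outgoing chunks, and the mapper may be asynchronous. flatMapSketch is an illustrative name, not the module's implementation.

import { Transform } from "stream";

// Push every element of the (possibly awaited) mapped array downstream.
function flatMapSketch<T, R>(
    mapper: (chunk: T, encoding: string) => R[] | Promise<R[]>,
): Transform {
    return new Transform({
        objectMode: true,
        async transform(chunk: T, encoding, callback) {
            try {
                for (const mapped of await mapper(chunk, encoding)) {
                    this.push(mapped);
                }
                callback();
            } catch (err) {
                callback(err as Error);
            }
        },
    });
}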

View File

@@ -1,6 +1,6 @@
import { Transform } from "stream";
import { StringDecoder } from "string_decoder";
import { WithEncoding } from "../definitions";
import { WithEncoding } from "../baseDefinitions";
/**
* Return a ReadWrite stream that joins streamed chunks using the given separator
* @param separator Separator to join with
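
A sketch of the join behaviour, assuming chunks are decoded with StringDecoder and the separator is inserted between consecutive chunks; joinSketch is an illustrative name.

import { Transform } from "stream";
import { StringDecoder } from "string_decoder";

// Emit each decoded chunk, prefixed with the separator after the first one.
function joinSketch(separator: string, encoding: BufferEncoding = "utf8"): Transform {
    const decoder = new StringDecoder(encoding);
    let first = true;
    return new Transform({
        readableObjectMode: true,
        writableObjectMode: true,
        transform(chunk: Buffer | string, _encoding, callback) {
            const decoded = typeof chunk === "string" ? chunk : decoder.write(chunk);
            callback(null, first ? decoded : separator + decoded);
            first = false;
        },
    });
}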

View File

@@ -1,5 +1,5 @@
import { Transform } from "stream";
import { TransformOptions } from "../definitions";
import { TransformOptions } from "../baseDefinitions";
/**
* Return a ReadWrite stream that maps streamed chunks
* @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such)
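
A minimal map sketch: every chunk is passed through a (possibly async) mapper and the result is forwarded. mapSketch is an illustrative name, not the module's implementation.

import { Transform } from "stream";

// Transform each chunk with the mapper; errors are reported via the callback.
function mapSketch<T, R>(
    mapper: (chunk: T, encoding: string) => R | Promise<R>,
): Transform {
    return new Transform({
        objectMode: true,
        async transform(chunk: T, encoding, callback) {
            try {
                callback(null, await mapper(chunk, encoding));
            } catch (err) {
                callback(err as Error);
            }
        },
    });
}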

View File

@@ -1,6 +1,6 @@
import { Transform } from "stream";
import { sleep } from "../../helpers";
import { TransformOptions } from "../definitions";
import { TransformOptions } from "../baseDefinitions";
/**
* Limits number of parallel processes in flight.
* @param parallel Max number of parallel processes.
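
A sketch of the concurrency cap described above, assuming an async mapper per chunk: new input is only accepted while fewer than `parallel` tasks are in flight. This is a simplification (output order follows task completion, not input order), and all names are illustrative.

import { Transform } from "stream";

// Limit the number of in-flight mapper calls by delaying the transform callback.
function parallelMapSketch<T, R>(
    mapper: (chunk: T) => Promise<R>,
    parallel: number,
): Transform {
    const pending = new Set<Promise<void>>();
    return new Transform({
        objectMode: true,
        async transform(chunk: T, _encoding, callback) {
            const task = mapper(chunk)
                .then(result => {
                    this.push(result);
                })
                .catch((err: Error) => {
                    this.destroy(err);
                })
                .finally(() => pending.delete(task));
            pending.add(task);
            if (pending.size >= parallel) {
                // Backpressure: wait for one task to settle before taking more input.
                await Promise.race(pending);
            }
            callback();
        },
        async flush(callback) {
            // Drain the remaining in-flight work before ending the stream.
            await Promise.all(pending);
            callback();
        },
    });
}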

View File

@@ -1,6 +1,6 @@
import { Transform } from "stream";
import { StringDecoder } from "string_decoder";
import { SerializationFormats } from "../definitions";
import { SerializationFormats } from "../baseDefinitions";
/**
* Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk
* must be a fully defined JSON string.
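
A sketch of the parse behaviour under the stated constraint that every chunk is a complete JSON document; parseSketch is an illustrative name.

import { Transform } from "stream";
import { StringDecoder } from "string_decoder";

// Decode each chunk as UTF-8 and push the parsed value as an object-mode chunk.
function parseSketch(): Transform {
    const decoder = new StringDecoder("utf8");
    return new Transform({
        readableObjectMode: true,
        writableObjectMode: true,
        transform(chunk: Buffer | string, _encoding, callback) {
            try {
                const text = typeof chunk === "string" ? chunk : decoder.write(chunk);
                callback(null, JSON.parse(text));
            } catch (err) {
                callback(err as Error);
            }
        },
    });
}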

View File

@@ -1,7 +1,7 @@
import { Transform } from "stream";
import { performance } from "perf_hooks";
import { sleep } from "../../helpers";
import { TransformOptions } from "../definitions";
import { TransformOptions } from "../baseDefinitions";
/**
* Limits the rate of data transferred into the stream.
* @param targetRate Desired rate in ms
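
A simplified sketch of rate limiting in a Transform, assuming targetRate means chunks per second (the unit in the @param above is ambiguous); the real module times itself with perf_hooks, while this illustration uses Date.now and setTimeout.

import { Transform } from "stream";

const sleep = (ms: number) => new Promise<void>(resolve => setTimeout(resolve, ms));

// Delay chunks so that, on average, no more than targetRate chunks pass per second.
function rateSketch(targetRate: number): Transform {
    const start = Date.now();
    let emitted = 0;
    return new Transform({
        objectMode: true,
        async transform(chunk, _encoding, callback) {
            emitted += 1;
            const elapsed = Date.now() - start;
            const expected = (emitted / targetRate) * 1000;
            if (expected > elapsed) {
                // Ahead of the target rate: pause before forwarding the chunk.
                await sleep(expected - elapsed);
            }
            callback(null, chunk);
        },
    });
}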

View File

@@ -1,5 +1,5 @@
import { Transform } from "stream";
import { TransformOptions } from "../definitions";
import { TransformOptions } from "../baseDefinitions";
/**
* Return a ReadWrite stream that reduces streamed chunks down to a single value and yields that
* value
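
A minimal reduce sketch: fold every chunk into an accumulator and emit only the final value when the input ends. reduceSketch and its signature are illustrative.

import { Transform } from "stream";

// Accumulate chunks with the reducer; the single result is pushed on flush.
function reduceSketch<T, R>(reducer: (acc: R, chunk: T) => R, initial: R): Transform {
    let acc = initial;
    return new Transform({
        objectMode: true,
        transform(chunk: T, _encoding, callback) {
            acc = reducer(acc, chunk);
            callback();
        },
        flush(callback) {
            callback(null, acc);
        },
    });
}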

View File

@@ -1,6 +1,6 @@
import { Transform } from "stream";
import { StringDecoder } from "string_decoder";
import { WithEncoding } from "../definitions";
import { WithEncoding } from "../baseDefinitions";
/**
* Return a ReadWrite stream that replaces occurrences of the given string or regular expression in
* the streamed chunks with the specified replacement string
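
A sketch of the replace behaviour on decoded chunks; matches that span chunk boundaries are not handled here, and replaceSketch is an illustrative name rather than the module's implementation.

import { Transform } from "stream";
import { StringDecoder } from "string_decoder";

// Decode each chunk and substitute matches of the search string or regular
// expression with the replacement (use a global regex to replace all matches).
function replaceSketch(
    searchValue: string | RegExp,
    replaceValue: string,
    encoding: BufferEncoding = "utf8",
): Transform {
    const decoder = new StringDecoder(encoding);
    return new Transform({
        readableObjectMode: true,
        writableObjectMode: true,
        transform(chunk: Buffer | string, _encoding, callback) {
            const text = typeof chunk === "string" ? chunk : decoder.write(chunk);
            callback(null, text.replace(searchValue, replaceValue));
        },
    });
}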

View File

@@ -1,6 +1,6 @@
import { Transform } from "stream";
import { StringDecoder } from "string_decoder";
import { WithEncoding } from "../definitions";
import { WithEncoding } from "../baseDefinitions";
/**
* Return a ReadWrite stream that splits streamed chunks using the given separator
* @param separator Separator to split by, defaulting to "\n"
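
A sketch of the split behaviour: decoded text is buffered and emitted piece by piece at each separator, with the trailing partial piece held until more data or the end of input arrives. splitSketch is an illustrative name.

import { Transform } from "stream";
import { StringDecoder } from "string_decoder";

// Split the decoded stream on the separator, keeping the last partial piece.
function splitSketch(separator: string = "\n"): Transform {
    const decoder = new StringDecoder("utf8");
    let remainder = "";
    return new Transform({
        readableObjectMode: true,
        writableObjectMode: true,
        transform(chunk: Buffer | string, _encoding, callback) {
            const text =
                remainder + (typeof chunk === "string" ? chunk : decoder.write(chunk));
            const pieces = text.split(separator);
            remainder = pieces.pop() ?? "";
            for (const piece of pieces) {
                this.push(piece);
            }
            callback();
        },
        flush(callback) {
            // Emit whatever is left after the final separator.
            if (remainder.length > 0) {
                this.push(remainder);
            }
            callback();
        },
    });
}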

View File

@@ -1,5 +1,5 @@
import { Transform } from "stream";
import { JsonValue, JsonParseOptions } from "../definitions";
import { JsonValue, JsonParseOptions } from "../baseDefinitions";
/**
* Return a ReadWrite stream that stringifies the streamed chunks to JSON
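
A minimal stringify sketch; the pretty option mirrors the JsonParseOptions flag defined earlier and simply switches JSON.stringify to two-space indentation. The name stringifySketch is illustrative.

import { Transform } from "stream";

// Serialize each object-mode chunk to a JSON string.
function stringifySketch(options: { pretty: boolean } = { pretty: false }): Transform {
    return new Transform({
        readableObjectMode: true,
        writableObjectMode: true,
        transform(chunk, _encoding, callback) {
            try {
                callback(null, JSON.stringify(chunk, null, options.pretty ? 2 : 0));
            } catch (err) {
                callback(err as Error);
            }
        },
    });
}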

View File

@@ -1,5 +1,5 @@
import { Transform } from "stream";
import { TransformOptions } from "../definitions";
import { TransformOptions } from "../baseDefinitions";
/**
* Unbatches and sends individual chunks of data
*/
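
A minimal unbatch sketch: array chunks, such as those produced by a batching stream, are expanded back into individual chunks. unbatchSketch is an illustrative name.

import { Transform } from "stream";

// Push each element of an incoming array chunk downstream on its own.
function unbatchSketch<T>(): Transform {
    return new Transform({
        objectMode: true,
        transform(batchChunk: T[], _encoding, callback) {
            for (const item of batchChunk) {
                this.push(item);
            }
            callback();
        },
    });
}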