From 81bc9e6bc5c9e951590cd2f0f714a16e81f8d1ae Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Fri, 31 May 2019 15:06:26 -0400 Subject: [PATCH 01/69] Add batch to Transform Add tests for batched stream Add unbatching Update comment Add Rate Add parallel processing Remove only Remove deep Cleanup Cleanup --- src/index.spec.ts | 149 ++++++++++++++++++++++++++++++++++++++++++++++ src/index.ts | 98 ++++++++++++++++++++++++++++++ tslint.json | 2 +- 3 files changed, 248 insertions(+), 1 deletion(-) diff --git a/src/index.spec.ts b/src/index.spec.ts index a15050d..051fdd7 100644 --- a/src/index.spec.ts +++ b/src/index.spec.ts @@ -1,6 +1,7 @@ import * as cp from "child_process"; import test from "ava"; import { expect } from "chai"; +import { performance } from "perf_hooks"; import { Readable } from "stream"; import { fromArray, @@ -19,6 +20,10 @@ import { child, reduce, last, + batch, + unbatch, + rate, + parallelMap, } from "."; test.cb("fromArray() streams array elements in flowing mode", t => { @@ -1180,3 +1185,147 @@ test("last() resolves to the last chunk streamed by the given readable stream", const lastChunk = await lastPromise; expect(lastChunk).to.equal("ef"); }); + +test.cb("batch() batches chunks together", t => { + t.plan(3); + const source = new Readable({ objectMode: true }); + const expectedElements = [["a", "b", "c"], ["d", "e", "f"], ["g"]]; + let i = 0; + source + .pipe(batch(3)) + .on("data", (element: string[]) => { + expect(element).to.deep.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push("d"); + source.push("e"); + source.push("f"); + source.push("g"); + source.push(null); +}); + +test.cb("unbatch() unbatches", t => { + t.plan(3); + const source = new Readable({ objectMode: true }); + const expectedElements = ["a", "b", "c"]; + let i = 0; + source + .pipe(batch(3)) + .pipe(unbatch()) + .on("data", (element: string) => { + expect(element).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); +}); + +test.cb("rate() sends data at desired rate", t => { + t.plan(9); + const fastRate = 500; + const medRate = 50; + const slowRate = 1; + const sourceFast = new Readable({ objectMode: true }); + const sourceMed = new Readable({ objectMode: true }); + const sourceSlow = new Readable({ objectMode: true }); + const expectedElements = ["a", "b", "c"]; + const start = performance.now(); + let i = 0; + let j = 0; + let k = 0; + + sourceFast + .pipe(rate(fastRate)) + .on("data", (element: string[]) => { + const currentRate = (i / (performance.now() - start)) * 1000; + expect(element).to.deep.equal(expectedElements[i]); + expect(currentRate).lessThan(fastRate); + t.pass(); + i++; + }) + .on("error", t.end); + + sourceMed + .pipe(rate(medRate)) + .on("data", (element: string[]) => { + const currentRate = (j / (performance.now() - start)) * 1000; + expect(element).to.deep.equal(expectedElements[j]); + expect(currentRate).lessThan(medRate); + t.pass(); + j++; + }) + .on("error", t.end); + + sourceSlow + .pipe(rate(slowRate)) + .on("data", (element: string[]) => { + const currentRate = (k / (performance.now() - start)) * 1000; + expect(element).to.deep.equal(expectedElements[k]); + expect(currentRate).lessThan(slowRate); + t.pass(); + k++; + }) + .on("error", t.end) + .on("end", t.end); + + sourceFast.push("a"); + sourceFast.push("b"); + sourceFast.push("c"); + sourceFast.push(null); + sourceMed.push("a"); + sourceMed.push("b"); + sourceMed.push("c"); + sourceMed.push(null); + sourceSlow.push("a"); + sourceSlow.push("b"); + sourceSlow.push("c"); + sourceSlow.push(null); +}); + +test.cb("parallel() parallel mapping", t => { + t.plan(5); + const source = new Readable({ objectMode: true }); + const expectedElements = [ + "a_processed", + "b_processed", + "c_processed", + "d_processed", + "e_processed", + ]; + const orderedResults: string[] = []; + source + .pipe(parallelMap(2, data => data + "_processed")) + .on("data", (element: string) => { + t.true(expectedElements.includes(element)); + orderedResults.push(element); + }) + .on("error", t.end) + .on("end", () => { + expect(orderedResults[0]).to.equal("a_processed") + expect(orderedResults[1]).to.equal("b_processed") + expect(orderedResults[2]).to.equal("d_processed") + expect(orderedResults[3]).to.equal("c_processed") + expect(orderedResults[4]).to.equal("e_processed") + t.end(); + }); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push("d"); + source.push("e"); + source.push(null); +}); diff --git a/src/index.ts b/src/index.ts index 6da9256..dfbee1b 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,4 +1,5 @@ import { Transform, Readable, Writable, Duplex } from "stream"; +import { performance } from "perf_hooks"; import { ChildProcess } from "child_process"; import { StringDecoder } from "string_decoder"; @@ -13,6 +14,10 @@ export interface WithEncoding { encoding: string; } +async function sleep(time: number) { + return time > 0 ? new Promise(resolve => setTimeout(resolve, time)) : null; +} + /** * Convert an array into a Readable stream of its elements * @param array Array of elements to stream @@ -499,3 +504,96 @@ export function last(readable: Readable): Promise { .on("end", () => resolve(lastChunk)); }); } + +/** + * Stores chunks of data internally in array and batches when batchSize is reached. + * + * @param batchSize Size of the batches + */ +export function batch(batchSize: number) { + const buffer: any[] = []; + return new Transform({ + objectMode: true, + transform(chunk, encoding, callback) { + if (buffer.length === batchSize - 1) { + buffer.push(chunk); + callback(undefined, buffer.splice(0)); + } else { + buffer.push(chunk); + callback(); + } + }, + flush(callback) { + callback(undefined, buffer.splice(0)); + }, + }); +} + +/** + * Unbatches and sends individual chunks of data + */ +export function unbatch() { + return new Transform({ + objectMode: true, + transform(data, encoding, callback) { + for (const d of data) { + this.push(d); + } + callback(); + }, + }); +} + +/** + * Limits date of data transferred into stream. + * @param rate Desired rate in ms + */ +export function rate(targetRate: number) { + const deltaMS = (1 / targetRate) * 1000; + let total = 0; + const start = performance.now(); + return new Transform({ + objectMode: true, + async transform(data, encoding, callback) { + const currentRate = (total / (performance.now() - start)) * 1000; + if (targetRate && currentRate > targetRate) { + await sleep(deltaMS); + } + total += 1; + callback(undefined, data); + }, + }); +} + +/** + * Limits number of parallel processes in flight. + * @param parallel Max number of parallel processes. + * @param func Function to execute on each data chunk + */ +export function parallelMap(parallel: number, func: (data: T) => R) { + let inflight = 0; + return new Transform({ + objectMode: true, + async transform(data, encoding, callback) { + while (parallel <= inflight) { + await sleep(5); + } + inflight += 1; + callback(); + try { + const res = await func(data); + this.push(res); + } catch (e) { + this.emit(e); + } finally { + inflight -= 1; + } + }, + async flush(callback) { + while (inflight > 0) { + await sleep(5); + } + callback(); + }, + }); +} diff --git a/tslint.json b/tslint.json index b1c37a5..becd92c 100644 --- a/tslint.json +++ b/tslint.json @@ -7,7 +7,7 @@ "rules": { "no-console": false, "no-implicit-dependencies": [true, "dev"], - "prettier": true, + "prettier": [true, ".prettierrc"], "ordered-imports": false, "interface-name": false } From eaeed0dde699f6bac2d41d02db596597744fb4e4 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Tue, 4 Jun 2019 10:21:15 -0400 Subject: [PATCH 02/69] Refactor lib structure --- src/functions/definitions.ts | 24 + .../functions.spec.ts} | 22 +- src/functions/functions.ts | 605 +++++++++++++++++ src/functions/index.ts | 137 ++++ src/helpers.ts | 3 + src/index.ts | 621 +----------------- 6 files changed, 803 insertions(+), 609 deletions(-) create mode 100644 src/functions/definitions.ts rename src/{index.spec.ts => functions/functions.spec.ts} (98%) create mode 100644 src/functions/functions.ts create mode 100644 src/functions/index.ts create mode 100644 src/helpers.ts diff --git a/src/functions/definitions.ts b/src/functions/definitions.ts new file mode 100644 index 0000000..fc97bd1 --- /dev/null +++ b/src/functions/definitions.ts @@ -0,0 +1,24 @@ +export interface ThroughOptions { + objectMode?: boolean; +} + +export interface TransformOptions { + readableObjectMode?: boolean; + writableObjectMode?: boolean; +} + +export interface WithEncoding { + encoding: string; +} + +export enum SerializationFormats { + utf8 = "utf8", +} + + +type JsonPrimitive = string | number | object; +export type JsonValue = JsonPrimitive | JsonPrimitive[]; + +export interface JsonParseOptions { + pretty: boolean; +} diff --git a/src/index.spec.ts b/src/functions/functions.spec.ts similarity index 98% rename from src/index.spec.ts rename to src/functions/functions.spec.ts index 051fdd7..50e5916 100644 --- a/src/index.spec.ts +++ b/src/functions/functions.spec.ts @@ -25,6 +25,7 @@ import { rate, parallelMap, } from "."; +import { SerializationFormats } from "./definitions"; test.cb("fromArray() streams array elements in flowing mode", t => { t.plan(3); @@ -682,7 +683,7 @@ test.cb("parse() parses the streamed elements as JSON", t => { const expectedElements = ["abc", {}, []]; let i = 0; source - .pipe(parse()) + .pipe(parse(SerializationFormats.utf8)) .on("data", part => { expect(part).to.deep.equal(expectedElements[i]); t.pass(); @@ -701,7 +702,7 @@ test.cb("parse() emits errors on invalid JSON", t => { t.plan(2); const source = new Readable({ objectMode: true }); source - .pipe(parse()) + .pipe(parse(SerializationFormats.utf8)) .resume() .on("error", () => t.pass()) .on("end", t.end); @@ -1235,7 +1236,7 @@ test.cb("unbatch() unbatches", t => { test.cb("rate() sends data at desired rate", t => { t.plan(9); - const fastRate = 500; + const fastRate = 150; const medRate = 50; const slowRate = 1; const sourceFast = new Readable({ objectMode: true }); @@ -1270,7 +1271,7 @@ test.cb("rate() sends data at desired rate", t => { .on("error", t.end); sourceSlow - .pipe(rate(slowRate)) + .pipe(rate(slowRate, 1)) .on("data", (element: string[]) => { const currentRate = (k / (performance.now() - start)) * 1000; expect(element).to.deep.equal(expectedElements[k]); @@ -1306,19 +1307,20 @@ test.cb("parallel() parallel mapping", t => { "e_processed", ]; const orderedResults: string[] = []; + // Record start / end times of each process and then compare to figure out # of processes ocurring and order source - .pipe(parallelMap(2, data => data + "_processed")) + .pipe(parallelMap(data => data + "_processed")) .on("data", (element: string) => { t.true(expectedElements.includes(element)); orderedResults.push(element); }) .on("error", t.end) .on("end", () => { - expect(orderedResults[0]).to.equal("a_processed") - expect(orderedResults[1]).to.equal("b_processed") - expect(orderedResults[2]).to.equal("d_processed") - expect(orderedResults[3]).to.equal("c_processed") - expect(orderedResults[4]).to.equal("e_processed") + expect(orderedResults[0]).to.equal("a_processed"); + expect(orderedResults[1]).to.equal("b_processed"); + expect(orderedResults[2]).to.equal("d_processed"); + expect(orderedResults[3]).to.equal("c_processed"); + expect(orderedResults[4]).to.equal("e_processed"); t.end(); }); diff --git a/src/functions/functions.ts b/src/functions/functions.ts new file mode 100644 index 0000000..0107110 --- /dev/null +++ b/src/functions/functions.ts @@ -0,0 +1,605 @@ +import { Transform, Readable, Writable, Duplex } from "stream"; +import { performance } from "perf_hooks"; +import { ChildProcess } from "child_process"; +import { StringDecoder } from "string_decoder"; + +import { + TransformOptions, + ThroughOptions, + WithEncoding, + SerializationFormats, + JsonValue, + JsonParseOptions, +} from "./definitions"; +import { sleep } from "../helpers"; + +/** + * Convert an array into a Readable stream of its elements + * @param array Array of elements to stream + */ +export function fromArray(array: any[]): NodeJS.ReadableStream { + let cursor = 0; + return new Readable({ + objectMode: true, + read() { + if (cursor < array.length) { + this.push(array[cursor]); + cursor++; + } else { + this.push(null); + } + }, + }); +} + +/** + * Return a ReadWrite stream that maps streamed chunks + * @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such) + * @param options + * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects + * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects + */ +export function map( + mapper: (chunk: T, encoding: string) => R, + options: TransformOptions = { + readableObjectMode: true, + writableObjectMode: true, + }, +): NodeJS.ReadWriteStream { + return new Transform({ + ...options, + async transform(chunk: T, encoding, callback) { + let isPromise = false; + try { + const mapped = mapper(chunk, encoding); + isPromise = mapped instanceof Promise; + callback(undefined, await mapped); + } catch (err) { + if (isPromise) { + // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly + this.emit("error", err); + callback(); + } else { + callback(err); + } + } + }, + }); +} + +/** + * Return a ReadWrite stream that flat maps streamed chunks + * @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such) + * @param options + * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects + * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects + */ +export function flatMap( + mapper: + | ((chunk: T, encoding: string) => R[]) + | ((chunk: T, encoding: string) => Promise), + options: TransformOptions = { + readableObjectMode: true, + writableObjectMode: true, + }, +): NodeJS.ReadWriteStream { + return new Transform({ + ...options, + async transform(chunk: T, encoding, callback) { + let isPromise = false; + try { + const mapped = mapper(chunk, encoding); + isPromise = mapped instanceof Promise; + (await mapped).forEach(c => this.push(c)); + callback(); + } catch (err) { + if (isPromise) { + // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly + this.emit("error", err); + callback(); + } else { + callback(err); + } + } + }, + }); +} + +/** + * Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold + * @param predicate Predicate with which to filter scream chunks + * @param options + * @param options.objectMode Whether this stream should behave as a stream of objects + */ +export function filter( + predicate: + | ((chunk: T, encoding: string) => boolean) + | ((chunk: T, encoding: string) => Promise), + options: ThroughOptions = { + objectMode: true, + }, +) { + return new Transform({ + readableObjectMode: options.objectMode, + writableObjectMode: options.objectMode, + async transform(chunk: T, encoding, callback) { + let isPromise = false; + try { + const result = predicate(chunk, encoding); + isPromise = result instanceof Promise; + if (!!(await result)) { + callback(undefined, chunk); + } else { + callback(); + } + } catch (err) { + if (isPromise) { + // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly + this.emit("error", err); + callback(); + } else { + callback(err); + } + } + }, + }); +} + +/** + * Return a ReadWrite stream that reduces streamed chunks down to a single value and yield that + * value + * @param iteratee Reducer function to apply on each streamed chunk + * @param initialValue Initial value + * @param options + * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects + * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects + */ +export function reduce( + iteratee: + | ((previousValue: R, chunk: T, encoding: string) => R) + | ((previousValue: R, chunk: T, encoding: string) => Promise), + initialValue: R, + options: TransformOptions = { + readableObjectMode: true, + writableObjectMode: true, + }, +) { + let value = initialValue; + return new Transform({ + readableObjectMode: options.readableObjectMode, + writableObjectMode: options.writableObjectMode, + async transform(chunk: T, encoding, callback) { + let isPromise = false; + try { + const result = iteratee(value, chunk, encoding); + isPromise = result instanceof Promise; + value = await result; + callback(); + } catch (err) { + if (isPromise) { + // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly + this.emit("error", err); + callback(); + } else { + callback(err); + } + } + }, + flush(callback) { + // Best effort attempt at yielding the final value (will throw if e.g. yielding an object and + // downstream doesn't expect objects) + try { + callback(undefined, value); + } catch (err) { + try { + this.emit("error", err); + } catch { + // Best effort was made + } + } + }, + }); +} + +/** + * Return a ReadWrite stream that splits streamed chunks using the given separator + * @param separator Separator to split by, defaulting to "\n" + * @param options + * @param options.encoding Encoding written chunks are assumed to use + */ +export function split( + separator: string | RegExp = "\n", + options: WithEncoding = { encoding: "utf8" }, +): NodeJS.ReadWriteStream { + let buffered = ""; + const decoder = new StringDecoder(options.encoding); + + return new Transform({ + readableObjectMode: true, + transform(chunk: Buffer, encoding, callback) { + const asString = decoder.write(chunk); + const splitted = asString.split(separator); + if (splitted.length > 1) { + splitted[0] = buffered.concat(splitted[0]); + buffered = ""; + } + buffered += splitted[splitted.length - 1]; + splitted.slice(0, -1).forEach((part: string) => this.push(part)); + callback(); + }, + flush(callback) { + callback(undefined, buffered + decoder.end()); + }, + }); +} + +/** + * Return a ReadWrite stream that joins streamed chunks using the given separator + * @param separator Separator to join with + * @param options + * @param options.encoding Encoding written chunks are assumed to use + */ +export function join( + separator: string, + options: WithEncoding = { encoding: "utf8" }, +): NodeJS.ReadWriteStream { + let isFirstChunk = true; + const decoder = new StringDecoder(options.encoding); + return new Transform({ + readableObjectMode: true, + async transform(chunk: Buffer, encoding, callback) { + const asString = decoder.write(chunk); + // Take care not to break up multi-byte characters spanning multiple chunks + if (asString !== "" || chunk.length === 0) { + if (!isFirstChunk) { + this.push(separator); + } + this.push(asString); + isFirstChunk = false; + } + callback(); + }, + }); +} + +/** + * Return a ReadWrite stream that replaces occurrences of the given string or regular expression in + * the streamed chunks with the specified replacement string + * @param searchValue Search string to use + * @param replaceValue Replacement string to use + * @param options + * @param options.encoding Encoding written chunks are assumed to use + */ +export function replace( + searchValue: string | RegExp, + replaceValue: string, + options: WithEncoding = { encoding: "utf8" }, +): NodeJS.ReadWriteStream { + const decoder = new StringDecoder(options.encoding); + return new Transform({ + readableObjectMode: true, + transform(chunk: Buffer, encoding, callback) { + const asString = decoder.write(chunk); + // Take care not to break up multi-byte characters spanning multiple chunks + if (asString !== "" || chunk.length === 0) { + callback( + undefined, + asString.replace(searchValue, replaceValue), + ); + } else { + callback(); + } + }, + }); +} + +/** + * Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk + * must be a fully defined JSON string. + * @param format Format of serialized data, only utf8 supported. + */ +export function parse( + format: SerializationFormats = SerializationFormats.utf8, +): NodeJS.ReadWriteStream { + const decoder = new StringDecoder(format); + return new Transform({ + readableObjectMode: true, + writableObjectMode: true, + async transform(chunk: Buffer, encoding, callback) { + try { + const asString = decoder.write(chunk); + // Using await causes parsing errors to be emitted + callback(undefined, await JSON.parse(asString)); + } catch (err) { + callback(err); + } + }, + }); +} + +/** + * Return a ReadWrite stream that stringifies the streamed chunks to JSON + */ +export function stringify( + options: JsonParseOptions = { pretty: false }, +): NodeJS.ReadWriteStream { + return new Transform({ + readableObjectMode: true, + writableObjectMode: true, + transform(chunk: JsonValue, encoding, callback) { + callback( + undefined, + options.pretty + ? JSON.stringify(chunk, null, 2) + : JSON.stringify(chunk), + ); + }, + }); +} + +/** + * Return a ReadWrite stream that collects streamed chunks into an array or buffer + * @param options + * @param options.objectMode Whether this stream should behave as a stream of objects + */ +export function collect( + options: ThroughOptions = { objectMode: false }, +): NodeJS.ReadWriteStream { + const collected: any[] = []; + return new Transform({ + readableObjectMode: options.objectMode, + writableObjectMode: options.objectMode, + transform(data, encoding, callback) { + collected.push(data); + callback(); + }, + flush(callback) { + this.push( + options.objectMode ? collected : Buffer.concat(collected), + ); + callback(); + }, + }); +} + +/** + * Return a Readable stream of readable streams concatenated together + * @param streams Readable streams to concatenate + */ +export function concat( + ...streams: NodeJS.ReadableStream[] +): NodeJS.ReadableStream { + let isStarted = false; + let currentStreamIndex = 0; + const startCurrentStream = () => { + if (currentStreamIndex >= streams.length) { + wrapper.push(null); + } else { + streams[currentStreamIndex] + .on("data", chunk => { + if (!wrapper.push(chunk)) { + streams[currentStreamIndex].pause(); + } + }) + .on("error", err => wrapper.emit("error", err)) + .on("end", () => { + currentStreamIndex++; + startCurrentStream(); + }); + } + }; + + const wrapper = new Readable({ + objectMode: true, + read() { + if (!isStarted) { + isStarted = true; + startCurrentStream(); + } + if (currentStreamIndex < streams.length) { + streams[currentStreamIndex].resume(); + } + }, + }); + return wrapper; +} + +/** + * Return a Readable stream of readable streams merged together in chunk arrival order + * @param streams Readable streams to merge + */ +export function merge( + ...streams: NodeJS.ReadableStream[] +): NodeJS.ReadableStream { + let isStarted = false; + let streamEndedCount = 0; + return new Readable({ + objectMode: true, + read() { + if (streamEndedCount >= streams.length) { + this.push(null); + } else if (!isStarted) { + isStarted = true; + streams.forEach(stream => + stream + .on("data", chunk => { + if (!this.push(chunk)) { + streams.forEach(s => s.pause()); + } + }) + .on("error", err => this.emit("error", err)) + .on("end", () => { + streamEndedCount++; + if (streamEndedCount === streams.length) { + this.push(null); + } + }), + ); + } else { + streams.forEach(s => s.resume()); + } + }, + }); +} + +/** + * Return a Duplex stream from a writable stream that is assumed to somehow, when written to, + * cause the given readable stream to yield chunks + * @param writable Writable stream assumed to cause the readable stream to yield chunks when written to + * @param readable Readable stream assumed to yield chunks when the writable stream is written to + */ +export function duplex(writable: Writable, readable: Readable) { + const wrapper = new Duplex({ + readableObjectMode: true, + writableObjectMode: true, + read() { + readable.resume(); + }, + write(chunk, encoding, callback) { + return writable.write(chunk, encoding, callback); + }, + final(callback) { + writable.end(callback); + }, + }); + readable + .on("data", chunk => { + if (!wrapper.push(chunk)) { + readable.pause(); + } + }) + .on("error", err => wrapper.emit("error", err)) + .on("end", () => wrapper.push(null)); + writable.on("drain", () => wrapper.emit("drain")); + writable.on("error", err => wrapper.emit("error", err)); + return wrapper; +} + +/** + * Return a Duplex stream from a child process' stdin and stdout + * @param childProcess Child process from which to create duplex stream + */ +export function child(childProcess: ChildProcess) { + return duplex(childProcess.stdin, childProcess.stdout); +} + +/** + * Return a Promise resolving to the last streamed chunk of the given readable stream, after it has + * ended + * @param readable Readable stream to wait on + */ +export function last(readable: Readable): Promise { + let lastChunk: T | null = null; + return new Promise((resolve, reject) => { + readable + .on("data", chunk => (lastChunk = chunk)) + .on("end", () => resolve(lastChunk)); + }); +} + +/** + * Stores chunks of data internally in array and batches when batchSize is reached. + * + * @param batchSize Size of the batches + * @param maxBatchAge Max lifetime of a batch + */ +export function batch(batchSize: number = 1000, maxBatchAge: number = 500) { + const buffer: any[] = []; + let startTime: number | null = null; + return new Transform({ + objectMode: true, + transform(chunk, encoding, callback) { + if ( + buffer.length === batchSize - 1 || + (startTime !== null && + startTime - performance.now() >= maxBatchAge) + ) { + buffer.push(chunk); + callback(undefined, buffer.splice(0)); + } else { + if (startTime === null) { + startTime = performance.now(); + } + buffer.push(chunk); + callback(); + } + }, + flush(callback) { + callback(undefined, buffer.splice(0)); + }, + }); +} + +/** + * Unbatches and sends individual chunks of data + */ +export function unbatch() { + return new Transform({ + objectMode: true, + transform(data, encoding, callback) { + for (const d of data) { + this.push(d); + } + callback(); + }, + }); +} + +/** + * Limits date of data transferred into stream. + * @param rate Desired rate in ms + */ +export function rate(targetRate: number = 50, period: number = 2) { + const deltaMS = ((1 / targetRate) * 1000) / period; // Skip half a period + let total = 0; + const start = performance.now(); + return new Transform({ + objectMode: true, + async transform(data, encoding, callback) { + const currentRate = (total / (performance.now() - start)) * 1000; + if (targetRate && currentRate > targetRate) { + await sleep(deltaMS); + } + total += 1; + callback(undefined, data); + }, + }); +} + +/** + * Limits number of parallel processes in flight. + * @param parallel Max number of parallel processes. + * @param func Function to execute on each data chunk + * @param pause Amount of time to pause processing when max number of parallel processes are executing. + */ +export function parallelMap( + mapper: (data: T) => R, + parallel: number = 10, + sleepTime: number = 5, +) { + let inflight = 0; + return new Transform({ + objectMode: true, + async transform(data, encoding, callback) { + while (parallel <= inflight) { + await sleep(sleepTime); + } + inflight += 1; + callback(); + try { + const res = await mapper(data); + this.push(res); + } catch (e) { + this.emit(e); + } finally { + inflight -= 1; + } + }, + async flush(callback) { + while (inflight > 0) { + await sleep(sleepTime); + } + callback(); + }, + }); +} diff --git a/src/functions/index.ts b/src/functions/index.ts new file mode 100644 index 0000000..f9c1e58 --- /dev/null +++ b/src/functions/index.ts @@ -0,0 +1,137 @@ +import { Readable, Writable } from "stream"; +import { ChildProcess } from "child_process"; +import * as baseFunctions from "./functions"; + +import { + ThroughOptions, + TransformOptions, + WithEncoding, + SerializationFormats, + JsonParseOptions, +} from "./definitions"; + +export function fromArray(array: any[]): NodeJS.ReadableStream { + return baseFunctions.fromArray(array); +} + +export function map( + mapper: (chunk: T, encoding?: string) => R, + options?: TransformOptions, +): NodeJS.ReadWriteStream { + return baseFunctions.map(mapper, options); +} + +export function flatMap( + mapper: + | ((chunk: T, encoding: string) => R[]) + | ((chunk: T, encoding: string) => Promise), + options?: TransformOptions, +): NodeJS.ReadWriteStream { + return baseFunctions.flatMap(mapper, options); +} + +export function filter( + mapper: + | ((chunk: T, encoding: string) => boolean) + | ((chunk: T, encoding: string) => Promise), + options?: ThroughOptions, +): NodeJS.ReadWriteStream { + return baseFunctions.filter(mapper, options); +} + +export function reduce( + iteratee: + | ((previousValue: R, chunk: T, encoding: string) => R) + | ((previousValue: R, chunk: T, encoding: string) => Promise), + initialValue: R, + options?: TransformOptions, +): NodeJS.ReadWriteStream { + return baseFunctions.reduce(iteratee, initialValue, options); +} + +export function split( + separator?: string | RegExp, + options?: WithEncoding, +): NodeJS.ReadWriteStream { + return baseFunctions.split(separator, options); +} + +export function join( + separator: string, + options?: WithEncoding, +): NodeJS.ReadWriteStream { + return baseFunctions.join(separator, options); +} + +export function replace( + searchValue: string | RegExp, + replaceValue: string, + options?: WithEncoding, +): NodeJS.ReadWriteStream { + return baseFunctions.replace(searchValue, replaceValue, options); +} + +export function parse(format: SerializationFormats): NodeJS.ReadWriteStream { + return baseFunctions.parse(format); +} + +export function stringify(options?: JsonParseOptions): NodeJS.ReadWriteStream { + return baseFunctions.stringify(options); +} + +export function collect(options?: ThroughOptions): NodeJS.ReadWriteStream { + return baseFunctions.collect(options); +} + +export function concat( + ...streams: NodeJS.ReadableStream[] +): NodeJS.ReadableStream { + return baseFunctions.concat(...streams); +} + +export function merge( + ...streams: NodeJS.ReadableStream[] +): NodeJS.ReadableStream { + return baseFunctions.merge(...streams); +} + +export function duplex( + writable: Writable, + readable: Readable, +): NodeJS.ReadWriteStream { + return baseFunctions.duplex(writable, readable); +} + +export function child(childProcess: ChildProcess): NodeJS.ReadWriteStream { + return baseFunctions.child(childProcess); +} + +export function last(readable: Readable): Promise { + return baseFunctions.last(readable); +} + +export function batch( + batchSize?: number, + maxBatchAge?: number, +): NodeJS.ReadWriteStream { + return baseFunctions.batch(batchSize, maxBatchAge); +} + +export function unbatch(): NodeJS.ReadWriteStream { + return baseFunctions.unbatch(); +} + +export function rate( + targetRate?: number, + period?: number, +): NodeJS.ReadWriteStream { + return baseFunctions.rate(targetRate, period); +} + +export function parallelMap( + mapper: (data: T) => R, + parallel?: number, + sleepTime?: number, +) { + return baseFunctions.parallelMap(mapper, parallel, sleepTime); +} diff --git a/src/helpers.ts b/src/helpers.ts new file mode 100644 index 0000000..242d264 --- /dev/null +++ b/src/helpers.ts @@ -0,0 +1,3 @@ +export async function sleep(time: number): Promise<{} | null> { + return time > 0 ? new Promise(resolve => setTimeout(resolve, time)) : null; +} diff --git a/src/index.ts b/src/index.ts index dfbee1b..e9f1369 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,599 +1,22 @@ -import { Transform, Readable, Writable, Duplex } from "stream"; -import { performance } from "perf_hooks"; -import { ChildProcess } from "child_process"; -import { StringDecoder } from "string_decoder"; - -export interface ThroughOptions { - objectMode?: boolean; -} -export interface TransformOptions { - readableObjectMode?: boolean; - writableObjectMode?: boolean; -} -export interface WithEncoding { - encoding: string; -} - -async function sleep(time: number) { - return time > 0 ? new Promise(resolve => setTimeout(resolve, time)) : null; -} - -/** - * Convert an array into a Readable stream of its elements - * @param array Array of elements to stream - */ -export function fromArray(array: any[]): NodeJS.ReadableStream { - let cursor = 0; - return new Readable({ - objectMode: true, - read() { - if (cursor < array.length) { - this.push(array[cursor]); - cursor++; - } else { - this.push(null); - } - }, - }); -} - -/** - * Return a ReadWrite stream that maps streamed chunks - * @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such) - * @param options - * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects - * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects - */ -export function map( - mapper: (chunk: T, encoding: string) => R, - options: TransformOptions = { - readableObjectMode: true, - writableObjectMode: true, - }, -): NodeJS.ReadWriteStream { - return new Transform({ - ...options, - async transform(chunk: T, encoding, callback) { - let isPromise = false; - try { - const mapped = mapper(chunk, encoding); - isPromise = mapped instanceof Promise; - callback(undefined, await mapped); - } catch (err) { - if (isPromise) { - // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly - this.emit("error", err); - callback(); - } else { - callback(err); - } - } - }, - }); -} - -/** - * Return a ReadWrite stream that flat maps streamed chunks - * @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such) - * @param options - * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects - * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects - */ -export function flatMap( - mapper: - | ((chunk: T, encoding: string) => R[]) - | ((chunk: T, encoding: string) => Promise), - options: TransformOptions = { - readableObjectMode: true, - writableObjectMode: true, - }, -): NodeJS.ReadWriteStream { - return new Transform({ - ...options, - async transform(chunk: T, encoding, callback) { - let isPromise = false; - try { - const mapped = mapper(chunk, encoding); - isPromise = mapped instanceof Promise; - (await mapped).forEach(c => this.push(c)); - callback(); - } catch (err) { - if (isPromise) { - // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly - this.emit("error", err); - callback(); - } else { - callback(err); - } - } - }, - }); -} - -/** - * Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold - * @param predicate Predicate with which to filter scream chunks - * @param options - * @param options.objectMode Whether this stream should behave as a stream of objects - */ -export function filter( - predicate: - | ((chunk: T, encoding: string) => boolean) - | ((chunk: T, encoding: string) => Promise), - options: ThroughOptions = { - objectMode: true, - }, -) { - return new Transform({ - readableObjectMode: options.objectMode, - writableObjectMode: options.objectMode, - async transform(chunk: T, encoding, callback) { - let isPromise = false; - try { - const result = predicate(chunk, encoding); - isPromise = result instanceof Promise; - if (!!(await result)) { - callback(undefined, chunk); - } else { - callback(); - } - } catch (err) { - if (isPromise) { - // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly - this.emit("error", err); - callback(); - } else { - callback(err); - } - } - }, - }); -} - -/** - * Return a ReadWrite stream that reduces streamed chunks down to a single value and yield that - * value - * @param iteratee Reducer function to apply on each streamed chunk - * @param initialValue Initial value - * @param options - * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects - * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects - */ -export function reduce( - iteratee: - | ((previousValue: R, chunk: T, encoding: string) => R) - | ((previousValue: R, chunk: T, encoding: string) => Promise), - initialValue: R, - options: TransformOptions = { - readableObjectMode: true, - writableObjectMode: true, - }, -) { - let value = initialValue; - return new Transform({ - readableObjectMode: options.readableObjectMode, - writableObjectMode: options.writableObjectMode, - async transform(chunk: T, encoding, callback) { - let isPromise = false; - try { - const result = iteratee(value, chunk, encoding); - isPromise = result instanceof Promise; - value = await result; - callback(); - } catch (err) { - if (isPromise) { - // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly - this.emit("error", err); - callback(); - } else { - callback(err); - } - } - }, - flush(callback) { - // Best effort attempt at yielding the final value (will throw if e.g. yielding an object and - // downstream doesn't expect objects) - try { - callback(undefined, value); - } catch (err) { - try { - this.emit("error", err); - } catch { - // Best effort was made - } - } - }, - }); -} - -/** - * Return a ReadWrite stream that splits streamed chunks using the given separator - * @param separator Separator to split by, defaulting to "\n" - * @param options - * @param options.encoding Encoding written chunks are assumed to use - */ -export function split( - separator: string | RegExp = "\n", - options: WithEncoding = { encoding: "utf8" }, -): NodeJS.ReadWriteStream { - let buffered = ""; - const decoder = new StringDecoder(options.encoding); - - return new Transform({ - readableObjectMode: true, - transform(chunk: Buffer, encoding, callback) { - const asString = decoder.write(chunk); - const splitted = asString.split(separator); - if (splitted.length > 1) { - splitted[0] = buffered.concat(splitted[0]); - buffered = ""; - } - buffered += splitted[splitted.length - 1]; - splitted.slice(0, -1).forEach((part: string) => this.push(part)); - callback(); - }, - flush(callback) { - callback(undefined, buffered + decoder.end()); - }, - }); -} - -/** - * Return a ReadWrite stream that joins streamed chunks using the given separator - * @param separator Separator to join with - * @param options - * @param options.encoding Encoding written chunks are assumed to use - */ -export function join( - separator: string, - options: WithEncoding = { encoding: "utf8" }, -): NodeJS.ReadWriteStream { - let isFirstChunk = true; - const decoder = new StringDecoder(options.encoding); - return new Transform({ - readableObjectMode: true, - async transform(chunk: Buffer, encoding, callback) { - const asString = decoder.write(chunk); - // Take care not to break up multi-byte characters spanning multiple chunks - if (asString !== "" || chunk.length === 0) { - if (!isFirstChunk) { - this.push(separator); - } - this.push(asString); - isFirstChunk = false; - } - callback(); - }, - }); -} - -/** - * Return a ReadWrite stream that replaces occurrences of the given string or regular expression in - * the streamed chunks with the specified replacement string - * @param searchValue Search string to use - * @param replaceValue Replacement string to use - * @param options - * @param options.encoding Encoding written chunks are assumed to use - */ -export function replace( - searchValue: string | RegExp, - replaceValue: string, - options: WithEncoding = { encoding: "utf8" }, -): NodeJS.ReadWriteStream { - const decoder = new StringDecoder(options.encoding); - return new Transform({ - readableObjectMode: true, - transform(chunk: Buffer, encoding, callback) { - const asString = decoder.write(chunk); - // Take care not to break up multi-byte characters spanning multiple chunks - if (asString !== "" || chunk.length === 0) { - callback( - undefined, - asString.replace(searchValue, replaceValue), - ); - } else { - callback(); - } - }, - }); -} - -/** - * Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk - * must be a fully defined JSON string. - */ -export function parse(): NodeJS.ReadWriteStream { - const decoder = new StringDecoder("utf8"); // JSON must be utf8 - return new Transform({ - readableObjectMode: true, - writableObjectMode: true, - async transform(chunk: Buffer, encoding, callback) { - try { - const asString = decoder.write(chunk); - // Using await causes parsing errors to be emitted - callback(undefined, await JSON.parse(asString)); - } catch (err) { - callback(err); - } - }, - }); -} - -type JsonPrimitive = string | number | object; -type JsonValue = JsonPrimitive | JsonPrimitive[]; -interface JsonParseOptions { - pretty: boolean; -} - -/** - * Return a ReadWrite stream that stringifies the streamed chunks to JSON - */ -export function stringify( - options: JsonParseOptions = { pretty: false }, -): NodeJS.ReadWriteStream { - return new Transform({ - readableObjectMode: true, - writableObjectMode: true, - transform(chunk: JsonValue, encoding, callback) { - callback( - undefined, - options.pretty - ? JSON.stringify(chunk, null, 2) - : JSON.stringify(chunk), - ); - }, - }); -} - -/** - * Return a ReadWrite stream that collects streamed chunks into an array or buffer - * @param options - * @param options.objectMode Whether this stream should behave as a stream of objects - */ -export function collect( - options: ThroughOptions = { objectMode: false }, -): NodeJS.ReadWriteStream { - const collected: any[] = []; - return new Transform({ - readableObjectMode: options.objectMode, - writableObjectMode: options.objectMode, - transform(data, encoding, callback) { - collected.push(data); - callback(); - }, - flush(callback) { - this.push( - options.objectMode ? collected : Buffer.concat(collected), - ); - callback(); - }, - }); -} - -/** - * Return a Readable stream of readable streams concatenated together - * @param streams Readable streams to concatenate - */ -export function concat( - ...streams: NodeJS.ReadableStream[] -): NodeJS.ReadableStream { - let isStarted = false; - let currentStreamIndex = 0; - const startCurrentStream = () => { - if (currentStreamIndex >= streams.length) { - wrapper.push(null); - } else { - streams[currentStreamIndex] - .on("data", chunk => { - if (!wrapper.push(chunk)) { - streams[currentStreamIndex].pause(); - } - }) - .on("error", err => wrapper.emit("error", err)) - .on("end", () => { - currentStreamIndex++; - startCurrentStream(); - }); - } - }; - - const wrapper = new Readable({ - objectMode: true, - read() { - if (!isStarted) { - isStarted = true; - startCurrentStream(); - } - if (currentStreamIndex < streams.length) { - streams[currentStreamIndex].resume(); - } - }, - }); - return wrapper; -} - -/** - * Return a Readable stream of readable streams merged together in chunk arrival order - * @param streams Readable streams to merge - */ -export function merge( - ...streams: NodeJS.ReadableStream[] -): NodeJS.ReadableStream { - let isStarted = false; - let streamEndedCount = 0; - return new Readable({ - objectMode: true, - read() { - if (streamEndedCount >= streams.length) { - this.push(null); - } else if (!isStarted) { - isStarted = true; - streams.forEach(stream => - stream - .on("data", chunk => { - if (!this.push(chunk)) { - streams.forEach(s => s.pause()); - } - }) - .on("error", err => this.emit("error", err)) - .on("end", () => { - streamEndedCount++; - if (streamEndedCount === streams.length) { - this.push(null); - } - }), - ); - } else { - streams.forEach(s => s.resume()); - } - }, - }); -} - -/** - * Return a Duplex stream from a writable stream that is assumed to somehow, when written to, - * cause the given readable stream to yield chunks - * @param writable Writable stream assumed to cause the readable stream to yield chunks when written to - * @param readable Readable stream assumed to yield chunks when the writable stream is written to - */ -export function duplex(writable: Writable, readable: Readable) { - const wrapper = new Duplex({ - readableObjectMode: true, - writableObjectMode: true, - read() { - readable.resume(); - }, - write(chunk, encoding, callback) { - return writable.write(chunk, encoding, callback); - }, - final(callback) { - writable.end(callback); - }, - }); - readable - .on("data", chunk => { - if (!wrapper.push(chunk)) { - readable.pause(); - } - }) - .on("error", err => wrapper.emit("error", err)) - .on("end", () => wrapper.push(null)); - writable.on("drain", () => wrapper.emit("drain")); - writable.on("error", err => wrapper.emit("error", err)); - return wrapper; -} - -/** - * Return a Duplex stream from a child process' stdin and stdout - * @param childProcess Child process from which to create duplex stream - */ -export function child(childProcess: ChildProcess) { - return duplex(childProcess.stdin, childProcess.stdout); -} - -/** - * Return a Promise resolving to the last streamed chunk of the given readable stream, after it has - * ended - * @param readable Readable stream to wait on - */ -export function last(readable: Readable): Promise { - let lastChunk: T | null = null; - return new Promise((resolve, reject) => { - readable - .on("data", chunk => (lastChunk = chunk)) - .on("end", () => resolve(lastChunk)); - }); -} - -/** - * Stores chunks of data internally in array and batches when batchSize is reached. - * - * @param batchSize Size of the batches - */ -export function batch(batchSize: number) { - const buffer: any[] = []; - return new Transform({ - objectMode: true, - transform(chunk, encoding, callback) { - if (buffer.length === batchSize - 1) { - buffer.push(chunk); - callback(undefined, buffer.splice(0)); - } else { - buffer.push(chunk); - callback(); - } - }, - flush(callback) { - callback(undefined, buffer.splice(0)); - }, - }); -} - -/** - * Unbatches and sends individual chunks of data - */ -export function unbatch() { - return new Transform({ - objectMode: true, - transform(data, encoding, callback) { - for (const d of data) { - this.push(d); - } - callback(); - }, - }); -} - -/** - * Limits date of data transferred into stream. - * @param rate Desired rate in ms - */ -export function rate(targetRate: number) { - const deltaMS = (1 / targetRate) * 1000; - let total = 0; - const start = performance.now(); - return new Transform({ - objectMode: true, - async transform(data, encoding, callback) { - const currentRate = (total / (performance.now() - start)) * 1000; - if (targetRate && currentRate > targetRate) { - await sleep(deltaMS); - } - total += 1; - callback(undefined, data); - }, - }); -} - -/** - * Limits number of parallel processes in flight. - * @param parallel Max number of parallel processes. - * @param func Function to execute on each data chunk - */ -export function parallelMap(parallel: number, func: (data: T) => R) { - let inflight = 0; - return new Transform({ - objectMode: true, - async transform(data, encoding, callback) { - while (parallel <= inflight) { - await sleep(5); - } - inflight += 1; - callback(); - try { - const res = await func(data); - this.push(res); - } catch (e) { - this.emit(e); - } finally { - inflight -= 1; - } - }, - async flush(callback) { - while (inflight > 0) { - await sleep(5); - } - callback(); - }, - }); -} +export { + fromArray, + map, + flatMap, + filter, + reduce, + split, + join, + replace, + parse, + stringify, + collect, + concat, + merge, + duplex, + child, + last, + batch, + unbatch, + rate, + parallelMap, +} from "./functions"; From 5eeae1755970717b1b7290acd542718c5845fb28 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Tue, 4 Jun 2019 11:25:33 -0400 Subject: [PATCH 03/69] Add description + interfaces --- src/functions/definitions.ts | 17 +++- src/functions/functions.ts | 3 +- src/functions/index.ts | 145 +++++++++++++++++++++++++++++++---- 3 files changed, 147 insertions(+), 18 deletions(-) diff --git a/src/functions/definitions.ts b/src/functions/definitions.ts index fc97bd1..f18659a 100644 --- a/src/functions/definitions.ts +++ b/src/functions/definitions.ts @@ -15,10 +15,25 @@ export enum SerializationFormats { utf8 = "utf8", } - type JsonPrimitive = string | number | object; export type JsonValue = JsonPrimitive | JsonPrimitive[]; export interface JsonParseOptions { pretty: boolean; } + +export interface IBatchParams { + batchSize?: number; + maxBatchAge?: number; +} + +export interface IRateParams { + targetRate?: number; + period?: number; +} + +export interface IParallelMapParams { + mapper: (data: T) => R; + parallel?: number; + sleepTime?: number; +} diff --git a/src/functions/functions.ts b/src/functions/functions.ts index 0107110..be2afff 100644 --- a/src/functions/functions.ts +++ b/src/functions/functions.ts @@ -547,7 +547,8 @@ export function unbatch() { /** * Limits date of data transferred into stream. - * @param rate Desired rate in ms + * @param targetRate Desired rate in ms + * @param period Period to sleep for when rate is above or equal to targetRate */ export function rate(targetRate: number = 50, period: number = 2) { const deltaMS = ((1 / targetRate) * 1000) / period; // Skip half a period diff --git a/src/functions/index.ts b/src/functions/index.ts index f9c1e58..1ad0f10 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -6,14 +6,27 @@ import { ThroughOptions, TransformOptions, WithEncoding, - SerializationFormats, JsonParseOptions, + IBatchParams, + IRateParams, + IParallelMapParams, } from "./definitions"; +/** + * Convert an array into a Readable stream of its elements + * @param array Array of elements to stream + */ export function fromArray(array: any[]): NodeJS.ReadableStream { return baseFunctions.fromArray(array); } +/** + * Return a ReadWrite stream that maps streamed chunks + * @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such) + * @param options? + * @param options.readableObjectMode? Whether this stream should behave as a readable stream of objects + * @param options.writableObjectMode? Whether this stream should behave as a writable stream of objects + */ export function map( mapper: (chunk: T, encoding?: string) => R, options?: TransformOptions, @@ -21,6 +34,13 @@ export function map( return baseFunctions.map(mapper, options); } +/** + * Return a ReadWrite stream that flat maps streamed chunks + * @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such) + * @param options? + * @param options.readableObjectMode? Whether this stream should behave as a readable stream of objects + * @param options.writableObjectMode? Whether this stream should behave as a writable stream of objects + */ export function flatMap( mapper: | ((chunk: T, encoding: string) => R[]) @@ -30,6 +50,12 @@ export function flatMap( return baseFunctions.flatMap(mapper, options); } +/** + * Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold + * @param predicate Predicate with which to filter scream chunks + * @param options? + * @param options.objectMode? Whether this stream should behave as a stream of objects. + */ export function filter( mapper: | ((chunk: T, encoding: string) => boolean) @@ -39,6 +65,15 @@ export function filter( return baseFunctions.filter(mapper, options); } +/** + * Return a ReadWrite stream that reduces streamed chunks down to a single value and yield that + * value + * @param iteratee Reducer function to apply on each streamed chunk + * @param initialValue Initial value + * @param options? + * @param options.readableObjectMode? Whether this stream should behave as a readable stream of objects + * @param options.writableObjectMode? Whether this stream should behave as a writable stream of objects + */ export function reduce( iteratee: | ((previousValue: R, chunk: T, encoding: string) => R) @@ -49,6 +84,12 @@ export function reduce( return baseFunctions.reduce(iteratee, initialValue, options); } +/** + * Return a ReadWrite stream that splits streamed chunks using the given separator + * @param separator? Separator to split by, defaulting to "\n" + * @param options? Defaults to encoding: utf8 + * @param options.encoding? Encoding written chunks are assumed to use + */ export function split( separator?: string | RegExp, options?: WithEncoding, @@ -56,6 +97,12 @@ export function split( return baseFunctions.split(separator, options); } +/** + * Return a ReadWrite stream that joins streamed chunks using the given separator + * @param separator Separator to join with + * @param options? Defaults to encoding: utf8 + * @param options.encoding? Encoding written chunks are assumed to use + */ export function join( separator: string, options?: WithEncoding, @@ -63,6 +110,14 @@ export function join( return baseFunctions.join(separator, options); } +/** + * Return a ReadWrite stream that replaces occurrences of the given string or regular expression in + * the streamed chunks with the specified replacement string + * @param searchValue Search string to use + * @param replaceValue Replacement string to use + * @param options? Defaults to encoding: utf8 + * @param options.encoding Encoding written chunks are assumed to use + */ export function replace( searchValue: string | RegExp, replaceValue: string, @@ -71,30 +126,59 @@ export function replace( return baseFunctions.replace(searchValue, replaceValue, options); } -export function parse(format: SerializationFormats): NodeJS.ReadWriteStream { - return baseFunctions.parse(format); +/** + * Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk + * must be a fully defined JSON string in utf8. + */ +export function parse(): NodeJS.ReadWriteStream { + return baseFunctions.parse(); } +/** + * Return a ReadWrite stream that stringifies the streamed chunks to JSON + * @param options? + * @param options.pretty If true, whitespace is inserted into the stringified chunks. + * + */ export function stringify(options?: JsonParseOptions): NodeJS.ReadWriteStream { return baseFunctions.stringify(options); } +/** + * Return a ReadWrite stream that collects streamed chunks into an array or buffer + * @param options? + * @param options.objectMode? Whether this stream should behave as a stream of objects + */ export function collect(options?: ThroughOptions): NodeJS.ReadWriteStream { return baseFunctions.collect(options); } +/** + * Return a Readable stream of readable streams concatenated together + * @param streams Readable streams to concatenate + */ export function concat( ...streams: NodeJS.ReadableStream[] ): NodeJS.ReadableStream { return baseFunctions.concat(...streams); } +/** + * Return a Readable stream of readable streams concatenated together + * @param streams Readable streams to merge + */ export function merge( ...streams: NodeJS.ReadableStream[] ): NodeJS.ReadableStream { return baseFunctions.merge(...streams); } +/** + * Return a Duplex stream from a writable stream that is assumed to somehow, when written to, + * cause the given readable stream to yield chunks + * @param writable Writable stream assumed to cause the readable stream to yield chunks when written to + * @param readable Readable stream assumed to yield chunks when the writable stream is written to + */ export function duplex( writable: Writable, readable: Readable, @@ -102,36 +186,65 @@ export function duplex( return baseFunctions.duplex(writable, readable); } +/** + * Return a Duplex stream from a child process' stdin and stdout + * @param childProcess Child process from which to create duplex stream + */ export function child(childProcess: ChildProcess): NodeJS.ReadWriteStream { return baseFunctions.child(childProcess); } +/** + * Return a Promise resolving to the last streamed chunk of the given readable stream, after it has + * ended + * @param readable Readable stream to wait on + */ export function last(readable: Readable): Promise { return baseFunctions.last(readable); } -export function batch( - batchSize?: number, - maxBatchAge?: number, -): NodeJS.ReadWriteStream { +/** + * Stores chunks of data internally in array and batches when batchSize is reached. + * + * @param batchSize? Size of the batches, defaults to 1000. + * @param maxBatchAge? Max lifetime of a batch, defaults to 500 + */ +export function batch({ + batchSize, + maxBatchAge, +}: IBatchParams): NodeJS.ReadWriteStream { return baseFunctions.batch(batchSize, maxBatchAge); } +/** + * Unbatches and sends individual chunks of data + */ export function unbatch(): NodeJS.ReadWriteStream { return baseFunctions.unbatch(); } -export function rate( - targetRate?: number, - period?: number, -): NodeJS.ReadWriteStream { +/** + * Limits date of data transferred into stream. + * @param targetRate? Desired rate in ms + * @param period? Period to sleep for when rate is above or equal to targetRate + */ +export function rate({ + targetRate, + period, +}: IRateParams): NodeJS.ReadWriteStream { return baseFunctions.rate(targetRate, period); } -export function parallelMap( - mapper: (data: T) => R, - parallel?: number, - sleepTime?: number, -) { +/** + * Limits number of parallel processes in flight. + * @param parallel Max number of parallel processes. + * @param func Function to execute on each data chunk + * @param pause Amount of time to pause processing when max number of parallel processes are executing. + */ +export function parallelMap({ + mapper, + parallel, + sleepTime, +}: IParallelMapParams) { return baseFunctions.parallelMap(mapper, parallel, sleepTime); } From 0171208a367620ac49cf10e3547e1e405ec22c58 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 13 Jun 2019 15:12:55 -0400 Subject: [PATCH 04/69] Update tests for parallel map --- package.json | 1 + src/functions/definitions.ts | 15 ---------- src/functions/functions.spec.ts | 53 +++++++++++++++++++++++++-------- src/functions/index.ts | 27 ++++++----------- yarn.lock | 12 ++++++++ 5 files changed, 62 insertions(+), 46 deletions(-) diff --git a/package.json b/package.json index 38dc4c5..8009f9f 100644 --- a/package.json +++ b/package.json @@ -31,6 +31,7 @@ "devDependencies": { "@types/chai": "^4.1.7", "@types/node": "^10.12.10", + "@types/typescript": "^2.0.0", "ava": "^1.0.0-rc.2", "chai": "^4.2.0", "mhysa": "./", diff --git a/src/functions/definitions.ts b/src/functions/definitions.ts index f18659a..8abb2ea 100644 --- a/src/functions/definitions.ts +++ b/src/functions/definitions.ts @@ -22,18 +22,3 @@ export interface JsonParseOptions { pretty: boolean; } -export interface IBatchParams { - batchSize?: number; - maxBatchAge?: number; -} - -export interface IRateParams { - targetRate?: number; - period?: number; -} - -export interface IParallelMapParams { - mapper: (data: T) => R; - parallel?: number; - sleepTime?: number; -} diff --git a/src/functions/functions.spec.ts b/src/functions/functions.spec.ts index 50e5916..02e5303 100644 --- a/src/functions/functions.spec.ts +++ b/src/functions/functions.spec.ts @@ -25,7 +25,7 @@ import { rate, parallelMap, } from "."; -import { SerializationFormats } from "./definitions"; +import { sleep } from "../helpers"; test.cb("fromArray() streams array elements in flowing mode", t => { t.plan(3); @@ -683,7 +683,7 @@ test.cb("parse() parses the streamed elements as JSON", t => { const expectedElements = ["abc", {}, []]; let i = 0; source - .pipe(parse(SerializationFormats.utf8)) + .pipe(parse()) .on("data", part => { expect(part).to.deep.equal(expectedElements[i]); t.pass(); @@ -702,7 +702,7 @@ test.cb("parse() emits errors on invalid JSON", t => { t.plan(2); const source = new Readable({ objectMode: true }); source - .pipe(parse(SerializationFormats.utf8)) + .pipe(parse()) .resume() .on("error", () => t.pass()) .on("end", t.end); @@ -1297,7 +1297,7 @@ test.cb("rate() sends data at desired rate", t => { }); test.cb("parallel() parallel mapping", t => { - t.plan(5); + t.plan(6); const source = new Readable({ objectMode: true }); const expectedElements = [ "a_processed", @@ -1305,22 +1305,48 @@ test.cb("parallel() parallel mapping", t => { "c_processed", "d_processed", "e_processed", + "f_processed", ]; - const orderedResults: string[] = []; + const orderedResults: Array<{ output: string; processed: number }> = []; // Record start / end times of each process and then compare to figure out # of processes ocurring and order source - .pipe(parallelMap(data => data + "_processed")) + .pipe( + parallelMap(async (data: any) => { + const c = data + "_processed"; + await sleep(500); + orderedResults.push({ + output: c, + processed: performance.now(), + }); + return c; + }, 2), + ) .on("data", (element: string) => { t.true(expectedElements.includes(element)); - orderedResults.push(element); }) .on("error", t.end) - .on("end", () => { - expect(orderedResults[0]).to.equal("a_processed"); - expect(orderedResults[1]).to.equal("b_processed"); - expect(orderedResults[2]).to.equal("d_processed"); - expect(orderedResults[3]).to.equal("c_processed"); - expect(orderedResults[4]).to.equal("e_processed"); + .on("end", async () => { + expect(orderedResults[0].processed).to.be.lessThan( + orderedResults[1].processed + 500, + ); + expect(orderedResults[2].processed).to.be.lessThan( + orderedResults[3].processed + 500, + ); + expect(orderedResults[4].processed).to.be.lessThan( + orderedResults[5].processed + 500, + ); + expect(orderedResults[2].processed).to.be.greaterThan( + orderedResults[0].processed + 500, + ); + expect(orderedResults[3].processed).to.be.greaterThan( + orderedResults[1].processed + 500, + ); + expect(orderedResults[4].processed).to.be.greaterThan( + orderedResults[2].processed + 500, + ); + expect(orderedResults[5].processed).to.be.greaterThan( + orderedResults[3].processed + 500, + ); t.end(); }); @@ -1329,5 +1355,6 @@ test.cb("parallel() parallel mapping", t => { source.push("c"); source.push("d"); source.push("e"); + source.push("f"); source.push(null); }); diff --git a/src/functions/index.ts b/src/functions/index.ts index 1ad0f10..9085930 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -7,9 +7,6 @@ import { TransformOptions, WithEncoding, JsonParseOptions, - IBatchParams, - IRateParams, - IParallelMapParams, } from "./definitions"; /** @@ -205,14 +202,10 @@ export function last(readable: Readable): Promise { /** * Stores chunks of data internally in array and batches when batchSize is reached. - * - * @param batchSize? Size of the batches, defaults to 1000. + * @param batchSize Size of the batches, defaults to 1000. * @param maxBatchAge? Max lifetime of a batch, defaults to 500 */ -export function batch({ - batchSize, - maxBatchAge, -}: IBatchParams): NodeJS.ReadWriteStream { +export function batch(batchSize: number, maxBatchAge?: number): NodeJS.ReadWriteStream { return baseFunctions.batch(batchSize, maxBatchAge); } @@ -225,13 +218,11 @@ export function unbatch(): NodeJS.ReadWriteStream { /** * Limits date of data transferred into stream. + * @param options? * @param targetRate? Desired rate in ms * @param period? Period to sleep for when rate is above or equal to targetRate */ -export function rate({ - targetRate, - period, -}: IRateParams): NodeJS.ReadWriteStream { +export function rate(targetRate?: number, period?: number): NodeJS.ReadWriteStream { return baseFunctions.rate(targetRate, period); } @@ -241,10 +232,10 @@ export function rate({ * @param func Function to execute on each data chunk * @param pause Amount of time to pause processing when max number of parallel processes are executing. */ -export function parallelMap({ - mapper, - parallel, - sleepTime, -}: IParallelMapParams) { +export function parallelMap( + mapper: (chunk: T) => R, + parallel?: number, + sleepTime?: number, +) { return baseFunctions.parallelMap(mapper, parallel, sleepTime); } diff --git a/yarn.lock b/yarn.lock index 082e22d..dfb9135 100644 --- a/yarn.lock +++ b/yarn.lock @@ -335,6 +335,13 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-10.12.10.tgz#4fa76e6598b7de3f0cb6ec3abacc4f59e5b3a2ce" integrity sha512-8xZEYckCbUVgK8Eg7lf5Iy4COKJ5uXlnIOnePN0WUwSQggy9tolM+tDJf7wMOnT/JT/W9xDYIaYggt3mRV2O5w== +"@types/typescript@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@types/typescript/-/typescript-2.0.0.tgz#c433539c98bae28682b307eaa7a0fd2115b83c28" + integrity sha1-xDNTnJi64oaCswfqp6D9IRW4PCg= + dependencies: + typescript "*" + abbrev@1: version "1.1.1" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" @@ -3205,6 +3212,11 @@ type-detect@^4.0.0, type-detect@^4.0.5: resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== +typescript@*: + version "3.5.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.2.tgz#a09e1dc69bc9551cadf17dba10ee42cf55e5d56c" + integrity sha512-7KxJovlYhTX5RaRbUdkAXN1KUZ8PwWlTzQdHV6xNqvuFOs7+WBo10TQUqT19Q/Jz2hk5v9TQDIhyLhhJY4p5AA== + typescript@^3.1.6: version "3.1.6" resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.1.6.tgz#b6543a83cfc8c2befb3f4c8fba6896f5b0c9be68" From 712e538c3e86c08bc2003b3af7188b8f221b8833 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 13 Jun 2019 15:13:59 -0400 Subject: [PATCH 05/69] Update sleep time --- src/functions/functions.spec.ts | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/functions/functions.spec.ts b/src/functions/functions.spec.ts index 02e5303..ed11a46 100644 --- a/src/functions/functions.spec.ts +++ b/src/functions/functions.spec.ts @@ -1313,7 +1313,7 @@ test.cb("parallel() parallel mapping", t => { .pipe( parallelMap(async (data: any) => { const c = data + "_processed"; - await sleep(500); + await sleep(50); orderedResults.push({ output: c, processed: performance.now(), @@ -1327,25 +1327,25 @@ test.cb("parallel() parallel mapping", t => { .on("error", t.end) .on("end", async () => { expect(orderedResults[0].processed).to.be.lessThan( - orderedResults[1].processed + 500, + orderedResults[1].processed + 50, ); expect(orderedResults[2].processed).to.be.lessThan( - orderedResults[3].processed + 500, + orderedResults[3].processed + 50, ); expect(orderedResults[4].processed).to.be.lessThan( - orderedResults[5].processed + 500, + orderedResults[5].processed + 50, ); expect(orderedResults[2].processed).to.be.greaterThan( - orderedResults[0].processed + 500, + orderedResults[0].processed + 50, ); expect(orderedResults[3].processed).to.be.greaterThan( - orderedResults[1].processed + 500, + orderedResults[1].processed + 50, ); expect(orderedResults[4].processed).to.be.greaterThan( - orderedResults[2].processed + 500, + orderedResults[2].processed + 50, ); expect(orderedResults[5].processed).to.be.greaterThan( - orderedResults[3].processed + 500, + orderedResults[3].processed + 50, ); t.end(); }); From 86020a50ada365eb7e20e69703cf847f9b17d397 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 13 Jun 2019 15:28:41 -0400 Subject: [PATCH 06/69] Update tests for parallelMap --- src/functions/functions.spec.ts | 50 +++++++++++++++++++-------------- 1 file changed, 29 insertions(+), 21 deletions(-) diff --git a/src/functions/functions.spec.ts b/src/functions/functions.spec.ts index ed11a46..1acb73d 100644 --- a/src/functions/functions.spec.ts +++ b/src/functions/functions.spec.ts @@ -1298,6 +1298,7 @@ test.cb("rate() sends data at desired rate", t => { test.cb("parallel() parallel mapping", t => { t.plan(6); + const offset = 50; const source = new Readable({ objectMode: true }); const expectedElements = [ "a_processed", @@ -1307,17 +1308,21 @@ test.cb("parallel() parallel mapping", t => { "e_processed", "f_processed", ]; - const orderedResults: Array<{ output: string; processed: number }> = []; - // Record start / end times of each process and then compare to figure out # of processes ocurring and order + interface IPerfData { + start: number; + output?: string; + finish?: number; + } + const orderedResults: IPerfData[] = []; source .pipe( parallelMap(async (data: any) => { + const perfData: IPerfData = { start: performance.now() }; const c = data + "_processed"; - await sleep(50); - orderedResults.push({ - output: c, - processed: performance.now(), - }); + perfData.output = c; + await sleep(offset); + perfData.finish = performance.now(); + orderedResults.push(perfData); return c; }, 2), ) @@ -1326,26 +1331,29 @@ test.cb("parallel() parallel mapping", t => { }) .on("error", t.end) .on("end", async () => { - expect(orderedResults[0].processed).to.be.lessThan( - orderedResults[1].processed + 50, + expect(orderedResults[0].finish).to.be.lessThan( + orderedResults[2].start, ); - expect(orderedResults[2].processed).to.be.lessThan( - orderedResults[3].processed + 50, + expect(orderedResults[1].finish).to.be.lessThan( + orderedResults[3].start, ); - expect(orderedResults[4].processed).to.be.lessThan( - orderedResults[5].processed + 50, + expect(orderedResults[2].finish).to.be.lessThan( + orderedResults[4].start, ); - expect(orderedResults[2].processed).to.be.greaterThan( - orderedResults[0].processed + 50, + expect(orderedResults[3].finish).to.be.lessThan( + orderedResults[5].start, ); - expect(orderedResults[3].processed).to.be.greaterThan( - orderedResults[1].processed + 50, + expect(orderedResults[0].start).to.be.lessThan( + orderedResults[2].start + offset ); - expect(orderedResults[4].processed).to.be.greaterThan( - orderedResults[2].processed + 50, + expect(orderedResults[1].start).to.be.lessThan( + orderedResults[3].start + offset ); - expect(orderedResults[5].processed).to.be.greaterThan( - orderedResults[3].processed + 50, + expect(orderedResults[2].start).to.be.lessThan( + orderedResults[4].start + offset + ); + expect(orderedResults[3].start).to.be.lessThan( + orderedResults[5].start + offset ); t.end(); }); From bad58a27fe7154936e2951288151dc421bdf4a4f Mon Sep 17 00:00:00 2001 From: Lewis Diamond Date: Tue, 6 Aug 2019 16:50:58 -0400 Subject: [PATCH 07/69] Add batching timeout --- src/functions/functions.spec.ts | 89 ++- src/functions/functions.ts | 48 +- yarn.lock | 1165 +++++++++++++++++-------------- 3 files changed, 716 insertions(+), 586 deletions(-) diff --git a/src/functions/functions.spec.ts b/src/functions/functions.spec.ts index 1acb73d..5bc2f9f 100644 --- a/src/functions/functions.spec.ts +++ b/src/functions/functions.spec.ts @@ -141,30 +141,35 @@ test.cb("map() emits errors during synchronous mapping", t => { source.push(null); }); -test.cb("map() emits errors during asynchronous mapping", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - source - .pipe( - map(async (element: string) => { - await Promise.resolve(); - if (element !== "a") { - throw new Error("Failed mapping"); - } - return element.toUpperCase(); - }), - ) - .resume() - .on("error", err => { - expect(err.message).to.equal("Failed mapping"); - t.pass(); - }) - .on("end", t.end); +test("map() emits errors during asynchronous mapping", t => { + t.plan(1); + return new Promise((resolve, reject) => { + const source = new Readable({ objectMode: true }); + source + .pipe( + map(async (element: string) => { + await Promise.resolve(); + if (element !== "a") { + throw new Error("Failed mapping"); + } + return element.toUpperCase(); + }), + ) + .resume() + .on("error", err => { + expect(err.message).to.equal("Failed mapping"); + t.pass(); + resolve(); + }) + .on("end", () => { + t.fail(); + }); - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); + }); }); test.cb("flatMap() maps elements synchronously", t => { @@ -1212,6 +1217,36 @@ test.cb("batch() batches chunks together", t => { source.push(null); }); +test.cb("batch() yields a batch after the timeout", t => { + t.plan(3); + const source = new Readable({ + objectMode: true, + read(size: number) {}, + }); + const expectedElements = [["a", "b"], ["c"], ["d"]]; + let i = 0; + source + .pipe(batch(3)) + .on("data", (element: string[]) => { + console.error("DATA", element); + expect(element).to.deep.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.fail) + .on("end", t.end); + + source.push("a"); + source.push("b"); + setTimeout(() => { + source.push("c"); + }, 600); + setTimeout(() => { + source.push("d"); + source.push(null); + }, 600 * 2); +}); + test.cb("unbatch() unbatches", t => { t.plan(3); const source = new Readable({ objectMode: true }); @@ -1344,16 +1379,16 @@ test.cb("parallel() parallel mapping", t => { orderedResults[5].start, ); expect(orderedResults[0].start).to.be.lessThan( - orderedResults[2].start + offset + orderedResults[2].start + offset, ); expect(orderedResults[1].start).to.be.lessThan( - orderedResults[3].start + offset + orderedResults[3].start + offset, ); expect(orderedResults[2].start).to.be.lessThan( - orderedResults[4].start + offset + orderedResults[4].start + offset, ); expect(orderedResults[3].start).to.be.lessThan( - orderedResults[5].start + offset + orderedResults[5].start + offset, ); t.end(); }); diff --git a/src/functions/functions.ts b/src/functions/functions.ts index be2afff..fd10e73 100644 --- a/src/functions/functions.ts +++ b/src/functions/functions.ts @@ -49,19 +49,12 @@ export function map( return new Transform({ ...options, async transform(chunk: T, encoding, callback) { - let isPromise = false; try { - const mapped = mapper(chunk, encoding); - isPromise = mapped instanceof Promise; - callback(undefined, await mapped); + const mapped = await mapper(chunk, encoding); + this.push(mapped); + callback(); } catch (err) { - if (isPromise) { - // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly - this.emit("error", err); - callback(); - } else { - callback(err); - } + callback(err); } }, }); @@ -504,28 +497,33 @@ export function last(readable: Readable): Promise { * @param maxBatchAge Max lifetime of a batch */ export function batch(batchSize: number = 1000, maxBatchAge: number = 500) { - const buffer: any[] = []; - let startTime: number | null = null; + let buffer: any[] = []; + let timer: NodeJS.Timer | null = null; + let sendChunk = (self: Transform) => { + timer && clearTimeout(timer); + timer = null; + self.push(buffer); + buffer = []; + }; return new Transform({ objectMode: true, transform(chunk, encoding, callback) { - if ( - buffer.length === batchSize - 1 || - (startTime !== null && - startTime - performance.now() >= maxBatchAge) - ) { - buffer.push(chunk); - callback(undefined, buffer.splice(0)); + buffer.push(chunk); + if (buffer.length === batchSize) { + sendChunk(this); } else { - if (startTime === null) { - startTime = performance.now(); + if (timer === null) { + timer = setInterval(() => { + sendChunk(this); + }, maxBatchAge); } - buffer.push(chunk); - callback(); } + callback(); }, flush(callback) { - callback(undefined, buffer.splice(0)); + console.error("flushing"); + sendChunk(this); + callback(); }, }); } diff --git a/yarn.lock b/yarn.lock index dfb9135..a103462 100644 --- a/yarn.lock +++ b/yarn.lock @@ -20,13 +20,13 @@ "@babel/plugin-transform-exponentiation-operator" "^7.0.0" "@babel/plugin-transform-modules-commonjs" "^7.0.0" -"@ava/babel-preset-transform-test-files@^4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@ava/babel-preset-transform-test-files/-/babel-preset-transform-test-files-4.0.0.tgz#95d426f5982f934567ae5a21e43eac0a463d6feb" - integrity sha512-V9hYHA/ZLb4I8imrrG8PT0mzgThjWWmahPV+mrQUZobVnsekBUDrf0JsfXVm4guS3binWxWn+MmQt+V81hTizA== +"@ava/babel-preset-transform-test-files@^5.0.0": + version "5.0.0" + resolved "https://registry.yarnpkg.com/@ava/babel-preset-transform-test-files/-/babel-preset-transform-test-files-5.0.0.tgz#e06fc762069511e597531cc1120e22216aac6981" + integrity sha512-rqgyQwkT0+j2JzYP51dOv80u33rzAvjBtXRzUON+7+6u26mjoudRXci2+1s18rat8r4uOlZfbzm114YS6pwmYw== dependencies: "@ava/babel-plugin-throws-helper" "^3.0.0" - babel-plugin-espower "^3.0.0" + babel-plugin-espower "^3.0.1" "@ava/write-file-atomic@^2.2.0": version "2.2.0" @@ -44,34 +44,34 @@ dependencies: "@babel/highlight" "^7.0.0" -"@babel/core@^7.1.5": - version "7.1.6" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.1.6.tgz#3733cbee4317429bc87c62b29cf8587dba7baeb3" - integrity sha512-Hz6PJT6e44iUNpAn8AoyAs6B3bl60g7MJQaI0rZEar6ECzh6+srYO1xlIdssio34mPaUtAb1y+XlkkSJzok3yw== +"@babel/core@^7.4.0": + version "7.5.0" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.5.0.tgz#6ed6a2881ad48a732c5433096d96d1b0ee5eb734" + integrity sha512-6Isr4X98pwXqHvtigw71CKgmhL1etZjPs5A67jL/w0TkLM9eqmFR40YrnJvEc1WnMZFsskjsmid8bHZyxKEAnw== dependencies: "@babel/code-frame" "^7.0.0" - "@babel/generator" "^7.1.6" - "@babel/helpers" "^7.1.5" - "@babel/parser" "^7.1.6" - "@babel/template" "^7.1.2" - "@babel/traverse" "^7.1.6" - "@babel/types" "^7.1.6" + "@babel/generator" "^7.5.0" + "@babel/helpers" "^7.5.0" + "@babel/parser" "^7.5.0" + "@babel/template" "^7.4.4" + "@babel/traverse" "^7.5.0" + "@babel/types" "^7.5.0" convert-source-map "^1.1.0" debug "^4.1.0" json5 "^2.1.0" - lodash "^4.17.10" + lodash "^4.17.11" resolve "^1.3.2" semver "^5.4.1" source-map "^0.5.0" -"@babel/generator@^7.0.0", "@babel/generator@^7.1.5", "@babel/generator@^7.1.6": - version "7.1.6" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.1.6.tgz#001303cf87a5b9d093494a4bf251d7b5d03d3999" - integrity sha512-brwPBtVvdYdGxtenbQgfCdDPmtkmUBZPjUoK5SXJEBuHaA5BCubh9ly65fzXz7R6o5rA76Rs22ES8Z+HCc0YIQ== +"@babel/generator@^7.0.0", "@babel/generator@^7.4.0", "@babel/generator@^7.5.0": + version "7.5.0" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.5.0.tgz#f20e4b7a91750ee8b63656073d843d2a736dca4a" + integrity sha512-1TTVrt7J9rcG5PMjvO7VEG3FrEoEJNHxumRq66GemPmzboLWtIjjcJgk8rokuAS7IiRSpgVSu5Vb9lc99iJkOA== dependencies: - "@babel/types" "^7.1.6" + "@babel/types" "^7.5.0" jsesc "^2.5.1" - lodash "^4.17.10" + lodash "^4.17.11" source-map "^0.5.0" trim-right "^1.0.1" @@ -121,29 +121,29 @@ dependencies: "@babel/types" "^7.0.0" -"@babel/helper-module-transforms@^7.1.0": - version "7.1.0" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.1.0.tgz#470d4f9676d9fad50b324cdcce5fbabbc3da5787" - integrity sha512-0JZRd2yhawo79Rcm4w0LwSMILFmFXjugG3yqf+P/UsKsRS1mJCmMwwlHDlMg7Avr9LrvSpp4ZSULO9r8jpCzcw== +"@babel/helper-module-transforms@^7.4.4": + version "7.4.4" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.4.4.tgz#96115ea42a2f139e619e98ed46df6019b94414b8" + integrity sha512-3Z1yp8TVQf+B4ynN7WoHPKS8EkdTbgAEy0nU0rs/1Kw4pDgmvYH3rz3aI11KgxKCba2cn7N+tqzV1mY2HMN96w== dependencies: "@babel/helper-module-imports" "^7.0.0" "@babel/helper-simple-access" "^7.1.0" - "@babel/helper-split-export-declaration" "^7.0.0" - "@babel/template" "^7.1.0" - "@babel/types" "^7.0.0" - lodash "^4.17.10" + "@babel/helper-split-export-declaration" "^7.4.4" + "@babel/template" "^7.4.4" + "@babel/types" "^7.4.4" + lodash "^4.17.11" "@babel/helper-plugin-utils@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.0.0.tgz#bbb3fbee98661c569034237cc03967ba99b4f250" integrity sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA== -"@babel/helper-regex@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.0.0.tgz#2c1718923b57f9bbe64705ffe5640ac64d9bdb27" - integrity sha512-TR0/N0NDCcUIUEbqV6dCO+LptmmSQFQ7q70lfcEB4URsjD0E1HzicrwUH+ap6BAQ2jhCX9Q4UqZy4wilujWlkg== +"@babel/helper-regex@^7.4.4": + version "7.4.4" + resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.4.4.tgz#a47e02bc91fb259d2e6727c2a30013e3ac13c4a2" + integrity sha512-Y5nuB/kESmR3tKjU8Nkn1wMGEx1tjJX076HBMeL3XLQCu6vA/YRzuTW0bbb+qRnXvQGn+d6Rx953yffl8vEy7Q== dependencies: - lodash "^4.17.10" + lodash "^4.17.11" "@babel/helper-remap-async-to-generator@^7.1.0": version "7.1.0" @@ -164,158 +164,159 @@ "@babel/template" "^7.1.0" "@babel/types" "^7.0.0" -"@babel/helper-split-export-declaration@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.0.0.tgz#3aae285c0311c2ab095d997b8c9a94cad547d813" - integrity sha512-MXkOJqva62dfC0w85mEf/LucPPS/1+04nmmRMPEBUB++hiiThQ2zPtX/mEWQ3mtzCEjIJvPY8nuwxXtQeQwUag== +"@babel/helper-split-export-declaration@^7.4.4": + version "7.4.4" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.4.4.tgz#ff94894a340be78f53f06af038b205c49d993677" + integrity sha512-Ro/XkzLf3JFITkW6b+hNxzZ1n5OQ80NvIUdmHspih1XAhtN3vPTuUFT4eQnela+2MaZ5ulH+iyP513KJrxbN7Q== dependencies: - "@babel/types" "^7.0.0" + "@babel/types" "^7.4.4" "@babel/helper-wrap-function@^7.1.0": - version "7.1.0" - resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.1.0.tgz#8cf54e9190706067f016af8f75cb3df829cc8c66" - integrity sha512-R6HU3dete+rwsdAfrOzTlE9Mcpk4RjU3aX3gi9grtmugQY0u79X7eogUvfXA5sI81Mfq1cn6AgxihfN33STjJA== + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.2.0.tgz#c4e0012445769e2815b55296ead43a958549f6fa" + integrity sha512-o9fP1BZLLSrYlxYEYyl2aS+Flun5gtjTIG8iln+XuEzQTs0PLagAGSXUcqruJwD5fM48jzIEggCKpIfWTcR7pQ== dependencies: "@babel/helper-function-name" "^7.1.0" "@babel/template" "^7.1.0" "@babel/traverse" "^7.1.0" - "@babel/types" "^7.0.0" + "@babel/types" "^7.2.0" -"@babel/helpers@^7.1.5": - version "7.1.5" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.1.5.tgz#68bfc1895d685f2b8f1995e788dbfe1f6ccb1996" - integrity sha512-2jkcdL02ywNBry1YNFAH/fViq4fXG0vdckHqeJk+75fpQ2OH+Az6076tX/M0835zA45E0Cqa6pV5Kiv9YOqjEg== +"@babel/helpers@^7.5.0": + version "7.5.1" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.5.1.tgz#65407c741a56ddd59dd86346cd112da3de912db3" + integrity sha512-rVOTDv8sH8kNI72Unenusxw6u+1vEepZgLxeV+jHkhsQlYhzVhzL1EpfoWT7Ub3zpWSv2WV03V853dqsnyoQzA== dependencies: - "@babel/template" "^7.1.2" - "@babel/traverse" "^7.1.5" - "@babel/types" "^7.1.5" + "@babel/template" "^7.4.4" + "@babel/traverse" "^7.5.0" + "@babel/types" "^7.5.0" "@babel/highlight@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.0.0.tgz#f710c38c8d458e6dd9a201afb637fcb781ce99e4" - integrity sha512-UFMC4ZeFC48Tpvj7C8UgLvtkaUuovQX+5xNWrsIoMG8o2z+XFKjKaN9iVmS84dPwVN00W4wPmqvYoZF3EGAsfw== + version "7.5.0" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.5.0.tgz#56d11312bd9248fa619591d02472be6e8cb32540" + integrity sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ== dependencies: chalk "^2.0.0" esutils "^2.0.2" js-tokens "^4.0.0" -"@babel/parser@^7.0.0", "@babel/parser@^7.1.2", "@babel/parser@^7.1.6": - version "7.1.6" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.1.6.tgz#16e97aca1ec1062324a01c5a6a7d0df8dd189854" - integrity sha512-dWP6LJm9nKT6ALaa+bnL247GHHMWir3vSlZ2+IHgHgktZQx0L3Uvq2uAWcuzIe+fujRsYWBW2q622C5UvGK9iQ== +"@babel/parser@^7.0.0", "@babel/parser@^7.4.4", "@babel/parser@^7.5.0": + version "7.5.0" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.5.0.tgz#3e0713dff89ad6ae37faec3b29dcfc5c979770b7" + integrity sha512-I5nW8AhGpOXGCCNYGc+p7ExQIBxRFnS2fd/d862bNOKvmoEPjYPcfIjsfdy0ujagYOIYPczKgD9l3FsgTkAzKA== "@babel/plugin-proposal-async-generator-functions@^7.0.0": - version "7.1.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.1.0.tgz#41c1a702e10081456e23a7b74d891922dd1bb6ce" - integrity sha512-Fq803F3Jcxo20MXUSDdmZZXrPe6BWyGcWBPPNB/M7WaUYESKDeKMOGIxEzQOjGSmW/NWb6UaPZrtTB2ekhB/ew== + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.2.0.tgz#b289b306669dce4ad20b0252889a15768c9d417e" + integrity sha512-+Dfo/SCQqrwx48ptLVGLdE39YtWRuKc/Y9I5Fy0P1DDBB9lsAHpjcEJQt+4IifuSOSTLBKJObJqMvaO1pIE8LQ== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.1.0" - "@babel/plugin-syntax-async-generators" "^7.0.0" + "@babel/plugin-syntax-async-generators" "^7.2.0" "@babel/plugin-proposal-object-rest-spread@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.0.0.tgz#9a17b547f64d0676b6c9cecd4edf74a82ab85e7e" - integrity sha512-14fhfoPcNu7itSen7Py1iGN0gEm87hX/B+8nZPqkdmANyyYWYMY2pjA3r8WXbWVKMzfnSNS0xY8GVS0IjXi/iw== + version "7.5.1" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.5.1.tgz#5788ab097c63135e4236548b4f112bfce09dd394" + integrity sha512-PVGXx5LYHcT7L4MdoE+rM5uq68IKlvU9lljVQ4OXY6aUEnGvezcGbM4VNY57Ug+3R2Zg/nYHlEdiWoIBoRA0mw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" - "@babel/plugin-syntax-object-rest-spread" "^7.0.0" + "@babel/plugin-syntax-object-rest-spread" "^7.2.0" "@babel/plugin-proposal-optional-catch-binding@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.0.0.tgz#b610d928fe551ff7117d42c8bb410eec312a6425" - integrity sha512-JPqAvLG1s13B/AuoBjdBYvn38RqW6n1TzrQO839/sIpqLpbnXKacsAgpZHzLD83Sm8SDXMkkrAvEnJ25+0yIpw== + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.2.0.tgz#135d81edb68a081e55e56ec48541ece8065c38f5" + integrity sha512-mgYj3jCcxug6KUcX4OBoOJz3CMrwRfQELPQ5560F70YQUBZB7uac9fqaWamKR1iWUzGiK2t0ygzjTScZnVz75g== dependencies: "@babel/helper-plugin-utils" "^7.0.0" - "@babel/plugin-syntax-optional-catch-binding" "^7.0.0" + "@babel/plugin-syntax-optional-catch-binding" "^7.2.0" -"@babel/plugin-syntax-async-generators@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.0.0.tgz#bf0891dcdbf59558359d0c626fdc9490e20bc13c" - integrity sha512-im7ged00ddGKAjcZgewXmp1vxSZQQywuQXe2B1A7kajjZmDeY/ekMPmWr9zJgveSaQH0k7BcGrojQhcK06l0zA== +"@babel/plugin-syntax-async-generators@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.2.0.tgz#69e1f0db34c6f5a0cf7e2b3323bf159a76c8cb7f" + integrity sha512-1ZrIRBv2t0GSlcwVoQ6VgSLpLgiN/FVQUzt9znxo7v2Ov4jJrs8RY8tv0wvDmFN3qIdMKWrmMMW6yZ0G19MfGg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" -"@babel/plugin-syntax-object-rest-spread@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.0.0.tgz#37d8fbcaf216bd658ea1aebbeb8b75e88ebc549b" - integrity sha512-5A0n4p6bIiVe5OvQPxBnesezsgFJdHhSs3uFSvaPdMqtsovajLZ+G2vZyvNe10EzJBWWo3AcHGKhAFUxqwp2dw== +"@babel/plugin-syntax-object-rest-spread@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.2.0.tgz#3b7a3e733510c57e820b9142a6579ac8b0dfad2e" + integrity sha512-t0JKGgqk2We+9may3t0xDdmneaXmyxq0xieYcKHxIsrJO64n1OiMWNUtc5gQK1PA0NpdCRrtZp4z+IUaKugrSA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" -"@babel/plugin-syntax-optional-catch-binding@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.0.0.tgz#886f72008b3a8b185977f7cb70713b45e51ee475" - integrity sha512-Wc+HVvwjcq5qBg1w5RG9o9RVzmCaAg/Vp0erHCKpAYV8La6I94o4GQAmFYNmkzoMO6gzoOSulpKeSSz6mPEoZw== +"@babel/plugin-syntax-optional-catch-binding@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.2.0.tgz#a94013d6eda8908dfe6a477e7f9eda85656ecf5c" + integrity sha512-bDe4xKNhb0LI7IvZHiA13kff0KEfaGX/Hv4lMA9+7TEc63hMNvfKo6ZFpXhKuEp+II/q35Gc4NoMeDZyaUbj9w== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-async-to-generator@^7.0.0": - version "7.1.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.1.0.tgz#109e036496c51dd65857e16acab3bafdf3c57811" - integrity sha512-rNmcmoQ78IrvNCIt/R9U+cixUHeYAzgusTFgIAv+wQb9HJU4szhpDD6e5GCACmj/JP5KxuCwM96bX3L9v4ZN/g== + version "7.5.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.5.0.tgz#89a3848a0166623b5bc481164b5936ab947e887e" + integrity sha512-mqvkzwIGkq0bEF1zLRRiTdjfomZJDV33AH3oQzHVGkI2VzEmXLpKKOBvEVaFZBJdN0XTyH38s9j/Kiqr68dggg== dependencies: "@babel/helper-module-imports" "^7.0.0" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-remap-async-to-generator" "^7.1.0" "@babel/plugin-transform-dotall-regex@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.0.0.tgz#73a24da69bc3c370251f43a3d048198546115e58" - integrity sha512-00THs8eJxOJUFVx1w8i1MBF4XH4PsAjKjQ1eqN/uCH3YKwP21GCKfrn6YZFZswbOk9+0cw1zGQPHVc1KBlSxig== + version "7.4.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.4.4.tgz#361a148bc951444312c69446d76ed1ea8e4450c3" + integrity sha512-P05YEhRc2h53lZDjRPk/OektxCVevFzZs2Gfjd545Wde3k+yFDbXORgl2e0xpbq8mLcKJ7Idss4fAg0zORN/zg== dependencies: "@babel/helper-plugin-utils" "^7.0.0" - "@babel/helper-regex" "^7.0.0" - regexpu-core "^4.1.3" + "@babel/helper-regex" "^7.4.4" + regexpu-core "^4.5.4" "@babel/plugin-transform-exponentiation-operator@^7.0.0": - version "7.1.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.1.0.tgz#9c34c2ee7fd77e02779cfa37e403a2e1003ccc73" - integrity sha512-uZt9kD1Pp/JubkukOGQml9tqAeI8NkE98oZnHZ2qHRElmeKCodbTZgOEUtujSCSLhHSBWbzNiFSDIMC4/RBTLQ== + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.2.0.tgz#a63868289e5b4007f7054d46491af51435766008" + integrity sha512-umh4hR6N7mu4Elq9GG8TOu9M0bakvlsREEC+ialrQN6ABS4oDQ69qJv1VtR3uxlKMCQMCvzk7vr17RHKcjx68A== dependencies: "@babel/helper-builder-binary-assignment-operator-visitor" "^7.1.0" "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-transform-modules-commonjs@^7.0.0": - version "7.1.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.1.0.tgz#0a9d86451cbbfb29bd15186306897c67f6f9a05c" - integrity sha512-wtNwtMjn1XGwM0AXPspQgvmE6msSJP15CX2RVfpTSTNPLhKhaOjaIfBaVfj4iUZ/VrFSodcFedwtPg/NxwQlPA== + version "7.5.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.5.0.tgz#425127e6045231360858eeaa47a71d75eded7a74" + integrity sha512-xmHq0B+ytyrWJvQTc5OWAC4ii6Dhr0s22STOoydokG51JjWhyYo5mRPXoi+ZmtHQhZZwuXNN+GG5jy5UZZJxIQ== dependencies: - "@babel/helper-module-transforms" "^7.1.0" + "@babel/helper-module-transforms" "^7.4.4" "@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-simple-access" "^7.1.0" + babel-plugin-dynamic-import-node "^2.3.0" -"@babel/template@^7.1.0", "@babel/template@^7.1.2": - version "7.1.2" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.1.2.tgz#090484a574fef5a2d2d7726a674eceda5c5b5644" - integrity sha512-SY1MmplssORfFiLDcOETrW7fCLl+PavlwMh92rrGcikQaRq4iWPVH0MpwPpY3etVMx6RnDjXtr6VZYr/IbP/Ag== +"@babel/template@^7.1.0", "@babel/template@^7.4.4": + version "7.4.4" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.4.4.tgz#f4b88d1225689a08f5bc3a17483545be9e4ed237" + integrity sha512-CiGzLN9KgAvgZsnivND7rkA+AeJ9JB0ciPOD4U59GKbQP2iQl+olF1l76kJOupqidozfZ32ghwBEJDhnk9MEcw== dependencies: "@babel/code-frame" "^7.0.0" - "@babel/parser" "^7.1.2" - "@babel/types" "^7.1.2" + "@babel/parser" "^7.4.4" + "@babel/types" "^7.4.4" -"@babel/traverse@^7.1.0", "@babel/traverse@^7.1.5", "@babel/traverse@^7.1.6": - version "7.1.6" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.1.6.tgz#c8db9963ab4ce5b894222435482bd8ea854b7b5c" - integrity sha512-CXedit6GpISz3sC2k2FsGCUpOhUqKdyL0lqNrImQojagnUMXf8hex4AxYFRuMkNGcvJX5QAFGzB5WJQmSv8SiQ== +"@babel/traverse@^7.1.0", "@babel/traverse@^7.5.0": + version "7.5.0" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.5.0.tgz#4216d6586854ef5c3c4592dab56ec7eb78485485" + integrity sha512-SnA9aLbyOCcnnbQEGwdfBggnc142h/rbqqsXcaATj2hZcegCl903pUD/lfpsNBlBSuWow/YDfRyJuWi2EPR5cg== dependencies: "@babel/code-frame" "^7.0.0" - "@babel/generator" "^7.1.6" + "@babel/generator" "^7.5.0" "@babel/helper-function-name" "^7.1.0" - "@babel/helper-split-export-declaration" "^7.0.0" - "@babel/parser" "^7.1.6" - "@babel/types" "^7.1.6" + "@babel/helper-split-export-declaration" "^7.4.4" + "@babel/parser" "^7.5.0" + "@babel/types" "^7.5.0" debug "^4.1.0" globals "^11.1.0" - lodash "^4.17.10" + lodash "^4.17.11" -"@babel/types@^7.0.0", "@babel/types@^7.1.2", "@babel/types@^7.1.5", "@babel/types@^7.1.6": - version "7.1.6" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.1.6.tgz#0adb330c3a281348a190263aceb540e10f04bcce" - integrity sha512-DMiUzlY9DSjVsOylJssxLHSgj6tWM9PRFJOGW/RaOglVOK9nzTxoOMfTfRQXGUCUQ/HmlG2efwC+XqUEJ5ay4w== +"@babel/types@^7.0.0", "@babel/types@^7.2.0", "@babel/types@^7.4.4", "@babel/types@^7.5.0": + version "7.5.0" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.5.0.tgz#e47d43840c2e7f9105bc4d3a2c371b4d0c7832ab" + integrity sha512-UFpDVqRABKsW01bvw7/wSUe56uy6RXM5+VJibVVAybDGxEW25jdwiFJEf7ASvSaC7sN7rbE/l3cLp2izav+CtQ== dependencies: esutils "^2.0.2" - lodash "^4.17.10" + lodash "^4.17.11" to-fast-properties "^2.0.0" "@concordance/react@^2.0.0": @@ -330,10 +331,34 @@ resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.1.7.tgz#1b8e33b61a8c09cbe1f85133071baa0dbf9fa71a" integrity sha512-2Y8uPt0/jwjhQ6EiluT0XCri1Dbplr0ZxfFXUz+ye13gaqE8u5gL5ppao1JrUYr9cIip5S6MvQzBS7Kke7U9VA== +"@types/events@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/events/-/events-3.0.0.tgz#2862f3f58a9a7f7c3e78d79f130dd4d71c25c2a7" + integrity sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g== + +"@types/glob@^7.1.1": + version "7.1.1" + resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.1.1.tgz#aa59a1c6e3fbc421e07ccd31a944c30eba521575" + integrity sha512-1Bh06cbWJUHMC97acuD6UMG29nMt0Aqz1vF3guLfG+kHHJhy3AyohZFFxYk2f7Q1SQIrNwvncxAE0N/9s70F2w== + dependencies: + "@types/events" "*" + "@types/minimatch" "*" + "@types/node" "*" + +"@types/minimatch@*": + version "3.0.3" + resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d" + integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== + +"@types/node@*": + version "12.0.12" + resolved "https://registry.yarnpkg.com/@types/node/-/node-12.0.12.tgz#cc791b402360db1eaf7176479072f91ee6c6c7ca" + integrity sha512-Uy0PN4R5vgBUXFoJrKryf5aTk3kJ8Rv3PdlHjl6UaX+Cqp1QE0yPQ68MPXGrZOfG7gZVNDIJZYyot0B9ubXUrQ== + "@types/node@^10.12.10": - version "10.12.10" - resolved "https://registry.yarnpkg.com/@types/node/-/node-10.12.10.tgz#4fa76e6598b7de3f0cb6ec3abacc4f59e5b3a2ce" - integrity sha512-8xZEYckCbUVgK8Eg7lf5Iy4COKJ5uXlnIOnePN0WUwSQggy9tolM+tDJf7wMOnT/JT/W9xDYIaYggt3mRV2O5w== + version "10.14.12" + resolved "https://registry.yarnpkg.com/@types/node/-/node-10.14.12.tgz#0eec3155a46e6c4db1f27c3e588a205f767d622f" + integrity sha512-QcAKpaO6nhHLlxWBvpc4WeLrTvPqlHOvaj0s5GriKkA1zq+bsFBPpfYCvQhLqLgYlIko8A9YrPdaMHCo5mBcpg== "@types/typescript@^2.0.0": version "2.0.0" @@ -354,10 +379,10 @@ ansi-align@^2.0.0: dependencies: string-width "^2.0.0" -ansi-escapes@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.1.0.tgz#f73207bb81207d75fd6c83f125af26eea378ca30" - integrity sha512-UgAb8H9D41AQnu/PbWlCofQVcnV4Gs2bBJi9eZPxfU/hgglFh3SMDMENRIqdr7H6XFnXdoknctFByVsCOotTVw== +ansi-escapes@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.2.0.tgz#8780b98ff9dbf5638152d1f1fe5c1d7b4442976b" + integrity sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ== ansi-regex@^2.0.0: version "2.1.1" @@ -369,15 +394,10 @@ ansi-regex@^3.0.0: resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= -ansi-regex@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.0.0.tgz#70de791edf021404c3fd615aa89118ae0432e5a9" - integrity sha512-iB5Dda8t/UqpPI/IjsejXu5jOGDrzn41wJyljwPH65VCIbk6+1BzFIMJGFwTNrYXT1CrD+B4l19U7awiQ8rk7w== - -ansi-styles@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" - integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= +ansi-regex@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" + integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== ansi-styles@^3.2.1: version "3.2.1" @@ -429,17 +449,17 @@ arr-union@^3.1.0: resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= -array-differ@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/array-differ/-/array-differ-1.0.0.tgz#eff52e3758249d33be402b8bb8e564bb2b5d4031" - integrity sha1-7/UuN1gknTO+QCuLuOVkuytdQDE= +array-differ@^2.0.3: + version "2.1.0" + resolved "https://registry.yarnpkg.com/array-differ/-/array-differ-2.1.0.tgz#4b9c1c3f14b906757082925769e8ab904f4801b1" + integrity sha512-KbUpJgx909ZscOc/7CLATBFam7P1Z1QRQInvgT0UztM9Q72aGKCunKASAl7WNW0tnPmPyEMeMhdsfWhfmW037w== array-find-index@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1" integrity sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E= -array-union@^1.0.1: +array-union@^1.0.1, array-union@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" integrity sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk= @@ -452,9 +472,9 @@ array-uniq@^1.0.1: integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY= array-uniq@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-2.0.0.tgz#0009e30306e37a6dd2e2e2480db5316fdade1583" - integrity sha512-O3QZEr+3wDj7otzF7PjNGs6CA3qmYMLvt5xGkjY/V0VxS+ovvqVo/5wKM/OVOAyuX4DTh9H31zE/yKtO66hTkg== + version "2.1.0" + resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-2.1.0.tgz#46603d5e28e79bfd02b046fcc1d77c6820bd8e98" + integrity sha512-bdHxtev7FN6+MXI1YFW0Q8mQ8dTJc2S8AMfju+ZR77pbg2yAdVyDlwkaUI7Har0LyOMRFPHrJ9lYdyjZZswdlQ== array-unique@^0.3.2: version "0.3.2" @@ -476,10 +496,10 @@ assign-symbols@^1.0.0: resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= -async-each@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.1.tgz#19d386a1d9edc6e7c1c85d388aedbcc56d33602d" - integrity sha1-GdOGodntxufByF04iu28xW0zYC0= +async-each@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" + integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== atob@^2.1.1: version "2.1.2" @@ -487,28 +507,28 @@ atob@^2.1.1: integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== ava@^1.0.0-rc.2: - version "1.0.0-rc.2" - resolved "https://registry.yarnpkg.com/ava/-/ava-1.0.0-rc.2.tgz#a2f63211d2ad1924fa4671ff3b29212b1da656af" - integrity sha512-MxCW+bY+ddID5SrZZHbkuCiwsLup3Dn/bAgnrl6BzHYRI4RF9Yk5zv6S2L1TjqUF1LvRS8RiqZBrD+G6db+fEg== + version "1.4.1" + resolved "https://registry.yarnpkg.com/ava/-/ava-1.4.1.tgz#4a59289e0c9728e492ec3a5be21d1072636be172" + integrity sha512-wKpgOPTL7hJSBWpfbU4SA8rlsTZrph9g9g7qYDV7M6uK1rKeW8oCUJWRwCd8B24S4N0Y5myf6cTEnA66WIk0sA== dependencies: "@ava/babel-preset-stage-4" "^2.0.0" - "@ava/babel-preset-transform-test-files" "^4.0.0" + "@ava/babel-preset-transform-test-files" "^5.0.0" "@ava/write-file-atomic" "^2.2.0" - "@babel/core" "^7.1.5" - "@babel/generator" "^7.1.5" - "@babel/plugin-syntax-async-generators" "^7.0.0" - "@babel/plugin-syntax-object-rest-spread" "^7.0.0" - "@babel/plugin-syntax-optional-catch-binding" "^7.0.0" + "@babel/core" "^7.4.0" + "@babel/generator" "^7.4.0" + "@babel/plugin-syntax-async-generators" "^7.2.0" + "@babel/plugin-syntax-object-rest-spread" "^7.2.0" + "@babel/plugin-syntax-optional-catch-binding" "^7.2.0" "@concordance/react" "^2.0.0" - ansi-escapes "^3.1.0" + ansi-escapes "^3.2.0" ansi-styles "^3.2.1" arr-flatten "^1.1.0" array-union "^1.0.1" array-uniq "^2.0.0" arrify "^1.0.0" bluebird "^3.5.3" - chalk "^2.4.1" - chokidar "^2.0.4" + chalk "^2.4.2" + chokidar "^2.1.5" chunkd "^1.0.0" ci-parallel-vars "^1.0.0" clean-stack "^2.0.0" @@ -520,22 +540,22 @@ ava@^1.0.0-rc.2: concordance "^4.0.0" convert-source-map "^1.6.0" currently-unhandled "^0.4.1" - debug "^4.1.0" - del "^3.0.0" + debug "^4.1.1" + del "^4.0.0" dot-prop "^4.2.0" emittery "^0.4.1" empower-core "^1.2.0" equal-length "^1.0.0" escape-string-regexp "^1.0.5" - esm "^3.0.84" + esm "^3.2.20" figures "^2.0.0" find-up "^3.0.0" - get-port "^4.0.0" + get-port "^4.2.0" globby "^7.1.1" ignore-by-default "^1.0.0" import-local "^2.0.0" indent-string "^3.2.0" - is-ci "^1.2.1" + is-ci "^2.0.0" is-error "^2.2.1" is-observable "^1.1.0" is-plain-object "^2.0.4" @@ -547,45 +567,43 @@ ava@^1.0.0-rc.2: lodash.difference "^4.3.0" lodash.flatten "^4.2.0" loud-rejection "^1.2.0" - make-dir "^1.3.0" + make-dir "^2.1.0" matcher "^1.1.1" md5-hex "^2.0.0" meow "^5.0.0" ms "^2.1.1" - multimatch "^2.1.0" + multimatch "^3.0.0" observable-to-promise "^0.5.0" - ora "^3.0.0" - package-hash "^2.0.0" - pkg-conf "^2.1.0" + ora "^3.2.0" + package-hash "^3.0.0" + pkg-conf "^3.0.0" plur "^3.0.1" pretty-ms "^4.0.0" require-precompiled "^0.1.0" resolve-cwd "^2.0.0" slash "^2.0.0" - source-map-support "^0.5.9" - stack-utils "^1.0.1" - strip-ansi "^5.0.0" + source-map-support "^0.5.11" + stack-utils "^1.0.2" + strip-ansi "^5.2.0" strip-bom-buf "^1.0.0" supertap "^1.0.0" - supports-color "^5.5.0" + supports-color "^6.1.0" trim-off-newlines "^1.0.1" trim-right "^1.0.1" unique-temp-dir "^1.0.0" update-notifier "^2.5.0" -babel-code-frame@^6.22.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" - integrity sha1-Y/1D99weO7fONZR9uP42mj9Yx0s= +babel-plugin-dynamic-import-node@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz#f00f507bdaa3c3e3ff6e7e5e98d90a7acab96f7f" + integrity sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ== dependencies: - chalk "^1.1.3" - esutils "^2.0.2" - js-tokens "^3.0.2" + object.assign "^4.1.0" -babel-plugin-espower@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/babel-plugin-espower/-/babel-plugin-espower-3.0.0.tgz#8dadfa5ec2b9c82e3c4aa0a2d14fbd3ff6d40061" - integrity sha512-f2IUz5kQyrwXnShcv7tvGxf76QkrEl00ENYgd6R0VMrz4xqlwBLZXFs5vse2vehs1Z+T2sXTP3UWX2QxMorzzw== +babel-plugin-espower@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/babel-plugin-espower/-/babel-plugin-espower-3.0.1.tgz#180db17126f88e754105b8b5216d21e520a6bd4e" + integrity sha512-Ms49U7VIAtQ/TtcqRbD6UBmJBUCSxiC3+zPc+eGqxKUIFO1lTshyEDRUjhoAbd2rWfwYf3cZ62oXozrd8W6J0A== dependencies: "@babel/generator" "^7.0.0" "@babel/parser" "^7.0.0" @@ -614,14 +632,14 @@ base@^0.11.1: pascalcase "^0.1.1" binary-extensions@^1.0.0: - version "1.12.0" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.12.0.tgz#c2d780f53d45bba8317a8902d4ceeaf3a6385b14" - integrity sha512-DYWGk01lDcxeS/K9IHPGWfT8PsJmbXRtRd2Sx72Tnb8pcYZQFF1oSDb8hJtS1vhp212q1Rzi5dUf9+nq0o9UIg== + version "1.13.1" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" + integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== bluebird@^3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.3.tgz#7d01c6f9616c9a51ab0f8c549a79dfe6ec33efa7" - integrity sha512-/qKPUQlaW1OyR51WeCPBvRnAlnZFUJkCSG5HzGnuIqhgyJtF+T94lFnn33eiazjRm2LAHVy2guNnaq48X9SJuw== + version "3.5.5" + resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.5.tgz#a8d0afd73251effbbd5fe384a77d73003c17a71f" + integrity sha512-5am6HnnfN+urzt4yfg7IgTbotDjIT/u8AJpEt0sIU9FtXfVeezXAPKswrG+xKUCOYAINpSdgZVDU6QFh+cuH3w== boxen@^1.2.1: version "1.3.0" @@ -644,7 +662,7 @@ brace-expansion@^1.1.7: balanced-match "^1.0.0" concat-map "0.0.1" -braces@^2.3.0, braces@^2.3.1: +braces@^2.3.1, braces@^2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== @@ -665,7 +683,7 @@ buffer-from@^1.0.0, buffer-from@^1.1.0: resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== -builtin-modules@^1.0.0, builtin-modules@^1.1.1: +builtin-modules@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-1.1.1.tgz#270f076c5a72c02f5b65a47df94c5fe3a278892f" integrity sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8= @@ -731,21 +749,10 @@ chai@^4.2.0: pathval "^1.1.0" type-detect "^4.0.5" -chalk@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" - integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= - dependencies: - ansi-styles "^2.2.1" - escape-string-regexp "^1.0.2" - has-ansi "^2.0.0" - strip-ansi "^3.0.0" - supports-color "^2.0.0" - -chalk@^2.0.0, chalk@^2.0.1, chalk@^2.3.0, chalk@^2.3.1, chalk@^2.4.1: - version "2.4.1" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.1.tgz#18c49ab16a037b6eb0152cc83e3471338215b66e" - integrity sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ== +chalk@^2.0.0, chalk@^2.0.1, chalk@^2.3.0, chalk@^2.4.2: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: ansi-styles "^3.2.1" escape-string-regexp "^1.0.5" @@ -756,30 +763,29 @@ check-error@^1.0.2: resolved "https://registry.yarnpkg.com/check-error/-/check-error-1.0.2.tgz#574d312edd88bb5dd8912e9286dd6c0aed4aac82" integrity sha1-V00xLt2Iu13YkS6Sht1sCu1KrII= -chokidar@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.0.4.tgz#356ff4e2b0e8e43e322d18a372460bbcf3accd26" - integrity sha512-z9n7yt9rOvIJrMhvDtDictKrkFHeihkNl6uWMmZlmL6tJtX9Cs+87oK+teBx+JIgzvbX3yZHT3eF8vpbDxHJXQ== +chokidar@^2.1.5: + version "2.1.6" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.6.tgz#b6cad653a929e244ce8a834244164d241fa954c5" + integrity sha512-V2jUo67OKkc6ySiRpJrjlpJKl9kDuG+Xb8VgsGzb+aEouhgS1D0weyPU4lEzdAcsCAvrih2J2BqyXqHWvVLw5g== dependencies: anymatch "^2.0.0" - async-each "^1.0.0" - braces "^2.3.0" + async-each "^1.0.1" + braces "^2.3.2" glob-parent "^3.1.0" - inherits "^2.0.1" + inherits "^2.0.3" is-binary-path "^1.0.0" is-glob "^4.0.0" - lodash.debounce "^4.0.8" - normalize-path "^2.1.1" + normalize-path "^3.0.0" path-is-absolute "^1.0.0" - readdirp "^2.0.0" - upath "^1.0.5" + readdirp "^2.2.1" + upath "^1.1.1" optionalDependencies: - fsevents "^1.2.2" + fsevents "^1.2.7" chownr@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.1.tgz#54726b8b8fff4df053c42187e801fb4412df1494" - integrity sha512-j38EvO5+LHX84jlo6h4UzmOwi0UgW61WRyPtJz4qaadK5eY3BTS5TY/S1Stc3Uk2lIM6TPevAlULiEJwie860g== + version "1.1.2" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.2.tgz#a18f1e0b269c8a6a5d3c86eb298beb14c3dd7bf6" + integrity sha512-GkfeAQh+QNy3wquu9oIZr6SS5x7wGdSgNQvD10X3r+AZr1Oys22HW8kAmDMvNg2+Dm0TeGaEuO8gFwdBXxwO8A== chunkd@^1.0.0: version "1.0.0" @@ -791,6 +797,11 @@ ci-info@^1.5.0: resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-1.6.0.tgz#2ca20dbb9ceb32d4524a683303313f0304b1e497" integrity sha512-vsGdkwSCDpWmP80ncATX7iea5DWQemg1UgCW5J8tqjU3lYw4FBYuj89J0CTVomA7BEfvSZd84GmHko+MxFQU2A== +ci-info@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" + integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== + ci-parallel-vars@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/ci-parallel-vars/-/ci-parallel-vars-1.0.0.tgz#af97729ed1c7381911ca37bcea263d62638701b3" @@ -807,9 +818,9 @@ class-utils@^0.3.5: static-extend "^0.1.1" clean-stack@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.0.0.tgz#301bfa9e8dd2d3d984c0e542f7aa67b996f63e0a" - integrity sha512-VEoL9Qh7I8s8iHnV53DaeWSt8NJ0g3khMfK6NiCPB7H657juhro+cSw2O88uo3bo0c0X5usamtXk0/Of0wXa5A== + version "2.1.0" + resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.1.0.tgz#9e7fec7f3f8340a2ab4f127c80273085e8fbbdd0" + integrity sha512-uQWrpRm+iZZUCAp7ZZJQbd4Za9I3AjR/3YTjmcnAtkauaIm/T5CT6U8zVI6e60T6OANqBFAzuR9/HB3NzuZCRA== clean-yaml-object@^0.1.0: version "0.1.0" @@ -828,10 +839,10 @@ cli-cursor@^2.1.0: dependencies: restore-cursor "^2.0.0" -cli-spinners@^1.1.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-1.3.1.tgz#002c1990912d0d59580c93bd36c056de99e4259a" - integrity sha512-1QL4544moEsDVH9T/l6Cemov/37iv1RtoKf7NJ04A60+4MREXNfx/QvavbH6QoGdsD4N4Mwy49cmaINR/o2mdg== +cli-spinners@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.2.0.tgz#e8b988d9206c692302d8ee834e7a85c0144d8f77" + integrity sha512-tgU3fKwzYjiLEQgPMD9Jt+JjHVL9kW93FiIMX/l7rivvOD4/LL0Mf7gda3+4U2KJBloybwgj5KEoQgGRioMiKQ== cli-truncate@^1.1.0: version "1.1.0" @@ -879,9 +890,9 @@ color-name@1.1.3: integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= commander@^2.12.1: - version "2.19.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-2.19.0.tgz#f6198aa84e5b83c46054b94ddedbfed5ee9ff12a" - integrity sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg== + version "2.20.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.0.tgz#d58bb2b5c1ee8f87b0d340027e9e94e222c5a422" + integrity sha512-7j2y+40w61zy6YC2iRNpUe/NwhNyoXrYpHMrSunaMG64nRnaf96zO/KMQR4OyN/UnE5KLyEBnKHd4aG3rskjpQ== common-path-prefix@^1.0.0: version "1.0.0" @@ -889,9 +900,9 @@ common-path-prefix@^1.0.0: integrity sha1-zVL28HEuC6q5fW+XModPIvR3UsA= component-emitter@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6" - integrity sha1-E3kY1teCg/ffemt8WmPhQOaUJeY= + version "1.3.0" + resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" + integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== concat-map@0.0.1: version "0.0.1" @@ -950,9 +961,9 @@ copy-descriptor@^0.1.0: integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= core-js@^2.0.0: - version "2.5.7" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.5.7.tgz#f972608ff0cead68b841a16a932d0b183791814e" - integrity sha512-RszJCAxg/PP6uzXVXL6BsxSXx/B05oJAQ2vkJRjyjrEcNVycaqOmNb5OTxZPE3xa5gwZduqza6L9JOCenh/Ecw== + version "2.6.9" + resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.9.tgz#6b4b214620c834152e179323727fc19741b084f2" + integrity sha512-HOpZf6eXmnl7la+cUdMnLvUxKNqLUzJvgIziQ0DiF3JwSImNphIqdGqzj6hIKyX04MmV0poclQ7+wjWvxQyR2A== core-util-is@~1.0.0: version "1.0.2" @@ -994,17 +1005,24 @@ date-time@^2.1.0: dependencies: time-zone "^1.0.0" -debug@^2.1.2, debug@^2.2.0, debug@^2.3.3: +debug@^2.2.0, debug@^2.3.3: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" -debug@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.0.tgz#373687bffa678b38b1cd91f861b63850035ddc87" - integrity sha512-heNPJUJIqC+xB6ayLAMHaIrmN9HKa7aQO8MGqKpvCA+uJYVcvR6l5kgdrhRuwPFHU7P5/A1w0BjByPHwpfTDKg== +debug@^3.2.6: + version "3.2.6" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" + integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== + dependencies: + ms "^2.1.1" + +debug@^4.1.0, debug@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" + integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== dependencies: ms "^2.1.1" @@ -1050,6 +1068,13 @@ defaults@^1.0.3: dependencies: clone "^1.0.2" +define-properties@^1.1.2: + version "1.1.3" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" + integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== + dependencies: + object-keys "^1.0.12" + define-property@^0.2.5: version "0.2.5" resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" @@ -1072,17 +1097,18 @@ define-property@^2.0.2: is-descriptor "^1.0.2" isobject "^3.0.1" -del@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/del/-/del-3.0.0.tgz#53ecf699ffcbcb39637691ab13baf160819766e5" - integrity sha1-U+z2mf/LyzljdpGrE7rxYIGXZuU= +del@^4.0.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/del/-/del-4.1.1.tgz#9e8f117222ea44a31ff3a156c049b99052a9f0b4" + integrity sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ== dependencies: + "@types/glob" "^7.1.1" globby "^6.1.0" - is-path-cwd "^1.0.0" - is-path-in-cwd "^1.0.0" - p-map "^1.1.1" - pify "^3.0.0" - rimraf "^2.2.8" + is-path-cwd "^2.0.0" + is-path-in-cwd "^2.0.0" + p-map "^2.0.0" + pify "^4.0.1" + rimraf "^2.6.3" delegates@^1.0.0: version "1.0.0" @@ -1100,11 +1126,10 @@ diff@^3.1.0, diff@^3.2.0: integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA== dir-glob@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-2.0.0.tgz#0b205d2b6aef98238ca286598a8204d29d0a0034" - integrity sha512-37qirFDz8cA5fimp9feo43fSuRo2gHwaIn6dXL8Ber1dGwUosDrGZeCCXq57WnIqE4aQ+u3eQZzsk1yOzhdwag== + version "2.2.2" + resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-2.2.2.tgz#fa09f0694153c8918b18ba0deafae94769fc50c4" + integrity sha512-f9LBi5QWzIW3I6e//uxZoLBlUt9kcp66qo0sSCxL6YZKc75R1c4MFCoe/LaZiBGmgujvQdxc5Bn3QhfyvK5Hsw== dependencies: - arrify "^1.0.1" path-type "^3.0.0" dot-prop@^4.1.0, dot-prop@^4.2.0: @@ -1149,7 +1174,7 @@ es6-error@^4.0.1: resolved "https://registry.yarnpkg.com/es6-error/-/es6-error-4.1.1.tgz#9e3af407459deed47e9a91f9b885a84eb05c561d" integrity sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg== -escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.4, escape-string-regexp@^1.0.5: +escape-string-regexp@^1.0.4, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= @@ -1162,10 +1187,10 @@ eslint-plugin-prettier@^2.2.0: fast-diff "^1.1.1" jest-docblock "^21.0.0" -esm@^3.0.84: - version "3.0.84" - resolved "https://registry.yarnpkg.com/esm/-/esm-3.0.84.tgz#bb108989f4673b32d4f62406869c28eed3815a63" - integrity sha512-SzSGoZc17S7P+12R9cg21Bdb7eybX25RnIeRZ80xZs+VZ3kdQKzqTp2k4hZJjR7p9l0186TTXSgrxzlMDBktlw== +esm@^3.2.20: + version "3.2.25" + resolved "https://registry.yarnpkg.com/esm/-/esm-3.2.25.tgz#342c18c29d56157688ba5ce31f8431fbb795cc10" + integrity sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA== espower-location-detector@^1.0.0: version "1.0.0" @@ -1303,9 +1328,9 @@ fragment-cache@^0.2.1: map-cache "^0.2.2" fs-minipass@^1.2.5: - version "1.2.5" - resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.5.tgz#06c277218454ec288df77ada54a03b8702aacb9d" - integrity sha512-JhBl0skXjUPCFH7x6x61gQxrKyXsxB5gcgePLZCwfyCGGsTISMoIeObbrvVeP6Xmyaudw4TT43qV2Gz+iyd2oQ== + version "1.2.6" + resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.6.tgz#2c5cc30ded81282bfe8a0d7c7c1853ddeb102c07" + integrity sha512-crhvyXcMejjv3Z5d2Fa9sf5xLYVCF5O1c71QxbVnbLsmYMBEvDAftewesN/HhY03YRoA7zOMxjNGrF5svGaaeQ== dependencies: minipass "^2.2.1" @@ -1314,13 +1339,18 @@ fs.realpath@^1.0.0: resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= -fsevents@^1.2.2: - version "1.2.4" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.4.tgz#f41dcb1af2582af3692da36fc55cbd8e1041c426" - integrity sha512-z8H8/diyk76B7q5wg+Ud0+CqzcAF3mBBI/bA5ne5zrRUUIvNkJY//D3BqyH571KuAC4Nr7Rw7CjWX4r0y9DvNg== +fsevents@^1.2.7: + version "1.2.9" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.9.tgz#3f5ed66583ccd6f400b5a00db6f7e861363e388f" + integrity sha512-oeyj2H3EjjonWcFjD5NvZNE9Rqe4UW+nQBU2HNeKw0koVLEFIhtyETyAakeAM3de7Z/SW5kcA+fZUait9EApnw== dependencies: - nan "^2.9.2" - node-pre-gyp "^0.10.0" + nan "^2.12.1" + node-pre-gyp "^0.12.0" + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== gauge@~2.7.3: version "2.7.4" @@ -1341,10 +1371,10 @@ get-func-name@^2.0.0: resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.0.tgz#ead774abee72e20409433a066366023dd6887a41" integrity sha1-6td0q+5y4gQJQzoGY2YCPdaIekE= -get-port@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/get-port/-/get-port-4.0.0.tgz#373c85960138ee20027c070e3cb08019fea29816" - integrity sha512-Yy3yNI2oShgbaWg4cmPhWjkZfktEvpKI09aDX4PZzNtlU9obuYrX7x2mumQsrNxlF+Ls7OtMQW/u+X4s896bOQ== +get-port@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/get-port/-/get-port-4.2.0.tgz#e37368b1e863b7629c43c5a323625f95cf24b119" + integrity sha512-/b3jarXkH8KJoOMQc3uVGHASwGLPq3gSFJ7tgJm2diza+bydJPTGOibin2steecKeOylE8oY2JERlVWkAJO6yw== get-stream@^3.0.0: version "3.0.0" @@ -1364,10 +1394,10 @@ glob-parent@^3.1.0: is-glob "^3.1.0" path-dirname "^1.0.0" -glob@^7.0.3, glob@^7.0.5, glob@^7.1.1, glob@^7.1.2: - version "7.1.3" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.3.tgz#3960832d3f1574108342dafd3a67b332c0969df1" - integrity sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ== +glob@^7.0.3, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3: + version "7.1.4" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.4.tgz#aa608a2f6c577ad357e1ae5a5c26d9a8d1969255" + integrity sha512-hkLPepehmnKk41pUGm3sYxoFs/umurYfYJCerbXEyFIWcAzvpipAgVkBqqT9RBKMGjnq6kMuyYwha6csxbiM1A== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" @@ -1384,9 +1414,9 @@ global-dirs@^0.1.0: ini "^1.3.4" globals@^11.1.0: - version "11.9.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-11.9.0.tgz#bde236808e987f290768a93d065060d78e6ab249" - integrity sha512-5cJVtyXWH8PiJPVLZzzoIizXx944O4OmRro5MWKx5fT4MgcN7OfaMutPeaTdJCCURwbWdhhcCWcKIffPnmTzBg== + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== globby@^6.1.0: version "6.1.0" @@ -1428,23 +1458,21 @@ got@^6.7.1: unzip-response "^2.0.1" url-parse-lax "^1.0.0" -graceful-fs@^4.1.11, graceful-fs@^4.1.2: - version "4.1.15" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.15.tgz#ffb703e1066e8a0eeaa4c8b80ba9253eeefbfb00" - integrity sha512-6uHUhOPEBgQ24HM+r6b/QwWfZq+yiFcipKFrOFiBEnWdy5sdzYoi+pJeQaPI5qOLRFqWmAXUPQNsielzdLoecA== - -has-ansi@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" - integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= - dependencies: - ansi-regex "^2.0.0" +graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2: + version "4.2.0" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.0.tgz#8d8fdc73977cb04104721cb53666c1ca64cd328b" + integrity sha512-jpSvDPV4Cq/bgtpndIWbI5hmYxhQGHPC4d4cqBPb4DLniCfhJokdXhwhaDuLBGLQdvvRum/UiX6ECVIPvDXqdg== has-flag@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= +has-symbols@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.0.tgz#ba1a8f1af2a0fc39650f5c850367704122063b44" + integrity sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q= + has-unicode@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" @@ -1481,6 +1509,13 @@ has-values@^1.0.0: is-number "^3.0.0" kind-of "^4.0.0" +hasha@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/hasha/-/hasha-3.0.0.tgz#52a32fab8569d41ca69a61ff1a214f8eb7c8bd39" + integrity sha1-UqMvq4Vp1BymmmH/GiFPjrfIvTk= + dependencies: + is-stream "^1.0.1" + hosted-git-info@^2.1.4: version "2.7.1" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.7.1.tgz#97f236977bd6e125408930ff6de3eec6281ec047" @@ -1541,10 +1576,10 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@2, inherits@^2.0.1, inherits@~2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" - integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= +inherits@2, inherits@^2.0.3, inherits@~2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== ini@^1.3.4, ini@~1.3.0: version "1.3.5" @@ -1587,20 +1622,20 @@ is-buffer@^1.1.5: resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== -is-builtin-module@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-builtin-module/-/is-builtin-module-1.0.0.tgz#540572d34f7ac3119f8f76c30cbc1b1e037affbe" - integrity sha1-VAVy0096wxGfj3bDDLwbHgN6/74= - dependencies: - builtin-modules "^1.0.0" - -is-ci@^1.0.10, is-ci@^1.2.1: +is-ci@^1.0.10: version "1.2.1" resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-1.2.1.tgz#e3779c8ee17fccf428488f6e281187f2e632841c" integrity sha512-s6tfsaQaQi3JNciBH6shVqEDvhGut0SUXr31ag8Pd8BBbVVlcGfWhpPmEOoM6RJ5TFhbypvf5yyRw/VXW1IiWg== dependencies: ci-info "^1.5.0" +is-ci@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-2.0.0.tgz#6bc6334181810e04b5c22b3d589fdca55026404c" + integrity sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w== + dependencies: + ci-info "^2.0.0" + is-data-descriptor@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" @@ -1634,9 +1669,9 @@ is-descriptor@^1.0.0, is-descriptor@^1.0.2: kind-of "^6.0.2" is-error@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/is-error/-/is-error-2.2.1.tgz#684a96d84076577c98f4cdb40c6d26a5123bf19c" - integrity sha1-aEqW2EB2V3yY9M20DG0mpRI78Zw= + version "2.2.2" + resolved "https://registry.yarnpkg.com/is-error/-/is-error-2.2.2.tgz#c10ade187b3c93510c5470a5567833ee25649843" + integrity sha512-IOQqts/aHWbiisY5DuPJQ0gcbvaLFCa7fBa9xoLfxBZvQ+ZI/Zh9xoI7Gk+G64N0FdK4AbibytHht2tWgpJWLg== is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" @@ -1675,9 +1710,9 @@ is-glob@^3.1.0: is-extglob "^2.1.0" is-glob@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.0.tgz#9521c76845cc2610a85203ddf080a958c2ffabc0" - integrity sha1-lSHHaEXMJhCoUgPd8ICpWML/q8A= + version "4.0.1" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" + integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== dependencies: is-extglob "^2.1.1" @@ -1720,17 +1755,17 @@ is-observable@^1.1.0: dependencies: symbol-observable "^1.1.0" -is-path-cwd@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-1.0.0.tgz#d225ec23132e89edd38fda767472e62e65f1106d" - integrity sha1-0iXsIxMuie3Tj9p2dHLmLmXxEG0= +is-path-cwd@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-2.2.0.tgz#67d43b82664a7b5191fd9119127eb300048a9fdb" + integrity sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ== -is-path-in-cwd@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-1.0.1.tgz#5ac48b345ef675339bd6c7a48a912110b241cf52" - integrity sha512-FjV1RTW48E7CWM7eE/J2NJvAEEVektecDBVBE5Hh3nM1Jd0kvhHtX68Pr3xsDf857xt3Y4AkwVULK1Vku62aaQ== +is-path-in-cwd@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz#bfe2dca26c69f397265a4009963602935a053acb" + integrity sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ== dependencies: - is-path-inside "^1.0.0" + is-path-inside "^2.1.0" is-path-inside@^1.0.0: version "1.0.1" @@ -1739,12 +1774,19 @@ is-path-inside@^1.0.0: dependencies: path-is-inside "^1.0.1" +is-path-inside@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-2.1.0.tgz#7c9810587d659a40d27bcdb4d5616eab059494b2" + integrity sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg== + dependencies: + path-is-inside "^1.0.2" + is-plain-obj@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" integrity sha1-caUMhCnfync8kqOQpKA7OfzVHT4= -is-plain-object@^2.0.1, is-plain-object@^2.0.3, is-plain-object@^2.0.4: +is-plain-object@^2.0.3, is-plain-object@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== @@ -1766,7 +1808,7 @@ is-retry-allowed@^1.0.0: resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.1.0.tgz#11a060568b67339444033d0125a61a20d564fb34" integrity sha1-EaBgVotnM5REAz0BJaYaINVk+zQ= -is-stream@^1.0.0, is-stream@^1.1.0: +is-stream@^1.0.0, is-stream@^1.0.1, is-stream@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= @@ -1818,20 +1860,15 @@ js-string-escape@^1.0.1: resolved "https://registry.yarnpkg.com/js-string-escape/-/js-string-escape-1.0.1.tgz#e2625badbc0d67c7533e9edc1068c587ae4137ef" integrity sha1-4mJbrbwNZ8dTPp7cEGjFh65BN+8= -js-tokens@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" - integrity sha1-mGbfOVECEw449/mWvOtlRDIJwls= - js-tokens@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== -js-yaml@^3.10.0, js-yaml@^3.7.0: - version "3.12.0" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.12.0.tgz#eaed656ec8344f10f527c6bfa1b6e2244de167d1" - integrity sha512-PIt2cnwmPfL4hKNwqeiuz4bKfnzHTBv6HyVgjahA6mPLwPDzjDWrplJBMjHUFxku/N3FlmrbyPclad+I+4mJ3A== +js-yaml@^3.10.0, js-yaml@^3.13.1: + version "3.13.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847" + integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw== dependencies: argparse "^1.0.7" esprima "^4.0.0" @@ -1904,6 +1941,17 @@ load-json-file@^4.0.0: pify "^3.0.0" strip-bom "^3.0.0" +load-json-file@^5.2.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-5.3.0.tgz#4d3c1e01fa1c03ea78a60ac7af932c9ce53403f3" + integrity sha512-cJGP40Jc/VXUsp8/OrnyKyTZ1y6v/dphm3bioS+RrKXjK2BB6wHUd6JptZEFDGgGahMT+InnZO5i1Ei9mpC8Bw== + dependencies: + graceful-fs "^4.1.15" + parse-json "^4.0.0" + pify "^4.0.1" + strip-bom "^3.0.0" + type-fest "^0.3.0" + locate-path@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" @@ -1935,7 +1983,7 @@ lodash.clonedeepwith@^4.5.0: resolved "https://registry.yarnpkg.com/lodash.clonedeepwith/-/lodash.clonedeepwith-4.5.0.tgz#6ee30573a03a1a60d670a62ef33c10cf1afdbdd4" integrity sha1-buMFc6A6GmDWcKYu8zwQzxr9vdQ= -lodash.debounce@^4.0.3, lodash.debounce@^4.0.8: +lodash.debounce@^4.0.3: version "4.0.8" resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" integrity sha1-gteb/zCmfEAF/9XiUVMArZyk168= @@ -1965,7 +2013,7 @@ lodash.merge@^4.6.1: resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.1.tgz#adc25d9cb99b9391c59624f379fbba60d7111d54" integrity sha512-AOYza4+Hf5z1/0Hztxpm2/xiPZgi/cjMqdnKTUWTBSKchJlxXXuUSxCCl8rJlf4g6yww/j6mA8nC8Hw/EZWxKQ== -lodash@^4.17.10: +lodash@^4.17.11: version "4.17.11" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.11.tgz#b39ea6229ef607ecd89e2c8df12536891cac9b8d" integrity sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg== @@ -1991,20 +2039,28 @@ lowercase-keys@^1.0.0: integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA== lru-cache@^4.0.1: - version "4.1.4" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.4.tgz#51cc46e8e6d9530771c857e24ccc720ecdbcc031" - integrity sha512-EPstzZ23znHUVLKj+lcXO1KvZkrlw+ZirdwvOmnAnA/1PB4ggyXJ77LRkCqkff+ShQ+cqoxCxLQOh4cKITO5iA== + version "4.1.5" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" + integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== dependencies: pseudomap "^1.0.2" - yallist "^3.0.2" + yallist "^2.1.2" -make-dir@^1.0.0, make-dir@^1.3.0: +make-dir@^1.0.0: version "1.3.0" resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-1.3.0.tgz#79c1033b80515bd6d24ec9933e860ca75ee27f0c" integrity sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ== dependencies: pify "^3.0.0" +make-dir@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" + integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== + dependencies: + pify "^4.0.1" + semver "^5.6.0" + make-error@^1.1.1: version "1.3.5" resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.5.tgz#efe4e81f6db28cadd605c70f29c831b58ef776c8" @@ -2093,7 +2149,7 @@ mimic-fn@^1.0.0: resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" integrity sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ== -minimatch@^3.0.0, minimatch@^3.0.4: +minimatch@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== @@ -2118,7 +2174,7 @@ minimist@^1.2.0: resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" integrity sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ= -minipass@^2.2.1, minipass@^2.3.4: +minipass@^2.2.1, minipass@^2.3.5: version "2.3.5" resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.3.5.tgz#cacebe492022497f656b0f0f51e2682a9ed2d848" integrity sha512-Gi1W4k059gyRbyVUZQ4mEqLm0YIUiGYfvxhF6SIlk3ui1WVxMTGfGdQ2SInh3PDrRTVvPKgULkpJtT4RH10+VA== @@ -2126,17 +2182,17 @@ minipass@^2.2.1, minipass@^2.3.4: safe-buffer "^5.1.2" yallist "^3.0.0" -minizlib@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.1.1.tgz#6734acc045a46e61d596a43bb9d9cd326e19cc42" - integrity sha512-TrfjCjk4jLhcJyGMYymBH6oTXcWjYbUAXTHDbtnWHjZC25h0cdajHuPE1zxb4DVmu8crfh+HwH/WMuyLG0nHBg== +minizlib@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.2.1.tgz#dd27ea6136243c7c880684e8672bb3a45fd9b614" + integrity sha512-7+4oTUOWKg7AuL3vloEWekXY2/D20cevzsrNT2kGWm+39J9hGTCBv8VI5Pm5lXZ/o3/mdR4f8rflAPhnQb8mPA== dependencies: minipass "^2.2.1" mixin-deep@^1.2.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.1.tgz#a49e7268dce1a0d9698e45326c5626df3543d0fe" - integrity sha512-8ZItLHeEgaqEvd5lYBXfm4EZSFCX29Jb9K+lAHhDKzReKBQKj3R+7NOF6tjqYi9t4oI8VUfaWITJQm86wnXGNQ== + version "1.3.2" + resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" + integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== dependencies: for-in "^1.0.2" is-extendable "^1.0.1" @@ -2154,24 +2210,24 @@ ms@2.0.0: integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= ms@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" - integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== -multimatch@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/multimatch/-/multimatch-2.1.0.tgz#9c7906a22fb4c02919e2f5f75161b4cdbd4b2a2b" - integrity sha1-nHkGoi+0wCkZ4vX3UWG0zb1LKis= +multimatch@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/multimatch/-/multimatch-3.0.0.tgz#0e2534cc6bc238d9ab67e1b9cd5fcd85a6dbf70b" + integrity sha512-22foS/gqQfANZ3o+W7ST2x25ueHDVNWl/b9OlGcLpy/iKxjCpvcNCM51YCenUi7Mt/jAjjqv8JwZRs8YP5sRjA== dependencies: - array-differ "^1.0.0" - array-union "^1.0.1" - arrify "^1.0.0" - minimatch "^3.0.0" + array-differ "^2.0.3" + array-union "^1.0.2" + arrify "^1.0.1" + minimatch "^3.0.4" -nan@^2.9.2: - version "2.11.1" - resolved "https://registry.yarnpkg.com/nan/-/nan-2.11.1.tgz#90e22bccb8ca57ea4cd37cc83d3819b52eea6766" - integrity sha512-iji6k87OSXa0CcrLl9z+ZiYSuR2o+c0bGuNmXdrhTQTakxytAFsC56SArGYoiHlJlFoHSnvmhpceZJaXkVuOtA== +nan@^2.12.1: + version "2.14.0" + resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c" + integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg== nanomatch@^1.2.9: version "1.2.13" @@ -2191,18 +2247,18 @@ nanomatch@^1.2.9: to-regex "^3.0.1" needle@^2.2.1: - version "2.2.4" - resolved "https://registry.yarnpkg.com/needle/-/needle-2.2.4.tgz#51931bff82533b1928b7d1d69e01f1b00ffd2a4e" - integrity sha512-HyoqEb4wr/rsoaIDfTH2aVL9nWtQqba2/HvMv+++m8u0dz808MaagKILxtfeSN7QU7nvbQ79zk3vYOJp9zsNEA== + version "2.4.0" + resolved "https://registry.yarnpkg.com/needle/-/needle-2.4.0.tgz#6833e74975c444642590e15a750288c5f939b57c" + integrity sha512-4Hnwzr3mi5L97hMYeNl8wRW/Onhy4nUKR/lVemJ8gJedxxUyBLm9kkrDColJvoSfwi0jCNhD+xCdOtiGDQiRZg== dependencies: - debug "^2.1.2" + debug "^3.2.6" iconv-lite "^0.4.4" sax "^1.2.4" -node-pre-gyp@^0.10.0: - version "0.10.3" - resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.10.3.tgz#3070040716afdc778747b61b6887bf78880b80fc" - integrity sha512-d1xFs+C/IPS8Id0qPTZ4bUT8wWryfR/OzzAFxweG+uLN85oPzyo2Iw6bVlLQ/JOdgNonXLCoRyqDzDWq4iw72A== +node-pre-gyp@^0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.12.0.tgz#39ba4bb1439da030295f899e3b520b7785766149" + integrity sha512-4KghwV8vH5k+g2ylT+sLTjy5wmUOb9vPhnM8NHvRf9dHmnW/CndrFXy2aRPaPST6dugXSdHXfeaHQm77PIz/1A== dependencies: detect-libc "^1.0.2" mkdirp "^0.5.1" @@ -2224,12 +2280,12 @@ nopt@^4.0.1: osenv "^0.1.4" normalize-package-data@^2.3.2, normalize-package-data@^2.3.4: - version "2.4.0" - resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.4.0.tgz#12f95a307d58352075a04907b84ac8be98ac012f" - integrity sha512-9jjUFbTPfEy3R/ad/2oNbKtW9Hgovl5O1FvFWKkKblNXoN/Oou6+9+KKohPK13Yc3/TyunyWhJp6gvRNR/PPAw== + version "2.5.0" + resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" + integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== dependencies: hosted-git-info "^2.1.4" - is-builtin-module "^1.0.0" + resolve "^1.10.0" semver "2 || 3 || 4 || 5" validate-npm-package-license "^3.0.1" @@ -2240,15 +2296,20 @@ normalize-path@^2.1.1: dependencies: remove-trailing-separator "^1.0.1" +normalize-path@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + npm-bundled@^1.0.1: - version "1.0.5" - resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.5.tgz#3c1732b7ba936b3a10325aef616467c0ccbcc979" - integrity sha512-m/e6jgWu8/v5niCUKQi9qQl8QdeEduFA96xHDDzFGqly0OOjI7c+60KM/2sppfnUU9JJagf+zs+yGhqSOFj71g== + version "1.0.6" + resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.6.tgz#e7ba9aadcef962bb61248f91721cd932b3fe6bdd" + integrity sha512-8/JCaftHwbd//k6y2rEWp6k1wxVfpFzB6t1p825+cUb7Ym2XQfhwIC5KwhrvzZRJu+LtDE585zVaS32+CGtf0g== npm-packlist@^1.1.6: - version "1.1.12" - resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.1.12.tgz#22bde2ebc12e72ca482abd67afc51eb49377243a" - integrity sha512-WJKFOVMeAlsU/pjXuqVdzU0WfgtIBCupkEVwn+1Y0ERAbUfWw8R4GjgVbaKnUjRoD2FoQbHOCbOyT5Mbs9Lw4g== + version "1.4.4" + resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.4.tgz#866224233850ac534b63d1a6e76050092b5d2f44" + integrity sha512-zTLo8UcVYtDU3gdeaFu2Xu0n0EvelfHDGuqtNIn5RO7yQj4H1TqNdBc/yZjxnWA0PVB8D3Woyp0i5B43JwQ6Vw== dependencies: ignore-walk "^3.0.1" npm-bundled "^1.0.1" @@ -2289,6 +2350,11 @@ object-copy@^0.1.0: define-property "^0.2.5" kind-of "^3.0.3" +object-keys@^1.0.11, object-keys@^1.0.12: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + object-visit@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" @@ -2296,6 +2362,16 @@ object-visit@^1.0.0: dependencies: isobject "^3.0.0" +object.assign@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" + integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w== + dependencies: + define-properties "^1.1.2" + function-bind "^1.1.1" + has-symbols "^1.0.0" + object-keys "^1.0.11" + object.pick@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" @@ -2325,16 +2401,16 @@ onetime@^2.0.0: dependencies: mimic-fn "^1.0.0" -ora@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/ora/-/ora-3.0.0.tgz#8179e3525b9aafd99242d63cc206fd64732741d0" - integrity sha512-LBS97LFe2RV6GJmXBi6OKcETKyklHNMV0xw7BtsVn2MlsgsydyZetSCbCANr+PFLmDyv4KV88nn0eCKza665Mg== +ora@^3.2.0: + version "3.4.0" + resolved "https://registry.yarnpkg.com/ora/-/ora-3.4.0.tgz#bf0752491059a3ef3ed4c85097531de9fdbcd318" + integrity sha512-eNwHudNbO1folBP3JsZ19v9azXWtQZjICdr3Q0TDPIaeBQ3mXLrh54wM+er0+hSp+dWKf+Z8KM58CYzEyIYxYg== dependencies: - chalk "^2.3.1" + chalk "^2.4.2" cli-cursor "^2.1.0" - cli-spinners "^1.1.0" + cli-spinners "^2.0.0" log-symbols "^2.2.0" - strip-ansi "^4.0.0" + strip-ansi "^5.2.0" wcwidth "^1.0.1" os-homedir@^1.0.0: @@ -2368,9 +2444,9 @@ p-limit@^1.1.0: p-try "^1.0.0" p-limit@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.0.0.tgz#e624ed54ee8c460a778b3c9f3670496ff8a57aec" - integrity sha512-fl5s52lI5ahKCernzzIyAP0QAZbGIovtVHGwpcu1Jr/EpzLVDI2myISHwGqK7m8uQFugVWSrbxH7XnhGtvEc+A== + version "2.2.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.0.tgz#417c9941e6027a9abcba5092dd2904e255b5fbc2" + integrity sha512-pZbTJpoUsCzV48Mc9Nh51VbwO0X9cuPFE8gYwx9BTCt9SF8/b7Zljd2fVgOxhIF/HDTKgpVzs+GPhyKfjLLFRQ== dependencies: p-try "^2.0.0" @@ -2388,10 +2464,10 @@ p-locate@^3.0.0: dependencies: p-limit "^2.0.0" -p-map@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/p-map/-/p-map-1.2.0.tgz#e4e94f311eabbc8633a1e79908165fca26241b6b" - integrity sha512-r6zKACMNhjPJMTl8KcFH4li//gkrXWfbD6feV8l6doRHlzljFWGJ2AP6iKaCJXyZmAUMOPtvbW7EXkbWO/pLEA== +p-map@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" + integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== p-try@^1.0.0: version "1.0.0" @@ -2399,18 +2475,18 @@ p-try@^1.0.0: integrity sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M= p-try@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.0.0.tgz#85080bb87c64688fa47996fe8f7dfbe8211760b1" - integrity sha512-hMp0onDKIajHfIkdRk3P4CdCmErkYAxxDtP3Wx/4nZ3aGlau2VKh3mZpcuFkH27WQkL/3WBCPOktzA9ZOAnMQQ== + version "2.2.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== -package-hash@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/package-hash/-/package-hash-2.0.0.tgz#78ae326c89e05a4d813b68601977af05c00d2a0d" - integrity sha1-eK4ybIngWk2BO2hgGXevBcANKg0= +package-hash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/package-hash/-/package-hash-3.0.0.tgz#50183f2d36c9e3e528ea0a8605dff57ce976f88e" + integrity sha512-lOtmukMDVvtkL84rJHI7dpTYq+0rli8N2wlnqUcBuDWCfVhRUfOmnR9SsoHFMLpACvEV60dX7rd0rFaYDZI+FA== dependencies: - graceful-fs "^4.1.11" + graceful-fs "^4.1.15" + hasha "^3.0.0" lodash.flattendeep "^4.4.0" - md5-hex "^2.0.0" release-zalgo "^1.0.0" package-json@^4.0.0: @@ -2432,9 +2508,9 @@ parse-json@^4.0.0: json-parse-better-errors "^1.0.1" parse-ms@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/parse-ms/-/parse-ms-2.0.0.tgz#7b3640295100caf3fa0100ccceb56635b62f9d62" - integrity sha512-AddiXFSLLCqj+tCRJ9MrUtHZB4DWojO3tk0NVZ+g5MaMQHF2+p2ktqxuoXyPFLljz/aUK0Nfhd/uGWnhXVXEyA== + version "2.1.0" + resolved "https://registry.yarnpkg.com/parse-ms/-/parse-ms-2.1.0.tgz#348565a753d4391fa524029956b172cb7753097d" + integrity sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA== pascalcase@^0.1.1: version "0.1.1" @@ -2456,7 +2532,7 @@ path-is-absolute@^1.0.0: resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= -path-is-inside@^1.0.1: +path-is-inside@^1.0.1, path-is-inside@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM= @@ -2466,7 +2542,7 @@ path-key@^2.0.0: resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= -path-parse@^1.0.5: +path-parse@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== @@ -2493,6 +2569,11 @@ pify@^3.0.0: resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY= +pify@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" + integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== + pinkie-promise@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" @@ -2505,13 +2586,13 @@ pinkie@^2.0.0: resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= -pkg-conf@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/pkg-conf/-/pkg-conf-2.1.0.tgz#2126514ca6f2abfebd168596df18ba57867f0058" - integrity sha1-ISZRTKbyq/69FoWW3xi6V4Z/AFg= +pkg-conf@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/pkg-conf/-/pkg-conf-3.1.0.tgz#d9f9c75ea1bae0e77938cde045b276dac7cc69ae" + integrity sha512-m0OTbR/5VPNPqO1ph6Fqbj7Hv6QU7gR/tQW40ZqrL1rjgCU85W6C1bJn0BItuJqnR98PWzw7Z8hHeChD1WrgdQ== dependencies: - find-up "^2.0.0" - load-json-file "^4.0.0" + find-up "^3.0.0" + load-json-file "^5.2.0" pkg-dir@^3.0.0: version "3.0.0" @@ -2521,9 +2602,9 @@ pkg-dir@^3.0.0: find-up "^3.0.0" plur@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/plur/-/plur-3.0.1.tgz#268652d605f816699b42b86248de73c9acd06a7c" - integrity sha512-lJl0ojUynAM1BZn58Pas2WT/TXeC1+bS+UqShl0x9+49AtOn7DixRXVzaC8qrDOIxNDmepKnLuMTH7NQmkX0PA== + version "3.1.1" + resolved "https://registry.yarnpkg.com/plur/-/plur-3.1.1.tgz#60267967866a8d811504fe58f2faaba237546a5b" + integrity sha512-t1Ax8KUvV3FFII8ltczPn2tJdjqbd1sIzu6t4JL7nQ3EyeL/lTrj5PWKb06ic5/6XYDr65rQ4uzQEGN70/6X5w== dependencies: irregular-plurals "^2.0.0" @@ -2538,9 +2619,9 @@ prepend-http@^1.0.1: integrity sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw= prettier@^1.14.3: - version "1.15.2" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-1.15.2.tgz#d31abe22afa4351efa14c7f8b94b58bb7452205e" - integrity sha512-YgPLFFA0CdKL4Eg2IHtUSjzj/BWgszDHiNQAe0VAIBse34148whfdzLagRL+QiKS+YfK5ftB6X4v/MBw8yCoug== + version "1.18.2" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-1.18.2.tgz#6823e7c5900017b4bd3acf46fe9ac4b4d7bda9ea" + integrity sha512-OeHeMc0JhFE9idD4ZdtNibzY0+TPHSpSSb9h8FqtP+YnoZZ1sl8Vc9b1sasjfymH3SonAF4QcA2+mzHPhMvIiw== pretty-ms@^4.0.0: version "4.0.0" @@ -2550,9 +2631,9 @@ pretty-ms@^4.0.0: parse-ms "^2.0.0" process-nextick-args@~2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.0.tgz#a37d732f4271b4ab1ad070d35508e8290788ffaa" - integrity sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw== + version "2.0.1" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== pseudomap@^1.0.2: version "1.0.2" @@ -2604,7 +2685,7 @@ readable-stream@^2.0.2, readable-stream@^2.0.6: string_decoder "~1.1.1" util-deprecate "~1.0.1" -readdirp@^2.0.0: +readdirp@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== @@ -2621,10 +2702,10 @@ redent@^2.0.0: indent-string "^3.0.0" strip-indent "^2.0.0" -regenerate-unicode-properties@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-7.0.0.tgz#107405afcc4a190ec5ed450ecaa00ed0cafa7a4c" - integrity sha512-s5NGghCE4itSlUS+0WUj88G6cfMVMmH8boTPNvABf8od+2dhT9WDlWu8n01raQAJZMOK8Ch6jSexaRO7swd6aw== +regenerate-unicode-properties@^8.0.2: + version "8.1.0" + resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz#ef51e0f0ea4ad424b77bf7cb41f3e015c70a3f0e" + integrity sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA== dependencies: regenerate "^1.4.0" @@ -2641,22 +2722,22 @@ regex-not@^1.0.0, regex-not@^1.0.2: extend-shallow "^3.0.2" safe-regex "^1.1.0" -regexpu-core@^4.1.3: - version "4.2.0" - resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.2.0.tgz#a3744fa03806cffe146dea4421a3e73bdcc47b1d" - integrity sha512-Z835VSnJJ46CNBttalHD/dB+Sj2ezmY6Xp38npwU87peK6mqOzOpV8eYktdkLTEkzzD+JsTcxd84ozd8I14+rw== +regexpu-core@^4.5.4: + version "4.5.4" + resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.5.4.tgz#080d9d02289aa87fe1667a4f5136bc98a6aebaae" + integrity sha512-BtizvGtFQKGPUcTy56o3nk1bGRp4SZOTYrDtGNlqCQufptV5IkkLN6Emw+yunAJjzf+C9FQFtvq7IoA3+oMYHQ== dependencies: regenerate "^1.4.0" - regenerate-unicode-properties "^7.0.0" - regjsgen "^0.4.0" - regjsparser "^0.3.0" + regenerate-unicode-properties "^8.0.2" + regjsgen "^0.5.0" + regjsparser "^0.6.0" unicode-match-property-ecmascript "^1.0.4" - unicode-match-property-value-ecmascript "^1.0.2" + unicode-match-property-value-ecmascript "^1.1.0" registry-auth-token@^3.0.1: - version "3.3.2" - resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-3.3.2.tgz#851fd49038eecb586911115af845260eec983f20" - integrity sha512-JL39c60XlzCVgNrO+qq68FoNb56w/m7JYvGR2jT5iR1xBrUA3Mfx5Twk5rqTThPmQKMWydGmq8oFtDlxfrmxnQ== + version "3.4.0" + resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-3.4.0.tgz#d7446815433f5d5ed6431cd5dca21048f66b397e" + integrity sha512-4LM6Fw8eBQdwMYcES4yTnn2TqIasbXuwDx3um+QRs7S55aMKCBKBxvPXl2RiUjHwuJLTyYfxSpmfSAjQpcuP+A== dependencies: rc "^1.1.6" safe-buffer "^5.0.1" @@ -2668,15 +2749,15 @@ registry-url@^3.0.3: dependencies: rc "^1.0.1" -regjsgen@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.4.0.tgz#c1eb4c89a209263f8717c782591523913ede2561" - integrity sha512-X51Lte1gCYUdlwhF28+2YMO0U6WeN0GLpgpA7LK7mbdDnkQYiwvEpmpe0F/cv5L14EbxgrdayAG3JETBv0dbXA== +regjsgen@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.0.tgz#a7634dc08f89209c2049adda3525711fb97265dd" + integrity sha512-RnIrLhrXCX5ow/E5/Mh2O4e/oa1/jW0eaBKTSy3LaCj+M3Bqvm97GWDp2yUtzIs4LEn65zR2yiYGFqb2ApnzDA== -regjsparser@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.3.0.tgz#3c326da7fcfd69fa0d332575a41c8c0cdf588c96" - integrity sha512-zza72oZBBHzt64G7DxdqrOo/30bhHkwMUoT0WqfGu98XLd7N+1tsy5MJ96Bk4MD0y74n629RhmrGW6XlnLLwCA== +regjsparser@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.0.tgz#f1e6ae8b7da2bae96c99399b868cd6c933a2ba9c" + integrity sha512-RQ7YyokLiQBomUJuUG8iGVvkgOLxwyZM8k6d3q5SAXpg4r5TZJZigKFvC6PpD+qQ98bCDC5YelPeA3EucDoNeQ== dependencies: jsesc "~0.5.0" @@ -2724,12 +2805,12 @@ resolve-url@^0.2.1: resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= -resolve@^1.3.2: - version "1.8.1" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.8.1.tgz#82f1ec19a423ac1fbd080b0bab06ba36e84a7a26" - integrity sha512-AicPrAC7Qu1JxPCZ9ZgCZlY35QgFnNqc+0LtbRNxnVw4TXvjQ72wnuL9JQcEBgXkI9JM8MsT9kaQoHcpCRJOYA== +resolve@^1.10.0, resolve@^1.3.2: + version "1.11.1" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.11.1.tgz#ea10d8110376982fef578df8fc30b9ac30a07a3e" + integrity sha512-vIpgF6wfuJOZI7KKKSP+HmiKggadPQAdsp5HiC1mvqnfp0gF1vdwgBWZIdrVft9pgqoMFQN+R7BSWZiBxx+BBw== dependencies: - path-parse "^1.0.5" + path-parse "^1.0.6" restore-cursor@^2.0.0: version "2.0.0" @@ -2744,14 +2825,19 @@ ret@~0.1.10: resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== -rimraf@^2.2.8, rimraf@^2.6.1: - version "2.6.2" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.2.tgz#2ed8150d24a16ea8651e6d6ef0f47c4158ce7a36" - integrity sha512-lreewLK/BlghmxtfH36YYVg1i8IAce4TI7oao75I1g245+6BctqTVQiBP3YUJ9C6DQOXJmkYR9X9fCLtCOJc5w== +rimraf@^2.6.1, rimraf@^2.6.3: + version "2.6.3" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab" + integrity sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA== dependencies: - glob "^7.0.5" + glob "^7.1.3" -safe-buffer@^5.0.1, safe-buffer@^5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: +safe-buffer@^5.0.1, safe-buffer@^5.1.2: + version "5.2.0" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519" + integrity sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg== + +safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== @@ -2780,10 +2866,10 @@ semver-diff@^2.0.0: dependencies: semver "^5.0.3" -"semver@2 || 3 || 4 || 5", semver@^5.0.3, semver@^5.1.0, semver@^5.3.0, semver@^5.4.1, semver@^5.5.1: - version "5.6.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.6.0.tgz#7e74256fbaa49c75aa7c7a205cc22799cac80004" - integrity sha512-RS9R6R35NYgQn++fkDWaOmqGoj4Ek9gGs+DPxNUZKuwE183xjJroKvyo1IzVFeXvUrvmALy6FWD5xrdJT25gMg== +"semver@2 || 3 || 4 || 5", semver@^5.0.3, semver@^5.1.0, semver@^5.3.0, semver@^5.4.1, semver@^5.5.1, semver@^5.6.0: + version "5.7.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.0.tgz#790a7cf6fea5459bac96110b29b60412dc8ff96b" + integrity sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA== serialize-error@^2.1.0: version "2.1.0" @@ -2795,20 +2881,10 @@ set-blocking@~2.0.0: resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= -set-value@^0.4.3: - version "0.4.3" - resolved "https://registry.yarnpkg.com/set-value/-/set-value-0.4.3.tgz#7db08f9d3d22dc7f78e53af3c3bf4666ecdfccf1" - integrity sha1-fbCPnT0i3H945Trzw79GZuzfzPE= - dependencies: - extend-shallow "^2.0.1" - is-extendable "^0.1.1" - is-plain-object "^2.0.1" - to-object-path "^0.3.0" - -set-value@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.0.tgz#71ae4a88f0feefbbf52d1ea604f3fb315ebb6274" - integrity sha512-hw0yxk9GT/Hr5yJEYnHNKYXkIA8mVJgd9ditYZCe16ZczcaELYYcfvaXesNACk2O8O0nTiPQcQhGUQj8JLzeeg== +set-value@^2.0.0, set-value@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" + integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== dependencies: extend-shallow "^2.0.1" is-extendable "^0.1.1" @@ -2895,7 +2971,15 @@ source-map-resolve@^0.5.0: source-map-url "^0.4.0" urix "^0.1.0" -source-map-support@^0.5.6, source-map-support@^0.5.9: +source-map-support@^0.5.11: + version "0.5.12" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.12.tgz#b4f3b10d51857a5af0138d3ce8003b201613d599" + integrity sha512-4h2Pbvyy15EE02G+JOZpUCmqWJuqrs+sEkzewTm++BPi7Hvn/HwcqLAcNxYAyI0x13CpPPn+kMjl+hplXMHITQ== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map-support@^0.5.6: version "0.5.9" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.9.tgz#41bc953b2534267ea2d605bccfa7bfa3111ced5f" integrity sha512-gR6Rw4MvUlYy83vP0vxoVNzM6t8MUXqNuRsuBmBHQDu1Fh6X015FrLdgoDKcNdkwGubozq0P4N0Q37UyFVr1EA== @@ -2919,9 +3003,9 @@ source-map@^0.6.0: integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== spdx-correct@^3.0.0: - version "3.0.2" - resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.0.2.tgz#19bb409e91b47b1ad54159243f7312a858db3c2e" - integrity sha512-q9hedtzyXHr5S0A1vEPoK/7l8NpfkFYTq6iCY+Pno2ZbdZR6WexZFtqeVGkGxW3TEJMN914Z55EnAGMmenlIQQ== + version "3.1.0" + resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.0.tgz#fb83e504445268f154b074e218c87c003cd31df4" + integrity sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q== dependencies: spdx-expression-parse "^3.0.0" spdx-license-ids "^3.0.0" @@ -2940,9 +3024,9 @@ spdx-expression-parse@^3.0.0: spdx-license-ids "^3.0.0" spdx-license-ids@^3.0.0: - version "3.0.2" - resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.2.tgz#a59efc09784c2a5bada13cfeaf5c75dd214044d2" - integrity sha512-qky9CVt0lVIECkEsYbNILVnPvycuEBkXoMFLRWsREkomQLevYhtRKC+R91a5TOAQ3bCMjikRwhyaRqj1VYatYg== + version "3.0.4" + resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.4.tgz#75ecd1a88de8c184ef015eafb51b5b48bfd11bb1" + integrity sha512-7j8LYJLeY/Yb6ACbQ7F76qy5jHkp0U6jgBfJsk97bwWlVUnUWsAgpyaCvo17h0/RQGnQ036tVDomiwoI4pDkQA== split-string@^3.0.1, split-string@^3.0.2: version "3.1.0" @@ -2956,7 +3040,7 @@ sprintf-js@~1.0.2: resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= -stack-utils@^1.0.1: +stack-utils@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-1.0.2.tgz#33eba3897788558bebfc2db059dc158ec36cebb8" integrity sha512-MTX+MeG5U994cazkjd/9KNAapsHnibjMLnfXodlkXw76JEea0UiNzrqidzo1emMwk7w5Qhc9jd4Bn9TBb1MFwA== @@ -3007,12 +3091,12 @@ strip-ansi@^4.0.0: dependencies: ansi-regex "^3.0.0" -strip-ansi@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.0.0.tgz#f78f68b5d0866c20b2c9b8c61b5298508dc8756f" - integrity sha512-Uu7gQyZI7J7gn5qLn1Np3G9vcYGTVqB+lFTytnDJv83dd8T22aGH451P3jueT2/QemInJDfxHB5Tde5OzgG1Ow== +strip-ansi@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" + integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== dependencies: - ansi-regex "^4.0.0" + ansi-regex "^4.1.0" strip-bom-buf@^1.0.0: version "1.0.0" @@ -3052,18 +3136,20 @@ supertap@^1.0.0: serialize-error "^2.1.0" strip-ansi "^4.0.0" -supports-color@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" - integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= - -supports-color@^5.3.0, supports-color@^5.5.0: +supports-color@^5.3.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: has-flag "^3.0.0" +supports-color@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" + integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== + dependencies: + has-flag "^3.0.0" + symbol-observable@^0.2.2: version "0.2.4" resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-0.2.4.tgz#95a83db26186d6af7e7a18dbd9760a2f86d08f40" @@ -3075,17 +3161,17 @@ symbol-observable@^1.0.4, symbol-observable@^1.1.0: integrity sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ== tar@^4: - version "4.4.8" - resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.8.tgz#b19eec3fde2a96e64666df9fdb40c5ca1bc3747d" - integrity sha512-LzHF64s5chPQQS0IYBn9IN5h3i98c12bo4NCO7e0sGM2llXQ3p2FGC5sdENN4cTW48O915Sh+x+EXx7XW96xYQ== + version "4.4.10" + resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.10.tgz#946b2810b9a5e0b26140cf78bea6b0b0d689eba1" + integrity sha512-g2SVs5QIxvo6OLp0GudTqEf05maawKUxXru104iaayWA09551tFCTI8f1Asb4lPfkBr91k07iL4c11XO3/b0tA== dependencies: chownr "^1.1.1" fs-minipass "^1.2.5" - minipass "^2.3.4" - minizlib "^1.1.1" + minipass "^2.3.5" + minizlib "^1.2.1" mkdirp "^0.5.0" safe-buffer "^5.1.2" - yallist "^3.0.2" + yallist "^3.0.3" term-size@^1.2.0: version "1.2.0" @@ -3163,15 +3249,20 @@ ts-node@^7.0.1: source-map-support "^0.5.6" yn "^2.0.0" -tslib@^1.7.1, tslib@^1.8.0, tslib@^1.8.1: +tslib@^1.7.1: version "1.9.3" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.3.tgz#d7e4dd79245d85428c4d7e4822a79917954ca286" integrity sha512-4krF8scpejhaOgqzBEcGM7yDIEfi0/8+8zDRZhNZZ2kjmHJ4hv3zCbQWxoJGz1iw5U0Jl0nma13xzHXcncMavQ== +tslib@^1.8.0, tslib@^1.8.1: + version "1.10.0" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.10.0.tgz#c3c19f95973fb0a62973fb09d90d961ee43e5c8a" + integrity sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ== + tslint-config-prettier@^1.16.0: - version "1.16.0" - resolved "https://registry.yarnpkg.com/tslint-config-prettier/-/tslint-config-prettier-1.16.0.tgz#4627d0e2639554d89210e480093c381b5186963f" - integrity sha512-zu6RAcpBtqdvhT6KpBh9kRPYATjOf9BnRi718kNqVKFjEgSE4rFrPprFju1YJrkOa3RbtbWI1ZSuLd2NBX1MDw== + version "1.18.0" + resolved "https://registry.yarnpkg.com/tslint-config-prettier/-/tslint-config-prettier-1.18.0.tgz#75f140bde947d35d8f0d238e0ebf809d64592c37" + integrity sha512-xPw9PgNPLG3iKRxmK7DWr+Ea/SzrvfHtjFt5LBl61gk2UBG/DB9kCXRjv+xyIU1rUtnayLeMUVJBcMX8Z17nDg== tslint-plugin-prettier@^2.0.1: version "2.0.1" @@ -3183,24 +3274,25 @@ tslint-plugin-prettier@^2.0.1: tslib "^1.7.1" tslint@^5.11.0: - version "5.11.0" - resolved "https://registry.yarnpkg.com/tslint/-/tslint-5.11.0.tgz#98f30c02eae3cde7006201e4c33cb08b48581eed" - integrity sha1-mPMMAurjzecAYgHkwzywi0hYHu0= + version "5.18.0" + resolved "https://registry.yarnpkg.com/tslint/-/tslint-5.18.0.tgz#f61a6ddcf372344ac5e41708095bbf043a147ac6" + integrity sha512-Q3kXkuDEijQ37nXZZLKErssQVnwCV/+23gFEMROi8IlbaBG6tXqLPQJ5Wjcyt/yHPKBC+hD5SzuGaMora+ZS6w== dependencies: - babel-code-frame "^6.22.0" + "@babel/code-frame" "^7.0.0" builtin-modules "^1.1.1" chalk "^2.3.0" commander "^2.12.1" diff "^3.2.0" glob "^7.1.1" - js-yaml "^3.7.0" + js-yaml "^3.13.1" minimatch "^3.0.4" + mkdirp "^0.5.1" resolve "^1.3.2" semver "^5.3.0" tslib "^1.8.0" - tsutils "^2.27.2" + tsutils "^2.29.0" -tsutils@^2.27.2: +tsutils@^2.29.0: version "2.29.0" resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-2.29.0.tgz#32b488501467acbedd4b85498673a0812aca0b99" integrity sha512-g5JVHCIJwzfISaXpXE1qvNalca5Jwob6FjI4AoPlqMusJ6ftFE7IkkFoMhVLRgK+4Kx3gkzb8UZK5t5yTTvEmA== @@ -3212,16 +3304,16 @@ type-detect@^4.0.0, type-detect@^4.0.5: resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== -typescript@*: +type-fest@^0.3.0: + version "0.3.1" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.3.1.tgz#63d00d204e059474fe5e1b7c011112bbd1dc29e1" + integrity sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ== + +typescript@*, typescript@^3.1.6: version "3.5.2" resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.2.tgz#a09e1dc69bc9551cadf17dba10ee42cf55e5d56c" integrity sha512-7KxJovlYhTX5RaRbUdkAXN1KUZ8PwWlTzQdHV6xNqvuFOs7+WBo10TQUqT19Q/Jz2hk5v9TQDIhyLhhJY4p5AA== -typescript@^3.1.6: - version "3.1.6" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.1.6.tgz#b6543a83cfc8c2befb3f4c8fba6896f5b0c9be68" - integrity sha512-tDMYfVtvpb96msS1lDX9MEdHrW4yOuZ4Kdc4Him9oU796XldPYF/t2+uKoX0BBa0hXXwDlqYQbXY5Rzjzc5hBA== - uid2@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/uid2/-/uid2-0.0.3.tgz#483126e11774df2f71b8b639dcd799c376162b82" @@ -3240,25 +3332,25 @@ unicode-match-property-ecmascript@^1.0.4: unicode-canonical-property-names-ecmascript "^1.0.4" unicode-property-aliases-ecmascript "^1.0.4" -unicode-match-property-value-ecmascript@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.0.2.tgz#9f1dc76926d6ccf452310564fd834ace059663d4" - integrity sha512-Rx7yODZC1L/T8XKo/2kNzVAQaRE88AaMvI1EF/Xnj3GW2wzN6fop9DDWuFAKUVFH7vozkz26DzP0qyWLKLIVPQ== +unicode-match-property-value-ecmascript@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.1.0.tgz#5b4b426e08d13a80365e0d657ac7a6c1ec46a277" + integrity sha512-hDTHvaBk3RmFzvSl0UVrUmC3PuW9wKVnpoUDYH0JDkSIovzw+J5viQmeYHxVSBptubnr7PbH2e0fnpDRQnQl5g== unicode-property-aliases-ecmascript@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.0.4.tgz#5a533f31b4317ea76f17d807fa0d116546111dd0" - integrity sha512-2WSLa6OdYd2ng8oqiGIWnJqyFArvhn+5vgx5GTxMbUYjCYKUcuKS62YLFF0R/BDGlB1yzXjQOLtPAfHsgirEpg== + version "1.0.5" + resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.0.5.tgz#a9cc6cc7ce63a0a3023fc99e341b94431d405a57" + integrity sha512-L5RAqCfXqAwR3RriF8pM0lU0w4Ryf/GgzONwi6KnL1taJQa7x1TCxdJnILX59WIGOwR57IVxn7Nej0fz1Ny6fw== union-value@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.0.tgz#5c71c34cb5bad5dcebe3ea0cd08207ba5aa1aea4" - integrity sha1-XHHDTLW61dzr4+oM0IIHulqhrqQ= + version "1.0.1" + resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" + integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== dependencies: arr-union "^3.1.0" get-value "^2.0.6" is-extendable "^0.1.1" - set-value "^0.4.3" + set-value "^2.0.1" unique-string@^1.0.0: version "1.0.0" @@ -3289,10 +3381,10 @@ unzip-response@^2.0.1: resolved "https://registry.yarnpkg.com/unzip-response/-/unzip-response-2.0.1.tgz#d2f0f737d16b0615e72a6935ed04214572d56f97" integrity sha1-0vD3N9FrBhXnKmk17QQhRXLVb5c= -upath@^1.0.5: - version "1.1.0" - resolved "https://registry.yarnpkg.com/upath/-/upath-1.1.0.tgz#35256597e46a581db4793d0ce47fa9aebfc9fabd" - integrity sha512-bzpH/oBhoS/QI/YtbkqCg6VEiPYjSZtrHQM6/QnJS6OL9pKUFLqb3aFh4Scvwm45+7iAgiMkLhSbaZxUqmrprw== +upath@^1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/upath/-/upath-1.1.2.tgz#3db658600edaeeccbe6db5e684d67ee8c2acd068" + integrity sha512-kXpym8nmDmlCBr7nKdIx8P2jNBa+pBpIUFRnKJ4dr8htyYGJFokkr2ZvERRtUN+9SY+JqXouNgUPtv6JQva/2Q== update-notifier@^2.5.0: version "2.5.0" @@ -3379,9 +3471,9 @@ wrappy@1: integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= write-file-atomic@^2.0.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-2.3.0.tgz#1ff61575c2e2a4e8e510d6fa4e243cce183999ab" - integrity sha512-xuPeK4OdjWqtfi59ylvVL0Yn35SF3zgcAcv7rBPFHVaEapaDr4GdGgm3j7ckTwH9wHL7fGmgfAnb0+THrHb8tA== + version "2.4.3" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-2.4.3.tgz#1fd2e9ae1df3e75b8d8c367443c692d4ca81f481" + integrity sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ== dependencies: graceful-fs "^4.1.11" imurmurhash "^0.1.4" @@ -3397,7 +3489,12 @@ xtend@^4.0.0: resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af" integrity sha1-pcbVMr5lbiPbgg77lDofBJmNY68= -yallist@^3.0.0, yallist@^3.0.2: +yallist@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" + integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= + +yallist@^3.0.0, yallist@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.0.3.tgz#b4b049e314be545e3ce802236d6cd22cd91c3de9" integrity sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A== From af9293ab52a02b94e9b3b552479716d058bd0005 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Wed, 7 Aug 2019 17:18:51 -0400 Subject: [PATCH 08/69] Save --- package.json | 2 +- src/functions/definitions.ts | 37 +++++++++++++++++++ src/functions/functions.spec.ts | 37 +++++++++++++++++++ src/functions/functions.ts | 65 +++++++++++++++++++++++++++++++++ src/functions/index.ts | 20 +++++++++- yarn.lock | 7 +++- 6 files changed, 164 insertions(+), 4 deletions(-) diff --git a/package.json b/package.json index 8009f9f..9f86034 100644 --- a/package.json +++ b/package.json @@ -40,7 +40,7 @@ "tslint": "^5.11.0", "tslint-config-prettier": "^1.16.0", "tslint-plugin-prettier": "^2.0.1", - "typescript": "^3.1.6" + "typescript": "^3.5.3" }, "ava": { "files": [ diff --git a/src/functions/definitions.ts b/src/functions/definitions.ts index 8abb2ea..6593ec4 100644 --- a/src/functions/definitions.ts +++ b/src/functions/definitions.ts @@ -22,3 +22,40 @@ export interface JsonParseOptions { pretty: boolean; } +export enum FlushStrategy { + sampling, + rolling, + sliding, +} + +export type AccumulatorOptions = S extends FlushStrategy.sampling + ? SamplingFlushOptions + : S extends FlushStrategy.sliding + ? SlidingFlushOptions + : S extends FlushStrategy.rolling + ? RollingFlushOptions + : never; + +export interface RollingFlushOptions { + windowLength: number; + afterFlush?: (flushed: Array) => Array; +} + +export interface SlidingFlushOptions { + windowLength: number; + afterFlush?: (flushed: Array) => Array; +} + +export interface SlidingFlushResult { + first: T; +} + +export interface SamplingFlushOptions { + condition: (event: T, buffer: Array) => boolean; + flushMapper?: (flushed: Array) => Array; +} + +export interface SamplingFlushResult { + flushed: boolean; + flush?: Array | null; +} diff --git a/src/functions/functions.spec.ts b/src/functions/functions.spec.ts index 5bc2f9f..fc64ada 100644 --- a/src/functions/functions.spec.ts +++ b/src/functions/functions.spec.ts @@ -3,6 +3,7 @@ import test from "ava"; import { expect } from "chai"; import { performance } from "perf_hooks"; import { Readable } from "stream"; +import { FlushStrategy } from "./definitions"; import { fromArray, map, @@ -24,6 +25,7 @@ import { unbatch, rate, parallelMap, + accumulator, } from "."; import { sleep } from "../helpers"; @@ -1401,3 +1403,38 @@ test.cb("parallel() parallel mapping", t => { source.push("f"); source.push(null); }); + +test.cb.only("accumulator() buffering strategy", t => { + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const expectedElements = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 2, key: "d" }, + { ts: 3, key: "e" }, + ]; + source + .pipe( + accumulator(FlushStrategy.sampling, { + condition: (event: TestObject) => event.ts > 2, + }), + ) + .on("data", (flush: TestObject[]) => { + console.log("FLUSH", flush); + flush.forEach(item => expectedElements.includes(item)); + }) + .on("error", e => { + console.log("Got error: ", e); + t.end(); + }) + .on("end", () => { + console.log("end"); + t.end(); + }); + source.push(expectedElements); + source.push(null); +}); diff --git a/src/functions/functions.ts b/src/functions/functions.ts index fd10e73..2cc2d78 100644 --- a/src/functions/functions.ts +++ b/src/functions/functions.ts @@ -4,6 +4,10 @@ import { ChildProcess } from "child_process"; import { StringDecoder } from "string_decoder"; import { + FlushStrategy, + AccumulatorOptions, + SamplingFlushOptions, + SamplingFlushResult, TransformOptions, ThroughOptions, WithEncoding, @@ -602,3 +606,64 @@ export function parallelMap( }, }); } + +function samplingFlush( + event: T, + options: SamplingFlushOptions, + buffer: Array, +): SamplingFlushResult { + let flush = null; + if (options.condition(event, buffer)) { + flush = buffer.slice(0); + buffer.length = 0; + } + buffer.push(event); + return { flushed: true, flush }; +} + +function executeSamplingStrategy( + events: T[], + options: SamplingFlushOptions, + buffer: Array, + stream: Transform, +): void { + events.forEach(event => { + const sample = samplingFlush(event, options, buffer); + if (sample.flushed && sample.flush && options.flushMapper) { + stream.push(options.flushMapper(sample.flush)); + } else if (sample.flushed && sample.flush) { + stream.push(sample.flush); + } + }); +} + +export function accumulator( + flushStrategy: S, + options: AccumulatorOptions, +) { + const buffer: Array = []; + return new Transform({ + objectMode: true, + async transform(data, encoding, callback) { + callback(); + switch (flushStrategy) { + case FlushStrategy.sampling: { + executeSamplingStrategy( + data, + options as SamplingFlushOptions, + buffer, + this, + ); + break; + } + case FlushStrategy.sliding: { + break; + } + } + }, + flush(callback) { + this.push(buffer); + callback(); + }, + }); +} diff --git a/src/functions/index.ts b/src/functions/index.ts index 9085930..6f2c058 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -3,6 +3,8 @@ import { ChildProcess } from "child_process"; import * as baseFunctions from "./functions"; import { + AccumulatorOptions, + FlushStrategy, ThroughOptions, TransformOptions, WithEncoding, @@ -205,7 +207,10 @@ export function last(readable: Readable): Promise { * @param batchSize Size of the batches, defaults to 1000. * @param maxBatchAge? Max lifetime of a batch, defaults to 500 */ -export function batch(batchSize: number, maxBatchAge?: number): NodeJS.ReadWriteStream { +export function batch( + batchSize: number, + maxBatchAge?: number, +): NodeJS.ReadWriteStream { return baseFunctions.batch(batchSize, maxBatchAge); } @@ -222,7 +227,10 @@ export function unbatch(): NodeJS.ReadWriteStream { * @param targetRate? Desired rate in ms * @param period? Period to sleep for when rate is above or equal to targetRate */ -export function rate(targetRate?: number, period?: number): NodeJS.ReadWriteStream { +export function rate( + targetRate?: number, + period?: number, +): NodeJS.ReadWriteStream { return baseFunctions.rate(targetRate, period); } @@ -237,5 +245,13 @@ export function parallelMap( parallel?: number, sleepTime?: number, ) { + console.log("hi"); return baseFunctions.parallelMap(mapper, parallel, sleepTime); } + +export function accumulator( + flushStrategy: S, + options: AccumulatorOptions, +) { + return baseFunctions.accumulator(flushStrategy, options); +} diff --git a/yarn.lock b/yarn.lock index a103462..e0b7aca 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3309,11 +3309,16 @@ type-fest@^0.3.0: resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.3.1.tgz#63d00d204e059474fe5e1b7c011112bbd1dc29e1" integrity sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ== -typescript@*, typescript@^3.1.6: +typescript@*: version "3.5.2" resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.2.tgz#a09e1dc69bc9551cadf17dba10ee42cf55e5d56c" integrity sha512-7KxJovlYhTX5RaRbUdkAXN1KUZ8PwWlTzQdHV6xNqvuFOs7+WBo10TQUqT19Q/Jz2hk5v9TQDIhyLhhJY4p5AA== +typescript@^3.5.3: + version "3.5.3" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" + integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g== + uid2@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/uid2/-/uid2-0.0.3.tgz#483126e11774df2f71b8b639dcd799c376162b82" From d918d8ca108c143daf13a020f9788b6c11ac2e9f Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Wed, 7 Aug 2019 18:46:33 -0400 Subject: [PATCH 09/69] Save --- src/functions/functions.spec.ts | 18 ++++++++++-------- src/functions/functions.ts | 3 ++- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/src/functions/functions.spec.ts b/src/functions/functions.spec.ts index fc64ada..decd771 100644 --- a/src/functions/functions.spec.ts +++ b/src/functions/functions.spec.ts @@ -1405,6 +1405,7 @@ test.cb("parallel() parallel mapping", t => { }); test.cb.only("accumulator() buffering strategy", t => { + let chunkIndex = 0; interface TestObject { ts: number; key: string; @@ -1417,6 +1418,7 @@ test.cb.only("accumulator() buffering strategy", t => { { ts: 2, key: "d" }, { ts: 3, key: "e" }, ]; + source .pipe( accumulator(FlushStrategy.sampling, { @@ -1424,17 +1426,17 @@ test.cb.only("accumulator() buffering strategy", t => { }), ) .on("data", (flush: TestObject[]) => { - console.log("FLUSH", flush); - flush.forEach(item => expectedElements.includes(item)); - }) - .on("error", e => { - console.log("Got error: ", e); - t.end(); + if (chunkIndex === 0) { + chunkIndex++; + t.deepEqual(flush, expectedElements.slice(0, 4)); + } else { + t.deepEqual(flush, expectedElements.slice(4)); + } }) + .on("error", e => t.end) .on("end", () => { - console.log("end"); t.end(); }); - source.push(expectedElements); + expectedElements.forEach(element => source.push(element)); source.push(null); }); diff --git a/src/functions/functions.ts b/src/functions/functions.ts index 2cc2d78..40a534c 100644 --- a/src/functions/functions.ts +++ b/src/functions/functions.ts @@ -644,10 +644,11 @@ export function accumulator( const buffer: Array = []; return new Transform({ objectMode: true, - async transform(data, encoding, callback) { + async transform(data: T[] | T, encoding, callback) { callback(); switch (flushStrategy) { case FlushStrategy.sampling: { + if (!Array.isArray(data)) data = [data]; executeSamplingStrategy( data, options as SamplingFlushOptions, From a60b23496bb415cc010a39a87574fdc2fce71093 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 8 Aug 2019 10:58:56 -0400 Subject: [PATCH 10/69] Add tests --- src/functions/definitions.ts | 7 ++- src/functions/functions.spec.ts | 96 ++++++++++++++++++++++++++++++--- src/functions/functions.ts | 16 ++++-- src/functions/index.ts | 1 - 4 files changed, 108 insertions(+), 12 deletions(-) diff --git a/src/functions/definitions.ts b/src/functions/definitions.ts index 6593ec4..460e4ca 100644 --- a/src/functions/definitions.ts +++ b/src/functions/definitions.ts @@ -38,12 +38,14 @@ export type AccumulatorOptions = S extends FlushStrategy.sampling export interface RollingFlushOptions { windowLength: number; - afterFlush?: (flushed: Array) => Array; + flushMapper?: (flushed: Array) => Array; + timeout?: number; } export interface SlidingFlushOptions { windowLength: number; - afterFlush?: (flushed: Array) => Array; + flushMapper?: (flushed: Array) => Array; + timeout?: number; } export interface SlidingFlushResult { @@ -53,6 +55,7 @@ export interface SlidingFlushResult { export interface SamplingFlushOptions { condition: (event: T, buffer: Array) => boolean; flushMapper?: (flushed: Array) => Array; + timeout?: number; } export interface SamplingFlushResult { diff --git a/src/functions/functions.spec.ts b/src/functions/functions.spec.ts index decd771..5289065 100644 --- a/src/functions/functions.spec.ts +++ b/src/functions/functions.spec.ts @@ -1404,20 +1404,21 @@ test.cb("parallel() parallel mapping", t => { source.push(null); }); -test.cb.only("accumulator() buffering strategy", t => { +test.cb("accumulator() buffering strategy clears buffer on condition", t => { + t.plan(2); let chunkIndex = 0; interface TestObject { ts: number; key: string; } const source = new Readable({ objectMode: true }); - const expectedElements = [ + const firstFlush = [ { ts: 0, key: "a" }, { ts: 1, key: "b" }, { ts: 2, key: "c" }, { ts: 2, key: "d" }, - { ts: 3, key: "e" }, ]; + const secondFlush = [{ ts: 3, key: "e" }]; source .pipe( @@ -1428,15 +1429,98 @@ test.cb.only("accumulator() buffering strategy", t => { .on("data", (flush: TestObject[]) => { if (chunkIndex === 0) { chunkIndex++; - t.deepEqual(flush, expectedElements.slice(0, 4)); + t.deepEqual(flush, firstFlush); } else { - t.deepEqual(flush, expectedElements.slice(4)); + t.deepEqual(flush, secondFlush); } }) .on("error", e => t.end) .on("end", () => { t.end(); }); - expectedElements.forEach(element => source.push(element)); + source.push([...firstFlush, ...secondFlush]); source.push(null); }); + +test.cb("accumulator() buffering strategy clears buffer on timeout", t => { + t.plan(2); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true, read: () => {} }); + const firstFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; + const secondFlush = [ + { ts: 2, key: "c" }, + { ts: 2, key: "d" }, + { ts: 3, key: "e" }, + ]; + source + .pipe( + accumulator(FlushStrategy.sampling, { + condition: (event: TestObject) => event.ts > 3, + timeout: 1000, + }), + ) + .on("data", (flush: TestObject[]) => { + if (chunkIndex === 0) { + chunkIndex++; + t.deepEqual(flush, firstFlush); + } else { + t.deepEqual(flush, secondFlush); + } + }) + .on("error", e => t.end) + .on("end", () => { + t.end(); + }); + source.push(firstFlush); + setTimeout(() => { + source.push(secondFlush); + source.push(null); + }, 2000); +}); + +test.cb( + "accumulator() buffering strategy clears buffer on condition or timeout", + t => { + t.plan(3); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true, read: () => {} }); + const firstFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; + const secondFlush = [{ ts: 2, key: "c" }, { ts: 2, key: "d" }]; + const thirdFlush = [{ ts: 3, key: "e" }]; + source + .pipe( + accumulator(FlushStrategy.sampling, { + condition: (event: TestObject) => event.ts > 2, + timeout: 1000, + }), + ) + .on("data", (flush: TestObject[]) => { + if (chunkIndex === 0) { + chunkIndex++; + t.deepEqual(flush, firstFlush); + } else if (chunkIndex === 1) { + chunkIndex++; + t.deepEqual(flush, secondFlush); + } else { + t.deepEqual(flush, thirdFlush); + } + }) + .on("error", e => t.end) + .on("end", () => { + t.end(); + }); + source.push(firstFlush); + setTimeout(() => { + source.push([...secondFlush, ...thirdFlush]); + source.push(null); + }, 2000); + }, +); diff --git a/src/functions/functions.ts b/src/functions/functions.ts index 40a534c..b9fd812 100644 --- a/src/functions/functions.ts +++ b/src/functions/functions.ts @@ -525,7 +525,6 @@ export function batch(batchSize: number = 1000, maxBatchAge: number = 500) { callback(); }, flush(callback) { - console.error("flushing"); sendChunk(this); callback(); }, @@ -642,10 +641,18 @@ export function accumulator( options: AccumulatorOptions, ) { const buffer: Array = []; - return new Transform({ + let handle: NodeJS.Timer | null = null; + if (options.timeout) { + handle = setInterval(() => { + if (buffer.length > 0) { + transform.push(buffer); + buffer.length = 0; + } + }, options.timeout); + } + const transform = new Transform({ objectMode: true, async transform(data: T[] | T, encoding, callback) { - callback(); switch (flushStrategy) { case FlushStrategy.sampling: { if (!Array.isArray(data)) data = [data]; @@ -655,6 +662,7 @@ export function accumulator( buffer, this, ); + callback(); break; } case FlushStrategy.sliding: { @@ -663,8 +671,10 @@ export function accumulator( } }, flush(callback) { + handle && clearInterval(handle); this.push(buffer); callback(); }, }); + return transform; } diff --git a/src/functions/index.ts b/src/functions/index.ts index 6f2c058..a6ae32e 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -245,7 +245,6 @@ export function parallelMap( parallel?: number, sleepTime?: number, ) { - console.log("hi"); return baseFunctions.parallelMap(mapper, parallel, sleepTime); } From c1ef5fec4be68418feb0cee9d403b8afb8d56b9f Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Fri, 9 Aug 2019 09:58:14 -0400 Subject: [PATCH 11/69] Export accumulator and map enum --- src/functions/definitions.ts | 6 +++--- src/index.ts | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/functions/definitions.ts b/src/functions/definitions.ts index 460e4ca..a328242 100644 --- a/src/functions/definitions.ts +++ b/src/functions/definitions.ts @@ -23,9 +23,9 @@ export interface JsonParseOptions { } export enum FlushStrategy { - sampling, - rolling, - sliding, + sampling = "sampling", + rolling = "rolling", + sliding = "sliding", } export type AccumulatorOptions = S extends FlushStrategy.sampling diff --git a/src/index.ts b/src/index.ts index e9f1369..3d57c81 100644 --- a/src/index.ts +++ b/src/index.ts @@ -19,4 +19,5 @@ export { unbatch, rate, parallelMap, + accumulator, } from "./functions"; From fdcc5bafc61e421f82bbacd675f483cec1717391 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Fri, 9 Aug 2019 17:13:48 -0400 Subject: [PATCH 12/69] Add sliding, rolling functions with tests --- src/functions/definitions.ts | 2 +- src/functions/functions.spec.ts | 201 ++++++++++++++++++++------------ src/functions/functions.ts | 153 +++++++++++++----------- src/functions/index.ts | 17 ++- 4 files changed, 222 insertions(+), 151 deletions(-) diff --git a/src/functions/definitions.ts b/src/functions/definitions.ts index a328242..962a7a4 100644 --- a/src/functions/definitions.ts +++ b/src/functions/definitions.ts @@ -44,7 +44,7 @@ export interface RollingFlushOptions { export interface SlidingFlushOptions { windowLength: number; - flushMapper?: (flushed: Array) => Array; + windowMapper?: (flushed: Array) => Array; timeout?: number; } diff --git a/src/functions/functions.spec.ts b/src/functions/functions.spec.ts index 5289065..d5b2072 100644 --- a/src/functions/functions.spec.ts +++ b/src/functions/functions.spec.ts @@ -3,7 +3,6 @@ import test from "ava"; import { expect } from "chai"; import { performance } from "perf_hooks"; import { Readable } from "stream"; -import { FlushStrategy } from "./definitions"; import { fromArray, map, @@ -1404,7 +1403,36 @@ test.cb("parallel() parallel mapping", t => { source.push(null); }); -test.cb("accumulator() buffering strategy clears buffer on condition", t => { +test.cb("accumulator() rolling", t => { + t.plan(3); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const firstFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; + const secondFlush = [{ ts: 2, key: "d" }, { ts: 3, key: "e" }]; + const thirdFlush = [{ ts: 4, key: "f" }]; + const flushes = [firstFlush, secondFlush, thirdFlush]; + + source + .pipe(accumulator(2, 999, "rolling")) + .on("data", (flush: TestObject[]) => { + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; + }) + .on("error", (e: any) => t.end) + .on("end", () => { + t.end(); + }); + [...firstFlush, ...secondFlush, ...thirdFlush].forEach(item => { + source.push(item); + }); + source.push(null); +}); + +test.cb("accumulator() rolling with key", t => { t.plan(2); let chunkIndex = 0; interface TestObject { @@ -1421,11 +1449,7 @@ test.cb("accumulator() buffering strategy clears buffer on condition", t => { const secondFlush = [{ ts: 3, key: "e" }]; source - .pipe( - accumulator(FlushStrategy.sampling, { - condition: (event: TestObject) => event.ts > 2, - }), - ) + .pipe(accumulator(3, 999, "rolling", "ts")) .on("data", (flush: TestObject[]) => { if (chunkIndex === 0) { chunkIndex++; @@ -1434,93 +1458,118 @@ test.cb("accumulator() buffering strategy clears buffer on condition", t => { t.deepEqual(flush, secondFlush); } }) - .on("error", e => t.end) + .on("error", (e: any) => t.end) .on("end", () => { t.end(); }); - source.push([...firstFlush, ...secondFlush]); + [...firstFlush, ...secondFlush].forEach(item => { + source.push(item); + }); source.push(null); }); -test.cb("accumulator() buffering strategy clears buffer on timeout", t => { - t.plan(2); +test.cb("accumulator() sliding", t => { + t.plan(5); let chunkIndex = 0; interface TestObject { ts: number; key: string; } - const source = new Readable({ objectMode: true, read: () => {} }); - const firstFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; - const secondFlush = [ + const source = new Readable({ objectMode: true }); + const input = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, { ts: 2, key: "c" }, - { ts: 2, key: "d" }, - { ts: 3, key: "e" }, + { ts: 4, key: "d" }, + ]; + const firstFlush = [{ ts: 0, key: "a" }]; + const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; + const thirdFlush = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + ]; + const fourthFlush = [ + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 4, key: "d" }, + ]; + + const flushes = [ + firstFlush, + secondFlush, + thirdFlush, + fourthFlush, + fourthFlush, ]; source - .pipe( - accumulator(FlushStrategy.sampling, { - condition: (event: TestObject) => event.ts > 3, - timeout: 1000, - }), - ) + .pipe(accumulator(3, 999, "sliding")) .on("data", (flush: TestObject[]) => { - if (chunkIndex === 0) { - chunkIndex++; - t.deepEqual(flush, firstFlush); - } else { - t.deepEqual(flush, secondFlush); - } + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; }) - .on("error", e => t.end) + .on("error", (e: any) => t.end) .on("end", () => { t.end(); }); - source.push(firstFlush); - setTimeout(() => { - source.push(secondFlush); - source.push(null); - }, 2000); + input.forEach(item => { + source.push(item); + }); + source.push(null); }); -test.cb( - "accumulator() buffering strategy clears buffer on condition or timeout", - t => { - t.plan(3); - let chunkIndex = 0; - interface TestObject { - ts: number; - key: string; - } - const source = new Readable({ objectMode: true, read: () => {} }); - const firstFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; - const secondFlush = [{ ts: 2, key: "c" }, { ts: 2, key: "d" }]; - const thirdFlush = [{ ts: 3, key: "e" }]; - source - .pipe( - accumulator(FlushStrategy.sampling, { - condition: (event: TestObject) => event.ts > 2, - timeout: 1000, - }), - ) - .on("data", (flush: TestObject[]) => { - if (chunkIndex === 0) { - chunkIndex++; - t.deepEqual(flush, firstFlush); - } else if (chunkIndex === 1) { - chunkIndex++; - t.deepEqual(flush, secondFlush); - } else { - t.deepEqual(flush, thirdFlush); - } - }) - .on("error", e => t.end) - .on("end", () => { - t.end(); - }); - source.push(firstFlush); - setTimeout(() => { - source.push([...secondFlush, ...thirdFlush]); - source.push(null); - }, 2000); - }, -); +test.cb("accumulator() sliding with key", t => { + t.plan(7); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const input = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 3, key: "d" }, + { ts: 5, key: "f" }, + { ts: 6, key: "g" }, + ]; + const firstFlush = [{ ts: 0, key: "a" }]; + const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; + const thirdFlush = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + ]; + const fourthFlush = [ + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 3, key: "d" }, + ]; + const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }]; + const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }]; + + const flushes = [ + firstFlush, + secondFlush, + thirdFlush, + fourthFlush, + fifthFlush, + sixthFlush, + sixthFlush, + ]; + source + .pipe(accumulator(3, 999, "sliding", "ts")) + .on("data", (flush: TestObject[]) => { + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; + }) + .on("error", (e: any) => t.end) + .on("end", () => { + t.end(); + }); + input.forEach(item => { + source.push(item); + }); + source.push(null); +}); diff --git a/src/functions/functions.ts b/src/functions/functions.ts index b9fd812..219fed0 100644 --- a/src/functions/functions.ts +++ b/src/functions/functions.ts @@ -2,12 +2,7 @@ import { Transform, Readable, Writable, Duplex } from "stream"; import { performance } from "perf_hooks"; import { ChildProcess } from "child_process"; import { StringDecoder } from "string_decoder"; - import { - FlushStrategy, - AccumulatorOptions, - SamplingFlushOptions, - SamplingFlushResult, TransformOptions, ThroughOptions, WithEncoding, @@ -606,75 +601,97 @@ export function parallelMap( }); } -function samplingFlush( - event: T, - options: SamplingFlushOptions, - buffer: Array, -): SamplingFlushResult { - let flush = null; - if (options.condition(event, buffer)) { - flush = buffer.slice(0); - buffer.length = 0; - } - buffer.push(event); - return { flushed: true, flush }; -} - -function executeSamplingStrategy( - events: T[], - options: SamplingFlushOptions, - buffer: Array, - stream: Transform, -): void { - events.forEach(event => { - const sample = samplingFlush(event, options, buffer); - if (sample.flushed && sample.flush && options.flushMapper) { - stream.push(options.flushMapper(sample.flush)); - } else if (sample.flushed && sample.flush) { - stream.push(sample.flush); - } - }); -} - -export function accumulator( - flushStrategy: S, - options: AccumulatorOptions, +function _accumulator( + accumulateBy: (data: T, buffer: T[], stream: Transform) => void, ) { - const buffer: Array = []; - let handle: NodeJS.Timer | null = null; - if (options.timeout) { - handle = setInterval(() => { - if (buffer.length > 0) { - transform.push(buffer); - buffer.length = 0; - } - }, options.timeout); - } - const transform = new Transform({ + const buffer: T[] = []; + return new Transform({ objectMode: true, - async transform(data: T[] | T, encoding, callback) { - switch (flushStrategy) { - case FlushStrategy.sampling: { - if (!Array.isArray(data)) data = [data]; - executeSamplingStrategy( - data, - options as SamplingFlushOptions, - buffer, - this, - ); - callback(); - break; - } - case FlushStrategy.sliding: { - break; - } - } + async transform(data: any, encoding, callback) { + accumulateBy(data, buffer, this); + callback(); }, flush(callback) { - handle && clearInterval(handle); this.push(buffer); callback(); }, }); - return transform; +} + +function _slidingBy( + windowLength: number, + rate: number, + key?: string, +): (event: T, buffer: T[], stream: Transform) => void { + return (event: T, buffer: T[], stream: Transform) => { + if (key) { + let index = 0; + while ( + buffer.length > 0 && + buffer[index][key] + windowLength <= event[key] + ) { + index++; + } + buffer.splice(0, index); + } else if (buffer.length === windowLength) { + buffer.shift(); + } + buffer.push(event); + stream.push(buffer); + }; +} + +function _rollingBy( + windowLength: number, + rate: number, + key?: string, +): (event: T, buffer: T[], stream: Transform) => void { + return (event: T, buffer: T[], stream: Transform) => { + if (key) { + if ( + buffer.length > 0 && + buffer[0][key] + windowLength <= event[key] + ) { + stream.push(buffer.slice(0)); + buffer.length = 0; + } + } else if (buffer.length === windowLength) { + stream.push(buffer.slice(0)); + buffer.length = 0; + } + buffer.push(event); + }; +} + +export function accumulator( + batchSize: number, + batchRate: number, + flushStrategy: "sliding" | "rolling", + keyBy?: string, +): Transform { + if (flushStrategy === "sliding") { + return sliding(batchSize, batchRate, keyBy); + } else if (flushStrategy === "rolling") { + return rolling(batchSize, batchRate, keyBy); + } else { + return batch(batchSize, batchRate); + } +} + +export function sliding( + windowLength: number, + rate: number, + key?: string, +): Transform { + const slidingByFn = _slidingBy(windowLength, rate, key); + return _accumulator(slidingByFn); +} + +export function rolling( + windowLength: number, + rate: number, + key?: string, +): Transform { + const rollingByFn = _rollingBy(windowLength, rate, key); + return _accumulator(rollingByFn); } diff --git a/src/functions/index.ts b/src/functions/index.ts index a6ae32e..3fe17b2 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -3,8 +3,6 @@ import { ChildProcess } from "child_process"; import * as baseFunctions from "./functions"; import { - AccumulatorOptions, - FlushStrategy, ThroughOptions, TransformOptions, WithEncoding, @@ -248,9 +246,16 @@ export function parallelMap( return baseFunctions.parallelMap(mapper, parallel, sleepTime); } -export function accumulator( - flushStrategy: S, - options: AccumulatorOptions, +export function accumulator( + batchSize: number, + batchRate: number, + flushStrategy: "sliding" | "rolling", + keyBy?: string, ) { - return baseFunctions.accumulator(flushStrategy, options); + return baseFunctions.accumulator( + batchSize, + batchRate, + flushStrategy, + keyBy, + ); } From e932adde670f1ff9aee6ad1109429610ebd40766 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Mon, 12 Aug 2019 11:07:39 -0400 Subject: [PATCH 13/69] Update tests --- src/functions/functions.spec.ts | 34 +++++++++++++-------------------- src/functions/functions.ts | 17 ++++++++++------- src/functions/index.ts | 2 +- 3 files changed, 24 insertions(+), 29 deletions(-) diff --git a/src/functions/functions.spec.ts b/src/functions/functions.spec.ts index d5b2072..6fe8384 100644 --- a/src/functions/functions.spec.ts +++ b/src/functions/functions.spec.ts @@ -1417,12 +1417,14 @@ test.cb("accumulator() rolling", t => { const flushes = [firstFlush, secondFlush, thirdFlush]; source - .pipe(accumulator(2, 999, "rolling")) + .pipe(accumulator(2, undefined, "rolling")) .on("data", (flush: TestObject[]) => { t.deepEqual(flush, flushes[chunkIndex]); chunkIndex++; }) - .on("error", (e: any) => t.end) + .on("error", (e: any) => { + t.end(e); + }) .on("end", () => { t.end(); }); @@ -1447,16 +1449,13 @@ test.cb("accumulator() rolling with key", t => { { ts: 2, key: "d" }, ]; const secondFlush = [{ ts: 3, key: "e" }]; + const flushes = [firstFlush, secondFlush]; source - .pipe(accumulator(3, 999, "rolling", "ts")) + .pipe(accumulator(3, undefined, "rolling", "ts")) .on("data", (flush: TestObject[]) => { - if (chunkIndex === 0) { - chunkIndex++; - t.deepEqual(flush, firstFlush); - } else { - t.deepEqual(flush, secondFlush); - } + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; }) .on("error", (e: any) => t.end) .on("end", () => { @@ -1469,7 +1468,7 @@ test.cb("accumulator() rolling with key", t => { }); test.cb("accumulator() sliding", t => { - t.plan(5); + t.plan(4); let chunkIndex = 0; interface TestObject { ts: number; @@ -1495,15 +1494,9 @@ test.cb("accumulator() sliding", t => { { ts: 4, key: "d" }, ]; - const flushes = [ - firstFlush, - secondFlush, - thirdFlush, - fourthFlush, - fourthFlush, - ]; + const flushes = [firstFlush, secondFlush, thirdFlush, fourthFlush]; source - .pipe(accumulator(3, 999, "sliding")) + .pipe(accumulator(3, undefined, "sliding")) .on("data", (flush: TestObject[]) => { t.deepEqual(flush, flushes[chunkIndex]); chunkIndex++; @@ -1519,7 +1512,7 @@ test.cb("accumulator() sliding", t => { }); test.cb("accumulator() sliding with key", t => { - t.plan(7); + t.plan(6); let chunkIndex = 0; interface TestObject { ts: number; @@ -1556,10 +1549,9 @@ test.cb("accumulator() sliding with key", t => { fourthFlush, fifthFlush, sixthFlush, - sixthFlush, ]; source - .pipe(accumulator(3, 999, "sliding", "ts")) + .pipe(accumulator(3, undefined, "sliding", "ts")) .on("data", (flush: TestObject[]) => { t.deepEqual(flush, flushes[chunkIndex]); chunkIndex++; diff --git a/src/functions/functions.ts b/src/functions/functions.ts index 219fed0..143a5cc 100644 --- a/src/functions/functions.ts +++ b/src/functions/functions.ts @@ -603,6 +603,7 @@ export function parallelMap( function _accumulator( accumulateBy: (data: T, buffer: T[], stream: Transform) => void, + shouldFlush: boolean = true, ) { const buffer: T[] = []; return new Transform({ @@ -612,7 +613,9 @@ function _accumulator( callback(); }, flush(callback) { - this.push(buffer); + if (shouldFlush) { + this.push(buffer); + } callback(); }, }); @@ -620,7 +623,7 @@ function _accumulator( function _slidingBy( windowLength: number, - rate: number, + rate: number | undefined, key?: string, ): (event: T, buffer: T[], stream: Transform) => void { return (event: T, buffer: T[], stream: Transform) => { @@ -643,7 +646,7 @@ function _slidingBy( function _rollingBy( windowLength: number, - rate: number, + rate: number | undefined, key?: string, ): (event: T, buffer: T[], stream: Transform) => void { return (event: T, buffer: T[], stream: Transform) => { @@ -665,7 +668,7 @@ function _rollingBy( export function accumulator( batchSize: number, - batchRate: number, + batchRate: number | undefined, flushStrategy: "sliding" | "rolling", keyBy?: string, ): Transform { @@ -680,16 +683,16 @@ export function accumulator( export function sliding( windowLength: number, - rate: number, + rate: number | undefined, key?: string, ): Transform { const slidingByFn = _slidingBy(windowLength, rate, key); - return _accumulator(slidingByFn); + return _accumulator(slidingByFn, false); } export function rolling( windowLength: number, - rate: number, + rate: number | undefined, key?: string, ): Transform { const rollingByFn = _rollingBy(windowLength, rate, key); diff --git a/src/functions/index.ts b/src/functions/index.ts index 3fe17b2..5599934 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -248,7 +248,7 @@ export function parallelMap( export function accumulator( batchSize: number, - batchRate: number, + batchRate: number | undefined, flushStrategy: "sliding" | "rolling", keyBy?: string, ) { From 5112ee954053651bf0f706f1023e5a940de86654 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Mon, 12 Aug 2019 11:08:53 -0400 Subject: [PATCH 14/69] Types --- src/functions/functions.spec.ts | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/functions/functions.spec.ts b/src/functions/functions.spec.ts index 6fe8384..6d1b31e 100644 --- a/src/functions/functions.spec.ts +++ b/src/functions/functions.spec.ts @@ -1457,7 +1457,9 @@ test.cb("accumulator() rolling with key", t => { t.deepEqual(flush, flushes[chunkIndex]); chunkIndex++; }) - .on("error", (e: any) => t.end) + .on("error", (e: any) => { + t.end(e); + }) .on("end", () => { t.end(); }); @@ -1501,7 +1503,9 @@ test.cb("accumulator() sliding", t => { t.deepEqual(flush, flushes[chunkIndex]); chunkIndex++; }) - .on("error", (e: any) => t.end) + .on("error", (e: any) => { + t.end(e); + }) .on("end", () => { t.end(); }); @@ -1556,7 +1560,9 @@ test.cb("accumulator() sliding with key", t => { t.deepEqual(flush, flushes[chunkIndex]); chunkIndex++; }) - .on("error", (e: any) => t.end) + .on("error", (e: any) => { + t.end(e); + }) .on("end", () => { t.end(); }); From e8d672d903332be0accddb7604db248417d985db Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Mon, 12 Aug 2019 11:59:27 -0400 Subject: [PATCH 15/69] Clean up types --- src/functions/definitions.ts | 36 ------------------------------------ src/functions/functions.ts | 18 ++++++++++++++---- 2 files changed, 14 insertions(+), 40 deletions(-) diff --git a/src/functions/definitions.ts b/src/functions/definitions.ts index 962a7a4..2729b65 100644 --- a/src/functions/definitions.ts +++ b/src/functions/definitions.ts @@ -23,42 +23,6 @@ export interface JsonParseOptions { } export enum FlushStrategy { - sampling = "sampling", rolling = "rolling", sliding = "sliding", } - -export type AccumulatorOptions = S extends FlushStrategy.sampling - ? SamplingFlushOptions - : S extends FlushStrategy.sliding - ? SlidingFlushOptions - : S extends FlushStrategy.rolling - ? RollingFlushOptions - : never; - -export interface RollingFlushOptions { - windowLength: number; - flushMapper?: (flushed: Array) => Array; - timeout?: number; -} - -export interface SlidingFlushOptions { - windowLength: number; - windowMapper?: (flushed: Array) => Array; - timeout?: number; -} - -export interface SlidingFlushResult { - first: T; -} - -export interface SamplingFlushOptions { - condition: (event: T, buffer: Array) => boolean; - flushMapper?: (flushed: Array) => Array; - timeout?: number; -} - -export interface SamplingFlushResult { - flushed: boolean; - flush?: Array | null; -} diff --git a/src/functions/functions.ts b/src/functions/functions.ts index 143a5cc..2df000e 100644 --- a/src/functions/functions.ts +++ b/src/functions/functions.ts @@ -9,6 +9,7 @@ import { SerializationFormats, JsonValue, JsonParseOptions, + FlushStrategy, } from "./definitions"; import { sleep } from "../helpers"; @@ -651,7 +652,16 @@ function _rollingBy( ): (event: T, buffer: T[], stream: Transform) => void { return (event: T, buffer: T[], stream: Transform) => { if (key) { - if ( + if (event[key] === undefined) { + stream.emit( + "error", + new Error( + `Key is missing in event: (${key}, ${JSON.stringify( + event, + )})`, + ), + ); + } else if ( buffer.length > 0 && buffer[0][key] + windowLength <= event[key] ) { @@ -669,12 +679,12 @@ function _rollingBy( export function accumulator( batchSize: number, batchRate: number | undefined, - flushStrategy: "sliding" | "rolling", + flushStrategy: FlushStrategy, keyBy?: string, ): Transform { - if (flushStrategy === "sliding") { + if (flushStrategy === FlushStrategy.sliding) { return sliding(batchSize, batchRate, keyBy); - } else if (flushStrategy === "rolling") { + } else if (flushStrategy === FlushStrategy.rolling) { return rolling(batchSize, batchRate, keyBy); } else { return batch(batchSize, batchRate); From c72ecaf219271e24679d2860373234a0cc5b3ee0 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Mon, 12 Aug 2019 12:08:42 -0400 Subject: [PATCH 16/69] Add FlushStrategy as enum --- src/functions/index.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/functions/index.ts b/src/functions/index.ts index 5599934..6265a92 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -7,6 +7,7 @@ import { TransformOptions, WithEncoding, JsonParseOptions, + FlushStrategy, } from "./definitions"; /** @@ -249,7 +250,7 @@ export function parallelMap( export function accumulator( batchSize: number, batchRate: number | undefined, - flushStrategy: "sliding" | "rolling", + flushStrategy: FlushStrategy, keyBy?: string, ) { return baseFunctions.accumulator( From 3a1fbf44d78b30c7050595ba80ec3f017278a878 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Mon, 12 Aug 2019 14:42:54 -0400 Subject: [PATCH 17/69] More tests --- src/functions/definitions.ts | 2 + src/functions/functions.spec.ts | 118 ++++++++++++++++++++++++++++++-- src/functions/functions.ts | 69 +++++++++++++++++-- src/functions/index.ts | 9 +++ 4 files changed, 187 insertions(+), 11 deletions(-) diff --git a/src/functions/definitions.ts b/src/functions/definitions.ts index 2729b65..6cc97a8 100644 --- a/src/functions/definitions.ts +++ b/src/functions/definitions.ts @@ -26,3 +26,5 @@ export enum FlushStrategy { rolling = "rolling", sliding = "sliding", } + +export type AccumulatorByIteratee = (event: T, bufferChunk: T) => boolean; diff --git a/src/functions/functions.spec.ts b/src/functions/functions.spec.ts index 6d1b31e..f174928 100644 --- a/src/functions/functions.spec.ts +++ b/src/functions/functions.spec.ts @@ -25,7 +25,9 @@ import { rate, parallelMap, accumulator, + accumulatorBy, } from "."; +import { FlushStrategy } from "./definitions"; import { sleep } from "../helpers"; test.cb("fromArray() streams array elements in flowing mode", t => { @@ -1417,7 +1419,7 @@ test.cb("accumulator() rolling", t => { const flushes = [firstFlush, secondFlush, thirdFlush]; source - .pipe(accumulator(2, undefined, "rolling")) + .pipe(accumulator(2, undefined, FlushStrategy.rolling)) .on("data", (flush: TestObject[]) => { t.deepEqual(flush, flushes[chunkIndex]); chunkIndex++; @@ -1452,7 +1454,50 @@ test.cb("accumulator() rolling with key", t => { const flushes = [firstFlush, secondFlush]; source - .pipe(accumulator(3, undefined, "rolling", "ts")) + .pipe(accumulator(3, undefined, FlushStrategy.rolling, "ts")) + .on("data", (flush: TestObject[]) => { + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; + }) + .on("error", (e: any) => { + t.end(e); + }) + .on("end", () => { + t.end(); + }); + [...firstFlush, ...secondFlush].forEach(item => { + source.push(item); + }); + source.push(null); +}); + +test.cb("accumulatorBy() rolling", t => { + t.plan(2); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const firstFlush = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 2, key: "d" }, + ]; + const secondFlush = [{ ts: 3, key: "e" }]; + const flushes = [firstFlush, secondFlush]; + + source + .pipe( + accumulatorBy( + undefined, + FlushStrategy.rolling, + (event: TestObject, bufferChunk: TestObject) => { + return bufferChunk.ts + 3 <= event.ts; + }, + ), + ) .on("data", (flush: TestObject[]) => { t.deepEqual(flush, flushes[chunkIndex]); chunkIndex++; @@ -1498,7 +1543,7 @@ test.cb("accumulator() sliding", t => { const flushes = [firstFlush, secondFlush, thirdFlush, fourthFlush]; source - .pipe(accumulator(3, undefined, "sliding")) + .pipe(accumulator(3, undefined, FlushStrategy.sliding)) .on("data", (flush: TestObject[]) => { t.deepEqual(flush, flushes[chunkIndex]); chunkIndex++; @@ -1555,7 +1600,72 @@ test.cb("accumulator() sliding with key", t => { sixthFlush, ]; source - .pipe(accumulator(3, undefined, "sliding", "ts")) + .pipe(accumulator(3, undefined, FlushStrategy.sliding, "ts")) + .on("data", (flush: TestObject[]) => { + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; + }) + .on("error", (e: any) => { + t.end(e); + }) + .on("end", () => { + t.end(); + }); + input.forEach(item => { + source.push(item); + }); + source.push(null); +}); + +test.cb("accumulatorBy() sliding", t => { + t.plan(6); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const input = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 3, key: "d" }, + { ts: 5, key: "f" }, + { ts: 6, key: "g" }, + ]; + const firstFlush = [{ ts: 0, key: "a" }]; + const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; + const thirdFlush = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + ]; + const fourthFlush = [ + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 3, key: "d" }, + ]; + const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }]; + const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }]; + + const flushes = [ + firstFlush, + secondFlush, + thirdFlush, + fourthFlush, + fifthFlush, + sixthFlush, + ]; + source + .pipe( + accumulatorBy( + undefined, + FlushStrategy.sliding, + (event: TestObject, bufferChunk: TestObject) => { + return bufferChunk.ts + 3 <= event.ts ? true : false; + }, + ), + ) .on("data", (flush: TestObject[]) => { t.deepEqual(flush, flushes[chunkIndex]); chunkIndex++; diff --git a/src/functions/functions.ts b/src/functions/functions.ts index 2df000e..49eea1f 100644 --- a/src/functions/functions.ts +++ b/src/functions/functions.ts @@ -10,6 +10,7 @@ import { JsonValue, JsonParseOptions, FlushStrategy, + AccumulatorByIteratee, } from "./definitions"; import { sleep } from "../helpers"; @@ -622,7 +623,7 @@ function _accumulator( }); } -function _slidingBy( +function _sliding( windowLength: number, rate: number | undefined, key?: string, @@ -631,7 +632,7 @@ function _slidingBy( if (key) { let index = 0; while ( - buffer.length > 0 && + index < buffer.length && buffer[index][key] + windowLength <= event[key] ) { index++; @@ -645,7 +646,37 @@ function _slidingBy( }; } -function _rollingBy( +function _slidingByFunction( + rate: number | undefined, + iteratee: AccumulatorByIteratee, +): (event: T, buffer: T[], stream: Transform) => void { + return (event: T, buffer: T[], stream: Transform) => { + let index = 0; + while (index < buffer.length && iteratee(event, buffer[index])) { + index++; + } + buffer.splice(0, index); + buffer.push(event); + stream.push(buffer); + }; +} + +function _rollingByFunction( + rate: number | undefined, + iteratee: AccumulatorByIteratee, +): (event: T, buffer: T[], stream: Transform) => void { + return (event: T, buffer: T[], stream: Transform) => { + if (iteratee) { + if (buffer.length > 0 && iteratee(event, buffer[0])) { + stream.push(buffer.slice(0)); + buffer.length = 0; + } + } + buffer.push(event); + }; +} + +function _rolling( windowLength: number, rate: number | undefined, key?: string, @@ -691,13 +722,31 @@ export function accumulator( } } +export function accumulatorBy( + batchRate: number | undefined, + flushStrategy: S, + iteratee: AccumulatorByIteratee, +): Transform { + if (flushStrategy === FlushStrategy.sliding) { + return slidingBy(batchRate, iteratee); + } else { + return rollingBy(batchRate, iteratee); + } +} + export function sliding( windowLength: number, rate: number | undefined, key?: string, ): Transform { - const slidingByFn = _slidingBy(windowLength, rate, key); - return _accumulator(slidingByFn, false); + return _accumulator(_sliding(windowLength, rate, key), false); +} + +export function slidingBy( + rate: number | undefined, + iteratee: AccumulatorByIteratee, +): Transform { + return _accumulator(_slidingByFunction(rate, iteratee), false); } export function rolling( @@ -705,6 +754,12 @@ export function rolling( rate: number | undefined, key?: string, ): Transform { - const rollingByFn = _rollingBy(windowLength, rate, key); - return _accumulator(rollingByFn); + return _accumulator(_rolling(windowLength, rate, key)); +} + +export function rollingBy( + rate: number | undefined, + iteratee: AccumulatorByIteratee, +): Transform { + return _accumulator(_rollingByFunction(rate, iteratee)); } diff --git a/src/functions/index.ts b/src/functions/index.ts index 6265a92..2c511c0 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -8,6 +8,7 @@ import { WithEncoding, JsonParseOptions, FlushStrategy, + AccumulatorByIteratee, } from "./definitions"; /** @@ -260,3 +261,11 @@ export function accumulator( keyBy, ); } + +export function accumulatorBy( + batchRate: number | undefined, + flushStrategy: S, + iteratee: AccumulatorByIteratee, +) { + return baseFunctions.accumulatorBy(batchRate, flushStrategy, iteratee); +} From a40b1bf38c6b85aa444215dbaaa84e91765ffcb6 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 15 Aug 2019 11:54:50 -0400 Subject: [PATCH 18/69] Save --- package.json | 6 +- src/functions/accumulator/accumulator.spec.ts | 323 ++++ src/functions/accumulator/definitions.ts | 6 + src/functions/accumulator/index.ts | 169 ++ src/functions/baseFunctions.ts | 20 + src/functions/batch/index.ts | 47 + src/functions/child/index.ts | 12 + src/functions/collect/index.ts | 26 + src/functions/concat/index.ts | 40 + src/functions/duplex/index.ts | 33 + src/functions/filter/filter.spec.ts | 102 + src/functions/filter/index.ts | 41 + src/functions/flatMap/flatMap.spec.ts | 100 + src/functions/flatMap/index.ts | 39 + src/functions/fromArray/fromArray.spec.ts | 45 + src/functions/fromArray/index.ts | 19 + src/functions/functions.spec.ts | 1683 ----------------- src/functions/functions.ts | 765 -------- src/functions/index.ts | 11 +- src/functions/join/index.ts | 31 + src/functions/join/join.spec.ts | 56 + src/functions/last/index.ts | 14 + src/functions/map/index.ts | 29 + src/functions/map/map.spec.ts | 107 ++ src/functions/merge/index.ts | 36 + src/functions/parallelMap/index.ts | 44 + src/functions/parse/index.ts | 26 + src/functions/rate/index.ts | 31 + src/functions/reduce/index.ts | 57 + src/functions/reduce/reduce.spec.ts | 98 + src/functions/replace/index.ts | 33 + src/functions/replace/replace.spec.ts | 80 + src/functions/split/index.ts | 34 + src/functions/split/split.spec.ts | 98 + src/functions/stringify/index.ts | 22 + src/functions/unbatch/index.ts | 21 + tsconfig.json | 4 +- yarn.lock | 289 ++- 38 files changed, 1981 insertions(+), 2616 deletions(-) create mode 100644 src/functions/accumulator/accumulator.spec.ts create mode 100644 src/functions/accumulator/definitions.ts create mode 100644 src/functions/accumulator/index.ts create mode 100644 src/functions/baseFunctions.ts create mode 100644 src/functions/batch/index.ts create mode 100644 src/functions/child/index.ts create mode 100644 src/functions/collect/index.ts create mode 100644 src/functions/concat/index.ts create mode 100644 src/functions/duplex/index.ts create mode 100644 src/functions/filter/filter.spec.ts create mode 100644 src/functions/filter/index.ts create mode 100644 src/functions/flatMap/flatMap.spec.ts create mode 100644 src/functions/flatMap/index.ts create mode 100644 src/functions/fromArray/fromArray.spec.ts create mode 100644 src/functions/fromArray/index.ts delete mode 100644 src/functions/functions.spec.ts delete mode 100644 src/functions/functions.ts create mode 100644 src/functions/join/index.ts create mode 100644 src/functions/join/join.spec.ts create mode 100644 src/functions/last/index.ts create mode 100644 src/functions/map/index.ts create mode 100644 src/functions/map/map.spec.ts create mode 100644 src/functions/merge/index.ts create mode 100644 src/functions/parallelMap/index.ts create mode 100644 src/functions/parse/index.ts create mode 100644 src/functions/rate/index.ts create mode 100644 src/functions/reduce/index.ts create mode 100644 src/functions/reduce/reduce.spec.ts create mode 100644 src/functions/replace/index.ts create mode 100644 src/functions/replace/replace.spec.ts create mode 100644 src/functions/split/index.ts create mode 100644 src/functions/split/split.spec.ts create mode 100644 src/functions/stringify/index.ts create mode 100644 src/functions/unbatch/index.ts diff --git a/package.json b/package.json index 9f86034..a5c8e2e 100644 --- a/package.json +++ b/package.json @@ -23,6 +23,7 @@ }, "scripts": { "test": "ava", + "test:debug": "NODE_PATH=src node inspect node_modules/ava/profile.js", "lint": "tslint -p tsconfig.json", "validate:tslint": "tslint-config-prettier-check ./tslint.json", "prepublishOnly": "yarn lint && yarn test && yarn tsc" @@ -30,13 +31,12 @@ "dependencies": {}, "devDependencies": { "@types/chai": "^4.1.7", - "@types/node": "^10.12.10", - "@types/typescript": "^2.0.0", + "@types/node": "^12.7.2", "ava": "^1.0.0-rc.2", "chai": "^4.2.0", "mhysa": "./", "prettier": "^1.14.3", - "ts-node": "^7.0.1", + "ts-node": "^8.3.0", "tslint": "^5.11.0", "tslint-config-prettier": "^1.16.0", "tslint-plugin-prettier": "^2.0.1", diff --git a/src/functions/accumulator/accumulator.spec.ts b/src/functions/accumulator/accumulator.spec.ts new file mode 100644 index 0000000..c22b2fe --- /dev/null +++ b/src/functions/accumulator/accumulator.spec.ts @@ -0,0 +1,323 @@ +import test from "ava"; +import { expect } from "chai"; +import { Readable } from "stream"; +import { accumulator, accumulatorBy } from "."; +import { FlushStrategy } from "./definitions"; + +test.cb("accumulator() rolling", t => { + t.plan(3); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const firstFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; + const secondFlush = [{ ts: 2, key: "d" }, { ts: 3, key: "e" }]; + const thirdFlush = [{ ts: 4, key: "f" }]; + const flushes = [firstFlush, secondFlush, thirdFlush]; + + source + .pipe(accumulator(2, undefined, FlushStrategy.rolling)) + .on("data", (flush: TestObject[]) => { + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; + }) + .on("error", (e: any) => { + t.end(e); + }) + .on("end", () => { + t.end(); + }); + [...firstFlush, ...secondFlush, ...thirdFlush].forEach(item => { + source.push(item); + }); + source.push(null); +}); + +test.cb("accumulator() rolling with key", t => { + t.plan(2); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const firstFlush = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 2, key: "d" }, + ]; + const secondFlush = [{ ts: 3, key: "e" }]; + const flushes = [firstFlush, secondFlush]; + + source + .pipe(accumulator(3, undefined, FlushStrategy.rolling, "ts")) + .on("data", (flush: TestObject[]) => { + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; + }) + .on("error", (e: any) => { + t.end(e); + }) + .on("end", () => { + t.end(); + }); + [...firstFlush, ...secondFlush].forEach(item => { + source.push(item); + }); + source.push(null); +}); + +test.cb("accumulatorBy() rolling", t => { + t.plan(2); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const firstFlush = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 2, key: "d" }, + ]; + const secondFlush = [{ ts: 3, key: "e" }]; + const flushes = [firstFlush, secondFlush]; + + source + .pipe( + accumulatorBy( + undefined, + FlushStrategy.rolling, + (event: TestObject, bufferChunk: TestObject) => { + return bufferChunk.ts + 3 <= event.ts; + }, + ), + ) + .on("data", (flush: TestObject[]) => { + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; + }) + .on("error", (e: any) => { + t.end(e); + }) + .on("end", () => { + t.end(); + }); + [...firstFlush, ...secondFlush].forEach(item => { + source.push(item); + }); + source.push(null); +}); + +test.cb("accumulator() sliding", t => { + t.plan(4); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const input = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 4, key: "d" }, + ]; + const firstFlush = [{ ts: 0, key: "a" }]; + const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; + const thirdFlush = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + ]; + const fourthFlush = [ + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 4, key: "d" }, + ]; + + const flushes = [firstFlush, secondFlush, thirdFlush, fourthFlush]; + source + .pipe(accumulator(3, undefined, FlushStrategy.sliding)) + .on("data", (flush: TestObject[]) => { + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; + }) + .on("error", (e: any) => { + t.end(e); + }) + .on("end", () => { + t.end(); + }); + input.forEach(item => { + source.push(item); + }); + source.push(null); +}); + +test.cb("accumulator() sliding with key", t => { + t.plan(6); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const input = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 3, key: "d" }, + { ts: 5, key: "f" }, + { ts: 6, key: "g" }, + ]; + const firstFlush = [{ ts: 0, key: "a" }]; + const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; + const thirdFlush = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + ]; + const fourthFlush = [ + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 3, key: "d" }, + ]; + const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }]; + const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }]; + + const flushes = [ + firstFlush, + secondFlush, + thirdFlush, + fourthFlush, + fifthFlush, + sixthFlush, + ]; + source + .pipe(accumulator(3, undefined, FlushStrategy.sliding, "ts")) + .on("data", (flush: TestObject[]) => { + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; + }) + .on("error", (e: any) => { + t.end(e); + }) + .on("end", () => { + t.end(); + }); + input.forEach(item => { + source.push(item); + }); + source.push(null); +}); + +test.cb("accumulatorBy() sliding", t => { + t.plan(6); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const input = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 3, key: "d" }, + { ts: 5, key: "f" }, + { ts: 6, key: "g" }, + ]; + const firstFlush = [{ ts: 0, key: "a" }]; + const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; + const thirdFlush = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + ]; + const fourthFlush = [ + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 3, key: "d" }, + ]; + const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }]; + const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }]; + + const flushes = [ + firstFlush, + secondFlush, + thirdFlush, + fourthFlush, + fifthFlush, + sixthFlush, + ]; + source + .pipe( + accumulatorBy( + undefined, + FlushStrategy.sliding, + (event: TestObject, bufferChunk: TestObject) => { + return bufferChunk.ts + 3 <= event.ts ? true : false; + }, + ), + ) + .on("data", (flush: TestObject[]) => { + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; + }) + .on("error", (e: any) => { + t.end(e); + }) + .on("end", () => { + t.end(); + }); + input.forEach(item => { + source.push(item); + }); + source.push(null); +}); + +test.cb.only("accumulatorBy() sliding should throw", t => { + t.plan(2); + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const input = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 3, key: "d" }, + ]; + const accumulaterStream = accumulatorBy( + undefined, + FlushStrategy.sliding, + (event: TestObject, bufferChunk: TestObject) => { + if (event.key !== "a" && event.key !== "b") { + throw new Error("Failed mapping"); + } + return bufferChunk.ts + 3 <= event.ts ? true : false; + }, + ); + source + .pipe(accumulaterStream) + .on("error", (err: any) => { + source.pipe(accumulaterStream); + accumulaterStream.resume(); + expect(err.message).to.equal("Failed mapping"); + t.pass(); + }) + .on("end", () => { + t.end(); + }); + + input.forEach(item => { + source.push(item); + }); + source.push(null); +}); diff --git a/src/functions/accumulator/definitions.ts b/src/functions/accumulator/definitions.ts new file mode 100644 index 0000000..bd6ec50 --- /dev/null +++ b/src/functions/accumulator/definitions.ts @@ -0,0 +1,6 @@ +export enum FlushStrategy { + rolling = "rolling", + sliding = "sliding", +} + +export type AccumulatorByIteratee = (event: T, bufferChunk: T) => boolean; diff --git a/src/functions/accumulator/index.ts b/src/functions/accumulator/index.ts new file mode 100644 index 0000000..a801dad --- /dev/null +++ b/src/functions/accumulator/index.ts @@ -0,0 +1,169 @@ +import { Transform } from "stream"; +import { AccumulatorByIteratee, FlushStrategy } from "./definitions"; +import { batch } from "../../index"; + +function _accumulator( + accumulateBy: (data: T, buffer: T[], stream: Transform) => void, + shouldFlush: boolean = true, +) { + const buffer: T[] = []; + return new Transform({ + readableObjectMode: true, + writableObjectMode: true, + transform(data: any, encoding, callback) { + try { + accumulateBy(data, buffer, this); + callback(); + } catch (err) { + callback(err); + } + }, + flush(callback) { + if (shouldFlush) { + this.push(buffer); + } + callback(); + }, + }); +} + +function _sliding( + windowLength: number, + rate: number | undefined, + key?: string, +): (event: T, buffer: T[], stream: Transform) => void { + return (event: T, buffer: T[], stream: Transform) => { + if (key) { + let index = 0; + while ( + index < buffer.length && + buffer[index][key] + windowLength <= event[key] + ) { + index++; + } + buffer.splice(0, index); + } else if (buffer.length === windowLength) { + buffer.shift(); + } + buffer.push(event); + stream.push(buffer); + }; +} + +function _slidingByFunction( + rate: number | undefined, + iteratee: AccumulatorByIteratee, +): (event: T, buffer: T[], stream: Transform) => void { + return (event: T, buffer: T[], stream: Transform) => { + let index = 0; + while (index < buffer.length && iteratee(event, buffer[index])) { + index++; + } + buffer.splice(0, index); + buffer.push(event); + stream.push(buffer); + }; +} + +function _rollingByFunction( + rate: number | undefined, + iteratee: AccumulatorByIteratee, +): (event: T, buffer: T[], stream: Transform) => void { + return (event: T, buffer: T[], stream: Transform) => { + if (iteratee) { + if (buffer.length > 0 && iteratee(event, buffer[0])) { + stream.push(buffer.slice(0)); + buffer.length = 0; + } + } + buffer.push(event); + }; +} + +function _rolling( + windowLength: number, + rate: number | undefined, + key?: string, +): (event: T, buffer: T[], stream: Transform) => void { + return (event: T, buffer: T[], stream: Transform) => { + if (key) { + if (event[key] === undefined) { + stream.emit( + "error", + new Error( + `Key is missing in event: (${key}, ${JSON.stringify( + event, + )})`, + ), + ); + } else if ( + buffer.length > 0 && + buffer[0][key] + windowLength <= event[key] + ) { + stream.push(buffer.slice(0)); + buffer.length = 0; + } + } else if (buffer.length === windowLength) { + stream.push(buffer.slice(0)); + buffer.length = 0; + } + buffer.push(event); + }; +} + +export function accumulator( + batchSize: number, + batchRate: number | undefined, + flushStrategy: FlushStrategy, + keyBy?: string, +): Transform { + if (flushStrategy === FlushStrategy.sliding) { + return sliding(batchSize, batchRate, keyBy); + } else if (flushStrategy === FlushStrategy.rolling) { + return rolling(batchSize, batchRate, keyBy); + } else { + return batch(batchSize, batchRate); + } +} + +export function accumulatorBy( + batchRate: number | undefined, + flushStrategy: S, + iteratee: AccumulatorByIteratee, +): Transform { + if (flushStrategy === FlushStrategy.sliding) { + return slidingBy(batchRate, iteratee); + } else { + return rollingBy(batchRate, iteratee); + } +} + +function sliding( + windowLength: number, + rate: number | undefined, + key?: string, +): Transform { + return _accumulator(_sliding(windowLength, rate, key), false); +} + +function slidingBy( + rate: number | undefined, + iteratee: AccumulatorByIteratee, +): Transform { + return _accumulator(_slidingByFunction(rate, iteratee), false); +} + +function rolling( + windowLength: number, + rate: number | undefined, + key?: string, +): Transform { + return _accumulator(_rolling(windowLength, rate, key)); +} + +function rollingBy( + rate: number | undefined, + iteratee: AccumulatorByIteratee, +): Transform { + return _accumulator(_rollingByFunction(rate, iteratee)); +} diff --git a/src/functions/baseFunctions.ts b/src/functions/baseFunctions.ts new file mode 100644 index 0000000..7117746 --- /dev/null +++ b/src/functions/baseFunctions.ts @@ -0,0 +1,20 @@ +export { accumulator, accumulatorBy } from "./accumulator"; +export { batch } from "./batch"; +export { child } from "./child"; +export { collect } from "./collect"; +export { concat } from "./concat"; +export { duplex } from "./duplex"; +export { filter } from "./filter"; +export { flatMap } from "./flatMap"; +export { fromArray } from "./fromArray"; +export { join } from "./join"; +export { last } from "./last"; +export { map } from "./map"; +export { merge } from "./merge"; +export { parallelMap } from "./parallelMap"; +export { parse } from "./parse"; +export { rate } from "./rate"; +export { reduce } from "./reduce"; +export { split } from "./split"; +export { stringify } from "./stringify"; +export { unbatch } from "./unbatch"; diff --git a/src/functions/batch/index.ts b/src/functions/batch/index.ts new file mode 100644 index 0000000..6ff3d87 --- /dev/null +++ b/src/functions/batch/index.ts @@ -0,0 +1,47 @@ +import { Transform } from "stream"; +import { TransformOptions } from "../definitions"; +/** + * Stores chunks of data internally in array and batches when batchSize is reached. + * + * @param batchSize Size of the batches + * @param maxBatchAge Max lifetime of a batch + */ +export function batch( + batchSize: number = 1000, + maxBatchAge: number = 500, + options: TransformOptions = { + readableObjectMode: true, + writableObjectMode: true, + }, +): Transform { + let buffer: any[] = []; + let timer: NodeJS.Timer | null = null; + const sendChunk = (self: Transform) => { + if (timer) { + clearTimeout(timer); + } + timer = null; + self.push(buffer); + buffer = []; + }; + return new Transform({ + ...options, + transform(chunk, encoding, callback) { + buffer.push(chunk); + if (buffer.length === batchSize) { + sendChunk(this); + } else { + if (timer === null) { + timer = setInterval(() => { + sendChunk(this); + }, maxBatchAge); + } + } + callback(); + }, + flush(callback) { + sendChunk(this); + callback(); + }, + }); +} diff --git a/src/functions/child/index.ts b/src/functions/child/index.ts new file mode 100644 index 0000000..efe4f90 --- /dev/null +++ b/src/functions/child/index.ts @@ -0,0 +1,12 @@ +/** + * Return a Duplex stream from a child process' stdin and stdout + * @param childProcess Child process from which to create duplex stream + */ +export function child(childProcess: ChildProcess) { + if (childProcess.stdin === null) { + throw new Error("childProcess.stdin is null"); + } else if (childProcess.stdout === null) { + throw new Error("childProcess.stdout is null"); + } + return duplex(childProcess.stdin, childProcess.stdout); +} diff --git a/src/functions/collect/index.ts b/src/functions/collect/index.ts new file mode 100644 index 0000000..11ad423 --- /dev/null +++ b/src/functions/collect/index.ts @@ -0,0 +1,26 @@ +import { Transform } from "stream"; +import { ThroughOptions } from "../definitions"; +/** + * Return a ReadWrite stream that collects streamed chunks into an array or buffer + * @param options + * @param options.objectMode Whether this stream should behave as a stream of objects + */ +export function collect( + options: ThroughOptions = { objectMode: false }, +): NodeJS.ReadWriteStream { + const collected: any[] = []; + return new Transform({ + readableObjectMode: options.objectMode, + writableObjectMode: options.objectMode, + transform(data, encoding, callback) { + collected.push(data); + callback(); + }, + flush(callback) { + this.push( + options.objectMode ? collected : Buffer.concat(collected), + ); + callback(); + }, + }); +} diff --git a/src/functions/concat/index.ts b/src/functions/concat/index.ts new file mode 100644 index 0000000..8064b30 --- /dev/null +++ b/src/functions/concat/index.ts @@ -0,0 +1,40 @@ +import { Readable } from "stream"; +/** + * Return a Readable stream of readable streams concatenated together + * @param streams Readable streams to concatenate + */ +export function concat(...streams: Readable[]): Readable { + let isStarted = false; + let currentStreamIndex = 0; + const startCurrentStream = () => { + if (currentStreamIndex >= streams.length) { + wrapper.push(null); + } else { + streams[currentStreamIndex] + .on("data", chunk => { + if (!wrapper.push(chunk)) { + streams[currentStreamIndex].pause(); + } + }) + .on("error", err => wrapper.emit("error", err)) + .on("end", () => { + currentStreamIndex++; + startCurrentStream(); + }); + } + }; + + const wrapper = new Readable({ + objectMode: true, + read() { + if (!isStarted) { + isStarted = true; + startCurrentStream(); + } + if (currentStreamIndex < streams.length) { + streams[currentStreamIndex].resume(); + } + }, + }); + return wrapper; +} diff --git a/src/functions/duplex/index.ts b/src/functions/duplex/index.ts new file mode 100644 index 0000000..2470da1 --- /dev/null +++ b/src/functions/duplex/index.ts @@ -0,0 +1,33 @@ +import { Duplex, Writable, Readable } from "stream"; +/** + * Return a Duplex stream from a writable stream that is assumed to somehow, when written to, + * cause the given readable stream to yield chunks + * @param writable Writable stream assumed to cause the readable stream to yield chunks when written to + * @param readable Readable stream assumed to yield chunks when the writable stream is written to + */ +export function duplex(writable: Writable, readable: Readable) { + const wrapper = new Duplex({ + readableObjectMode: true, + writableObjectMode: true, + read() { + readable.resume(); + }, + write(chunk, encoding, callback) { + return writable.write(chunk, encoding, callback); + }, + final(callback) { + writable.end(callback); + }, + }); + readable + .on("data", chunk => { + if (!wrapper.push(chunk)) { + readable.pause(); + } + }) + .on("error", err => wrapper.emit("error", err)) + .on("end", () => wrapper.push(null)); + writable.on("drain", () => wrapper.emit("drain")); + writable.on("error", err => wrapper.emit("error", err)); + return wrapper; +} diff --git a/src/functions/filter/filter.spec.ts b/src/functions/filter/filter.spec.ts new file mode 100644 index 0000000..a537372 --- /dev/null +++ b/src/functions/filter/filter.spec.ts @@ -0,0 +1,102 @@ +import test from "ava"; +import { expect } from "chai"; +import { Readable } from "stream"; +import { filter } from "."; + +test.cb("filter() filters elements synchronously", t => { + t.plan(2); + const source = new Readable({ objectMode: true }); + const expectedElements = ["a", "c"]; + let i = 0; + source + .pipe(filter((element: string) => element !== "b")) + .on("data", (element: string) => { + expect(element).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); +}); + +test.cb("filter() filters elements asynchronously", t => { + t.plan(2); + const source = new Readable({ objectMode: true }); + const expectedElements = ["a", "c"]; + let i = 0; + source + .pipe( + filter(async (element: string) => { + await Promise.resolve(); + return element !== "b"; + }), + ) + .on("data", (element: string) => { + expect(element).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); +}); + +test.cb("filter() emits errors during synchronous filtering", t => { + t.plan(2); + const source = new Readable({ objectMode: true }); + source + .pipe( + filter((element: string) => { + if (element !== "a") { + throw new Error("Failed filtering"); + } + return true; + }), + ) + .resume() + .on("error", err => { + expect(err.message).to.equal("Failed filtering"); + t.pass(); + }) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); +}); + +test.cb("filter() emits errors during asynchronous filtering", t => { + t.plan(2); + const source = new Readable({ objectMode: true }); + source + .pipe( + filter(async (element: string) => { + await Promise.resolve(); + if (element !== "a") { + throw new Error("Failed filtering"); + } + return true; + }), + ) + .resume() + .on("error", err => { + expect(err.message).to.equal("Failed filtering"); + t.pass(); + }) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); +}); diff --git a/src/functions/filter/index.ts b/src/functions/filter/index.ts new file mode 100644 index 0000000..49e7a05 --- /dev/null +++ b/src/functions/filter/index.ts @@ -0,0 +1,41 @@ +import { Transform } from "stream"; +import { ThroughOptions } from "../definitions"; +/** + * Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold + * @param predicate Predicate with which to filter scream chunks + * @param options + * @param options.objectMode Whether this stream should behave as a stream of objects + */ +export function filter( + predicate: + | ((chunk: T, encoding: string) => boolean) + | ((chunk: T, encoding: string) => Promise), + options: ThroughOptions = { + objectMode: true, + }, +) { + return new Transform({ + readableObjectMode: options.objectMode, + writableObjectMode: options.objectMode, + async transform(chunk: T, encoding, callback) { + let isPromise = false; + try { + const result = predicate(chunk, encoding); + isPromise = result instanceof Promise; + if (!!(await result)) { + callback(undefined, chunk); + } else { + callback(); + } + } catch (err) { + if (isPromise) { + // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly + this.emit("error", err); + callback(); + } else { + callback(err); + } + } + }, + }); +} diff --git a/src/functions/flatMap/flatMap.spec.ts b/src/functions/flatMap/flatMap.spec.ts new file mode 100644 index 0000000..4e6c28d --- /dev/null +++ b/src/functions/flatMap/flatMap.spec.ts @@ -0,0 +1,100 @@ +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { flatMap } from "."; + +test.cb("flatMap() maps elements synchronously", t => { + t.plan(6); + const source = new Readable({ objectMode: true }); + const expectedElements = ["a", "A", "b", "B", "c", "C"]; + let i = 0; + source + .pipe(flatMap((element: string) => [element, element.toUpperCase()])) + .on("data", (element: string) => { + expect(element).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); +}); + +test.cb("flatMap() maps elements asynchronously", t => { + t.plan(6); + const source = new Readable({ objectMode: true }); + const expectedElements = ["a", "A", "b", "B", "c", "C"]; + let i = 0; + source + .pipe( + flatMap(async (element: string) => { + await Promise.resolve(); + return [element, element.toUpperCase()]; + }), + ) + .on("data", (element: string) => { + expect(element).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); +}); + +test.cb("flatMap() emits errors during synchronous mapping", t => { + t.plan(2); + const source = new Readable({ objectMode: true }); + source + .pipe( + flatMap((element: string) => { + if (element !== "a") { + throw new Error("Failed mapping"); + } + return [element, element.toUpperCase()]; + }), + ) + .resume() + .on("error", err => { + expect(err.message).to.equal("Failed mapping"); + t.pass(); + }) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); +}); + +test.cb("flatMap() emits errors during asynchronous mapping", t => { + t.plan(2); + const source = new Readable({ objectMode: true }); + source + .pipe( + flatMap(async (element: string) => { + await Promise.resolve(); + if (element !== "a") { + throw new Error("Failed mapping"); + } + return [element, element.toUpperCase()]; + }), + ) + .resume() + .on("error", err => { + expect(err.message).to.equal("Failed mapping"); + t.pass(); + }) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); +}); diff --git a/src/functions/flatMap/index.ts b/src/functions/flatMap/index.ts new file mode 100644 index 0000000..9e90c04 --- /dev/null +++ b/src/functions/flatMap/index.ts @@ -0,0 +1,39 @@ +import { Transform } from "stream"; +import { TransformOptions } from "../definitions"; +/** + * Return a ReadWrite stream that flat maps streamed chunks + * @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such) + * @param options + * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects + * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects + */ +export function flatMap( + mapper: + | ((chunk: T, encoding: string) => R[]) + | ((chunk: T, encoding: string) => Promise), + options: TransformOptions = { + readableObjectMode: true, + writableObjectMode: true, + }, +): NodeJS.ReadWriteStream { + return new Transform({ + ...options, + async transform(chunk: T, encoding, callback) { + let isPromise = false; + try { + const mapped = mapper(chunk, encoding); + isPromise = mapped instanceof Promise; + (await mapped).forEach(c => this.push(c)); + callback(); + } catch (err) { + if (isPromise) { + // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly + this.emit("error", err); + callback(); + } else { + callback(err); + } + } + }, + }); +} diff --git a/src/functions/fromArray/fromArray.spec.ts b/src/functions/fromArray/fromArray.spec.ts new file mode 100644 index 0000000..b0b9a95 --- /dev/null +++ b/src/functions/fromArray/fromArray.spec.ts @@ -0,0 +1,45 @@ +import test from "ava"; +import { expect } from "chai"; +import { fromArray } from "."; + +test.cb("fromArray() streams array elements in flowing mode", t => { + t.plan(3); + const elements = ["a", "b", "c"]; + const stream = fromArray(elements); + let i = 0; + stream + .on("data", (element: string) => { + expect(element).to.equal(elements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); +}); + +test.cb("fromArray() ends immediately if there are no array elements", t => { + t.plan(0); + fromArray([]) + .on("data", () => t.fail()) + .on("error", t.end) + .on("end", t.end); +}); + +test.cb("fromArray() streams array elements in paused mode", t => { + t.plan(3); + const elements = ["a", "b", "c"]; + const stream = fromArray(elements); + let i = 0; + stream + .on("readable", () => { + let element = stream.read(); + while (element !== null) { + expect(element).to.equal(elements[i]); + t.pass(); + i++; + element = stream.read(); + } + }) + .on("error", t.end) + .on("end", t.end); +}); diff --git a/src/functions/fromArray/index.ts b/src/functions/fromArray/index.ts new file mode 100644 index 0000000..f92654e --- /dev/null +++ b/src/functions/fromArray/index.ts @@ -0,0 +1,19 @@ +import { Readable } from "stream"; +/** + * Convert an array into a Readable stream of its elements + * @param array Array of elements to stream + */ +export function fromArray(array: any[]): NodeJS.ReadableStream { + let cursor = 0; + return new Readable({ + objectMode: true, + read() { + if (cursor < array.length) { + this.push(array[cursor]); + cursor++; + } else { + this.push(null); + } + }, + }); +} diff --git a/src/functions/functions.spec.ts b/src/functions/functions.spec.ts deleted file mode 100644 index f174928..0000000 --- a/src/functions/functions.spec.ts +++ /dev/null @@ -1,1683 +0,0 @@ -import * as cp from "child_process"; -import test from "ava"; -import { expect } from "chai"; -import { performance } from "perf_hooks"; -import { Readable } from "stream"; -import { - fromArray, - map, - flatMap, - filter, - split, - join, - replace, - parse, - stringify, - collect, - concat, - merge, - duplex, - child, - reduce, - last, - batch, - unbatch, - rate, - parallelMap, - accumulator, - accumulatorBy, -} from "."; -import { FlushStrategy } from "./definitions"; -import { sleep } from "../helpers"; - -test.cb("fromArray() streams array elements in flowing mode", t => { - t.plan(3); - const elements = ["a", "b", "c"]; - const stream = fromArray(elements); - let i = 0; - stream - .on("data", (element: string) => { - expect(element).to.equal(elements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); -}); - -test.cb("fromArray() streams array elements in paused mode", t => { - t.plan(3); - const elements = ["a", "b", "c"]; - const stream = fromArray(elements); - let i = 0; - stream - .on("readable", () => { - let element = stream.read(); - while (element !== null) { - expect(element).to.equal(elements[i]); - t.pass(); - i++; - element = stream.read(); - } - }) - .on("error", t.end) - .on("end", t.end); -}); - -test.cb("fromArray() ends immediately if there are no array elements", t => { - t.plan(0); - fromArray([]) - .on("data", () => t.fail()) - .on("error", t.end) - .on("end", t.end); -}); - -test.cb("map() maps elements synchronously", t => { - t.plan(3); - const source = new Readable({ objectMode: true }); - const expectedElements = ["A", "B", "C"]; - let i = 0; - source - .pipe(map((element: string) => element.toUpperCase())) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("map() maps elements asynchronously", t => { - t.plan(3); - const source = new Readable({ objectMode: true }); - const expectedElements = ["A", "B", "C"]; - let i = 0; - source - .pipe( - map(async (element: string) => { - await Promise.resolve(); - return element.toUpperCase(); - }), - ) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("map() emits errors during synchronous mapping", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - source - .pipe( - map((element: string) => { - if (element !== "a") { - throw new Error("Failed mapping"); - } - return element.toUpperCase(); - }), - ) - .resume() - .on("error", err => { - expect(err.message).to.equal("Failed mapping"); - t.pass(); - }) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test("map() emits errors during asynchronous mapping", t => { - t.plan(1); - return new Promise((resolve, reject) => { - const source = new Readable({ objectMode: true }); - source - .pipe( - map(async (element: string) => { - await Promise.resolve(); - if (element !== "a") { - throw new Error("Failed mapping"); - } - return element.toUpperCase(); - }), - ) - .resume() - .on("error", err => { - expect(err.message).to.equal("Failed mapping"); - t.pass(); - resolve(); - }) - .on("end", () => { - t.fail(); - }); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); - }); -}); - -test.cb("flatMap() maps elements synchronously", t => { - t.plan(6); - const source = new Readable({ objectMode: true }); - const expectedElements = ["a", "A", "b", "B", "c", "C"]; - let i = 0; - source - .pipe(flatMap((element: string) => [element, element.toUpperCase()])) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("flatMap() maps elements asynchronously", t => { - t.plan(6); - const source = new Readable({ objectMode: true }); - const expectedElements = ["a", "A", "b", "B", "c", "C"]; - let i = 0; - source - .pipe( - flatMap(async (element: string) => { - await Promise.resolve(); - return [element, element.toUpperCase()]; - }), - ) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("flatMap() emits errors during synchronous mapping", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - source - .pipe( - flatMap((element: string) => { - if (element !== "a") { - throw new Error("Failed mapping"); - } - return [element, element.toUpperCase()]; - }), - ) - .resume() - .on("error", err => { - expect(err.message).to.equal("Failed mapping"); - t.pass(); - }) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("flatMap() emits errors during asynchronous mapping", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - source - .pipe( - flatMap(async (element: string) => { - await Promise.resolve(); - if (element !== "a") { - throw new Error("Failed mapping"); - } - return [element, element.toUpperCase()]; - }), - ) - .resume() - .on("error", err => { - expect(err.message).to.equal("Failed mapping"); - t.pass(); - }) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("filter() filters elements synchronously", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - const expectedElements = ["a", "c"]; - let i = 0; - source - .pipe(filter((element: string) => element !== "b")) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("filter() filters elements asynchronously", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - const expectedElements = ["a", "c"]; - let i = 0; - source - .pipe( - filter(async (element: string) => { - await Promise.resolve(); - return element !== "b"; - }), - ) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("filter() emits errors during synchronous filtering", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - source - .pipe( - filter((element: string) => { - if (element !== "a") { - throw new Error("Failed filtering"); - } - return true; - }), - ) - .resume() - .on("error", err => { - expect(err.message).to.equal("Failed filtering"); - t.pass(); - }) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("filter() emits errors during asynchronous filtering", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - source - .pipe( - filter(async (element: string) => { - await Promise.resolve(); - if (element !== "a") { - throw new Error("Failed filtering"); - } - return true; - }), - ) - .resume() - .on("error", err => { - expect(err.message).to.equal("Failed filtering"); - t.pass(); - }) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("reduce() reduces elements synchronously", t => { - t.plan(1); - const source = new Readable({ objectMode: true }); - const expectedValue = 6; - source - .pipe(reduce((acc: number, element: string) => acc + element.length, 0)) - .on("data", (element: string) => { - expect(element).to.equal(expectedValue); - t.pass(); - }) - .on("error", t.end) - .on("end", t.end); - - source.push("ab"); - source.push("cd"); - source.push("ef"); - source.push(null); -}); - -test.cb("reduce() reduces elements asynchronously", t => { - t.plan(1); - const source = new Readable({ objectMode: true }); - const expectedValue = 6; - source - .pipe( - reduce(async (acc: number, element: string) => { - await Promise.resolve(); - return acc + element.length; - }, 0), - ) - .on("data", (element: string) => { - expect(element).to.equal(expectedValue); - t.pass(); - }) - .on("error", t.end) - .on("end", t.end); - - source.push("ab"); - source.push("cd"); - source.push("ef"); - source.push(null); -}); - -test.cb("reduce() emits errors during synchronous reduce", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - source - .pipe( - reduce((acc: number, element: string) => { - if (element !== "ab") { - throw new Error("Failed reduce"); - } - return acc + element.length; - }, 0), - ) - .resume() - .on("error", err => { - expect(err.message).to.equal("Failed reduce"); - t.pass(); - }) - .on("end", t.end); - - source.push("ab"); - source.push("cd"); - source.push("ef"); - source.push(null); -}); - -test.cb("reduce() emits errors during asynchronous reduce", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - source - .pipe( - reduce(async (acc: number, element: string) => { - await Promise.resolve(); - if (element !== "ab") { - throw new Error("Failed mapping"); - } - return acc + element.length; - }, 0), - ) - .resume() - .on("error", err => { - expect(err.message).to.equal("Failed mapping"); - t.pass(); - }) - .on("end", t.end); - - source.push("ab"); - source.push("cd"); - source.push("ef"); - source.push(null); -}); - -test.cb("split() splits chunks using the default separator (\\n)", t => { - t.plan(5); - const source = new Readable({ objectMode: true }); - const expectedParts = ["ab", "c", "d", "ef", ""]; - let i = 0; - source - .pipe(split()) - .on("data", part => { - expect(part).to.equal(expectedParts[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("ab\n"); - source.push("c"); - source.push("\n"); - source.push("d"); - source.push("\nef\n"); - source.push(null); -}); - -test.cb("split() splits chunks using the specified separator", t => { - t.plan(6); - const source = new Readable({ objectMode: true }); - const expectedParts = ["ab", "c", "d", "e", "f", ""]; - let i = 0; - source - .pipe(split("|")) - .on("data", (part: string) => { - expect(part).to.equal(expectedParts[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("ab|"); - source.push("c|d"); - source.push("|"); - source.push("e"); - source.push("|f|"); - source.push(null); -}); - -test.cb( - "split() splits utf8 encoded buffers using the specified separator", - t => { - t.plan(3); - const expectedElements = ["a", "b", "c"]; - let i = 0; - const through = split(","); - const buf = Buffer.from("a,b,c"); - through - .on("data", element => { - expect(element).to.equal(expectedElements[i]); - i++; - t.pass(); - }) - .on("error", t.end) - .on("end", t.end); - - for (let j = 0; j < buf.length; ++j) { - through.write(buf.slice(j, j + 1)); - } - through.end(); - }, -); - -test.cb( - "split() splits utf8 encoded buffers with multi-byte characters using the specified separator", - t => { - t.plan(3); - const expectedElements = ["一", "一", "一"]; - let i = 0; - const through = split(","); - const buf = Buffer.from("一,一,一"); // Those spaces are multi-byte utf8 characters (code: 4E00) - through - .on("data", element => { - expect(element).to.equal(expectedElements[i]); - i++; - t.pass(); - }) - .on("error", t.end) - .on("end", t.end); - - for (let j = 0; j < buf.length; ++j) { - through.write(buf.slice(j, j + 1)); - } - through.end(); - }, -); - -test.cb("join() joins chunks using the specified separator", t => { - t.plan(9); - const source = new Readable({ objectMode: true }); - const expectedParts = ["ab|", "|", "c|d", "|", "|", "|", "e", "|", "|f|"]; - let i = 0; - source - .pipe(join("|")) - .on("data", part => { - expect(part).to.equal(expectedParts[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("ab|"); - source.push("c|d"); - source.push("|"); - source.push("e"); - source.push("|f|"); - source.push(null); -}); - -test.cb( - "join() joins chunks using the specified separator without breaking up multi-byte characters " + - "spanning multiple chunks", - t => { - t.plan(5); - const source = new Readable({ objectMode: true }); - const expectedParts = ["ø", "|", "ö", "|", "一"]; - let i = 0; - source - .pipe(join("|")) - .on("data", part => { - expect(part).to.equal(expectedParts[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push(Buffer.from("ø").slice(0, 1)); // 2-byte character spanning two chunks - source.push(Buffer.from("ø").slice(1, 2)); - source.push(Buffer.from("ö").slice(0, 1)); // 2-byte character spanning two chunks - source.push(Buffer.from("ö").slice(1, 2)); - source.push(Buffer.from("一").slice(0, 1)); // 3-byte character spanning three chunks - source.push(Buffer.from("一").slice(1, 2)); - source.push(Buffer.from("一").slice(2, 3)); - source.push(null); - }, -); - -test.cb( - "replace() replaces occurrences of the given string in the streamed elements with the specified " + - "replacement string", - t => { - t.plan(3); - const source = new Readable({ objectMode: true }); - const expectedElements = ["abc", "xyf", "ghi"]; - let i = 0; - source - .pipe(replace("de", "xy")) - .on("data", part => { - expect(part).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("abc"); - source.push("def"); - source.push("ghi"); - source.push(null); - }, -); - -test.cb( - "replace() replaces occurrences of the given regular expression in the streamed elements with " + - "the specified replacement string", - t => { - t.plan(3); - const source = new Readable({ objectMode: true }); - const expectedElements = ["abc", "xyz", "ghi"]; - let i = 0; - source - .pipe(replace(/^def$/, "xyz")) - .on("data", part => { - expect(part).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("abc"); - source.push("def"); - source.push("ghi"); - source.push(null); - }, -); - -test.cb( - "replace() replaces occurrences of the given multi-byte character even if it spans multiple chunks", - t => { - t.plan(3); - const source = new Readable({ objectMode: true }); - const expectedElements = ["ø", "O", "a"]; - let i = 0; - source - .pipe(replace("ö", "O")) - .on("data", part => { - expect(part).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push(Buffer.from("ø").slice(0, 1)); // 2-byte character spanning two chunks - source.push(Buffer.from("ø").slice(1, 2)); - source.push(Buffer.from("ö").slice(0, 1)); // 2-byte character spanning two chunks - source.push(Buffer.from("ö").slice(1, 2)); - source.push("a"); - source.push(null); - }, -); - -test.cb("parse() parses the streamed elements as JSON", t => { - t.plan(3); - const source = new Readable({ objectMode: true }); - const expectedElements = ["abc", {}, []]; - let i = 0; - source - .pipe(parse()) - .on("data", part => { - expect(part).to.deep.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push('"abc"'); - source.push("{}"); - source.push("[]"); - source.push(null); -}); - -test.cb("parse() emits errors on invalid JSON", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - source - .pipe(parse()) - .resume() - .on("error", () => t.pass()) - .on("end", t.end); - - source.push("{}"); - source.push({}); - source.push([]); - source.push(null); -}); - -test.cb("stringify() stringifies the streamed elements as JSON", t => { - t.plan(4); - const source = new Readable({ objectMode: true }); - const expectedElements = [ - '"abc"', - "0", - '{"a":"a","b":"b","c":"c"}', - '["a","b","c"]', - ]; - let i = 0; - source - .pipe(stringify()) - .on("data", part => { - expect(part).to.deep.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("abc"); - source.push(0); - source.push({ a: "a", b: "b", c: "c" }); - source.push(["a", "b", "c"]); - source.push(null); -}); - -test.cb( - "stringify() stringifies the streamed elements as pretty-printed JSON", - t => { - t.plan(4); - const source = new Readable({ objectMode: true }); - const expectedElements = [ - '"abc"', - "0", - '{\n "a": "a",\n "b": "b",\n "c": "c"\n}', - '[\n "a",\n "b",\n "c"\n]', - ]; - let i = 0; - source - .pipe(stringify({ pretty: true })) - .on("data", part => { - expect(part).to.deep.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("abc"); - source.push(0); - source.push({ a: "a", b: "b", c: "c" }); - source.push(["a", "b", "c"]); - source.push(null); - }, -); - -test.cb( - "collect() collects streamed elements into an array (object, flowing mode)", - t => { - t.plan(1); - const source = new Readable({ objectMode: true }); - - source - .pipe(collect({ objectMode: true })) - .on("data", collected => { - expect(collected).to.deep.equal(["a", "b", "c"]); - t.pass(); - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); - }, -); - -test.cb( - "collect() collects streamed elements into an array (object, paused mode)", - t => { - t.plan(1); - const source = new Readable({ objectMode: true }); - const collector = source.pipe(collect({ objectMode: true })); - - collector - .on("readable", () => { - let collected = collector.read(); - while (collected !== null) { - expect(collected).to.deep.equal(["a", "b", "c"]); - t.pass(); - collected = collector.read(); - } - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); - }, -); - -test.cb( - "collect() collects streamed bytes into a buffer (non-object, flowing mode)", - t => { - t.plan(1); - const source = new Readable({ objectMode: false }); - - source - .pipe(collect()) - .on("data", collected => { - expect(collected).to.deep.equal(Buffer.from("abc")); - t.pass(); - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); - }, -); - -test.cb( - "collect() collects streamed bytes into a buffer (non-object, paused mode)", - t => { - t.plan(1); - const source = new Readable({ objectMode: false }); - const collector = source.pipe(collect({ objectMode: false })); - collector - .on("readable", () => { - let collected = collector.read(); - while (collected !== null) { - expect(collected).to.deep.equal(Buffer.from("abc")); - t.pass(); - collected = collector.read(); - } - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); - }, -); - -test.cb( - "collect() emits an empty array if the source was empty (object mode)", - t => { - t.plan(1); - const source = new Readable({ objectMode: true }); - const collector = source.pipe(collect({ objectMode: true })); - collector - .on("data", collected => { - expect(collected).to.deep.equal([]); - t.pass(); - }) - .on("error", t.end) - .on("end", t.end); - - source.push(null); - }, -); - -test.cb( - "collect() emits nothing if the source was empty (non-object mode)", - t => { - t.plan(0); - const source = new Readable({ objectMode: false }); - const collector = source.pipe(collect({ objectMode: false })); - collector - .on("data", () => t.fail()) - .on("error", t.end) - .on("end", t.end); - - source.push(null); - }, -); - -test.cb( - "concat() concatenates multiple readable streams (object, flowing mode)", - t => { - t.plan(6); - const source1 = new Readable({ objectMode: true }); - const source2 = new Readable({ objectMode: true }); - const expectedElements = ["a", "b", "c", "d", "e", "f"]; - let i = 0; - concat(source1, source2) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source1.push("a"); - source2.push("d"); - source1.push("b"); - source2.push("e"); - source1.push("c"); - source2.push("f"); - source2.push(null); - source1.push(null); - }, -); - -test.cb( - "concat() concatenates multiple readable streams (object, paused mode)", - t => { - t.plan(6); - const source1 = new Readable({ objectMode: true }); - const source2 = new Readable({ objectMode: true }); - const expectedElements = ["a", "b", "c", "d", "e", "f"]; - let i = 0; - const concatenation = concat(source1, source2) - .on("readable", () => { - let element = concatenation.read(); - while (element !== null) { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - element = concatenation.read(); - } - }) - .on("error", t.end) - .on("end", t.end); - - source1.push("a"); - source2.push("d"); - source1.push("b"); - source2.push("e"); - source1.push("c"); - source2.push("f"); - source2.push(null); - source1.push(null); - }, -); - -test.cb( - "concat() concatenates multiple readable streams (non-object, flowing mode)", - t => { - t.plan(6); - const source1 = new Readable({ objectMode: false }); - const source2 = new Readable({ objectMode: false }); - const expectedElements = ["a", "b", "c", "d", "e", "f"]; - let i = 0; - concat(source1, source2) - .on("data", (element: string) => { - expect(element).to.deep.equal(Buffer.from(expectedElements[i])); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source1.push("a"); - source2.push("d"); - source1.push("b"); - source2.push("e"); - source1.push("c"); - source2.push("f"); - source2.push(null); - source1.push(null); - }, -); - -test.cb( - "concat() concatenates multiple readable streams (non-object, paused mode)", - t => { - t.plan(6); - const source1 = new Readable({ objectMode: false, read: () => ({}) }); - const source2 = new Readable({ objectMode: false, read: () => ({}) }); - const expectedElements = ["a", "b", "c", "d", "e", "f"]; - let i = 0; - const concatenation = concat(source1, source2) - .on("readable", () => { - let element = concatenation.read(); - while (element !== null) { - expect(element).to.deep.equal( - Buffer.from(expectedElements[i]), - ); - t.pass(); - i++; - element = concatenation.read(); - } - }) - .on("error", t.end) - .on("end", t.end); - - source1.push("a"); - setTimeout(() => source2.push("d"), 10); - setTimeout(() => source1.push("b"), 20); - setTimeout(() => source2.push("e"), 30); - setTimeout(() => source1.push("c"), 40); - setTimeout(() => source2.push("f"), 50); - setTimeout(() => source2.push(null), 60); - setTimeout(() => source1.push(null), 70); - }, -); - -test.cb("concat() concatenates a single readable stream (object mode)", t => { - t.plan(3); - const source = new Readable({ objectMode: true }); - const expectedElements = ["a", "b", "c", "d", "e", "f"]; - let i = 0; - concat(source) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb( - "concat() concatenates a single readable stream (non-object mode)", - t => { - t.plan(3); - const source = new Readable({ objectMode: false }); - const expectedElements = ["a", "b", "c", "d", "e", "f"]; - let i = 0; - concat(source) - .on("data", (element: string) => { - expect(element).to.deep.equal(Buffer.from(expectedElements[i])); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); - }, -); - -test.cb("concat() concatenates empty list of readable streams", t => { - t.plan(0); - concat() - .pipe(collect()) - .on("data", _ => { - t.fail(); - }) - .on("error", t.end) - .on("end", t.end); -}); - -test.cb( - "merge() merges multiple readable streams in chunk arrival order", - t => { - t.plan(6); - const source1 = new Readable({ objectMode: true, read: () => ({}) }); - const source2 = new Readable({ objectMode: true, read: () => ({}) }); - const expectedElements = ["a", "d", "b", "e", "c", "f"]; - let i = 0; - merge(source1, source2) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source1.push("a"); - setTimeout(() => source2.push("d"), 10); - setTimeout(() => source1.push("b"), 20); - setTimeout(() => source2.push("e"), 30); - setTimeout(() => source1.push("c"), 40); - setTimeout(() => source2.push("f"), 50); - setTimeout(() => source2.push(null), 60); - setTimeout(() => source1.push(null), 70); - }, -); - -test.cb("merge() merges a readable stream", t => { - t.plan(3); - const source = new Readable({ objectMode: true, read: () => ({}) }); - const expectedElements = ["a", "b", "c"]; - let i = 0; - merge(source) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("merge() merges an empty list of readable streams", t => { - t.plan(0); - merge() - .on("data", () => t.pass()) - .on("error", t.end) - .on("end", t.end); -}); - -test.cb( - "duplex() combines a writable and readable stream into a ReadWrite stream", - t => { - t.plan(1); - const source = new Readable(); - const catProcess = cp.exec("cat"); - let out = ""; - source - .pipe(duplex(catProcess.stdin, catProcess.stdout)) - .on("data", chunk => (out += chunk)) - .on("error", t.end) - .on("end", () => { - expect(out).to.equal("abcdef"); - t.pass(); - t.end(); - }); - source.push("ab"); - source.push("cd"); - source.push("ef"); - source.push(null); - }, -); - -test.cb( - "child() allows easily writing to child process stdin and reading from its stdout", - t => { - t.plan(1); - const source = new Readable(); - const catProcess = cp.exec("cat"); - let out = ""; - source - .pipe(child(catProcess)) - .on("data", chunk => (out += chunk)) - .on("error", t.end) - .on("end", () => { - expect(out).to.equal("abcdef"); - t.pass(); - t.end(); - }); - source.push("ab"); - source.push("cd"); - source.push("ef"); - source.push(null); - }, -); - -test("last() resolves to the last chunk streamed by the given readable stream", async t => { - const source = new Readable({ objectMode: true }); - const lastPromise = last(source); - source.push("ab"); - source.push("cd"); - source.push("ef"); - source.push(null); - const lastChunk = await lastPromise; - expect(lastChunk).to.equal("ef"); -}); - -test.cb("batch() batches chunks together", t => { - t.plan(3); - const source = new Readable({ objectMode: true }); - const expectedElements = [["a", "b", "c"], ["d", "e", "f"], ["g"]]; - let i = 0; - source - .pipe(batch(3)) - .on("data", (element: string[]) => { - expect(element).to.deep.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push("d"); - source.push("e"); - source.push("f"); - source.push("g"); - source.push(null); -}); - -test.cb("batch() yields a batch after the timeout", t => { - t.plan(3); - const source = new Readable({ - objectMode: true, - read(size: number) {}, - }); - const expectedElements = [["a", "b"], ["c"], ["d"]]; - let i = 0; - source - .pipe(batch(3)) - .on("data", (element: string[]) => { - console.error("DATA", element); - expect(element).to.deep.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.fail) - .on("end", t.end); - - source.push("a"); - source.push("b"); - setTimeout(() => { - source.push("c"); - }, 600); - setTimeout(() => { - source.push("d"); - source.push(null); - }, 600 * 2); -}); - -test.cb("unbatch() unbatches", t => { - t.plan(3); - const source = new Readable({ objectMode: true }); - const expectedElements = ["a", "b", "c"]; - let i = 0; - source - .pipe(batch(3)) - .pipe(unbatch()) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("rate() sends data at desired rate", t => { - t.plan(9); - const fastRate = 150; - const medRate = 50; - const slowRate = 1; - const sourceFast = new Readable({ objectMode: true }); - const sourceMed = new Readable({ objectMode: true }); - const sourceSlow = new Readable({ objectMode: true }); - const expectedElements = ["a", "b", "c"]; - const start = performance.now(); - let i = 0; - let j = 0; - let k = 0; - - sourceFast - .pipe(rate(fastRate)) - .on("data", (element: string[]) => { - const currentRate = (i / (performance.now() - start)) * 1000; - expect(element).to.deep.equal(expectedElements[i]); - expect(currentRate).lessThan(fastRate); - t.pass(); - i++; - }) - .on("error", t.end); - - sourceMed - .pipe(rate(medRate)) - .on("data", (element: string[]) => { - const currentRate = (j / (performance.now() - start)) * 1000; - expect(element).to.deep.equal(expectedElements[j]); - expect(currentRate).lessThan(medRate); - t.pass(); - j++; - }) - .on("error", t.end); - - sourceSlow - .pipe(rate(slowRate, 1)) - .on("data", (element: string[]) => { - const currentRate = (k / (performance.now() - start)) * 1000; - expect(element).to.deep.equal(expectedElements[k]); - expect(currentRate).lessThan(slowRate); - t.pass(); - k++; - }) - .on("error", t.end) - .on("end", t.end); - - sourceFast.push("a"); - sourceFast.push("b"); - sourceFast.push("c"); - sourceFast.push(null); - sourceMed.push("a"); - sourceMed.push("b"); - sourceMed.push("c"); - sourceMed.push(null); - sourceSlow.push("a"); - sourceSlow.push("b"); - sourceSlow.push("c"); - sourceSlow.push(null); -}); - -test.cb("parallel() parallel mapping", t => { - t.plan(6); - const offset = 50; - const source = new Readable({ objectMode: true }); - const expectedElements = [ - "a_processed", - "b_processed", - "c_processed", - "d_processed", - "e_processed", - "f_processed", - ]; - interface IPerfData { - start: number; - output?: string; - finish?: number; - } - const orderedResults: IPerfData[] = []; - source - .pipe( - parallelMap(async (data: any) => { - const perfData: IPerfData = { start: performance.now() }; - const c = data + "_processed"; - perfData.output = c; - await sleep(offset); - perfData.finish = performance.now(); - orderedResults.push(perfData); - return c; - }, 2), - ) - .on("data", (element: string) => { - t.true(expectedElements.includes(element)); - }) - .on("error", t.end) - .on("end", async () => { - expect(orderedResults[0].finish).to.be.lessThan( - orderedResults[2].start, - ); - expect(orderedResults[1].finish).to.be.lessThan( - orderedResults[3].start, - ); - expect(orderedResults[2].finish).to.be.lessThan( - orderedResults[4].start, - ); - expect(orderedResults[3].finish).to.be.lessThan( - orderedResults[5].start, - ); - expect(orderedResults[0].start).to.be.lessThan( - orderedResults[2].start + offset, - ); - expect(orderedResults[1].start).to.be.lessThan( - orderedResults[3].start + offset, - ); - expect(orderedResults[2].start).to.be.lessThan( - orderedResults[4].start + offset, - ); - expect(orderedResults[3].start).to.be.lessThan( - orderedResults[5].start + offset, - ); - t.end(); - }); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push("d"); - source.push("e"); - source.push("f"); - source.push(null); -}); - -test.cb("accumulator() rolling", t => { - t.plan(3); - let chunkIndex = 0; - interface TestObject { - ts: number; - key: string; - } - const source = new Readable({ objectMode: true }); - const firstFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; - const secondFlush = [{ ts: 2, key: "d" }, { ts: 3, key: "e" }]; - const thirdFlush = [{ ts: 4, key: "f" }]; - const flushes = [firstFlush, secondFlush, thirdFlush]; - - source - .pipe(accumulator(2, undefined, FlushStrategy.rolling)) - .on("data", (flush: TestObject[]) => { - t.deepEqual(flush, flushes[chunkIndex]); - chunkIndex++; - }) - .on("error", (e: any) => { - t.end(e); - }) - .on("end", () => { - t.end(); - }); - [...firstFlush, ...secondFlush, ...thirdFlush].forEach(item => { - source.push(item); - }); - source.push(null); -}); - -test.cb("accumulator() rolling with key", t => { - t.plan(2); - let chunkIndex = 0; - interface TestObject { - ts: number; - key: string; - } - const source = new Readable({ objectMode: true }); - const firstFlush = [ - { ts: 0, key: "a" }, - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - { ts: 2, key: "d" }, - ]; - const secondFlush = [{ ts: 3, key: "e" }]; - const flushes = [firstFlush, secondFlush]; - - source - .pipe(accumulator(3, undefined, FlushStrategy.rolling, "ts")) - .on("data", (flush: TestObject[]) => { - t.deepEqual(flush, flushes[chunkIndex]); - chunkIndex++; - }) - .on("error", (e: any) => { - t.end(e); - }) - .on("end", () => { - t.end(); - }); - [...firstFlush, ...secondFlush].forEach(item => { - source.push(item); - }); - source.push(null); -}); - -test.cb("accumulatorBy() rolling", t => { - t.plan(2); - let chunkIndex = 0; - interface TestObject { - ts: number; - key: string; - } - const source = new Readable({ objectMode: true }); - const firstFlush = [ - { ts: 0, key: "a" }, - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - { ts: 2, key: "d" }, - ]; - const secondFlush = [{ ts: 3, key: "e" }]; - const flushes = [firstFlush, secondFlush]; - - source - .pipe( - accumulatorBy( - undefined, - FlushStrategy.rolling, - (event: TestObject, bufferChunk: TestObject) => { - return bufferChunk.ts + 3 <= event.ts; - }, - ), - ) - .on("data", (flush: TestObject[]) => { - t.deepEqual(flush, flushes[chunkIndex]); - chunkIndex++; - }) - .on("error", (e: any) => { - t.end(e); - }) - .on("end", () => { - t.end(); - }); - [...firstFlush, ...secondFlush].forEach(item => { - source.push(item); - }); - source.push(null); -}); - -test.cb("accumulator() sliding", t => { - t.plan(4); - let chunkIndex = 0; - interface TestObject { - ts: number; - key: string; - } - const source = new Readable({ objectMode: true }); - const input = [ - { ts: 0, key: "a" }, - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - { ts: 4, key: "d" }, - ]; - const firstFlush = [{ ts: 0, key: "a" }]; - const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; - const thirdFlush = [ - { ts: 0, key: "a" }, - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - ]; - const fourthFlush = [ - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - { ts: 4, key: "d" }, - ]; - - const flushes = [firstFlush, secondFlush, thirdFlush, fourthFlush]; - source - .pipe(accumulator(3, undefined, FlushStrategy.sliding)) - .on("data", (flush: TestObject[]) => { - t.deepEqual(flush, flushes[chunkIndex]); - chunkIndex++; - }) - .on("error", (e: any) => { - t.end(e); - }) - .on("end", () => { - t.end(); - }); - input.forEach(item => { - source.push(item); - }); - source.push(null); -}); - -test.cb("accumulator() sliding with key", t => { - t.plan(6); - let chunkIndex = 0; - interface TestObject { - ts: number; - key: string; - } - const source = new Readable({ objectMode: true }); - const input = [ - { ts: 0, key: "a" }, - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - { ts: 3, key: "d" }, - { ts: 5, key: "f" }, - { ts: 6, key: "g" }, - ]; - const firstFlush = [{ ts: 0, key: "a" }]; - const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; - const thirdFlush = [ - { ts: 0, key: "a" }, - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - ]; - const fourthFlush = [ - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - { ts: 3, key: "d" }, - ]; - const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }]; - const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }]; - - const flushes = [ - firstFlush, - secondFlush, - thirdFlush, - fourthFlush, - fifthFlush, - sixthFlush, - ]; - source - .pipe(accumulator(3, undefined, FlushStrategy.sliding, "ts")) - .on("data", (flush: TestObject[]) => { - t.deepEqual(flush, flushes[chunkIndex]); - chunkIndex++; - }) - .on("error", (e: any) => { - t.end(e); - }) - .on("end", () => { - t.end(); - }); - input.forEach(item => { - source.push(item); - }); - source.push(null); -}); - -test.cb("accumulatorBy() sliding", t => { - t.plan(6); - let chunkIndex = 0; - interface TestObject { - ts: number; - key: string; - } - const source = new Readable({ objectMode: true }); - const input = [ - { ts: 0, key: "a" }, - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - { ts: 3, key: "d" }, - { ts: 5, key: "f" }, - { ts: 6, key: "g" }, - ]; - const firstFlush = [{ ts: 0, key: "a" }]; - const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; - const thirdFlush = [ - { ts: 0, key: "a" }, - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - ]; - const fourthFlush = [ - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - { ts: 3, key: "d" }, - ]; - const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }]; - const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }]; - - const flushes = [ - firstFlush, - secondFlush, - thirdFlush, - fourthFlush, - fifthFlush, - sixthFlush, - ]; - source - .pipe( - accumulatorBy( - undefined, - FlushStrategy.sliding, - (event: TestObject, bufferChunk: TestObject) => { - return bufferChunk.ts + 3 <= event.ts ? true : false; - }, - ), - ) - .on("data", (flush: TestObject[]) => { - t.deepEqual(flush, flushes[chunkIndex]); - chunkIndex++; - }) - .on("error", (e: any) => { - t.end(e); - }) - .on("end", () => { - t.end(); - }); - input.forEach(item => { - source.push(item); - }); - source.push(null); -}); diff --git a/src/functions/functions.ts b/src/functions/functions.ts deleted file mode 100644 index 49eea1f..0000000 --- a/src/functions/functions.ts +++ /dev/null @@ -1,765 +0,0 @@ -import { Transform, Readable, Writable, Duplex } from "stream"; -import { performance } from "perf_hooks"; -import { ChildProcess } from "child_process"; -import { StringDecoder } from "string_decoder"; -import { - TransformOptions, - ThroughOptions, - WithEncoding, - SerializationFormats, - JsonValue, - JsonParseOptions, - FlushStrategy, - AccumulatorByIteratee, -} from "./definitions"; -import { sleep } from "../helpers"; - -/** - * Convert an array into a Readable stream of its elements - * @param array Array of elements to stream - */ -export function fromArray(array: any[]): NodeJS.ReadableStream { - let cursor = 0; - return new Readable({ - objectMode: true, - read() { - if (cursor < array.length) { - this.push(array[cursor]); - cursor++; - } else { - this.push(null); - } - }, - }); -} - -/** - * Return a ReadWrite stream that maps streamed chunks - * @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such) - * @param options - * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects - * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects - */ -export function map( - mapper: (chunk: T, encoding: string) => R, - options: TransformOptions = { - readableObjectMode: true, - writableObjectMode: true, - }, -): NodeJS.ReadWriteStream { - return new Transform({ - ...options, - async transform(chunk: T, encoding, callback) { - try { - const mapped = await mapper(chunk, encoding); - this.push(mapped); - callback(); - } catch (err) { - callback(err); - } - }, - }); -} - -/** - * Return a ReadWrite stream that flat maps streamed chunks - * @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such) - * @param options - * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects - * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects - */ -export function flatMap( - mapper: - | ((chunk: T, encoding: string) => R[]) - | ((chunk: T, encoding: string) => Promise), - options: TransformOptions = { - readableObjectMode: true, - writableObjectMode: true, - }, -): NodeJS.ReadWriteStream { - return new Transform({ - ...options, - async transform(chunk: T, encoding, callback) { - let isPromise = false; - try { - const mapped = mapper(chunk, encoding); - isPromise = mapped instanceof Promise; - (await mapped).forEach(c => this.push(c)); - callback(); - } catch (err) { - if (isPromise) { - // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly - this.emit("error", err); - callback(); - } else { - callback(err); - } - } - }, - }); -} - -/** - * Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold - * @param predicate Predicate with which to filter scream chunks - * @param options - * @param options.objectMode Whether this stream should behave as a stream of objects - */ -export function filter( - predicate: - | ((chunk: T, encoding: string) => boolean) - | ((chunk: T, encoding: string) => Promise), - options: ThroughOptions = { - objectMode: true, - }, -) { - return new Transform({ - readableObjectMode: options.objectMode, - writableObjectMode: options.objectMode, - async transform(chunk: T, encoding, callback) { - let isPromise = false; - try { - const result = predicate(chunk, encoding); - isPromise = result instanceof Promise; - if (!!(await result)) { - callback(undefined, chunk); - } else { - callback(); - } - } catch (err) { - if (isPromise) { - // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly - this.emit("error", err); - callback(); - } else { - callback(err); - } - } - }, - }); -} - -/** - * Return a ReadWrite stream that reduces streamed chunks down to a single value and yield that - * value - * @param iteratee Reducer function to apply on each streamed chunk - * @param initialValue Initial value - * @param options - * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects - * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects - */ -export function reduce( - iteratee: - | ((previousValue: R, chunk: T, encoding: string) => R) - | ((previousValue: R, chunk: T, encoding: string) => Promise), - initialValue: R, - options: TransformOptions = { - readableObjectMode: true, - writableObjectMode: true, - }, -) { - let value = initialValue; - return new Transform({ - readableObjectMode: options.readableObjectMode, - writableObjectMode: options.writableObjectMode, - async transform(chunk: T, encoding, callback) { - let isPromise = false; - try { - const result = iteratee(value, chunk, encoding); - isPromise = result instanceof Promise; - value = await result; - callback(); - } catch (err) { - if (isPromise) { - // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly - this.emit("error", err); - callback(); - } else { - callback(err); - } - } - }, - flush(callback) { - // Best effort attempt at yielding the final value (will throw if e.g. yielding an object and - // downstream doesn't expect objects) - try { - callback(undefined, value); - } catch (err) { - try { - this.emit("error", err); - } catch { - // Best effort was made - } - } - }, - }); -} - -/** - * Return a ReadWrite stream that splits streamed chunks using the given separator - * @param separator Separator to split by, defaulting to "\n" - * @param options - * @param options.encoding Encoding written chunks are assumed to use - */ -export function split( - separator: string | RegExp = "\n", - options: WithEncoding = { encoding: "utf8" }, -): NodeJS.ReadWriteStream { - let buffered = ""; - const decoder = new StringDecoder(options.encoding); - - return new Transform({ - readableObjectMode: true, - transform(chunk: Buffer, encoding, callback) { - const asString = decoder.write(chunk); - const splitted = asString.split(separator); - if (splitted.length > 1) { - splitted[0] = buffered.concat(splitted[0]); - buffered = ""; - } - buffered += splitted[splitted.length - 1]; - splitted.slice(0, -1).forEach((part: string) => this.push(part)); - callback(); - }, - flush(callback) { - callback(undefined, buffered + decoder.end()); - }, - }); -} - -/** - * Return a ReadWrite stream that joins streamed chunks using the given separator - * @param separator Separator to join with - * @param options - * @param options.encoding Encoding written chunks are assumed to use - */ -export function join( - separator: string, - options: WithEncoding = { encoding: "utf8" }, -): NodeJS.ReadWriteStream { - let isFirstChunk = true; - const decoder = new StringDecoder(options.encoding); - return new Transform({ - readableObjectMode: true, - async transform(chunk: Buffer, encoding, callback) { - const asString = decoder.write(chunk); - // Take care not to break up multi-byte characters spanning multiple chunks - if (asString !== "" || chunk.length === 0) { - if (!isFirstChunk) { - this.push(separator); - } - this.push(asString); - isFirstChunk = false; - } - callback(); - }, - }); -} - -/** - * Return a ReadWrite stream that replaces occurrences of the given string or regular expression in - * the streamed chunks with the specified replacement string - * @param searchValue Search string to use - * @param replaceValue Replacement string to use - * @param options - * @param options.encoding Encoding written chunks are assumed to use - */ -export function replace( - searchValue: string | RegExp, - replaceValue: string, - options: WithEncoding = { encoding: "utf8" }, -): NodeJS.ReadWriteStream { - const decoder = new StringDecoder(options.encoding); - return new Transform({ - readableObjectMode: true, - transform(chunk: Buffer, encoding, callback) { - const asString = decoder.write(chunk); - // Take care not to break up multi-byte characters spanning multiple chunks - if (asString !== "" || chunk.length === 0) { - callback( - undefined, - asString.replace(searchValue, replaceValue), - ); - } else { - callback(); - } - }, - }); -} - -/** - * Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk - * must be a fully defined JSON string. - * @param format Format of serialized data, only utf8 supported. - */ -export function parse( - format: SerializationFormats = SerializationFormats.utf8, -): NodeJS.ReadWriteStream { - const decoder = new StringDecoder(format); - return new Transform({ - readableObjectMode: true, - writableObjectMode: true, - async transform(chunk: Buffer, encoding, callback) { - try { - const asString = decoder.write(chunk); - // Using await causes parsing errors to be emitted - callback(undefined, await JSON.parse(asString)); - } catch (err) { - callback(err); - } - }, - }); -} - -/** - * Return a ReadWrite stream that stringifies the streamed chunks to JSON - */ -export function stringify( - options: JsonParseOptions = { pretty: false }, -): NodeJS.ReadWriteStream { - return new Transform({ - readableObjectMode: true, - writableObjectMode: true, - transform(chunk: JsonValue, encoding, callback) { - callback( - undefined, - options.pretty - ? JSON.stringify(chunk, null, 2) - : JSON.stringify(chunk), - ); - }, - }); -} - -/** - * Return a ReadWrite stream that collects streamed chunks into an array or buffer - * @param options - * @param options.objectMode Whether this stream should behave as a stream of objects - */ -export function collect( - options: ThroughOptions = { objectMode: false }, -): NodeJS.ReadWriteStream { - const collected: any[] = []; - return new Transform({ - readableObjectMode: options.objectMode, - writableObjectMode: options.objectMode, - transform(data, encoding, callback) { - collected.push(data); - callback(); - }, - flush(callback) { - this.push( - options.objectMode ? collected : Buffer.concat(collected), - ); - callback(); - }, - }); -} - -/** - * Return a Readable stream of readable streams concatenated together - * @param streams Readable streams to concatenate - */ -export function concat( - ...streams: NodeJS.ReadableStream[] -): NodeJS.ReadableStream { - let isStarted = false; - let currentStreamIndex = 0; - const startCurrentStream = () => { - if (currentStreamIndex >= streams.length) { - wrapper.push(null); - } else { - streams[currentStreamIndex] - .on("data", chunk => { - if (!wrapper.push(chunk)) { - streams[currentStreamIndex].pause(); - } - }) - .on("error", err => wrapper.emit("error", err)) - .on("end", () => { - currentStreamIndex++; - startCurrentStream(); - }); - } - }; - - const wrapper = new Readable({ - objectMode: true, - read() { - if (!isStarted) { - isStarted = true; - startCurrentStream(); - } - if (currentStreamIndex < streams.length) { - streams[currentStreamIndex].resume(); - } - }, - }); - return wrapper; -} - -/** - * Return a Readable stream of readable streams merged together in chunk arrival order - * @param streams Readable streams to merge - */ -export function merge( - ...streams: NodeJS.ReadableStream[] -): NodeJS.ReadableStream { - let isStarted = false; - let streamEndedCount = 0; - return new Readable({ - objectMode: true, - read() { - if (streamEndedCount >= streams.length) { - this.push(null); - } else if (!isStarted) { - isStarted = true; - streams.forEach(stream => - stream - .on("data", chunk => { - if (!this.push(chunk)) { - streams.forEach(s => s.pause()); - } - }) - .on("error", err => this.emit("error", err)) - .on("end", () => { - streamEndedCount++; - if (streamEndedCount === streams.length) { - this.push(null); - } - }), - ); - } else { - streams.forEach(s => s.resume()); - } - }, - }); -} - -/** - * Return a Duplex stream from a writable stream that is assumed to somehow, when written to, - * cause the given readable stream to yield chunks - * @param writable Writable stream assumed to cause the readable stream to yield chunks when written to - * @param readable Readable stream assumed to yield chunks when the writable stream is written to - */ -export function duplex(writable: Writable, readable: Readable) { - const wrapper = new Duplex({ - readableObjectMode: true, - writableObjectMode: true, - read() { - readable.resume(); - }, - write(chunk, encoding, callback) { - return writable.write(chunk, encoding, callback); - }, - final(callback) { - writable.end(callback); - }, - }); - readable - .on("data", chunk => { - if (!wrapper.push(chunk)) { - readable.pause(); - } - }) - .on("error", err => wrapper.emit("error", err)) - .on("end", () => wrapper.push(null)); - writable.on("drain", () => wrapper.emit("drain")); - writable.on("error", err => wrapper.emit("error", err)); - return wrapper; -} - -/** - * Return a Duplex stream from a child process' stdin and stdout - * @param childProcess Child process from which to create duplex stream - */ -export function child(childProcess: ChildProcess) { - return duplex(childProcess.stdin, childProcess.stdout); -} - -/** - * Return a Promise resolving to the last streamed chunk of the given readable stream, after it has - * ended - * @param readable Readable stream to wait on - */ -export function last(readable: Readable): Promise { - let lastChunk: T | null = null; - return new Promise((resolve, reject) => { - readable - .on("data", chunk => (lastChunk = chunk)) - .on("end", () => resolve(lastChunk)); - }); -} - -/** - * Stores chunks of data internally in array and batches when batchSize is reached. - * - * @param batchSize Size of the batches - * @param maxBatchAge Max lifetime of a batch - */ -export function batch(batchSize: number = 1000, maxBatchAge: number = 500) { - let buffer: any[] = []; - let timer: NodeJS.Timer | null = null; - let sendChunk = (self: Transform) => { - timer && clearTimeout(timer); - timer = null; - self.push(buffer); - buffer = []; - }; - return new Transform({ - objectMode: true, - transform(chunk, encoding, callback) { - buffer.push(chunk); - if (buffer.length === batchSize) { - sendChunk(this); - } else { - if (timer === null) { - timer = setInterval(() => { - sendChunk(this); - }, maxBatchAge); - } - } - callback(); - }, - flush(callback) { - sendChunk(this); - callback(); - }, - }); -} - -/** - * Unbatches and sends individual chunks of data - */ -export function unbatch() { - return new Transform({ - objectMode: true, - transform(data, encoding, callback) { - for (const d of data) { - this.push(d); - } - callback(); - }, - }); -} - -/** - * Limits date of data transferred into stream. - * @param targetRate Desired rate in ms - * @param period Period to sleep for when rate is above or equal to targetRate - */ -export function rate(targetRate: number = 50, period: number = 2) { - const deltaMS = ((1 / targetRate) * 1000) / period; // Skip half a period - let total = 0; - const start = performance.now(); - return new Transform({ - objectMode: true, - async transform(data, encoding, callback) { - const currentRate = (total / (performance.now() - start)) * 1000; - if (targetRate && currentRate > targetRate) { - await sleep(deltaMS); - } - total += 1; - callback(undefined, data); - }, - }); -} - -/** - * Limits number of parallel processes in flight. - * @param parallel Max number of parallel processes. - * @param func Function to execute on each data chunk - * @param pause Amount of time to pause processing when max number of parallel processes are executing. - */ -export function parallelMap( - mapper: (data: T) => R, - parallel: number = 10, - sleepTime: number = 5, -) { - let inflight = 0; - return new Transform({ - objectMode: true, - async transform(data, encoding, callback) { - while (parallel <= inflight) { - await sleep(sleepTime); - } - inflight += 1; - callback(); - try { - const res = await mapper(data); - this.push(res); - } catch (e) { - this.emit(e); - } finally { - inflight -= 1; - } - }, - async flush(callback) { - while (inflight > 0) { - await sleep(sleepTime); - } - callback(); - }, - }); -} - -function _accumulator( - accumulateBy: (data: T, buffer: T[], stream: Transform) => void, - shouldFlush: boolean = true, -) { - const buffer: T[] = []; - return new Transform({ - objectMode: true, - async transform(data: any, encoding, callback) { - accumulateBy(data, buffer, this); - callback(); - }, - flush(callback) { - if (shouldFlush) { - this.push(buffer); - } - callback(); - }, - }); -} - -function _sliding( - windowLength: number, - rate: number | undefined, - key?: string, -): (event: T, buffer: T[], stream: Transform) => void { - return (event: T, buffer: T[], stream: Transform) => { - if (key) { - let index = 0; - while ( - index < buffer.length && - buffer[index][key] + windowLength <= event[key] - ) { - index++; - } - buffer.splice(0, index); - } else if (buffer.length === windowLength) { - buffer.shift(); - } - buffer.push(event); - stream.push(buffer); - }; -} - -function _slidingByFunction( - rate: number | undefined, - iteratee: AccumulatorByIteratee, -): (event: T, buffer: T[], stream: Transform) => void { - return (event: T, buffer: T[], stream: Transform) => { - let index = 0; - while (index < buffer.length && iteratee(event, buffer[index])) { - index++; - } - buffer.splice(0, index); - buffer.push(event); - stream.push(buffer); - }; -} - -function _rollingByFunction( - rate: number | undefined, - iteratee: AccumulatorByIteratee, -): (event: T, buffer: T[], stream: Transform) => void { - return (event: T, buffer: T[], stream: Transform) => { - if (iteratee) { - if (buffer.length > 0 && iteratee(event, buffer[0])) { - stream.push(buffer.slice(0)); - buffer.length = 0; - } - } - buffer.push(event); - }; -} - -function _rolling( - windowLength: number, - rate: number | undefined, - key?: string, -): (event: T, buffer: T[], stream: Transform) => void { - return (event: T, buffer: T[], stream: Transform) => { - if (key) { - if (event[key] === undefined) { - stream.emit( - "error", - new Error( - `Key is missing in event: (${key}, ${JSON.stringify( - event, - )})`, - ), - ); - } else if ( - buffer.length > 0 && - buffer[0][key] + windowLength <= event[key] - ) { - stream.push(buffer.slice(0)); - buffer.length = 0; - } - } else if (buffer.length === windowLength) { - stream.push(buffer.slice(0)); - buffer.length = 0; - } - buffer.push(event); - }; -} - -export function accumulator( - batchSize: number, - batchRate: number | undefined, - flushStrategy: FlushStrategy, - keyBy?: string, -): Transform { - if (flushStrategy === FlushStrategy.sliding) { - return sliding(batchSize, batchRate, keyBy); - } else if (flushStrategy === FlushStrategy.rolling) { - return rolling(batchSize, batchRate, keyBy); - } else { - return batch(batchSize, batchRate); - } -} - -export function accumulatorBy( - batchRate: number | undefined, - flushStrategy: S, - iteratee: AccumulatorByIteratee, -): Transform { - if (flushStrategy === FlushStrategy.sliding) { - return slidingBy(batchRate, iteratee); - } else { - return rollingBy(batchRate, iteratee); - } -} - -export function sliding( - windowLength: number, - rate: number | undefined, - key?: string, -): Transform { - return _accumulator(_sliding(windowLength, rate, key), false); -} - -export function slidingBy( - rate: number | undefined, - iteratee: AccumulatorByIteratee, -): Transform { - return _accumulator(_slidingByFunction(rate, iteratee), false); -} - -export function rolling( - windowLength: number, - rate: number | undefined, - key?: string, -): Transform { - return _accumulator(_rolling(windowLength, rate, key)); -} - -export function rollingBy( - rate: number | undefined, - iteratee: AccumulatorByIteratee, -): Transform { - return _accumulator(_rollingByFunction(rate, iteratee)); -} diff --git a/src/functions/index.ts b/src/functions/index.ts index 2c511c0..2100353 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -1,6 +1,6 @@ -import { Readable, Writable } from "stream"; +import { Readable, Writable, Transform } from "stream"; import { ChildProcess } from "child_process"; -import * as baseFunctions from "./functions"; +import * as baseFunctions from "./baseFunctions"; import { ThroughOptions, @@ -29,7 +29,7 @@ export function fromArray(array: any[]): NodeJS.ReadableStream { export function map( mapper: (chunk: T, encoding?: string) => R, options?: TransformOptions, -): NodeJS.ReadWriteStream { +): Transform { return baseFunctions.map(mapper, options); } @@ -207,10 +207,7 @@ export function last(readable: Readable): Promise { * @param batchSize Size of the batches, defaults to 1000. * @param maxBatchAge? Max lifetime of a batch, defaults to 500 */ -export function batch( - batchSize: number, - maxBatchAge?: number, -): NodeJS.ReadWriteStream { +export function batch(batchSize: number, maxBatchAge?: number): Transform { return baseFunctions.batch(batchSize, maxBatchAge); } diff --git a/src/functions/join/index.ts b/src/functions/join/index.ts new file mode 100644 index 0000000..0bd22d3 --- /dev/null +++ b/src/functions/join/index.ts @@ -0,0 +1,31 @@ +import { Transform } from "stream"; +import { StringDecoder } from "string_decoder"; +import { WithEncoding } from "../definitions"; +/** + * Return a ReadWrite stream that joins streamed chunks using the given separator + * @param separator Separator to join with + * @param options + * @param options.encoding Encoding written chunks are assumed to use + */ +export function join( + separator: string, + options: WithEncoding = { encoding: "utf8" }, +): NodeJS.ReadWriteStream { + let isFirstChunk = true; + const decoder = new StringDecoder(options.encoding); + return new Transform({ + readableObjectMode: true, + async transform(chunk: Buffer, encoding, callback) { + const asString = decoder.write(chunk); + // Take care not to break up multi-byte characters spanning multiple chunks + if (asString !== "" || chunk.length === 0) { + if (!isFirstChunk) { + this.push(separator); + } + this.push(asString); + isFirstChunk = false; + } + callback(); + }, + }); +} diff --git a/src/functions/join/join.spec.ts b/src/functions/join/join.spec.ts new file mode 100644 index 0000000..fc9d5b7 --- /dev/null +++ b/src/functions/join/join.spec.ts @@ -0,0 +1,56 @@ +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { join } from "."; + +test.cb("join() joins chunks using the specified separator", t => { + t.plan(9); + const source = new Readable({ objectMode: true }); + const expectedParts = ["ab|", "|", "c|d", "|", "|", "|", "e", "|", "|f|"]; + let i = 0; + source + .pipe(join("|")) + .on("data", part => { + expect(part).to.equal(expectedParts[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("ab|"); + source.push("c|d"); + source.push("|"); + source.push("e"); + source.push("|f|"); + source.push(null); +}); + +test.cb( + "join() joins chunks using the specified separator without breaking up multi-byte characters " + + "spanning multiple chunks", + t => { + t.plan(5); + const source = new Readable({ objectMode: true }); + const expectedParts = ["ø", "|", "ö", "|", "一"]; + let i = 0; + source + .pipe(join("|")) + .on("data", part => { + expect(part).to.equal(expectedParts[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push(Buffer.from("ø").slice(0, 1)); // 2-byte character spanning two chunks + source.push(Buffer.from("ø").slice(1, 2)); + source.push(Buffer.from("ö").slice(0, 1)); // 2-byte character spanning two chunks + source.push(Buffer.from("ö").slice(1, 2)); + source.push(Buffer.from("一").slice(0, 1)); // 3-byte character spanning three chunks + source.push(Buffer.from("一").slice(1, 2)); + source.push(Buffer.from("一").slice(2, 3)); + source.push(null); + }, +); diff --git a/src/functions/last/index.ts b/src/functions/last/index.ts new file mode 100644 index 0000000..baf7440 --- /dev/null +++ b/src/functions/last/index.ts @@ -0,0 +1,14 @@ +import { Readable } from "stream"; +/** + * Return a Promise resolving to the last streamed chunk of the given readable stream, after it has + * ended + * @param readable Readable stream to wait on + */ +export function last(readable: Readable): Promise { + let lastChunk: T | null = null; + return new Promise((resolve, _) => { + readable + .on("data", chunk => (lastChunk = chunk)) + .on("end", () => resolve(lastChunk)); + }); +} diff --git a/src/functions/map/index.ts b/src/functions/map/index.ts new file mode 100644 index 0000000..7ddfbed --- /dev/null +++ b/src/functions/map/index.ts @@ -0,0 +1,29 @@ +import { Transform } from "stream"; +import { TransformOptions } from "../definitions"; +/** + * Return a ReadWrite stream that maps streamed chunks + * @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such) + * @param options + * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects + * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects + */ +export function map( + mapper: (chunk: T, encoding: string) => R, + options: TransformOptions = { + readableObjectMode: true, + writableObjectMode: true, + }, +): Transform { + return new Transform({ + ...options, + async transform(chunk: T, encoding, callback) { + try { + const mapped = await mapper(chunk, encoding); + this.push(mapped); + callback(); + } catch (err) { + callback(err); + } + }, + }); +} diff --git a/src/functions/map/map.spec.ts b/src/functions/map/map.spec.ts new file mode 100644 index 0000000..2812503 --- /dev/null +++ b/src/functions/map/map.spec.ts @@ -0,0 +1,107 @@ +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { map } from "."; + +test.cb("map() maps elements synchronously", t => { + t.plan(3); + const source = new Readable({ objectMode: true }); + const expectedElements = ["A", "B", "C"]; + let i = 0; + source + .pipe(map((element: string) => element.toUpperCase())) + .on("data", (element: string) => { + expect(element).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); +}); + +test.cb("map() maps elements asynchronously", t => { + t.plan(3); + const source = new Readable({ objectMode: true }); + const expectedElements = ["A", "B", "C"]; + let i = 0; + source + .pipe( + map(async (element: string) => { + await Promise.resolve(); + return element.toUpperCase(); + }), + ) + .on("data", (element: string) => { + expect(element).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); +}); + +test.cb("map() emits errors during synchronous mapping", t => { + t.plan(2); + const source = new Readable({ objectMode: true }); + source + .pipe( + map((element: string) => { + if (element !== "a") { + throw new Error("Failed mapping"); + } + return element.toUpperCase(); + }), + ) + .resume() + .on("error", err => { + expect(err.message).to.equal("Failed mapping"); + t.pass(); + }) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); +}); + +test("map() emits errors during asynchronous mapping", t => { + t.plan(1); + return new Promise((resolve, reject) => { + const source = new Readable({ objectMode: true }); + source + .pipe( + map(async (element: string) => { + await Promise.resolve(); + if (element !== "a") { + throw new Error("Failed mapping"); + } + return element.toUpperCase(); + }), + ) + .resume() + .on("error", err => { + expect(err.message).to.equal("Failed mapping"); + t.pass(); + resolve(); + }) + .on("end", () => { + t.fail(); + }); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); + }); +}); diff --git a/src/functions/merge/index.ts b/src/functions/merge/index.ts new file mode 100644 index 0000000..7166006 --- /dev/null +++ b/src/functions/merge/index.ts @@ -0,0 +1,36 @@ +import { Readable } from "stream"; +/** + * Return a Readable stream of readable streams merged together in chunk arrival order + * @param streams Readable streams to merge + */ +export function merge(...streams: Readable[]): Readable { + let isStarted = false; + let streamEndedCount = 0; + return new Readable({ + objectMode: true, + read() { + if (streamEndedCount >= streams.length) { + this.push(null); + } else if (!isStarted) { + isStarted = true; + streams.forEach(stream => + stream + .on("data", chunk => { + if (!this.push(chunk)) { + streams.forEach(s => s.pause()); + } + }) + .on("error", err => this.emit("error", err)) + .on("end", () => { + streamEndedCount++; + if (streamEndedCount === streams.length) { + this.push(null); + } + }), + ); + } else { + streams.forEach(s => s.resume()); + } + }, + }); +} diff --git a/src/functions/parallelMap/index.ts b/src/functions/parallelMap/index.ts new file mode 100644 index 0000000..ec82f35 --- /dev/null +++ b/src/functions/parallelMap/index.ts @@ -0,0 +1,44 @@ +import { Transform } from "stream"; +import { sleep } from "../../helpers"; +import { TransformOptions } from "../definitions"; +/** + * Limits number of parallel processes in flight. + * @param parallel Max number of parallel processes. + * @param func Function to execute on each data chunk + * @param pause Amount of time to pause processing when max number of parallel processes are executing. + */ +export function parallelMap( + mapper: (data: T) => R, + parallel: number = 10, + sleepTime: number = 5, + options: TransformOptions = { + readableObjectMode: true, + writableObjectMode: true, + }, +) { + let inflight = 0; + return new Transform({ + ...options, + async transform(data, encoding, callback) { + while (parallel <= inflight) { + await sleep(sleepTime); + } + inflight += 1; + callback(); + try { + const res = await mapper(data); + this.push(res); + } catch (e) { + this.emit(e); + } finally { + inflight -= 1; + } + }, + async flush(callback) { + while (inflight > 0) { + await sleep(sleepTime); + } + callback(); + }, + }); +} diff --git a/src/functions/parse/index.ts b/src/functions/parse/index.ts new file mode 100644 index 0000000..1e32cb2 --- /dev/null +++ b/src/functions/parse/index.ts @@ -0,0 +1,26 @@ +import { Transform } from "stream"; +import { StringDecoder } from "string_decoder"; +import { SerializationFormats } from "../definitions"; +/** + * Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk + * must be a fully defined JSON string. + * @param format Format of serialized data, only utf8 supported. + */ +export function parse( + format: SerializationFormats = SerializationFormats.utf8, +): NodeJS.ReadWriteStream { + const decoder = new StringDecoder(format); + return new Transform({ + readableObjectMode: true, + writableObjectMode: true, + async transform(chunk: Buffer, encoding, callback) { + try { + const asString = decoder.write(chunk); + // Using await causes parsing errors to be emitted + callback(undefined, await JSON.parse(asString)); + } catch (err) { + callback(err); + } + }, + }); +} diff --git a/src/functions/rate/index.ts b/src/functions/rate/index.ts new file mode 100644 index 0000000..5e88950 --- /dev/null +++ b/src/functions/rate/index.ts @@ -0,0 +1,31 @@ +import { Transform } from "stream"; +import { performance } from "perf_hooks"; +import { sleep } from "../../helpers"; +import { TransformOptions } from "../definitions"; +/** + * Limits date of data transferred into stream. + * @param targetRate Desired rate in ms + * @param period Period to sleep for when rate is above or equal to targetRate + */ +export function rate( + targetRate: number = 50, + period: number = 2, + options: TransformOptions = { + readableObjectMode: true, + writableObjectMode: true, + }, +) { + const deltaMS = ((1 / targetRate) * 1000) / period; // Skip half a period + let total = 0; + const start = performance.now(); + return new Transform({ + async transform(data, encoding, callback) { + const currentRate = (total / (performance.now() - start)) * 1000; + if (targetRate && currentRate > targetRate) { + await sleep(deltaMS); + } + total += 1; + callback(undefined, data); + }, + }); +} diff --git a/src/functions/reduce/index.ts b/src/functions/reduce/index.ts new file mode 100644 index 0000000..f7654fb --- /dev/null +++ b/src/functions/reduce/index.ts @@ -0,0 +1,57 @@ +import { Transform } from "stream"; +import { TransformOptions } from "../definitions"; +/** + * Return a ReadWrite stream that reduces streamed chunks down to a single value and yield that + * value + * @param iteratee Reducer function to apply on each streamed chunk + * @param initialValue Initial value + * @param options + * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects + * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects + */ +export function reduce( + iteratee: + | ((previousValue: R, chunk: T, encoding: string) => R) + | ((previousValue: R, chunk: T, encoding: string) => Promise), + initialValue: R, + options: TransformOptions = { + readableObjectMode: true, + writableObjectMode: true, + }, +) { + let value = initialValue; + return new Transform({ + readableObjectMode: options.readableObjectMode, + writableObjectMode: options.writableObjectMode, + async transform(chunk: T, encoding, callback) { + let isPromise = false; + try { + const result = iteratee(value, chunk, encoding); + isPromise = result instanceof Promise; + value = await result; + callback(); + } catch (err) { + if (isPromise) { + // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly + this.emit("error", err); + callback(); + } else { + callback(err); + } + } + }, + flush(callback) { + // Best effort attempt at yielding the final value (will throw if e.g. yielding an object and + // downstream doesn't expect objects) + try { + callback(undefined, value); + } catch (err) { + try { + this.emit("error", err); + } catch { + // Best effort was made + } + } + }, + }); +} diff --git a/src/functions/reduce/reduce.spec.ts b/src/functions/reduce/reduce.spec.ts new file mode 100644 index 0000000..c01a51e --- /dev/null +++ b/src/functions/reduce/reduce.spec.ts @@ -0,0 +1,98 @@ +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { reduce } from "."; + +test.cb("reduce() reduces elements synchronously", t => { + t.plan(1); + const source = new Readable({ objectMode: true }); + const expectedValue = 6; + source + .pipe(reduce((acc: number, element: string) => acc + element.length, 0)) + .on("data", (element: string) => { + expect(element).to.equal(expectedValue); + t.pass(); + }) + .on("error", t.end) + .on("end", t.end); + + source.push("ab"); + source.push("cd"); + source.push("ef"); + source.push(null); +}); + +test.cb("reduce() reduces elements asynchronously", t => { + t.plan(1); + const source = new Readable({ objectMode: true }); + const expectedValue = 6; + source + .pipe( + reduce(async (acc: number, element: string) => { + await Promise.resolve(); + return acc + element.length; + }, 0), + ) + .on("data", (element: string) => { + expect(element).to.equal(expectedValue); + t.pass(); + }) + .on("error", t.end) + .on("end", t.end); + + source.push("ab"); + source.push("cd"); + source.push("ef"); + source.push(null); +}); + +test.cb("reduce() emits errors during synchronous reduce", t => { + t.plan(2); + const source = new Readable({ objectMode: true }); + source + .pipe( + reduce((acc: number, element: string) => { + if (element !== "ab") { + throw new Error("Failed reduce"); + } + return acc + element.length; + }, 0), + ) + .resume() + .on("error", err => { + expect(err.message).to.equal("Failed reduce"); + t.pass(); + }) + .on("end", t.end); + + source.push("ab"); + source.push("cd"); + source.push("ef"); + source.push(null); +}); + +test.cb("reduce() emits errors during asynchronous reduce", t => { + t.plan(2); + const source = new Readable({ objectMode: true }); + source + .pipe( + reduce(async (acc: number, element: string) => { + await Promise.resolve(); + if (element !== "ab") { + throw new Error("Failed mapping"); + } + return acc + element.length; + }, 0), + ) + .resume() + .on("error", err => { + expect(err.message).to.equal("Failed mapping"); + t.pass(); + }) + .on("end", t.end); + + source.push("ab"); + source.push("cd"); + source.push("ef"); + source.push(null); +}); diff --git a/src/functions/replace/index.ts b/src/functions/replace/index.ts new file mode 100644 index 0000000..462103c --- /dev/null +++ b/src/functions/replace/index.ts @@ -0,0 +1,33 @@ +import { Transform } from "stream"; +import { StringDecoder } from "string_decoder"; +import { WithEncoding } from "../definitions"; +/** + * Return a ReadWrite stream that replaces occurrences of the given string or regular expression in + * the streamed chunks with the specified replacement string + * @param searchValue Search string to use + * @param replaceValue Replacement string to use + * @param options + * @param options.encoding Encoding written chunks are assumed to use + */ +export function replace( + searchValue: string | RegExp, + replaceValue: string, + options: WithEncoding = { encoding: "utf8" }, +): NodeJS.ReadWriteStream { + const decoder = new StringDecoder(options.encoding); + return new Transform({ + readableObjectMode: true, + transform(chunk: Buffer, encoding, callback) { + const asString = decoder.write(chunk); + // Take care not to break up multi-byte characters spanning multiple chunks + if (asString !== "" || chunk.length === 0) { + callback( + undefined, + asString.replace(searchValue, replaceValue), + ); + } else { + callback(); + } + }, + }); +} diff --git a/src/functions/replace/replace.spec.ts b/src/functions/replace/replace.spec.ts new file mode 100644 index 0000000..a36642c --- /dev/null +++ b/src/functions/replace/replace.spec.ts @@ -0,0 +1,80 @@ +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { replace } from "."; + +test.cb( + "replace() replaces occurrences of the given string in the streamed elements with the specified " + + "replacement string", + t => { + t.plan(3); + const source = new Readable({ objectMode: true }); + const expectedElements = ["abc", "xyf", "ghi"]; + let i = 0; + source + .pipe(replace("de", "xy")) + .on("data", part => { + expect(part).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("abc"); + source.push("def"); + source.push("ghi"); + source.push(null); + }, +); + +test.cb( + "replace() replaces occurrences of the given regular expression in the streamed elements with " + + "the specified replacement string", + t => { + t.plan(3); + const source = new Readable({ objectMode: true }); + const expectedElements = ["abc", "xyz", "ghi"]; + let i = 0; + source + .pipe(replace(/^def$/, "xyz")) + .on("data", part => { + expect(part).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("abc"); + source.push("def"); + source.push("ghi"); + source.push(null); + }, +); + +test.cb( + "replace() replaces occurrences of the given multi-byte character even if it spans multiple chunks", + t => { + t.plan(3); + const source = new Readable({ objectMode: true }); + const expectedElements = ["ø", "O", "a"]; + let i = 0; + source + .pipe(replace("ö", "O")) + .on("data", part => { + expect(part).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push(Buffer.from("ø").slice(0, 1)); // 2-byte character spanning two chunks + source.push(Buffer.from("ø").slice(1, 2)); + source.push(Buffer.from("ö").slice(0, 1)); // 2-byte character spanning two chunks + source.push(Buffer.from("ö").slice(1, 2)); + source.push("a"); + source.push(null); + }, +); diff --git a/src/functions/split/index.ts b/src/functions/split/index.ts new file mode 100644 index 0000000..fb0f319 --- /dev/null +++ b/src/functions/split/index.ts @@ -0,0 +1,34 @@ +import { Transform } from "stream"; +import { StringDecoder } from "string_decoder"; +import { WithEncoding } from "../definitions"; +/** + * Return a ReadWrite stream that splits streamed chunks using the given separator + * @param separator Separator to split by, defaulting to "\n" + * @param options + * @param options.encoding Encoding written chunks are assumed to use + */ +export function split( + separator: string | RegExp = "\n", + options: WithEncoding = { encoding: "utf8" }, +): NodeJS.ReadWriteStream { + let buffered = ""; + const decoder = new StringDecoder(options.encoding); + + return new Transform({ + readableObjectMode: true, + transform(chunk: Buffer, encoding, callback) { + const asString = decoder.write(chunk); + const splitted = asString.split(separator); + if (splitted.length > 1) { + splitted[0] = buffered.concat(splitted[0]); + buffered = ""; + } + buffered += splitted[splitted.length - 1]; + splitted.slice(0, -1).forEach((part: string) => this.push(part)); + callback(); + }, + flush(callback) { + callback(undefined, buffered + decoder.end()); + }, + }); +} diff --git a/src/functions/split/split.spec.ts b/src/functions/split/split.spec.ts new file mode 100644 index 0000000..9e909f3 --- /dev/null +++ b/src/functions/split/split.spec.ts @@ -0,0 +1,98 @@ +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { split } from "."; + +test.cb("split() splits chunks using the default separator (\\n)", t => { + t.plan(5); + const source = new Readable({ objectMode: true }); + const expectedParts = ["ab", "c", "d", "ef", ""]; + let i = 0; + source + .pipe(split()) + .on("data", part => { + expect(part).to.equal(expectedParts[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("ab\n"); + source.push("c"); + source.push("\n"); + source.push("d"); + source.push("\nef\n"); + source.push(null); +}); + +test.cb("split() splits chunks using the specified separator", t => { + t.plan(6); + const source = new Readable({ objectMode: true }); + const expectedParts = ["ab", "c", "d", "e", "f", ""]; + let i = 0; + source + .pipe(split("|")) + .on("data", (part: string) => { + expect(part).to.equal(expectedParts[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("ab|"); + source.push("c|d"); + source.push("|"); + source.push("e"); + source.push("|f|"); + source.push(null); +}); + +test.cb( + "split() splits utf8 encoded buffers using the specified separator", + t => { + t.plan(3); + const expectedElements = ["a", "b", "c"]; + let i = 0; + const through = split(","); + const buf = Buffer.from("a,b,c"); + through + .on("data", element => { + expect(element).to.equal(expectedElements[i]); + i++; + t.pass(); + }) + .on("error", t.end) + .on("end", t.end); + + for (let j = 0; j < buf.length; ++j) { + through.write(buf.slice(j, j + 1)); + } + through.end(); + }, +); + +test.cb( + "split() splits utf8 encoded buffers with multi-byte characters using the specified separator", + t => { + t.plan(3); + const expectedElements = ["一", "一", "一"]; + let i = 0; + const through = split(","); + const buf = Buffer.from("一,一,一"); // Those spaces are multi-byte utf8 characters (code: 4E00) + through + .on("data", element => { + expect(element).to.equal(expectedElements[i]); + i++; + t.pass(); + }) + .on("error", t.end) + .on("end", t.end); + + for (let j = 0; j < buf.length; ++j) { + through.write(buf.slice(j, j + 1)); + } + through.end(); + }, +); diff --git a/src/functions/stringify/index.ts b/src/functions/stringify/index.ts new file mode 100644 index 0000000..3ac5a6f --- /dev/null +++ b/src/functions/stringify/index.ts @@ -0,0 +1,22 @@ +import { Transform } from "stream"; +import { JsonValue, JsonParseOptions } from "../definitions"; + +/** + * Return a ReadWrite stream that stringifies the streamed chunks to JSON + */ +export function stringify( + options: JsonParseOptions = { pretty: false }, +): NodeJS.ReadWriteStream { + return new Transform({ + readableObjectMode: true, + writableObjectMode: true, + transform(chunk: JsonValue, encoding, callback) { + callback( + undefined, + options.pretty + ? JSON.stringify(chunk, null, 2) + : JSON.stringify(chunk), + ); + }, + }); +} diff --git a/src/functions/unbatch/index.ts b/src/functions/unbatch/index.ts new file mode 100644 index 0000000..b0dd51c --- /dev/null +++ b/src/functions/unbatch/index.ts @@ -0,0 +1,21 @@ +import { Transform } from "stream"; +import { TransformOptions } from "../definitions"; +/** + * Unbatches and sends individual chunks of data + */ +export function unbatch( + options: TransformOptions = { + readableObjectMode: true, + writableObjectMode: true, + }, +) { + return new Transform({ + ...options, + transform(data, encoding, callback) { + for (const d of data) { + this.push(d); + } + callback(); + }, + }); +} diff --git a/tsconfig.json b/tsconfig.json index 8df64fa..4a3d25c 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -3,14 +3,14 @@ "noImplicitAny": true, "strictNullChecks": true, "noImplicitReturns": true, - "noUnusedLocals": true, + "noUnusedLocals": false, "noImplicitThis": true, "forceConsistentCasingInFileNames": true, "suppressImplicitAnyIndexErrors": true, "outDir": "./dist", "module": "commonjs", "target": "es5", - "lib": ["es2016"], + "lib": ["es2016", "es2019"], "sourceMap": true, "declaration": true }, diff --git a/yarn.lock b/yarn.lock index e0b7aca..ee57991 100644 --- a/yarn.lock +++ b/yarn.lock @@ -37,41 +37,41 @@ imurmurhash "^0.1.4" slide "^1.1.5" -"@babel/code-frame@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.0.0.tgz#06e2ab19bdb535385559aabb5ba59729482800f8" - integrity sha512-OfC2uemaknXr87bdLUkWog7nYuliM9Ij5HUcajsVcMCpQrcLmtxRbVFTIqmcSkSeYRBFBRxs2FiUqFJDLdiebA== +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.5.5": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.5.5.tgz#bc0782f6d69f7b7d49531219699b988f669a8f9d" + integrity sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw== dependencies: "@babel/highlight" "^7.0.0" "@babel/core@^7.4.0": - version "7.5.0" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.5.0.tgz#6ed6a2881ad48a732c5433096d96d1b0ee5eb734" - integrity sha512-6Isr4X98pwXqHvtigw71CKgmhL1etZjPs5A67jL/w0TkLM9eqmFR40YrnJvEc1WnMZFsskjsmid8bHZyxKEAnw== + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.5.5.tgz#17b2686ef0d6bc58f963dddd68ab669755582c30" + integrity sha512-i4qoSr2KTtce0DmkuuQBV4AuQgGPUcPXMr9L5MyYAtk06z068lQ10a4O009fe5OB/DfNV+h+qqT7ddNV8UnRjg== dependencies: - "@babel/code-frame" "^7.0.0" - "@babel/generator" "^7.5.0" - "@babel/helpers" "^7.5.0" - "@babel/parser" "^7.5.0" + "@babel/code-frame" "^7.5.5" + "@babel/generator" "^7.5.5" + "@babel/helpers" "^7.5.5" + "@babel/parser" "^7.5.5" "@babel/template" "^7.4.4" - "@babel/traverse" "^7.5.0" - "@babel/types" "^7.5.0" + "@babel/traverse" "^7.5.5" + "@babel/types" "^7.5.5" convert-source-map "^1.1.0" debug "^4.1.0" json5 "^2.1.0" - lodash "^4.17.11" + lodash "^4.17.13" resolve "^1.3.2" semver "^5.4.1" source-map "^0.5.0" -"@babel/generator@^7.0.0", "@babel/generator@^7.4.0", "@babel/generator@^7.5.0": - version "7.5.0" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.5.0.tgz#f20e4b7a91750ee8b63656073d843d2a736dca4a" - integrity sha512-1TTVrt7J9rcG5PMjvO7VEG3FrEoEJNHxumRq66GemPmzboLWtIjjcJgk8rokuAS7IiRSpgVSu5Vb9lc99iJkOA== +"@babel/generator@^7.0.0", "@babel/generator@^7.4.0", "@babel/generator@^7.5.5": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.5.5.tgz#873a7f936a3c89491b43536d12245b626664e3cf" + integrity sha512-ETI/4vyTSxTzGnU2c49XHv2zhExkv9JHLTwDAFz85kmcwuShvYG2H08FwgIguQf4JC75CBnXAUM5PqeF4fj0nQ== dependencies: - "@babel/types" "^7.5.0" + "@babel/types" "^7.5.5" jsesc "^2.5.1" - lodash "^4.17.11" + lodash "^4.17.13" source-map "^0.5.0" trim-right "^1.0.1" @@ -122,16 +122,16 @@ "@babel/types" "^7.0.0" "@babel/helper-module-transforms@^7.4.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.4.4.tgz#96115ea42a2f139e619e98ed46df6019b94414b8" - integrity sha512-3Z1yp8TVQf+B4ynN7WoHPKS8EkdTbgAEy0nU0rs/1Kw4pDgmvYH3rz3aI11KgxKCba2cn7N+tqzV1mY2HMN96w== + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.5.5.tgz#f84ff8a09038dcbca1fd4355661a500937165b4a" + integrity sha512-jBeCvETKuJqeiaCdyaheF40aXnnU1+wkSiUs/IQg3tB85up1LyL8x77ClY8qJpuRJUcXQo+ZtdNESmZl4j56Pw== dependencies: "@babel/helper-module-imports" "^7.0.0" "@babel/helper-simple-access" "^7.1.0" "@babel/helper-split-export-declaration" "^7.4.4" "@babel/template" "^7.4.4" - "@babel/types" "^7.4.4" - lodash "^4.17.11" + "@babel/types" "^7.5.5" + lodash "^4.17.13" "@babel/helper-plugin-utils@^7.0.0": version "7.0.0" @@ -139,11 +139,11 @@ integrity sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA== "@babel/helper-regex@^7.4.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.4.4.tgz#a47e02bc91fb259d2e6727c2a30013e3ac13c4a2" - integrity sha512-Y5nuB/kESmR3tKjU8Nkn1wMGEx1tjJX076HBMeL3XLQCu6vA/YRzuTW0bbb+qRnXvQGn+d6Rx953yffl8vEy7Q== + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.5.5.tgz#0aa6824f7100a2e0e89c1527c23936c152cab351" + integrity sha512-CkCYQLkfkiugbRDO8eZn6lRuR8kzZoGXCg3149iTk5se7g6qykSpy3+hELSwquhu+TgHn8nkLiBwHvNX8Hofcw== dependencies: - lodash "^4.17.11" + lodash "^4.17.13" "@babel/helper-remap-async-to-generator@^7.1.0": version "7.1.0" @@ -181,14 +181,14 @@ "@babel/traverse" "^7.1.0" "@babel/types" "^7.2.0" -"@babel/helpers@^7.5.0": - version "7.5.1" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.5.1.tgz#65407c741a56ddd59dd86346cd112da3de912db3" - integrity sha512-rVOTDv8sH8kNI72Unenusxw6u+1vEepZgLxeV+jHkhsQlYhzVhzL1EpfoWT7Ub3zpWSv2WV03V853dqsnyoQzA== +"@babel/helpers@^7.5.5": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.5.5.tgz#63908d2a73942229d1e6685bc2a0e730dde3b75e" + integrity sha512-nRq2BUhxZFnfEn/ciJuhklHvFOqjJUD5wpx+1bxUF2axL9C+v4DE/dmp5sT2dKnpOs4orZWzpAZqlCy8QqE/7g== dependencies: "@babel/template" "^7.4.4" - "@babel/traverse" "^7.5.0" - "@babel/types" "^7.5.0" + "@babel/traverse" "^7.5.5" + "@babel/types" "^7.5.5" "@babel/highlight@^7.0.0": version "7.5.0" @@ -199,10 +199,10 @@ esutils "^2.0.2" js-tokens "^4.0.0" -"@babel/parser@^7.0.0", "@babel/parser@^7.4.4", "@babel/parser@^7.5.0": - version "7.5.0" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.5.0.tgz#3e0713dff89ad6ae37faec3b29dcfc5c979770b7" - integrity sha512-I5nW8AhGpOXGCCNYGc+p7ExQIBxRFnS2fd/d862bNOKvmoEPjYPcfIjsfdy0ujagYOIYPczKgD9l3FsgTkAzKA== +"@babel/parser@^7.0.0", "@babel/parser@^7.4.4", "@babel/parser@^7.5.5": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.5.5.tgz#02f077ac8817d3df4a832ef59de67565e71cca4b" + integrity sha512-E5BN68cqR7dhKan1SfqgPGhQ178bkVKpXTPEXnFJBrEt8/DKRZlybmy+IgYLTeN7tp1R5Ccmbm2rBk17sHYU3g== "@babel/plugin-proposal-async-generator-functions@^7.0.0": version "7.2.0" @@ -214,9 +214,9 @@ "@babel/plugin-syntax-async-generators" "^7.2.0" "@babel/plugin-proposal-object-rest-spread@^7.0.0": - version "7.5.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.5.1.tgz#5788ab097c63135e4236548b4f112bfce09dd394" - integrity sha512-PVGXx5LYHcT7L4MdoE+rM5uq68IKlvU9lljVQ4OXY6aUEnGvezcGbM4VNY57Ug+3R2Zg/nYHlEdiWoIBoRA0mw== + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.5.5.tgz#61939744f71ba76a3ae46b5eea18a54c16d22e58" + integrity sha512-F2DxJJSQ7f64FyTVl5cw/9MWn6naXGdk3Q3UhDbFEEHv+EilCPoeRD3Zh/Utx1CJz4uyKlQ4uH+bJPbEhMV7Zw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread" "^7.2.0" @@ -295,28 +295,28 @@ "@babel/parser" "^7.4.4" "@babel/types" "^7.4.4" -"@babel/traverse@^7.1.0", "@babel/traverse@^7.5.0": - version "7.5.0" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.5.0.tgz#4216d6586854ef5c3c4592dab56ec7eb78485485" - integrity sha512-SnA9aLbyOCcnnbQEGwdfBggnc142h/rbqqsXcaATj2hZcegCl903pUD/lfpsNBlBSuWow/YDfRyJuWi2EPR5cg== +"@babel/traverse@^7.1.0", "@babel/traverse@^7.5.5": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.5.5.tgz#f664f8f368ed32988cd648da9f72d5ca70f165bb" + integrity sha512-MqB0782whsfffYfSjH4TM+LMjrJnhCNEDMDIjeTpl+ASaUvxcjoiVCo/sM1GhS1pHOXYfWVCYneLjMckuUxDaQ== dependencies: - "@babel/code-frame" "^7.0.0" - "@babel/generator" "^7.5.0" + "@babel/code-frame" "^7.5.5" + "@babel/generator" "^7.5.5" "@babel/helper-function-name" "^7.1.0" "@babel/helper-split-export-declaration" "^7.4.4" - "@babel/parser" "^7.5.0" - "@babel/types" "^7.5.0" + "@babel/parser" "^7.5.5" + "@babel/types" "^7.5.5" debug "^4.1.0" globals "^11.1.0" - lodash "^4.17.11" + lodash "^4.17.13" -"@babel/types@^7.0.0", "@babel/types@^7.2.0", "@babel/types@^7.4.4", "@babel/types@^7.5.0": - version "7.5.0" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.5.0.tgz#e47d43840c2e7f9105bc4d3a2c371b4d0c7832ab" - integrity sha512-UFpDVqRABKsW01bvw7/wSUe56uy6RXM5+VJibVVAybDGxEW25jdwiFJEf7ASvSaC7sN7rbE/l3cLp2izav+CtQ== +"@babel/types@^7.0.0", "@babel/types@^7.2.0", "@babel/types@^7.4.4", "@babel/types@^7.5.5": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.5.5.tgz#97b9f728e182785909aa4ab56264f090a028d18a" + integrity sha512-s63F9nJioLqOlW3UkyMd+BYhXt44YuaFm/VV0VwuteqjYwRrObkU7ra9pY4wAJR3oXi8hJrMcrcJdO/HH33vtw== dependencies: esutils "^2.0.2" - lodash "^4.17.11" + lodash "^4.17.13" to-fast-properties "^2.0.0" "@concordance/react@^2.0.0": @@ -327,9 +327,9 @@ arrify "^1.0.1" "@types/chai@^4.1.7": - version "4.1.7" - resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.1.7.tgz#1b8e33b61a8c09cbe1f85133071baa0dbf9fa71a" - integrity sha512-2Y8uPt0/jwjhQ6EiluT0XCri1Dbplr0ZxfFXUz+ye13gaqE8u5gL5ppao1JrUYr9cIip5S6MvQzBS7Kke7U9VA== + version "4.2.0" + resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.2.0.tgz#2478260021408dec32c123a7cad3414beb811a07" + integrity sha512-zw8UvoBEImn392tLjxoavuonblX/4Yb9ha4KBU10FirCfwgzhKO0dvyJSF9ByxV1xK1r2AgnAi/tvQaLgxQqxA== "@types/events@*": version "3.0.0" @@ -350,22 +350,10 @@ resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d" integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== -"@types/node@*": - version "12.0.12" - resolved "https://registry.yarnpkg.com/@types/node/-/node-12.0.12.tgz#cc791b402360db1eaf7176479072f91ee6c6c7ca" - integrity sha512-Uy0PN4R5vgBUXFoJrKryf5aTk3kJ8Rv3PdlHjl6UaX+Cqp1QE0yPQ68MPXGrZOfG7gZVNDIJZYyot0B9ubXUrQ== - -"@types/node@^10.12.10": - version "10.14.12" - resolved "https://registry.yarnpkg.com/@types/node/-/node-10.14.12.tgz#0eec3155a46e6c4db1f27c3e588a205f767d622f" - integrity sha512-QcAKpaO6nhHLlxWBvpc4WeLrTvPqlHOvaj0s5GriKkA1zq+bsFBPpfYCvQhLqLgYlIko8A9YrPdaMHCo5mBcpg== - -"@types/typescript@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@types/typescript/-/typescript-2.0.0.tgz#c433539c98bae28682b307eaa7a0fd2115b83c28" - integrity sha1-xDNTnJi64oaCswfqp6D9IRW4PCg= - dependencies: - typescript "*" +"@types/node@*", "@types/node@^12.7.2": + version "12.7.2" + resolved "https://registry.yarnpkg.com/@types/node/-/node-12.7.2.tgz#c4e63af5e8823ce9cc3f0b34f7b998c2171f0c44" + integrity sha512-dyYO+f6ihZEtNPDcWNR1fkoTDf3zAK3lAABDze3mz6POyIercH0lEUawUFXlG8xaQZmm1yEBON/4TsYv/laDYg== abbrev@1: version "1.1.1" @@ -427,6 +415,11 @@ are-we-there-yet@~1.1.2: delegates "^1.0.0" readable-stream "^2.0.6" +arg@^4.1.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.1.tgz#485f8e7c390ce4c5f78257dbea80d4be11feda4c" + integrity sha512-SlmP3fEA88MBv0PypnXZ8ZfJhwmDeIE3SP71j37AiXQBXYosPV0x6uISAaHYSlSVhmHOVkomen0tbGk6Anlebw== + argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" @@ -678,7 +671,7 @@ braces@^2.3.1, braces@^2.3.2: split-string "^3.0.2" to-regex "^3.0.1" -buffer-from@^1.0.0, buffer-from@^1.1.0: +buffer-from@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== @@ -818,9 +811,9 @@ class-utils@^0.3.5: static-extend "^0.1.1" clean-stack@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.1.0.tgz#9e7fec7f3f8340a2ab4f127c80273085e8fbbdd0" - integrity sha512-uQWrpRm+iZZUCAp7ZZJQbd4Za9I3AjR/3YTjmcnAtkauaIm/T5CT6U8zVI6e60T6OANqBFAzuR9/HB3NzuZCRA== + version "2.2.0" + resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" + integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== clean-yaml-object@^0.1.0: version "0.1.0" @@ -1120,11 +1113,16 @@ detect-libc@^1.0.2: resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= -diff@^3.1.0, diff@^3.2.0: +diff@^3.2.0: version "3.5.0" resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12" integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA== +diff@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.1.tgz#0c667cb467ebbb5cea7f14f135cc2dba7780a8ff" + integrity sha512-s2+XdvhPCOF01LRQBC8hf4vhbVmI2CGS5aZnxLJlT5FtdhPCDFq80q++zK2KlrVorVDdL5BOGZ/VfLrVtYNF+Q== + dir-glob@^2.0.0: version "2.2.2" resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-2.2.2.tgz#fa09f0694153c8918b18ba0deafae94769fc50c4" @@ -1215,14 +1213,14 @@ espurify@^1.6.0: core-js "^2.0.0" estraverse@^4.0.0, estraverse@^4.1.1: - version "4.2.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13" - integrity sha1-De4/7TH81GlhjOc0IJn8GvoL2xM= + version "4.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== esutils@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b" - integrity sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs= + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== execa@^0.7.0: version "0.7.0" @@ -1459,9 +1457,9 @@ got@^6.7.1: url-parse-lax "^1.0.0" graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2: - version "4.2.0" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.0.tgz#8d8fdc73977cb04104721cb53666c1ca64cd328b" - integrity sha512-jpSvDPV4Cq/bgtpndIWbI5hmYxhQGHPC4d4cqBPb4DLniCfhJokdXhwhaDuLBGLQdvvRum/UiX6ECVIPvDXqdg== + version "4.2.2" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.2.tgz#6f0952605d0140c1cfdb138ed005775b92d67b02" + integrity sha512-IItsdsea19BoLC7ELy13q1iJFNmd7ofZH5+X/pJr90/nRoPEX0DJo1dHDbgtYWOhJhcCgMDTOw84RZ72q6lB+Q== has-flag@^3.0.0: version "3.0.0" @@ -1517,9 +1515,9 @@ hasha@^3.0.0: is-stream "^1.0.1" hosted-git-info@^2.1.4: - version "2.7.1" - resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.7.1.tgz#97f236977bd6e125408930ff6de3eec6281ec047" - integrity sha512-7T/BxH19zbcCTa8XkMlbK5lTo1WtgkFi3GvdWEyNuc4Vex7/9Dqbnpsf4JMydcfj9HCg4zUWFTL3Za6lapg5/w== + version "2.8.4" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.4.tgz#44119abaf4bc64692a16ace34700fed9c03e2546" + integrity sha512-pzXIvANXEFrc5oFFXRMkbLPQ2rXRoDERwDLyrcUxGhaZhgP54BBSl9Oheh7Vv0T090cszWBxPjkQQ5Sq1PbBRQ== iconv-lite@^0.4.4: version "0.4.24" @@ -2009,14 +2007,14 @@ lodash.islength@^4.0.1: integrity sha1-Tpho1FJXXXUK/9NYyXlUPcIO1Xc= lodash.merge@^4.6.1: - version "4.6.1" - resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.1.tgz#adc25d9cb99b9391c59624f379fbba60d7111d54" - integrity sha512-AOYza4+Hf5z1/0Hztxpm2/xiPZgi/cjMqdnKTUWTBSKchJlxXXuUSxCCl8rJlf4g6yww/j6mA8nC8Hw/EZWxKQ== + version "4.6.2" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== -lodash@^4.17.11: - version "4.17.11" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.11.tgz#b39ea6229ef607ecd89e2c8df12536891cac9b8d" - integrity sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg== +lodash@^4.17.13: + version "4.17.15" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548" + integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A== log-symbols@^2.2.0: version "2.2.0" @@ -2702,7 +2700,7 @@ redent@^2.0.0: indent-string "^3.0.0" strip-indent "^2.0.0" -regenerate-unicode-properties@^8.0.2: +regenerate-unicode-properties@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz#ef51e0f0ea4ad424b77bf7cb41f3e015c70a3f0e" integrity sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA== @@ -2723,12 +2721,12 @@ regex-not@^1.0.0, regex-not@^1.0.2: safe-regex "^1.1.0" regexpu-core@^4.5.4: - version "4.5.4" - resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.5.4.tgz#080d9d02289aa87fe1667a4f5136bc98a6aebaae" - integrity sha512-BtizvGtFQKGPUcTy56o3nk1bGRp4SZOTYrDtGNlqCQufptV5IkkLN6Emw+yunAJjzf+C9FQFtvq7IoA3+oMYHQ== + version "4.5.5" + resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.5.5.tgz#aaffe61c2af58269b3e516b61a73790376326411" + integrity sha512-FpI67+ky9J+cDizQUJlIlNZFKual/lUkFr1AG6zOCpwZ9cLrg8UUVakyUQJD7fCDIe9Z2nwTQJNPyonatNmDFQ== dependencies: regenerate "^1.4.0" - regenerate-unicode-properties "^8.0.2" + regenerate-unicode-properties "^8.1.0" regjsgen "^0.5.0" regjsparser "^0.6.0" unicode-match-property-ecmascript "^1.0.4" @@ -2806,9 +2804,9 @@ resolve-url@^0.2.1: integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= resolve@^1.10.0, resolve@^1.3.2: - version "1.11.1" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.11.1.tgz#ea10d8110376982fef578df8fc30b9ac30a07a3e" - integrity sha512-vIpgF6wfuJOZI7KKKSP+HmiKggadPQAdsp5HiC1mvqnfp0gF1vdwgBWZIdrVft9pgqoMFQN+R7BSWZiBxx+BBw== + version "1.12.0" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.12.0.tgz#3fc644a35c84a48554609ff26ec52b66fa577df6" + integrity sha512-B/dOmuoAik5bKcD6s6nXDCjzUKnaDvdkRyAk6rsmsKLipWj4797iothd7jmmUhWTfinVMU+wc56rYKsit2Qy4w== dependencies: path-parse "^1.0.6" @@ -2826,9 +2824,9 @@ ret@~0.1.10: integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== rimraf@^2.6.1, rimraf@^2.6.3: - version "2.6.3" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab" - integrity sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA== + version "2.7.1" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" + integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== dependencies: glob "^7.1.3" @@ -2867,9 +2865,9 @@ semver-diff@^2.0.0: semver "^5.0.3" "semver@2 || 3 || 4 || 5", semver@^5.0.3, semver@^5.1.0, semver@^5.3.0, semver@^5.4.1, semver@^5.5.1, semver@^5.6.0: - version "5.7.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.0.tgz#790a7cf6fea5459bac96110b29b60412dc8ff96b" - integrity sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA== + version "5.7.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" + integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== serialize-error@^2.1.0: version "2.1.0" @@ -2971,18 +2969,10 @@ source-map-resolve@^0.5.0: source-map-url "^0.4.0" urix "^0.1.0" -source-map-support@^0.5.11: - version "0.5.12" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.12.tgz#b4f3b10d51857a5af0138d3ce8003b201613d599" - integrity sha512-4h2Pbvyy15EE02G+JOZpUCmqWJuqrs+sEkzewTm++BPi7Hvn/HwcqLAcNxYAyI0x13CpPPn+kMjl+hplXMHITQ== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - -source-map-support@^0.5.6: - version "0.5.9" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.9.tgz#41bc953b2534267ea2d605bccfa7bfa3111ced5f" - integrity sha512-gR6Rw4MvUlYy83vP0vxoVNzM6t8MUXqNuRsuBmBHQDu1Fh6X015FrLdgoDKcNdkwGubozq0P4N0Q37UyFVr1EA== +source-map-support@^0.5.11, source-map-support@^0.5.6: + version "0.5.13" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932" + integrity sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w== dependencies: buffer-from "^1.0.0" source-map "^0.6.0" @@ -3024,9 +3014,9 @@ spdx-expression-parse@^3.0.0: spdx-license-ids "^3.0.0" spdx-license-ids@^3.0.0: - version "3.0.4" - resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.4.tgz#75ecd1a88de8c184ef015eafb51b5b48bfd11bb1" - integrity sha512-7j8LYJLeY/Yb6ACbQ7F76qy5jHkp0U6jgBfJsk97bwWlVUnUWsAgpyaCvo17h0/RQGnQ036tVDomiwoI4pDkQA== + version "3.0.5" + resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz#3694b5804567a458d3c8045842a6358632f62654" + integrity sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q== split-string@^3.0.1, split-string@^3.0.2: version "3.1.0" @@ -3235,26 +3225,18 @@ trim-right@^1.0.1: resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003" integrity sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM= -ts-node@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-7.0.1.tgz#9562dc2d1e6d248d24bc55f773e3f614337d9baf" - integrity sha512-BVwVbPJRspzNh2yfslyT1PSbl5uIk03EZlb493RKHN4qej/D06n1cEhjlOJG69oFsE7OT8XjpTUcYf6pKTLMhw== +ts-node@^8.3.0: + version "8.3.0" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-8.3.0.tgz#e4059618411371924a1fb5f3b125915f324efb57" + integrity sha512-dyNS/RqyVTDcmNM4NIBAeDMpsAdaQ+ojdf0GOLqE6nwJOgzEkdRNzJywhDfwnuvB10oa6NLVG1rUJQCpRN7qoQ== dependencies: - arrify "^1.0.0" - buffer-from "^1.1.0" - diff "^3.1.0" + arg "^4.1.0" + diff "^4.0.1" make-error "^1.1.1" - minimist "^1.2.0" - mkdirp "^0.5.1" source-map-support "^0.5.6" - yn "^2.0.0" + yn "^3.0.0" -tslib@^1.7.1: - version "1.9.3" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.3.tgz#d7e4dd79245d85428c4d7e4822a79917954ca286" - integrity sha512-4krF8scpejhaOgqzBEcGM7yDIEfi0/8+8zDRZhNZZ2kjmHJ4hv3zCbQWxoJGz1iw5U0Jl0nma13xzHXcncMavQ== - -tslib@^1.8.0, tslib@^1.8.1: +tslib@^1.7.1, tslib@^1.8.0, tslib@^1.8.1: version "1.10.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.10.0.tgz#c3c19f95973fb0a62973fb09d90d961ee43e5c8a" integrity sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ== @@ -3309,11 +3291,6 @@ type-fest@^0.3.0: resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.3.1.tgz#63d00d204e059474fe5e1b7c011112bbd1dc29e1" integrity sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ== -typescript@*: - version "3.5.2" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.2.tgz#a09e1dc69bc9551cadf17dba10ee42cf55e5d56c" - integrity sha512-7KxJovlYhTX5RaRbUdkAXN1KUZ8PwWlTzQdHV6xNqvuFOs7+WBo10TQUqT19Q/Jz2hk5v9TQDIhyLhhJY4p5AA== - typescript@^3.5.3: version "3.5.3" resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" @@ -3490,9 +3467,9 @@ xdg-basedir@^3.0.0: integrity sha1-SWsswQnsqNus/i3HK2A8F8WHCtQ= xtend@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af" - integrity sha1-pcbVMr5lbiPbgg77lDofBJmNY68= + version "4.0.2" + resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" + integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== yallist@^2.1.2: version "2.1.2" @@ -3511,7 +3488,7 @@ yargs-parser@^10.0.0: dependencies: camelcase "^4.1.0" -yn@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/yn/-/yn-2.0.0.tgz#e5adabc8acf408f6385fc76495684c88e6af689a" - integrity sha1-5a2ryKz0CPY4X8dklWhMiOavaJo= +yn@^3.0.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" + integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== From 6a9f6ff919dac7bb075412efb530c61f022fb475 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 15 Aug 2019 11:56:43 -0400 Subject: [PATCH 19/69] Export replace --- src/functions/baseFunctions.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/src/functions/baseFunctions.ts b/src/functions/baseFunctions.ts index 7117746..2b2b067 100644 --- a/src/functions/baseFunctions.ts +++ b/src/functions/baseFunctions.ts @@ -15,6 +15,7 @@ export { parallelMap } from "./parallelMap"; export { parse } from "./parse"; export { rate } from "./rate"; export { reduce } from "./reduce"; +export { replace } from "./replace"; export { split } from "./split"; export { stringify } from "./stringify"; export { unbatch } from "./unbatch"; From 27b4b2427b2ea08f40b7c3a4b209659ed5d8e478 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 15 Aug 2019 14:27:51 -0400 Subject: [PATCH 20/69] Tests --- package.json | 11 +- src/functions/accumulator/accumulator.spec.ts | 412 ++++++++++++++---- src/functions/accumulator/index.ts | 12 + 3 files changed, 351 insertions(+), 84 deletions(-) diff --git a/package.json b/package.json index a5c8e2e..53eb1bf 100644 --- a/package.json +++ b/package.json @@ -22,11 +22,12 @@ "type": "git" }, "scripts": { - "test": "ava", - "test:debug": "NODE_PATH=src node inspect node_modules/ava/profile.js", - "lint": "tslint -p tsconfig.json", - "validate:tslint": "tslint-config-prettier-check ./tslint.json", - "prepublishOnly": "yarn lint && yarn test && yarn tsc" + "test": "NODE_PATH=src node node_modules/.bin/ava 'src/**/**/*.spec.ts' -e", + "test:debug": "NODE_PATH=src node inspect node_modules/ava/profile.ts", + "test:all": "NODE_PATH=src node node_modules/.bin/ava", + "lint": "tslint -p tsconfig.json", + "validate:tslint": "tslint-config-prettier-check ./tslint.json", + "prepublishOnly": "yarn lint && yarn test && yarn tsc" }, "dependencies": {}, "devDependencies": { diff --git a/src/functions/accumulator/accumulator.spec.ts b/src/functions/accumulator/accumulator.spec.ts index c22b2fe..a64c921 100644 --- a/src/functions/accumulator/accumulator.spec.ts +++ b/src/functions/accumulator/accumulator.spec.ts @@ -70,48 +70,107 @@ test.cb("accumulator() rolling with key", t => { source.push(null); }); -test.cb("accumulatorBy() rolling", t => { - t.plan(2); - let chunkIndex = 0; - interface TestObject { - ts: number; - key: string; - } - const source = new Readable({ objectMode: true }); - const firstFlush = [ - { ts: 0, key: "a" }, - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - { ts: 2, key: "d" }, - ]; - const secondFlush = [{ ts: 3, key: "e" }]; - const flushes = [firstFlush, secondFlush]; +test.cb( + "accumulator() rolling should emit error and ignore chunk when its missing key", + t => { + t.plan(2); + let index = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const accumulatorStream = accumulator( + 3, + undefined, + FlushStrategy.rolling, + "nonExistingKey", + ); + const input = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; - source - .pipe( - accumulatorBy( - undefined, - FlushStrategy.rolling, - (event: TestObject, bufferChunk: TestObject) => { - return bufferChunk.ts + 3 <= event.ts; - }, - ), - ) - .on("data", (flush: TestObject[]) => { - t.deepEqual(flush, flushes[chunkIndex]); - chunkIndex++; - }) - .on("error", (e: any) => { - t.end(e); - }) - .on("end", () => { - t.end(); + source + .pipe(accumulatorStream) + .on("data", (flush: TestObject[]) => { + // No valid data output + expect(flush).to.deep.equal([]); + }) + .on("error", (err: any) => { + source.pipe(accumulatorStream); + accumulatorStream.resume(); + expect(err.message).to.equal( + `Key is missing in event: (nonExistingKey, ${JSON.stringify( + input[index], + )})`, + ); + index++; + t.pass(); + }) + .on("end", () => { + t.end(); + }); + input.forEach(item => { + source.push(item); }); - [...firstFlush, ...secondFlush].forEach(item => { - source.push(item); - }); - source.push(null); -}); + source.push(null); + }, +); + +test.cb( + "accumulator() rolling should emit error, ignore chunk when key is missing and continue processing chunks correctly", + t => { + t.plan(3); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const accumulatorStream = accumulator( + 3, + undefined, + FlushStrategy.rolling, + "ts", + ); + const input = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { key: "d" }, + { ts: 3, key: "e" }, + ]; + const firstFlush = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + ]; + const secondFlush = [{ ts: 3, key: "e" }]; + const flushes = [firstFlush, secondFlush]; + + source + .pipe(accumulatorStream) + .on("data", (flush: TestObject[]) => { + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; + }) + .on("error", (err: any) => { + source.pipe(accumulatorStream); + accumulatorStream.resume(); + expect(err.message).to.equal( + `Key is missing in event: (ts, ${JSON.stringify( + input[3], + )})`, + ); + t.pass(); + }) + .on("end", () => { + t.end(); + }); + input.forEach(item => { + source.push(item); + }); + source.push(null); + }, +); test.cb("accumulator() sliding", t => { t.plan(4); @@ -216,6 +275,199 @@ test.cb("accumulator() sliding with key", t => { source.push(null); }); +test.cb( + "accumulator() sliding should emit error and ignore chunk when key is missing", + t => { + t.plan(2); + let index = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const accumulatorStream = accumulator( + 3, + undefined, + FlushStrategy.sliding, + "nonExistingKey", + ); + const input = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; + + source + .pipe(accumulatorStream) + .on("data", (flush: TestObject[]) => { + expect(flush).to.deep.equal([]); + }) + .on("error", (err: any) => { + source.pipe(accumulatorStream); + accumulatorStream.resume(); + expect(err.message).to.equal( + `Key is missing in event: (nonExistingKey, ${JSON.stringify( + input[index], + )})`, + ); + index++; + t.pass(); + }) + .on("end", () => { + t.end(); + }); + input.forEach(item => { + source.push(item); + }); + source.push(null); + }, +); + +test.cb( + "accumulator() sliding should emit error, ignore chunk when key is missing and continue processing chunks correctly", + t => { + t.plan(6); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const accumulatorStream = accumulator( + 3, + undefined, + FlushStrategy.sliding, + "ts", + ); + const input = [ + { ts: 0, key: "a" }, + { key: "b" }, + { ts: 2, key: "c" }, + { ts: 3, key: "d" }, + { ts: 5, key: "f" }, + { ts: 6, key: "g" }, + ]; + const firstFlush = [{ ts: 0, key: "a" }]; + const secondFlush = [{ ts: 0, key: "a" }, { ts: 2, key: "c" }]; + const thirdFlush = [{ ts: 2, key: "c" }, { ts: 3, key: "d" }]; + const fourthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }]; + const fifthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }]; + + const flushes = [ + firstFlush, + secondFlush, + thirdFlush, + fourthFlush, + fifthFlush, + ]; + source + .pipe(accumulatorStream) + .on("data", (flush: TestObject[]) => { + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; + }) + .on("error", (err: any) => { + source.pipe(accumulatorStream); + accumulatorStream.resume(); + expect(err.message).to.equal( + `Key is missing in event: (ts, ${JSON.stringify( + input[1], + )})`, + ); + t.pass(); + }) + .on("end", () => { + t.end(); + }); + input.forEach(item => { + source.push(item); + }); + source.push(null); + }, +); + +test.cb("accumulatorBy() rolling", t => { + t.plan(2); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const firstFlush = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 2, key: "d" }, + ]; + const secondFlush = [{ ts: 3, key: "e" }]; + const flushes = [firstFlush, secondFlush]; + + source + .pipe( + accumulatorBy( + undefined, + FlushStrategy.rolling, + (event: TestObject, bufferChunk: TestObject) => { + return bufferChunk.ts + 3 <= event.ts; + }, + ), + ) + .on("data", (flush: TestObject[]) => { + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; + }) + .on("error", (e: any) => { + t.end(e); + }) + .on("end", () => { + t.end(); + }); + [...firstFlush, ...secondFlush].forEach(item => { + source.push(item); + }); + source.push(null); +}); + +test.cb( + "accumulatorBy() rolling should emit error when key iteratee throws", + t => { + t.plan(2); + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const input = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + ]; + const accumulaterStream = accumulatorBy( + undefined, + FlushStrategy.rolling, + (event: TestObject, bufferChunk: TestObject) => { + if (event.key !== "a") { + throw new Error("Failed mapping"); + } + return bufferChunk.ts + 3 <= event.ts; + }, + ); + source + .pipe(accumulaterStream) + .on("error", (err: any) => { + source.pipe(accumulaterStream); + accumulaterStream.resume(); + expect(err.message).to.equal("Failed mapping"); + t.pass(); + }) + .on("end", () => { + t.end(); + }); + + input.forEach(item => { + source.push(item); + }); + source.push(null); + }, +); + test.cb("accumulatorBy() sliding", t => { t.plan(6); let chunkIndex = 0; @@ -281,43 +533,45 @@ test.cb("accumulatorBy() sliding", t => { source.push(null); }); -test.cb.only("accumulatorBy() sliding should throw", t => { - t.plan(2); - interface TestObject { - ts: number; - key: string; - } - const source = new Readable({ objectMode: true }); - const input = [ - { ts: 0, key: "a" }, - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - { ts: 3, key: "d" }, - ]; - const accumulaterStream = accumulatorBy( - undefined, - FlushStrategy.sliding, - (event: TestObject, bufferChunk: TestObject) => { - if (event.key !== "a" && event.key !== "b") { - throw new Error("Failed mapping"); - } - return bufferChunk.ts + 3 <= event.ts ? true : false; - }, - ); - source - .pipe(accumulaterStream) - .on("error", (err: any) => { - source.pipe(accumulaterStream); - accumulaterStream.resume(); - expect(err.message).to.equal("Failed mapping"); - t.pass(); - }) - .on("end", () => { - t.end(); - }); +test.cb( + "accumulatorBy() sliding should emit error when key iteratee throws", + t => { + t.plan(2); + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const input = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + ]; + const accumulaterStream = accumulatorBy( + undefined, + FlushStrategy.sliding, + (event: TestObject, bufferChunk: TestObject) => { + if (event.key !== "a") { + throw new Error("Failed mapping"); + } + return bufferChunk.ts + 3 <= event.ts ? true : false; + }, + ); + source + .pipe(accumulaterStream) + .on("error", (err: any) => { + source.pipe(accumulaterStream); + accumulaterStream.resume(); + expect(err.message).to.equal("Failed mapping"); + t.pass(); + }) + .on("end", () => { + t.end(); + }); - input.forEach(item => { - source.push(item); - }); - source.push(null); -}); + input.forEach(item => { + source.push(item); + }); + source.push(null); + }, +); diff --git a/src/functions/accumulator/index.ts b/src/functions/accumulator/index.ts index a801dad..8beaf46 100644 --- a/src/functions/accumulator/index.ts +++ b/src/functions/accumulator/index.ts @@ -35,6 +35,17 @@ function _sliding( return (event: T, buffer: T[], stream: Transform) => { if (key) { let index = 0; + if (event[key] === undefined) { + stream.emit( + "error", + new Error( + `Key is missing in event: (${key}, ${JSON.stringify( + event, + )})`, + ), + ); + return; + } while ( index < buffer.length && buffer[index][key] + windowLength <= event[key] @@ -96,6 +107,7 @@ function _rolling( )})`, ), ); + return; } else if ( buffer.length > 0 && buffer[0][key] + windowLength <= event[key] From 5a9fcc94a6498e7660d1b838ce90bc15f75c8df4 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 15 Aug 2019 15:42:54 -0400 Subject: [PATCH 21/69] Refactor --- src/functions/accumulator/index.ts | 2 + src/functions/map/map.spec.ts | 64 +++++++++++++++--------------- 2 files changed, 35 insertions(+), 31 deletions(-) diff --git a/src/functions/accumulator/index.ts b/src/functions/accumulator/index.ts index 8beaf46..c7b9f51 100644 --- a/src/functions/accumulator/index.ts +++ b/src/functions/accumulator/index.ts @@ -44,6 +44,7 @@ function _sliding( )})`, ), ); + stream.resume(); return; } while ( @@ -107,6 +108,7 @@ function _rolling( )})`, ), ); + stream.resume(); return; } else if ( buffer.length > 0 && diff --git a/src/functions/map/map.spec.ts b/src/functions/map/map.spec.ts index 2812503..5d6a114 100644 --- a/src/functions/map/map.spec.ts +++ b/src/functions/map/map.spec.ts @@ -6,10 +6,11 @@ import { map } from "."; test.cb("map() maps elements synchronously", t => { t.plan(3); const source = new Readable({ objectMode: true }); + const mapStream = map((element: string) => element.toUpperCase()); const expectedElements = ["A", "B", "C"]; let i = 0; source - .pipe(map((element: string) => element.toUpperCase())) + .pipe(mapStream) .on("data", (element: string) => { expect(element).to.equal(expectedElements[i]); t.pass(); @@ -27,15 +28,14 @@ test.cb("map() maps elements synchronously", t => { test.cb("map() maps elements asynchronously", t => { t.plan(3); const source = new Readable({ objectMode: true }); + const mapStream = map(async (element: string) => { + await Promise.resolve(); + return element.toUpperCase(); + }); const expectedElements = ["A", "B", "C"]; let i = 0; source - .pipe( - map(async (element: string) => { - await Promise.resolve(); - return element.toUpperCase(); - }), - ) + .pipe(mapStream) .on("data", (element: string) => { expect(element).to.equal(expectedElements[i]); t.pass(); @@ -51,19 +51,23 @@ test.cb("map() maps elements asynchronously", t => { }); test.cb("map() emits errors during synchronous mapping", t => { - t.plan(2); + t.plan(3); const source = new Readable({ objectMode: true }); + const mapStream = map((element: string) => { + if (element !== "b") { + throw new Error("Failed mapping"); + } + return element.toUpperCase(); + }); source - .pipe( - map((element: string) => { - if (element !== "a") { - throw new Error("Failed mapping"); - } - return element.toUpperCase(); - }), - ) - .resume() + .pipe(mapStream) + .on("data", data => { + expect(data).to.equal("B"); + t.pass(); + }) .on("error", err => { + source.pipe(mapStream); + mapStream.resume(); expect(err.message).to.equal("Failed mapping"); t.pass(); }) @@ -77,31 +81,29 @@ test.cb("map() emits errors during synchronous mapping", t => { test("map() emits errors during asynchronous mapping", t => { t.plan(1); - return new Promise((resolve, reject) => { + return new Promise((resolve, _) => { const source = new Readable({ objectMode: true }); + const mapStream = map(async (element: string) => { + await Promise.resolve(); + if (element === "b") { + throw new Error("Failed mapping"); + } + return element.toUpperCase(); + }); source - .pipe( - map(async (element: string) => { - await Promise.resolve(); - if (element !== "a") { - throw new Error("Failed mapping"); - } - return element.toUpperCase(); - }), - ) - .resume() + .pipe(mapStream) .on("error", err => { expect(err.message).to.equal("Failed mapping"); t.pass(); resolve(); }) - .on("end", () => { - t.fail(); - }); + .on("end", () => t.fail); source.push("a"); source.push("b"); source.push("c"); source.push(null); + source.push(null); + source.push(null); }); }); From d6d974ee0d0b4312fb4de690911ad9a5acd34563 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 15 Aug 2019 15:54:53 -0400 Subject: [PATCH 22/69] baseDefinitions --- src/functions/accumulator/index.ts | 8 ++++++-- src/functions/{definitions.ts => baseDefinitions.ts} | 7 ------- src/functions/batch/index.ts | 2 +- src/functions/collect/index.ts | 2 +- src/functions/filter/index.ts | 2 +- src/functions/flatMap/index.ts | 2 +- src/functions/join/index.ts | 2 +- src/functions/map/index.ts | 2 +- src/functions/parallelMap/index.ts | 2 +- src/functions/parse/index.ts | 2 +- src/functions/rate/index.ts | 2 +- src/functions/reduce/index.ts | 2 +- src/functions/replace/index.ts | 2 +- src/functions/split/index.ts | 2 +- src/functions/stringify/index.ts | 2 +- src/functions/unbatch/index.ts | 2 +- 16 files changed, 20 insertions(+), 23 deletions(-) rename src/functions/{definitions.ts => baseDefinitions.ts} (73%) diff --git a/src/functions/accumulator/index.ts b/src/functions/accumulator/index.ts index c7b9f51..81531c7 100644 --- a/src/functions/accumulator/index.ts +++ b/src/functions/accumulator/index.ts @@ -1,15 +1,19 @@ import { Transform } from "stream"; import { AccumulatorByIteratee, FlushStrategy } from "./definitions"; +import { TransformOptions } from "../baseDefinitions"; import { batch } from "../../index"; function _accumulator( accumulateBy: (data: T, buffer: T[], stream: Transform) => void, shouldFlush: boolean = true, + options: TransformOptions = { + readableObjectMode: true, + writableObjectMode: true, + }, ) { const buffer: T[] = []; return new Transform({ - readableObjectMode: true, - writableObjectMode: true, + ...options, transform(data: any, encoding, callback) { try { accumulateBy(data, buffer, this); diff --git a/src/functions/definitions.ts b/src/functions/baseDefinitions.ts similarity index 73% rename from src/functions/definitions.ts rename to src/functions/baseDefinitions.ts index 6cc97a8..791ada9 100644 --- a/src/functions/definitions.ts +++ b/src/functions/baseDefinitions.ts @@ -21,10 +21,3 @@ export type JsonValue = JsonPrimitive | JsonPrimitive[]; export interface JsonParseOptions { pretty: boolean; } - -export enum FlushStrategy { - rolling = "rolling", - sliding = "sliding", -} - -export type AccumulatorByIteratee = (event: T, bufferChunk: T) => boolean; diff --git a/src/functions/batch/index.ts b/src/functions/batch/index.ts index 6ff3d87..4dad33a 100644 --- a/src/functions/batch/index.ts +++ b/src/functions/batch/index.ts @@ -1,5 +1,5 @@ import { Transform } from "stream"; -import { TransformOptions } from "../definitions"; +import { TransformOptions } from "../baseDefinitions"; /** * Stores chunks of data internally in array and batches when batchSize is reached. * diff --git a/src/functions/collect/index.ts b/src/functions/collect/index.ts index 11ad423..fbed881 100644 --- a/src/functions/collect/index.ts +++ b/src/functions/collect/index.ts @@ -1,5 +1,5 @@ import { Transform } from "stream"; -import { ThroughOptions } from "../definitions"; +import { ThroughOptions } from "../baseDefinitions"; /** * Return a ReadWrite stream that collects streamed chunks into an array or buffer * @param options diff --git a/src/functions/filter/index.ts b/src/functions/filter/index.ts index 49e7a05..fad186d 100644 --- a/src/functions/filter/index.ts +++ b/src/functions/filter/index.ts @@ -1,5 +1,5 @@ import { Transform } from "stream"; -import { ThroughOptions } from "../definitions"; +import { ThroughOptions } from "../baseDefinitions"; /** * Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold * @param predicate Predicate with which to filter scream chunks diff --git a/src/functions/flatMap/index.ts b/src/functions/flatMap/index.ts index 9e90c04..32244a2 100644 --- a/src/functions/flatMap/index.ts +++ b/src/functions/flatMap/index.ts @@ -1,5 +1,5 @@ import { Transform } from "stream"; -import { TransformOptions } from "../definitions"; +import { TransformOptions } from "../baseDefinitions"; /** * Return a ReadWrite stream that flat maps streamed chunks * @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such) diff --git a/src/functions/join/index.ts b/src/functions/join/index.ts index 0bd22d3..8c7352b 100644 --- a/src/functions/join/index.ts +++ b/src/functions/join/index.ts @@ -1,6 +1,6 @@ import { Transform } from "stream"; import { StringDecoder } from "string_decoder"; -import { WithEncoding } from "../definitions"; +import { WithEncoding } from "../baseDefinitions"; /** * Return a ReadWrite stream that joins streamed chunks using the given separator * @param separator Separator to join with diff --git a/src/functions/map/index.ts b/src/functions/map/index.ts index 7ddfbed..4941b98 100644 --- a/src/functions/map/index.ts +++ b/src/functions/map/index.ts @@ -1,5 +1,5 @@ import { Transform } from "stream"; -import { TransformOptions } from "../definitions"; +import { TransformOptions } from "../baseDefinitions"; /** * Return a ReadWrite stream that maps streamed chunks * @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such) diff --git a/src/functions/parallelMap/index.ts b/src/functions/parallelMap/index.ts index ec82f35..353ab29 100644 --- a/src/functions/parallelMap/index.ts +++ b/src/functions/parallelMap/index.ts @@ -1,6 +1,6 @@ import { Transform } from "stream"; import { sleep } from "../../helpers"; -import { TransformOptions } from "../definitions"; +import { TransformOptions } from "../baseDefinitions"; /** * Limits number of parallel processes in flight. * @param parallel Max number of parallel processes. diff --git a/src/functions/parse/index.ts b/src/functions/parse/index.ts index 1e32cb2..d6ac299 100644 --- a/src/functions/parse/index.ts +++ b/src/functions/parse/index.ts @@ -1,6 +1,6 @@ import { Transform } from "stream"; import { StringDecoder } from "string_decoder"; -import { SerializationFormats } from "../definitions"; +import { SerializationFormats } from "../baseDefinitions"; /** * Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk * must be a fully defined JSON string. diff --git a/src/functions/rate/index.ts b/src/functions/rate/index.ts index 5e88950..febcc1e 100644 --- a/src/functions/rate/index.ts +++ b/src/functions/rate/index.ts @@ -1,7 +1,7 @@ import { Transform } from "stream"; import { performance } from "perf_hooks"; import { sleep } from "../../helpers"; -import { TransformOptions } from "../definitions"; +import { TransformOptions } from "../baseDefinitions"; /** * Limits date of data transferred into stream. * @param targetRate Desired rate in ms diff --git a/src/functions/reduce/index.ts b/src/functions/reduce/index.ts index f7654fb..743d156 100644 --- a/src/functions/reduce/index.ts +++ b/src/functions/reduce/index.ts @@ -1,5 +1,5 @@ import { Transform } from "stream"; -import { TransformOptions } from "../definitions"; +import { TransformOptions } from "../baseDefinitions"; /** * Return a ReadWrite stream that reduces streamed chunks down to a single value and yield that * value diff --git a/src/functions/replace/index.ts b/src/functions/replace/index.ts index 462103c..c31f369 100644 --- a/src/functions/replace/index.ts +++ b/src/functions/replace/index.ts @@ -1,6 +1,6 @@ import { Transform } from "stream"; import { StringDecoder } from "string_decoder"; -import { WithEncoding } from "../definitions"; +import { WithEncoding } from "../baseDefinitions"; /** * Return a ReadWrite stream that replaces occurrences of the given string or regular expression in * the streamed chunks with the specified replacement string diff --git a/src/functions/split/index.ts b/src/functions/split/index.ts index fb0f319..a031c8c 100644 --- a/src/functions/split/index.ts +++ b/src/functions/split/index.ts @@ -1,6 +1,6 @@ import { Transform } from "stream"; import { StringDecoder } from "string_decoder"; -import { WithEncoding } from "../definitions"; +import { WithEncoding } from "../baseDefinitions"; /** * Return a ReadWrite stream that splits streamed chunks using the given separator * @param separator Separator to split by, defaulting to "\n" diff --git a/src/functions/stringify/index.ts b/src/functions/stringify/index.ts index 3ac5a6f..5b476af 100644 --- a/src/functions/stringify/index.ts +++ b/src/functions/stringify/index.ts @@ -1,5 +1,5 @@ import { Transform } from "stream"; -import { JsonValue, JsonParseOptions } from "../definitions"; +import { JsonValue, JsonParseOptions } from "../baseDefinitions"; /** * Return a ReadWrite stream that stringifies the streamed chunks to JSON diff --git a/src/functions/unbatch/index.ts b/src/functions/unbatch/index.ts index b0dd51c..946c754 100644 --- a/src/functions/unbatch/index.ts +++ b/src/functions/unbatch/index.ts @@ -1,5 +1,5 @@ import { Transform } from "stream"; -import { TransformOptions } from "../definitions"; +import { TransformOptions } from "../baseDefinitions"; /** * Unbatches and sends individual chunks of data */ From 505fefeeb50c58c2b7bf9433caba4ff5cb2e6f37 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 15 Aug 2019 17:06:54 -0400 Subject: [PATCH 23/69] Save --- src/functions/batch/batch.spec.ts | 58 ++++++ src/functions/child/child.spec.ts | 28 +++ src/functions/child/index.ts | 2 + src/functions/collect/collect.spec.ts | 132 +++++++++++++ src/functions/collect/index.ts | 2 +- src/functions/concat/concat.spec.ts | 180 ++++++++++++++++++ src/functions/concat/index.ts | 2 +- src/functions/duplex/duplex.spec.ts | 28 +++ src/functions/duplex/index.ts | 7 +- src/functions/flatMap/index.ts | 2 +- src/functions/fromArray/index.ts | 2 +- src/functions/index.ts | 52 ++--- src/functions/join/index.ts | 2 +- src/functions/last/index.ts | 3 +- src/functions/last/last.spec.ts | 15 ++ src/functions/merge/merge.spec.ts | 60 ++++++ src/functions/parallelMap/parallelMap.spec.ts | 77 ++++++++ src/functions/parse/index.ts | 2 +- src/functions/parse/parse.spec.ts | 40 ++++ src/functions/rate/index.ts | 3 +- src/functions/rate/rate.spec.ts | 67 +++++++ src/functions/replace/index.ts | 2 +- src/functions/split/index.ts | 2 +- src/functions/stringify/index.ts | 2 +- src/functions/stringify/stringify.spec.ts | 61 ++++++ src/functions/unbatch/unbatch.spec.ts | 26 +++ 26 files changed, 812 insertions(+), 45 deletions(-) create mode 100644 src/functions/batch/batch.spec.ts create mode 100644 src/functions/child/child.spec.ts create mode 100644 src/functions/collect/collect.spec.ts create mode 100644 src/functions/concat/concat.spec.ts create mode 100644 src/functions/duplex/duplex.spec.ts create mode 100644 src/functions/last/last.spec.ts create mode 100644 src/functions/merge/merge.spec.ts create mode 100644 src/functions/parallelMap/parallelMap.spec.ts create mode 100644 src/functions/parse/parse.spec.ts create mode 100644 src/functions/rate/rate.spec.ts create mode 100644 src/functions/stringify/stringify.spec.ts create mode 100644 src/functions/unbatch/unbatch.spec.ts diff --git a/src/functions/batch/batch.spec.ts b/src/functions/batch/batch.spec.ts new file mode 100644 index 0000000..056af0d --- /dev/null +++ b/src/functions/batch/batch.spec.ts @@ -0,0 +1,58 @@ +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { batch } from "."; + +test.cb("batch() batches chunks together", t => { + t.plan(3); + const source = new Readable({ objectMode: true }); + const expectedElements = [["a", "b", "c"], ["d", "e", "f"], ["g"]]; + let i = 0; + source + .pipe(batch(3)) + .on("data", (element: string[]) => { + expect(element).to.deep.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push("d"); + source.push("e"); + source.push("f"); + source.push("g"); + source.push(null); +}); + +test.cb("batch() yields a batch after the timeout", t => { + t.plan(3); + const source = new Readable({ + objectMode: true, + read(size: number) {}, + }); + const expectedElements = [["a", "b"], ["c"], ["d"]]; + let i = 0; + source + .pipe(batch(3)) + .on("data", (element: string[]) => { + expect(element).to.deep.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.fail) + .on("end", t.end); + + source.push("a"); + source.push("b"); + setTimeout(() => { + source.push("c"); + }, 600); + setTimeout(() => { + source.push("d"); + source.push(null); + }, 600 * 2); +}); diff --git a/src/functions/child/child.spec.ts b/src/functions/child/child.spec.ts new file mode 100644 index 0000000..fd1ae79 --- /dev/null +++ b/src/functions/child/child.spec.ts @@ -0,0 +1,28 @@ +import * as cp from "child_process"; +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { child } from "."; + +test.cb( + "child() allows easily writing to child process stdin and reading from its stdout", + t => { + t.plan(1); + const source = new Readable(); + const catProcess = cp.exec("cat"); + let out = ""; + source + .pipe(child(catProcess)) + .on("data", chunk => (out += chunk)) + .on("error", t.end) + .on("end", () => { + expect(out).to.equal("abcdef"); + t.pass(); + t.end(); + }); + source.push("ab"); + source.push("cd"); + source.push("ef"); + source.push(null); + }, +); diff --git a/src/functions/child/index.ts b/src/functions/child/index.ts index efe4f90..e564eec 100644 --- a/src/functions/child/index.ts +++ b/src/functions/child/index.ts @@ -1,3 +1,5 @@ +import { ChildProcess } from "child_process"; +import { duplex } from "../baseFunctions"; /** * Return a Duplex stream from a child process' stdin and stdout * @param childProcess Child process from which to create duplex stream diff --git a/src/functions/collect/collect.spec.ts b/src/functions/collect/collect.spec.ts new file mode 100644 index 0000000..b585fe9 --- /dev/null +++ b/src/functions/collect/collect.spec.ts @@ -0,0 +1,132 @@ +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { collect } from "."; + +test.cb( + "collect() collects streamed elements into an array (object, flowing mode)", + t => { + t.plan(1); + const source = new Readable({ objectMode: true }); + + source + .pipe(collect({ objectMode: true })) + .on("data", collected => { + expect(collected).to.deep.equal(["a", "b", "c"]); + t.pass(); + }) + .on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); + }, +); + +test.cb( + "collect() collects streamed elements into an array (object, paused mode)", + t => { + t.plan(1); + const source = new Readable({ objectMode: true }); + const collector = source.pipe(collect({ objectMode: true })); + + collector + .on("readable", () => { + let collected = collector.read(); + while (collected !== null) { + expect(collected).to.deep.equal(["a", "b", "c"]); + t.pass(); + collected = collector.read(); + } + }) + .on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); + }, +); + +test.cb( + "collect() collects streamed bytes into a buffer (non-object, flowing mode)", + t => { + t.plan(1); + const source = new Readable({ objectMode: false }); + + source + .pipe(collect()) + .on("data", collected => { + expect(collected).to.deep.equal(Buffer.from("abc")); + t.pass(); + }) + .on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); + }, +); + +test.cb( + "collect() collects streamed bytes into a buffer (non-object, paused mode)", + t => { + t.plan(1); + const source = new Readable({ objectMode: false }); + const collector = source.pipe(collect({ objectMode: false })); + collector + .on("readable", () => { + let collected = collector.read(); + while (collected !== null) { + expect(collected).to.deep.equal(Buffer.from("abc")); + t.pass(); + collected = collector.read(); + } + }) + .on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); + }, +); + +test.cb( + "collect() emits an empty array if the source was empty (object mode)", + t => { + t.plan(1); + const source = new Readable({ objectMode: true }); + const collector = source.pipe(collect({ objectMode: true })); + collector + .on("data", collected => { + expect(collected).to.deep.equal([]); + t.pass(); + }) + .on("error", t.end) + .on("end", t.end); + + source.push(null); + }, +); + +test.cb( + "collect() emits nothing if the source was empty (non-object mode)", + t => { + t.plan(0); + const source = new Readable({ objectMode: false }); + const collector = source.pipe(collect({ objectMode: false })); + collector + .on("data", () => t.fail()) + .on("error", t.end) + .on("end", t.end); + + source.push(null); + }, +); diff --git a/src/functions/collect/index.ts b/src/functions/collect/index.ts index fbed881..57dd8e0 100644 --- a/src/functions/collect/index.ts +++ b/src/functions/collect/index.ts @@ -7,7 +7,7 @@ import { ThroughOptions } from "../baseDefinitions"; */ export function collect( options: ThroughOptions = { objectMode: false }, -): NodeJS.ReadWriteStream { +): Transform { const collected: any[] = []; return new Transform({ readableObjectMode: options.objectMode, diff --git a/src/functions/concat/concat.spec.ts b/src/functions/concat/concat.spec.ts new file mode 100644 index 0000000..0750174 --- /dev/null +++ b/src/functions/concat/concat.spec.ts @@ -0,0 +1,180 @@ +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { concat, collect } from "../baseFunctions"; + +test.cb( + "concat() concatenates multiple readable streams (object, flowing mode)", + t => { + t.plan(6); + const source1 = new Readable({ objectMode: true }); + const source2 = new Readable({ objectMode: true }); + const expectedElements = ["a", "b", "c", "d", "e", "f"]; + let i = 0; + concat(source1, source2) + .on("data", (element: string) => { + expect(element).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source1.push("a"); + source2.push("d"); + source1.push("b"); + source2.push("e"); + source1.push("c"); + source2.push("f"); + source2.push(null); + source1.push(null); + }, +); + +test.cb( + "concat() concatenates multiple readable streams (object, paused mode)", + t => { + t.plan(6); + const source1 = new Readable({ objectMode: true }); + const source2 = new Readable({ objectMode: true }); + const expectedElements = ["a", "b", "c", "d", "e", "f"]; + let i = 0; + const concatenation = concat(source1, source2) + .on("readable", () => { + let element = concatenation.read(); + while (element !== null) { + expect(element).to.equal(expectedElements[i]); + t.pass(); + i++; + element = concatenation.read(); + } + }) + .on("error", t.end) + .on("end", t.end); + + source1.push("a"); + source2.push("d"); + source1.push("b"); + source2.push("e"); + source1.push("c"); + source2.push("f"); + source2.push(null); + source1.push(null); + }, +); + +test.cb( + "concat() concatenates multiple readable streams (non-object, flowing mode)", + t => { + t.plan(6); + const source1 = new Readable({ objectMode: false }); + const source2 = new Readable({ objectMode: false }); + const expectedElements = ["a", "b", "c", "d", "e", "f"]; + let i = 0; + concat(source1, source2) + .on("data", (element: string) => { + expect(element).to.deep.equal(Buffer.from(expectedElements[i])); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source1.push("a"); + source2.push("d"); + source1.push("b"); + source2.push("e"); + source1.push("c"); + source2.push("f"); + source2.push(null); + source1.push(null); + }, +); + +test.cb( + "concat() concatenates multiple readable streams (non-object, paused mode)", + t => { + t.plan(6); + const source1 = new Readable({ objectMode: false, read: () => ({}) }); + const source2 = new Readable({ objectMode: false, read: () => ({}) }); + const expectedElements = ["a", "b", "c", "d", "e", "f"]; + let i = 0; + const concatenation = concat(source1, source2) + .on("readable", () => { + let element = concatenation.read(); + while (element !== null) { + expect(element).to.deep.equal( + Buffer.from(expectedElements[i]), + ); + t.pass(); + i++; + element = concatenation.read(); + } + }) + .on("error", t.end) + .on("end", t.end); + + source1.push("a"); + setTimeout(() => source2.push("d"), 10); + setTimeout(() => source1.push("b"), 20); + setTimeout(() => source2.push("e"), 30); + setTimeout(() => source1.push("c"), 40); + setTimeout(() => source2.push("f"), 50); + setTimeout(() => source2.push(null), 60); + setTimeout(() => source1.push(null), 70); + }, +); + +test.cb("concat() concatenates a single readable stream (object mode)", t => { + t.plan(3); + const source = new Readable({ objectMode: true }); + const expectedElements = ["a", "b", "c", "d", "e", "f"]; + let i = 0; + concat(source) + .on("data", (element: string) => { + expect(element).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); +}); + +test.cb( + "concat() concatenates a single readable stream (non-object mode)", + t => { + t.plan(3); + const source = new Readable({ objectMode: false }); + const expectedElements = ["a", "b", "c", "d", "e", "f"]; + let i = 0; + concat(source) + .on("data", (element: string) => { + expect(element).to.deep.equal(Buffer.from(expectedElements[i])); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); + }, +); + +test.cb("concat() concatenates empty list of readable streams", t => { + t.plan(0); + concat() + .pipe(collect()) + .on("data", _ => { + t.fail(); + }) + .on("error", t.end) + .on("end", t.end); +}); diff --git a/src/functions/concat/index.ts b/src/functions/concat/index.ts index 8064b30..af79db9 100644 --- a/src/functions/concat/index.ts +++ b/src/functions/concat/index.ts @@ -3,7 +3,7 @@ import { Readable } from "stream"; * Return a Readable stream of readable streams concatenated together * @param streams Readable streams to concatenate */ -export function concat(...streams: Readable[]): Readable { +export function concat(...streams: NodeJS.ReadableStream[]): Readable { let isStarted = false; let currentStreamIndex = 0; const startCurrentStream = () => { diff --git a/src/functions/duplex/duplex.spec.ts b/src/functions/duplex/duplex.spec.ts new file mode 100644 index 0000000..c1ef28b --- /dev/null +++ b/src/functions/duplex/duplex.spec.ts @@ -0,0 +1,28 @@ +import * as cp from "child_process"; +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { duplex } from "../baseFunctions"; + +test.cb( + "duplex() combines a writable and readable stream into a ReadWrite stream", + t => { + t.plan(1); + const source = new Readable(); + const catProcess = cp.exec("cat"); + let out = ""; + source + .pipe(duplex(catProcess.stdin!, catProcess.stdout!)) + .on("data", chunk => (out += chunk)) + .on("error", t.end) + .on("end", () => { + expect(out).to.equal("abcdef"); + t.pass(); + t.end(); + }); + source.push("ab"); + source.push("cd"); + source.push("ef"); + source.push(null); + }, +); diff --git a/src/functions/duplex/index.ts b/src/functions/duplex/index.ts index 2470da1..b1e967a 100644 --- a/src/functions/duplex/index.ts +++ b/src/functions/duplex/index.ts @@ -1,11 +1,14 @@ -import { Duplex, Writable, Readable } from "stream"; +import { Duplex } from "stream"; /** * Return a Duplex stream from a writable stream that is assumed to somehow, when written to, * cause the given readable stream to yield chunks * @param writable Writable stream assumed to cause the readable stream to yield chunks when written to * @param readable Readable stream assumed to yield chunks when the writable stream is written to */ -export function duplex(writable: Writable, readable: Readable) { +export function duplex( + writable: NodeJS.WritableStream, + readable: NodeJS.ReadableStream, +) { const wrapper = new Duplex({ readableObjectMode: true, writableObjectMode: true, diff --git a/src/functions/flatMap/index.ts b/src/functions/flatMap/index.ts index 32244a2..3497ca7 100644 --- a/src/functions/flatMap/index.ts +++ b/src/functions/flatMap/index.ts @@ -15,7 +15,7 @@ export function flatMap( readableObjectMode: true, writableObjectMode: true, }, -): NodeJS.ReadWriteStream { +): Transform { return new Transform({ ...options, async transform(chunk: T, encoding, callback) { diff --git a/src/functions/fromArray/index.ts b/src/functions/fromArray/index.ts index f92654e..54e01a9 100644 --- a/src/functions/fromArray/index.ts +++ b/src/functions/fromArray/index.ts @@ -3,7 +3,7 @@ import { Readable } from "stream"; * Convert an array into a Readable stream of its elements * @param array Array of elements to stream */ -export function fromArray(array: any[]): NodeJS.ReadableStream { +export function fromArray(array: any[]): Readable { let cursor = 0; return new Readable({ objectMode: true, diff --git a/src/functions/index.ts b/src/functions/index.ts index 2100353..247400f 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -1,4 +1,4 @@ -import { Readable, Writable, Transform } from "stream"; +import { Readable, Writable, Transform, Duplex } from "stream"; import { ChildProcess } from "child_process"; import * as baseFunctions from "./baseFunctions"; @@ -7,15 +7,18 @@ import { TransformOptions, WithEncoding, JsonParseOptions, +} from "./baseDefinitions"; + +import { FlushStrategy, AccumulatorByIteratee, -} from "./definitions"; +} from "./accumulator/definitions"; /** * Convert an array into a Readable stream of its elements * @param array Array of elements to stream */ -export function fromArray(array: any[]): NodeJS.ReadableStream { +export function fromArray(array: any[]): Readable { return baseFunctions.fromArray(array); } @@ -45,7 +48,7 @@ export function flatMap( | ((chunk: T, encoding: string) => R[]) | ((chunk: T, encoding: string) => Promise), options?: TransformOptions, -): NodeJS.ReadWriteStream { +): Transform { return baseFunctions.flatMap(mapper, options); } @@ -60,7 +63,7 @@ export function filter( | ((chunk: T, encoding: string) => boolean) | ((chunk: T, encoding: string) => Promise), options?: ThroughOptions, -): NodeJS.ReadWriteStream { +): Transform { return baseFunctions.filter(mapper, options); } @@ -79,7 +82,7 @@ export function reduce( | ((previousValue: R, chunk: T, encoding: string) => Promise), initialValue: R, options?: TransformOptions, -): NodeJS.ReadWriteStream { +): Transform { return baseFunctions.reduce(iteratee, initialValue, options); } @@ -92,7 +95,7 @@ export function reduce( export function split( separator?: string | RegExp, options?: WithEncoding, -): NodeJS.ReadWriteStream { +): Transform { return baseFunctions.split(separator, options); } @@ -102,10 +105,7 @@ export function split( * @param options? Defaults to encoding: utf8 * @param options.encoding? Encoding written chunks are assumed to use */ -export function join( - separator: string, - options?: WithEncoding, -): NodeJS.ReadWriteStream { +export function join(separator: string, options?: WithEncoding): Transform { return baseFunctions.join(separator, options); } @@ -121,7 +121,7 @@ export function replace( searchValue: string | RegExp, replaceValue: string, options?: WithEncoding, -): NodeJS.ReadWriteStream { +): Transform { return baseFunctions.replace(searchValue, replaceValue, options); } @@ -129,7 +129,7 @@ export function replace( * Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk * must be a fully defined JSON string in utf8. */ -export function parse(): NodeJS.ReadWriteStream { +export function parse(): Transform { return baseFunctions.parse(); } @@ -139,7 +139,7 @@ export function parse(): NodeJS.ReadWriteStream { * @param options.pretty If true, whitespace is inserted into the stringified chunks. * */ -export function stringify(options?: JsonParseOptions): NodeJS.ReadWriteStream { +export function stringify(options?: JsonParseOptions): Transform { return baseFunctions.stringify(options); } @@ -148,7 +148,7 @@ export function stringify(options?: JsonParseOptions): NodeJS.ReadWriteStream { * @param options? * @param options.objectMode? Whether this stream should behave as a stream of objects */ -export function collect(options?: ThroughOptions): NodeJS.ReadWriteStream { +export function collect(options?: ThroughOptions): Transform { return baseFunctions.collect(options); } @@ -156,9 +156,7 @@ export function collect(options?: ThroughOptions): NodeJS.ReadWriteStream { * Return a Readable stream of readable streams concatenated together * @param streams Readable streams to concatenate */ -export function concat( - ...streams: NodeJS.ReadableStream[] -): NodeJS.ReadableStream { +export function concat(...streams: Readable[]): Readable { return baseFunctions.concat(...streams); } @@ -166,9 +164,7 @@ export function concat( * Return a Readable stream of readable streams concatenated together * @param streams Readable streams to merge */ -export function merge( - ...streams: NodeJS.ReadableStream[] -): NodeJS.ReadableStream { +export function merge(...streams: Readable[]): Readable { return baseFunctions.merge(...streams); } @@ -178,10 +174,7 @@ export function merge( * @param writable Writable stream assumed to cause the readable stream to yield chunks when written to * @param readable Readable stream assumed to yield chunks when the writable stream is written to */ -export function duplex( - writable: Writable, - readable: Readable, -): NodeJS.ReadWriteStream { +export function duplex(writable: Writable, readable: Readable): Duplex { return baseFunctions.duplex(writable, readable); } @@ -189,7 +182,7 @@ export function duplex( * Return a Duplex stream from a child process' stdin and stdout * @param childProcess Child process from which to create duplex stream */ -export function child(childProcess: ChildProcess): NodeJS.ReadWriteStream { +export function child(childProcess: ChildProcess): Duplex { return baseFunctions.child(childProcess); } @@ -214,7 +207,7 @@ export function batch(batchSize: number, maxBatchAge?: number): Transform { /** * Unbatches and sends individual chunks of data */ -export function unbatch(): NodeJS.ReadWriteStream { +export function unbatch(): Transform { return baseFunctions.unbatch(); } @@ -224,10 +217,7 @@ export function unbatch(): NodeJS.ReadWriteStream { * @param targetRate? Desired rate in ms * @param period? Period to sleep for when rate is above or equal to targetRate */ -export function rate( - targetRate?: number, - period?: number, -): NodeJS.ReadWriteStream { +export function rate(targetRate?: number, period?: number): Transform { return baseFunctions.rate(targetRate, period); } diff --git a/src/functions/join/index.ts b/src/functions/join/index.ts index 8c7352b..d3772c0 100644 --- a/src/functions/join/index.ts +++ b/src/functions/join/index.ts @@ -10,7 +10,7 @@ import { WithEncoding } from "../baseDefinitions"; export function join( separator: string, options: WithEncoding = { encoding: "utf8" }, -): NodeJS.ReadWriteStream { +): Transform { let isFirstChunk = true; const decoder = new StringDecoder(options.encoding); return new Transform({ diff --git a/src/functions/last/index.ts b/src/functions/last/index.ts index baf7440..98422a7 100644 --- a/src/functions/last/index.ts +++ b/src/functions/last/index.ts @@ -1,10 +1,9 @@ -import { Readable } from "stream"; /** * Return a Promise resolving to the last streamed chunk of the given readable stream, after it has * ended * @param readable Readable stream to wait on */ -export function last(readable: Readable): Promise { +export function last(readable: NodeJS.ReadableStream): Promise { let lastChunk: T | null = null; return new Promise((resolve, _) => { readable diff --git a/src/functions/last/last.spec.ts b/src/functions/last/last.spec.ts new file mode 100644 index 0000000..5bb0338 --- /dev/null +++ b/src/functions/last/last.spec.ts @@ -0,0 +1,15 @@ +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { last } from "../baseFunctions"; + +test("last() resolves to the last chunk streamed by the given readable stream", async t => { + const source = new Readable({ objectMode: true }); + const lastPromise = last(source); + source.push("ab"); + source.push("cd"); + source.push("ef"); + source.push(null); + const lastChunk = await lastPromise; + expect(lastChunk).to.equal("ef"); +}); diff --git a/src/functions/merge/merge.spec.ts b/src/functions/merge/merge.spec.ts new file mode 100644 index 0000000..84a8dca --- /dev/null +++ b/src/functions/merge/merge.spec.ts @@ -0,0 +1,60 @@ +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { merge } from "../baseFunctions"; + +test.cb( + "merge() merges multiple readable streams in chunk arrival order", + t => { + t.plan(6); + const source1 = new Readable({ objectMode: true, read: () => ({}) }); + const source2 = new Readable({ objectMode: true, read: () => ({}) }); + const expectedElements = ["a", "d", "b", "e", "c", "f"]; + let i = 0; + merge(source1, source2) + .on("data", (element: string) => { + expect(element).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source1.push("a"); + setTimeout(() => source2.push("d"), 10); + setTimeout(() => source1.push("b"), 20); + setTimeout(() => source2.push("e"), 30); + setTimeout(() => source1.push("c"), 40); + setTimeout(() => source2.push("f"), 50); + setTimeout(() => source2.push(null), 60); + setTimeout(() => source1.push(null), 70); + }, +); + +test.cb("merge() merges a readable stream", t => { + t.plan(3); + const source = new Readable({ objectMode: true, read: () => ({}) }); + const expectedElements = ["a", "b", "c"]; + let i = 0; + merge(source) + .on("data", (element: string) => { + expect(element).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); +}); + +test.cb("merge() merges an empty list of readable streams", t => { + t.plan(0); + merge() + .on("data", () => t.pass()) + .on("error", t.end) + .on("end", t.end); +}); diff --git a/src/functions/parallelMap/parallelMap.spec.ts b/src/functions/parallelMap/parallelMap.spec.ts new file mode 100644 index 0000000..9ae4d37 --- /dev/null +++ b/src/functions/parallelMap/parallelMap.spec.ts @@ -0,0 +1,77 @@ +import { Readable } from "stream"; +import { performance } from "perf_hooks"; +import test from "ava"; +import { expect } from "chai"; +import { parallelMap } from "../baseFunctions"; +import { sleep } from "../../helpers"; + +test.cb("parallelMap() parallel mapping", t => { + t.plan(6); + const offset = 50; + const source = new Readable({ objectMode: true }); + const expectedElements = [ + "a_processed", + "b_processed", + "c_processed", + "d_processed", + "e_processed", + "f_processed", + ]; + interface IPerfData { + start: number; + output?: string; + finish?: number; + } + const orderedResults: IPerfData[] = []; + source + .pipe( + parallelMap(async (data: any) => { + const perfData: IPerfData = { start: performance.now() }; + const c = data + "_processed"; + perfData.output = c; + await sleep(offset); + perfData.finish = performance.now(); + orderedResults.push(perfData); + return c; + }, 2), + ) + .on("data", (element: string) => { + t.true(expectedElements.includes(element)); + }) + .on("error", t.end) + .on("end", async () => { + expect(orderedResults[0].finish).to.be.lessThan( + orderedResults[2].start, + ); + expect(orderedResults[1].finish).to.be.lessThan( + orderedResults[3].start, + ); + expect(orderedResults[2].finish).to.be.lessThan( + orderedResults[4].start, + ); + expect(orderedResults[3].finish).to.be.lessThan( + orderedResults[5].start, + ); + expect(orderedResults[0].start).to.be.lessThan( + orderedResults[2].start + offset, + ); + expect(orderedResults[1].start).to.be.lessThan( + orderedResults[3].start + offset, + ); + expect(orderedResults[2].start).to.be.lessThan( + orderedResults[4].start + offset, + ); + expect(orderedResults[3].start).to.be.lessThan( + orderedResults[5].start + offset, + ); + t.end(); + }); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push("d"); + source.push("e"); + source.push("f"); + source.push(null); +}); diff --git a/src/functions/parse/index.ts b/src/functions/parse/index.ts index d6ac299..3bc9d69 100644 --- a/src/functions/parse/index.ts +++ b/src/functions/parse/index.ts @@ -8,7 +8,7 @@ import { SerializationFormats } from "../baseDefinitions"; */ export function parse( format: SerializationFormats = SerializationFormats.utf8, -): NodeJS.ReadWriteStream { +): Transform { const decoder = new StringDecoder(format); return new Transform({ readableObjectMode: true, diff --git a/src/functions/parse/parse.spec.ts b/src/functions/parse/parse.spec.ts new file mode 100644 index 0000000..0f17b53 --- /dev/null +++ b/src/functions/parse/parse.spec.ts @@ -0,0 +1,40 @@ +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { parse } from "../baseFunctions"; + +test.cb("parse() parses the streamed elements as JSON", t => { + t.plan(3); + const source = new Readable({ objectMode: true }); + const expectedElements = ["abc", {}, []]; + let i = 0; + source + .pipe(parse()) + .on("data", part => { + expect(part).to.deep.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push('"abc"'); + source.push("{}"); + source.push("[]"); + source.push(null); +}); + +test.cb("parse() emits errors on invalid JSON", t => { + t.plan(2); + const source = new Readable({ objectMode: true }); + source + .pipe(parse()) + .resume() + .on("error", () => t.pass()) + .on("end", t.end); + + source.push("{}"); + source.push({}); + source.push([]); + source.push(null); +}); diff --git a/src/functions/rate/index.ts b/src/functions/rate/index.ts index febcc1e..b199efd 100644 --- a/src/functions/rate/index.ts +++ b/src/functions/rate/index.ts @@ -14,11 +14,12 @@ export function rate( readableObjectMode: true, writableObjectMode: true, }, -) { +): Transform { const deltaMS = ((1 / targetRate) * 1000) / period; // Skip half a period let total = 0; const start = performance.now(); return new Transform({ + ...options, async transform(data, encoding, callback) { const currentRate = (total / (performance.now() - start)) * 1000; if (targetRate && currentRate > targetRate) { diff --git a/src/functions/rate/rate.spec.ts b/src/functions/rate/rate.spec.ts new file mode 100644 index 0000000..a88d179 --- /dev/null +++ b/src/functions/rate/rate.spec.ts @@ -0,0 +1,67 @@ +import { Readable } from "stream"; +import { performance } from "perf_hooks"; +import test from "ava"; +import { expect } from "chai"; +import { rate } from "../baseFunctions"; + +test.cb("rate() sends data at desired rate", t => { + t.plan(9); + const fastRate = 150; + const medRate = 50; + const slowRate = 1; + const sourceFast = new Readable({ objectMode: true }); + const sourceMed = new Readable({ objectMode: true }); + const sourceSlow = new Readable({ objectMode: true }); + const expectedElements = ["a", "b", "c"]; + const start = performance.now(); + let i = 0; + let j = 0; + let k = 0; + + sourceFast + .pipe(rate(fastRate)) + .on("data", (element: string[]) => { + const currentRate = (i / (performance.now() - start)) * 1000; + expect(element).to.deep.equal(expectedElements[i]); + expect(currentRate).lessThan(fastRate); + t.pass(); + i++; + }) + .on("error", t.end); + + sourceMed + .pipe(rate(medRate)) + .on("data", (element: string[]) => { + const currentRate = (j / (performance.now() - start)) * 1000; + expect(element).to.deep.equal(expectedElements[j]); + expect(currentRate).lessThan(medRate); + t.pass(); + j++; + }) + .on("error", t.end); + + sourceSlow + .pipe(rate(slowRate, 1)) + .on("data", (element: string[]) => { + const currentRate = (k / (performance.now() - start)) * 1000; + expect(element).to.deep.equal(expectedElements[k]); + expect(currentRate).lessThan(slowRate); + t.pass(); + k++; + }) + .on("error", t.end) + .on("end", t.end); + + sourceFast.push("a"); + sourceFast.push("b"); + sourceFast.push("c"); + sourceFast.push(null); + sourceMed.push("a"); + sourceMed.push("b"); + sourceMed.push("c"); + sourceMed.push(null); + sourceSlow.push("a"); + sourceSlow.push("b"); + sourceSlow.push("c"); + sourceSlow.push(null); +}); diff --git a/src/functions/replace/index.ts b/src/functions/replace/index.ts index c31f369..4726a35 100644 --- a/src/functions/replace/index.ts +++ b/src/functions/replace/index.ts @@ -13,7 +13,7 @@ export function replace( searchValue: string | RegExp, replaceValue: string, options: WithEncoding = { encoding: "utf8" }, -): NodeJS.ReadWriteStream { +): Transform { const decoder = new StringDecoder(options.encoding); return new Transform({ readableObjectMode: true, diff --git a/src/functions/split/index.ts b/src/functions/split/index.ts index a031c8c..4ae3e4e 100644 --- a/src/functions/split/index.ts +++ b/src/functions/split/index.ts @@ -10,7 +10,7 @@ import { WithEncoding } from "../baseDefinitions"; export function split( separator: string | RegExp = "\n", options: WithEncoding = { encoding: "utf8" }, -): NodeJS.ReadWriteStream { +): Transform { let buffered = ""; const decoder = new StringDecoder(options.encoding); diff --git a/src/functions/stringify/index.ts b/src/functions/stringify/index.ts index 5b476af..aaa5918 100644 --- a/src/functions/stringify/index.ts +++ b/src/functions/stringify/index.ts @@ -6,7 +6,7 @@ import { JsonValue, JsonParseOptions } from "../baseDefinitions"; */ export function stringify( options: JsonParseOptions = { pretty: false }, -): NodeJS.ReadWriteStream { +): Transform { return new Transform({ readableObjectMode: true, writableObjectMode: true, diff --git a/src/functions/stringify/stringify.spec.ts b/src/functions/stringify/stringify.spec.ts new file mode 100644 index 0000000..1569ec1 --- /dev/null +++ b/src/functions/stringify/stringify.spec.ts @@ -0,0 +1,61 @@ +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { stringify } from "../baseFunctions"; + +test.cb("stringify() stringifies the streamed elements as JSON", t => { + t.plan(4); + const source = new Readable({ objectMode: true }); + const expectedElements = [ + '"abc"', + "0", + '{"a":"a","b":"b","c":"c"}', + '["a","b","c"]', + ]; + let i = 0; + source + .pipe(stringify()) + .on("data", part => { + expect(part).to.deep.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("abc"); + source.push(0); + source.push({ a: "a", b: "b", c: "c" }); + source.push(["a", "b", "c"]); + source.push(null); +}); + +test.cb( + "stringify() stringifies the streamed elements as pretty-printed JSON", + t => { + t.plan(4); + const source = new Readable({ objectMode: true }); + const expectedElements = [ + '"abc"', + "0", + '{\n "a": "a",\n "b": "b",\n "c": "c"\n}', + '[\n "a",\n "b",\n "c"\n]', + ]; + let i = 0; + source + .pipe(stringify({ pretty: true })) + .on("data", part => { + expect(part).to.deep.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("abc"); + source.push(0); + source.push({ a: "a", b: "b", c: "c" }); + source.push(["a", "b", "c"]); + source.push(null); + }, +); diff --git a/src/functions/unbatch/unbatch.spec.ts b/src/functions/unbatch/unbatch.spec.ts new file mode 100644 index 0000000..6a99a05 --- /dev/null +++ b/src/functions/unbatch/unbatch.spec.ts @@ -0,0 +1,26 @@ +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { unbatch, batch } from "../baseFunctions"; + +test.cb("unbatch() unbatches", t => { + t.plan(3); + const source = new Readable({ objectMode: true }); + const expectedElements = ["a", "b", "c"]; + let i = 0; + source + .pipe(batch(3)) + .pipe(unbatch()) + .on("data", (element: string) => { + expect(element).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); +}); From faac6134af1f6ca86a79acc9a18151431ec9dff4 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Fri, 16 Aug 2019 09:02:54 -0400 Subject: [PATCH 24/69] Refactoring --- package.json | 4 +- .../{accumulator/index.ts => accumulator.ts} | 9 ++-- src/functions/accumulator/definitions.ts | 6 --- src/functions/baseDefinitions.ts | 6 +++ src/functions/{batch/index.ts => batch.ts} | 2 +- src/functions/{child/index.ts => child.ts} | 2 +- .../{collect/index.ts => collect.ts} | 2 +- src/functions/{concat/index.ts => concat.ts} | 0 src/functions/{duplex/index.ts => duplex.ts} | 0 src/functions/{filter/index.ts => filter.ts} | 2 +- .../{flatMap/index.ts => flatMap.ts} | 2 +- .../{fromArray/index.ts => fromArray.ts} | 0 src/functions/index.ts | 5 +-- src/functions/{join/index.ts => join.ts} | 2 +- src/functions/{last/index.ts => last.ts} | 0 src/functions/{map/index.ts => map.ts} | 2 +- src/functions/{merge/index.ts => merge.ts} | 0 .../{parallelMap/index.ts => parallelMap.ts} | 4 +- src/functions/{parse/index.ts => parse.ts} | 2 +- src/functions/{rate/index.ts => rate.ts} | 4 +- src/functions/{reduce/index.ts => reduce.ts} | 2 +- .../{replace/index.ts => replace.ts} | 2 +- src/functions/{split/index.ts => split.ts} | 2 +- .../{stringify/index.ts => stringify.ts} | 2 +- .../{unbatch/index.ts => unbatch.ts} | 2 +- src/index.ts | 1 + .../accumulator => tests}/accumulator.spec.ts | 4 +- {src/functions/batch => tests}/batch.spec.ts | 2 +- {src/functions/child => tests}/child.spec.ts | 2 +- .../collect => tests}/collect.spec.ts | 2 +- .../functions/concat => tests}/concat.spec.ts | 2 +- .../functions/duplex => tests}/duplex.spec.ts | 2 +- .../functions/filter => tests}/filter.spec.ts | 2 +- .../flatMap => tests}/flatMap.spec.ts | 2 +- .../fromArray => tests}/fromArray.spec.ts | 2 +- {src/functions/join => tests}/join.spec.ts | 2 +- {src/functions/last => tests}/last.spec.ts | 2 +- {src/functions/map => tests}/map.spec.ts | 2 +- {src/functions/merge => tests}/merge.spec.ts | 2 +- .../parallelMap => tests}/parallelMap.spec.ts | 4 +- {src/functions/parse => tests}/parse.spec.ts | 2 +- {src/functions/rate => tests}/rate.spec.ts | 6 +-- .../functions/reduce => tests}/reduce.spec.ts | 2 +- .../replace => tests}/replace.spec.ts | 2 +- {src/functions/split => tests}/split.spec.ts | 2 +- .../stringify => tests}/stringify.spec.ts | 2 +- .../unbatch => tests}/unbatch.spec.ts | 2 +- tsconfig.json | 41 ++++++++++++------- 48 files changed, 84 insertions(+), 72 deletions(-) rename src/functions/{accumulator/index.ts => accumulator.ts} (96%) delete mode 100644 src/functions/accumulator/definitions.ts rename src/functions/{batch/index.ts => batch.ts} (95%) rename src/functions/{child/index.ts => child.ts} (92%) rename src/functions/{collect/index.ts => collect.ts} (93%) rename src/functions/{concat/index.ts => concat.ts} (100%) rename src/functions/{duplex/index.ts => duplex.ts} (100%) rename src/functions/{filter/index.ts => filter.ts} (96%) rename src/functions/{flatMap/index.ts => flatMap.ts} (96%) rename src/functions/{fromArray/index.ts => fromArray.ts} (100%) rename src/functions/{join/index.ts => join.ts} (95%) rename src/functions/{last/index.ts => last.ts} (100%) rename src/functions/{map/index.ts => map.ts} (94%) rename src/functions/{merge/index.ts => merge.ts} (100%) rename src/functions/{parallelMap/index.ts => parallelMap.ts} (92%) rename src/functions/{parse/index.ts => parse.ts} (93%) rename src/functions/{rate/index.ts => rate.ts} (91%) rename src/functions/{reduce/index.ts => reduce.ts} (97%) rename src/functions/{replace/index.ts => replace.ts} (95%) rename src/functions/{split/index.ts => split.ts} (95%) rename src/functions/{stringify/index.ts => stringify.ts} (89%) rename src/functions/{unbatch/index.ts => unbatch.ts} (89%) rename {src/functions/accumulator => tests}/accumulator.spec.ts (99%) rename {src/functions/batch => tests}/batch.spec.ts (97%) rename {src/functions/child => tests}/child.spec.ts (95%) rename {src/functions/collect => tests}/collect.spec.ts (99%) rename {src/functions/concat => tests}/concat.spec.ts (99%) rename {src/functions/duplex => tests}/duplex.spec.ts (94%) rename {src/functions/filter => tests}/filter.spec.ts (98%) rename {src/functions/flatMap => tests}/flatMap.spec.ts (98%) rename {src/functions/fromArray => tests}/fromArray.spec.ts (97%) rename {src/functions/join => tests}/join.spec.ts (98%) rename {src/functions/last => tests}/last.spec.ts (91%) rename {src/functions/map => tests}/map.spec.ts (98%) rename {src/functions/merge => tests}/merge.spec.ts (97%) rename {src/functions/parallelMap => tests}/parallelMap.spec.ts (96%) rename {src/functions/parse => tests}/parse.spec.ts (95%) rename {src/functions/rate => tests}/rate.spec.ts (95%) rename {src/functions/reduce => tests}/reduce.spec.ts (98%) rename {src/functions/replace => tests}/replace.spec.ts (98%) rename {src/functions/split => tests}/split.spec.ts (98%) rename {src/functions/stringify => tests}/stringify.spec.ts (97%) rename {src/functions/unbatch => tests}/unbatch.spec.ts (92%) diff --git a/package.json b/package.json index 53eb1bf..d5a2ee0 100644 --- a/package.json +++ b/package.json @@ -22,7 +22,7 @@ "type": "git" }, "scripts": { - "test": "NODE_PATH=src node node_modules/.bin/ava 'src/**/**/*.spec.ts' -e", + "test": "NODE_PATH=src node node_modules/.bin/ava 'tests/*.spec.ts' -e", "test:debug": "NODE_PATH=src node inspect node_modules/ava/profile.ts", "test:all": "NODE_PATH=src node node_modules/.bin/ava", "lint": "tslint -p tsconfig.json", @@ -45,7 +45,7 @@ }, "ava": { "files": [ - "src/**/*.spec.ts" + "tests/*.spec.ts" ], "sources": [ "src/**/*.ts" diff --git a/src/functions/accumulator/index.ts b/src/functions/accumulator.ts similarity index 96% rename from src/functions/accumulator/index.ts rename to src/functions/accumulator.ts index 81531c7..b8faeab 100644 --- a/src/functions/accumulator/index.ts +++ b/src/functions/accumulator.ts @@ -1,7 +1,10 @@ import { Transform } from "stream"; -import { AccumulatorByIteratee, FlushStrategy } from "./definitions"; -import { TransformOptions } from "../baseDefinitions"; -import { batch } from "../../index"; +import { + AccumulatorByIteratee, + FlushStrategy, + TransformOptions, +} from "./baseDefinitions"; +import { batch } from "."; function _accumulator( accumulateBy: (data: T, buffer: T[], stream: Transform) => void, diff --git a/src/functions/accumulator/definitions.ts b/src/functions/accumulator/definitions.ts deleted file mode 100644 index bd6ec50..0000000 --- a/src/functions/accumulator/definitions.ts +++ /dev/null @@ -1,6 +0,0 @@ -export enum FlushStrategy { - rolling = "rolling", - sliding = "sliding", -} - -export type AccumulatorByIteratee = (event: T, bufferChunk: T) => boolean; diff --git a/src/functions/baseDefinitions.ts b/src/functions/baseDefinitions.ts index 791ada9..c3f5461 100644 --- a/src/functions/baseDefinitions.ts +++ b/src/functions/baseDefinitions.ts @@ -21,3 +21,9 @@ export type JsonValue = JsonPrimitive | JsonPrimitive[]; export interface JsonParseOptions { pretty: boolean; } +export enum FlushStrategy { + rolling = "rolling", + sliding = "sliding", +} + +export type AccumulatorByIteratee = (event: T, bufferChunk: T) => boolean; diff --git a/src/functions/batch/index.ts b/src/functions/batch.ts similarity index 95% rename from src/functions/batch/index.ts rename to src/functions/batch.ts index 4dad33a..76551eb 100644 --- a/src/functions/batch/index.ts +++ b/src/functions/batch.ts @@ -1,5 +1,5 @@ import { Transform } from "stream"; -import { TransformOptions } from "../baseDefinitions"; +import { TransformOptions } from "./baseDefinitions"; /** * Stores chunks of data internally in array and batches when batchSize is reached. * diff --git a/src/functions/child/index.ts b/src/functions/child.ts similarity index 92% rename from src/functions/child/index.ts rename to src/functions/child.ts index e564eec..e2e0c22 100644 --- a/src/functions/child/index.ts +++ b/src/functions/child.ts @@ -1,5 +1,5 @@ import { ChildProcess } from "child_process"; -import { duplex } from "../baseFunctions"; +import { duplex } from "./baseFunctions"; /** * Return a Duplex stream from a child process' stdin and stdout * @param childProcess Child process from which to create duplex stream diff --git a/src/functions/collect/index.ts b/src/functions/collect.ts similarity index 93% rename from src/functions/collect/index.ts rename to src/functions/collect.ts index 57dd8e0..33b7330 100644 --- a/src/functions/collect/index.ts +++ b/src/functions/collect.ts @@ -1,5 +1,5 @@ import { Transform } from "stream"; -import { ThroughOptions } from "../baseDefinitions"; +import { ThroughOptions } from "./baseDefinitions"; /** * Return a ReadWrite stream that collects streamed chunks into an array or buffer * @param options diff --git a/src/functions/concat/index.ts b/src/functions/concat.ts similarity index 100% rename from src/functions/concat/index.ts rename to src/functions/concat.ts diff --git a/src/functions/duplex/index.ts b/src/functions/duplex.ts similarity index 100% rename from src/functions/duplex/index.ts rename to src/functions/duplex.ts diff --git a/src/functions/filter/index.ts b/src/functions/filter.ts similarity index 96% rename from src/functions/filter/index.ts rename to src/functions/filter.ts index fad186d..5714dd1 100644 --- a/src/functions/filter/index.ts +++ b/src/functions/filter.ts @@ -1,5 +1,5 @@ import { Transform } from "stream"; -import { ThroughOptions } from "../baseDefinitions"; +import { ThroughOptions } from "./baseDefinitions"; /** * Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold * @param predicate Predicate with which to filter scream chunks diff --git a/src/functions/flatMap/index.ts b/src/functions/flatMap.ts similarity index 96% rename from src/functions/flatMap/index.ts rename to src/functions/flatMap.ts index 3497ca7..ba8915f 100644 --- a/src/functions/flatMap/index.ts +++ b/src/functions/flatMap.ts @@ -1,5 +1,5 @@ import { Transform } from "stream"; -import { TransformOptions } from "../baseDefinitions"; +import { TransformOptions } from "./baseDefinitions"; /** * Return a ReadWrite stream that flat maps streamed chunks * @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such) diff --git a/src/functions/fromArray/index.ts b/src/functions/fromArray.ts similarity index 100% rename from src/functions/fromArray/index.ts rename to src/functions/fromArray.ts diff --git a/src/functions/index.ts b/src/functions/index.ts index 247400f..426d03d 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -7,12 +7,9 @@ import { TransformOptions, WithEncoding, JsonParseOptions, -} from "./baseDefinitions"; - -import { FlushStrategy, AccumulatorByIteratee, -} from "./accumulator/definitions"; +} from "./baseDefinitions"; /** * Convert an array into a Readable stream of its elements diff --git a/src/functions/join/index.ts b/src/functions/join.ts similarity index 95% rename from src/functions/join/index.ts rename to src/functions/join.ts index d3772c0..c1a28b6 100644 --- a/src/functions/join/index.ts +++ b/src/functions/join.ts @@ -1,6 +1,6 @@ import { Transform } from "stream"; import { StringDecoder } from "string_decoder"; -import { WithEncoding } from "../baseDefinitions"; +import { WithEncoding } from "./baseDefinitions"; /** * Return a ReadWrite stream that joins streamed chunks using the given separator * @param separator Separator to join with diff --git a/src/functions/last/index.ts b/src/functions/last.ts similarity index 100% rename from src/functions/last/index.ts rename to src/functions/last.ts diff --git a/src/functions/map/index.ts b/src/functions/map.ts similarity index 94% rename from src/functions/map/index.ts rename to src/functions/map.ts index 4941b98..0bf708d 100644 --- a/src/functions/map/index.ts +++ b/src/functions/map.ts @@ -1,5 +1,5 @@ import { Transform } from "stream"; -import { TransformOptions } from "../baseDefinitions"; +import { TransformOptions } from "./baseDefinitions"; /** * Return a ReadWrite stream that maps streamed chunks * @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such) diff --git a/src/functions/merge/index.ts b/src/functions/merge.ts similarity index 100% rename from src/functions/merge/index.ts rename to src/functions/merge.ts diff --git a/src/functions/parallelMap/index.ts b/src/functions/parallelMap.ts similarity index 92% rename from src/functions/parallelMap/index.ts rename to src/functions/parallelMap.ts index 353ab29..7610f49 100644 --- a/src/functions/parallelMap/index.ts +++ b/src/functions/parallelMap.ts @@ -1,6 +1,6 @@ import { Transform } from "stream"; -import { sleep } from "../../helpers"; -import { TransformOptions } from "../baseDefinitions"; +import { sleep } from "../helpers"; +import { TransformOptions } from "./baseDefinitions"; /** * Limits number of parallel processes in flight. * @param parallel Max number of parallel processes. diff --git a/src/functions/parse/index.ts b/src/functions/parse.ts similarity index 93% rename from src/functions/parse/index.ts rename to src/functions/parse.ts index 3bc9d69..da2ccee 100644 --- a/src/functions/parse/index.ts +++ b/src/functions/parse.ts @@ -1,6 +1,6 @@ import { Transform } from "stream"; import { StringDecoder } from "string_decoder"; -import { SerializationFormats } from "../baseDefinitions"; +import { SerializationFormats } from "./baseDefinitions"; /** * Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk * must be a fully defined JSON string. diff --git a/src/functions/rate/index.ts b/src/functions/rate.ts similarity index 91% rename from src/functions/rate/index.ts rename to src/functions/rate.ts index b199efd..bed7edd 100644 --- a/src/functions/rate/index.ts +++ b/src/functions/rate.ts @@ -1,7 +1,7 @@ import { Transform } from "stream"; import { performance } from "perf_hooks"; -import { sleep } from "../../helpers"; -import { TransformOptions } from "../baseDefinitions"; +import { sleep } from "../helpers"; +import { TransformOptions } from "./baseDefinitions"; /** * Limits date of data transferred into stream. * @param targetRate Desired rate in ms diff --git a/src/functions/reduce/index.ts b/src/functions/reduce.ts similarity index 97% rename from src/functions/reduce/index.ts rename to src/functions/reduce.ts index 743d156..6dfcdf9 100644 --- a/src/functions/reduce/index.ts +++ b/src/functions/reduce.ts @@ -1,5 +1,5 @@ import { Transform } from "stream"; -import { TransformOptions } from "../baseDefinitions"; +import { TransformOptions } from "./baseDefinitions"; /** * Return a ReadWrite stream that reduces streamed chunks down to a single value and yield that * value diff --git a/src/functions/replace/index.ts b/src/functions/replace.ts similarity index 95% rename from src/functions/replace/index.ts rename to src/functions/replace.ts index 4726a35..e8bc0e7 100644 --- a/src/functions/replace/index.ts +++ b/src/functions/replace.ts @@ -1,6 +1,6 @@ import { Transform } from "stream"; import { StringDecoder } from "string_decoder"; -import { WithEncoding } from "../baseDefinitions"; +import { WithEncoding } from "./baseDefinitions"; /** * Return a ReadWrite stream that replaces occurrences of the given string or regular expression in * the streamed chunks with the specified replacement string diff --git a/src/functions/split/index.ts b/src/functions/split.ts similarity index 95% rename from src/functions/split/index.ts rename to src/functions/split.ts index 4ae3e4e..fe31d65 100644 --- a/src/functions/split/index.ts +++ b/src/functions/split.ts @@ -1,6 +1,6 @@ import { Transform } from "stream"; import { StringDecoder } from "string_decoder"; -import { WithEncoding } from "../baseDefinitions"; +import { WithEncoding } from "./baseDefinitions"; /** * Return a ReadWrite stream that splits streamed chunks using the given separator * @param separator Separator to split by, defaulting to "\n" diff --git a/src/functions/stringify/index.ts b/src/functions/stringify.ts similarity index 89% rename from src/functions/stringify/index.ts rename to src/functions/stringify.ts index aaa5918..21996ad 100644 --- a/src/functions/stringify/index.ts +++ b/src/functions/stringify.ts @@ -1,5 +1,5 @@ import { Transform } from "stream"; -import { JsonValue, JsonParseOptions } from "../baseDefinitions"; +import { JsonValue, JsonParseOptions } from "./baseDefinitions"; /** * Return a ReadWrite stream that stringifies the streamed chunks to JSON diff --git a/src/functions/unbatch/index.ts b/src/functions/unbatch.ts similarity index 89% rename from src/functions/unbatch/index.ts rename to src/functions/unbatch.ts index 946c754..d8fc25f 100644 --- a/src/functions/unbatch/index.ts +++ b/src/functions/unbatch.ts @@ -1,5 +1,5 @@ import { Transform } from "stream"; -import { TransformOptions } from "../baseDefinitions"; +import { TransformOptions } from "./baseDefinitions"; /** * Unbatches and sends individual chunks of data */ diff --git a/src/index.ts b/src/index.ts index 3d57c81..c0eabe4 100644 --- a/src/index.ts +++ b/src/index.ts @@ -20,4 +20,5 @@ export { rate, parallelMap, accumulator, + accumulatorBy, } from "./functions"; diff --git a/src/functions/accumulator/accumulator.spec.ts b/tests/accumulator.spec.ts similarity index 99% rename from src/functions/accumulator/accumulator.spec.ts rename to tests/accumulator.spec.ts index a64c921..c5a5ae3 100644 --- a/src/functions/accumulator/accumulator.spec.ts +++ b/tests/accumulator.spec.ts @@ -1,8 +1,8 @@ import test from "ava"; import { expect } from "chai"; import { Readable } from "stream"; -import { accumulator, accumulatorBy } from "."; -import { FlushStrategy } from "./definitions"; +import { accumulator, accumulatorBy } from "../src"; +import { FlushStrategy } from "../src/functions/baseDefinitions"; test.cb("accumulator() rolling", t => { t.plan(3); diff --git a/src/functions/batch/batch.spec.ts b/tests/batch.spec.ts similarity index 97% rename from src/functions/batch/batch.spec.ts rename to tests/batch.spec.ts index 056af0d..bccd1b6 100644 --- a/src/functions/batch/batch.spec.ts +++ b/tests/batch.spec.ts @@ -1,7 +1,7 @@ import { Readable } from "stream"; import test from "ava"; import { expect } from "chai"; -import { batch } from "."; +import { batch } from "../src"; test.cb("batch() batches chunks together", t => { t.plan(3); diff --git a/src/functions/child/child.spec.ts b/tests/child.spec.ts similarity index 95% rename from src/functions/child/child.spec.ts rename to tests/child.spec.ts index fd1ae79..7730790 100644 --- a/src/functions/child/child.spec.ts +++ b/tests/child.spec.ts @@ -2,7 +2,7 @@ import * as cp from "child_process"; import { Readable } from "stream"; import test from "ava"; import { expect } from "chai"; -import { child } from "."; +import { child } from "../src"; test.cb( "child() allows easily writing to child process stdin and reading from its stdout", diff --git a/src/functions/collect/collect.spec.ts b/tests/collect.spec.ts similarity index 99% rename from src/functions/collect/collect.spec.ts rename to tests/collect.spec.ts index b585fe9..1e4cd03 100644 --- a/src/functions/collect/collect.spec.ts +++ b/tests/collect.spec.ts @@ -1,7 +1,7 @@ import { Readable } from "stream"; import test from "ava"; import { expect } from "chai"; -import { collect } from "."; +import { collect } from "../src"; test.cb( "collect() collects streamed elements into an array (object, flowing mode)", diff --git a/src/functions/concat/concat.spec.ts b/tests/concat.spec.ts similarity index 99% rename from src/functions/concat/concat.spec.ts rename to tests/concat.spec.ts index 0750174..596fbad 100644 --- a/src/functions/concat/concat.spec.ts +++ b/tests/concat.spec.ts @@ -1,7 +1,7 @@ import { Readable } from "stream"; import test from "ava"; import { expect } from "chai"; -import { concat, collect } from "../baseFunctions"; +import { concat, collect } from "../src"; test.cb( "concat() concatenates multiple readable streams (object, flowing mode)", diff --git a/src/functions/duplex/duplex.spec.ts b/tests/duplex.spec.ts similarity index 94% rename from src/functions/duplex/duplex.spec.ts rename to tests/duplex.spec.ts index c1ef28b..e5fafd7 100644 --- a/src/functions/duplex/duplex.spec.ts +++ b/tests/duplex.spec.ts @@ -2,7 +2,7 @@ import * as cp from "child_process"; import { Readable } from "stream"; import test from "ava"; import { expect } from "chai"; -import { duplex } from "../baseFunctions"; +import { duplex } from "../src"; test.cb( "duplex() combines a writable and readable stream into a ReadWrite stream", diff --git a/src/functions/filter/filter.spec.ts b/tests/filter.spec.ts similarity index 98% rename from src/functions/filter/filter.spec.ts rename to tests/filter.spec.ts index a537372..7fa2053 100644 --- a/src/functions/filter/filter.spec.ts +++ b/tests/filter.spec.ts @@ -1,7 +1,7 @@ import test from "ava"; import { expect } from "chai"; import { Readable } from "stream"; -import { filter } from "."; +import { filter } from "../src"; test.cb("filter() filters elements synchronously", t => { t.plan(2); diff --git a/src/functions/flatMap/flatMap.spec.ts b/tests/flatMap.spec.ts similarity index 98% rename from src/functions/flatMap/flatMap.spec.ts rename to tests/flatMap.spec.ts index 4e6c28d..a8b22bb 100644 --- a/src/functions/flatMap/flatMap.spec.ts +++ b/tests/flatMap.spec.ts @@ -1,7 +1,7 @@ import { Readable } from "stream"; import test from "ava"; import { expect } from "chai"; -import { flatMap } from "."; +import { flatMap } from "../src"; test.cb("flatMap() maps elements synchronously", t => { t.plan(6); diff --git a/src/functions/fromArray/fromArray.spec.ts b/tests/fromArray.spec.ts similarity index 97% rename from src/functions/fromArray/fromArray.spec.ts rename to tests/fromArray.spec.ts index b0b9a95..3e4c93e 100644 --- a/src/functions/fromArray/fromArray.spec.ts +++ b/tests/fromArray.spec.ts @@ -1,6 +1,6 @@ import test from "ava"; import { expect } from "chai"; -import { fromArray } from "."; +import { fromArray } from "../src"; test.cb("fromArray() streams array elements in flowing mode", t => { t.plan(3); diff --git a/src/functions/join/join.spec.ts b/tests/join.spec.ts similarity index 98% rename from src/functions/join/join.spec.ts rename to tests/join.spec.ts index fc9d5b7..6b0be52 100644 --- a/src/functions/join/join.spec.ts +++ b/tests/join.spec.ts @@ -1,7 +1,7 @@ import { Readable } from "stream"; import test from "ava"; import { expect } from "chai"; -import { join } from "."; +import { join } from "../src"; test.cb("join() joins chunks using the specified separator", t => { t.plan(9); diff --git a/src/functions/last/last.spec.ts b/tests/last.spec.ts similarity index 91% rename from src/functions/last/last.spec.ts rename to tests/last.spec.ts index 5bb0338..033c9d8 100644 --- a/src/functions/last/last.spec.ts +++ b/tests/last.spec.ts @@ -1,7 +1,7 @@ import { Readable } from "stream"; import test from "ava"; import { expect } from "chai"; -import { last } from "../baseFunctions"; +import { last } from "../src"; test("last() resolves to the last chunk streamed by the given readable stream", async t => { const source = new Readable({ objectMode: true }); diff --git a/src/functions/map/map.spec.ts b/tests/map.spec.ts similarity index 98% rename from src/functions/map/map.spec.ts rename to tests/map.spec.ts index 5d6a114..75210ff 100644 --- a/src/functions/map/map.spec.ts +++ b/tests/map.spec.ts @@ -1,7 +1,7 @@ import { Readable } from "stream"; import test from "ava"; import { expect } from "chai"; -import { map } from "."; +import { map } from "../src"; test.cb("map() maps elements synchronously", t => { t.plan(3); diff --git a/src/functions/merge/merge.spec.ts b/tests/merge.spec.ts similarity index 97% rename from src/functions/merge/merge.spec.ts rename to tests/merge.spec.ts index 84a8dca..dbbfd79 100644 --- a/src/functions/merge/merge.spec.ts +++ b/tests/merge.spec.ts @@ -1,7 +1,7 @@ import { Readable } from "stream"; import test from "ava"; import { expect } from "chai"; -import { merge } from "../baseFunctions"; +import { merge } from "../src"; test.cb( "merge() merges multiple readable streams in chunk arrival order", diff --git a/src/functions/parallelMap/parallelMap.spec.ts b/tests/parallelMap.spec.ts similarity index 96% rename from src/functions/parallelMap/parallelMap.spec.ts rename to tests/parallelMap.spec.ts index 9ae4d37..dff719a 100644 --- a/src/functions/parallelMap/parallelMap.spec.ts +++ b/tests/parallelMap.spec.ts @@ -2,8 +2,8 @@ import { Readable } from "stream"; import { performance } from "perf_hooks"; import test from "ava"; import { expect } from "chai"; -import { parallelMap } from "../baseFunctions"; -import { sleep } from "../../helpers"; +import { parallelMap } from "../src"; +import { sleep } from "../src/helpers"; test.cb("parallelMap() parallel mapping", t => { t.plan(6); diff --git a/src/functions/parse/parse.spec.ts b/tests/parse.spec.ts similarity index 95% rename from src/functions/parse/parse.spec.ts rename to tests/parse.spec.ts index 0f17b53..d9aebbb 100644 --- a/src/functions/parse/parse.spec.ts +++ b/tests/parse.spec.ts @@ -1,7 +1,7 @@ import { Readable } from "stream"; import test from "ava"; import { expect } from "chai"; -import { parse } from "../baseFunctions"; +import { parse } from "../src"; test.cb("parse() parses the streamed elements as JSON", t => { t.plan(3); diff --git a/src/functions/rate/rate.spec.ts b/tests/rate.spec.ts similarity index 95% rename from src/functions/rate/rate.spec.ts rename to tests/rate.spec.ts index a88d179..024724c 100644 --- a/src/functions/rate/rate.spec.ts +++ b/tests/rate.spec.ts @@ -2,7 +2,7 @@ import { Readable } from "stream"; import { performance } from "perf_hooks"; import test from "ava"; import { expect } from "chai"; -import { rate } from "../baseFunctions"; +import { rate } from "../src"; test.cb("rate() sends data at desired rate", t => { t.plan(9); @@ -19,7 +19,7 @@ test.cb("rate() sends data at desired rate", t => { let k = 0; sourceFast - .pipe(rate(fastRate)) + .pipe(rate(fastRate, 1)) .on("data", (element: string[]) => { const currentRate = (i / (performance.now() - start)) * 1000; expect(element).to.deep.equal(expectedElements[i]); @@ -30,7 +30,7 @@ test.cb("rate() sends data at desired rate", t => { .on("error", t.end); sourceMed - .pipe(rate(medRate)) + .pipe(rate(medRate, 1)) .on("data", (element: string[]) => { const currentRate = (j / (performance.now() - start)) * 1000; expect(element).to.deep.equal(expectedElements[j]); diff --git a/src/functions/reduce/reduce.spec.ts b/tests/reduce.spec.ts similarity index 98% rename from src/functions/reduce/reduce.spec.ts rename to tests/reduce.spec.ts index c01a51e..b005896 100644 --- a/src/functions/reduce/reduce.spec.ts +++ b/tests/reduce.spec.ts @@ -1,7 +1,7 @@ import { Readable } from "stream"; import test from "ava"; import { expect } from "chai"; -import { reduce } from "."; +import { reduce } from "../src"; test.cb("reduce() reduces elements synchronously", t => { t.plan(1); diff --git a/src/functions/replace/replace.spec.ts b/tests/replace.spec.ts similarity index 98% rename from src/functions/replace/replace.spec.ts rename to tests/replace.spec.ts index a36642c..5829f8e 100644 --- a/src/functions/replace/replace.spec.ts +++ b/tests/replace.spec.ts @@ -1,7 +1,7 @@ import { Readable } from "stream"; import test from "ava"; import { expect } from "chai"; -import { replace } from "."; +import { replace } from "../src"; test.cb( "replace() replaces occurrences of the given string in the streamed elements with the specified " + diff --git a/src/functions/split/split.spec.ts b/tests/split.spec.ts similarity index 98% rename from src/functions/split/split.spec.ts rename to tests/split.spec.ts index 9e909f3..1819e2b 100644 --- a/src/functions/split/split.spec.ts +++ b/tests/split.spec.ts @@ -1,7 +1,7 @@ import { Readable } from "stream"; import test from "ava"; import { expect } from "chai"; -import { split } from "."; +import { split } from "../src"; test.cb("split() splits chunks using the default separator (\\n)", t => { t.plan(5); diff --git a/src/functions/stringify/stringify.spec.ts b/tests/stringify.spec.ts similarity index 97% rename from src/functions/stringify/stringify.spec.ts rename to tests/stringify.spec.ts index 1569ec1..7452e99 100644 --- a/src/functions/stringify/stringify.spec.ts +++ b/tests/stringify.spec.ts @@ -1,7 +1,7 @@ import { Readable } from "stream"; import test from "ava"; import { expect } from "chai"; -import { stringify } from "../baseFunctions"; +import { stringify } from "../src"; test.cb("stringify() stringifies the streamed elements as JSON", t => { t.plan(4); diff --git a/src/functions/unbatch/unbatch.spec.ts b/tests/unbatch.spec.ts similarity index 92% rename from src/functions/unbatch/unbatch.spec.ts rename to tests/unbatch.spec.ts index 6a99a05..d48b1b9 100644 --- a/src/functions/unbatch/unbatch.spec.ts +++ b/tests/unbatch.spec.ts @@ -1,7 +1,7 @@ import { Readable } from "stream"; import test from "ava"; import { expect } from "chai"; -import { unbatch, batch } from "../baseFunctions"; +import { unbatch, batch } from "../src"; test.cb("unbatch() unbatches", t => { t.plan(3); diff --git a/tsconfig.json b/tsconfig.json index 4a3d25c..56ed6a8 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,18 +1,29 @@ { - "compilerOptions": { - "noImplicitAny": true, - "strictNullChecks": true, - "noImplicitReturns": true, - "noUnusedLocals": false, - "noImplicitThis": true, - "forceConsistentCasingInFileNames": true, - "suppressImplicitAnyIndexErrors": true, - "outDir": "./dist", - "module": "commonjs", - "target": "es5", - "lib": ["es2016", "es2019"], - "sourceMap": true, - "declaration": true + "compilerOptions": { + "noImplicitAny": true, + "strictNullChecks": true, + "noImplicitReturns": true, + "noUnusedLocals": false, + "noImplicitThis": true, + "forceConsistentCasingInFileNames": true, + "suppressImplicitAnyIndexErrors": true, + "outDir": "./dist", + "module": "commonjs", + "baseUrl": ".", + "paths": { + "src/*": [ + "src/*" + ] }, - "include": ["src/**/*.ts"] + "target": "es5", + "lib": [ + "es2016", + "es2019" + ], + "sourceMap": true, + "declaration": true + }, + "include": [ + "src/**/*.ts" + ] } From 047ff66ee1343e20373a75fc96c928851c326cca Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Fri, 16 Aug 2019 09:04:59 -0400 Subject: [PATCH 25/69] Remove unused lib --- tsconfig.json | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tsconfig.json b/tsconfig.json index 56ed6a8..b0b6347 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -17,8 +17,7 @@ }, "target": "es5", "lib": [ - "es2016", - "es2019" + "es2016" ], "sourceMap": true, "declaration": true From 4e80e48fa41aaa8bcc302b6846fa41e3c3c2db61 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Fri, 16 Aug 2019 09:27:17 -0400 Subject: [PATCH 26/69] Remove paths --- tsconfig.json | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/tsconfig.json b/tsconfig.json index b0b6347..c9faf21 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -8,12 +8,7 @@ "forceConsistentCasingInFileNames": true, "suppressImplicitAnyIndexErrors": true, "outDir": "./dist", - "module": "commonjs", - "baseUrl": ".", - "paths": { - "src/*": [ - "src/*" - ] + "module": "commonjs" }, "target": "es5", "lib": [ From 50f6886b4b91fbb3ed9e9ec21058043f3a30b2ce Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Fri, 16 Aug 2019 10:01:55 -0400 Subject: [PATCH 27/69] Cleanup --- src/functions/batch.ts | 2 +- src/functions/index.ts | 44 +++++++++++++++++++++++++++++++---- tests/accumulator.spec.ts | 48 ++++++++++----------------------------- tests/batch.spec.ts | 6 ++--- tsconfig.json | 16 +++++-------- 5 files changed, 60 insertions(+), 56 deletions(-) diff --git a/src/functions/batch.ts b/src/functions/batch.ts index 76551eb..4b56b4c 100644 --- a/src/functions/batch.ts +++ b/src/functions/batch.ts @@ -4,7 +4,7 @@ import { TransformOptions } from "./baseDefinitions"; * Stores chunks of data internally in array and batches when batchSize is reached. * * @param batchSize Size of the batches - * @param maxBatchAge Max lifetime of a batch + * @param maxBatchAge Max lifetime of a batch in seconds */ export function batch( batchSize: number = 1000, diff --git a/src/functions/index.ts b/src/functions/index.ts index 426d03d..8bdbf75 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -202,17 +202,17 @@ export function batch(batchSize: number, maxBatchAge?: number): Transform { } /** - * Unbatches and sends individual chunks of data + * Unbatches and sends individual chunks of data. */ export function unbatch(): Transform { return baseFunctions.unbatch(); } /** - * Limits date of data transferred into stream. + * Limits rate of data transferred into stream. * @param options? - * @param targetRate? Desired rate in ms - * @param period? Period to sleep for when rate is above or equal to targetRate + * @param targetRate? Desired rate in ms. + * @param period? Period to sleep for when rate is above or equal to targetRate. */ export function rate(targetRate?: number, period?: number): Transform { return baseFunctions.rate(targetRate, period); @@ -221,7 +221,7 @@ export function rate(targetRate?: number, period?: number): Transform { /** * Limits number of parallel processes in flight. * @param parallel Max number of parallel processes. - * @param func Function to execute on each data chunk + * @param func Function to execute on each data chunk. * @param pause Amount of time to pause processing when max number of parallel processes are executing. */ export function parallelMap( @@ -232,6 +232,26 @@ export function parallelMap( return baseFunctions.parallelMap(mapper, parallel, sleepTime); } +/** + * Accummulates and sends batches of data. Each chunk that flows into the stream is checked against items + * in the buffer. How the buffer is mutated is based on 1 of 2 possible buffering strategies: + * 1. Sliding + * - If the buffer is larger than the batchSize, the front of the buffer is popped to maintain + * the batchSize. When no key is provided, the batchSize is effectively the buffer length. When + * a key is provided, the batchSize is based on the value at that key. For example, given a key + * of `timestamp` and a batchSize of 3000, each item in the buffer will be guaranteed to be + * within 3000 timestamp units from the first element. This means that with a key, multiple elements + * may be spliced off the front of the buffer. The buffer is then pushed into the stream. + * 2. Rolling + * - If the buffer is larger than the batchSize, the buffer is cleared and pushed into the stream. + * When no key is provided, the batchSize is the buffer length. When a key is provided, the batchSize + * is based on the value at that key. For example, given a key of `timestamp` and a batchSize of 3000, + * each item in the buffer will be guaranteed to be within 3000 timestamp units from the first element. + * @param batchSize Size of the batch (in units of buffer length or value at key). + * @param batchRate Desired rate of data transfer to next stream. + * @param flushStrategy Buffering strategy to use. + * @param keyBy Key to determine if element fits into buffer or items need to be cleared from buffer. + */ export function accumulator( batchSize: number, batchRate: number | undefined, @@ -246,6 +266,20 @@ export function accumulator( ); } +/** + * Accummulates and sends batches of data. Each chunk that flows into the stream is checked against items + * in the buffer. How the buffer is mutated is based on 1 of 2 possible buffering strategies: + * 1. Sliding + * - If the iteratee returns false, the front of the buffer is popped until iteratee returns true. The + * item is pushed into the buffer and buffer is pushed into stream. + * 2. Rolling + * - If the iteratee returns false, the buffer is cleared and pushed into stream. The item is + * then pushed into the buffer. + * @param batchRate Desired rate of data transfer to next stream. + * @param flushStrategy Buffering strategy to use. + * @param iteratee Function applied to buffer when a chunk of data enters stream to determine if element fits into + * or items need to be cleared from buffer. + */ export function accumulatorBy( batchRate: number | undefined, flushStrategy: S, diff --git a/tests/accumulator.spec.ts b/tests/accumulator.spec.ts index c5a5ae3..523455a 100644 --- a/tests/accumulator.spec.ts +++ b/tests/accumulator.spec.ts @@ -26,9 +26,7 @@ test.cb("accumulator() rolling", t => { .on("error", (e: any) => { t.end(e); }) - .on("end", () => { - t.end(); - }); + .on("end", t.end); [...firstFlush, ...secondFlush, ...thirdFlush].forEach(item => { source.push(item); }); @@ -61,9 +59,7 @@ test.cb("accumulator() rolling with key", t => { .on("error", (e: any) => { t.end(e); }) - .on("end", () => { - t.end(); - }); + .on("end", t.end); [...firstFlush, ...secondFlush].forEach(item => { source.push(item); }); @@ -105,9 +101,7 @@ test.cb( index++; t.pass(); }) - .on("end", () => { - t.end(); - }); + .on("end", t.end); input.forEach(item => { source.push(item); }); @@ -162,9 +156,7 @@ test.cb( ); t.pass(); }) - .on("end", () => { - t.end(); - }); + .on("end", t.end); input.forEach(item => { source.push(item); }); @@ -209,9 +201,7 @@ test.cb("accumulator() sliding", t => { .on("error", (e: any) => { t.end(e); }) - .on("end", () => { - t.end(); - }); + .on("end", t.end); input.forEach(item => { source.push(item); }); @@ -266,9 +256,7 @@ test.cb("accumulator() sliding with key", t => { .on("error", (e: any) => { t.end(e); }) - .on("end", () => { - t.end(); - }); + .on("end", t.end); input.forEach(item => { source.push(item); }); @@ -309,9 +297,7 @@ test.cb( index++; t.pass(); }) - .on("end", () => { - t.end(); - }); + .on("end", t.end); input.forEach(item => { source.push(item); }); @@ -372,9 +358,7 @@ test.cb( ); t.pass(); }) - .on("end", () => { - t.end(); - }); + .on("end", t.end); input.forEach(item => { source.push(item); }); @@ -416,9 +400,7 @@ test.cb("accumulatorBy() rolling", t => { .on("error", (e: any) => { t.end(e); }) - .on("end", () => { - t.end(); - }); + .on("end", t.end); [...firstFlush, ...secondFlush].forEach(item => { source.push(item); }); @@ -457,9 +439,7 @@ test.cb( expect(err.message).to.equal("Failed mapping"); t.pass(); }) - .on("end", () => { - t.end(); - }); + .on("end", t.end); input.forEach(item => { source.push(item); @@ -524,9 +504,7 @@ test.cb("accumulatorBy() sliding", t => { .on("error", (e: any) => { t.end(e); }) - .on("end", () => { - t.end(); - }); + .on("end", t.end); input.forEach(item => { source.push(item); }); @@ -565,9 +543,7 @@ test.cb( expect(err.message).to.equal("Failed mapping"); t.pass(); }) - .on("end", () => { - t.end(); - }); + .on("end", t.end); input.forEach(item => { source.push(item); diff --git a/tests/batch.spec.ts b/tests/batch.spec.ts index bccd1b6..0c2cd3a 100644 --- a/tests/batch.spec.ts +++ b/tests/batch.spec.ts @@ -11,8 +11,7 @@ test.cb("batch() batches chunks together", t => { source .pipe(batch(3)) .on("data", (element: string[]) => { - expect(element).to.deep.equal(expectedElements[i]); - t.pass(); + t.deepEqual(element, expectedElements[i]); i++; }) .on("error", t.end) @@ -39,8 +38,7 @@ test.cb("batch() yields a batch after the timeout", t => { source .pipe(batch(3)) .on("data", (element: string[]) => { - expect(element).to.deep.equal(expectedElements[i]); - t.pass(); + t.deepEqual(element, expectedElements[i]); i++; }) .on("error", t.fail) diff --git a/tsconfig.json b/tsconfig.json index c9faf21..fabdff4 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -9,15 +9,11 @@ "suppressImplicitAnyIndexErrors": true, "outDir": "./dist", "module": "commonjs" - }, - "target": "es5", - "lib": [ - "es2016" - ], - "sourceMap": true, - "declaration": true }, - "include": [ - "src/**/*.ts" - ] + "target": "es5", + "lib": [ + "es2016" + ], + "sourceMap": true, + "declaration": true } From 7394b6ef840e0f7e4adaa719ebcda09e539a15e2 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Fri, 16 Aug 2019 10:06:23 -0400 Subject: [PATCH 28/69] Skip full period in rate --- src/functions/rate.ts | 4 ++-- tests/rate.spec.ts | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/functions/rate.ts b/src/functions/rate.ts index bed7edd..e322744 100644 --- a/src/functions/rate.ts +++ b/src/functions/rate.ts @@ -9,13 +9,13 @@ import { TransformOptions } from "./baseDefinitions"; */ export function rate( targetRate: number = 50, - period: number = 2, + period: number = 1, options: TransformOptions = { readableObjectMode: true, writableObjectMode: true, }, ): Transform { - const deltaMS = ((1 / targetRate) * 1000) / period; // Skip half a period + const deltaMS = ((1 / targetRate) * 1000) / period; // Skip a full period let total = 0; const start = performance.now(); return new Transform({ diff --git a/tests/rate.spec.ts b/tests/rate.spec.ts index 024724c..acd3647 100644 --- a/tests/rate.spec.ts +++ b/tests/rate.spec.ts @@ -19,7 +19,7 @@ test.cb("rate() sends data at desired rate", t => { let k = 0; sourceFast - .pipe(rate(fastRate, 1)) + .pipe(rate(fastRate)) .on("data", (element: string[]) => { const currentRate = (i / (performance.now() - start)) * 1000; expect(element).to.deep.equal(expectedElements[i]); @@ -30,7 +30,7 @@ test.cb("rate() sends data at desired rate", t => { .on("error", t.end); sourceMed - .pipe(rate(medRate, 1)) + .pipe(rate(medRate)) .on("data", (element: string[]) => { const currentRate = (j / (performance.now() - start)) * 1000; expect(element).to.deep.equal(expectedElements[j]); @@ -41,7 +41,7 @@ test.cb("rate() sends data at desired rate", t => { .on("error", t.end); sourceSlow - .pipe(rate(slowRate, 1)) + .pipe(rate(slowRate)) .on("data", (element: string[]) => { const currentRate = (k / (performance.now() - start)) * 1000; expect(element).to.deep.equal(expectedElements[k]); From 6581e1d7451e0a5b86b22087446d56c272b5fb8f Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Wed, 21 Aug 2019 15:40:19 -0400 Subject: [PATCH 29/69] Save --- src/functions/baseFunctions.ts | 1 + src/functions/compose.ts | 45 ++++++++++++++++++++++++++++++++++ src/functions/index.ts | 12 ++++++++- src/index.ts | 1 + tests/compose.spec.ts | 20 +++++++++++++++ tests/composed.spec.ts | 0 tsconfig.json | 4 ++- 7 files changed, 81 insertions(+), 2 deletions(-) create mode 100644 src/functions/compose.ts create mode 100644 tests/compose.spec.ts create mode 100644 tests/composed.spec.ts diff --git a/src/functions/baseFunctions.ts b/src/functions/baseFunctions.ts index 2b2b067..005aa3a 100644 --- a/src/functions/baseFunctions.ts +++ b/src/functions/baseFunctions.ts @@ -19,3 +19,4 @@ export { replace } from "./replace"; export { split } from "./split"; export { stringify } from "./stringify"; export { unbatch } from "./unbatch"; +export { compose } from "./compose"; diff --git a/src/functions/compose.ts b/src/functions/compose.ts new file mode 100644 index 0000000..ed6df07 --- /dev/null +++ b/src/functions/compose.ts @@ -0,0 +1,45 @@ +import { Transform, Writable, Pipe, WritableOptions } from "stream"; + +class Compose extends Writable implements Pipe { + private head: Writable | Transform; + private tail: Writable | Transform; + constructor( + streams: Array, + options?: WritableOptions, + ) { + super(options); + if (streams.length < 2) { + throw new Error("Cannot compose 1 or less streams"); + } + this.head = streams[0]; + for (let i = 1; i < streams.length; i++) { + streams[i - 1].pipe(streams[i]); + } + this.tail = streams[streams.length - 1]; + } + + public pipe( + destination: T, + options: { end?: boolean } | undefined, + ) { + return this.tail.pipe( + destination, + options, + ); + } + + public _write(chunk: any, enc: string, cb: any) { + this.head.write(chunk.toString ? chunk.toString() : chunk, cb); + } +} + +/** + * Return a Readable stream of readable streams concatenated together + * @param streams Readable streams to concatenate + */ +export function compose( + streams: Array, + options?: WritableOptions, +): Compose { + return new Compose(streams, options); +} diff --git a/src/functions/index.ts b/src/functions/index.ts index 8bdbf75..ceba200 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -1,4 +1,4 @@ -import { Readable, Writable, Transform, Duplex } from "stream"; +import { Readable, Writable, WritableOptions, Transform, Duplex } from "stream"; import { ChildProcess } from "child_process"; import * as baseFunctions from "./baseFunctions"; @@ -287,3 +287,13 @@ export function accumulatorBy( ) { return baseFunctions.accumulatorBy(batchRate, flushStrategy, iteratee); } + +export function compose( + streams: Array, + options?: WritableOptions, +) { + return baseFunctions.compose( + streams, + options, + ); +} diff --git a/src/index.ts b/src/index.ts index c0eabe4..98b0a45 100644 --- a/src/index.ts +++ b/src/index.ts @@ -21,4 +21,5 @@ export { parallelMap, accumulator, accumulatorBy, + compose, } from "./functions"; diff --git a/tests/compose.spec.ts b/tests/compose.spec.ts new file mode 100644 index 0000000..001dbe0 --- /dev/null +++ b/tests/compose.spec.ts @@ -0,0 +1,20 @@ +const test = require("ava"); +const { compose, map } = require("../src"); + +test.cb("compose()", t => { + const first = map((chunk: number) => chunk * 2); + const second = map((chunk: number) => chunk + 1); + + const composed = compose( + [first, second], + { objectMode: true }, + ); + + composed.write(1); + composed.write(2); + composed.write(3); + + composed.on("data", data => { + console.log("DATA", data); + }); +}); diff --git a/tests/composed.spec.ts b/tests/composed.spec.ts new file mode 100644 index 0000000..e69de29 diff --git a/tsconfig.json b/tsconfig.json index fabdff4..3a85e23 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -15,5 +15,7 @@ "es2016" ], "sourceMap": true, - "declaration": true + "declaration": true, + "include": ["src/**/*"], + "exclude": ["tests", "node_modules"] } From 1e7fad240338276b815bae09a25a38fd860a2c89 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Wed, 21 Aug 2019 15:40:34 -0400 Subject: [PATCH 30/69] Remove composed.spec --- tests/composed.spec.ts | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 tests/composed.spec.ts diff --git a/tests/composed.spec.ts b/tests/composed.spec.ts deleted file mode 100644 index e69de29..0000000 From d097fa6aa50743fd0114a467264668fd84612b69 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 22 Aug 2019 12:07:30 -0400 Subject: [PATCH 31/69] Save --- src/functions/baseFunctions.ts | 2 +- src/functions/compose.ts | 83 +++++++++++++++++++--------------- src/functions/index.ts | 6 +++ src/index.ts | 1 + tests/compose.spec.ts | 44 ++++++++++++++++-- 5 files changed, 93 insertions(+), 43 deletions(-) diff --git a/src/functions/baseFunctions.ts b/src/functions/baseFunctions.ts index 005aa3a..6bf75a6 100644 --- a/src/functions/baseFunctions.ts +++ b/src/functions/baseFunctions.ts @@ -19,4 +19,4 @@ export { replace } from "./replace"; export { split } from "./split"; export { stringify } from "./stringify"; export { unbatch } from "./unbatch"; -export { compose } from "./compose"; +export { compose, composeDuplex } from "./compose"; diff --git a/src/functions/compose.ts b/src/functions/compose.ts index ed6df07..8d21283 100644 --- a/src/functions/compose.ts +++ b/src/functions/compose.ts @@ -1,45 +1,54 @@ -import { Transform, Writable, Pipe, WritableOptions } from "stream"; - -class Compose extends Writable implements Pipe { - private head: Writable | Transform; - private tail: Writable | Transform; - constructor( - streams: Array, - options?: WritableOptions, - ) { - super(options); - if (streams.length < 2) { - throw new Error("Cannot compose 1 or less streams"); - } - this.head = streams[0]; - for (let i = 1; i < streams.length; i++) { - streams[i - 1].pipe(streams[i]); - } - this.tail = streams[streams.length - 1]; - } - - public pipe( - destination: T, - options: { end?: boolean } | undefined, - ) { - return this.tail.pipe( - destination, - options, - ); - } - - public _write(chunk: any, enc: string, cb: any) { - this.head.write(chunk.toString ? chunk.toString() : chunk, cb); - } -} +import { + pipeline, + Transform, + Writable, + Pipe, + WritableOptions, + Readable, + Duplex, +} from "stream"; /** * Return a Readable stream of readable streams concatenated together * @param streams Readable streams to concatenate */ + +// First Readable --> Readable +// First Transform | Duplex, Last Writable --> Writable +// export function compose( - streams: Array, + streams: Array, options?: WritableOptions, -): Compose { - return new Compose(streams, options); +): Duplex { + // Maybe just return a new stream here + if (streams.length < 2) { + throw new Error("Not enough"); + } + + const duplex = new Duplex({ + objectMode: true, + write(chunk, enc, cb) { + const first = streams[0] as Writable; + if (!first.write(chunk)) { + first.on("drain", cb); + } else { + cb(); + } + }, + read(size) { + let chunk; + while ( + null !== + (chunk = (streams[streams.length - 1] as Readable).read()) + ) { + this.push(chunk); + } + }, + }); + + pipeline(streams, (err: any) => { + duplex.emit("error", err); + }); + + return duplex; } diff --git a/src/functions/index.ts b/src/functions/index.ts index ceba200..9d08eba 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -297,3 +297,9 @@ export function compose( options, ); } +export function composeDuplex( + streams: Array, + options?: WritableOptions, +) { + return baseFunctions.composeDuplex(streams, options); +} diff --git a/src/index.ts b/src/index.ts index 98b0a45..d6b784c 100644 --- a/src/index.ts +++ b/src/index.ts @@ -22,4 +22,5 @@ export { accumulator, accumulatorBy, compose, + composeDuplex, } from "./functions"; diff --git a/tests/compose.spec.ts b/tests/compose.spec.ts index 001dbe0..9fa8d82 100644 --- a/tests/compose.spec.ts +++ b/tests/compose.spec.ts @@ -1,20 +1,54 @@ const test = require("ava"); -const { compose, map } = require("../src"); +const { compose, composeDuplex, map } = require("../src"); test.cb("compose()", t => { - const first = map((chunk: number) => chunk * 2); - const second = map((chunk: number) => chunk + 1); + const first = map((chunk: number) => chunk + "x"); + const second = map((chunk: number) => chunk + "y"); const composed = compose( [first, second], { objectMode: true }, ); + const third = map((chunk: number) => chunk + "z"); + composed + .pipe(third) + .on("data", data => console.log("Piped composed: ", data)); + + composed.on("data", data => { + console.log("data on composed", data); + t.end(); + }); + composed.on("error", data => { + console.log("ERROR", data); + }); + composed.on("end", data => { + console.log("end", data); + }); composed.write(1); composed.write(2); - composed.write(3); +}); +test.cb.only("composeDuplex()", t => { + const first = map((chunk: number) => chunk + "x"); + const second = map((chunk: number) => chunk + "y"); + + const composed = composeDuplex([first, second], { objectMode: true }); + const third = map((chunk: number) => chunk + "z"); + // composed + // .pipe(third) + // .on("data", data => console.log("Piped composed: ", data)); composed.on("data", data => { - console.log("DATA", data); + console.log("data on composed", data); + t.end(); }); + composed.on("error", data => { + console.log("ERROR", data); + }); + composed.on("end", data => { + console.log("end", data); + }); + + composed.write(1); + composed.write(2); }); From 1d0e15890cf346e36abf999362629f89aa43f7f8 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 22 Aug 2019 14:52:39 -0400 Subject: [PATCH 32/69] Tests --- src/functions/baseFunctions.ts | 2 +- src/functions/compose.ts | 19 +++----- src/functions/index.ts | 6 --- src/index.ts | 1 - tests/compose.spec.ts | 88 ++++++++++++++++++++-------------- 5 files changed, 59 insertions(+), 57 deletions(-) diff --git a/src/functions/baseFunctions.ts b/src/functions/baseFunctions.ts index 6bf75a6..005aa3a 100644 --- a/src/functions/baseFunctions.ts +++ b/src/functions/baseFunctions.ts @@ -19,4 +19,4 @@ export { replace } from "./replace"; export { split } from "./split"; export { stringify } from "./stringify"; export { unbatch } from "./unbatch"; -export { compose, composeDuplex } from "./compose"; +export { compose } from "./compose"; diff --git a/src/functions/compose.ts b/src/functions/compose.ts index 8d21283..96008ea 100644 --- a/src/functions/compose.ts +++ b/src/functions/compose.ts @@ -2,7 +2,6 @@ import { pipeline, Transform, Writable, - Pipe, WritableOptions, Readable, Duplex, @@ -12,7 +11,6 @@ import { * Return a Readable stream of readable streams concatenated together * @param streams Readable streams to concatenate */ - // First Readable --> Readable // First Transform | Duplex, Last Writable --> Writable // @@ -22,26 +20,23 @@ export function compose( ): Duplex { // Maybe just return a new stream here if (streams.length < 2) { - throw new Error("Not enough"); + throw new Error("At least two streams are required to compose"); } + const first = streams[0] as Writable; + const last = streams[streams.length - 1] as Readable; const duplex = new Duplex({ - objectMode: true, + ...options, write(chunk, enc, cb) { - const first = streams[0] as Writable; if (!first.write(chunk)) { - first.on("drain", cb); + first.once("drain", cb); } else { cb(); } }, read(size) { - let chunk; - while ( - null !== - (chunk = (streams[streams.length - 1] as Readable).read()) - ) { - this.push(chunk); + if (last.readable) { + this.push(last.read(size)); } }, }); diff --git a/src/functions/index.ts b/src/functions/index.ts index 9d08eba..ceba200 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -297,9 +297,3 @@ export function compose( options, ); } -export function composeDuplex( - streams: Array, - options?: WritableOptions, -) { - return baseFunctions.composeDuplex(streams, options); -} diff --git a/src/index.ts b/src/index.ts index d6b784c..98b0a45 100644 --- a/src/index.ts +++ b/src/index.ts @@ -22,5 +22,4 @@ export { accumulator, accumulatorBy, compose, - composeDuplex, } from "./functions"; diff --git a/tests/compose.spec.ts b/tests/compose.spec.ts index 9fa8d82..ed2304b 100644 --- a/tests/compose.spec.ts +++ b/tests/compose.spec.ts @@ -1,54 +1,68 @@ const test = require("ava"); +const { expect } = require("chai"); const { compose, composeDuplex, map } = require("../src"); -test.cb("compose()", t => { - const first = map((chunk: number) => chunk + "x"); - const second = map((chunk: number) => chunk + "y"); +test.cb("compose() chains two streams together in the correct order", t => { + t.plan(3); + let i = 0; + const first = map((chunk: number) => chunk + 1); + const second = map((chunk: number) => chunk * 2); const composed = compose( [first, second], { objectMode: true }, ); - const third = map((chunk: number) => chunk + "z"); - composed - .pipe(third) - .on("data", data => console.log("Piped composed: ", data)); composed.on("data", data => { - console.log("data on composed", data); + expect(data).to.equal(result[i]); + t.pass(); + i++; + if (i === 3) { + t.end(); + } + }); + composed.on("error", err => { + t.end(err); + }); + composed.on("end", () => { t.end(); }); - composed.on("error", data => { - console.log("ERROR", data); - }); - composed.on("end", data => { - console.log("end", data); - }); - composed.write(1); - composed.write(2); + const input = [1, 2, 3]; + const result = [4, 6, 8]; + + input.forEach(item => composed.write(item)); }); -test.cb.only("composeDuplex()", t => { - const first = map((chunk: number) => chunk + "x"); - const second = map((chunk: number) => chunk + "y"); - const composed = composeDuplex([first, second], { objectMode: true }); - const third = map((chunk: number) => chunk + "z"); - // composed - // .pipe(third) - // .on("data", data => console.log("Piped composed: ", data)); +test.cb( + "compose() followed by pipe chains streams together in the correct order", + t => { + t.plan(3); + let i = 0; + const first = map((chunk: number) => chunk + 1); + const second = map((chunk: number) => chunk * 2); - composed.on("data", data => { - console.log("data on composed", data); - t.end(); - }); - composed.on("error", data => { - console.log("ERROR", data); - }); - composed.on("end", data => { - console.log("end", data); - }); + const composed = compose( + [first, second], + { objectMode: true }, + ); + const third = map((chunk: number) => chunk + 1); + composed.pipe(third).on("data", data => { + expect(data).to.equal(result[i]); + t.pass(); + i++; + if (i === 3) { + t.end(); + } + }); - composed.write(1); - composed.write(2); -}); + composed.on("error", err => { + t.end(err); + }); + + const input = [1, 2, 3]; + const result = [5, 7, 9]; + + input.forEach(item => composed.write(item)); + }, +); From f35f025dbca0c01695976af5acdb6ac835b190e3 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 22 Aug 2019 15:35:36 -0400 Subject: [PATCH 33/69] Use class --- src/functions/compose.ts | 81 ++++++++++++++++++++++++---------------- 1 file changed, 49 insertions(+), 32 deletions(-) diff --git a/src/functions/compose.ts b/src/functions/compose.ts index 96008ea..f53aa6c 100644 --- a/src/functions/compose.ts +++ b/src/functions/compose.ts @@ -1,11 +1,4 @@ -import { - pipeline, - Transform, - Writable, - WritableOptions, - Readable, - Duplex, -} from "stream"; +import { pipeline, Duplex, DuplexOptions } from "stream"; /** * Return a Readable stream of readable streams concatenated together @@ -15,35 +8,59 @@ import { // First Transform | Duplex, Last Writable --> Writable // export function compose( - streams: Array, - options?: WritableOptions, + streams: Array< + NodeJS.ReadableStream | NodeJS.ReadWriteStream | NodeJS.WritableStream + >, + options?: DuplexOptions, ): Duplex { // Maybe just return a new stream here if (streams.length < 2) { throw new Error("At least two streams are required to compose"); } - const first = streams[0] as Writable; - const last = streams[streams.length - 1] as Readable; - const duplex = new Duplex({ - ...options, - write(chunk, enc, cb) { - if (!first.write(chunk)) { - first.once("drain", cb); - } else { - cb(); - } - }, - read(size) { - if (last.readable) { - this.push(last.read(size)); - } - }, - }); + const composed = new Compose(streams, options); - pipeline(streams, (err: any) => { - duplex.emit("error", err); - }); - - return duplex; + return composed; +} + +class Compose extends Duplex { + private first: + | NodeJS.ReadableStream + | NodeJS.ReadWriteStream + | NodeJS.WritableStream; + private last: + | NodeJS.ReadableStream + | NodeJS.ReadWriteStream + | NodeJS.WritableStream; + constructor( + streams: Array< + | NodeJS.ReadableStream + | NodeJS.ReadWriteStream + | NodeJS.WritableStream + >, + options?: DuplexOptions, + ) { + super(options); + this.first = streams[0]; + this.last = streams[streams.length - 1]; + pipeline(streams, (err: any) => { + this.emit("error", err); + }); + } + + public pipe(dest: T) { + return (this.last as NodeJS.ReadableStream).pipe(dest); + } + + public write(chunk: any) { + return (this.first as NodeJS.WritableStream).write(chunk); + } + + public on(event: string, cb: any) { + if (event === "error") { + super.on(event, cb); + } + this.last.on(event, cb); + return this; + } } From c7903376e93c5c63f58aa71b0da8e889e8ec8602 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 22 Aug 2019 16:47:43 -0400 Subject: [PATCH 34/69] DuplexOptions --- src/functions/index.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/functions/index.ts b/src/functions/index.ts index ceba200..21ad7f9 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -1,4 +1,4 @@ -import { Readable, Writable, WritableOptions, Transform, Duplex } from "stream"; +import { Readable, Writable, DuplexOptions, Transform, Duplex } from "stream"; import { ChildProcess } from "child_process"; import * as baseFunctions from "./baseFunctions"; @@ -290,7 +290,7 @@ export function accumulatorBy( export function compose( streams: Array, - options?: WritableOptions, + options?: DuplexOptions, ) { return baseFunctions.compose( streams, From 9b09a3f9490896851936f3895c91a8b99b156de8 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Wed, 28 Aug 2019 17:01:51 -0400 Subject: [PATCH 35/69] Add demux --- src/functions/baseFunctions.ts | 1 + src/functions/demux.ts | 49 +++++++++++++++++++++++++++++ src/functions/index.ts | 10 ++++++ src/index.ts | 1 + tests/demux.spec.ts | 57 ++++++++++++++++++++++++++++++++++ tslint.json | 3 +- 6 files changed, 120 insertions(+), 1 deletion(-) create mode 100644 src/functions/demux.ts create mode 100644 tests/demux.spec.ts diff --git a/src/functions/baseFunctions.ts b/src/functions/baseFunctions.ts index 005aa3a..6ff480a 100644 --- a/src/functions/baseFunctions.ts +++ b/src/functions/baseFunctions.ts @@ -20,3 +20,4 @@ export { split } from "./split"; export { stringify } from "./stringify"; export { unbatch } from "./unbatch"; export { compose } from "./compose"; +export { demux } from "./demux"; diff --git a/src/functions/demux.ts b/src/functions/demux.ts new file mode 100644 index 0000000..a0fead2 --- /dev/null +++ b/src/functions/demux.ts @@ -0,0 +1,49 @@ +import { WritableOptions, Writable } from "stream"; + +/** + * Return a Duplex stream that is pushed data from multiple sources + * @param streams Source streams to multiplex + * @param options Duplex stream options + */ +export function demux( + construct: () => NodeJS.WritableStream | NodeJS.ReadWriteStream, + demuxBy: { key?: string; keyBy?: (chunk: any) => string }, + options?: WritableOptions, +): Writable { + return new Demux(construct, demuxBy, options); +} + +class Demux extends Writable { + private keyMap: object; + private demuxer: (chunk: any) => string; + private construct: ( + destKey?: string, + ) => NodeJS.WritableStream | NodeJS.ReadWriteStream; + constructor( + construct: ( + destKey?: string, + ) => NodeJS.WritableStream | NodeJS.ReadWriteStream, + demuxBy: { key?: string; keyBy?: (chunk: any) => string }, + options?: WritableOptions, + ) { + super(options); + if (demuxBy.keyBy === undefined && demuxBy.key === undefined) { + throw new Error("Need one"); + } + this.demuxer = demuxBy.keyBy || ((chunk: any) => chunk[demuxBy.key!]); + this.construct = construct; + this.keyMap = {}; + } + + public write(chunk: any, encoding?: any, cb?: any): boolean { + const destKey = this.demuxer(chunk); + if (this.keyMap[destKey] === undefined) { + this.keyMap[destKey] = this.construct(destKey); + } + const writeRes = this.keyMap[destKey].write(chunk); + if (cb !== undefined) { + cb(); + } + return writeRes; + } +} diff --git a/src/functions/index.ts b/src/functions/index.ts index 21ad7f9..ce50218 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -297,3 +297,13 @@ export function compose( options, ); } + +export function demux( + construct: ( + destKey?: string, + ) => NodeJS.WritableStream | NodeJS.ReadWriteStream, + demuxer: { key?: string; keyBy?: (chunk: any) => string }, + options?: DuplexOptions, +) { + return baseFunctions.demux(construct, demuxer, options); +} diff --git a/src/index.ts b/src/index.ts index 98b0a45..924b246 100644 --- a/src/index.ts +++ b/src/index.ts @@ -22,4 +22,5 @@ export { accumulator, accumulatorBy, compose, + demux, } from "./functions"; diff --git a/tests/demux.spec.ts b/tests/demux.spec.ts new file mode 100644 index 0000000..3c124b9 --- /dev/null +++ b/tests/demux.spec.ts @@ -0,0 +1,57 @@ +import test from "ava"; +import { expect } from "chai"; +import { demux, map } from "../src"; +import { Readable, Transform } from "stream"; + +interface Test { + key: string; + val: number; +} +test.cb("should spread per key", t => { + t.plan(5); + const input = [ + { key: "a", val: 1 }, + { key: "a", val: 2 }, + { key: "b", val: 3 }, + { key: "c", val: 4 }, + ]; + const results = [ + { key: "a", val: 2 }, + { key: "a", val: 3 }, + { key: "b", val: 4 }, + { key: "c", val: 5 }, + ]; + const destKeys = []; + const dests = []; + let i = 0; + + const construct = (destKey: string) => { + destKeys.push(destKey); + const dest = map((chunk: Test) => ({ + ...chunk, + val: chunk.val + 1, + })) + .on("data", (d: Test) => { + expect(results).to.deep.include(d); + t.pass(); + }) + .on("end", () => { + i++; + if (i === dests.length) { + t.end(); + } + }); + dests.push(dest); + return dest; + }; + + const demuxed = demux(construct, { key: "key" }, { objectMode: true }); + demuxed.on("finish", () => { + expect(destKeys).to.deep.equal(["a", "b", "c"]); + t.pass(); + dests.forEach(dest => dest.end()); + }); + + input.forEach(event => demuxed.write(event)); + demuxed.end(); +}); diff --git a/tslint.json b/tslint.json index becd92c..03b2e44 100644 --- a/tslint.json +++ b/tslint.json @@ -9,6 +9,7 @@ "no-implicit-dependencies": [true, "dev"], "prettier": [true, ".prettierrc"], "ordered-imports": false, - "interface-name": false + "interface-name": false, + "object-literal-sort-keys": false } } From 685215bee6d5652ff1e571467c4ef6fd256dee3e Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Wed, 28 Aug 2019 17:04:31 -0400 Subject: [PATCH 36/69] Add test for keyBy --- tests/demux.spec.ts | 54 ++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 53 insertions(+), 1 deletion(-) diff --git a/tests/demux.spec.ts b/tests/demux.spec.ts index 3c124b9..baf29aa 100644 --- a/tests/demux.spec.ts +++ b/tests/demux.spec.ts @@ -1,7 +1,6 @@ import test from "ava"; import { expect } from "chai"; import { demux, map } from "../src"; -import { Readable, Transform } from "stream"; interface Test { key: string; @@ -55,3 +54,56 @@ test.cb("should spread per key", t => { input.forEach(event => demuxed.write(event)); demuxed.end(); }); + +test.cb("should spread per key using keyBy", t => { + t.plan(5); + const input = [ + { key: "a", val: 1 }, + { key: "a", val: 2 }, + { key: "b", val: 3 }, + { key: "c", val: 4 }, + ]; + const results = [ + { key: "a", val: 2 }, + { key: "a", val: 3 }, + { key: "b", val: 4 }, + { key: "c", val: 5 }, + ]; + const destKeys = []; + const dests = []; + let i = 0; + + const construct = (destKey: string) => { + destKeys.push(destKey); + const dest = map((chunk: Test) => ({ + ...chunk, + val: chunk.val + 1, + })) + .on("data", (d: Test) => { + expect(results).to.deep.include(d); + t.pass(); + }) + .on("end", () => { + i++; + if (i === dests.length) { + t.end(); + } + }); + dests.push(dest); + return dest; + }; + + const demuxed = demux( + construct, + { keyBy: (chunk: any) => chunk.key }, + { objectMode: true }, + ); + demuxed.on("finish", () => { + expect(destKeys).to.deep.equal(["a", "b", "c"]); + t.pass(); + dests.forEach(dest => dest.end()); + }); + + input.forEach(event => demuxed.write(event)); + demuxed.end(); +}); From 9765e6cb49fee9636ba0cdf1c1343206836a8cc7 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 29 Aug 2019 08:50:11 -0400 Subject: [PATCH 37/69] Update tests to write to sink --- tests/demux.spec.ts | 107 +++++++++++++++++++++++--------------------- 1 file changed, 57 insertions(+), 50 deletions(-) diff --git a/tests/demux.spec.ts b/tests/demux.spec.ts index baf29aa..dcd8ad0 100644 --- a/tests/demux.spec.ts +++ b/tests/demux.spec.ts @@ -1,6 +1,7 @@ import test from "ava"; import { expect } from "chai"; import { demux, map } from "../src"; +import { Writable } from "stream"; interface Test { key: string; @@ -10,45 +11,48 @@ test.cb("should spread per key", t => { t.plan(5); const input = [ { key: "a", val: 1 }, - { key: "a", val: 2 }, - { key: "b", val: 3 }, + { key: "b", val: 2 }, + { key: "a", val: 3 }, { key: "c", val: 4 }, ]; const results = [ { key: "a", val: 2 }, - { key: "a", val: 3 }, - { key: "b", val: 4 }, + { key: "b", val: 3 }, + { key: "a", val: 4 }, { key: "c", val: 5 }, ]; - const destKeys = []; - const dests = []; + const destinationStreamKeys = []; let i = 0; - + const sink = new Writable({ + objectMode: true, + write(chunk, enc, cb) { + i++; + expect(results).to.deep.include(chunk); + expect(input).to.not.deep.include(chunk); + t.pass(); + cb(); + if (i === 4) { + t.end(); + } + }, + }); const construct = (destKey: string) => { - destKeys.push(destKey); - const dest = map((chunk: Test) => ({ - ...chunk, - val: chunk.val + 1, - })) - .on("data", (d: Test) => { - expect(results).to.deep.include(d); - t.pass(); - }) - .on("end", () => { - i++; - if (i === dests.length) { - t.end(); - } - }); - dests.push(dest); + destinationStreamKeys.push(destKey); + const dest = map((chunk: Test) => { + return { + ...chunk, + val: chunk.val + 1, + }; + }); + + dest.pipe(sink); return dest; }; const demuxed = demux(construct, { key: "key" }, { objectMode: true }); demuxed.on("finish", () => { - expect(destKeys).to.deep.equal(["a", "b", "c"]); + expect(destinationStreamKeys).to.deep.equal(["a", "b", "c"]); t.pass(); - dests.forEach(dest => dest.end()); }); input.forEach(event => demuxed.write(event)); @@ -59,37 +63,41 @@ test.cb("should spread per key using keyBy", t => { t.plan(5); const input = [ { key: "a", val: 1 }, - { key: "a", val: 2 }, - { key: "b", val: 3 }, + { key: "b", val: 2 }, + { key: "a", val: 3 }, { key: "c", val: 4 }, ]; const results = [ { key: "a", val: 2 }, - { key: "a", val: 3 }, - { key: "b", val: 4 }, + { key: "b", val: 3 }, + { key: "a", val: 4 }, { key: "c", val: 5 }, ]; - const destKeys = []; - const dests = []; + const destinationStreamKeys = []; let i = 0; - + const sink = new Writable({ + objectMode: true, + write(chunk, enc, cb) { + i++; + expect(results).to.deep.include(chunk); + expect(input).to.not.deep.include(chunk); + t.pass(); + cb(); + if (i === 4) { + t.end(); + } + }, + }); const construct = (destKey: string) => { - destKeys.push(destKey); - const dest = map((chunk: Test) => ({ - ...chunk, - val: chunk.val + 1, - })) - .on("data", (d: Test) => { - expect(results).to.deep.include(d); - t.pass(); - }) - .on("end", () => { - i++; - if (i === dests.length) { - t.end(); - } - }); - dests.push(dest); + destinationStreamKeys.push(destKey); + const dest = map((chunk: Test) => { + return { + ...chunk, + val: chunk.val + 1, + }; + }); + + dest.pipe(sink); return dest; }; @@ -99,9 +107,8 @@ test.cb("should spread per key using keyBy", t => { { objectMode: true }, ); demuxed.on("finish", () => { - expect(destKeys).to.deep.equal(["a", "b", "c"]); + expect(destinationStreamKeys).to.deep.equal(["a", "b", "c"]); t.pass(); - dests.forEach(dest => dest.end()); }); input.forEach(event => demuxed.write(event)); From 2524d51aa7b778d292450430775a54d184fbb384 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 29 Aug 2019 14:39:08 -0400 Subject: [PATCH 38/69] Allow CB to be called by construction streams --- src/functions/compose.ts | 6 ++++-- src/functions/demux.ts | 20 +++++++++++--------- tests/demux.spec.ts | 11 ++--------- 3 files changed, 17 insertions(+), 20 deletions(-) diff --git a/src/functions/compose.ts b/src/functions/compose.ts index f53aa6c..40525a0 100644 --- a/src/functions/compose.ts +++ b/src/functions/compose.ts @@ -52,8 +52,10 @@ class Compose extends Duplex { return (this.last as NodeJS.ReadableStream).pipe(dest); } - public write(chunk: any) { - return (this.first as NodeJS.WritableStream).write(chunk); + public _write(chunk: any, encoding: string, cb: any) { + const res = (this.first as NodeJS.WritableStream).write(chunk); + cb(); + return res; } public on(event: string, cb: any) { diff --git a/src/functions/demux.ts b/src/functions/demux.ts index a0fead2..ef39284 100644 --- a/src/functions/demux.ts +++ b/src/functions/demux.ts @@ -14,7 +14,9 @@ export function demux( } class Demux extends Writable { - private keyMap: object; + private keyMap: { + [key: string]: NodeJS.WritableStream | NodeJS.ReadWriteStream; + }; private demuxer: (chunk: any) => string; private construct: ( destKey?: string, @@ -28,22 +30,22 @@ class Demux extends Writable { ) { super(options); if (demuxBy.keyBy === undefined && demuxBy.key === undefined) { - throw new Error("Need one"); + throw new Error( + "keyBy or key must be provided in second parameter", + ); } this.demuxer = demuxBy.keyBy || ((chunk: any) => chunk[demuxBy.key!]); this.construct = construct; this.keyMap = {}; } - public write(chunk: any, encoding?: any, cb?: any): boolean { + public _write(chunk: any, encoding: string, cb: any) { const destKey = this.demuxer(chunk); if (this.keyMap[destKey] === undefined) { - this.keyMap[destKey] = this.construct(destKey); + this.keyMap[destKey] = this.construct(destKey).on("error", e => { + this.emit("error", e); + }); } - const writeRes = this.keyMap[destKey].write(chunk); - if (cb !== undefined) { - cb(); - } - return writeRes; + return this.keyMap[destKey].write(chunk, encoding, cb); } } diff --git a/tests/demux.spec.ts b/tests/demux.spec.ts index dcd8ad0..4ed3e3f 100644 --- a/tests/demux.spec.ts +++ b/tests/demux.spec.ts @@ -26,14 +26,10 @@ test.cb("should spread per key", t => { const sink = new Writable({ objectMode: true, write(chunk, enc, cb) { - i++; expect(results).to.deep.include(chunk); expect(input).to.not.deep.include(chunk); t.pass(); cb(); - if (i === 4) { - t.end(); - } }, }); const construct = (destKey: string) => { @@ -53,6 +49,7 @@ test.cb("should spread per key", t => { demuxed.on("finish", () => { expect(destinationStreamKeys).to.deep.equal(["a", "b", "c"]); t.pass(); + t.end(); }); input.forEach(event => demuxed.write(event)); @@ -74,18 +71,13 @@ test.cb("should spread per key using keyBy", t => { { key: "c", val: 5 }, ]; const destinationStreamKeys = []; - let i = 0; const sink = new Writable({ objectMode: true, write(chunk, enc, cb) { - i++; expect(results).to.deep.include(chunk); expect(input).to.not.deep.include(chunk); t.pass(); cb(); - if (i === 4) { - t.end(); - } }, }); const construct = (destKey: string) => { @@ -109,6 +101,7 @@ test.cb("should spread per key using keyBy", t => { demuxed.on("finish", () => { expect(destinationStreamKeys).to.deep.equal(["a", "b", "c"]); t.pass(); + t.end(); }); input.forEach(event => demuxed.write(event)); From fe0e53147c39c6ebd3e5dbeb81842118d58792f3 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Fri, 30 Aug 2019 09:33:29 -0400 Subject: [PATCH 39/69] Handle backpressure --- package.json | 2 +- src/functions/demux.ts | 54 +++++++++++++++++++++++++++++++++++------ src/functions/map.ts | 2 ++ tests/demux.spec.ts | 55 ++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 105 insertions(+), 8 deletions(-) diff --git a/package.json b/package.json index d5a2ee0..58a655f 100644 --- a/package.json +++ b/package.json @@ -23,7 +23,7 @@ }, "scripts": { "test": "NODE_PATH=src node node_modules/.bin/ava 'tests/*.spec.ts' -e", - "test:debug": "NODE_PATH=src node inspect node_modules/ava/profile.ts", + "test:debug": "NODE_PATH=src node inspect node_modules/ava/profile.js", "test:all": "NODE_PATH=src node node_modules/.bin/ava", "lint": "tslint -p tsconfig.json", "validate:tslint": "tslint-config-prettier-check ./tslint.json", diff --git a/src/functions/demux.ts b/src/functions/demux.ts index ef39284..ef7a26a 100644 --- a/src/functions/demux.ts +++ b/src/functions/demux.ts @@ -14,10 +14,15 @@ export function demux( } class Demux extends Writable { - private keyMap: { - [key: string]: NodeJS.WritableStream | NodeJS.ReadWriteStream; + private streamsByKey: { + [key: string]: { + stream: NodeJS.WritableStream | NodeJS.ReadWriteStream; + writable: boolean; + }; }; private demuxer: (chunk: any) => string; + private isWritable: boolean; + private nonWritableStreams: Array; private construct: ( destKey?: string, ) => NodeJS.WritableStream | NodeJS.ReadWriteStream; @@ -36,16 +41,51 @@ class Demux extends Writable { } this.demuxer = demuxBy.keyBy || ((chunk: any) => chunk[demuxBy.key!]); this.construct = construct; - this.keyMap = {}; + this.streamsByKey = {}; + this.isWritable = true; } public _write(chunk: any, encoding: string, cb: any) { const destKey = this.demuxer(chunk); - if (this.keyMap[destKey] === undefined) { - this.keyMap[destKey] = this.construct(destKey).on("error", e => { - this.emit("error", e); + if (this.streamsByKey[destKey] === undefined) { + this.streamsByKey[destKey] = { + stream: this.construct(destKey), + writable: true, + }; + } + // Throttle when one stream is not writable anymore + // Set writable to false + // keep state of all the streams, if one is not writable demux shouldnt be writable + // Small optimization is to keep writing until you get a following event to the unwritable destination + + let res = false; + if (this.isWritable && this.streamsByKey[destKey].writable) { + res = this.streamsByKey[destKey].stream.write(chunk, encoding, cb); + } else if (this.isWritable) { + this.isWritable = false; + // Buffer chunk? + return this.isWritable; + } + + /* If write above returns false and the stream written to was writable previously, we need to make demux + * non-writable and update state to know the stream is nonWritable. + * If write returns true and the stream was previously not writable, we need to update which streams + * are non writable and determine if it is safe for demux to become writable (all streams are writable) + */ + if (!res) { + this.streamsByKey[destKey].writable = false; + this.nonWritableStreams.push(destKey); + this.isWritable = false; + this.streamsByKey[destKey].stream.once("drain", () => { + this.streamsByKey[destKey].writable = true; + this.nonWritableStreams = this.nonWritableStreams.filter( + key => key !== destKey, + ); + + this.isWritable = this.nonWritableStreams.length === 0; }); } - return this.keyMap[destKey].write(chunk, encoding, cb); + + return this.writable; } } diff --git a/src/functions/map.ts b/src/functions/map.ts index 0bf708d..61e84d5 100644 --- a/src/functions/map.ts +++ b/src/functions/map.ts @@ -14,6 +14,7 @@ export function map( writableObjectMode: true, }, ): Transform { + // remove try catch return new Transform({ ...options, async transform(chunk: T, encoding, callback) { @@ -22,6 +23,7 @@ export function map( this.push(mapped); callback(); } catch (err) { + console.log("caught error", err.message); callback(err); } }, diff --git a/tests/demux.spec.ts b/tests/demux.spec.ts index 4ed3e3f..cdc91d5 100644 --- a/tests/demux.spec.ts +++ b/tests/demux.spec.ts @@ -107,3 +107,58 @@ test.cb("should spread per key using keyBy", t => { input.forEach(event => demuxed.write(event)); demuxed.end(); }); + +test.cb("should emit errors", t => { + t.plan(2); + const input = [ + { key: "a", val: 1 }, + { key: "b", val: 2 }, + { key: "a", val: 3 }, + { key: "a", val: 4 }, + ]; + const results = [ + { key: "a", val: 2 }, + { key: "b", val: 3 }, + { key: "a", val: 4 }, + { key: "a", val: 5 }, + ]; + const destinationStreamKeys = []; + const sink = new Writable({ + objectMode: true, + write(chunk, enc, cb) { + expect(results).to.deep.include(chunk); + expect(input).to.not.deep.include(chunk); + t.pass(); + cb(); + }, + }).on("unpipe", e => console.log("sink err")); + + const construct = (destKey: string) => { + destinationStreamKeys.push(destKey); + const dest = map((chunk: Test) => { + if (chunk.key === "b") { + throw new Error("Caught object with key 'b'"); + } + return { + ...chunk, + val: chunk.val + 1, + }; + }).on("error", e => console.log("got err")); + + dest.pipe(sink); + return dest; + }; + + const demuxed = demux( + construct, + { keyBy: (chunk: any) => chunk.key }, + { objectMode: true }, + ); + demuxed.on("error", e => { + expect(e.message).to.equal("Caught object with key 'b'"); + t.pass(); + t.end(); + }); + input.forEach(event => demuxed.write(event)); + demuxed.end(); +}); From 2ee04a2d79b12044e684fe0a9df803f823d6fba6 Mon Sep 17 00:00:00 2001 From: Lewis Diamond Date: Fri, 30 Aug 2019 15:24:38 -0400 Subject: [PATCH 40/69] unclean --- src/functions/compose.ts | 83 +++++++++++++++++++++++++++++----------- src/functions/index.ts | 10 +---- 2 files changed, 62 insertions(+), 31 deletions(-) diff --git a/src/functions/compose.ts b/src/functions/compose.ts index f53aa6c..fea69eb 100644 --- a/src/functions/compose.ts +++ b/src/functions/compose.ts @@ -1,4 +1,11 @@ -import { pipeline, Duplex, DuplexOptions } from "stream"; +import { + pipeline, + Duplex, + Transform, + Readable, + Writable, + DuplexOptions, +} from "stream"; /** * Return a Readable stream of readable streams concatenated together @@ -12,7 +19,7 @@ export function compose( NodeJS.ReadableStream | NodeJS.ReadWriteStream | NodeJS.WritableStream >, options?: DuplexOptions, -): Duplex { +): Compose { // Maybe just return a new stream here if (streams.length < 2) { throw new Error("At least two streams are required to compose"); @@ -23,26 +30,41 @@ export function compose( return composed; } -class Compose extends Duplex { - private first: - | NodeJS.ReadableStream - | NodeJS.ReadWriteStream - | NodeJS.WritableStream; - private last: - | NodeJS.ReadableStream - | NodeJS.ReadWriteStream - | NodeJS.WritableStream; - constructor( - streams: Array< - | NodeJS.ReadableStream - | NodeJS.ReadWriteStream - | NodeJS.WritableStream - >, - options?: DuplexOptions, - ) { +enum EventSubscription { + Last = 0, + First, + All, + Self, +} +const eventsTarget = { + close: EventSubscription.Last, + data: EventSubscription.Last, + drain: EventSubscription.First, + end: EventSubscription.Last, + error: EventSubscription.Self, + finish: EventSubscription.Last, + pause: EventSubscription.Last, + pipe: EventSubscription.First, + readable: EventSubscription.Last, + resume: EventSubscription.Last, + unpipe: EventSubscription.First, +}; + +type AllStreams = + | NodeJS.ReadableStream + | NodeJS.ReadWriteStream + | NodeJS.WritableStream; + +export class Compose extends Duplex { + private first: AllStreams; + private last: AllStreams; + private streams: AllStreams[]; + + constructor(streams: AllStreams[], options?: DuplexOptions) { super(options); this.first = streams[0]; this.last = streams[streams.length - 1]; + this.streams = streams; pipeline(streams, (err: any) => { this.emit("error", err); }); @@ -56,11 +78,28 @@ class Compose extends Duplex { return (this.first as NodeJS.WritableStream).write(chunk); } + public bubble(...events: string[]) { + this.streams.forEach(s => { + events.forEach(e => { + s.on(e, (...args) => super.emit(e, ...args)); + }); + }); + } + public on(event: string, cb: any) { - if (event === "error") { - super.on(event, cb); + switch (eventsTarget[event]) { + case EventSubscription.First: + this.first.on(event, cb); + break; + case EventSubscription.Last: + this.last.on(event, cb); + break; + case EventSubscription.All: + this.streams.forEach(s => s.on(event, cb)); + break; + default: + super.on(event, cb); } - this.last.on(event, cb); return this; } } diff --git a/src/functions/index.ts b/src/functions/index.ts index 21ad7f9..2e4b8ad 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -288,12 +288,4 @@ export function accumulatorBy( return baseFunctions.accumulatorBy(batchRate, flushStrategy, iteratee); } -export function compose( - streams: Array, - options?: DuplexOptions, -) { - return baseFunctions.compose( - streams, - options, - ); -} +export const compose = baseFunctions.compose; From cd10649d44898fd22644031313d260e9f2773152 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Sat, 7 Sep 2019 11:04:33 -0400 Subject: [PATCH 41/69] WIP Add some backpressure tests for compose --- src/functions/compose.ts | 16 +--- src/functions/demux.ts | 34 +++----- src/functions/filter.ts | 14 ++-- src/functions/index.ts | 2 +- src/functions/map.ts | 3 +- tests/compose.spec.ts | 168 ++++++++++++++++++++++++++++++++++++++- tests/filter.spec.ts | 50 +++++++----- 7 files changed, 221 insertions(+), 66 deletions(-) diff --git a/src/functions/compose.ts b/src/functions/compose.ts index ee17101..da46ee8 100644 --- a/src/functions/compose.ts +++ b/src/functions/compose.ts @@ -1,11 +1,4 @@ -import { - pipeline, - Duplex, - Transform, - Readable, - Writable, - DuplexOptions, -} from "stream"; +import { pipeline, Duplex, DuplexOptions } from "stream"; /** * Return a Readable stream of readable streams concatenated together @@ -39,7 +32,7 @@ enum EventSubscription { const eventsTarget = { close: EventSubscription.Last, data: EventSubscription.Last, - drain: EventSubscription.First, + drain: EventSubscription.Self, end: EventSubscription.Last, error: EventSubscription.Self, finish: EventSubscription.Last, @@ -56,6 +49,7 @@ type AllStreams = | NodeJS.WritableStream; export class Compose extends Duplex { + public writable: boolean; private first: AllStreams; private last: AllStreams; private streams: AllStreams[]; @@ -75,9 +69,7 @@ export class Compose extends Duplex { } public _write(chunk: any, encoding: string, cb: any) { - const res = (this.first as NodeJS.WritableStream).write(chunk); - cb(); - return res; + (this.first as NodeJS.WritableStream).write(chunk, encoding, cb); } public bubble(...events: string[]) { diff --git a/src/functions/demux.ts b/src/functions/demux.ts index ef7a26a..138e6c7 100644 --- a/src/functions/demux.ts +++ b/src/functions/demux.ts @@ -14,6 +14,7 @@ export function demux( } class Demux extends Writable { + public isWritable: boolean; private streamsByKey: { [key: string]: { stream: NodeJS.WritableStream | NodeJS.ReadWriteStream; @@ -21,7 +22,6 @@ class Demux extends Writable { }; }; private demuxer: (chunk: any) => string; - private isWritable: boolean; private nonWritableStreams: Array; private construct: ( destKey?: string, @@ -43,9 +43,10 @@ class Demux extends Writable { this.construct = construct; this.streamsByKey = {}; this.isWritable = true; + this.nonWritableStreams = []; } - public _write(chunk: any, encoding: string, cb: any) { + public _write(chunk: any, encoding?: any, cb?: any) { const destKey = this.demuxer(chunk); if (this.streamsByKey[destKey] === undefined) { this.streamsByKey[destKey] = { @@ -57,35 +58,22 @@ class Demux extends Writable { // Set writable to false // keep state of all the streams, if one is not writable demux shouldnt be writable // Small optimization is to keep writing until you get a following event to the unwritable destination - let res = false; - if (this.isWritable && this.streamsByKey[destKey].writable) { + if (this.streamsByKey[destKey].writable && this.isWritable) { res = this.streamsByKey[destKey].stream.write(chunk, encoding, cb); - } else if (this.isWritable) { - this.isWritable = false; - // Buffer chunk? - return this.isWritable; } - - /* If write above returns false and the stream written to was writable previously, we need to make demux - * non-writable and update state to know the stream is nonWritable. - * If write returns true and the stream was previously not writable, we need to update which streams - * are non writable and determine if it is safe for demux to become writable (all streams are writable) - */ - if (!res) { + if (!res && this.isWritable) { + this.isWritable = false; this.streamsByKey[destKey].writable = false; this.nonWritableStreams.push(destKey); - this.isWritable = false; this.streamsByKey[destKey].stream.once("drain", () => { - this.streamsByKey[destKey].writable = true; - this.nonWritableStreams = this.nonWritableStreams.filter( - key => key !== destKey, - ); - + this.nonWritableStreams.filter(key => key !== destKey); this.isWritable = this.nonWritableStreams.length === 0; + this.streamsByKey[destKey].stream.write(chunk, encoding, cb); + if (this.isWritable) { + this.emit("drain"); + } }); } - - return this.writable; } } diff --git a/src/functions/filter.ts b/src/functions/filter.ts index 5714dd1..336db0c 100644 --- a/src/functions/filter.ts +++ b/src/functions/filter.ts @@ -1,5 +1,4 @@ -import { Transform } from "stream"; -import { ThroughOptions } from "./baseDefinitions"; +import { Transform, TransformOptions } from "stream"; /** * Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold * @param predicate Predicate with which to filter scream chunks @@ -10,20 +9,17 @@ export function filter( predicate: | ((chunk: T, encoding: string) => boolean) | ((chunk: T, encoding: string) => Promise), - options: ThroughOptions = { - objectMode: true, - }, + options?: TransformOptions, ) { return new Transform({ - readableObjectMode: options.objectMode, - writableObjectMode: options.objectMode, - async transform(chunk: T, encoding, callback) { + ...options, + async transform(chunk: T, encoding?: any, callback?: any) { let isPromise = false; try { const result = predicate(chunk, encoding); isPromise = result instanceof Promise; if (!!(await result)) { - callback(undefined, chunk); + callback(null, chunk); } else { callback(); } diff --git a/src/functions/index.ts b/src/functions/index.ts index a5b53db..59ff9c3 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -59,7 +59,7 @@ export function filter( mapper: | ((chunk: T, encoding: string) => boolean) | ((chunk: T, encoding: string) => Promise), - options?: ThroughOptions, + options?: TransformOptions, ): Transform { return baseFunctions.filter(mapper, options); } diff --git a/src/functions/map.ts b/src/functions/map.ts index 61e84d5..05fe627 100644 --- a/src/functions/map.ts +++ b/src/functions/map.ts @@ -20,8 +20,7 @@ export function map( async transform(chunk: T, encoding, callback) { try { const mapped = await mapper(chunk, encoding); - this.push(mapped); - callback(); + callback(null, mapped); } catch (err) { console.log("caught error", err.message); callback(err); diff --git a/tests/compose.spec.ts b/tests/compose.spec.ts index ed2304b..362f484 100644 --- a/tests/compose.spec.ts +++ b/tests/compose.spec.ts @@ -1,6 +1,8 @@ const test = require("ava"); const { expect } = require("chai"); -const { compose, composeDuplex, map } = require("../src"); +const { compose, composeDuplex, map, rate } = require("../src"); +const { sleep } = require("../src/helpers"); +import { performance } from "perf_hooks"; test.cb("compose() chains two streams together in the correct order", t => { t.plan(3); @@ -66,3 +68,167 @@ test.cb( input.forEach(item => composed.write(item)); }, ); + +test.cb( + "compose() should emit drain event after 1 second when first stream is bottleneck", + t => { + t.plan(1); + const first = map( + async (chunk: number) => { + await sleep(200); + return chunk; + }, + { + objectMode: true, + }, + ); + + const second = map( + async (chunk: number) => { + return chunk; + }, + { objectMode: true }, + ); + + const composed = compose( + [first, second], + { objectMode: true, highWaterMark: 2 }, + ); + composed.on("error", err => { + t.end(err); + }); + + composed.on("drain", err => { + expect(performance.now() - start).to.be.greaterThan(1000); + t.pass(); + }); + + composed.on("data", chunk => { + if (chunk.data === 5) { + t.end(); + } + }); + + const input = [ + { data: 1 }, + { data: 2 }, + { data: 3 }, + { data: 4 }, + { data: 5 }, + ]; + + input.forEach(item => { + composed.write(item); + }); + const start = performance.now(); + }, +); + +test.cb( + "compose() should emit drain event immediately when second stream is bottleneck", + t => { + t.plan(1); + const first = map( + async (chunk: number) => { + return chunk; + }, + { + objectMode: true, + }, + ); + + const second = map( + async (chunk: number) => { + await sleep(500); + return chunk; + }, + { objectMode: true }, + ); + + const composed = compose( + [first, second], + { objectMode: true, highWaterMark: 2 }, + ); + composed.on("error", err => { + t.end(err); + }); + + composed.on("drain", err => { + expect(performance.now() - start).to.be.lessThan(100); + t.pass(); + }); + + composed.on("data", chunk => { + if (chunk.data === 5) { + t.end(); + } + }); + + const input = [ + { data: 1 }, + { data: 2 }, + { data: 3 }, + { data: 4 }, + { data: 5 }, + ]; + + input.forEach(item => { + composed.write(item); + }); + const start = performance.now(); + }, +); + +test.cb( + "first should contain up to highWaterMark items in readable state when second is bottleneck", + t => { + t.plan(10); + const first = map( + async (chunk: number) => { + expect(first._readableState.length).to.be.at.most(2); + t.pass(); + return chunk; + }, + { + objectMode: true, + highWaterMark: 2, + }, + ); + + const second = map( + async (chunk: number) => { + expect(second._writableState.length).to.be.equal(1); + t.pass(); + await sleep(100); + return chunk; + }, + { objectMode: true, highWaterMark: 2 }, + ); + + const composed = compose( + [first, second], + { objectMode: true }, + ); + composed.on("error", err => { + t.end(err); + }); + + composed.on("data", chunk => { + if (chunk.data === 5) { + t.end(); + } + }); + + const input = [ + { data: 1 }, + { data: 2 }, + { data: 3 }, + { data: 4 }, + { data: 5 }, + ]; + + input.forEach(item => { + composed.write(item); + }); + }, +); diff --git a/tests/filter.spec.ts b/tests/filter.spec.ts index 7fa2053..0732d06 100644 --- a/tests/filter.spec.ts +++ b/tests/filter.spec.ts @@ -9,7 +9,12 @@ test.cb("filter() filters elements synchronously", t => { const expectedElements = ["a", "c"]; let i = 0; source - .pipe(filter((element: string) => element !== "b")) + .pipe( + filter((element: string) => element !== "b", { + readableObjectMode: true, + writableObjectMode: true, + }), + ) .on("data", (element: string) => { expect(element).to.equal(expectedElements[i]); t.pass(); @@ -31,10 +36,13 @@ test.cb("filter() filters elements asynchronously", t => { let i = 0; source .pipe( - filter(async (element: string) => { - await Promise.resolve(); - return element !== "b"; - }), + filter( + async (element: string) => { + await Promise.resolve(); + return element !== "b"; + }, + { readableObjectMode: true, writableObjectMode: true }, + ), ) .on("data", (element: string) => { expect(element).to.equal(expectedElements[i]); @@ -55,12 +63,15 @@ test.cb("filter() emits errors during synchronous filtering", t => { const source = new Readable({ objectMode: true }); source .pipe( - filter((element: string) => { - if (element !== "a") { - throw new Error("Failed filtering"); - } - return true; - }), + filter( + (element: string) => { + if (element !== "a") { + throw new Error("Failed filtering"); + } + return true; + }, + { readableObjectMode: true, writableObjectMode: true }, + ), ) .resume() .on("error", err => { @@ -80,13 +91,16 @@ test.cb("filter() emits errors during asynchronous filtering", t => { const source = new Readable({ objectMode: true }); source .pipe( - filter(async (element: string) => { - await Promise.resolve(); - if (element !== "a") { - throw new Error("Failed filtering"); - } - return true; - }), + filter( + async (element: string) => { + await Promise.resolve(); + if (element !== "a") { + throw new Error("Failed filtering"); + } + return true; + }, + { readableObjectMode: true, writableObjectMode: true }, + ), ) .resume() .on("error", err => { From ae7c9d6b09a2966ab97c6768ba92aad0c514134b Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Sat, 7 Sep 2019 14:27:55 -0400 Subject: [PATCH 42/69] Add test for highwatermark --- tests/compose.spec.ts | 87 ++++++++++++++++++++++++++++--------------- 1 file changed, 58 insertions(+), 29 deletions(-) diff --git a/tests/compose.spec.ts b/tests/compose.spec.ts index 362f484..1e8b57a 100644 --- a/tests/compose.spec.ts +++ b/tests/compose.spec.ts @@ -72,10 +72,15 @@ test.cb( test.cb( "compose() should emit drain event after 1 second when first stream is bottleneck", t => { - t.plan(1); + t.plan(6); + interface Chunk { + index: number; + mapped: string[]; + } const first = map( - async (chunk: number) => { + async (chunk: Chunk) => { await sleep(200); + chunk.mapped.push("first"); return chunk; }, { @@ -84,7 +89,8 @@ test.cb( ); const second = map( - async (chunk: number) => { + async (chunk: Chunk) => { + chunk.mapped.push("second"); return chunk; }, { objectMode: true }, @@ -103,8 +109,11 @@ test.cb( t.pass(); }); - composed.on("data", chunk => { - if (chunk.data === 5) { + composed.on("data", (chunk: Chunk) => { + expect(chunk.mapped.length).to.equal(2); + expect(chunk.mapped).to.deep.equal(["first", "second"]); + t.pass(); + if (chunk.index === 5) { t.end(); } }); @@ -127,9 +136,14 @@ test.cb( test.cb( "compose() should emit drain event immediately when second stream is bottleneck", t => { - t.plan(1); + t.plan(6); + interface Chunk { + index: number; + mapped: string[]; + } const first = map( - async (chunk: number) => { + async (chunk: Chunk) => { + chunk.mapped.push("first"); return chunk; }, { @@ -138,8 +152,9 @@ test.cb( ); const second = map( - async (chunk: number) => { + async (chunk: Chunk) => { await sleep(500); + chunk.mapped.push("second"); return chunk; }, { objectMode: true }, @@ -158,18 +173,21 @@ test.cb( t.pass(); }); - composed.on("data", chunk => { - if (chunk.data === 5) { + composed.on("data", (chunk: Chunk) => { + expect(chunk.mapped.length).to.equal(2); + expect(chunk.mapped).to.deep.equal(["first", "second"]); + t.pass(); + if (chunk.index === 5) { t.end(); } }); const input = [ - { data: 1 }, - { data: 2 }, - { data: 3 }, - { data: 4 }, - { data: 5 }, + { index: 1, mapped: [] }, + { index: 2, mapped: [] }, + { index: 3, mapped: [] }, + { index: 4, mapped: [] }, + { index: 5, mapped: [] }, ]; input.forEach(item => { @@ -180,13 +198,17 @@ test.cb( ); test.cb( - "first should contain up to highWaterMark items in readable state when second is bottleneck", + "compose() should emit drain event and first should contain up to highWaterMark items in readable state when second is bottleneck", t => { - t.plan(10); + t.plan(6); + interface Chunk { + index: number; + mapped: string[]; + } const first = map( - async (chunk: number) => { + async (chunk: Chunk) => { expect(first._readableState.length).to.be.at.most(2); - t.pass(); + chunk.mapped.push("first"); return chunk; }, { @@ -196,10 +218,10 @@ test.cb( ); const second = map( - async (chunk: number) => { + async (chunk: Chunk) => { expect(second._writableState.length).to.be.equal(1); - t.pass(); await sleep(100); + chunk.mapped.push("second"); return chunk; }, { objectMode: true, highWaterMark: 2 }, @@ -207,24 +229,31 @@ test.cb( const composed = compose( [first, second], - { objectMode: true }, + { objectMode: true, highWaterMark: 3 }, ); composed.on("error", err => { t.end(err); }); - composed.on("data", chunk => { - if (chunk.data === 5) { + composed.on("data", (chunk: Chunk) => { + expect(chunk.mapped.length).to.equal(2); + expect(chunk.mapped).to.deep.equal(["first", "second"]); + t.pass(); + if (chunk.index === 5) { t.end(); } }); + composed.on("drain", () => { + t.pass(); + }); + const input = [ - { data: 1 }, - { data: 2 }, - { data: 3 }, - { data: 4 }, - { data: 5 }, + { index: 1, mapped: [] }, + { index: 2, mapped: [] }, + { index: 3, mapped: [] }, + { index: 4, mapped: [] }, + { index: 5, mapped: [] }, ]; input.forEach(item => { From 2cbeae38e7e1ceced2c20697f47c82708c1966fb Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Sat, 7 Sep 2019 17:14:08 -0400 Subject: [PATCH 43/69] Test readable length in first --- tests/compose.spec.ts | 32 +++++++++++++++++++++++--------- 1 file changed, 23 insertions(+), 9 deletions(-) diff --git a/tests/compose.spec.ts b/tests/compose.spec.ts index 1e8b57a..17cb325 100644 --- a/tests/compose.spec.ts +++ b/tests/compose.spec.ts @@ -73,6 +73,7 @@ test.cb( "compose() should emit drain event after 1 second when first stream is bottleneck", t => { t.plan(6); + let passedBottleneck = 0; interface Chunk { index: number; mapped: string[]; @@ -80,6 +81,7 @@ test.cb( const first = map( async (chunk: Chunk) => { await sleep(200); + passedBottleneck++; chunk.mapped.push("first"); return chunk; }, @@ -98,18 +100,23 @@ test.cb( const composed = compose( [first, second], - { objectMode: true, highWaterMark: 2 }, + { objectMode: true, highWaterMark: 5 }, ); composed.on("error", err => { t.end(err); }); composed.on("drain", err => { + expect(composed._writableState.length).to.be.equal(0); expect(performance.now() - start).to.be.greaterThan(1000); t.pass(); }); composed.on("data", (chunk: Chunk) => { + // Since first is bottleneck, composed accumulates until cb is executed in first. Therefore buffer should contain 4, 3, 2, 1 then 0 elements + expect(composed._writableState.length).to.be.equal( + input.length - passedBottleneck, + ); expect(chunk.mapped.length).to.equal(2); expect(chunk.mapped).to.deep.equal(["first", "second"]); t.pass(); @@ -119,11 +126,11 @@ test.cb( }); const input = [ - { data: 1 }, - { data: 2 }, - { data: 3 }, - { data: 4 }, - { data: 5 }, + { index: 1, mapped: [] }, + { index: 2, mapped: [] }, + { index: 3, mapped: [] }, + { index: 4, mapped: [] }, + { index: 5, mapped: [] }, ]; input.forEach(item => { @@ -153,27 +160,32 @@ test.cb( const second = map( async (chunk: Chunk) => { + pendingReads--; await sleep(500); + expect(first._readableState.length).to.equal(pendingReads); chunk.mapped.push("second"); return chunk; }, - { objectMode: true }, + { objectMode: true, highWaterMark: 1 }, ); const composed = compose( [first, second], - { objectMode: true, highWaterMark: 2 }, + { objectMode: true, highWaterMark: 5 }, ); composed.on("error", err => { t.end(err); }); composed.on("drain", err => { + expect(composed._writableState.length).to.be.equal(0); expect(performance.now() - start).to.be.lessThan(100); t.pass(); }); composed.on("data", (chunk: Chunk) => { + // Since second is bottleneck, composed will write into first immediately. Buffer should be empty. + expect(composed._writableState.length).to.be.equal(0); expect(chunk.mapped.length).to.equal(2); expect(chunk.mapped).to.deep.equal(["first", "second"]); t.pass(); @@ -189,6 +201,7 @@ test.cb( { index: 4, mapped: [] }, { index: 5, mapped: [] }, ]; + let pendingReads = input.length; input.forEach(item => { composed.write(item); @@ -229,7 +242,7 @@ test.cb( const composed = compose( [first, second], - { objectMode: true, highWaterMark: 3 }, + { objectMode: true, highWaterMark: 5 }, ); composed.on("error", err => { t.end(err); @@ -245,6 +258,7 @@ test.cb( }); composed.on("drain", () => { + expect(composed._writableState.length).to.be.equal(0); t.pass(); }); From 599ba16d4889bdfa160e1a9a0405c3d14daac1b2 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Mon, 9 Sep 2019 08:58:04 -0400 Subject: [PATCH 44/69] Add more tests for compose --- tests/compose.spec.ts | 345 ++++++++++++++++++++++++++++++++++-------- 1 file changed, 285 insertions(+), 60 deletions(-) diff --git a/tests/compose.spec.ts b/tests/compose.spec.ts index 17cb325..f05a248 100644 --- a/tests/compose.spec.ts +++ b/tests/compose.spec.ts @@ -6,9 +6,20 @@ import { performance } from "perf_hooks"; test.cb("compose() chains two streams together in the correct order", t => { t.plan(3); + interface Chunk { + visited: number[]; + key: string; + } + let i = 0; - const first = map((chunk: number) => chunk + 1); - const second = map((chunk: number) => chunk * 2); + const first = map((chunk: Chunk) => { + chunk.visited.push(1); + return chunk; + }); + const second = map((chunk: Chunk) => { + chunk.visited.push(2); + return chunk; + }); const composed = compose( [first, second], @@ -16,7 +27,7 @@ test.cb("compose() chains two streams together in the correct order", t => { ); composed.on("data", data => { - expect(data).to.equal(result[i]); + expect(data).to.deep.equal(result[i]); t.pass(); i++; if (i === 3) { @@ -30,59 +41,82 @@ test.cb("compose() chains two streams together in the correct order", t => { t.end(); }); - const input = [1, 2, 3]; - const result = [4, 6, 8]; + const input = [ + { key: "a", visited: [] }, + { key: "b", visited: [] }, + { key: "c", visited: [] }, + ]; + const result = [ + { key: "a", visited: [1, 2] }, + { key: "b", visited: [1, 2] }, + { key: "c", visited: [1, 2] }, + ]; input.forEach(item => composed.write(item)); }); -test.cb( - "compose() followed by pipe chains streams together in the correct order", - t => { - t.plan(3); - let i = 0; - const first = map((chunk: number) => chunk + 1); - const second = map((chunk: number) => chunk * 2); +test.cb("piping compose() maintains correct order", t => { + t.plan(3); + interface Chunk { + visited: number[]; + key: string; + } + let i = 0; + const first = map((chunk: Chunk) => { + chunk.visited.push(1); + return chunk; + }); + const second = map((chunk: Chunk) => { + chunk.visited.push(2); + return chunk; + }); - const composed = compose( - [first, second], - { objectMode: true }, - ); - const third = map((chunk: number) => chunk + 1); - composed.pipe(third).on("data", data => { - expect(data).to.equal(result[i]); - t.pass(); - i++; - if (i === 3) { - t.end(); - } - }); + const composed = compose( + [first, second], + { objectMode: true }, + ); + const third = map((chunk: Chunk) => { + chunk.visited.push(3); + return chunk; + }); - composed.on("error", err => { - t.end(err); - }); + composed.pipe(third).on("data", data => { + expect(data).to.deep.equal(result[i]); + t.pass(); + i++; + if (i === 3) { + t.end(); + } + }); - const input = [1, 2, 3]; - const result = [5, 7, 9]; + composed.on("error", err => { + t.end(err); + }); - input.forEach(item => composed.write(item)); - }, -); + const input = [ + { key: "a", visited: [] }, + { key: "b", visited: [] }, + { key: "c", visited: [] }, + ]; + const result = [ + { key: "a", visited: [1, 2, 3] }, + { key: "b", visited: [1, 2, 3] }, + { key: "c", visited: [1, 2, 3] }, + ]; -test.cb( - "compose() should emit drain event after 1 second when first stream is bottleneck", - t => { - t.plan(6); - let passedBottleneck = 0; + input.forEach(item => composed.write(item)); +}); + +test("compose() writable length should be less than highWaterMark when handing writes", async t => { + t.plan(7); + return new Promise(async (resolve, reject) => { interface Chunk { - index: number; - mapped: string[]; + key: string; + mapped: number[]; } const first = map( async (chunk: Chunk) => { - await sleep(200); - passedBottleneck++; - chunk.mapped.push("first"); + chunk.mapped.push(1); return chunk; }, { @@ -92,7 +126,141 @@ test.cb( const second = map( async (chunk: Chunk) => { - chunk.mapped.push("second"); + chunk.mapped.push(2); + return chunk; + }, + { objectMode: true }, + ); + + const composed = compose( + [first, second], + { objectMode: true, highWaterMark: 2 }, + ); + composed.on("error", err => { + reject(); + }); + + composed.on("drain", () => { + t.pass(); + expect(composed._writableState.length).to.be.equal(0); + }); + + composed.on("data", (chunk: Chunk) => { + if (chunk.key === "e") { + resolve(); + } + }); + + const input = [ + { key: "a", mapped: [] }, + { key: "b", mapped: [] }, + { key: "c", mapped: [] }, + { key: "d", mapped: [] }, + { key: "e", mapped: [] }, + ]; + + for (const item of input) { + const res = composed.write(item); + expect(composed._writableState.length).to.be.at.most(2); + t.pass(); + if (!res) { + await sleep(10); + } + } + }); +}); + +test("compose() should emit drain event ~rate * highWaterMark ms for every write that causes backpressure", async t => { + t.plan(7); + const _rate = 25; + return new Promise(async (resolve, reject) => { + interface Chunk { + key: string; + mapped: number[]; + } + const first = map( + async (chunk: Chunk) => { + await sleep(_rate); + chunk.mapped.push(1); + return chunk; + }, + { + objectMode: true, + }, + ); + + const second = map( + async (chunk: Chunk) => { + chunk.mapped.push(2); + return chunk; + }, + { objectMode: true }, + ); + + const composed = compose( + [first, second], + { objectMode: true, highWaterMark: 2 }, + ); + composed.on("error", err => { + reject(); + }); + + composed.on("drain", () => { + t.pass(); + expect(composed._writableState.length).to.be.equal(0); + expect(performance.now() - start).to.be.greaterThan(_rate); + }); + + composed.on("data", (chunk: Chunk) => { + if (chunk.key === "e") { + resolve(); + } + }); + + const input = [ + { key: "a", mapped: [] }, + { key: "b", mapped: [] }, + { key: "c", mapped: [] }, + { key: "d", mapped: [] }, + { key: "e", mapped: [] }, + ]; + + let start = performance.now(); + for (const item of input) { + const res = composed.write(item); + expect(composed._writableState.length).to.be.at.most(2); + t.pass(); + if (!res) { + start = performance.now(); + await sleep(100); + } + } + }); +}); + +test.cb( + "compose() should emit drain event after 500 ms when writing 5 items that take 100ms to process with a highWaterMark of 5 ", + t => { + t.plan(6); + const _rate = 100; + interface Chunk { + key: string; + mapped: number[]; + } + const first = map( + async (chunk: Chunk) => { + await sleep(_rate); + chunk.mapped.push(1); + return chunk; + }, + { + objectMode: true, + }, + ); + + const second = map( + async (chunk: Chunk) => { + chunk.mapped.push(2); return chunk; }, { objectMode: true }, @@ -106,33 +274,28 @@ test.cb( t.end(err); }); - composed.on("drain", err => { + composed.on("drain", () => { expect(composed._writableState.length).to.be.equal(0); - expect(performance.now() - start).to.be.greaterThan(1000); + expect(performance.now() - start).to.be.greaterThan( + _rate * input.length, + ); t.pass(); }); composed.on("data", (chunk: Chunk) => { - // Since first is bottleneck, composed accumulates until cb is executed in first. Therefore buffer should contain 4, 3, 2, 1 then 0 elements - expect(composed._writableState.length).to.be.equal( - input.length - passedBottleneck, - ); - expect(chunk.mapped.length).to.equal(2); - expect(chunk.mapped).to.deep.equal(["first", "second"]); t.pass(); - if (chunk.index === 5) { + if (chunk.key === "e") { t.end(); } }); const input = [ - { index: 1, mapped: [] }, - { index: 2, mapped: [] }, - { index: 3, mapped: [] }, - { index: 4, mapped: [] }, - { index: 5, mapped: [] }, + { key: "a", mapped: [] }, + { key: "b", mapped: [] }, + { key: "c", mapped: [] }, + { key: "d", mapped: [] }, + { key: "e", mapped: [] }, ]; - input.forEach(item => { composed.write(item); }); @@ -177,7 +340,7 @@ test.cb( t.end(err); }); - composed.on("drain", err => { + composed.on("drain", () => { expect(composed._writableState.length).to.be.equal(0); expect(performance.now() - start).to.be.lessThan(100); t.pass(); @@ -275,3 +438,65 @@ test.cb( }); }, ); + +test.cb( + "compose() should not emit drain event writing 5 items to compose with a highWaterMark of 6", + t => { + t.plan(5); + const _rate = 100; + interface Chunk { + key: string; + mapped: number[]; + } + const first = map( + async (chunk: Chunk) => { + await sleep(_rate); + chunk.mapped.push(1); + return chunk; + }, + { + objectMode: true, + }, + ); + + const second = map( + async (chunk: Chunk) => { + chunk.mapped.push(2); + return chunk; + }, + { objectMode: true }, + ); + + const composed = compose( + [first, second], + { objectMode: true, highWaterMark: 6 }, + ); + + composed.on("error", err => { + t.end(err); + }); + + composed.on("drain", () => { + t.end(new Error("Drain should not be emitted")); + }); + + composed.on("data", (chunk: Chunk) => { + t.pass(); + if (chunk.key === "e") { + t.end(); + } + }); + + const input = [ + { key: "a", mapped: [] }, + { key: "b", mapped: [] }, + { key: "c", mapped: [] }, + { key: "d", mapped: [] }, + { key: "e", mapped: [] }, + ]; + + input.forEach(item => { + composed.write(item); + }); + }, +); From 0067ba6a7cdf3b10743c655fefa2ac955a6140ea Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Mon, 9 Sep 2019 11:53:21 -0400 Subject: [PATCH 45/69] Add tests for demux --- package.json | 14 +- src/functions/compose.ts | 1 + src/functions/demux.ts | 42 ++++ tests/compose.spec.ts | 29 ++- tests/demux.spec.ts | 502 ++++++++++++++++++++++++++++++++------- yarn.lock | 93 +++++++- 6 files changed, 576 insertions(+), 105 deletions(-) diff --git a/package.json b/package.json index 58a655f..76fd2bf 100644 --- a/package.json +++ b/package.json @@ -22,21 +22,23 @@ "type": "git" }, "scripts": { - "test": "NODE_PATH=src node node_modules/.bin/ava 'tests/*.spec.ts' -e", - "test:debug": "NODE_PATH=src node inspect node_modules/ava/profile.js", - "test:all": "NODE_PATH=src node node_modules/.bin/ava", - "lint": "tslint -p tsconfig.json", - "validate:tslint": "tslint-config-prettier-check ./tslint.json", - "prepublishOnly": "yarn lint && yarn test && yarn tsc" + "test": "NODE_PATH=src node node_modules/.bin/ava 'tests/*.spec.ts' -e", + "test:debug": "NODE_PATH=src node inspect node_modules/ava/profile.js", + "test:all": "NODE_PATH=src node node_modules/.bin/ava", + "lint": "tslint -p tsconfig.json", + "validate:tslint": "tslint-config-prettier-check ./tslint.json", + "prepublishOnly": "yarn lint && yarn test && yarn tsc" }, "dependencies": {}, "devDependencies": { "@types/chai": "^4.1.7", "@types/node": "^12.7.2", + "@types/sinon": "^7.0.13", "ava": "^1.0.0-rc.2", "chai": "^4.2.0", "mhysa": "./", "prettier": "^1.14.3", + "sinon": "^7.4.2", "ts-node": "^8.3.0", "tslint": "^5.11.0", "tslint-config-prettier": "^1.16.0", diff --git a/src/functions/compose.ts b/src/functions/compose.ts index da46ee8..00ff00e 100644 --- a/src/functions/compose.ts +++ b/src/functions/compose.ts @@ -29,6 +29,7 @@ enum EventSubscription { All, Self, } + const eventsTarget = { close: EventSubscription.Last, data: EventSubscription.Last, diff --git a/src/functions/demux.ts b/src/functions/demux.ts index 138e6c7..9c9e624 100644 --- a/src/functions/demux.ts +++ b/src/functions/demux.ts @@ -1,5 +1,27 @@ import { WritableOptions, Writable } from "stream"; +enum EventSubscription { + Last = 0, + First, + All, + Self, + Unhandled, +} + +const eventsTarget = { + close: EventSubscription.Self, + data: EventSubscription.All, + drain: EventSubscription.Self, + end: EventSubscription.Self, + error: EventSubscription.Self, + finish: EventSubscription.Self, + pause: EventSubscription.Self, + pipe: EventSubscription.Unhandled, + readable: EventSubscription.Self, + resume: EventSubscription.Self, + unpipe: EventSubscription.Unhandled, +}; + /** * Return a Duplex stream that is pushed data from multiple sources * @param streams Source streams to multiplex @@ -76,4 +98,24 @@ class Demux extends Writable { }); } } + + public on(event: string, cb: any) { + switch (eventsTarget[event]) { + case EventSubscription.Self: + super.on(event, cb); + break; + case EventSubscription.All: + Object.keys(this.streamsByKey).forEach(key => + this.streamsByKey[key].stream.on(event, cb), + ); + break; + case EventSubscription.Unhandled: + throw new Error( + "Stream must be multiplexed before handling this event", + ); + default: + super.on(event, cb); + } + return this; + } } diff --git a/tests/compose.spec.ts b/tests/compose.spec.ts index f05a248..6b97798 100644 --- a/tests/compose.spec.ts +++ b/tests/compose.spec.ts @@ -1,6 +1,6 @@ const test = require("ava"); const { expect } = require("chai"); -const { compose, composeDuplex, map, rate } = require("../src"); +const { compose, map } = require("../src"); const { sleep } = require("../src/helpers"); import { performance } from "perf_hooks"; @@ -308,12 +308,12 @@ test.cb( t => { t.plan(6); interface Chunk { - index: number; - mapped: string[]; + key: string; + mapped: number[]; } const first = map( async (chunk: Chunk) => { - chunk.mapped.push("first"); + chunk.mapped.push(1); return chunk; }, { @@ -324,9 +324,10 @@ test.cb( const second = map( async (chunk: Chunk) => { pendingReads--; - await sleep(500); + await sleep(200); + expect(second._writableState.length).to.be.equal(1); expect(first._readableState.length).to.equal(pendingReads); - chunk.mapped.push("second"); + chunk.mapped.push(2); return chunk; }, { objectMode: true, highWaterMark: 1 }, @@ -342,27 +343,25 @@ test.cb( composed.on("drain", () => { expect(composed._writableState.length).to.be.equal(0); - expect(performance.now() - start).to.be.lessThan(100); + expect(performance.now() - start).to.be.lessThan(50); t.pass(); }); composed.on("data", (chunk: Chunk) => { // Since second is bottleneck, composed will write into first immediately. Buffer should be empty. expect(composed._writableState.length).to.be.equal(0); - expect(chunk.mapped.length).to.equal(2); - expect(chunk.mapped).to.deep.equal(["first", "second"]); t.pass(); - if (chunk.index === 5) { + if (chunk.key === "e") { t.end(); } }); const input = [ - { index: 1, mapped: [] }, - { index: 2, mapped: [] }, - { index: 3, mapped: [] }, - { index: 4, mapped: [] }, - { index: 5, mapped: [] }, + { key: "a", mapped: [] }, + { key: "b", mapped: [] }, + { key: "c", mapped: [] }, + { key: "d", mapped: [] }, + { key: "e", mapped: [] }, ]; let pendingReads = input.length; diff --git a/tests/demux.spec.ts b/tests/demux.spec.ts index cdc91d5..9ff3498 100644 --- a/tests/demux.spec.ts +++ b/tests/demux.spec.ts @@ -1,53 +1,38 @@ import test from "ava"; import { expect } from "chai"; -import { demux, map } from "../src"; +const { demux, map } = require("../src"); import { Writable } from "stream"; +const sinon = require("sinon"); +const { sleep } = require("../src/helpers"); +import { performance } from "perf_hooks"; interface Test { key: string; - val: number; + visited: number[]; } -test.cb("should spread per key", t => { - t.plan(5); +test.cb("demux() constructor should be called once per key", t => { + t.plan(1); const input = [ - { key: "a", val: 1 }, - { key: "b", val: 2 }, - { key: "a", val: 3 }, - { key: "c", val: 4 }, + { key: "a", visited: [] }, + { key: "b", visited: [] }, + { key: "a", visited: [] }, + { key: "c", visited: [] }, ]; - const results = [ - { key: "a", val: 2 }, - { key: "b", val: 3 }, - { key: "a", val: 4 }, - { key: "c", val: 5 }, - ]; - const destinationStreamKeys = []; - let i = 0; - const sink = new Writable({ - objectMode: true, - write(chunk, enc, cb) { - expect(results).to.deep.include(chunk); - expect(input).to.not.deep.include(chunk); - t.pass(); - cb(); - }, - }); - const construct = (destKey: string) => { - destinationStreamKeys.push(destKey); + const construct = sinon.spy((destKey: string) => { const dest = map((chunk: Test) => { - return { - ...chunk, - val: chunk.val + 1, - }; + chunk.visited.push(1); + return chunk; }); - dest.pipe(sink); return dest; - }; + }); const demuxed = demux(construct, { key: "key" }, { objectMode: true }); + demuxed.on("finish", () => { - expect(destinationStreamKeys).to.deep.equal(["a", "b", "c"]); + expect(construct.withArgs("a").callCount).to.equal(1); + expect(construct.withArgs("b").callCount).to.equal(1); + expect(construct.withArgs("c").callCount).to.equal(1); t.pass(); t.end(); }); @@ -56,50 +41,34 @@ test.cb("should spread per key", t => { demuxed.end(); }); -test.cb("should spread per key using keyBy", t => { - t.plan(5); +test.cb("demux() constructor should be called once per key using keyBy", t => { + t.plan(1); const input = [ - { key: "a", val: 1 }, - { key: "b", val: 2 }, - { key: "a", val: 3 }, - { key: "c", val: 4 }, + { key: "a", visited: [] }, + { key: "b", visited: [] }, + { key: "a", visited: [] }, + { key: "c", visited: [] }, ]; - const results = [ - { key: "a", val: 2 }, - { key: "b", val: 3 }, - { key: "a", val: 4 }, - { key: "c", val: 5 }, - ]; - const destinationStreamKeys = []; - const sink = new Writable({ - objectMode: true, - write(chunk, enc, cb) { - expect(results).to.deep.include(chunk); - expect(input).to.not.deep.include(chunk); - t.pass(); - cb(); - }, - }); - const construct = (destKey: string) => { - destinationStreamKeys.push(destKey); + + const construct = sinon.spy((destKey: string) => { const dest = map((chunk: Test) => { - return { - ...chunk, - val: chunk.val + 1, - }; + chunk.visited.push(1); + return chunk; }); - dest.pipe(sink); return dest; - }; + }); const demuxed = demux( construct, - { keyBy: (chunk: any) => chunk.key }, + { keyBy: item => item.key }, { objectMode: true }, ); + demuxed.on("finish", () => { - expect(destinationStreamKeys).to.deep.equal(["a", "b", "c"]); + expect(construct.withArgs("a").callCount).to.equal(1); + expect(construct.withArgs("b").callCount).to.equal(1); + expect(construct.withArgs("c").callCount).to.equal(1); t.pass(); t.end(); }); @@ -110,17 +79,18 @@ test.cb("should spread per key using keyBy", t => { test.cb("should emit errors", t => { t.plan(2); + let index = 0; const input = [ - { key: "a", val: 1 }, - { key: "b", val: 2 }, - { key: "a", val: 3 }, - { key: "a", val: 4 }, + { key: "a", visited: [] }, + { key: "b", visited: [] }, + { key: "a", visited: [] }, + { key: "a", visited: [] }, ]; const results = [ - { key: "a", val: 2 }, - { key: "b", val: 3 }, - { key: "a", val: 4 }, - { key: "a", val: 5 }, + { key: "a", visited: [0] }, + { key: "b", visited: [1] }, + { key: "a", visited: [2] }, + { key: "a", visited: [3] }, ]; const destinationStreamKeys = []; const sink = new Writable({ @@ -131,7 +101,7 @@ test.cb("should emit errors", t => { t.pass(); cb(); }, - }).on("unpipe", e => console.log("sink err")); + }); const construct = (destKey: string) => { destinationStreamKeys.push(destKey); @@ -139,11 +109,12 @@ test.cb("should emit errors", t => { if (chunk.key === "b") { throw new Error("Caught object with key 'b'"); } - return { - ...chunk, - val: chunk.val + 1, - }; - }).on("error", e => console.log("got err")); + + const _chunk = { ...chunk, visited: [] }; + _chunk.visited.push(index); + index++; + return _chunk; + }).on("error", () => {}); dest.pipe(sink); return dest; @@ -162,3 +133,374 @@ test.cb("should emit errors", t => { input.forEach(event => demuxed.write(event)); demuxed.end(); }); + +test("compose() should emit drain event ~rate * highWaterMark ms for every write that causes backpressure", async t => { + t.plan(7); + const highWaterMark = 5; + const _rate = 25; + return new Promise(async (resolve, reject) => { + interface Chunk { + key: string; + mapped: number[]; + } + const sink = new Writable({ + objectMode: true, + write(chunk, encoding, cb) { + cb(); + t.pass(); + pendingReads--; + if (pendingReads === 0) { + resolve(); + } + }, + }); + const construct = (destKey: string) => { + const first = map(async (chunk: Chunk) => { + await sleep(_rate); + chunk.mapped.push(1); + return chunk; + }); + + const second = map(async (chunk: Chunk) => { + chunk.mapped.push(2); + return chunk; + }); + + first.pipe(second).pipe(sink); + return first; + }; + const _demux = demux( + construct, + { key: "key" }, + { + objectMode: true, + highWaterMark, + }, + ); + _demux.on("error", err => { + reject(); + }); + + _demux.on("drain", () => { + expect(_demux._writableState.length).to.be.equal(0); + expect(performance.now() - start).to.be.greaterThan(_rate); + t.pass(); + }); + + const input = [ + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + ]; + let pendingReads = input.length; + + let start = performance.now(); + for (const item of input) { + const res = _demux.write(item); + expect(_demux._writableState.length).to.be.at.most(highWaterMark); + if (!res) { + start = performance.now(); + await sleep(100); + } + } + }); +}); + +test("demux() should emit one drain event when writing 6 items with highWaterMark of 5", t => { + t.plan(7); + const highWaterMark = 5; + return new Promise(async (resolve, reject) => { + interface Chunk { + key: string; + mapped: number[]; + } + const sink = new Writable({ + objectMode: true, + write(chunk, encoding, cb) { + cb(); + t.pass(); + if (chunk.key === "f") { + resolve(); + } + }, + }); + const construct = (destKey: string) => { + const first = map(async (chunk: Chunk) => { + chunk.mapped.push(1); + return chunk; + }); + + const second = map(async (chunk: Chunk) => { + chunk.mapped.push(2); + return chunk; + }); + + first.pipe(second).pipe(sink); + return first; + }; + const _demux = demux( + construct, + { key: "key" }, + { + objectMode: true, + highWaterMark, + }, + ); + _demux.on("error", err => { + reject(); + }); + + _demux.on("drain", () => { + expect(_demux._writableState.length).to.be.equal(0); + t.pass(); + }); + + const input = [ + { key: "a", mapped: [] }, + { key: "b", mapped: [] }, + { key: "c", mapped: [] }, + { key: "d", mapped: [] }, + { key: "e", mapped: [] }, + { key: "f", mapped: [] }, + ]; + + for (const item of input) { + const res = _demux.write(item); + expect(_demux._writableState.length).to.be.at.most(highWaterMark); + if (!res) { + await sleep(10); + } + } + }); +}); + +test.cb( + "demux() should emit drain event after 500 ms when writing 5 items that take 100ms to process with a highWaterMark of 5 ", + t => { + t.plan(6); + const _rate = 100; + const highWaterMark = 5; + interface Chunk { + key: string; + mapped: number[]; + } + const sink = new Writable({ + objectMode: true, + write(chunk, encoding, cb) { + t.pass(); + cb(); + if (pendingReads === 0) { + t.end(); + } + }, + }); + const construct = (destKey: string) => { + const first = map( + async (chunk: Chunk) => { + chunk.mapped.push(1); + await sleep(_rate); + return chunk; + }, + { objectMode: true }, + ); + + const second = map( + (chunk: Chunk) => { + pendingReads--; + chunk.mapped.push(2); + return chunk; + }, + { objectMode: true, highWaterMark: 1 }, + ); + + first.pipe(second).pipe(sink); + return first; + }; + const _demux = demux( + construct, + { key: "key" }, + { + objectMode: true, + highWaterMark, + }, + ); + _demux.on("error", err => { + t.end(err); + }); + + _demux.on("drain", () => { + expect(_demux._writableState.length).to.be.equal(0); + expect(performance.now() - start).to.be.greaterThan( + _rate * input.length, + ); + t.pass(); + }); + + const input = [ + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + ]; + + let pendingReads = input.length; + input.forEach(item => { + _demux.write(item); + }); + const start = performance.now(); + }, +); +test.cb( + "demux() should emit drain event immediately when second stream is bottleneck", + t => { + t.plan(6); + const highWaterMark = 5; + interface Chunk { + key: string; + mapped: number[]; + } + const sink = new Writable({ + objectMode: true, + write(chunk, encoding, cb) { + t.pass(); + cb(); + if (pendingReads === 0) { + t.end(); + } + }, + }); + const construct = (destKey: string) => { + const first = map( + (chunk: Chunk) => { + chunk.mapped.push(1); + return chunk; + }, + { objectMode: true }, + ); + + const second = map( + async (chunk: Chunk) => { + pendingReads--; + await sleep(200); + chunk.mapped.push(2); + expect(second._writableState.length).to.be.equal(1); + expect(first._readableState.length).to.equal(pendingReads); + return chunk; + }, + { objectMode: true, highWaterMark: 1 }, + ); + + first.pipe(second).pipe(sink); + return first; + }; + const _demux = demux( + construct, + { key: "key" }, + { + objectMode: true, + highWaterMark, + }, + ); + _demux.on("error", err => { + t.end(err); + }); + + _demux.on("drain", () => { + expect(_demux._writableState.length).to.be.equal(0); + expect(performance.now() - start).to.be.lessThan(50); + t.pass(); + }); + + const input = [ + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + ]; + + let pendingReads = input.length; + input.forEach(item => { + _demux.write(item); + }); + const start = performance.now(); + }, +); + +test("demux() should emit drain event and first should contain up to highWaterMark items in readable state when second is bottleneck", t => { + t.plan(6); + const highWaterMark = 5; + return new Promise(async (resolve, reject) => { + interface Chunk { + key: string; + mapped: number[]; + } + const sink = new Writable({ + objectMode: true, + write(chunk, encoding, cb) { + t.pass(); + cb(); + if (pendingReads === 0) { + resolve(); + } + }, + }); + const construct = (destKey: string) => { + const first = map( + (chunk: Chunk) => { + expect(first._readableState.length).to.be.at.most(2); + chunk.mapped.push(1); + return chunk; + }, + { objectMode: 2, highWaterMark: 2 }, + ); + + const second = map( + async (chunk: Chunk) => { + chunk.mapped.push(2); + expect(second._writableState.length).to.be.equal(1); + await sleep(100); + pendingReads--; + return chunk; + }, + { objectMode: 2, highWaterMark: 2 }, + ); + + first.pipe(second).pipe(sink); + return first; + }; + const _demux = demux( + construct, + { key: "key" }, + { + objectMode: true, + highWaterMark, + }, + ); + _demux.on("error", err => { + reject(); + }); + + _demux.on("drain", () => { + expect(_demux._writableState.length).to.be.equal(0); + t.pass(); + }); + + const input = [ + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + ]; + let pendingReads = input.length; + + input.forEach(item => { + _demux.write(item); + }); + }); +}); diff --git a/yarn.lock b/yarn.lock index ee57991..ba435ef 100644 --- a/yarn.lock +++ b/yarn.lock @@ -326,6 +326,35 @@ dependencies: arrify "^1.0.1" +"@sinonjs/commons@^1", "@sinonjs/commons@^1.3.0", "@sinonjs/commons@^1.4.0": + version "1.6.0" + resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.6.0.tgz#ec7670432ae9c8eb710400d112c201a362d83393" + integrity sha512-w4/WHG7C4WWFyE5geCieFJF6MZkbW4VAriol5KlmQXpAQdxvV0p26sqNZOW6Qyw6Y0l9K4g+cHvvczR2sEEpqg== + dependencies: + type-detect "4.0.8" + +"@sinonjs/formatio@^3.2.1": + version "3.2.1" + resolved "https://registry.yarnpkg.com/@sinonjs/formatio/-/formatio-3.2.1.tgz#52310f2f9bcbc67bdac18c94ad4901b95fde267e" + integrity sha512-tsHvOB24rvyvV2+zKMmPkZ7dXX6LSLKZ7aOtXY6Edklp0uRcgGpOsQTTGTcWViFyx4uhWc6GV8QdnALbIbIdeQ== + dependencies: + "@sinonjs/commons" "^1" + "@sinonjs/samsam" "^3.1.0" + +"@sinonjs/samsam@^3.1.0", "@sinonjs/samsam@^3.3.3": + version "3.3.3" + resolved "https://registry.yarnpkg.com/@sinonjs/samsam/-/samsam-3.3.3.tgz#46682efd9967b259b81136b9f120fd54585feb4a" + integrity sha512-bKCMKZvWIjYD0BLGnNrxVuw4dkWCYsLqFOUWw8VgKF/+5Y+mE7LfHWPIYoDXowH+3a9LsWDMo0uAP8YDosPvHQ== + dependencies: + "@sinonjs/commons" "^1.3.0" + array-from "^2.1.1" + lodash "^4.17.15" + +"@sinonjs/text-encoding@^0.7.1": + version "0.7.1" + resolved "https://registry.yarnpkg.com/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz#8da5c6530915653f3a1f38fd5f101d8c3f8079c5" + integrity sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ== + "@types/chai@^4.1.7": version "4.2.0" resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.2.0.tgz#2478260021408dec32c123a7cad3414beb811a07" @@ -355,6 +384,11 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-12.7.2.tgz#c4e63af5e8823ce9cc3f0b34f7b998c2171f0c44" integrity sha512-dyYO+f6ihZEtNPDcWNR1fkoTDf3zAK3lAABDze3mz6POyIercH0lEUawUFXlG8xaQZmm1yEBON/4TsYv/laDYg== +"@types/sinon@^7.0.13": + version "7.0.13" + resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-7.0.13.tgz#ca039c23a9e27ebea53e0901ef928ea2a1a6d313" + integrity sha512-d7c/C/+H/knZ3L8/cxhicHUiTDxdgap0b/aNJfsmLwFu/iOP17mdgbQsbHA3SJmrzsjD0l3UEE5SN4xxuz5ung== + abbrev@1: version "1.1.1" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" @@ -452,6 +486,11 @@ array-find-index@^1.0.1: resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1" integrity sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E= +array-from@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/array-from/-/array-from-2.1.1.tgz#cfe9d8c26628b9dc5aecc62a9f5d8f1f352c1195" + integrity sha1-z+nYwmYoudxa7MYqn12PHzUsEZU= + array-union@^1.0.1, array-union@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" @@ -1113,7 +1152,7 @@ detect-libc@^1.0.2: resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= -diff@^3.2.0: +diff@^3.2.0, diff@^3.5.0: version "3.5.0" resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12" integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA== @@ -1826,6 +1865,11 @@ is-windows@^1.0.2: resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== +isarray@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" + integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8= + isarray@1.0.0, isarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" @@ -1893,6 +1937,11 @@ json5@^2.1.0: dependencies: minimist "^1.2.0" +just-extend@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-4.0.2.tgz#f3f47f7dfca0f989c55410a7ebc8854b07108afc" + integrity sha512-FrLwOgm+iXrPV+5zDU6Jqu4gCRXbWEQg2O3SKONsWE4w7AXFRkryS53bpWdaL9cNol+AmR3AEYz6kn+o0fCPnw== + kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" @@ -2011,7 +2060,7 @@ lodash.merge@^4.6.1: resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== -lodash@^4.17.13: +lodash@^4.17.13, lodash@^4.17.15: version "4.17.15" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548" integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A== @@ -2023,6 +2072,11 @@ log-symbols@^2.2.0: dependencies: chalk "^2.0.1" +lolex@^4.1.0, lolex@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/lolex/-/lolex-4.2.0.tgz#ddbd7f6213ca1ea5826901ab1222b65d714b3cd7" + integrity sha512-gKO5uExCXvSm6zbF562EvM+rd1kQDnB9AZBbiQVzf1ZmdDpxUSvpnAaVOP83N/31mRK8Ml8/VE8DMvsAZQ+7wg== + loud-rejection@^1.0.0, loud-rejection@^1.2.0: version "1.6.0" resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f" @@ -2253,6 +2307,17 @@ needle@^2.2.1: iconv-lite "^0.4.4" sax "^1.2.4" +nise@^1.5.2: + version "1.5.2" + resolved "https://registry.yarnpkg.com/nise/-/nise-1.5.2.tgz#b6d29af10e48b321b307e10e065199338eeb2652" + integrity sha512-/6RhOUlicRCbE9s+94qCUsyE+pKlVJ5AhIv+jEE7ESKwnbXqulKZ1FYU+XAtHHWE9TinYvAxDUJAb912PwPoWA== + dependencies: + "@sinonjs/formatio" "^3.2.1" + "@sinonjs/text-encoding" "^0.7.1" + just-extend "^4.0.2" + lolex "^4.1.0" + path-to-regexp "^1.7.0" + node-pre-gyp@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.12.0.tgz#39ba4bb1439da030295f899e3b520b7785766149" @@ -2545,6 +2610,13 @@ path-parse@^1.0.6: resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== +path-to-regexp@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.7.0.tgz#59fde0f435badacba103a84e9d3bc64e96b9937d" + integrity sha1-Wf3g9DW62suhA6hOnTvGTpa5k30= + dependencies: + isarray "0.0.1" + path-type@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-3.0.0.tgz#cef31dc8e0a1a3bb0d105c0cd97cf3bf47f4e36f" @@ -2906,6 +2978,19 @@ signal-exit@^3.0.0, signal-exit@^3.0.2: resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" integrity sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0= +sinon@^7.4.2: + version "7.4.2" + resolved "https://registry.yarnpkg.com/sinon/-/sinon-7.4.2.tgz#ecd54158fef2fcfbdb231a3fa55140e8cb02ad6c" + integrity sha512-pY5RY99DKelU3pjNxcWo6XqeB1S118GBcVIIdDi6V+h6hevn1izcg2xv1hTHW/sViRXU7sUOxt4wTUJ3gsW2CQ== + dependencies: + "@sinonjs/commons" "^1.4.0" + "@sinonjs/formatio" "^3.2.1" + "@sinonjs/samsam" "^3.3.3" + diff "^3.5.0" + lolex "^4.2.0" + nise "^1.5.2" + supports-color "^5.5.0" + slash@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/slash/-/slash-1.0.0.tgz#c41f2f6c39fc16d1cd17ad4b5d896114ae470d55" @@ -3126,7 +3211,7 @@ supertap@^1.0.0: serialize-error "^2.1.0" strip-ansi "^4.0.0" -supports-color@^5.3.0: +supports-color@^5.3.0, supports-color@^5.5.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== @@ -3281,7 +3366,7 @@ tsutils@^2.29.0: dependencies: tslib "^1.8.1" -type-detect@^4.0.0, type-detect@^4.0.5: +type-detect@4.0.8, type-detect@^4.0.0, type-detect@^4.0.5: version "4.0.8" resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== From ea2ffdb38cf2cfdb8431dde97129067be5f21ba8 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Mon, 9 Sep 2019 13:47:38 -0400 Subject: [PATCH 46/69] Add test for unwritable streams in demux --- 1:p | 589 ++++++++++++++++++++++++++++++++++++++++++ tests/compose.spec.ts | 8 +- tests/demux.spec.ts | 102 +++++++- 3 files changed, 684 insertions(+), 15 deletions(-) create mode 100644 1:p diff --git a/1:p b/1:p new file mode 100644 index 0000000..5f5fc0a --- /dev/null +++ b/1:p @@ -0,0 +1,589 @@ +import test from "ava"; +import { expect } from "chai"; +const { demux, map } = require("../src"); +import { Writable } from "stream"; +const sinon = require("sinon"); +const { sleep } = require("../src/helpers"); +import { performance } from "perf_hooks"; + +interface Test { + key: string; + visited: number[]; +} +test.cb("demux() constructor should be called once per key", t => { + t.plan(1); + const input = [ + { key: "a", visited: [] }, + { key: "b", visited: [] }, + { key: "a", visited: [] }, + { key: "c", visited: [] }, + { key: "a", visited: [] }, + { key: "b", visited: [] }, + ]; + const construct = sinon.spy((destKey: string) => { + const dest = map((chunk: Test) => { + chunk.visited.push(1); + return chunk; + }); + + return dest; + }); + + const demuxed = demux(construct, { key: "key" }, { objectMode: true }); + + demuxed.on("finish", () => { + expect(construct.withArgs("a").callCount).to.equal(1); + expect(construct.withArgs("b").callCount).to.equal(1); + expect(construct.withArgs("c").callCount).to.equal(1); + t.pass(); + t.end(); + }); + + input.forEach(event => demuxed.write(event)); + demuxed.end(); +}); + +test.cb("demux() constructor should be called once per key using keyBy", t => { + t.plan(1); + const input = [ + { key: "a", visited: [] }, + { key: "b", visited: [] }, + { key: "a", visited: [] }, + { key: "c", visited: [] }, + { key: "a", visited: [] }, + { key: "b", visited: [] }, + ]; + + const construct = sinon.spy((destKey: string) => { + const dest = map((chunk: Test) => { + chunk.visited.push(1); + return chunk; + }); + + return dest; + }); + + const demuxed = demux( + construct, + { keyBy: item => item.key }, + { objectMode: true }, + ); + + demuxed.on("finish", () => { + expect(construct.withArgs("a").callCount).to.equal(1); + expect(construct.withArgs("b").callCount).to.equal(1); + expect(construct.withArgs("c").callCount).to.equal(1); + t.pass(); + t.end(); + }); + + input.forEach(event => demuxed.write(event)); + demuxed.end(); +}); + +test.cb("should emit errors", t => { + t.plan(2); + let index = 0; + const input = [ + { key: "a", visited: [] }, + { key: "b", visited: [] }, + { key: "a", visited: [] }, + { key: "a", visited: [] }, + ]; + const results = [ + { key: "a", visited: [0] }, + { key: "b", visited: [1] }, + { key: "a", visited: [2] }, + { key: "a", visited: [3] }, + ]; + const destinationStreamKeys = []; + const sink = new Writable({ + objectMode: true, + write(chunk, enc, cb) { + expect(results).to.deep.include(chunk); + expect(input).to.not.deep.include(chunk); + t.pass(); + cb(); + }, + }); + + const construct = (destKey: string) => { + destinationStreamKeys.push(destKey); + const dest = map((chunk: Test) => { + if (chunk.key === "b") { + throw new Error("Caught object with key 'b'"); + } + + const _chunk = { ...chunk, visited: [] }; + _chunk.visited.push(index); + index++; + return _chunk; + }).on("error", () => {}); + + dest.pipe(sink); + return dest; + }; + + const demuxed = demux( + construct, + { keyBy: (chunk: any) => chunk.key }, + { objectMode: true }, + ); + demuxed.on("error", e => { + expect(e.message).to.equal("Caught object with key 'b'"); + t.pass(); + t.end(); + }); + input.forEach(event => demuxed.write(event)); + demuxed.end(); +}); + +test("demux() should emit drain event ~rate * highWaterMark ms for every write that causes backpressure", t => { + t.plan(7); + interface Chunk { + key: string; + mapped: number[]; + } + const highWaterMark = 5; + const _rate = 25; + return new Promise(async (resolve, reject) => { + const sink = new Writable({ + objectMode: true, + write(chunk, encoding, cb) { + cb(); + t.pass(); + pendingReads--; + if (pendingReads === 0) { + resolve(); + } + }, + }); + const construct = (destKey: string) => { + const first = map(async (chunk: Chunk) => { + await sleep(_rate); + chunk.mapped.push(1); + return chunk; + }); + + const second = map(async (chunk: Chunk) => { + chunk.mapped.push(2); + return chunk; + }); + + first.pipe(second).pipe(sink); + return first; + }; + const _demux = demux( + construct, + { key: "key" }, + { + objectMode: true, + highWaterMark, + }, + ); + _demux.on("error", err => { + reject(); + }); + + _demux.on("drain", () => { + expect(_demux._writableState.length).to.be.equal(0); + expect(performance.now() - start).to.be.greaterThan(_rate); + t.pass(); + }); + + const input = [ + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + ]; + let pendingReads = input.length; + + let start = performance.now(); + for (const item of input) { + const res = _demux.write(item); + expect(_demux._writableState.length).to.be.at.most(highWaterMark); + if (!res) { + start = performance.now(); + await sleep(100); + } + } + }); +}); + +test("demux() should emit one drain event when writing 6 items with highWaterMark of 5", t => { + t.plan(7); + const highWaterMark = 5; + return new Promise(async (resolve, reject) => { + interface Chunk { + key: string; + mapped: number[]; + } + const sink = new Writable({ + objectMode: true, + write(chunk, encoding, cb) { + cb(); + t.pass(); + pendingReads--; + if (pendingReads === 0) { + resolve(); + } + }, + }); + const construct = (destKey: string) => { + const first = map(async (chunk: Chunk) => { + chunk.mapped.push(1); + return chunk; + }); + + const second = map(async (chunk: Chunk) => { + chunk.mapped.push(2); + return chunk; + }); + + first.pipe(second).pipe(sink); + return first; + }; + const _demux = demux( + construct, + { key: "key" }, + { + objectMode: true, + highWaterMark, + }, + ); + _demux.on("error", err => { + reject(); + }); + + _demux.on("drain", () => { + expect(_demux._writableState.length).to.be.equal(0); + t.pass(); + }); + + const input = [ + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + ]; + let pendingReads = input.length; + + for (const item of input) { + const res = _demux.write(item); + expect(_demux._writableState.length).to.be.at.most(highWaterMark); + if (!res) { + await sleep(10); + } + } + }); +}); + +test.cb( + "demux() should emit drain event after 500 ms when writing 5 items that take 100ms to process with a highWaterMark of 5 ", + t => { + t.plan(6); + const _rate = 100; + const highWaterMark = 5; + interface Chunk { + key: string; + mapped: number[]; + } + const sink = new Writable({ + objectMode: true, + write(chunk, encoding, cb) { + t.pass(); + cb(); + if (pendingReads === 0) { + t.end(); + } + }, + }); + const construct = (destKey: string) => { + const first = map( + async (chunk: Chunk) => { + chunk.mapped.push(1); + await sleep(_rate); + return chunk; + }, + { objectMode: true }, + ); + + const second = map( + (chunk: Chunk) => { + pendingReads--; + chunk.mapped.push(2); + return chunk; + }, + { objectMode: true, highWaterMark: 1 }, + ); + + first.pipe(second).pipe(sink); + return first; + }; + const _demux = demux( + construct, + { key: "key" }, + { + objectMode: true, + highWaterMark, + }, + ); + _demux.on("error", err => { + t.end(err); + }); + + _demux.on("drain", () => { + expect(_demux._writableState.length).to.be.equal(0); + expect(performance.now() - start).to.be.greaterThan( + _rate * input.length, + ); + t.pass(); + }); + + const input = [ + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + ]; + + let pendingReads = input.length; + input.forEach(item => { + _demux.write(item); + }); + const start = performance.now(); + }, +); + +test.cb( + "demux() should emit drain event immediately when second stream is bottleneck", + t => { + t.plan(6); + const highWaterMark = 5; + interface Chunk { + key: string; + mapped: number[]; + } + const sink = new Writable({ + objectMode: true, + write(chunk, encoding, cb) { + t.pass(); + cb(); + if (pendingReads === 0) { + t.end(); + } + }, + }); + const construct = (destKey: string) => { + const first = map( + (chunk: Chunk) => { + chunk.mapped.push(1); + return chunk; + }, + { objectMode: true }, + ); + + const second = map( + async (chunk: Chunk) => { + pendingReads--; + await sleep(200); + chunk.mapped.push(2); + expect(second._writableState.length).to.be.equal(1); + expect(first._readableState.length).to.equal(pendingReads); + return chunk; + }, + { objectMode: true, highWaterMark: 1 }, + ); + + first.pipe(second).pipe(sink); + return first; + }; + const _demux = demux( + construct, + { key: "key" }, + { + objectMode: true, + highWaterMark, + }, + ); + _demux.on("error", err => { + t.end(err); + }); + + _demux.on("drain", () => { + expect(_demux._writableState.length).to.be.equal(0); + expect(performance.now() - start).to.be.lessThan(50); + t.pass(); + }); + + const input = [ + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + ]; + + let pendingReads = input.length; + input.forEach(item => { + _demux.write(item); + }); + const start = performance.now(); + }, +); + +test.only("demux() should only emit drain event when all streams are writable", t => { + t.plan(3); + const highWaterMark = 2; + interface Chunk { + key: string; + mapped: number[]; + } + return new Promise(async (resolve, reject) => { + const sink = new Writable({ + objectMode: true, + write(chunk, encoding, cb) { + t.pass(); + cb(); + console.log(chunk); + pendingReads--; + if (pendingReads === 0) { + resolve(); + } + }, + }); + const construct = (destKey: string) => { + const first = map( + (chunk: Chunk) => { + chunk.mapped.push(1); + return chunk; + }, + { objectMode: true }, + ); + + const second = map( + async (chunk: Chunk) => { + await sleep(25); + chunk.mapped.push(2); + return chunk; + }, + { objectMode: true, highWaterMark: 1 }, + ); + + first.pipe(second).pipe(sink); + return first; + }; + const _demux = demux( + construct, + { key: "key" }, + { + objectMode: true, + highWaterMark, + }, + ); + _demux.on("error", err => { + reject(); + }); + + _demux.on("drain", () => { + expect(_demux._writableState.length).to.be.equal(0); + expect(performance.now() - start).to.be.lessThan(50); + t.pass(); + }); + + const input = [ + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "b", mapped: [] }, // should only be recieved after a becomes writable + ]; + + let pendingReads = input.length; + let start = performance.now(); + for (const item of input) { + const res = _demux.write(item); + if (!res) { + await sleep(100); + start = performance.now(); + } + } + }); +}); +test("demux() should emit drain event and first should contain up to highWaterMark items in readable state when second is bottleneck", t => { + t.plan(6); + const highWaterMark = 5; + return new Promise(async (resolve, reject) => { + interface Chunk { + key: string; + mapped: number[]; + } + const sink = new Writable({ + objectMode: true, + write(chunk, encoding, cb) { + t.pass(); + cb(); + if (pendingReads === 0) { + resolve(); + } + }, + }); + const construct = (destKey: string) => { + const first = map( + (chunk: Chunk) => { + expect(first._readableState.length).to.be.at.most(2); + chunk.mapped.push(1); + return chunk; + }, + { objectMode: 2, highWaterMark: 2 }, + ); + + const second = map( + async (chunk: Chunk) => { + chunk.mapped.push(2); + expect(second._writableState.length).to.be.equal(1); + await sleep(100); + pendingReads--; + return chunk; + }, + { objectMode: 2, highWaterMark: 2 }, + ); + + first.pipe(second).pipe(sink); + return first; + }; + const _demux = demux( + construct, + { key: "key" }, + { + objectMode: true, + highWaterMark, + }, + ); + _demux.on("error", err => { + reject(); + }); + + _demux.on("drain", () => { + expect(_demux._writableState.length).to.be.equal(0); + t.pass(); + }); + + const input = [ + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + ]; + let pendingReads = input.length; + + input.forEach(item => { + _demux.write(item); + }); + }); +}); diff --git a/tests/compose.spec.ts b/tests/compose.spec.ts index 6b97798..2c5feb6 100644 --- a/tests/compose.spec.ts +++ b/tests/compose.spec.ts @@ -219,10 +219,10 @@ test("compose() should emit drain event ~rate * highWaterMark ms for every write const input = [ { key: "a", mapped: [] }, - { key: "b", mapped: [] }, - { key: "c", mapped: [] }, - { key: "d", mapped: [] }, - { key: "e", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, ]; let start = performance.now(); diff --git a/tests/demux.spec.ts b/tests/demux.spec.ts index 9ff3498..5504c20 100644 --- a/tests/demux.spec.ts +++ b/tests/demux.spec.ts @@ -17,6 +17,8 @@ test.cb("demux() constructor should be called once per key", t => { { key: "b", visited: [] }, { key: "a", visited: [] }, { key: "c", visited: [] }, + { key: "a", visited: [] }, + { key: "b", visited: [] }, ]; const construct = sinon.spy((destKey: string) => { const dest = map((chunk: Test) => { @@ -48,6 +50,8 @@ test.cb("demux() constructor should be called once per key using keyBy", t => { { key: "b", visited: [] }, { key: "a", visited: [] }, { key: "c", visited: [] }, + { key: "a", visited: [] }, + { key: "b", visited: [] }, ]; const construct = sinon.spy((destKey: string) => { @@ -134,15 +138,15 @@ test.cb("should emit errors", t => { demuxed.end(); }); -test("compose() should emit drain event ~rate * highWaterMark ms for every write that causes backpressure", async t => { +test("demux() should emit drain event ~rate * highWaterMark ms for every write that causes backpressure", t => { t.plan(7); + interface Chunk { + key: string; + mapped: number[]; + } const highWaterMark = 5; const _rate = 25; return new Promise(async (resolve, reject) => { - interface Chunk { - key: string; - mapped: number[]; - } const sink = new Writable({ objectMode: true, write(chunk, encoding, cb) { @@ -222,7 +226,8 @@ test("demux() should emit one drain event when writing 6 items with highWaterMar write(chunk, encoding, cb) { cb(); t.pass(); - if (chunk.key === "f") { + pendingReads--; + if (pendingReads === 0) { resolve(); } }, @@ -260,12 +265,13 @@ test("demux() should emit one drain event when writing 6 items with highWaterMar const input = [ { key: "a", mapped: [] }, - { key: "b", mapped: [] }, - { key: "c", mapped: [] }, - { key: "d", mapped: [] }, - { key: "e", mapped: [] }, - { key: "f", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, ]; + let pendingReads = input.length; for (const item of input) { const res = _demux.write(item); @@ -354,6 +360,7 @@ test.cb( const start = performance.now(); }, ); + test.cb( "demux() should emit drain event immediately when second stream is bottleneck", t => { @@ -431,6 +438,79 @@ test.cb( }, ); +test("demux() should only emit drain event when all streams are writable", t => { + t.plan(1); + const highWaterMark = 2; + interface Chunk { + key: string; + mapped: number[]; + } + return new Promise(async (resolve, reject) => { + const sink = new Writable({ + objectMode: true, + write(chunk, encoding, cb) { + cb(); + pendingReads--; + if (chunk.key === "b") { + expect(performance.now() - start).to.be.greaterThan(150); + t.pass(); + } + if (pendingReads === 0) { + resolve(); + } + }, + }); + const construct = (destKey: string) => { + const first = map( + (chunk: Chunk) => { + chunk.mapped.push(1); + return chunk; + }, + { objectMode: true }, + ); + + const second = map( + async (chunk: Chunk) => { + await sleep(50); + chunk.mapped.push(2); + return chunk; + }, + { objectMode: true, highWaterMark: 1 }, + ); + + first.pipe(second).pipe(sink); + return first; + }; + const _demux = demux( + construct, + { key: "key" }, + { + objectMode: true, + highWaterMark, + }, + ); + _demux.on("error", err => { + reject(); + }); + + const input = [ + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "c", mapped: [] }, + { key: "c", mapped: [] }, + { key: "b", mapped: [] }, // should only be recieved after a becomes writable + ]; + + let pendingReads = input.length; + let start = performance.now(); + for (const item of input) { + const res = _demux.write(item); + if (!res) { + await sleep(50); + } + } + }); +}); test("demux() should emit drain event and first should contain up to highWaterMark items in readable state when second is bottleneck", t => { t.plan(6); const highWaterMark = 5; From eed36a4fe91285171627996263664854ba4b76bc Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Mon, 9 Sep 2019 14:43:18 -0400 Subject: [PATCH 47/69] Lots of stuff --- 1:p | 589 ----------------------------------------- src/functions/demux.ts | 19 ++ tests/demux.spec.ts | 4 +- 3 files changed, 22 insertions(+), 590 deletions(-) delete mode 100644 1:p diff --git a/1:p b/1:p deleted file mode 100644 index 5f5fc0a..0000000 --- a/1:p +++ /dev/null @@ -1,589 +0,0 @@ -import test from "ava"; -import { expect } from "chai"; -const { demux, map } = require("../src"); -import { Writable } from "stream"; -const sinon = require("sinon"); -const { sleep } = require("../src/helpers"); -import { performance } from "perf_hooks"; - -interface Test { - key: string; - visited: number[]; -} -test.cb("demux() constructor should be called once per key", t => { - t.plan(1); - const input = [ - { key: "a", visited: [] }, - { key: "b", visited: [] }, - { key: "a", visited: [] }, - { key: "c", visited: [] }, - { key: "a", visited: [] }, - { key: "b", visited: [] }, - ]; - const construct = sinon.spy((destKey: string) => { - const dest = map((chunk: Test) => { - chunk.visited.push(1); - return chunk; - }); - - return dest; - }); - - const demuxed = demux(construct, { key: "key" }, { objectMode: true }); - - demuxed.on("finish", () => { - expect(construct.withArgs("a").callCount).to.equal(1); - expect(construct.withArgs("b").callCount).to.equal(1); - expect(construct.withArgs("c").callCount).to.equal(1); - t.pass(); - t.end(); - }); - - input.forEach(event => demuxed.write(event)); - demuxed.end(); -}); - -test.cb("demux() constructor should be called once per key using keyBy", t => { - t.plan(1); - const input = [ - { key: "a", visited: [] }, - { key: "b", visited: [] }, - { key: "a", visited: [] }, - { key: "c", visited: [] }, - { key: "a", visited: [] }, - { key: "b", visited: [] }, - ]; - - const construct = sinon.spy((destKey: string) => { - const dest = map((chunk: Test) => { - chunk.visited.push(1); - return chunk; - }); - - return dest; - }); - - const demuxed = demux( - construct, - { keyBy: item => item.key }, - { objectMode: true }, - ); - - demuxed.on("finish", () => { - expect(construct.withArgs("a").callCount).to.equal(1); - expect(construct.withArgs("b").callCount).to.equal(1); - expect(construct.withArgs("c").callCount).to.equal(1); - t.pass(); - t.end(); - }); - - input.forEach(event => demuxed.write(event)); - demuxed.end(); -}); - -test.cb("should emit errors", t => { - t.plan(2); - let index = 0; - const input = [ - { key: "a", visited: [] }, - { key: "b", visited: [] }, - { key: "a", visited: [] }, - { key: "a", visited: [] }, - ]; - const results = [ - { key: "a", visited: [0] }, - { key: "b", visited: [1] }, - { key: "a", visited: [2] }, - { key: "a", visited: [3] }, - ]; - const destinationStreamKeys = []; - const sink = new Writable({ - objectMode: true, - write(chunk, enc, cb) { - expect(results).to.deep.include(chunk); - expect(input).to.not.deep.include(chunk); - t.pass(); - cb(); - }, - }); - - const construct = (destKey: string) => { - destinationStreamKeys.push(destKey); - const dest = map((chunk: Test) => { - if (chunk.key === "b") { - throw new Error("Caught object with key 'b'"); - } - - const _chunk = { ...chunk, visited: [] }; - _chunk.visited.push(index); - index++; - return _chunk; - }).on("error", () => {}); - - dest.pipe(sink); - return dest; - }; - - const demuxed = demux( - construct, - { keyBy: (chunk: any) => chunk.key }, - { objectMode: true }, - ); - demuxed.on("error", e => { - expect(e.message).to.equal("Caught object with key 'b'"); - t.pass(); - t.end(); - }); - input.forEach(event => demuxed.write(event)); - demuxed.end(); -}); - -test("demux() should emit drain event ~rate * highWaterMark ms for every write that causes backpressure", t => { - t.plan(7); - interface Chunk { - key: string; - mapped: number[]; - } - const highWaterMark = 5; - const _rate = 25; - return new Promise(async (resolve, reject) => { - const sink = new Writable({ - objectMode: true, - write(chunk, encoding, cb) { - cb(); - t.pass(); - pendingReads--; - if (pendingReads === 0) { - resolve(); - } - }, - }); - const construct = (destKey: string) => { - const first = map(async (chunk: Chunk) => { - await sleep(_rate); - chunk.mapped.push(1); - return chunk; - }); - - const second = map(async (chunk: Chunk) => { - chunk.mapped.push(2); - return chunk; - }); - - first.pipe(second).pipe(sink); - return first; - }; - const _demux = demux( - construct, - { key: "key" }, - { - objectMode: true, - highWaterMark, - }, - ); - _demux.on("error", err => { - reject(); - }); - - _demux.on("drain", () => { - expect(_demux._writableState.length).to.be.equal(0); - expect(performance.now() - start).to.be.greaterThan(_rate); - t.pass(); - }); - - const input = [ - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - ]; - let pendingReads = input.length; - - let start = performance.now(); - for (const item of input) { - const res = _demux.write(item); - expect(_demux._writableState.length).to.be.at.most(highWaterMark); - if (!res) { - start = performance.now(); - await sleep(100); - } - } - }); -}); - -test("demux() should emit one drain event when writing 6 items with highWaterMark of 5", t => { - t.plan(7); - const highWaterMark = 5; - return new Promise(async (resolve, reject) => { - interface Chunk { - key: string; - mapped: number[]; - } - const sink = new Writable({ - objectMode: true, - write(chunk, encoding, cb) { - cb(); - t.pass(); - pendingReads--; - if (pendingReads === 0) { - resolve(); - } - }, - }); - const construct = (destKey: string) => { - const first = map(async (chunk: Chunk) => { - chunk.mapped.push(1); - return chunk; - }); - - const second = map(async (chunk: Chunk) => { - chunk.mapped.push(2); - return chunk; - }); - - first.pipe(second).pipe(sink); - return first; - }; - const _demux = demux( - construct, - { key: "key" }, - { - objectMode: true, - highWaterMark, - }, - ); - _demux.on("error", err => { - reject(); - }); - - _demux.on("drain", () => { - expect(_demux._writableState.length).to.be.equal(0); - t.pass(); - }); - - const input = [ - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - ]; - let pendingReads = input.length; - - for (const item of input) { - const res = _demux.write(item); - expect(_demux._writableState.length).to.be.at.most(highWaterMark); - if (!res) { - await sleep(10); - } - } - }); -}); - -test.cb( - "demux() should emit drain event after 500 ms when writing 5 items that take 100ms to process with a highWaterMark of 5 ", - t => { - t.plan(6); - const _rate = 100; - const highWaterMark = 5; - interface Chunk { - key: string; - mapped: number[]; - } - const sink = new Writable({ - objectMode: true, - write(chunk, encoding, cb) { - t.pass(); - cb(); - if (pendingReads === 0) { - t.end(); - } - }, - }); - const construct = (destKey: string) => { - const first = map( - async (chunk: Chunk) => { - chunk.mapped.push(1); - await sleep(_rate); - return chunk; - }, - { objectMode: true }, - ); - - const second = map( - (chunk: Chunk) => { - pendingReads--; - chunk.mapped.push(2); - return chunk; - }, - { objectMode: true, highWaterMark: 1 }, - ); - - first.pipe(second).pipe(sink); - return first; - }; - const _demux = demux( - construct, - { key: "key" }, - { - objectMode: true, - highWaterMark, - }, - ); - _demux.on("error", err => { - t.end(err); - }); - - _demux.on("drain", () => { - expect(_demux._writableState.length).to.be.equal(0); - expect(performance.now() - start).to.be.greaterThan( - _rate * input.length, - ); - t.pass(); - }); - - const input = [ - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - ]; - - let pendingReads = input.length; - input.forEach(item => { - _demux.write(item); - }); - const start = performance.now(); - }, -); - -test.cb( - "demux() should emit drain event immediately when second stream is bottleneck", - t => { - t.plan(6); - const highWaterMark = 5; - interface Chunk { - key: string; - mapped: number[]; - } - const sink = new Writable({ - objectMode: true, - write(chunk, encoding, cb) { - t.pass(); - cb(); - if (pendingReads === 0) { - t.end(); - } - }, - }); - const construct = (destKey: string) => { - const first = map( - (chunk: Chunk) => { - chunk.mapped.push(1); - return chunk; - }, - { objectMode: true }, - ); - - const second = map( - async (chunk: Chunk) => { - pendingReads--; - await sleep(200); - chunk.mapped.push(2); - expect(second._writableState.length).to.be.equal(1); - expect(first._readableState.length).to.equal(pendingReads); - return chunk; - }, - { objectMode: true, highWaterMark: 1 }, - ); - - first.pipe(second).pipe(sink); - return first; - }; - const _demux = demux( - construct, - { key: "key" }, - { - objectMode: true, - highWaterMark, - }, - ); - _demux.on("error", err => { - t.end(err); - }); - - _demux.on("drain", () => { - expect(_demux._writableState.length).to.be.equal(0); - expect(performance.now() - start).to.be.lessThan(50); - t.pass(); - }); - - const input = [ - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - ]; - - let pendingReads = input.length; - input.forEach(item => { - _demux.write(item); - }); - const start = performance.now(); - }, -); - -test.only("demux() should only emit drain event when all streams are writable", t => { - t.plan(3); - const highWaterMark = 2; - interface Chunk { - key: string; - mapped: number[]; - } - return new Promise(async (resolve, reject) => { - const sink = new Writable({ - objectMode: true, - write(chunk, encoding, cb) { - t.pass(); - cb(); - console.log(chunk); - pendingReads--; - if (pendingReads === 0) { - resolve(); - } - }, - }); - const construct = (destKey: string) => { - const first = map( - (chunk: Chunk) => { - chunk.mapped.push(1); - return chunk; - }, - { objectMode: true }, - ); - - const second = map( - async (chunk: Chunk) => { - await sleep(25); - chunk.mapped.push(2); - return chunk; - }, - { objectMode: true, highWaterMark: 1 }, - ); - - first.pipe(second).pipe(sink); - return first; - }; - const _demux = demux( - construct, - { key: "key" }, - { - objectMode: true, - highWaterMark, - }, - ); - _demux.on("error", err => { - reject(); - }); - - _demux.on("drain", () => { - expect(_demux._writableState.length).to.be.equal(0); - expect(performance.now() - start).to.be.lessThan(50); - t.pass(); - }); - - const input = [ - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "b", mapped: [] }, // should only be recieved after a becomes writable - ]; - - let pendingReads = input.length; - let start = performance.now(); - for (const item of input) { - const res = _demux.write(item); - if (!res) { - await sleep(100); - start = performance.now(); - } - } - }); -}); -test("demux() should emit drain event and first should contain up to highWaterMark items in readable state when second is bottleneck", t => { - t.plan(6); - const highWaterMark = 5; - return new Promise(async (resolve, reject) => { - interface Chunk { - key: string; - mapped: number[]; - } - const sink = new Writable({ - objectMode: true, - write(chunk, encoding, cb) { - t.pass(); - cb(); - if (pendingReads === 0) { - resolve(); - } - }, - }); - const construct = (destKey: string) => { - const first = map( - (chunk: Chunk) => { - expect(first._readableState.length).to.be.at.most(2); - chunk.mapped.push(1); - return chunk; - }, - { objectMode: 2, highWaterMark: 2 }, - ); - - const second = map( - async (chunk: Chunk) => { - chunk.mapped.push(2); - expect(second._writableState.length).to.be.equal(1); - await sleep(100); - pendingReads--; - return chunk; - }, - { objectMode: 2, highWaterMark: 2 }, - ); - - first.pipe(second).pipe(sink); - return first; - }; - const _demux = demux( - construct, - { key: "key" }, - { - objectMode: true, - highWaterMark, - }, - ); - _demux.on("error", err => { - reject(); - }); - - _demux.on("drain", () => { - expect(_demux._writableState.length).to.be.equal(0); - t.pass(); - }); - - const input = [ - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - ]; - let pendingReads = input.length; - - input.forEach(item => { - _demux.write(item); - }); - }); -}); diff --git a/src/functions/demux.ts b/src/functions/demux.ts index 9c9e624..647cd61 100644 --- a/src/functions/demux.ts +++ b/src/functions/demux.ts @@ -118,4 +118,23 @@ class Demux extends Writable { } return this; } + public once(event: string, cb: any) { + switch (eventsTarget[event]) { + case EventSubscription.Self: + super.once(event, cb); + break; + case EventSubscription.All: + Object.keys(this.streamsByKey).forEach(key => + this.streamsByKey[key].stream.once(event, cb), + ); + break; + case EventSubscription.Unhandled: + throw new Error( + "Stream must be multiplexed before handling this event", + ); + default: + super.once(event, cb); + } + return this; + } } diff --git a/tests/demux.spec.ts b/tests/demux.spec.ts index 5504c20..15d89c3 100644 --- a/tests/demux.spec.ts +++ b/tests/demux.spec.ts @@ -10,6 +10,7 @@ interface Test { key: string; visited: number[]; } + test.cb("demux() constructor should be called once per key", t => { t.plan(1); const input = [ @@ -502,7 +503,7 @@ test("demux() should only emit drain event when all streams are writable", t => ]; let pendingReads = input.length; - let start = performance.now(); + const start = performance.now(); for (const item of input) { const res = _demux.write(item); if (!res) { @@ -511,6 +512,7 @@ test("demux() should only emit drain event when all streams are writable", t => } }); }); + test("demux() should emit drain event and first should contain up to highWaterMark items in readable state when second is bottleneck", t => { t.plan(6); const highWaterMark = 5; From d33d8dcad39bc83cb58e48131dcb42c7d0c4d69f Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Mon, 9 Sep 2019 15:15:40 -0400 Subject: [PATCH 48/69] Add generic type --- src/functions/accumulator.ts | 2 +- src/functions/demux.ts | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/functions/accumulator.ts b/src/functions/accumulator.ts index b8faeab..c406afe 100644 --- a/src/functions/accumulator.ts +++ b/src/functions/accumulator.ts @@ -17,7 +17,7 @@ function _accumulator( const buffer: T[] = []; return new Transform({ ...options, - transform(data: any, encoding, callback) { + transform(data: T, encoding, callback) { try { accumulateBy(data, buffer, this); callback(); diff --git a/src/functions/demux.ts b/src/functions/demux.ts index 647cd61..a72eb69 100644 --- a/src/functions/demux.ts +++ b/src/functions/demux.ts @@ -118,6 +118,7 @@ class Demux extends Writable { } return this; } + public once(event: string, cb: any) { switch (eventsTarget[event]) { case EventSubscription.Self: From 7aeea4815a2c602801533aea71e5c10ea28dffcf Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Mon, 9 Sep 2019 15:54:29 -0400 Subject: [PATCH 49/69] Add descriptions for demux and compose --- src/functions/collect.ts | 3 +- src/functions/compose.ts | 25 +++++-- src/functions/demux.ts | 9 +-- src/functions/index.ts | 153 +++++++++------------------------------ tests/compose.spec.ts | 4 +- 5 files changed, 60 insertions(+), 134 deletions(-) diff --git a/src/functions/collect.ts b/src/functions/collect.ts index 33b7330..9507565 100644 --- a/src/functions/collect.ts +++ b/src/functions/collect.ts @@ -10,8 +10,7 @@ export function collect( ): Transform { const collected: any[] = []; return new Transform({ - readableObjectMode: options.objectMode, - writableObjectMode: options.objectMode, + ...options, transform(data, encoding, callback) { collected.push(data); callback(); diff --git a/src/functions/compose.ts b/src/functions/compose.ts index 00ff00e..3212fb9 100644 --- a/src/functions/compose.ts +++ b/src/functions/compose.ts @@ -4,23 +4,17 @@ import { pipeline, Duplex, DuplexOptions } from "stream"; * Return a Readable stream of readable streams concatenated together * @param streams Readable streams to concatenate */ -// First Readable --> Readable -// First Transform | Duplex, Last Writable --> Writable -// export function compose( streams: Array< NodeJS.ReadableStream | NodeJS.ReadWriteStream | NodeJS.WritableStream >, options?: DuplexOptions, ): Compose { - // Maybe just return a new stream here if (streams.length < 2) { throw new Error("At least two streams are required to compose"); } - const composed = new Compose(streams, options); - - return composed; + return new Compose(streams, options); } enum EventSubscription { @@ -97,4 +91,21 @@ export class Compose extends Duplex { } return this; } + + public once(event: string, cb: any) { + switch (eventsTarget[event]) { + case EventSubscription.First: + this.first.once(event, cb); + break; + case EventSubscription.Last: + this.last.once(event, cb); + break; + case EventSubscription.All: + this.streams.forEach(s => s.once(event, cb)); + break; + default: + super.once(event, cb); + } + return this; + } } diff --git a/src/functions/demux.ts b/src/functions/demux.ts index a72eb69..77e7d48 100644 --- a/src/functions/demux.ts +++ b/src/functions/demux.ts @@ -57,9 +57,7 @@ class Demux extends Writable { ) { super(options); if (demuxBy.keyBy === undefined && demuxBy.key === undefined) { - throw new Error( - "keyBy or key must be provided in second parameter", - ); + throw new Error("keyBy or key must be provided in second argument"); } this.demuxer = demuxBy.keyBy || ((chunk: any) => chunk[demuxBy.key!]); this.construct = construct; @@ -68,6 +66,7 @@ class Demux extends Writable { this.nonWritableStreams = []; } + // Throttles when one stream is not writable public _write(chunk: any, encoding?: any, cb?: any) { const destKey = this.demuxer(chunk); if (this.streamsByKey[destKey] === undefined) { @@ -76,10 +75,6 @@ class Demux extends Writable { writable: true, }; } - // Throttle when one stream is not writable anymore - // Set writable to false - // keep state of all the streams, if one is not writable demux shouldnt be writable - // Small optimization is to keep writing until you get a following event to the unwritable destination let res = false; if (this.streamsByKey[destKey].writable && this.isWritable) { res = this.streamsByKey[destKey].stream.write(chunk, encoding, cb); diff --git a/src/functions/index.ts b/src/functions/index.ts index 59ff9c3..0998e26 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -1,4 +1,4 @@ -import { Readable, Writable, DuplexOptions, Transform, Duplex } from "stream"; +import { Readable, Writable, Transform, Duplex } from "stream"; import { ChildProcess } from "child_process"; import * as baseFunctions from "./baseFunctions"; @@ -7,17 +7,13 @@ import { TransformOptions, WithEncoding, JsonParseOptions, - FlushStrategy, - AccumulatorByIteratee, } from "./baseDefinitions"; /** * Convert an array into a Readable stream of its elements * @param array Array of elements to stream */ -export function fromArray(array: any[]): Readable { - return baseFunctions.fromArray(array); -} +export const fromArray = baseFunctions.fromArray; /** * Return a ReadWrite stream that maps streamed chunks @@ -26,12 +22,7 @@ export function fromArray(array: any[]): Readable { * @param options.readableObjectMode? Whether this stream should behave as a readable stream of objects * @param options.writableObjectMode? Whether this stream should behave as a writable stream of objects */ -export function map( - mapper: (chunk: T, encoding?: string) => R, - options?: TransformOptions, -): Transform { - return baseFunctions.map(mapper, options); -} +export const map = baseFunctions.map; /** * Return a ReadWrite stream that flat maps streamed chunks @@ -40,14 +31,7 @@ export function map( * @param options.readableObjectMode? Whether this stream should behave as a readable stream of objects * @param options.writableObjectMode? Whether this stream should behave as a writable stream of objects */ -export function flatMap( - mapper: - | ((chunk: T, encoding: string) => R[]) - | ((chunk: T, encoding: string) => Promise), - options?: TransformOptions, -): Transform { - return baseFunctions.flatMap(mapper, options); -} +export const flatMap = baseFunctions.flatMap; /** * Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold @@ -55,14 +39,7 @@ export function flatMap( * @param options? * @param options.objectMode? Whether this stream should behave as a stream of objects. */ -export function filter( - mapper: - | ((chunk: T, encoding: string) => boolean) - | ((chunk: T, encoding: string) => Promise), - options?: TransformOptions, -): Transform { - return baseFunctions.filter(mapper, options); -} +export const filter = baseFunctions.filter; /** * Return a ReadWrite stream that reduces streamed chunks down to a single value and yield that @@ -73,15 +50,7 @@ export function filter( * @param options.readableObjectMode? Whether this stream should behave as a readable stream of objects * @param options.writableObjectMode? Whether this stream should behave as a writable stream of objects */ -export function reduce( - iteratee: - | ((previousValue: R, chunk: T, encoding: string) => R) - | ((previousValue: R, chunk: T, encoding: string) => Promise), - initialValue: R, - options?: TransformOptions, -): Transform { - return baseFunctions.reduce(iteratee, initialValue, options); -} +export const reduce = baseFunctions.reduce; /** * Return a ReadWrite stream that splits streamed chunks using the given separator @@ -89,12 +58,7 @@ export function reduce( * @param options? Defaults to encoding: utf8 * @param options.encoding? Encoding written chunks are assumed to use */ -export function split( - separator?: string | RegExp, - options?: WithEncoding, -): Transform { - return baseFunctions.split(separator, options); -} +export const split = baseFunctions.split; /** * Return a ReadWrite stream that joins streamed chunks using the given separator @@ -102,9 +66,7 @@ export function split( * @param options? Defaults to encoding: utf8 * @param options.encoding? Encoding written chunks are assumed to use */ -export function join(separator: string, options?: WithEncoding): Transform { - return baseFunctions.join(separator, options); -} +export const join = baseFunctions.join; /** * Return a ReadWrite stream that replaces occurrences of the given string or regular expression in @@ -114,21 +76,13 @@ export function join(separator: string, options?: WithEncoding): Transform { * @param options? Defaults to encoding: utf8 * @param options.encoding Encoding written chunks are assumed to use */ -export function replace( - searchValue: string | RegExp, - replaceValue: string, - options?: WithEncoding, -): Transform { - return baseFunctions.replace(searchValue, replaceValue, options); -} +export const replace = baseFunctions.replace; /** * Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk * must be a fully defined JSON string in utf8. */ -export function parse(): Transform { - return baseFunctions.parse(); -} +export const parse = baseFunctions.parse; /** * Return a ReadWrite stream that stringifies the streamed chunks to JSON @@ -136,34 +90,26 @@ export function parse(): Transform { * @param options.pretty If true, whitespace is inserted into the stringified chunks. * */ -export function stringify(options?: JsonParseOptions): Transform { - return baseFunctions.stringify(options); -} +export const stringify = baseFunctions.stringify; /** * Return a ReadWrite stream that collects streamed chunks into an array or buffer * @param options? * @param options.objectMode? Whether this stream should behave as a stream of objects */ -export function collect(options?: ThroughOptions): Transform { - return baseFunctions.collect(options); -} +export const collect = baseFunctions.collect; /** * Return a Readable stream of readable streams concatenated together * @param streams Readable streams to concatenate */ -export function concat(...streams: Readable[]): Readable { - return baseFunctions.concat(...streams); -} +export const concat = baseFunctions.concat; /** * Return a Readable stream of readable streams concatenated together * @param streams Readable streams to merge */ -export function merge(...streams: Readable[]): Readable { - return baseFunctions.merge(...streams); -} +export const merge = baseFunctions.merge; /** * Return a Duplex stream from a writable stream that is assumed to somehow, when written to, @@ -171,42 +117,34 @@ export function merge(...streams: Readable[]): Readable { * @param writable Writable stream assumed to cause the readable stream to yield chunks when written to * @param readable Readable stream assumed to yield chunks when the writable stream is written to */ -export function duplex(writable: Writable, readable: Readable): Duplex { - return baseFunctions.duplex(writable, readable); -} +export const duplex = baseFunctions.duplex; /** * Return a Duplex stream from a child process' stdin and stdout * @param childProcess Child process from which to create duplex stream */ -export function child(childProcess: ChildProcess): Duplex { - return baseFunctions.child(childProcess); -} +export const child = baseFunctions.child; /** * Return a Promise resolving to the last streamed chunk of the given readable stream, after it has * ended * @param readable Readable stream to wait on */ -export function last(readable: Readable): Promise { - return baseFunctions.last(readable); -} +export const last = baseFunctions.last; /** * Stores chunks of data internally in array and batches when batchSize is reached. * @param batchSize Size of the batches, defaults to 1000. * @param maxBatchAge? Max lifetime of a batch, defaults to 500 */ -export function batch(batchSize: number, maxBatchAge?: number): Transform { +export function batch(batchSize?: number, maxBatchAge?: number): Transform { return baseFunctions.batch(batchSize, maxBatchAge); } /** * Unbatches and sends individual chunks of data. */ -export function unbatch(): Transform { - return baseFunctions.unbatch(); -} +export const unbatch = baseFunctions.unbatch; /** * Limits rate of data transferred into stream. @@ -224,13 +162,7 @@ export function rate(targetRate?: number, period?: number): Transform { * @param func Function to execute on each data chunk. * @param pause Amount of time to pause processing when max number of parallel processes are executing. */ -export function parallelMap( - mapper: (chunk: T) => R, - parallel?: number, - sleepTime?: number, -) { - return baseFunctions.parallelMap(mapper, parallel, sleepTime); -} +export const parallelMap = baseFunctions.parallelMap; /** * Accummulates and sends batches of data. Each chunk that flows into the stream is checked against items @@ -252,19 +184,7 @@ export function parallelMap( * @param flushStrategy Buffering strategy to use. * @param keyBy Key to determine if element fits into buffer or items need to be cleared from buffer. */ -export function accumulator( - batchSize: number, - batchRate: number | undefined, - flushStrategy: FlushStrategy, - keyBy?: string, -) { - return baseFunctions.accumulator( - batchSize, - batchRate, - flushStrategy, - keyBy, - ); -} +export const accumulator = baseFunctions.accumulator; /** * Accummulates and sends batches of data. Each chunk that flows into the stream is checked against items @@ -280,22 +200,21 @@ export function accumulator( * @param iteratee Function applied to buffer when a chunk of data enters stream to determine if element fits into * or items need to be cleared from buffer. */ -export function accumulatorBy( - batchRate: number | undefined, - flushStrategy: S, - iteratee: AccumulatorByIteratee, -) { - return baseFunctions.accumulatorBy(batchRate, flushStrategy, iteratee); -} +export const accumulatorBy = baseFunctions.accumulatorBy; +/** + * Composes multiple streams together. Writing occurs on first stream, piping occurs from last stream. + * @param streams Array of streams to compose. Minimum of two. + * @param options Transform stream options + **/ export const compose = baseFunctions.compose; -export function demux( - construct: ( - destKey?: string, - ) => NodeJS.WritableStream | NodeJS.ReadWriteStream, - demuxer: { key?: string; keyBy?: (chunk: any) => string }, - options?: DuplexOptions, -) { - return baseFunctions.demux(construct, demuxer, options); -} +/** + * Composes multiple streams together. Writing occurs on first stream, piping occurs from last stream. + * @param construct Constructor for new output source. Should return a Writable or ReadWrite stream. + * @param demuxBy + * @param demuxBy.key? Key to fetch value from source chunks to demultiplex source. + * @param demuxBy.keyBy? Function to fetch value from source chunks to demultiplex source. + * @param options Writable stream options + **/ +export const demux = baseFunctions.demux; diff --git a/tests/compose.spec.ts b/tests/compose.spec.ts index 2c5feb6..acc6e2e 100644 --- a/tests/compose.spec.ts +++ b/tests/compose.spec.ts @@ -212,7 +212,8 @@ test("compose() should emit drain event ~rate * highWaterMark ms for every write }); composed.on("data", (chunk: Chunk) => { - if (chunk.key === "e") { + pendingReads--; + if (pendingReads === 0) { resolve(); } }); @@ -226,6 +227,7 @@ test("compose() should emit drain event ~rate * highWaterMark ms for every write ]; let start = performance.now(); + let pendingReads = input.length; for (const item of input) { const res = composed.write(item); expect(composed._writableState.length).to.be.at.most(2); From 83ef6e9734064fce256e21488c1f147f326cf9a8 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Mon, 9 Sep 2019 15:58:35 -0400 Subject: [PATCH 50/69] remove duplicate descriptions --- src/functions/batch.ts | 7 +------ src/functions/child.ts | 5 +---- src/functions/collect.ts | 6 +----- src/functions/compose.ts | 4 ---- src/functions/concat.ts | 5 +---- src/functions/demux.ts | 5 ----- src/functions/duplex.ts | 7 +------ src/functions/filter.ts | 7 +------ src/functions/flatMap.ts | 8 +------- src/functions/fromArray.ts | 5 +---- src/functions/index.ts | 14 +++----------- src/functions/join.ts | 7 +------ src/functions/last.ts | 5 ----- src/functions/map.ts | 8 +------- src/functions/merge.ts | 5 +---- src/functions/parallelMap.ts | 7 +------ src/functions/parse.ts | 6 +----- src/functions/rate.ts | 6 +----- src/functions/reduce.ts | 10 +--------- src/functions/replace.ts | 9 +-------- src/functions/split.ts | 7 +------ src/functions/stringify.ts | 3 --- src/functions/unbatch.ts | 4 +--- 23 files changed, 21 insertions(+), 129 deletions(-) diff --git a/src/functions/batch.ts b/src/functions/batch.ts index 4b56b4c..0d0f314 100644 --- a/src/functions/batch.ts +++ b/src/functions/batch.ts @@ -1,11 +1,6 @@ import { Transform } from "stream"; import { TransformOptions } from "./baseDefinitions"; -/** - * Stores chunks of data internally in array and batches when batchSize is reached. - * - * @param batchSize Size of the batches - * @param maxBatchAge Max lifetime of a batch in seconds - */ + export function batch( batchSize: number = 1000, maxBatchAge: number = 500, diff --git a/src/functions/child.ts b/src/functions/child.ts index e2e0c22..73bdbef 100644 --- a/src/functions/child.ts +++ b/src/functions/child.ts @@ -1,9 +1,6 @@ import { ChildProcess } from "child_process"; import { duplex } from "./baseFunctions"; -/** - * Return a Duplex stream from a child process' stdin and stdout - * @param childProcess Child process from which to create duplex stream - */ + export function child(childProcess: ChildProcess) { if (childProcess.stdin === null) { throw new Error("childProcess.stdin is null"); diff --git a/src/functions/collect.ts b/src/functions/collect.ts index 9507565..38cd6ea 100644 --- a/src/functions/collect.ts +++ b/src/functions/collect.ts @@ -1,10 +1,6 @@ import { Transform } from "stream"; import { ThroughOptions } from "./baseDefinitions"; -/** - * Return a ReadWrite stream that collects streamed chunks into an array or buffer - * @param options - * @param options.objectMode Whether this stream should behave as a stream of objects - */ + export function collect( options: ThroughOptions = { objectMode: false }, ): Transform { diff --git a/src/functions/compose.ts b/src/functions/compose.ts index 3212fb9..7a4fdb4 100644 --- a/src/functions/compose.ts +++ b/src/functions/compose.ts @@ -1,9 +1,5 @@ import { pipeline, Duplex, DuplexOptions } from "stream"; -/** - * Return a Readable stream of readable streams concatenated together - * @param streams Readable streams to concatenate - */ export function compose( streams: Array< NodeJS.ReadableStream | NodeJS.ReadWriteStream | NodeJS.WritableStream diff --git a/src/functions/concat.ts b/src/functions/concat.ts index af79db9..d15f936 100644 --- a/src/functions/concat.ts +++ b/src/functions/concat.ts @@ -1,8 +1,5 @@ import { Readable } from "stream"; -/** - * Return a Readable stream of readable streams concatenated together - * @param streams Readable streams to concatenate - */ + export function concat(...streams: NodeJS.ReadableStream[]): Readable { let isStarted = false; let currentStreamIndex = 0; diff --git a/src/functions/demux.ts b/src/functions/demux.ts index 77e7d48..e26dafb 100644 --- a/src/functions/demux.ts +++ b/src/functions/demux.ts @@ -22,11 +22,6 @@ const eventsTarget = { unpipe: EventSubscription.Unhandled, }; -/** - * Return a Duplex stream that is pushed data from multiple sources - * @param streams Source streams to multiplex - * @param options Duplex stream options - */ export function demux( construct: () => NodeJS.WritableStream | NodeJS.ReadWriteStream, demuxBy: { key?: string; keyBy?: (chunk: any) => string }, diff --git a/src/functions/duplex.ts b/src/functions/duplex.ts index b1e967a..b72fd0d 100644 --- a/src/functions/duplex.ts +++ b/src/functions/duplex.ts @@ -1,10 +1,5 @@ import { Duplex } from "stream"; -/** - * Return a Duplex stream from a writable stream that is assumed to somehow, when written to, - * cause the given readable stream to yield chunks - * @param writable Writable stream assumed to cause the readable stream to yield chunks when written to - * @param readable Readable stream assumed to yield chunks when the writable stream is written to - */ + export function duplex( writable: NodeJS.WritableStream, readable: NodeJS.ReadableStream, diff --git a/src/functions/filter.ts b/src/functions/filter.ts index 336db0c..e7578b3 100644 --- a/src/functions/filter.ts +++ b/src/functions/filter.ts @@ -1,10 +1,5 @@ import { Transform, TransformOptions } from "stream"; -/** - * Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold - * @param predicate Predicate with which to filter scream chunks - * @param options - * @param options.objectMode Whether this stream should behave as a stream of objects - */ + export function filter( predicate: | ((chunk: T, encoding: string) => boolean) diff --git a/src/functions/flatMap.ts b/src/functions/flatMap.ts index ba8915f..99f38a6 100644 --- a/src/functions/flatMap.ts +++ b/src/functions/flatMap.ts @@ -1,12 +1,6 @@ import { Transform } from "stream"; import { TransformOptions } from "./baseDefinitions"; -/** - * Return a ReadWrite stream that flat maps streamed chunks - * @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such) - * @param options - * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects - * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects - */ + export function flatMap( mapper: | ((chunk: T, encoding: string) => R[]) diff --git a/src/functions/fromArray.ts b/src/functions/fromArray.ts index 54e01a9..a757354 100644 --- a/src/functions/fromArray.ts +++ b/src/functions/fromArray.ts @@ -1,8 +1,5 @@ import { Readable } from "stream"; -/** - * Convert an array into a Readable stream of its elements - * @param array Array of elements to stream - */ + export function fromArray(array: any[]): Readable { let cursor = 0; return new Readable({ diff --git a/src/functions/index.ts b/src/functions/index.ts index 0998e26..48d8062 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -1,14 +1,6 @@ -import { Readable, Writable, Transform, Duplex } from "stream"; -import { ChildProcess } from "child_process"; +import { Transform } from "stream"; import * as baseFunctions from "./baseFunctions"; -import { - ThroughOptions, - TransformOptions, - WithEncoding, - JsonParseOptions, -} from "./baseDefinitions"; - /** * Convert an array into a Readable stream of its elements * @param array Array of elements to stream @@ -206,7 +198,7 @@ export const accumulatorBy = baseFunctions.accumulatorBy; * Composes multiple streams together. Writing occurs on first stream, piping occurs from last stream. * @param streams Array of streams to compose. Minimum of two. * @param options Transform stream options - **/ + */ export const compose = baseFunctions.compose; /** @@ -216,5 +208,5 @@ export const compose = baseFunctions.compose; * @param demuxBy.key? Key to fetch value from source chunks to demultiplex source. * @param demuxBy.keyBy? Function to fetch value from source chunks to demultiplex source. * @param options Writable stream options - **/ + */ export const demux = baseFunctions.demux; diff --git a/src/functions/join.ts b/src/functions/join.ts index c1a28b6..b49022b 100644 --- a/src/functions/join.ts +++ b/src/functions/join.ts @@ -1,12 +1,7 @@ import { Transform } from "stream"; import { StringDecoder } from "string_decoder"; import { WithEncoding } from "./baseDefinitions"; -/** - * Return a ReadWrite stream that joins streamed chunks using the given separator - * @param separator Separator to join with - * @param options - * @param options.encoding Encoding written chunks are assumed to use - */ + export function join( separator: string, options: WithEncoding = { encoding: "utf8" }, diff --git a/src/functions/last.ts b/src/functions/last.ts index 98422a7..412a34c 100644 --- a/src/functions/last.ts +++ b/src/functions/last.ts @@ -1,8 +1,3 @@ -/** - * Return a Promise resolving to the last streamed chunk of the given readable stream, after it has - * ended - * @param readable Readable stream to wait on - */ export function last(readable: NodeJS.ReadableStream): Promise { let lastChunk: T | null = null; return new Promise((resolve, _) => { diff --git a/src/functions/map.ts b/src/functions/map.ts index 05fe627..5848ca5 100644 --- a/src/functions/map.ts +++ b/src/functions/map.ts @@ -1,12 +1,6 @@ import { Transform } from "stream"; import { TransformOptions } from "./baseDefinitions"; -/** - * Return a ReadWrite stream that maps streamed chunks - * @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such) - * @param options - * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects - * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects - */ + export function map( mapper: (chunk: T, encoding: string) => R, options: TransformOptions = { diff --git a/src/functions/merge.ts b/src/functions/merge.ts index 7166006..ff4d5f6 100644 --- a/src/functions/merge.ts +++ b/src/functions/merge.ts @@ -1,8 +1,5 @@ import { Readable } from "stream"; -/** - * Return a Readable stream of readable streams merged together in chunk arrival order - * @param streams Readable streams to merge - */ + export function merge(...streams: Readable[]): Readable { let isStarted = false; let streamEndedCount = 0; diff --git a/src/functions/parallelMap.ts b/src/functions/parallelMap.ts index 7610f49..56c9f41 100644 --- a/src/functions/parallelMap.ts +++ b/src/functions/parallelMap.ts @@ -1,12 +1,7 @@ import { Transform } from "stream"; import { sleep } from "../helpers"; import { TransformOptions } from "./baseDefinitions"; -/** - * Limits number of parallel processes in flight. - * @param parallel Max number of parallel processes. - * @param func Function to execute on each data chunk - * @param pause Amount of time to pause processing when max number of parallel processes are executing. - */ + export function parallelMap( mapper: (data: T) => R, parallel: number = 10, diff --git a/src/functions/parse.ts b/src/functions/parse.ts index da2ccee..451e86c 100644 --- a/src/functions/parse.ts +++ b/src/functions/parse.ts @@ -1,11 +1,7 @@ import { Transform } from "stream"; import { StringDecoder } from "string_decoder"; import { SerializationFormats } from "./baseDefinitions"; -/** - * Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk - * must be a fully defined JSON string. - * @param format Format of serialized data, only utf8 supported. - */ + export function parse( format: SerializationFormats = SerializationFormats.utf8, ): Transform { diff --git a/src/functions/rate.ts b/src/functions/rate.ts index e322744..cb5cbfb 100644 --- a/src/functions/rate.ts +++ b/src/functions/rate.ts @@ -2,11 +2,7 @@ import { Transform } from "stream"; import { performance } from "perf_hooks"; import { sleep } from "../helpers"; import { TransformOptions } from "./baseDefinitions"; -/** - * Limits date of data transferred into stream. - * @param targetRate Desired rate in ms - * @param period Period to sleep for when rate is above or equal to targetRate - */ + export function rate( targetRate: number = 50, period: number = 1, diff --git a/src/functions/reduce.ts b/src/functions/reduce.ts index 6dfcdf9..6ee665f 100644 --- a/src/functions/reduce.ts +++ b/src/functions/reduce.ts @@ -1,14 +1,6 @@ import { Transform } from "stream"; import { TransformOptions } from "./baseDefinitions"; -/** - * Return a ReadWrite stream that reduces streamed chunks down to a single value and yield that - * value - * @param iteratee Reducer function to apply on each streamed chunk - * @param initialValue Initial value - * @param options - * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects - * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects - */ + export function reduce( iteratee: | ((previousValue: R, chunk: T, encoding: string) => R) diff --git a/src/functions/replace.ts b/src/functions/replace.ts index e8bc0e7..dc5a05e 100644 --- a/src/functions/replace.ts +++ b/src/functions/replace.ts @@ -1,14 +1,7 @@ import { Transform } from "stream"; import { StringDecoder } from "string_decoder"; import { WithEncoding } from "./baseDefinitions"; -/** - * Return a ReadWrite stream that replaces occurrences of the given string or regular expression in - * the streamed chunks with the specified replacement string - * @param searchValue Search string to use - * @param replaceValue Replacement string to use - * @param options - * @param options.encoding Encoding written chunks are assumed to use - */ + export function replace( searchValue: string | RegExp, replaceValue: string, diff --git a/src/functions/split.ts b/src/functions/split.ts index fe31d65..8e517ed 100644 --- a/src/functions/split.ts +++ b/src/functions/split.ts @@ -1,12 +1,7 @@ import { Transform } from "stream"; import { StringDecoder } from "string_decoder"; import { WithEncoding } from "./baseDefinitions"; -/** - * Return a ReadWrite stream that splits streamed chunks using the given separator - * @param separator Separator to split by, defaulting to "\n" - * @param options - * @param options.encoding Encoding written chunks are assumed to use - */ + export function split( separator: string | RegExp = "\n", options: WithEncoding = { encoding: "utf8" }, diff --git a/src/functions/stringify.ts b/src/functions/stringify.ts index 21996ad..34eb302 100644 --- a/src/functions/stringify.ts +++ b/src/functions/stringify.ts @@ -1,9 +1,6 @@ import { Transform } from "stream"; import { JsonValue, JsonParseOptions } from "./baseDefinitions"; -/** - * Return a ReadWrite stream that stringifies the streamed chunks to JSON - */ export function stringify( options: JsonParseOptions = { pretty: false }, ): Transform { diff --git a/src/functions/unbatch.ts b/src/functions/unbatch.ts index d8fc25f..0f9b3f6 100644 --- a/src/functions/unbatch.ts +++ b/src/functions/unbatch.ts @@ -1,8 +1,6 @@ import { Transform } from "stream"; import { TransformOptions } from "./baseDefinitions"; -/** - * Unbatches and sends individual chunks of data - */ + export function unbatch( options: TransformOptions = { readableObjectMode: true, From ee3d9b9dedab7e414fb4d5f91bfa4bf91fb17774 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Tue, 10 Sep 2019 12:09:26 -0400 Subject: [PATCH 51/69] Add spies to ensure demux handles keys correctly --- src/functions/compose.ts | 1 - src/functions/map.ts | 1 - tests/compose.spec.ts | 12 +++-- tests/demux.spec.ts | 95 +++++++++++++++++++++++++++++++++++++++- 4 files changed, 102 insertions(+), 7 deletions(-) diff --git a/src/functions/compose.ts b/src/functions/compose.ts index 7a4fdb4..195598e 100644 --- a/src/functions/compose.ts +++ b/src/functions/compose.ts @@ -40,7 +40,6 @@ type AllStreams = | NodeJS.WritableStream; export class Compose extends Duplex { - public writable: boolean; private first: AllStreams; private last: AllStreams; private streams: AllStreams[]; diff --git a/src/functions/map.ts b/src/functions/map.ts index 5848ca5..c088c72 100644 --- a/src/functions/map.ts +++ b/src/functions/map.ts @@ -16,7 +16,6 @@ export function map( const mapped = await mapper(chunk, encoding); callback(null, mapped); } catch (err) { - console.log("caught error", err.message); callback(err); } }, diff --git a/tests/compose.spec.ts b/tests/compose.spec.ts index acc6e2e..de7603b 100644 --- a/tests/compose.spec.ts +++ b/tests/compose.spec.ts @@ -272,6 +272,7 @@ test.cb( [first, second], { objectMode: true, highWaterMark: 5 }, ); + composed.on("error", err => { t.end(err); }); @@ -309,12 +310,13 @@ test.cb( "compose() should emit drain event immediately when second stream is bottleneck", t => { t.plan(6); + const _rate = 200; interface Chunk { key: string; mapped: number[]; } const first = map( - async (chunk: Chunk) => { + (chunk: Chunk) => { chunk.mapped.push(1); return chunk; }, @@ -326,10 +328,11 @@ test.cb( const second = map( async (chunk: Chunk) => { pendingReads--; - await sleep(200); + await sleep(_rate); expect(second._writableState.length).to.be.equal(1); expect(first._readableState.length).to.equal(pendingReads); chunk.mapped.push(2); + console.log("returning chunk from second map", chunk); return chunk; }, { objectMode: true, highWaterMark: 1 }, @@ -340,15 +343,17 @@ test.cb( { objectMode: true, highWaterMark: 5 }, ); composed.on("error", err => { + console.log("ending tests and got error", err); t.end(err); }); composed.on("drain", () => { expect(composed._writableState.length).to.be.equal(0); - expect(performance.now() - start).to.be.lessThan(50); + expect(performance.now() - start).to.be.lessThan(_rate); t.pass(); }); + // Check if this is causing double cb composed.on("data", (chunk: Chunk) => { // Since second is bottleneck, composed will write into first immediately. Buffer should be empty. expect(composed._writableState.length).to.be.equal(0); @@ -370,6 +375,7 @@ test.cb( input.forEach(item => { composed.write(item); }); + const start = performance.now(); }, ); diff --git a/tests/demux.spec.ts b/tests/demux.spec.ts index 15d89c3..06cec0e 100644 --- a/tests/demux.spec.ts +++ b/tests/demux.spec.ts @@ -44,6 +44,49 @@ test.cb("demux() constructor should be called once per key", t => { demuxed.end(); }); +test.cb("demux() should send input through correct pipeline", t => { + t.plan(6); + const input = [ + { key: "a", visited: [] }, + { key: "b", visited: [] }, + { key: "a", visited: [] }, + { key: "c", visited: [] }, + { key: "a", visited: [] }, + { key: "b", visited: [] }, + ]; + const pipelineSpies = {}; + const construct = (destKey: string) => { + const mapper = sinon.spy((chunk: Test) => { + return { ...chunk, visited: [1] }; + }); + const dest = map(mapper); + pipelineSpies[destKey] = mapper; + + return dest; + }; + + const demuxed = demux(construct, { key: "key" }, { objectMode: true }); + + demuxed.on("finish", () => { + pipelineSpies["a"].getCalls().forEach(call => { + expect(call.args[0].key).to.equal("a"); + t.pass(); + }); + pipelineSpies["b"].getCalls().forEach(call => { + expect(call.args[0].key).to.equal("b"); + t.pass(); + }); + pipelineSpies["c"].getCalls().forEach(call => { + expect(call.args[0].key).to.equal("c"); + t.pass(); + }); + t.end(); + }); + + input.forEach(event => demuxed.write(event)); + demuxed.end(); +}); + test.cb("demux() constructor should be called once per key using keyBy", t => { t.plan(1); const input = [ @@ -82,6 +125,53 @@ test.cb("demux() constructor should be called once per key using keyBy", t => { demuxed.end(); }); +test.cb("demux() should send input through correct pipeline using keyBy", t => { + t.plan(6); + const input = [ + { key: "a", visited: [] }, + { key: "b", visited: [] }, + { key: "a", visited: [] }, + { key: "c", visited: [] }, + { key: "a", visited: [] }, + { key: "b", visited: [] }, + ]; + const pipelineSpies = {}; + const construct = (destKey: string) => { + const mapper = sinon.spy((chunk: Test) => { + return { ...chunk, visited: [1] }; + }); + const dest = map(mapper); + pipelineSpies[destKey] = mapper; + + return dest; + }; + + const demuxed = demux( + construct, + { keyBy: item => item.key }, + { objectMode: true }, + ); + + demuxed.on("finish", () => { + pipelineSpies["a"].getCalls().forEach(call => { + expect(call.args[0].key).to.equal("a"); + t.pass(); + }); + pipelineSpies["b"].getCalls().forEach(call => { + expect(call.args[0].key).to.equal("b"); + t.pass(); + }); + pipelineSpies["c"].getCalls().forEach(call => { + expect(call.args[0].key).to.equal("c"); + t.pass(); + }); + t.end(); + }); + + input.forEach(event => demuxed.write(event)); + demuxed.end(); +}); + test.cb("should emit errors", t => { t.plan(2); let index = 0; @@ -367,6 +457,7 @@ test.cb( t => { t.plan(6); const highWaterMark = 5; + const _rate = 200; interface Chunk { key: string; mapped: number[]; @@ -393,7 +484,7 @@ test.cb( const second = map( async (chunk: Chunk) => { pendingReads--; - await sleep(200); + await sleep(_rate); chunk.mapped.push(2); expect(second._writableState.length).to.be.equal(1); expect(first._readableState.length).to.equal(pendingReads); @@ -419,7 +510,7 @@ test.cb( _demux.on("drain", () => { expect(_demux._writableState.length).to.be.equal(0); - expect(performance.now() - start).to.be.lessThan(50); + expect(performance.now() - start).to.be.lessThan(_rate); t.pass(); }); From 9d280b16626f488c56806cd5f41876e40db2599e Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Tue, 10 Sep 2019 18:13:13 -0400 Subject: [PATCH 52/69] Wait for drain when write returns false in demux --- src/functions/demux.ts | 21 ++--- tests/demux.spec.ts | 184 +++++++++++++---------------------------- 2 files changed, 62 insertions(+), 143 deletions(-) diff --git a/src/functions/demux.ts b/src/functions/demux.ts index e26dafb..0ad2e57 100644 --- a/src/functions/demux.ts +++ b/src/functions/demux.ts @@ -62,7 +62,7 @@ class Demux extends Writable { } // Throttles when one stream is not writable - public _write(chunk: any, encoding?: any, cb?: any) { + public async _write(chunk: any, encoding: any, cb: any) { const destKey = this.demuxer(chunk); if (this.streamsByKey[destKey] === undefined) { this.streamsByKey[destKey] = { @@ -70,21 +70,12 @@ class Demux extends Writable { writable: true, }; } - let res = false; - if (this.streamsByKey[destKey].writable && this.isWritable) { - res = this.streamsByKey[destKey].stream.write(chunk, encoding, cb); - } - if (!res && this.isWritable) { - this.isWritable = false; - this.streamsByKey[destKey].writable = false; - this.nonWritableStreams.push(destKey); - this.streamsByKey[destKey].stream.once("drain", () => { - this.nonWritableStreams.filter(key => key !== destKey); - this.isWritable = this.nonWritableStreams.length === 0; - this.streamsByKey[destKey].stream.write(chunk, encoding, cb); - if (this.isWritable) { + if (!this.streamsByKey[destKey].stream.write(chunk, encoding, cb)) { + await new Promise((resolve, reject) => { + this.streamsByKey[destKey].stream.once("drain", () => { + resolve(); this.emit("drain"); - } + }); }); } } diff --git a/tests/demux.spec.ts b/tests/demux.spec.ts index 06cec0e..bde4ffe 100644 --- a/tests/demux.spec.ts +++ b/tests/demux.spec.ts @@ -172,6 +172,7 @@ test.cb("demux() should send input through correct pipeline using keyBy", t => { demuxed.end(); }); +// Probably needs to be removed test.cb("should emit errors", t => { t.plan(2); let index = 0; @@ -209,7 +210,7 @@ test.cb("should emit errors", t => { _chunk.visited.push(index); index++; return _chunk; - }).on("error", () => {}); + }).on("error", () => {}); // Otherwise ava complains dest.pipe(sink); return dest; @@ -226,18 +227,26 @@ test.cb("should emit errors", t => { t.end(); }); input.forEach(event => demuxed.write(event)); - demuxed.end(); }); -test("demux() should emit drain event ~rate * highWaterMark ms for every write that causes backpressure", t => { - t.plan(7); - interface Chunk { - key: string; - mapped: number[]; - } - const highWaterMark = 5; - const _rate = 25; +test("demux() when write returns false, drain event should be emitted after at least slowProcessorSpeed * highWaterMark", t => { return new Promise(async (resolve, reject) => { + t.plan(7); + interface Chunk { + key: string; + mapped: number[]; + } + const input: Chunk[] = [ + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + ]; + let pendingReads = input.length; + const highWaterMark = 5; + const slowProcessorSpeed = 25; const sink = new Writable({ objectMode: true, write(chunk, encoding, cb) { @@ -251,7 +260,7 @@ test("demux() should emit drain event ~rate * highWaterMark ms for every write t }); const construct = (destKey: string) => { const first = map(async (chunk: Chunk) => { - await sleep(_rate); + await sleep(slowProcessorSpeed); chunk.mapped.push(1); return chunk; }); @@ -278,40 +287,45 @@ test("demux() should emit drain event ~rate * highWaterMark ms for every write t _demux.on("drain", () => { expect(_demux._writableState.length).to.be.equal(0); - expect(performance.now() - start).to.be.greaterThan(_rate); + expect(performance.now() - start).to.be.greaterThan( + slowProcessorSpeed * highWaterMark, + ); t.pass(); }); - const input = [ - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - ]; - let pendingReads = input.length; - - let start = performance.now(); + let start = null; for (const item of input) { const res = _demux.write(item); expect(_demux._writableState.length).to.be.at.most(highWaterMark); if (!res) { start = performance.now(); - await sleep(100); + await new Promise((resolv, rej) => { + _demux.once("drain", () => { + resolv(); + }); + }); } } }); }); test("demux() should emit one drain event when writing 6 items with highWaterMark of 5", t => { - t.plan(7); - const highWaterMark = 5; return new Promise(async (resolve, reject) => { + t.plan(7); interface Chunk { key: string; mapped: number[]; } + const highWaterMark = 5; + const input = [ + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + ]; + let pendingReads = input.length; const sink = new Writable({ objectMode: true, write(chunk, encoding, cb) { @@ -354,110 +368,26 @@ test("demux() should emit one drain event when writing 6 items with highWaterMar t.pass(); }); - const input = [ - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - ]; - let pendingReads = input.length; - for (const item of input) { const res = _demux.write(item); expect(_demux._writableState.length).to.be.at.most(highWaterMark); if (!res) { - await sleep(10); + await new Promise((_resolve, _reject) => { + _demux.once("drain", () => { + _resolve(); + }); + }); } } }); }); -test.cb( - "demux() should emit drain event after 500 ms when writing 5 items that take 100ms to process with a highWaterMark of 5 ", - t => { - t.plan(6); - const _rate = 100; - const highWaterMark = 5; - interface Chunk { - key: string; - mapped: number[]; - } - const sink = new Writable({ - objectMode: true, - write(chunk, encoding, cb) { - t.pass(); - cb(); - if (pendingReads === 0) { - t.end(); - } - }, - }); - const construct = (destKey: string) => { - const first = map( - async (chunk: Chunk) => { - chunk.mapped.push(1); - await sleep(_rate); - return chunk; - }, - { objectMode: true }, - ); - - const second = map( - (chunk: Chunk) => { - pendingReads--; - chunk.mapped.push(2); - return chunk; - }, - { objectMode: true, highWaterMark: 1 }, - ); - - first.pipe(second).pipe(sink); - return first; - }; - const _demux = demux( - construct, - { key: "key" }, - { - objectMode: true, - highWaterMark, - }, - ); - _demux.on("error", err => { - t.end(err); - }); - - _demux.on("drain", () => { - expect(_demux._writableState.length).to.be.equal(0); - expect(performance.now() - start).to.be.greaterThan( - _rate * input.length, - ); - t.pass(); - }); - - const input = [ - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - ]; - - let pendingReads = input.length; - input.forEach(item => { - _demux.write(item); - }); - const start = performance.now(); - }, -); - test.cb( "demux() should emit drain event immediately when second stream is bottleneck", t => { t.plan(6); const highWaterMark = 5; - const _rate = 200; + const slowProcessorSpeed = 200; interface Chunk { key: string; mapped: number[]; @@ -484,7 +414,7 @@ test.cb( const second = map( async (chunk: Chunk) => { pendingReads--; - await sleep(_rate); + await sleep(slowProcessorSpeed); chunk.mapped.push(2); expect(second._writableState.length).to.be.equal(1); expect(first._readableState.length).to.equal(pendingReads); @@ -510,7 +440,9 @@ test.cb( _demux.on("drain", () => { expect(_demux._writableState.length).to.be.equal(0); - expect(performance.now() - start).to.be.lessThan(_rate); + expect(performance.now() - start).to.be.lessThan( + slowProcessorSpeed, + ); t.pass(); }); @@ -530,7 +462,7 @@ test.cb( }, ); -test("demux() should only emit drain event when all streams are writable", t => { +test.only("demux() should only emit drain event when all streams are writable", t => { t.plan(1); const highWaterMark = 2; interface Chunk { @@ -558,12 +490,12 @@ test("demux() should only emit drain event when all streams are writable", t => chunk.mapped.push(1); return chunk; }, - { objectMode: true }, + { objectMode: true, highWaterMark: 1 }, ); const second = map( async (chunk: Chunk) => { - await sleep(50); + await sleep(2000); chunk.mapped.push(2); return chunk; }, @@ -578,11 +510,10 @@ test("demux() should only emit drain event when all streams are writable", t => { key: "key" }, { objectMode: true, - highWaterMark, }, ); _demux.on("error", err => { - reject(); + reject(err); }); const input = [ @@ -590,16 +521,13 @@ test("demux() should only emit drain event when all streams are writable", t => { key: "a", mapped: [] }, { key: "c", mapped: [] }, { key: "c", mapped: [] }, - { key: "b", mapped: [] }, // should only be recieved after a becomes writable + { key: "b", mapped: [] }, // should only be recieved after a and c become writable ]; let pendingReads = input.length; const start = performance.now(); for (const item of input) { - const res = _demux.write(item); - if (!res) { - await sleep(50); - } + console.log("DEMUX", _demux.write(item)); } }); }); From dcfd6fe4c2851fcc31c5f45525732a3ba0b72d9a Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Wed, 11 Sep 2019 14:29:20 -0400 Subject: [PATCH 53/69] Update tests --- src/functions/demux.ts | 30 ++----- src/functions/map.ts | 7 +- tests/demux.spec.ts | 195 +++++++++++++++-------------------------- tests/map.spec.ts | 58 ------------ 4 files changed, 79 insertions(+), 211 deletions(-) diff --git a/src/functions/demux.ts b/src/functions/demux.ts index 0ad2e57..a9b1011 100644 --- a/src/functions/demux.ts +++ b/src/functions/demux.ts @@ -31,15 +31,10 @@ export function demux( } class Demux extends Writable { - public isWritable: boolean; private streamsByKey: { - [key: string]: { - stream: NodeJS.WritableStream | NodeJS.ReadWriteStream; - writable: boolean; - }; + [key: string]: NodeJS.WritableStream | NodeJS.ReadWriteStream; }; private demuxer: (chunk: any) => string; - private nonWritableStreams: Array; private construct: ( destKey?: string, ) => NodeJS.WritableStream | NodeJS.ReadWriteStream; @@ -57,26 +52,19 @@ class Demux extends Writable { this.demuxer = demuxBy.keyBy || ((chunk: any) => chunk[demuxBy.key!]); this.construct = construct; this.streamsByKey = {}; - this.isWritable = true; - this.nonWritableStreams = []; } - // Throttles when one stream is not writable public async _write(chunk: any, encoding: any, cb: any) { const destKey = this.demuxer(chunk); if (this.streamsByKey[destKey] === undefined) { - this.streamsByKey[destKey] = { - stream: this.construct(destKey), - writable: true, - }; + this.streamsByKey[destKey] = this.construct(destKey); } - if (!this.streamsByKey[destKey].stream.write(chunk, encoding, cb)) { - await new Promise((resolve, reject) => { - this.streamsByKey[destKey].stream.once("drain", () => { - resolve(); - this.emit("drain"); - }); + if (!this.streamsByKey[destKey].write(chunk, encoding)) { + this.streamsByKey[destKey].once("drain", () => { + cb(); }); + } else { + cb(); } } @@ -87,7 +75,7 @@ class Demux extends Writable { break; case EventSubscription.All: Object.keys(this.streamsByKey).forEach(key => - this.streamsByKey[key].stream.on(event, cb), + this.streamsByKey[key].on(event, cb), ); break; case EventSubscription.Unhandled: @@ -107,7 +95,7 @@ class Demux extends Writable { break; case EventSubscription.All: Object.keys(this.streamsByKey).forEach(key => - this.streamsByKey[key].stream.once(event, cb), + this.streamsByKey[key].once(event, cb), ); break; case EventSubscription.Unhandled: diff --git a/src/functions/map.ts b/src/functions/map.ts index c088c72..13834af 100644 --- a/src/functions/map.ts +++ b/src/functions/map.ts @@ -12,12 +12,7 @@ export function map( return new Transform({ ...options, async transform(chunk: T, encoding, callback) { - try { - const mapped = await mapper(chunk, encoding); - callback(null, mapped); - } catch (err) { - callback(err); - } + callback(null, await mapper(chunk, encoding)); }, }); } diff --git a/tests/demux.spec.ts b/tests/demux.spec.ts index bde4ffe..aedbe6c 100644 --- a/tests/demux.spec.ts +++ b/tests/demux.spec.ts @@ -172,63 +172,6 @@ test.cb("demux() should send input through correct pipeline using keyBy", t => { demuxed.end(); }); -// Probably needs to be removed -test.cb("should emit errors", t => { - t.plan(2); - let index = 0; - const input = [ - { key: "a", visited: [] }, - { key: "b", visited: [] }, - { key: "a", visited: [] }, - { key: "a", visited: [] }, - ]; - const results = [ - { key: "a", visited: [0] }, - { key: "b", visited: [1] }, - { key: "a", visited: [2] }, - { key: "a", visited: [3] }, - ]; - const destinationStreamKeys = []; - const sink = new Writable({ - objectMode: true, - write(chunk, enc, cb) { - expect(results).to.deep.include(chunk); - expect(input).to.not.deep.include(chunk); - t.pass(); - cb(); - }, - }); - - const construct = (destKey: string) => { - destinationStreamKeys.push(destKey); - const dest = map((chunk: Test) => { - if (chunk.key === "b") { - throw new Error("Caught object with key 'b'"); - } - - const _chunk = { ...chunk, visited: [] }; - _chunk.visited.push(index); - index++; - return _chunk; - }).on("error", () => {}); // Otherwise ava complains - - dest.pipe(sink); - return dest; - }; - - const demuxed = demux( - construct, - { keyBy: (chunk: any) => chunk.key }, - { objectMode: true }, - ); - demuxed.on("error", e => { - expect(e.message).to.equal("Caught object with key 'b'"); - t.pass(); - t.end(); - }); - input.forEach(event => demuxed.write(event)); -}); - test("demux() when write returns false, drain event should be emitted after at least slowProcessorSpeed * highWaterMark", t => { return new Promise(async (resolve, reject) => { t.plan(7); @@ -259,11 +202,14 @@ test("demux() when write returns false, drain event should be emitted after at l }, }); const construct = (destKey: string) => { - const first = map(async (chunk: Chunk) => { - await sleep(slowProcessorSpeed); - chunk.mapped.push(1); - return chunk; - }); + const first = map( + async (chunk: Chunk) => { + await sleep(slowProcessorSpeed); + chunk.mapped.push(1); + return chunk; + }, + { highWaterMark: 1, objectMode: true }, + ); const second = map(async (chunk: Chunk) => { chunk.mapped.push(2); @@ -285,14 +231,6 @@ test("demux() when write returns false, drain event should be emitted after at l reject(); }); - _demux.on("drain", () => { - expect(_demux._writableState.length).to.be.equal(0); - expect(performance.now() - start).to.be.greaterThan( - slowProcessorSpeed * highWaterMark, - ); - t.pass(); - }); - let start = null; for (const item of input) { const res = _demux.write(item); @@ -301,6 +239,11 @@ test("demux() when write returns false, drain event should be emitted after at l start = performance.now(); await new Promise((resolv, rej) => { _demux.once("drain", () => { + expect(_demux._writableState.length).to.be.equal(0); + expect(performance.now() - start).to.be.greaterThan( + slowProcessorSpeed * highWaterMark, + ); + t.pass(); resolv(); }); }); @@ -318,63 +261,60 @@ test("demux() should emit one drain event when writing 6 items with highWaterMar } const highWaterMark = 5; const input = [ - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, + { key: "a", val: 1, mapped: [] }, + { key: "a", val: 2, mapped: [] }, + { key: "a", val: 3, mapped: [] }, + { key: "a", val: 4, mapped: [] }, + { key: "a", val: 5, mapped: [] }, + { key: "a", val: 6, mapped: [] }, ]; let pendingReads = input.length; const sink = new Writable({ objectMode: true, write(chunk, encoding, cb) { cb(); - t.pass(); pendingReads--; + t.pass(); if (pendingReads === 0) { resolve(); } }, }); const construct = (destKey: string) => { - const first = map(async (chunk: Chunk) => { - chunk.mapped.push(1); - return chunk; - }); + const pipeline = map( + async (chunk: Chunk) => { + await sleep(50); + chunk.mapped.push(2); + return chunk; + }, + { highWaterMark: 1, objectMode: true }, + ); - const second = map(async (chunk: Chunk) => { - chunk.mapped.push(2); - return chunk; - }); - - first.pipe(second).pipe(sink); - return first; + pipeline.pipe(sink); + return pipeline; }; const _demux = demux( construct, { key: "key" }, { objectMode: true, - highWaterMark, + highWaterMark: 5, }, ); + _demux.on("error", err => { reject(); }); - _demux.on("drain", () => { - expect(_demux._writableState.length).to.be.equal(0); - t.pass(); - }); - for (const item of input) { const res = _demux.write(item); expect(_demux._writableState.length).to.be.at.most(highWaterMark); if (!res) { - await new Promise((_resolve, _reject) => { + await new Promise(_resolve => { _demux.once("drain", () => { _resolve(); + expect(_demux._writableState.length).to.be.equal(0); + t.pass(); }); }); } @@ -386,8 +326,8 @@ test.cb( "demux() should emit drain event immediately when second stream is bottleneck", t => { t.plan(6); - const highWaterMark = 5; - const slowProcessorSpeed = 200; + const slowProcessorSpeed = 100; + const highWaterMark = 3; interface Chunk { key: string; mapped: number[]; @@ -395,11 +335,13 @@ test.cb( const sink = new Writable({ objectMode: true, write(chunk, encoding, cb) { + expect(chunk.mapped).to.deep.equal([1, 2]); t.pass(); - cb(); + pendingReads--; if (pendingReads === 0) { t.end(); } + cb(); }, }); const construct = (destKey: string) => { @@ -408,16 +350,13 @@ test.cb( chunk.mapped.push(1); return chunk; }, - { objectMode: true }, + { objectMode: true, highWaterMark: 1 }, ); const second = map( async (chunk: Chunk) => { - pendingReads--; await sleep(slowProcessorSpeed); chunk.mapped.push(2); - expect(second._writableState.length).to.be.equal(1); - expect(first._readableState.length).to.equal(pendingReads); return chunk; }, { objectMode: true, highWaterMark: 1 }, @@ -440,8 +379,9 @@ test.cb( _demux.on("drain", () => { expect(_demux._writableState.length).to.be.equal(0); - expect(performance.now() - start).to.be.lessThan( - slowProcessorSpeed, + // Should take longer than the amount of items needed to be processed until we are under highWaterMark + expect(performance.now() - start).to.be.greaterThan( + slowProcessorSpeed * (input.length - highWaterMark - 1), ); t.pass(); }); @@ -453,18 +393,18 @@ test.cb( { key: "a", mapped: [] }, { key: "a", mapped: [] }, ]; - let pendingReads = input.length; + + const start = performance.now(); input.forEach(item => { _demux.write(item); }); - const start = performance.now(); }, ); -test.only("demux() should only emit drain event when all streams are writable", t => { +test("demux() should be blocked by slowest pipeline", t => { t.plan(1); - const highWaterMark = 2; + const slowProcessorSpeed = 100; interface Chunk { key: string; mapped: number[]; @@ -476,33 +416,26 @@ test.only("demux() should only emit drain event when all streams are writable", cb(); pendingReads--; if (chunk.key === "b") { - expect(performance.now() - start).to.be.greaterThan(150); + expect(performance.now() - start).to.be.greaterThan( + slowProcessorSpeed * totalItems, + ); t.pass(); - } - if (pendingReads === 0) { + expect(pendingReads).to.equal(0); resolve(); } }, }); const construct = (destKey: string) => { const first = map( - (chunk: Chunk) => { + async (chunk: Chunk) => { + await sleep(slowProcessorSpeed); chunk.mapped.push(1); return chunk; }, { objectMode: true, highWaterMark: 1 }, ); - const second = map( - async (chunk: Chunk) => { - await sleep(2000); - chunk.mapped.push(2); - return chunk; - }, - { objectMode: true, highWaterMark: 1 }, - ); - - first.pipe(second).pipe(sink); + first.pipe(sink); return first; }; const _demux = demux( @@ -510,6 +443,7 @@ test.only("demux() should only emit drain event when all streams are writable", { key: "key" }, { objectMode: true, + highWaterMark: 1, }, ); _demux.on("error", err => { @@ -521,13 +455,21 @@ test.only("demux() should only emit drain event when all streams are writable", { key: "a", mapped: [] }, { key: "c", mapped: [] }, { key: "c", mapped: [] }, - { key: "b", mapped: [] }, // should only be recieved after a and c become writable + { key: "c", mapped: [] }, + { key: "b", mapped: [] }, ]; let pendingReads = input.length; + const totalItems = input.length; const start = performance.now(); for (const item of input) { - console.log("DEMUX", _demux.write(item)); + if (!_demux.write(item)) { + await new Promise(_resolve => { + _demux.once("drain", () => { + _resolve(); + }); + }); + } } }); }); @@ -543,6 +485,7 @@ test("demux() should emit drain event and first should contain up to highWaterMa const sink = new Writable({ objectMode: true, write(chunk, encoding, cb) { + expect(chunk.mapped).to.deep.equal([1, 2]); t.pass(); cb(); if (pendingReads === 0) { @@ -557,7 +500,7 @@ test("demux() should emit drain event and first should contain up to highWaterMa chunk.mapped.push(1); return chunk; }, - { objectMode: 2, highWaterMark: 2 }, + { objectMode: true, highWaterMark: 1 }, ); const second = map( @@ -568,7 +511,7 @@ test("demux() should emit drain event and first should contain up to highWaterMa pendingReads--; return chunk; }, - { objectMode: 2, highWaterMark: 2 }, + { objectMode: true, highWaterMark: 1 }, ); first.pipe(second).pipe(sink); diff --git a/tests/map.spec.ts b/tests/map.spec.ts index 75210ff..35c8e84 100644 --- a/tests/map.spec.ts +++ b/tests/map.spec.ts @@ -49,61 +49,3 @@ test.cb("map() maps elements asynchronously", t => { source.push("c"); source.push(null); }); - -test.cb("map() emits errors during synchronous mapping", t => { - t.plan(3); - const source = new Readable({ objectMode: true }); - const mapStream = map((element: string) => { - if (element !== "b") { - throw new Error("Failed mapping"); - } - return element.toUpperCase(); - }); - source - .pipe(mapStream) - .on("data", data => { - expect(data).to.equal("B"); - t.pass(); - }) - .on("error", err => { - source.pipe(mapStream); - mapStream.resume(); - expect(err.message).to.equal("Failed mapping"); - t.pass(); - }) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test("map() emits errors during asynchronous mapping", t => { - t.plan(1); - return new Promise((resolve, _) => { - const source = new Readable({ objectMode: true }); - const mapStream = map(async (element: string) => { - await Promise.resolve(); - if (element === "b") { - throw new Error("Failed mapping"); - } - return element.toUpperCase(); - }); - source - .pipe(mapStream) - .on("error", err => { - expect(err.message).to.equal("Failed mapping"); - t.pass(); - resolve(); - }) - .on("end", () => t.fail); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); - source.push(null); - source.push(null); - }); -}); From f06cb1c33eba5d6a6e008f2095ed20575bf62ba2 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Wed, 11 Sep 2019 14:31:06 -0400 Subject: [PATCH 54/69] Remove console log --- tests/compose.spec.ts | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tests/compose.spec.ts b/tests/compose.spec.ts index de7603b..ad2be6c 100644 --- a/tests/compose.spec.ts +++ b/tests/compose.spec.ts @@ -332,7 +332,6 @@ test.cb( expect(second._writableState.length).to.be.equal(1); expect(first._readableState.length).to.equal(pendingReads); chunk.mapped.push(2); - console.log("returning chunk from second map", chunk); return chunk; }, { objectMode: true, highWaterMark: 1 }, @@ -343,7 +342,6 @@ test.cb( { objectMode: true, highWaterMark: 5 }, ); composed.on("error", err => { - console.log("ending tests and got error", err); t.end(err); }); @@ -353,9 +351,7 @@ test.cb( t.pass(); }); - // Check if this is causing double cb composed.on("data", (chunk: Chunk) => { - // Since second is bottleneck, composed will write into first immediately. Buffer should be empty. expect(composed._writableState.length).to.be.equal(0); t.pass(); if (chunk.key === "e") { From ce19c5e987a8c17b9e15eb975de22dff124742eb Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Wed, 11 Sep 2019 15:09:51 -0400 Subject: [PATCH 55/69] Add test for drain events --- tests/demux.spec.ts | 82 +++++++++++++++++++++++++++++++++++++++------ 1 file changed, 72 insertions(+), 10 deletions(-) diff --git a/tests/demux.spec.ts b/tests/demux.spec.ts index aedbe6c..f01723a 100644 --- a/tests/demux.spec.ts +++ b/tests/demux.spec.ts @@ -172,6 +172,75 @@ test.cb("demux() should send input through correct pipeline using keyBy", t => { demuxed.end(); }); +test("demux() should return false after if it has >= highWaterMark items buffered and drain should be emitted", t => { + return new Promise(async (resolve, reject) => { + t.plan(7); + interface Chunk { + key: string; + mapped: number[]; + } + const input: Chunk[] = [ + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + ]; + let pendingReads = input.length; + const highWaterMark = 5; + const slowProcessorSpeed = 25; + const construct = (destKey: string) => { + const first = map( + async (chunk: Chunk) => { + await sleep(slowProcessorSpeed); + return { ...chunk, mapped: [1] }; + }, + { highWaterMark: 1, objectMode: true }, + ); + + // to clear first + first.on("data", chunk => { + expect(chunk.mapped).to.deep.equal([1]); + pendingReads--; + if (pendingReads === 0) { + resolve(); + } + t.pass(); + }); + + return first; + }; + + const _demux = demux( + construct, + { key: "key" }, + { + objectMode: true, + highWaterMark, + }, + ); + + _demux.on("error", err => { + reject(); + }); + + for (const item of input) { + const res = _demux.write(item); + expect(_demux._writableState.length).to.be.at.most(highWaterMark); + if (!res) { + await new Promise((resolv, rej) => { + _demux.once("drain", () => { + expect(_demux._writableState.length).to.be.equal(0); + t.pass(); + resolv(); + }); + }); + } + } + }); +}); + test("demux() when write returns false, drain event should be emitted after at least slowProcessorSpeed * highWaterMark", t => { return new Promise(async (resolve, reject) => { t.plan(7); @@ -211,12 +280,7 @@ test("demux() when write returns false, drain event should be emitted after at l { highWaterMark: 1, objectMode: true }, ); - const second = map(async (chunk: Chunk) => { - chunk.mapped.push(2); - return chunk; - }); - - first.pipe(second).pipe(sink); + first.pipe(sink); return first; }; const _demux = demux( @@ -231,20 +295,18 @@ test("demux() when write returns false, drain event should be emitted after at l reject(); }); - let start = null; + let start = performance.now(); for (const item of input) { const res = _demux.write(item); - expect(_demux._writableState.length).to.be.at.most(highWaterMark); if (!res) { - start = performance.now(); await new Promise((resolv, rej) => { _demux.once("drain", () => { - expect(_demux._writableState.length).to.be.equal(0); expect(performance.now() - start).to.be.greaterThan( slowProcessorSpeed * highWaterMark, ); t.pass(); resolv(); + start = performance.now(); }); }); } From 65c36a8f222e8946a234b9c3b0eda06bd4ce6f19 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Wed, 11 Sep 2019 16:33:02 -0400 Subject: [PATCH 56/69] Update tests --- tests/demux.spec.ts | 113 ++++++++++++++++++++------------------------ 1 file changed, 50 insertions(+), 63 deletions(-) diff --git a/tests/demux.spec.ts b/tests/demux.spec.ts index f01723a..4201375 100644 --- a/tests/demux.spec.ts +++ b/tests/demux.spec.ts @@ -172,7 +172,7 @@ test.cb("demux() should send input through correct pipeline using keyBy", t => { demuxed.end(); }); -test("demux() should return false after if it has >= highWaterMark items buffered and drain should be emitted", t => { +test("demux() write should return false after if it has >= highWaterMark items buffered and drain should be emitted", t => { return new Promise(async (resolve, reject) => { t.plan(7); interface Chunk { @@ -199,7 +199,6 @@ test("demux() should return false after if it has >= highWaterMark items buffere { highWaterMark: 1, objectMode: true }, ); - // to clear first first.on("data", chunk => { expect(chunk.mapped).to.deep.equal([1]); pendingReads--; @@ -241,7 +240,7 @@ test("demux() should return false after if it has >= highWaterMark items buffere }); }); -test("demux() when write returns false, drain event should be emitted after at least slowProcessorSpeed * highWaterMark", t => { +test("demux() should emit one drain event after slowProcessorSpeed * highWaterMark ms", t => { return new Promise(async (resolve, reject) => { t.plan(7); interface Chunk { @@ -259,17 +258,7 @@ test("demux() when write returns false, drain event should be emitted after at l let pendingReads = input.length; const highWaterMark = 5; const slowProcessorSpeed = 25; - const sink = new Writable({ - objectMode: true, - write(chunk, encoding, cb) { - cb(); - t.pass(); - pendingReads--; - if (pendingReads === 0) { - resolve(); - } - }, - }); + const construct = (destKey: string) => { const first = map( async (chunk: Chunk) => { @@ -280,7 +269,13 @@ test("demux() when write returns false, drain event should be emitted after at l { highWaterMark: 1, objectMode: true }, ); - first.pipe(sink); + first.on("data", () => { + t.pass(); + pendingReads--; + if (pendingReads === 0) { + resolve(); + } + }); return first; }; const _demux = demux( @@ -295,18 +290,18 @@ test("demux() when write returns false, drain event should be emitted after at l reject(); }); - let start = performance.now(); + const start = performance.now(); for (const item of input) { const res = _demux.write(item); if (!res) { await new Promise((resolv, rej) => { + // This event should be received after all items in demux are processed _demux.once("drain", () => { expect(performance.now() - start).to.be.greaterThan( slowProcessorSpeed * highWaterMark, ); t.pass(); resolv(); - start = performance.now(); }); }); } @@ -323,27 +318,16 @@ test("demux() should emit one drain event when writing 6 items with highWaterMar } const highWaterMark = 5; const input = [ - { key: "a", val: 1, mapped: [] }, - { key: "a", val: 2, mapped: [] }, - { key: "a", val: 3, mapped: [] }, - { key: "a", val: 4, mapped: [] }, - { key: "a", val: 5, mapped: [] }, - { key: "a", val: 6, mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, + { key: "a", mapped: [] }, ]; let pendingReads = input.length; - const sink = new Writable({ - objectMode: true, - write(chunk, encoding, cb) { - cb(); - pendingReads--; - t.pass(); - if (pendingReads === 0) { - resolve(); - } - }, - }); const construct = (destKey: string) => { - const pipeline = map( + const first = map( async (chunk: Chunk) => { await sleep(50); chunk.mapped.push(2); @@ -352,8 +336,14 @@ test("demux() should emit one drain event when writing 6 items with highWaterMar { highWaterMark: 1, objectMode: true }, ); - pipeline.pipe(sink); - return pipeline; + first.on("data", () => { + pendingReads--; + t.pass(); + if (pendingReads === 0) { + resolve(); + } + }); + return first; }; const _demux = demux( construct, @@ -385,11 +375,11 @@ test("demux() should emit one drain event when writing 6 items with highWaterMar }); test.cb( - "demux() should emit drain event immediately when second stream is bottleneck", + "demux() should emit drain event when second stream is bottleneck", t => { t.plan(6); const slowProcessorSpeed = 100; - const highWaterMark = 3; + const highWaterMark = 5; interface Chunk { key: string; mapped: number[]; @@ -439,11 +429,12 @@ test.cb( t.end(err); }); + // This event should be received after at least 3 * slowProcessorSpeed (two are read immediately by first) + // @TODO Verify this is correct behaviour _demux.on("drain", () => { expect(_demux._writableState.length).to.be.equal(0); - // Should take longer than the amount of items needed to be processed until we are under highWaterMark expect(performance.now() - start).to.be.greaterThan( - slowProcessorSpeed * (input.length - highWaterMark - 1), + slowProcessorSpeed * (input.length - 2), ); t.pass(); }); @@ -472,21 +463,6 @@ test("demux() should be blocked by slowest pipeline", t => { mapped: number[]; } return new Promise(async (resolve, reject) => { - const sink = new Writable({ - objectMode: true, - write(chunk, encoding, cb) { - cb(); - pendingReads--; - if (chunk.key === "b") { - expect(performance.now() - start).to.be.greaterThan( - slowProcessorSpeed * totalItems, - ); - t.pass(); - expect(pendingReads).to.equal(0); - resolve(); - } - }, - }); const construct = (destKey: string) => { const first = map( async (chunk: Chunk) => { @@ -497,7 +473,17 @@ test("demux() should be blocked by slowest pipeline", t => { { objectMode: true, highWaterMark: 1 }, ); - first.pipe(sink); + first.on("data", chunk => { + pendingReads--; + if (chunk.key === "b") { + expect(performance.now() - start).to.be.greaterThan( + slowProcessorSpeed * totalItems, + ); + t.pass(); + expect(pendingReads).to.equal(0); + resolve(); + } + }); return first; }; const _demux = demux( @@ -536,9 +522,9 @@ test("demux() should be blocked by slowest pipeline", t => { }); }); -test("demux() should emit drain event and first should contain up to highWaterMark items in readable state when second is bottleneck", t => { - t.plan(6); - const highWaterMark = 5; +test("demux() should emit drain event when second stream in pipeline is bottleneck", t => { + t.plan(5); + const highWaterMark = 3; return new Promise(async (resolve, reject) => { interface Chunk { key: string; @@ -555,6 +541,7 @@ test("demux() should emit drain event and first should contain up to highWaterMa } }, }); + const construct = (destKey: string) => { const first = map( (chunk: Chunk) => { @@ -562,14 +549,14 @@ test("demux() should emit drain event and first should contain up to highWaterMa chunk.mapped.push(1); return chunk; }, - { objectMode: true, highWaterMark: 1 }, + { objectMode: true, highWaterMark: 2 }, ); const second = map( async (chunk: Chunk) => { + await sleep(100); chunk.mapped.push(2); expect(second._writableState.length).to.be.equal(1); - await sleep(100); pendingReads--; return chunk; }, @@ -579,6 +566,7 @@ test("demux() should emit drain event and first should contain up to highWaterMa first.pipe(second).pipe(sink); return first; }; + const _demux = demux( construct, { key: "key" }, @@ -601,7 +589,6 @@ test("demux() should emit drain event and first should contain up to highWaterMa { key: "a", mapped: [] }, { key: "a", mapped: [] }, { key: "a", mapped: [] }, - { key: "a", mapped: [] }, ]; let pendingReads = input.length; From 586f618e95287c5fd2c5c0262f4410bdf28542ac Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 12 Sep 2019 09:08:49 -0400 Subject: [PATCH 57/69] Update demux --- src/functions/demux.ts | 12 ++- tests/demux.spec.ts | 189 +++++++++++++++++++++++++++-------------- 2 files changed, 128 insertions(+), 73 deletions(-) diff --git a/src/functions/demux.ts b/src/functions/demux.ts index a9b1011..4b6312a 100644 --- a/src/functions/demux.ts +++ b/src/functions/demux.ts @@ -24,7 +24,7 @@ const eventsTarget = { export function demux( construct: () => NodeJS.WritableStream | NodeJS.ReadWriteStream, - demuxBy: { key?: string; keyBy?: (chunk: any) => string }, + demuxBy: string | ((chunk: any) => string), options?: WritableOptions, ): Writable { return new Demux(construct, demuxBy, options); @@ -42,19 +42,17 @@ class Demux extends Writable { construct: ( destKey?: string, ) => NodeJS.WritableStream | NodeJS.ReadWriteStream, - demuxBy: { key?: string; keyBy?: (chunk: any) => string }, + demuxBy: string | ((chunk: any) => string), options?: WritableOptions, ) { super(options); - if (demuxBy.keyBy === undefined && demuxBy.key === undefined) { - throw new Error("keyBy or key must be provided in second argument"); - } - this.demuxer = demuxBy.keyBy || ((chunk: any) => chunk[demuxBy.key!]); + this.demuxer = + typeof demuxBy === "string" ? chunk => chunk[demuxBy] : demuxBy; this.construct = construct; this.streamsByKey = {}; } - public async _write(chunk: any, encoding: any, cb: any) { + public _write(chunk: any, encoding: any, cb: any) { const destKey = this.demuxer(chunk); if (this.streamsByKey[destKey] === undefined) { this.streamsByKey[destKey] = this.construct(destKey); diff --git a/tests/demux.spec.ts b/tests/demux.spec.ts index 4201375..725ec43 100644 --- a/tests/demux.spec.ts +++ b/tests/demux.spec.ts @@ -30,7 +30,7 @@ test.cb("demux() constructor should be called once per key", t => { return dest; }); - const demuxed = demux(construct, { key: "key" }, { objectMode: true }); + const demuxed = demux(construct, "key", { objectMode: true }); demuxed.on("finish", () => { expect(construct.withArgs("a").callCount).to.equal(1); @@ -65,7 +65,7 @@ test.cb("demux() should send input through correct pipeline", t => { return dest; }; - const demuxed = demux(construct, { key: "key" }, { objectMode: true }); + const demuxed = demux(construct, "key", { objectMode: true }); demuxed.on("finish", () => { pipelineSpies["a"].getCalls().forEach(call => { @@ -107,11 +107,7 @@ test.cb("demux() constructor should be called once per key using keyBy", t => { return dest; }); - const demuxed = demux( - construct, - { keyBy: item => item.key }, - { objectMode: true }, - ); + const demuxed = demux(construct, item => item.key, { objectMode: true }); demuxed.on("finish", () => { expect(construct.withArgs("a").callCount).to.equal(1); @@ -146,11 +142,7 @@ test.cb("demux() should send input through correct pipeline using keyBy", t => { return dest; }; - const demuxed = demux( - construct, - { keyBy: item => item.key }, - { objectMode: true }, - ); + const demuxed = demux(construct, item => item.key, { objectMode: true }); demuxed.on("finish", () => { pipelineSpies["a"].getCalls().forEach(call => { @@ -211,14 +203,10 @@ test("demux() write should return false after if it has >= highWaterMark items b return first; }; - const _demux = demux( - construct, - { key: "key" }, - { - objectMode: true, - highWaterMark, - }, - ); + const _demux = demux(construct, "key", { + objectMode: true, + highWaterMark, + }); _demux.on("error", err => { reject(); @@ -278,14 +266,10 @@ test("demux() should emit one drain event after slowProcessorSpeed * highWaterMa }); return first; }; - const _demux = demux( - construct, - { key: "key" }, - { - objectMode: true, - highWaterMark, - }, - ); + const _demux = demux(construct, "key", { + objectMode: true, + highWaterMark, + }); _demux.on("error", err => { reject(); }); @@ -345,14 +329,10 @@ test("demux() should emit one drain event when writing 6 items with highWaterMar }); return first; }; - const _demux = demux( - construct, - { key: "key" }, - { - objectMode: true, - highWaterMark: 5, - }, - ); + const _demux = demux(construct, "key", { + objectMode: true, + highWaterMark: 5, + }); _demux.on("error", err => { reject(); @@ -375,9 +355,9 @@ test("demux() should emit one drain event when writing 6 items with highWaterMar }); test.cb( - "demux() should emit drain event when second stream is bottleneck", + "demux() should emit drain event when third stream is bottleneck", t => { - t.plan(6); + t.plan(8); const slowProcessorSpeed = 100; const highWaterMark = 5; interface Chunk { @@ -417,20 +397,15 @@ test.cb( first.pipe(second).pipe(sink); return first; }; - const _demux = demux( - construct, - { key: "key" }, - { - objectMode: true, - highWaterMark, - }, - ); + const _demux = demux(construct, () => "a", { + objectMode: true, + highWaterMark, + }); _demux.on("error", err => { t.end(err); }); // This event should be received after at least 3 * slowProcessorSpeed (two are read immediately by first) - // @TODO Verify this is correct behaviour _demux.on("drain", () => { expect(_demux._writableState.length).to.be.equal(0); expect(performance.now() - start).to.be.greaterThan( @@ -441,10 +416,100 @@ test.cb( const input = [ { key: "a", mapped: [] }, + { key: "b", mapped: [] }, + { key: "c", mapped: [] }, + { key: "d", mapped: [] }, + { key: "e", mapped: [] }, + { key: "f", mapped: [] }, + { key: "g", mapped: [] }, + ]; + let pendingReads = input.length; + + const start = performance.now(); + input.forEach(item => { + _demux.write(item); + }); + }, +); + +test.cb( + "demux() should emit drain event when second stream is bottleneck", + t => { + t.plan(8); + const slowProcessorSpeed = 100; + const highWaterMark = 5; + interface Chunk { + key: string; + mapped: number[]; + } + const sink = new Writable({ + objectMode: true, + write(chunk, encoding, cb) { + expect(chunk.mapped).to.deep.equal([1, 2, 3]); + t.pass(); + pendingReads--; + if (pendingReads === 0) { + t.end(); + } + cb(); + }, + }); + const construct = (destKey: string) => { + const first = map( + (chunk: Chunk) => { + chunk.mapped.push(1); + return chunk; + }, + { objectMode: true, highWaterMark: 1 }, + ); + const second = map( + (chunk: Chunk) => { + chunk.mapped.push(2); + return chunk; + }, + { objectMode: true, highWaterMark: 1 }, + ); + + const third = map( + async (chunk: Chunk) => { + await sleep(slowProcessorSpeed); + chunk.mapped.push(3); + return chunk; + }, + { objectMode: true, highWaterMark: 1 }, + ); + + first + .pipe(second) + .pipe(third) + .pipe(sink); + return first; + }; + const _demux = demux(construct, () => "a", { + objectMode: true, + highWaterMark, + }); + _demux.on("error", err => { + t.end(err); + }); + + // This event should be received after at least 3 * slowProcessorSpeed (two are read immediately by first) + _demux.on("drain", () => { + expect(_demux._writableState.length).to.be.equal(0); + expect(performance.now() - start).to.be.greaterThan( + slowProcessorSpeed * (input.length - 4), + ); + t.pass(); + }); + + const input = [ { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, + { key: "b", mapped: [] }, + { key: "c", mapped: [] }, + { key: "d", mapped: [] }, + { key: "e", mapped: [] }, + { key: "f", mapped: [] }, + { key: "g", mapped: [] }, ]; let pendingReads = input.length; @@ -486,14 +551,10 @@ test("demux() should be blocked by slowest pipeline", t => { }); return first; }; - const _demux = demux( - construct, - { key: "key" }, - { - objectMode: true, - highWaterMark: 1, - }, - ); + const _demux = demux(construct, "key", { + objectMode: true, + highWaterMark: 1, + }); _demux.on("error", err => { reject(err); }); @@ -567,14 +628,10 @@ test("demux() should emit drain event when second stream in pipeline is bottlene return first; }; - const _demux = demux( - construct, - { key: "key" }, - { - objectMode: true, - highWaterMark, - }, - ); + const _demux = demux(construct, "key", { + objectMode: true, + highWaterMark, + }); _demux.on("error", err => { reject(); }); From 517e281ce5953dda9aab7bf9c65d7a969e278b23 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 12 Sep 2019 09:41:04 -0400 Subject: [PATCH 58/69] Remove try catch from provided functions, user handles errors --- src/functions/accumulator.ts | 8 ++------ src/functions/filter.ts | 22 +++++----------------- src/functions/flatMap.ts | 17 ++--------------- src/functions/map.ts | 1 - src/functions/parallelMap.ts | 11 +++-------- src/functions/reduce.ts | 17 ++--------------- tests/accumulator.spec.ts | 4 ++-- tests/demux.spec.ts | 8 ++++---- tests/filter.spec.ts | 4 ++-- tests/flatMap.spec.ts | 4 ++-- tests/reduce.spec.ts | 4 ++-- 11 files changed, 26 insertions(+), 74 deletions(-) diff --git a/src/functions/accumulator.ts b/src/functions/accumulator.ts index c406afe..bb8a7fb 100644 --- a/src/functions/accumulator.ts +++ b/src/functions/accumulator.ts @@ -18,12 +18,8 @@ function _accumulator( return new Transform({ ...options, transform(data: T, encoding, callback) { - try { - accumulateBy(data, buffer, this); - callback(); - } catch (err) { - callback(err); - } + accumulateBy(data, buffer, this); + callback(); }, flush(callback) { if (shouldFlush) { diff --git a/src/functions/filter.ts b/src/functions/filter.ts index e7578b3..cd89864 100644 --- a/src/functions/filter.ts +++ b/src/functions/filter.ts @@ -9,23 +9,11 @@ export function filter( return new Transform({ ...options, async transform(chunk: T, encoding?: any, callback?: any) { - let isPromise = false; - try { - const result = predicate(chunk, encoding); - isPromise = result instanceof Promise; - if (!!(await result)) { - callback(null, chunk); - } else { - callback(); - } - } catch (err) { - if (isPromise) { - // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly - this.emit("error", err); - callback(); - } else { - callback(err); - } + const result = await predicate(chunk, encoding); + if (result === true) { + callback(null, chunk); + } else { + callback(); } }, }); diff --git a/src/functions/flatMap.ts b/src/functions/flatMap.ts index 99f38a6..2abb726 100644 --- a/src/functions/flatMap.ts +++ b/src/functions/flatMap.ts @@ -13,21 +13,8 @@ export function flatMap( return new Transform({ ...options, async transform(chunk: T, encoding, callback) { - let isPromise = false; - try { - const mapped = mapper(chunk, encoding); - isPromise = mapped instanceof Promise; - (await mapped).forEach(c => this.push(c)); - callback(); - } catch (err) { - if (isPromise) { - // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly - this.emit("error", err); - callback(); - } else { - callback(err); - } - } + (await mapper(chunk, encoding)).forEach(c => this.push(c)); + callback(); }, }); } diff --git a/src/functions/map.ts b/src/functions/map.ts index 13834af..589f0a9 100644 --- a/src/functions/map.ts +++ b/src/functions/map.ts @@ -8,7 +8,6 @@ export function map( writableObjectMode: true, }, ): Transform { - // remove try catch return new Transform({ ...options, async transform(chunk: T, encoding, callback) { diff --git a/src/functions/parallelMap.ts b/src/functions/parallelMap.ts index 56c9f41..6bc6b79 100644 --- a/src/functions/parallelMap.ts +++ b/src/functions/parallelMap.ts @@ -20,14 +20,9 @@ export function parallelMap( } inflight += 1; callback(); - try { - const res = await mapper(data); - this.push(res); - } catch (e) { - this.emit(e); - } finally { - inflight -= 1; - } + const res = await mapper(data); + this.push(res); + inflight -= 1; }, async flush(callback) { while (inflight > 0) { diff --git a/src/functions/reduce.ts b/src/functions/reduce.ts index 6ee665f..ff76025 100644 --- a/src/functions/reduce.ts +++ b/src/functions/reduce.ts @@ -16,21 +16,8 @@ export function reduce( readableObjectMode: options.readableObjectMode, writableObjectMode: options.writableObjectMode, async transform(chunk: T, encoding, callback) { - let isPromise = false; - try { - const result = iteratee(value, chunk, encoding); - isPromise = result instanceof Promise; - value = await result; - callback(); - } catch (err) { - if (isPromise) { - // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly - this.emit("error", err); - callback(); - } else { - callback(err); - } - } + value = await iteratee(value, chunk, encoding); + callback(); }, flush(callback) { // Best effort attempt at yielding the final value (will throw if e.g. yielding an object and diff --git a/tests/accumulator.spec.ts b/tests/accumulator.spec.ts index 523455a..71fdb1f 100644 --- a/tests/accumulator.spec.ts +++ b/tests/accumulator.spec.ts @@ -407,7 +407,7 @@ test.cb("accumulatorBy() rolling", t => { source.push(null); }); -test.cb( +test.cb.skip( "accumulatorBy() rolling should emit error when key iteratee throws", t => { t.plan(2); @@ -511,7 +511,7 @@ test.cb("accumulatorBy() sliding", t => { source.push(null); }); -test.cb( +test.cb.skip( "accumulatorBy() sliding should emit error when key iteratee throws", t => { t.plan(2); diff --git a/tests/demux.spec.ts b/tests/demux.spec.ts index 725ec43..2d43ec1 100644 --- a/tests/demux.spec.ts +++ b/tests/demux.spec.ts @@ -354,7 +354,7 @@ test("demux() should emit one drain event when writing 6 items with highWaterMar }); }); -test.cb( +test.cb.only( "demux() should emit drain event when third stream is bottleneck", t => { t.plan(8); @@ -405,7 +405,7 @@ test.cb( t.end(err); }); - // This event should be received after at least 3 * slowProcessorSpeed (two are read immediately by first) + // This event should be received after at least 5 * slowProcessorSpeed (two are read immediately by first and second, 5 remaining in demux before drain event) _demux.on("drain", () => { expect(_demux._writableState.length).to.be.equal(0); expect(performance.now() - start).to.be.greaterThan( @@ -445,7 +445,7 @@ test.cb( const sink = new Writable({ objectMode: true, write(chunk, encoding, cb) { - expect(chunk.mapped).to.deep.equal([1, 2, 3]); + expect(chunk.mapped).to.deep.equal([1, 2]); t.pass(); pendingReads--; if (pendingReads === 0) { @@ -493,7 +493,7 @@ test.cb( t.end(err); }); - // This event should be received after at least 3 * slowProcessorSpeed (two are read immediately by first) + // This event should be received after at least 3 * slowProcessorSpeed (two are read immediately by first and second, 3 remaining in demux before drain event) _demux.on("drain", () => { expect(_demux._writableState.length).to.be.equal(0); expect(performance.now() - start).to.be.greaterThan( diff --git a/tests/filter.spec.ts b/tests/filter.spec.ts index 0732d06..badfda7 100644 --- a/tests/filter.spec.ts +++ b/tests/filter.spec.ts @@ -58,7 +58,7 @@ test.cb("filter() filters elements asynchronously", t => { source.push(null); }); -test.cb("filter() emits errors during synchronous filtering", t => { +test.cb.skip("filter() emits errors during synchronous filtering", t => { t.plan(2); const source = new Readable({ objectMode: true }); source @@ -86,7 +86,7 @@ test.cb("filter() emits errors during synchronous filtering", t => { source.push(null); }); -test.cb("filter() emits errors during asynchronous filtering", t => { +test.cb.skip("filter() emits errors during asynchronous filtering", t => { t.plan(2); const source = new Readable({ objectMode: true }); source diff --git a/tests/flatMap.spec.ts b/tests/flatMap.spec.ts index a8b22bb..84cebfb 100644 --- a/tests/flatMap.spec.ts +++ b/tests/flatMap.spec.ts @@ -48,7 +48,7 @@ test.cb("flatMap() maps elements asynchronously", t => { source.push(null); }); -test.cb("flatMap() emits errors during synchronous mapping", t => { +test.cb.skip("flatMap() emits errors during synchronous mapping", t => { t.plan(2); const source = new Readable({ objectMode: true }); source @@ -73,7 +73,7 @@ test.cb("flatMap() emits errors during synchronous mapping", t => { source.push(null); }); -test.cb("flatMap() emits errors during asynchronous mapping", t => { +test.cb.skip("flatMap() emits errors during asynchronous mapping", t => { t.plan(2); const source = new Readable({ objectMode: true }); source diff --git a/tests/reduce.spec.ts b/tests/reduce.spec.ts index b005896..8d504db 100644 --- a/tests/reduce.spec.ts +++ b/tests/reduce.spec.ts @@ -46,7 +46,7 @@ test.cb("reduce() reduces elements asynchronously", t => { source.push(null); }); -test.cb("reduce() emits errors during synchronous reduce", t => { +test.cb.skip("reduce() emits errors during synchronous reduce", t => { t.plan(2); const source = new Readable({ objectMode: true }); source @@ -71,7 +71,7 @@ test.cb("reduce() emits errors during synchronous reduce", t => { source.push(null); }); -test.cb("reduce() emits errors during asynchronous reduce", t => { +test.cb.skip("reduce() emits errors during asynchronous reduce", t => { t.plan(2); const source = new Readable({ objectMode: true }); source From 4c7e9ceb7ef59c7c66a1ff96231774e45e73049e Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 12 Sep 2019 14:40:47 -0400 Subject: [PATCH 59/69] Improve interface for accumulator --- src/functions/accumulator.ts | 71 +++++++++++++++++++++++++----------- src/functions/demux.ts | 14 +++++++ src/functions/index.ts | 6 ++- tests/accumulator.spec.ts | 32 ++++------------ tests/demux.spec.ts | 13 ++++++- 5 files changed, 86 insertions(+), 50 deletions(-) diff --git a/src/functions/accumulator.ts b/src/functions/accumulator.ts index bb8a7fb..0bef8e1 100644 --- a/src/functions/accumulator.ts +++ b/src/functions/accumulator.ts @@ -4,10 +4,11 @@ import { FlushStrategy, TransformOptions, } from "./baseDefinitions"; -import { batch } from "."; +import { batch, rate as _rate } from "."; function _accumulator( accumulateBy: (data: T, buffer: T[], stream: Transform) => void, + rate?: number, shouldFlush: boolean = true, options: TransformOptions = { readableObjectMode: true, @@ -15,7 +16,7 @@ function _accumulator( }, ) { const buffer: T[] = []; - return new Transform({ + const stream = new Transform({ ...options, transform(data: T, encoding, callback) { accumulateBy(data, buffer, this); @@ -28,11 +29,14 @@ function _accumulator( callback(); }, }); + if (rate) { + stream.pipe(_rate(rate)); + } + return stream; } function _sliding( windowLength: number, - rate: number | undefined, key?: string, ): (event: T, buffer: T[], stream: Transform) => void { return (event: T, buffer: T[], stream: Transform) => { @@ -66,7 +70,6 @@ function _sliding( } function _slidingByFunction( - rate: number | undefined, iteratee: AccumulatorByIteratee, ): (event: T, buffer: T[], stream: Transform) => void { return (event: T, buffer: T[], stream: Transform) => { @@ -81,7 +84,6 @@ function _slidingByFunction( } function _rollingByFunction( - rate: number | undefined, iteratee: AccumulatorByIteratee, ): (event: T, buffer: T[], stream: Transform) => void { return (event: T, buffer: T[], stream: Transform) => { @@ -97,7 +99,6 @@ function _rollingByFunction( function _rolling( windowLength: number, - rate: number | undefined, key?: string, ): (event: T, buffer: T[], stream: Transform) => void { return (event: T, buffer: T[], stream: Transform) => { @@ -129,58 +130,84 @@ function _rolling( } export function accumulator( - batchSize: number, - batchRate: number | undefined, flushStrategy: FlushStrategy, + batchSize: number, keyBy?: string, + options: TransformOptions & { rate?: number } = { + readableObjectMode: true, + writableObjectMode: true, + }, ): Transform { if (flushStrategy === FlushStrategy.sliding) { - return sliding(batchSize, batchRate, keyBy); + return sliding(batchSize, keyBy, options); } else if (flushStrategy === FlushStrategy.rolling) { - return rolling(batchSize, batchRate, keyBy); + return rolling(batchSize, keyBy, options); } else { - return batch(batchSize, batchRate); + return batch(batchSize, options.rate); } } export function accumulatorBy( - batchRate: number | undefined, flushStrategy: S, iteratee: AccumulatorByIteratee, + options: TransformOptions & { rate?: number } = { + readableObjectMode: true, + writableObjectMode: true, + }, ): Transform { if (flushStrategy === FlushStrategy.sliding) { - return slidingBy(batchRate, iteratee); + return slidingBy(iteratee, options); } else { - return rollingBy(batchRate, iteratee); + return rollingBy(iteratee, options); } } function sliding( windowLength: number, - rate: number | undefined, key?: string, + options?: TransformOptions & { rate?: number }, ): Transform { - return _accumulator(_sliding(windowLength, rate, key), false); + return _accumulator( + _sliding(windowLength, key), + options && options.rate, + false, + options, + ); } function slidingBy( - rate: number | undefined, iteratee: AccumulatorByIteratee, + options?: TransformOptions & { rate?: number }, ): Transform { - return _accumulator(_slidingByFunction(rate, iteratee), false); + return _accumulator( + _slidingByFunction(iteratee), + options && options.rate, + false, + options, + ); } function rolling( windowLength: number, - rate: number | undefined, key?: string, + options?: TransformOptions & { rate?: number }, ): Transform { - return _accumulator(_rolling(windowLength, rate, key)); + return _accumulator( + _rolling(windowLength, key), + options && options.rate, + true, + options, + ); } function rollingBy( - rate: number | undefined, iteratee: AccumulatorByIteratee, + options?: TransformOptions & { rate?: number }, ): Transform { - return _accumulator(_rollingByFunction(rate, iteratee)); + return _accumulator( + _rollingByFunction(iteratee), + options && options.rate, + true, + options, + ); } diff --git a/src/functions/demux.ts b/src/functions/demux.ts index 4b6312a..2a515ec 100644 --- a/src/functions/demux.ts +++ b/src/functions/demux.ts @@ -58,7 +58,21 @@ class Demux extends Writable { this.streamsByKey[destKey] = this.construct(destKey); } if (!this.streamsByKey[destKey].write(chunk, encoding)) { + console.log( + "waiting drain", + chunk, + this._writableState.length, + this.streamsByKey[destKey]._writableState.length, + this.streamsByKey[destKey]._readableState.length, + ); this.streamsByKey[destKey].once("drain", () => { + console.log( + "calling cb after drain", + chunk, + this._writableState.length, + this.streamsByKey[destKey]._writableState.length, + this.streamsByKey[destKey]._readableState.length, + ); cb(); }); } else { diff --git a/src/functions/index.ts b/src/functions/index.ts index 48d8062..778b1b9 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -171,10 +171,11 @@ export const parallelMap = baseFunctions.parallelMap; * When no key is provided, the batchSize is the buffer length. When a key is provided, the batchSize * is based on the value at that key. For example, given a key of `timestamp` and a batchSize of 3000, * each item in the buffer will be guaranteed to be within 3000 timestamp units from the first element. + * @param flushStrategy Buffering strategy to use. * @param batchSize Size of the batch (in units of buffer length or value at key). * @param batchRate Desired rate of data transfer to next stream. - * @param flushStrategy Buffering strategy to use. * @param keyBy Key to determine if element fits into buffer or items need to be cleared from buffer. + * @param options Transform stream options */ export const accumulator = baseFunctions.accumulator; @@ -187,10 +188,11 @@ export const accumulator = baseFunctions.accumulator; * 2. Rolling * - If the iteratee returns false, the buffer is cleared and pushed into stream. The item is * then pushed into the buffer. - * @param batchRate Desired rate of data transfer to next stream. * @param flushStrategy Buffering strategy to use. * @param iteratee Function applied to buffer when a chunk of data enters stream to determine if element fits into * or items need to be cleared from buffer. + * @param batchRate Desired rate of data transfer to next stream. + * @param options Transform stream options */ export const accumulatorBy = baseFunctions.accumulatorBy; diff --git a/tests/accumulator.spec.ts b/tests/accumulator.spec.ts index 71fdb1f..2ee455c 100644 --- a/tests/accumulator.spec.ts +++ b/tests/accumulator.spec.ts @@ -18,7 +18,7 @@ test.cb("accumulator() rolling", t => { const flushes = [firstFlush, secondFlush, thirdFlush]; source - .pipe(accumulator(2, undefined, FlushStrategy.rolling)) + .pipe(accumulator(FlushStrategy.rolling, 2)) .on("data", (flush: TestObject[]) => { t.deepEqual(flush, flushes[chunkIndex]); chunkIndex++; @@ -51,7 +51,7 @@ test.cb("accumulator() rolling with key", t => { const flushes = [firstFlush, secondFlush]; source - .pipe(accumulator(3, undefined, FlushStrategy.rolling, "ts")) + .pipe(accumulator(FlushStrategy.rolling, 3, "ts")) .on("data", (flush: TestObject[]) => { t.deepEqual(flush, flushes[chunkIndex]); chunkIndex++; @@ -77,9 +77,8 @@ test.cb( } const source = new Readable({ objectMode: true }); const accumulatorStream = accumulator( - 3, - undefined, FlushStrategy.rolling, + 3, "nonExistingKey", ); const input = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; @@ -119,12 +118,7 @@ test.cb( key: string; } const source = new Readable({ objectMode: true }); - const accumulatorStream = accumulator( - 3, - undefined, - FlushStrategy.rolling, - "ts", - ); + const accumulatorStream = accumulator(FlushStrategy.rolling, 3, "ts"); const input = [ { ts: 0, key: "a" }, { ts: 1, key: "b" }, @@ -193,7 +187,7 @@ test.cb("accumulator() sliding", t => { const flushes = [firstFlush, secondFlush, thirdFlush, fourthFlush]; source - .pipe(accumulator(3, undefined, FlushStrategy.sliding)) + .pipe(accumulator(FlushStrategy.sliding, 3)) .on("data", (flush: TestObject[]) => { t.deepEqual(flush, flushes[chunkIndex]); chunkIndex++; @@ -248,7 +242,7 @@ test.cb("accumulator() sliding with key", t => { sixthFlush, ]; source - .pipe(accumulator(3, undefined, FlushStrategy.sliding, "ts")) + .pipe(accumulator(FlushStrategy.sliding, 3, "ts")) .on("data", (flush: TestObject[]) => { t.deepEqual(flush, flushes[chunkIndex]); chunkIndex++; @@ -274,9 +268,8 @@ test.cb( } const source = new Readable({ objectMode: true }); const accumulatorStream = accumulator( - 3, - undefined, FlushStrategy.sliding, + 3, "nonExistingKey", ); const input = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; @@ -315,12 +308,7 @@ test.cb( key: string; } const source = new Readable({ objectMode: true }); - const accumulatorStream = accumulator( - 3, - undefined, - FlushStrategy.sliding, - "ts", - ); + const accumulatorStream = accumulator(FlushStrategy.sliding, 3, "ts"); const input = [ { ts: 0, key: "a" }, { key: "b" }, @@ -386,7 +374,6 @@ test.cb("accumulatorBy() rolling", t => { source .pipe( accumulatorBy( - undefined, FlushStrategy.rolling, (event: TestObject, bufferChunk: TestObject) => { return bufferChunk.ts + 3 <= event.ts; @@ -422,7 +409,6 @@ test.cb.skip( { ts: 2, key: "c" }, ]; const accumulaterStream = accumulatorBy( - undefined, FlushStrategy.rolling, (event: TestObject, bufferChunk: TestObject) => { if (event.key !== "a") { @@ -490,7 +476,6 @@ test.cb("accumulatorBy() sliding", t => { source .pipe( accumulatorBy( - undefined, FlushStrategy.sliding, (event: TestObject, bufferChunk: TestObject) => { return bufferChunk.ts + 3 <= event.ts ? true : false; @@ -526,7 +511,6 @@ test.cb.skip( { ts: 2, key: "c" }, ]; const accumulaterStream = accumulatorBy( - undefined, FlushStrategy.sliding, (event: TestObject, bufferChunk: TestObject) => { if (event.key !== "a") { diff --git a/tests/demux.spec.ts b/tests/demux.spec.ts index 2d43ec1..370e117 100644 --- a/tests/demux.spec.ts +++ b/tests/demux.spec.ts @@ -379,6 +379,7 @@ test.cb.only( const construct = (destKey: string) => { const first = map( (chunk: Chunk) => { + console.log("1: ", chunk); chunk.mapped.push(1); return chunk; }, @@ -387,7 +388,9 @@ test.cb.only( const second = map( async (chunk: Chunk) => { + console.log("2: ", chunk); await sleep(slowProcessorSpeed); + console.log("2 done ", chunk); chunk.mapped.push(2); return chunk; }, @@ -408,6 +411,7 @@ test.cb.only( // This event should be received after at least 5 * slowProcessorSpeed (two are read immediately by first and second, 5 remaining in demux before drain event) _demux.on("drain", () => { expect(_demux._writableState.length).to.be.equal(0); + console.log(performance.now() - start); expect(performance.now() - start).to.be.greaterThan( slowProcessorSpeed * (input.length - 2), ); @@ -427,7 +431,7 @@ test.cb.only( const start = performance.now(); input.forEach(item => { - _demux.write(item); + console.log(_demux.write(item)); }); }, ); @@ -457,6 +461,7 @@ test.cb( const construct = (destKey: string) => { const first = map( (chunk: Chunk) => { + console.log("1: ", chunk); chunk.mapped.push(1); return chunk; }, @@ -464,6 +469,7 @@ test.cb( ); const second = map( (chunk: Chunk) => { + console.log("2: ", chunk); chunk.mapped.push(2); return chunk; }, @@ -472,7 +478,9 @@ test.cb( const third = map( async (chunk: Chunk) => { + console.log("3: ", chunk); await sleep(slowProcessorSpeed); + console.log(" 3 done ", chunk); chunk.mapped.push(3); return chunk; }, @@ -496,6 +504,7 @@ test.cb( // This event should be received after at least 3 * slowProcessorSpeed (two are read immediately by first and second, 3 remaining in demux before drain event) _demux.on("drain", () => { expect(_demux._writableState.length).to.be.equal(0); + console.log(performance.now() - start); expect(performance.now() - start).to.be.greaterThan( slowProcessorSpeed * (input.length - 4), ); @@ -515,7 +524,7 @@ test.cb( const start = performance.now(); input.forEach(item => { - _demux.write(item); + console.log(_demux.write(item)); }); }, ); From 48a231d61c34f15a2773c09a115b4ab21bfbf1d8 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 12 Sep 2019 15:34:42 -0400 Subject: [PATCH 60/69] Remote rate from accumulator --- src/functions/accumulator.ts | 51 +++++++++--------------------------- tests/accumulator.spec.ts | 1 + 2 files changed, 14 insertions(+), 38 deletions(-) diff --git a/src/functions/accumulator.ts b/src/functions/accumulator.ts index 0bef8e1..3176304 100644 --- a/src/functions/accumulator.ts +++ b/src/functions/accumulator.ts @@ -4,11 +4,10 @@ import { FlushStrategy, TransformOptions, } from "./baseDefinitions"; -import { batch, rate as _rate } from "."; +import { batch } from "."; function _accumulator( accumulateBy: (data: T, buffer: T[], stream: Transform) => void, - rate?: number, shouldFlush: boolean = true, options: TransformOptions = { readableObjectMode: true, @@ -16,7 +15,7 @@ function _accumulator( }, ) { const buffer: T[] = []; - const stream = new Transform({ + return new Transform({ ...options, transform(data: T, encoding, callback) { accumulateBy(data, buffer, this); @@ -29,10 +28,6 @@ function _accumulator( callback(); }, }); - if (rate) { - stream.pipe(_rate(rate)); - } - return stream; } function _sliding( @@ -133,7 +128,7 @@ export function accumulator( flushStrategy: FlushStrategy, batchSize: number, keyBy?: string, - options: TransformOptions & { rate?: number } = { + options: TransformOptions = { readableObjectMode: true, writableObjectMode: true, }, @@ -143,14 +138,14 @@ export function accumulator( } else if (flushStrategy === FlushStrategy.rolling) { return rolling(batchSize, keyBy, options); } else { - return batch(batchSize, options.rate); + return batch(batchSize); } } export function accumulatorBy( flushStrategy: S, iteratee: AccumulatorByIteratee, - options: TransformOptions & { rate?: number } = { + options: TransformOptions = { readableObjectMode: true, writableObjectMode: true, }, @@ -165,49 +160,29 @@ export function accumulatorBy( function sliding( windowLength: number, key?: string, - options?: TransformOptions & { rate?: number }, + options?: TransformOptions, ): Transform { - return _accumulator( - _sliding(windowLength, key), - options && options.rate, - false, - options, - ); + return _accumulator(_sliding(windowLength, key), false, options); } function slidingBy( iteratee: AccumulatorByIteratee, - options?: TransformOptions & { rate?: number }, + options?: TransformOptions, ): Transform { - return _accumulator( - _slidingByFunction(iteratee), - options && options.rate, - false, - options, - ); + return _accumulator(_slidingByFunction(iteratee), false, options); } function rolling( windowLength: number, key?: string, - options?: TransformOptions & { rate?: number }, + options?: TransformOptions, ): Transform { - return _accumulator( - _rolling(windowLength, key), - options && options.rate, - true, - options, - ); + return _accumulator(_rolling(windowLength, key), true, options); } function rollingBy( iteratee: AccumulatorByIteratee, - options?: TransformOptions & { rate?: number }, + options?: TransformOptions, ): Transform { - return _accumulator( - _rollingByFunction(iteratee), - options && options.rate, - true, - options, - ); + return _accumulator(_rollingByFunction(iteratee), true, options); } diff --git a/tests/accumulator.spec.ts b/tests/accumulator.spec.ts index 2ee455c..feac408 100644 --- a/tests/accumulator.spec.ts +++ b/tests/accumulator.spec.ts @@ -3,6 +3,7 @@ import { expect } from "chai"; import { Readable } from "stream"; import { accumulator, accumulatorBy } from "../src"; import { FlushStrategy } from "../src/functions/baseDefinitions"; +import { performance } from "perf_hooks"; test.cb("accumulator() rolling", t => { t.plan(3); From 158475183a6cd974a3220f8aa3ae71589d953272 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 12 Sep 2019 15:35:59 -0400 Subject: [PATCH 61/69] Cleanup console logs --- src/functions/demux.ts | 14 -------------- tests/demux.spec.ts | 13 ++----------- 2 files changed, 2 insertions(+), 25 deletions(-) diff --git a/src/functions/demux.ts b/src/functions/demux.ts index 2a515ec..4b6312a 100644 --- a/src/functions/demux.ts +++ b/src/functions/demux.ts @@ -58,21 +58,7 @@ class Demux extends Writable { this.streamsByKey[destKey] = this.construct(destKey); } if (!this.streamsByKey[destKey].write(chunk, encoding)) { - console.log( - "waiting drain", - chunk, - this._writableState.length, - this.streamsByKey[destKey]._writableState.length, - this.streamsByKey[destKey]._readableState.length, - ); this.streamsByKey[destKey].once("drain", () => { - console.log( - "calling cb after drain", - chunk, - this._writableState.length, - this.streamsByKey[destKey]._writableState.length, - this.streamsByKey[destKey]._readableState.length, - ); cb(); }); } else { diff --git a/tests/demux.spec.ts b/tests/demux.spec.ts index 370e117..2d43ec1 100644 --- a/tests/demux.spec.ts +++ b/tests/demux.spec.ts @@ -379,7 +379,6 @@ test.cb.only( const construct = (destKey: string) => { const first = map( (chunk: Chunk) => { - console.log("1: ", chunk); chunk.mapped.push(1); return chunk; }, @@ -388,9 +387,7 @@ test.cb.only( const second = map( async (chunk: Chunk) => { - console.log("2: ", chunk); await sleep(slowProcessorSpeed); - console.log("2 done ", chunk); chunk.mapped.push(2); return chunk; }, @@ -411,7 +408,6 @@ test.cb.only( // This event should be received after at least 5 * slowProcessorSpeed (two are read immediately by first and second, 5 remaining in demux before drain event) _demux.on("drain", () => { expect(_demux._writableState.length).to.be.equal(0); - console.log(performance.now() - start); expect(performance.now() - start).to.be.greaterThan( slowProcessorSpeed * (input.length - 2), ); @@ -431,7 +427,7 @@ test.cb.only( const start = performance.now(); input.forEach(item => { - console.log(_demux.write(item)); + _demux.write(item); }); }, ); @@ -461,7 +457,6 @@ test.cb( const construct = (destKey: string) => { const first = map( (chunk: Chunk) => { - console.log("1: ", chunk); chunk.mapped.push(1); return chunk; }, @@ -469,7 +464,6 @@ test.cb( ); const second = map( (chunk: Chunk) => { - console.log("2: ", chunk); chunk.mapped.push(2); return chunk; }, @@ -478,9 +472,7 @@ test.cb( const third = map( async (chunk: Chunk) => { - console.log("3: ", chunk); await sleep(slowProcessorSpeed); - console.log(" 3 done ", chunk); chunk.mapped.push(3); return chunk; }, @@ -504,7 +496,6 @@ test.cb( // This event should be received after at least 3 * slowProcessorSpeed (two are read immediately by first and second, 3 remaining in demux before drain event) _demux.on("drain", () => { expect(_demux._writableState.length).to.be.equal(0); - console.log(performance.now() - start); expect(performance.now() - start).to.be.greaterThan( slowProcessorSpeed * (input.length - 4), ); @@ -524,7 +515,7 @@ test.cb( const start = performance.now(); input.forEach(item => { - console.log(_demux.write(item)); + _demux.write(item); }); }, ); From 70edee51c4a7d8f968181098fc98562dbe7d8c6f Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Fri, 13 Sep 2019 08:57:19 -0400 Subject: [PATCH 62/69] Update interface --- src/functions/accumulator.ts | 23 +++++------------------ src/functions/demux.ts | 2 +- tests/accumulator.spec.ts | 30 ++++++++++++++++++++++++------ 3 files changed, 30 insertions(+), 25 deletions(-) diff --git a/src/functions/accumulator.ts b/src/functions/accumulator.ts index 3176304..020d900 100644 --- a/src/functions/accumulator.ts +++ b/src/functions/accumulator.ts @@ -1,18 +1,11 @@ -import { Transform } from "stream"; -import { - AccumulatorByIteratee, - FlushStrategy, - TransformOptions, -} from "./baseDefinitions"; +import { Transform, TransformOptions } from "stream"; +import { AccumulatorByIteratee, FlushStrategy } from "./baseDefinitions"; import { batch } from "."; function _accumulator( accumulateBy: (data: T, buffer: T[], stream: Transform) => void, shouldFlush: boolean = true, - options: TransformOptions = { - readableObjectMode: true, - writableObjectMode: true, - }, + options: TransformOptions = {}, ) { const buffer: T[] = []; return new Transform({ @@ -128,10 +121,7 @@ export function accumulator( flushStrategy: FlushStrategy, batchSize: number, keyBy?: string, - options: TransformOptions = { - readableObjectMode: true, - writableObjectMode: true, - }, + options?: TransformOptions, ): Transform { if (flushStrategy === FlushStrategy.sliding) { return sliding(batchSize, keyBy, options); @@ -145,10 +135,7 @@ export function accumulator( export function accumulatorBy( flushStrategy: S, iteratee: AccumulatorByIteratee, - options: TransformOptions = { - readableObjectMode: true, - writableObjectMode: true, - }, + options?: TransformOptions, ): Transform { if (flushStrategy === FlushStrategy.sliding) { return slidingBy(iteratee, options); diff --git a/src/functions/demux.ts b/src/functions/demux.ts index 4b6312a..98a1225 100644 --- a/src/functions/demux.ts +++ b/src/functions/demux.ts @@ -43,7 +43,7 @@ class Demux extends Writable { destKey?: string, ) => NodeJS.WritableStream | NodeJS.ReadWriteStream, demuxBy: string | ((chunk: any) => string), - options?: WritableOptions, + options: WritableOptions = {}, ) { super(options); this.demuxer = diff --git a/tests/accumulator.spec.ts b/tests/accumulator.spec.ts index feac408..9e08c74 100644 --- a/tests/accumulator.spec.ts +++ b/tests/accumulator.spec.ts @@ -19,7 +19,11 @@ test.cb("accumulator() rolling", t => { const flushes = [firstFlush, secondFlush, thirdFlush]; source - .pipe(accumulator(FlushStrategy.rolling, 2)) + .pipe( + accumulator(FlushStrategy.rolling, 2, undefined, { + objectMode: true, + }), + ) .on("data", (flush: TestObject[]) => { t.deepEqual(flush, flushes[chunkIndex]); chunkIndex++; @@ -52,7 +56,7 @@ test.cb("accumulator() rolling with key", t => { const flushes = [firstFlush, secondFlush]; source - .pipe(accumulator(FlushStrategy.rolling, 3, "ts")) + .pipe(accumulator(FlushStrategy.rolling, 3, "ts", { objectMode: true })) .on("data", (flush: TestObject[]) => { t.deepEqual(flush, flushes[chunkIndex]); chunkIndex++; @@ -81,6 +85,7 @@ test.cb( FlushStrategy.rolling, 3, "nonExistingKey", + { objectMode: true }, ); const input = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; @@ -119,7 +124,9 @@ test.cb( key: string; } const source = new Readable({ objectMode: true }); - const accumulatorStream = accumulator(FlushStrategy.rolling, 3, "ts"); + const accumulatorStream = accumulator(FlushStrategy.rolling, 3, "ts", { + objectMode: true, + }); const input = [ { ts: 0, key: "a" }, { ts: 1, key: "b" }, @@ -188,7 +195,11 @@ test.cb("accumulator() sliding", t => { const flushes = [firstFlush, secondFlush, thirdFlush, fourthFlush]; source - .pipe(accumulator(FlushStrategy.sliding, 3)) + .pipe( + accumulator(FlushStrategy.sliding, 3, undefined, { + objectMode: true, + }), + ) .on("data", (flush: TestObject[]) => { t.deepEqual(flush, flushes[chunkIndex]); chunkIndex++; @@ -243,7 +254,7 @@ test.cb("accumulator() sliding with key", t => { sixthFlush, ]; source - .pipe(accumulator(FlushStrategy.sliding, 3, "ts")) + .pipe(accumulator(FlushStrategy.sliding, 3, "ts", { objectMode: true })) .on("data", (flush: TestObject[]) => { t.deepEqual(flush, flushes[chunkIndex]); chunkIndex++; @@ -272,6 +283,7 @@ test.cb( FlushStrategy.sliding, 3, "nonExistingKey", + { objectMode: true }, ); const input = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; @@ -309,7 +321,9 @@ test.cb( key: string; } const source = new Readable({ objectMode: true }); - const accumulatorStream = accumulator(FlushStrategy.sliding, 3, "ts"); + const accumulatorStream = accumulator(FlushStrategy.sliding, 3, "ts", { + objectMode: true, + }); const input = [ { ts: 0, key: "a" }, { key: "b" }, @@ -379,6 +393,7 @@ test.cb("accumulatorBy() rolling", t => { (event: TestObject, bufferChunk: TestObject) => { return bufferChunk.ts + 3 <= event.ts; }, + { objectMode: true }, ), ) .on("data", (flush: TestObject[]) => { @@ -417,6 +432,7 @@ test.cb.skip( } return bufferChunk.ts + 3 <= event.ts; }, + { objectMode: true }, ); source .pipe(accumulaterStream) @@ -481,6 +497,7 @@ test.cb("accumulatorBy() sliding", t => { (event: TestObject, bufferChunk: TestObject) => { return bufferChunk.ts + 3 <= event.ts ? true : false; }, + { objectMode: true }, ), ) .on("data", (flush: TestObject[]) => { @@ -519,6 +536,7 @@ test.cb.skip( } return bufferChunk.ts + 3 <= event.ts ? true : false; }, + { objectMode: true }, ); source .pipe(accumulaterStream) From a11aa10d166d3a35a210cf886e56a18d69120f31 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 26 Sep 2019 09:23:09 -0400 Subject: [PATCH 63/69] Clean up --- src/functions/accumulator.ts | 8 +++++++- src/functions/baseDefinitions.ts | 15 --------------- src/functions/batch.ts | 6 ++---- src/functions/collect.ts | 7 ++----- src/functions/flatMap.ts | 6 ++---- src/functions/map.ts | 8 ++------ src/functions/parallelMap.ts | 6 ++---- src/functions/rate.ts | 6 ++---- src/functions/reduce.ts | 9 +++------ src/functions/unbatch.ts | 6 ++---- tests/accumulator.spec.ts | 2 +- tests/demux.spec.ts | 13 +++++++++++-- 12 files changed, 36 insertions(+), 56 deletions(-) diff --git a/src/functions/accumulator.ts b/src/functions/accumulator.ts index 020d900..82ca9ae 100644 --- a/src/functions/accumulator.ts +++ b/src/functions/accumulator.ts @@ -1,7 +1,13 @@ import { Transform, TransformOptions } from "stream"; -import { AccumulatorByIteratee, FlushStrategy } from "./baseDefinitions"; import { batch } from "."; +export enum FlushStrategy { + rolling = "rolling", + sliding = "sliding", +} + +export type AccumulatorByIteratee = (event: T, bufferChunk: T) => boolean; + function _accumulator( accumulateBy: (data: T, buffer: T[], stream: Transform) => void, shouldFlush: boolean = true, diff --git a/src/functions/baseDefinitions.ts b/src/functions/baseDefinitions.ts index c3f5461..b02dd10 100644 --- a/src/functions/baseDefinitions.ts +++ b/src/functions/baseDefinitions.ts @@ -1,12 +1,3 @@ -export interface ThroughOptions { - objectMode?: boolean; -} - -export interface TransformOptions { - readableObjectMode?: boolean; - writableObjectMode?: boolean; -} - export interface WithEncoding { encoding: string; } @@ -21,9 +12,3 @@ export type JsonValue = JsonPrimitive | JsonPrimitive[]; export interface JsonParseOptions { pretty: boolean; } -export enum FlushStrategy { - rolling = "rolling", - sliding = "sliding", -} - -export type AccumulatorByIteratee = (event: T, bufferChunk: T) => boolean; diff --git a/src/functions/batch.ts b/src/functions/batch.ts index 0d0f314..e9f8915 100644 --- a/src/functions/batch.ts +++ b/src/functions/batch.ts @@ -1,12 +1,10 @@ -import { Transform } from "stream"; -import { TransformOptions } from "./baseDefinitions"; +import { Transform, TransformOptions } from "stream"; export function batch( batchSize: number = 1000, maxBatchAge: number = 500, options: TransformOptions = { - readableObjectMode: true, - writableObjectMode: true, + objectMode: true, }, ): Transform { let buffer: any[] = []; diff --git a/src/functions/collect.ts b/src/functions/collect.ts index 38cd6ea..3c081bb 100644 --- a/src/functions/collect.ts +++ b/src/functions/collect.ts @@ -1,9 +1,6 @@ -import { Transform } from "stream"; -import { ThroughOptions } from "./baseDefinitions"; +import { Transform, TransformOptions } from "stream"; -export function collect( - options: ThroughOptions = { objectMode: false }, -): Transform { +export function collect(options: TransformOptions = {}): Transform { const collected: any[] = []; return new Transform({ ...options, diff --git a/src/functions/flatMap.ts b/src/functions/flatMap.ts index 2abb726..dd7820d 100644 --- a/src/functions/flatMap.ts +++ b/src/functions/flatMap.ts @@ -1,13 +1,11 @@ -import { Transform } from "stream"; -import { TransformOptions } from "./baseDefinitions"; +import { Transform, TransformOptions } from "stream"; export function flatMap( mapper: | ((chunk: T, encoding: string) => R[]) | ((chunk: T, encoding: string) => Promise), options: TransformOptions = { - readableObjectMode: true, - writableObjectMode: true, + objectMode: true, }, ): Transform { return new Transform({ diff --git a/src/functions/map.ts b/src/functions/map.ts index 589f0a9..38d6a59 100644 --- a/src/functions/map.ts +++ b/src/functions/map.ts @@ -1,12 +1,8 @@ -import { Transform } from "stream"; -import { TransformOptions } from "./baseDefinitions"; +import { Transform, TransformOptions } from "stream"; export function map( mapper: (chunk: T, encoding: string) => R, - options: TransformOptions = { - readableObjectMode: true, - writableObjectMode: true, - }, + options: TransformOptions = { objectMode: true }, ): Transform { return new Transform({ ...options, diff --git a/src/functions/parallelMap.ts b/src/functions/parallelMap.ts index 6bc6b79..8cb3e80 100644 --- a/src/functions/parallelMap.ts +++ b/src/functions/parallelMap.ts @@ -1,14 +1,12 @@ -import { Transform } from "stream"; +import { Transform, TransformOptions } from "stream"; import { sleep } from "../helpers"; -import { TransformOptions } from "./baseDefinitions"; export function parallelMap( mapper: (data: T) => R, parallel: number = 10, sleepTime: number = 5, options: TransformOptions = { - readableObjectMode: true, - writableObjectMode: true, + objectMode: true, }, ) { let inflight = 0; diff --git a/src/functions/rate.ts b/src/functions/rate.ts index cb5cbfb..083d854 100644 --- a/src/functions/rate.ts +++ b/src/functions/rate.ts @@ -1,14 +1,12 @@ -import { Transform } from "stream"; +import { Transform, TransformOptions } from "stream"; import { performance } from "perf_hooks"; import { sleep } from "../helpers"; -import { TransformOptions } from "./baseDefinitions"; export function rate( targetRate: number = 50, period: number = 1, options: TransformOptions = { - readableObjectMode: true, - writableObjectMode: true, + objectMode: true, }, ): Transform { const deltaMS = ((1 / targetRate) * 1000) / period; // Skip a full period diff --git a/src/functions/reduce.ts b/src/functions/reduce.ts index ff76025..9f19ca4 100644 --- a/src/functions/reduce.ts +++ b/src/functions/reduce.ts @@ -1,5 +1,4 @@ -import { Transform } from "stream"; -import { TransformOptions } from "./baseDefinitions"; +import { Transform, TransformOptions } from "stream"; export function reduce( iteratee: @@ -7,14 +6,12 @@ export function reduce( | ((previousValue: R, chunk: T, encoding: string) => Promise), initialValue: R, options: TransformOptions = { - readableObjectMode: true, - writableObjectMode: true, + objectMode: true, }, ) { let value = initialValue; return new Transform({ - readableObjectMode: options.readableObjectMode, - writableObjectMode: options.writableObjectMode, + ...options, async transform(chunk: T, encoding, callback) { value = await iteratee(value, chunk, encoding); callback(); diff --git a/src/functions/unbatch.ts b/src/functions/unbatch.ts index 0f9b3f6..93d6bfe 100644 --- a/src/functions/unbatch.ts +++ b/src/functions/unbatch.ts @@ -1,10 +1,8 @@ -import { Transform } from "stream"; -import { TransformOptions } from "./baseDefinitions"; +import { Transform, TransformOptions } from "stream"; export function unbatch( options: TransformOptions = { - readableObjectMode: true, - writableObjectMode: true, + objectMode: true, }, ) { return new Transform({ diff --git a/tests/accumulator.spec.ts b/tests/accumulator.spec.ts index 9e08c74..51d6098 100644 --- a/tests/accumulator.spec.ts +++ b/tests/accumulator.spec.ts @@ -2,7 +2,7 @@ import test from "ava"; import { expect } from "chai"; import { Readable } from "stream"; import { accumulator, accumulatorBy } from "../src"; -import { FlushStrategy } from "../src/functions/baseDefinitions"; +import { FlushStrategy } from "../src/functions/accumulator"; import { performance } from "perf_hooks"; test.cb("accumulator() rolling", t => { diff --git a/tests/demux.spec.ts b/tests/demux.spec.ts index 2d43ec1..370e117 100644 --- a/tests/demux.spec.ts +++ b/tests/demux.spec.ts @@ -379,6 +379,7 @@ test.cb.only( const construct = (destKey: string) => { const first = map( (chunk: Chunk) => { + console.log("1: ", chunk); chunk.mapped.push(1); return chunk; }, @@ -387,7 +388,9 @@ test.cb.only( const second = map( async (chunk: Chunk) => { + console.log("2: ", chunk); await sleep(slowProcessorSpeed); + console.log("2 done ", chunk); chunk.mapped.push(2); return chunk; }, @@ -408,6 +411,7 @@ test.cb.only( // This event should be received after at least 5 * slowProcessorSpeed (two are read immediately by first and second, 5 remaining in demux before drain event) _demux.on("drain", () => { expect(_demux._writableState.length).to.be.equal(0); + console.log(performance.now() - start); expect(performance.now() - start).to.be.greaterThan( slowProcessorSpeed * (input.length - 2), ); @@ -427,7 +431,7 @@ test.cb.only( const start = performance.now(); input.forEach(item => { - _demux.write(item); + console.log(_demux.write(item)); }); }, ); @@ -457,6 +461,7 @@ test.cb( const construct = (destKey: string) => { const first = map( (chunk: Chunk) => { + console.log("1: ", chunk); chunk.mapped.push(1); return chunk; }, @@ -464,6 +469,7 @@ test.cb( ); const second = map( (chunk: Chunk) => { + console.log("2: ", chunk); chunk.mapped.push(2); return chunk; }, @@ -472,7 +478,9 @@ test.cb( const third = map( async (chunk: Chunk) => { + console.log("3: ", chunk); await sleep(slowProcessorSpeed); + console.log(" 3 done ", chunk); chunk.mapped.push(3); return chunk; }, @@ -496,6 +504,7 @@ test.cb( // This event should be received after at least 3 * slowProcessorSpeed (two are read immediately by first and second, 3 remaining in demux before drain event) _demux.on("drain", () => { expect(_demux._writableState.length).to.be.equal(0); + console.log(performance.now() - start); expect(performance.now() - start).to.be.greaterThan( slowProcessorSpeed * (input.length - 4), ); @@ -515,7 +524,7 @@ test.cb( const start = performance.now(); input.forEach(item => { - _demux.write(item); + console.log(_demux.write(item)); }); }, ); From f177f95f52707ea34ffc1842a3ca62d9c6969bcf Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 26 Sep 2019 09:24:58 -0400 Subject: [PATCH 64/69] Remove logs --- tests/demux.spec.ts | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/tests/demux.spec.ts b/tests/demux.spec.ts index 370e117..2d43ec1 100644 --- a/tests/demux.spec.ts +++ b/tests/demux.spec.ts @@ -379,7 +379,6 @@ test.cb.only( const construct = (destKey: string) => { const first = map( (chunk: Chunk) => { - console.log("1: ", chunk); chunk.mapped.push(1); return chunk; }, @@ -388,9 +387,7 @@ test.cb.only( const second = map( async (chunk: Chunk) => { - console.log("2: ", chunk); await sleep(slowProcessorSpeed); - console.log("2 done ", chunk); chunk.mapped.push(2); return chunk; }, @@ -411,7 +408,6 @@ test.cb.only( // This event should be received after at least 5 * slowProcessorSpeed (two are read immediately by first and second, 5 remaining in demux before drain event) _demux.on("drain", () => { expect(_demux._writableState.length).to.be.equal(0); - console.log(performance.now() - start); expect(performance.now() - start).to.be.greaterThan( slowProcessorSpeed * (input.length - 2), ); @@ -431,7 +427,7 @@ test.cb.only( const start = performance.now(); input.forEach(item => { - console.log(_demux.write(item)); + _demux.write(item); }); }, ); @@ -461,7 +457,6 @@ test.cb( const construct = (destKey: string) => { const first = map( (chunk: Chunk) => { - console.log("1: ", chunk); chunk.mapped.push(1); return chunk; }, @@ -469,7 +464,6 @@ test.cb( ); const second = map( (chunk: Chunk) => { - console.log("2: ", chunk); chunk.mapped.push(2); return chunk; }, @@ -478,9 +472,7 @@ test.cb( const third = map( async (chunk: Chunk) => { - console.log("3: ", chunk); await sleep(slowProcessorSpeed); - console.log(" 3 done ", chunk); chunk.mapped.push(3); return chunk; }, @@ -504,7 +496,6 @@ test.cb( // This event should be received after at least 3 * slowProcessorSpeed (two are read immediately by first and second, 3 remaining in demux before drain event) _demux.on("drain", () => { expect(_demux._writableState.length).to.be.equal(0); - console.log(performance.now() - start); expect(performance.now() - start).to.be.greaterThan( slowProcessorSpeed * (input.length - 4), ); @@ -524,7 +515,7 @@ test.cb( const start = performance.now(); input.forEach(item => { - console.log(_demux.write(item)); + _demux.write(item); }); }, ); From f6e3a03eb7108a10b561c71219a7756a0039724a Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 26 Sep 2019 09:44:42 -0400 Subject: [PATCH 65/69] Add TODO --- src/functions/demux.ts | 27 +++++++++------------------ 1 file changed, 9 insertions(+), 18 deletions(-) diff --git a/src/functions/demux.ts b/src/functions/demux.ts index 98a1225..66ee487 100644 --- a/src/functions/demux.ts +++ b/src/functions/demux.ts @@ -16,32 +16,31 @@ const eventsTarget = { error: EventSubscription.Self, finish: EventSubscription.Self, pause: EventSubscription.Self, - pipe: EventSubscription.Unhandled, + pipe: EventSubscription.Self, readable: EventSubscription.Self, resume: EventSubscription.Self, - unpipe: EventSubscription.Unhandled, + unpipe: EventSubscription.Self, }; +type DemuxStreams = NodeJS.WritableStream | NodeJS.ReadWriteStream; + export function demux( - construct: () => NodeJS.WritableStream | NodeJS.ReadWriteStream, + construct: () => DemuxStreams, demuxBy: string | ((chunk: any) => string), options?: WritableOptions, ): Writable { return new Demux(construct, demuxBy, options); } +// @TODO handle pipe event ie) Multiplex class Demux extends Writable { private streamsByKey: { - [key: string]: NodeJS.WritableStream | NodeJS.ReadWriteStream; + [key: string]: DemuxStreams; }; private demuxer: (chunk: any) => string; - private construct: ( - destKey?: string, - ) => NodeJS.WritableStream | NodeJS.ReadWriteStream; + private construct: (destKey?: string) => DemuxStreams; constructor( - construct: ( - destKey?: string, - ) => NodeJS.WritableStream | NodeJS.ReadWriteStream, + construct: (destKey?: string) => DemuxStreams, demuxBy: string | ((chunk: any) => string), options: WritableOptions = {}, ) { @@ -76,10 +75,6 @@ class Demux extends Writable { this.streamsByKey[key].on(event, cb), ); break; - case EventSubscription.Unhandled: - throw new Error( - "Stream must be multiplexed before handling this event", - ); default: super.on(event, cb); } @@ -96,10 +91,6 @@ class Demux extends Writable { this.streamsByKey[key].once(event, cb), ); break; - case EventSubscription.Unhandled: - throw new Error( - "Stream must be multiplexed before handling this event", - ); default: super.once(event, cb); } From b8bd69eb012bbfb6fb4e7ab12de19ff1d60e26f3 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 26 Sep 2019 09:53:15 -0400 Subject: [PATCH 66/69] Split rate tests --- tests/rate.spec.ts | 118 +++++++++++++++++++++++++++------------------ 1 file changed, 70 insertions(+), 48 deletions(-) diff --git a/tests/rate.spec.ts b/tests/rate.spec.ts index acd3647..1c26554 100644 --- a/tests/rate.spec.ts +++ b/tests/rate.spec.ts @@ -4,64 +4,86 @@ import test from "ava"; import { expect } from "chai"; import { rate } from "../src"; -test.cb("rate() sends data at desired rate", t => { - t.plan(9); - const fastRate = 150; - const medRate = 50; - const slowRate = 1; - const sourceFast = new Readable({ objectMode: true }); - const sourceMed = new Readable({ objectMode: true }); - const sourceSlow = new Readable({ objectMode: true }); - const expectedElements = ["a", "b", "c"]; +test.cb("rate() sends data at a rate of 150", t => { + t.plan(5); + const targetRate = 150; + const source = new Readable({ objectMode: true }); + const expectedElements = ["a", "b", "c", "d", "e"]; const start = performance.now(); let i = 0; - let j = 0; - let k = 0; - sourceFast - .pipe(rate(fastRate)) + source + .pipe(rate(targetRate)) .on("data", (element: string[]) => { const currentRate = (i / (performance.now() - start)) * 1000; expect(element).to.deep.equal(expectedElements[i]); - expect(currentRate).lessThan(fastRate); + expect(currentRate).lessThan(targetRate); t.pass(); i++; }) - .on("error", t.end); - - sourceMed - .pipe(rate(medRate)) - .on("data", (element: string[]) => { - const currentRate = (j / (performance.now() - start)) * 1000; - expect(element).to.deep.equal(expectedElements[j]); - expect(currentRate).lessThan(medRate); - t.pass(); - j++; - }) - .on("error", t.end); - - sourceSlow - .pipe(rate(slowRate)) - .on("data", (element: string[]) => { - const currentRate = (k / (performance.now() - start)) * 1000; - expect(element).to.deep.equal(expectedElements[k]); - expect(currentRate).lessThan(slowRate); - t.pass(); - k++; - }) .on("error", t.end) .on("end", t.end); - sourceFast.push("a"); - sourceFast.push("b"); - sourceFast.push("c"); - sourceFast.push(null); - sourceMed.push("a"); - sourceMed.push("b"); - sourceMed.push("c"); - sourceMed.push(null); - sourceSlow.push("a"); - sourceSlow.push("b"); - sourceSlow.push("c"); - sourceSlow.push(null); + source.push("a"); + source.push("b"); + source.push("c"); + source.push("d"); + source.push("e"); + source.push(null); +}); + +test.cb("rate() sends data at a rate of 50", t => { + t.plan(5); + const targetRate = 50; + const source = new Readable({ objectMode: true }); + const expectedElements = ["a", "b", "c", "d", "e"]; + const start = performance.now(); + let i = 0; + + source + .pipe(rate(targetRate)) + .on("data", (element: string[]) => { + const currentRate = (i / (performance.now() - start)) * 1000; + expect(element).to.deep.equal(expectedElements[i]); + expect(currentRate).lessThan(targetRate); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push("d"); + source.push("e"); + source.push(null); +}); + +test.cb("rate() sends data at a rate of 1", t => { + t.plan(5); + const targetRate = 1; + const source = new Readable({ objectMode: true }); + const expectedElements = ["a", "b", "c", "d", "e"]; + const start = performance.now(); + let i = 0; + + source + .pipe(rate(targetRate)) + .on("data", (element: string[]) => { + const currentRate = (i / (performance.now() - start)) * 1000; + expect(element).to.deep.equal(expectedElements[i]); + expect(currentRate).lessThan(targetRate); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push("d"); + source.push("e"); + source.push(null); }); From 4f80d44ed824e007468d0f113df1a7d4d4d371fd Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 26 Sep 2019 10:36:36 -0400 Subject: [PATCH 67/69] Improve test --- tests/compose.spec.ts | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/tests/compose.spec.ts b/tests/compose.spec.ts index ad2be6c..bae5825 100644 --- a/tests/compose.spec.ts +++ b/tests/compose.spec.ts @@ -172,7 +172,8 @@ test("compose() writable length should be less than highWaterMark when handing w test("compose() should emit drain event ~rate * highWaterMark ms for every write that causes backpressure", async t => { t.plan(7); - const _rate = 25; + const _rate = 100; + const highWaterMark = 2; return new Promise(async (resolve, reject) => { interface Chunk { key: string; @@ -199,7 +200,7 @@ test("compose() should emit drain event ~rate * highWaterMark ms for every write const composed = compose( [first, second], - { objectMode: true, highWaterMark: 2 }, + { objectMode: true, highWaterMark }, ); composed.on("error", err => { reject(); @@ -208,7 +209,10 @@ test("compose() should emit drain event ~rate * highWaterMark ms for every write composed.on("drain", () => { t.pass(); expect(composed._writableState.length).to.be.equal(0); - expect(performance.now() - start).to.be.greaterThan(_rate); + expect(performance.now() - start).to.be.closeTo( + _rate * highWaterMark, + 10, + ); }); composed.on("data", (chunk: Chunk) => { @@ -220,21 +224,22 @@ test("compose() should emit drain event ~rate * highWaterMark ms for every write const input = [ { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, - { key: "a", mapped: [] }, + { key: "b", mapped: [] }, + { key: "c", mapped: [] }, + { key: "d", mapped: [] }, + { key: "e", mapped: [] }, ]; let start = performance.now(); let pendingReads = input.length; + start = performance.now(); for (const item of input) { const res = composed.write(item); - expect(composed._writableState.length).to.be.at.most(2); + expect(composed._writableState.length).to.be.at.most(highWaterMark); t.pass(); if (!res) { + await sleep(_rate * highWaterMark * 2); start = performance.now(); - await sleep(100); } } }); @@ -279,8 +284,9 @@ test.cb( composed.on("drain", () => { expect(composed._writableState.length).to.be.equal(0); - expect(performance.now() - start).to.be.greaterThan( + expect(performance.now() - start).to.be.closeTo( _rate * input.length, + 25, ); t.pass(); }); From 9e14d8c044aa998071039c9d88a34bf2b67c9de1 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 26 Sep 2019 14:11:17 -0400 Subject: [PATCH 68/69] Update package json --- package.json | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 76fd2bf..24c528a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "mhysa", - "version": "1.0.2", + "version": "0.0.1-alpha", "description": "Streams and event emitter utils for Node.js", "keywords": [ "promise", @@ -11,14 +11,27 @@ "author": { "name": "Wenzil" }, + "contributors": [ + { + "name": "jerry", + "email": "jerry@jogogo.co" + }, + { + "name": "lewis", + "email": "lewis@jogogo.co" + } + ], "license": "MIT", "main": "dist/index.js", "types": "dist/index.d.ts", "files": [ "dist" ], + "publishConfig": { + "registry": "https://npm.dev.jogogo.co/" + }, "repository": { - "url": "git@github.com:Wenzil/Mhysa.git", + "url": "git@github.com:Jogogoplay/analytics-client.git", "type": "git" }, "scripts": { From ac21fb7ea6ac000023d391aef1f6a4c5d3777d51 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 26 Sep 2019 14:31:23 -0400 Subject: [PATCH 69/69] Update package json --- package.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index 24c528a..bd979ce 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { - "name": "mhysa", - "version": "0.0.1-alpha", + "name": "@jogogo/mhysa", + "version": "0.0.1-alpha.1", "description": "Streams and event emitter utils for Node.js", "keywords": [ "promise", @@ -31,7 +31,7 @@ "registry": "https://npm.dev.jogogo.co/" }, "repository": { - "url": "git@github.com:Jogogoplay/analytics-client.git", + "url": "git@github.com:Jogogoplay/mhysa.git", "type": "git" }, "scripts": {