From a40b1bf38c6b85aa444215dbaaa84e91765ffcb6 Mon Sep 17 00:00:00 2001 From: Jerry Kurian Date: Thu, 15 Aug 2019 11:54:50 -0400 Subject: [PATCH] Save --- package.json | 6 +- src/functions/accumulator/accumulator.spec.ts | 323 ++++ src/functions/accumulator/definitions.ts | 6 + src/functions/accumulator/index.ts | 169 ++ src/functions/baseFunctions.ts | 20 + src/functions/batch/index.ts | 47 + src/functions/child/index.ts | 12 + src/functions/collect/index.ts | 26 + src/functions/concat/index.ts | 40 + src/functions/duplex/index.ts | 33 + src/functions/filter/filter.spec.ts | 102 + src/functions/filter/index.ts | 41 + src/functions/flatMap/flatMap.spec.ts | 100 + src/functions/flatMap/index.ts | 39 + src/functions/fromArray/fromArray.spec.ts | 45 + src/functions/fromArray/index.ts | 19 + src/functions/functions.spec.ts | 1683 ----------------- src/functions/functions.ts | 765 -------- src/functions/index.ts | 11 +- src/functions/join/index.ts | 31 + src/functions/join/join.spec.ts | 56 + src/functions/last/index.ts | 14 + src/functions/map/index.ts | 29 + src/functions/map/map.spec.ts | 107 ++ src/functions/merge/index.ts | 36 + src/functions/parallelMap/index.ts | 44 + src/functions/parse/index.ts | 26 + src/functions/rate/index.ts | 31 + src/functions/reduce/index.ts | 57 + src/functions/reduce/reduce.spec.ts | 98 + src/functions/replace/index.ts | 33 + src/functions/replace/replace.spec.ts | 80 + src/functions/split/index.ts | 34 + src/functions/split/split.spec.ts | 98 + src/functions/stringify/index.ts | 22 + src/functions/unbatch/index.ts | 21 + tsconfig.json | 4 +- yarn.lock | 289 ++- 38 files changed, 1981 insertions(+), 2616 deletions(-) create mode 100644 src/functions/accumulator/accumulator.spec.ts create mode 100644 src/functions/accumulator/definitions.ts create mode 100644 src/functions/accumulator/index.ts create mode 100644 src/functions/baseFunctions.ts create mode 100644 src/functions/batch/index.ts create mode 100644 src/functions/child/index.ts create mode 100644 src/functions/collect/index.ts create mode 100644 src/functions/concat/index.ts create mode 100644 src/functions/duplex/index.ts create mode 100644 src/functions/filter/filter.spec.ts create mode 100644 src/functions/filter/index.ts create mode 100644 src/functions/flatMap/flatMap.spec.ts create mode 100644 src/functions/flatMap/index.ts create mode 100644 src/functions/fromArray/fromArray.spec.ts create mode 100644 src/functions/fromArray/index.ts delete mode 100644 src/functions/functions.spec.ts delete mode 100644 src/functions/functions.ts create mode 100644 src/functions/join/index.ts create mode 100644 src/functions/join/join.spec.ts create mode 100644 src/functions/last/index.ts create mode 100644 src/functions/map/index.ts create mode 100644 src/functions/map/map.spec.ts create mode 100644 src/functions/merge/index.ts create mode 100644 src/functions/parallelMap/index.ts create mode 100644 src/functions/parse/index.ts create mode 100644 src/functions/rate/index.ts create mode 100644 src/functions/reduce/index.ts create mode 100644 src/functions/reduce/reduce.spec.ts create mode 100644 src/functions/replace/index.ts create mode 100644 src/functions/replace/replace.spec.ts create mode 100644 src/functions/split/index.ts create mode 100644 src/functions/split/split.spec.ts create mode 100644 src/functions/stringify/index.ts create mode 100644 src/functions/unbatch/index.ts diff --git a/package.json b/package.json index 9f86034..a5c8e2e 100644 --- a/package.json +++ b/package.json @@ -23,6 +23,7 @@ }, 
"scripts": { "test": "ava", + "test:debug": "NODE_PATH=src node inspect node_modules/ava/profile.js", "lint": "tslint -p tsconfig.json", "validate:tslint": "tslint-config-prettier-check ./tslint.json", "prepublishOnly": "yarn lint && yarn test && yarn tsc" @@ -30,13 +31,12 @@ "dependencies": {}, "devDependencies": { "@types/chai": "^4.1.7", - "@types/node": "^10.12.10", - "@types/typescript": "^2.0.0", + "@types/node": "^12.7.2", "ava": "^1.0.0-rc.2", "chai": "^4.2.0", "mhysa": "./", "prettier": "^1.14.3", - "ts-node": "^7.0.1", + "ts-node": "^8.3.0", "tslint": "^5.11.0", "tslint-config-prettier": "^1.16.0", "tslint-plugin-prettier": "^2.0.1", diff --git a/src/functions/accumulator/accumulator.spec.ts b/src/functions/accumulator/accumulator.spec.ts new file mode 100644 index 0000000..c22b2fe --- /dev/null +++ b/src/functions/accumulator/accumulator.spec.ts @@ -0,0 +1,323 @@ +import test from "ava"; +import { expect } from "chai"; +import { Readable } from "stream"; +import { accumulator, accumulatorBy } from "."; +import { FlushStrategy } from "./definitions"; + +test.cb("accumulator() rolling", t => { + t.plan(3); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const firstFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; + const secondFlush = [{ ts: 2, key: "d" }, { ts: 3, key: "e" }]; + const thirdFlush = [{ ts: 4, key: "f" }]; + const flushes = [firstFlush, secondFlush, thirdFlush]; + + source + .pipe(accumulator(2, undefined, FlushStrategy.rolling)) + .on("data", (flush: TestObject[]) => { + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; + }) + .on("error", (e: any) => { + t.end(e); + }) + .on("end", () => { + t.end(); + }); + [...firstFlush, ...secondFlush, ...thirdFlush].forEach(item => { + source.push(item); + }); + source.push(null); +}); + +test.cb("accumulator() rolling with key", t => { + t.plan(2); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const firstFlush = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 2, key: "d" }, + ]; + const secondFlush = [{ ts: 3, key: "e" }]; + const flushes = [firstFlush, secondFlush]; + + source + .pipe(accumulator(3, undefined, FlushStrategy.rolling, "ts")) + .on("data", (flush: TestObject[]) => { + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; + }) + .on("error", (e: any) => { + t.end(e); + }) + .on("end", () => { + t.end(); + }); + [...firstFlush, ...secondFlush].forEach(item => { + source.push(item); + }); + source.push(null); +}); + +test.cb("accumulatorBy() rolling", t => { + t.plan(2); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const firstFlush = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 2, key: "d" }, + ]; + const secondFlush = [{ ts: 3, key: "e" }]; + const flushes = [firstFlush, secondFlush]; + + source + .pipe( + accumulatorBy( + undefined, + FlushStrategy.rolling, + (event: TestObject, bufferChunk: TestObject) => { + return bufferChunk.ts + 3 <= event.ts; + }, + ), + ) + .on("data", (flush: TestObject[]) => { + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; + }) + .on("error", (e: any) => { + t.end(e); + }) + .on("end", () => { + t.end(); + }); + [...firstFlush, ...secondFlush].forEach(item => { + source.push(item); + }); + source.push(null); +}); + 
+test.cb("accumulator() sliding", t => { + t.plan(4); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const input = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 4, key: "d" }, + ]; + const firstFlush = [{ ts: 0, key: "a" }]; + const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; + const thirdFlush = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + ]; + const fourthFlush = [ + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 4, key: "d" }, + ]; + + const flushes = [firstFlush, secondFlush, thirdFlush, fourthFlush]; + source + .pipe(accumulator(3, undefined, FlushStrategy.sliding)) + .on("data", (flush: TestObject[]) => { + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; + }) + .on("error", (e: any) => { + t.end(e); + }) + .on("end", () => { + t.end(); + }); + input.forEach(item => { + source.push(item); + }); + source.push(null); +}); + +test.cb("accumulator() sliding with key", t => { + t.plan(6); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const input = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 3, key: "d" }, + { ts: 5, key: "f" }, + { ts: 6, key: "g" }, + ]; + const firstFlush = [{ ts: 0, key: "a" }]; + const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; + const thirdFlush = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + ]; + const fourthFlush = [ + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 3, key: "d" }, + ]; + const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }]; + const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }]; + + const flushes = [ + firstFlush, + secondFlush, + thirdFlush, + fourthFlush, + fifthFlush, + sixthFlush, + ]; + source + .pipe(accumulator(3, undefined, FlushStrategy.sliding, "ts")) + .on("data", (flush: TestObject[]) => { + t.deepEqual(flush, flushes[chunkIndex]); + chunkIndex++; + }) + .on("error", (e: any) => { + t.end(e); + }) + .on("end", () => { + t.end(); + }); + input.forEach(item => { + source.push(item); + }); + source.push(null); +}); + +test.cb("accumulatorBy() sliding", t => { + t.plan(6); + let chunkIndex = 0; + interface TestObject { + ts: number; + key: string; + } + const source = new Readable({ objectMode: true }); + const input = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 3, key: "d" }, + { ts: 5, key: "f" }, + { ts: 6, key: "g" }, + ]; + const firstFlush = [{ ts: 0, key: "a" }]; + const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; + const thirdFlush = [ + { ts: 0, key: "a" }, + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + ]; + const fourthFlush = [ + { ts: 1, key: "b" }, + { ts: 2, key: "c" }, + { ts: 3, key: "d" }, + ]; + const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }]; + const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }]; + + const flushes = [ + firstFlush, + secondFlush, + thirdFlush, + fourthFlush, + fifthFlush, + sixthFlush, + ]; + source + .pipe( + accumulatorBy( + undefined, + FlushStrategy.sliding, + (event: TestObject, bufferChunk: TestObject) => { + return bufferChunk.ts + 3 <= event.ts ? 
true : false;
+            },
+        ),
+    )
+        .on("data", (flush: TestObject[]) => {
+            t.deepEqual(flush, flushes[chunkIndex]);
+            chunkIndex++;
+        })
+        .on("error", (e: any) => {
+            t.end(e);
+        })
+        .on("end", () => {
+            t.end();
+        });
+    input.forEach(item => {
+        source.push(item);
+    });
+    source.push(null);
+});
+
+test.cb("accumulatorBy() sliding should throw", t => {
+    t.plan(2);
+    interface TestObject {
+        ts: number;
+        key: string;
+    }
+    const source = new Readable({ objectMode: true });
+    const input = [
+        { ts: 0, key: "a" },
+        { ts: 1, key: "b" },
+        { ts: 2, key: "c" },
+        { ts: 3, key: "d" },
+    ];
+    const accumulatorStream = accumulatorBy(
+        undefined,
+        FlushStrategy.sliding,
+        (event: TestObject, bufferChunk: TestObject) => {
+            if (event.key !== "a" && event.key !== "b") {
+                throw new Error("Failed mapping");
+            }
+            return bufferChunk.ts + 3 <= event.ts;
+        },
+    );
+    source
+        .pipe(accumulatorStream)
+        .on("error", (err: any) => {
+            // Re-pipe and resume so the remaining events keep flowing after each error
+            source.pipe(accumulatorStream);
+            accumulatorStream.resume();
+            expect(err.message).to.equal("Failed mapping");
+            t.pass();
+        })
+        .on("end", () => {
+            t.end();
+        });
+
+    input.forEach(item => {
+        source.push(item);
+    });
+    source.push(null);
+});
diff --git a/src/functions/accumulator/definitions.ts b/src/functions/accumulator/definitions.ts
new file mode 100644
index 0000000..bd6ec50
--- /dev/null
+++ b/src/functions/accumulator/definitions.ts
@@ -0,0 +1,6 @@
+export enum FlushStrategy {
+    rolling = "rolling",
+    sliding = "sliding",
+}
+
+export type AccumulatorByIteratee<T> = (event: T, bufferChunk: T) => boolean;
diff --git a/src/functions/accumulator/index.ts b/src/functions/accumulator/index.ts
new file mode 100644
index 0000000..a801dad
--- /dev/null
+++ b/src/functions/accumulator/index.ts
@@ -0,0 +1,169 @@
+import { Transform } from "stream";
+import { AccumulatorByIteratee, FlushStrategy } from "./definitions";
+import { batch } from "../../index";
+
+function _accumulator<T>(
+    accumulateBy: (data: T, buffer: T[], stream: Transform) => void,
+    shouldFlush: boolean = true,
+) {
+    const buffer: T[] = [];
+    return new Transform({
+        readableObjectMode: true,
+        writableObjectMode: true,
+        transform(data: any, encoding, callback) {
+            try {
+                accumulateBy(data, buffer, this);
+                callback();
+            } catch (err) {
+                callback(err);
+            }
+        },
+        flush(callback) {
+            if (shouldFlush) {
+                this.push(buffer);
+            }
+            callback();
+        },
+    });
+}
+
+function _sliding<T>(
+    windowLength: number,
+    rate: number | undefined,
+    key?: string,
+): (event: T, buffer: T[], stream: Transform) => void {
+    return (event: T, buffer: T[], stream: Transform) => {
+        if (key) {
+            let index = 0;
+            while (
+                index < buffer.length &&
+                buffer[index][key] + windowLength <= event[key]
+            ) {
+                index++;
+            }
+            buffer.splice(0, index);
+        } else if (buffer.length === windowLength) {
+            buffer.shift();
+        }
+        buffer.push(event);
+        stream.push(buffer);
+    };
+}
+
+function _slidingByFunction<T>(
+    rate: number | undefined,
+    iteratee: AccumulatorByIteratee<T>,
+): (event: T, buffer: T[], stream: Transform) => void {
+    return (event: T, buffer: T[], stream: Transform) => {
+        let index = 0;
+        while (index < buffer.length && iteratee(event, buffer[index])) {
+            index++;
+        }
+        buffer.splice(0, index);
+        buffer.push(event);
+        stream.push(buffer);
+    };
+}
+
+function _rollingByFunction<T>(
+    rate: number | undefined,
+    iteratee: AccumulatorByIteratee<T>,
+): (event: T, buffer: T[], stream: Transform) => void {
+    return (event: T, buffer: T[], stream: Transform) => {
+        if (iteratee) {
+            if (buffer.length > 0 && iteratee(event, buffer[0])) {
+                stream.push(buffer.slice(0));
+                buffer.length = 0;
+            }
+        }
+        buffer.push(event);
+    };
+}
+
+function _rolling<T>(
+    windowLength: number,
+    rate: number | undefined,
+    key?: string,
+): (event: T, buffer: T[], stream: Transform) => void {
+    return (event: T, buffer: T[], stream: Transform) => {
+        if (key) {
+            if (event[key] === undefined) {
+                stream.emit(
+                    "error",
+                    new Error(
+                        `Key is missing in event: (${key}, ${JSON.stringify(
+                            event,
+                        )})`,
+                    ),
+                );
+            } else if (
+                buffer.length > 0 &&
+                buffer[0][key] + windowLength <= event[key]
+            ) {
+                stream.push(buffer.slice(0));
+                buffer.length = 0;
+            }
+        } else if (buffer.length === windowLength) {
+            stream.push(buffer.slice(0));
+            buffer.length = 0;
+        }
+        buffer.push(event);
+    };
+}
+
+export function accumulator(
+    batchSize: number,
+    batchRate: number | undefined,
+    flushStrategy: FlushStrategy,
+    keyBy?: string,
+): Transform {
+    if (flushStrategy === FlushStrategy.sliding) {
+        return sliding(batchSize, batchRate, keyBy);
+    } else if (flushStrategy === FlushStrategy.rolling) {
+        return rolling(batchSize, batchRate, keyBy);
+    } else {
+        return batch(batchSize, batchRate);
+    }
+}
+
+export function accumulatorBy<T, S extends FlushStrategy>(
+    batchRate: number | undefined,
+    flushStrategy: S,
+    iteratee: AccumulatorByIteratee<T>,
+): Transform {
+    if (flushStrategy === FlushStrategy.sliding) {
+        return slidingBy(batchRate, iteratee);
+    } else {
+        return rollingBy(batchRate, iteratee);
+    }
+}
+
+function sliding(
+    windowLength: number,
+    rate: number | undefined,
+    key?: string,
+): Transform {
+    return _accumulator(_sliding(windowLength, rate, key), false);
+}
+
+function slidingBy<T>(
+    rate: number | undefined,
+    iteratee: AccumulatorByIteratee<T>,
+): Transform {
+    return _accumulator(_slidingByFunction(rate, iteratee), false);
+}
+
+function rolling(
+    windowLength: number,
+    rate: number | undefined,
+    key?: string,
+): Transform {
+    return _accumulator(_rolling(windowLength, rate, key));
+}
+
+function rollingBy<T>(
+    rate: number | undefined,
+    iteratee: AccumulatorByIteratee<T>,
+): Transform {
+    return _accumulator(_rollingByFunction(rate, iteratee));
+}
diff --git a/src/functions/baseFunctions.ts b/src/functions/baseFunctions.ts
new file mode 100644
index 0000000..7117746
--- /dev/null
+++ b/src/functions/baseFunctions.ts
@@ -0,0 +1,20 @@
+export { accumulator, accumulatorBy } from "./accumulator";
+export { batch } from "./batch";
+export { child } from "./child";
+export { collect } from "./collect";
+export { concat } from "./concat";
+export { duplex } from "./duplex";
+export { filter } from "./filter";
+export { flatMap } from "./flatMap";
+export { fromArray } from "./fromArray";
+export { join } from "./join";
+export { last } from "./last";
+export { map } from "./map";
+export { merge } from "./merge";
+export { parallelMap } from "./parallelMap";
+export { parse } from "./parse";
+export { rate } from "./rate";
+export { reduce } from "./reduce";
+export { split } from "./split";
+export { stringify } from "./stringify";
+export { unbatch } from "./unbatch";
diff --git a/src/functions/batch/index.ts b/src/functions/batch/index.ts
new file mode 100644
index 0000000..6ff3d87
--- /dev/null
+++ b/src/functions/batch/index.ts
@@ -0,0 +1,47 @@
+import { Transform } from "stream";
+import { TransformOptions } from "../definitions";
+/**
+ * Stores chunks of data internally in an array and emits them as a batch when batchSize is reached or maxBatchAge has elapsed.
+ *
+ * @param batchSize Size of the batches
+ * @param maxBatchAge Max lifetime of a batch, in milliseconds
+ */
+export function batch(
+    batchSize: number = 1000,
+    maxBatchAge: number = 500,
+    options: TransformOptions = {
+        readableObjectMode: true,
+        writableObjectMode: true,
+    },
+): Transform {
+    let buffer: any[] = [];
+    let timer: NodeJS.Timer | null = null;
+    const sendChunk = (self: Transform) => {
+        if (timer) {
+            clearTimeout(timer);
+        }
+        timer = null;
+        self.push(buffer);
+        buffer = [];
+    };
+    return new Transform({
+        ...options,
+        transform(chunk, encoding, callback) {
+            buffer.push(chunk);
+            if (buffer.length === batchSize) {
+                sendChunk(this);
+            } else {
+                if (timer === null) {
+                    timer = setInterval(() => {
+                        sendChunk(this);
+                    }, maxBatchAge);
+                }
+            }
+            callback();
+        },
+        flush(callback) {
+            sendChunk(this);
+            callback();
+        },
+    });
+}
diff --git a/src/functions/child/index.ts b/src/functions/child/index.ts
new file mode 100644
index 0000000..efe4f90
--- /dev/null
+++ b/src/functions/child/index.ts
@@ -0,0 +1,14 @@
+import { ChildProcess } from "child_process";
+import { duplex } from "../duplex";
+/**
+ * Return a Duplex stream from a child process' stdin and stdout
+ * @param childProcess Child process from which to create duplex stream
+ */
+export function child(childProcess: ChildProcess) {
+    if (childProcess.stdin === null) {
+        throw new Error("childProcess.stdin is null");
+    } else if (childProcess.stdout === null) {
+        throw new Error("childProcess.stdout is null");
+    }
+    return duplex(childProcess.stdin, childProcess.stdout);
+}
diff --git a/src/functions/collect/index.ts b/src/functions/collect/index.ts
new file mode 100644
index 0000000..11ad423
--- /dev/null
+++ b/src/functions/collect/index.ts
@@ -0,0 +1,26 @@
+import { Transform } from "stream";
+import { ThroughOptions } from "../definitions";
+/**
+ * Return a ReadWrite stream that collects streamed chunks into an array or buffer
+ * @param options
+ * @param options.objectMode Whether this stream should behave as a stream of objects
+ */
+export function collect(
+    options: ThroughOptions = { objectMode: false },
+): NodeJS.ReadWriteStream {
+    const collected: any[] = [];
+    return new Transform({
+        readableObjectMode: options.objectMode,
+        writableObjectMode: options.objectMode,
+        transform(data, encoding, callback) {
+            collected.push(data);
+            callback();
+        },
+        flush(callback) {
+            this.push(
+                options.objectMode ?
collected : Buffer.concat(collected), + ); + callback(); + }, + }); +} diff --git a/src/functions/concat/index.ts b/src/functions/concat/index.ts new file mode 100644 index 0000000..8064b30 --- /dev/null +++ b/src/functions/concat/index.ts @@ -0,0 +1,40 @@ +import { Readable } from "stream"; +/** + * Return a Readable stream of readable streams concatenated together + * @param streams Readable streams to concatenate + */ +export function concat(...streams: Readable[]): Readable { + let isStarted = false; + let currentStreamIndex = 0; + const startCurrentStream = () => { + if (currentStreamIndex >= streams.length) { + wrapper.push(null); + } else { + streams[currentStreamIndex] + .on("data", chunk => { + if (!wrapper.push(chunk)) { + streams[currentStreamIndex].pause(); + } + }) + .on("error", err => wrapper.emit("error", err)) + .on("end", () => { + currentStreamIndex++; + startCurrentStream(); + }); + } + }; + + const wrapper = new Readable({ + objectMode: true, + read() { + if (!isStarted) { + isStarted = true; + startCurrentStream(); + } + if (currentStreamIndex < streams.length) { + streams[currentStreamIndex].resume(); + } + }, + }); + return wrapper; +} diff --git a/src/functions/duplex/index.ts b/src/functions/duplex/index.ts new file mode 100644 index 0000000..2470da1 --- /dev/null +++ b/src/functions/duplex/index.ts @@ -0,0 +1,33 @@ +import { Duplex, Writable, Readable } from "stream"; +/** + * Return a Duplex stream from a writable stream that is assumed to somehow, when written to, + * cause the given readable stream to yield chunks + * @param writable Writable stream assumed to cause the readable stream to yield chunks when written to + * @param readable Readable stream assumed to yield chunks when the writable stream is written to + */ +export function duplex(writable: Writable, readable: Readable) { + const wrapper = new Duplex({ + readableObjectMode: true, + writableObjectMode: true, + read() { + readable.resume(); + }, + write(chunk, encoding, callback) { + return writable.write(chunk, encoding, callback); + }, + final(callback) { + writable.end(callback); + }, + }); + readable + .on("data", chunk => { + if (!wrapper.push(chunk)) { + readable.pause(); + } + }) + .on("error", err => wrapper.emit("error", err)) + .on("end", () => wrapper.push(null)); + writable.on("drain", () => wrapper.emit("drain")); + writable.on("error", err => wrapper.emit("error", err)); + return wrapper; +} diff --git a/src/functions/filter/filter.spec.ts b/src/functions/filter/filter.spec.ts new file mode 100644 index 0000000..a537372 --- /dev/null +++ b/src/functions/filter/filter.spec.ts @@ -0,0 +1,102 @@ +import test from "ava"; +import { expect } from "chai"; +import { Readable } from "stream"; +import { filter } from "."; + +test.cb("filter() filters elements synchronously", t => { + t.plan(2); + const source = new Readable({ objectMode: true }); + const expectedElements = ["a", "c"]; + let i = 0; + source + .pipe(filter((element: string) => element !== "b")) + .on("data", (element: string) => { + expect(element).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); +}); + +test.cb("filter() filters elements asynchronously", t => { + t.plan(2); + const source = new Readable({ objectMode: true }); + const expectedElements = ["a", "c"]; + let i = 0; + source + .pipe( + filter(async (element: string) => { + await Promise.resolve(); + return element !== "b"; + }), + ) + 
.on("data", (element: string) => {
+            expect(element).to.equal(expectedElements[i]);
+            t.pass();
+            i++;
+        })
+        .on("error", t.end)
+        .on("end", t.end);
+
+    source.push("a");
+    source.push("b");
+    source.push("c");
+    source.push(null);
+});
+
+test.cb("filter() emits errors during synchronous filtering", t => {
+    t.plan(2);
+    const source = new Readable({ objectMode: true });
+    source
+        .pipe(
+            filter((element: string) => {
+                if (element !== "a") {
+                    throw new Error("Failed filtering");
+                }
+                return true;
+            }),
+        )
+        .resume()
+        .on("error", err => {
+            expect(err.message).to.equal("Failed filtering");
+            t.pass();
+        })
+        .on("end", t.end);
+
+    source.push("a");
+    source.push("b");
+    source.push("c");
+    source.push(null);
+});
+
+test.cb("filter() emits errors during asynchronous filtering", t => {
+    t.plan(2);
+    const source = new Readable({ objectMode: true });
+    source
+        .pipe(
+            filter(async (element: string) => {
+                await Promise.resolve();
+                if (element !== "a") {
+                    throw new Error("Failed filtering");
+                }
+                return true;
+            }),
+        )
+        .resume()
+        .on("error", err => {
+            expect(err.message).to.equal("Failed filtering");
+            t.pass();
+        })
+        .on("end", t.end);
+
+    source.push("a");
+    source.push("b");
+    source.push("c");
+    source.push(null);
+});
diff --git a/src/functions/filter/index.ts b/src/functions/filter/index.ts
new file mode 100644
index 0000000..49e7a05
--- /dev/null
+++ b/src/functions/filter/index.ts
@@ -0,0 +1,41 @@
+import { Transform } from "stream";
+import { ThroughOptions } from "../definitions";
+/**
+ * Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold
+ * @param predicate Predicate with which to filter stream chunks
+ * @param options
+ * @param options.objectMode Whether this stream should behave as a stream of objects
+ */
+export function filter<T>(
+    predicate:
+        | ((chunk: T, encoding: string) => boolean)
+        | ((chunk: T, encoding: string) => Promise<boolean>),
+    options: ThroughOptions = {
+        objectMode: true,
+    },
+) {
+    return new Transform({
+        readableObjectMode: options.objectMode,
+        writableObjectMode: options.objectMode,
+        async transform(chunk: T, encoding, callback) {
+            let isPromise = false;
+            try {
+                const result = predicate(chunk, encoding);
+                isPromise = result instanceof Promise;
+                if (!!(await result)) {
+                    callback(undefined, chunk);
+                } else {
+                    callback();
+                }
+            } catch (err) {
+                if (isPromise) {
+                    // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly
+                    this.emit("error", err);
+                    callback();
+                } else {
+                    callback(err);
+                }
+            }
+        },
+    });
+}
diff --git a/src/functions/flatMap/flatMap.spec.ts b/src/functions/flatMap/flatMap.spec.ts
new file mode 100644
index 0000000..4e6c28d
--- /dev/null
+++ b/src/functions/flatMap/flatMap.spec.ts
@@ -0,0 +1,100 @@
+import { Readable } from "stream";
+import test from "ava";
+import { expect } from "chai";
+import { flatMap } from ".";
+
+test.cb("flatMap() maps elements synchronously", t => {
+    t.plan(6);
+    const source = new Readable({ objectMode: true });
+    const expectedElements = ["a", "A", "b", "B", "c", "C"];
+    let i = 0;
+    source
+        .pipe(flatMap((element: string) => [element, element.toUpperCase()]))
+        .on("data", (element: string) => {
+            expect(element).to.equal(expectedElements[i]);
+            t.pass();
+            i++;
+        })
+        .on("end", t.end);
+
+    source.push("a");
+    source.push("b");
+    source.push("c");
+    source.push(null);
+});
+
+test.cb("flatMap() maps elements asynchronously", t => {
+    t.plan(6);
+    const source = new Readable({ objectMode: true });
+    const expectedElements = ["a", "A", "b", "B", "c", "C"];
+    let i = 0;
+    source
+        .pipe(
+            flatMap(async (element: string) => {
+                await Promise.resolve();
+                return [element, element.toUpperCase()];
+            }),
+        )
+        .on("data", (element: string) => {
+            expect(element).to.equal(expectedElements[i]);
+            t.pass();
+            i++;
+        })
+        .on("end", t.end);
+
+    source.push("a");
+    source.push("b");
+    source.push("c");
+    source.push(null);
+});
+
+test.cb("flatMap() emits errors during synchronous mapping", t => {
+    t.plan(2);
+    const source = new Readable({ objectMode: true });
+    source
+        .pipe(
+            flatMap((element: string) => {
+                if (element !== "a") {
+                    throw new Error("Failed mapping");
+                }
+                return [element, element.toUpperCase()];
+            }),
+        )
+        .resume()
+        .on("error", err => {
+            expect(err.message).to.equal("Failed mapping");
+            t.pass();
+        })
+        .on("end", t.end);
+
+    source.push("a");
+    source.push("b");
+    source.push("c");
+    source.push(null);
+});
+
+test.cb("flatMap() emits errors during asynchronous mapping", t => {
+    t.plan(2);
+    const source = new Readable({ objectMode: true });
+    source
+        .pipe(
+            flatMap(async (element: string) => {
+                await Promise.resolve();
+                if (element !== "a") {
+                    throw new Error("Failed mapping");
+                }
+                return [element, element.toUpperCase()];
+            }),
+        )
+        .resume()
+        .on("error", err => {
+            expect(err.message).to.equal("Failed mapping");
+            t.pass();
+        })
+        .on("end", t.end);
+
+    source.push("a");
+    source.push("b");
+    source.push("c");
+    source.push(null);
+});
diff --git a/src/functions/flatMap/index.ts b/src/functions/flatMap/index.ts
new file mode 100644
index 0000000..9e90c04
--- /dev/null
+++ b/src/functions/flatMap/index.ts
@@ -0,0 +1,39 @@
+import { Transform } from "stream";
+import { TransformOptions } from "../definitions";
+/**
+ * Return a ReadWrite stream that flat maps streamed chunks
+ * @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such)
+ * @param options
+ * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects
+ * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects
+ */
+export function flatMap<T, R>(
+    mapper:
+        | ((chunk: T, encoding: string) => R[])
+        | ((chunk: T, encoding: string) => Promise<R[]>),
+    options: TransformOptions = {
+        readableObjectMode: true,
+        writableObjectMode: true,
+    },
+): NodeJS.ReadWriteStream {
+    return new Transform({
+        ...options,
+        async transform(chunk: T, encoding, callback) {
+            let isPromise = false;
+            try {
+                const mapped = mapper(chunk, encoding);
+                isPromise = mapped instanceof Promise;
+                (await mapped).forEach(c => this.push(c));
+                callback();
+            } catch (err) {
+                if (isPromise) {
+                    // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly
+                    this.emit("error", err);
+                    callback();
+                } else {
+                    callback(err);
+                }
+            }
+        },
+    });
+}
diff --git a/src/functions/fromArray/fromArray.spec.ts b/src/functions/fromArray/fromArray.spec.ts
new file mode 100644
index 0000000..b0b9a95
--- /dev/null
+++ b/src/functions/fromArray/fromArray.spec.ts
@@ -0,0 +1,45 @@
+import test from "ava";
+import { expect } from "chai";
+import { fromArray } from ".";
+
+test.cb("fromArray() streams array elements in flowing mode", t => {
+    t.plan(3);
+    const elements = ["a", "b", "c"];
+    const stream = fromArray(elements);
+    let i = 0;
+    stream
+        .on("data", (element: string) => {
+            expect(element).to.equal(elements[i]);
+            t.pass();
+            i++;
+        })
+        .on("error", t.end)
+
.on("end", t.end); +}); + +test.cb("fromArray() ends immediately if there are no array elements", t => { + t.plan(0); + fromArray([]) + .on("data", () => t.fail()) + .on("error", t.end) + .on("end", t.end); +}); + +test.cb("fromArray() streams array elements in paused mode", t => { + t.plan(3); + const elements = ["a", "b", "c"]; + const stream = fromArray(elements); + let i = 0; + stream + .on("readable", () => { + let element = stream.read(); + while (element !== null) { + expect(element).to.equal(elements[i]); + t.pass(); + i++; + element = stream.read(); + } + }) + .on("error", t.end) + .on("end", t.end); +}); diff --git a/src/functions/fromArray/index.ts b/src/functions/fromArray/index.ts new file mode 100644 index 0000000..f92654e --- /dev/null +++ b/src/functions/fromArray/index.ts @@ -0,0 +1,19 @@ +import { Readable } from "stream"; +/** + * Convert an array into a Readable stream of its elements + * @param array Array of elements to stream + */ +export function fromArray(array: any[]): NodeJS.ReadableStream { + let cursor = 0; + return new Readable({ + objectMode: true, + read() { + if (cursor < array.length) { + this.push(array[cursor]); + cursor++; + } else { + this.push(null); + } + }, + }); +} diff --git a/src/functions/functions.spec.ts b/src/functions/functions.spec.ts deleted file mode 100644 index f174928..0000000 --- a/src/functions/functions.spec.ts +++ /dev/null @@ -1,1683 +0,0 @@ -import * as cp from "child_process"; -import test from "ava"; -import { expect } from "chai"; -import { performance } from "perf_hooks"; -import { Readable } from "stream"; -import { - fromArray, - map, - flatMap, - filter, - split, - join, - replace, - parse, - stringify, - collect, - concat, - merge, - duplex, - child, - reduce, - last, - batch, - unbatch, - rate, - parallelMap, - accumulator, - accumulatorBy, -} from "."; -import { FlushStrategy } from "./definitions"; -import { sleep } from "../helpers"; - -test.cb("fromArray() streams array elements in flowing mode", t => { - t.plan(3); - const elements = ["a", "b", "c"]; - const stream = fromArray(elements); - let i = 0; - stream - .on("data", (element: string) => { - expect(element).to.equal(elements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); -}); - -test.cb("fromArray() streams array elements in paused mode", t => { - t.plan(3); - const elements = ["a", "b", "c"]; - const stream = fromArray(elements); - let i = 0; - stream - .on("readable", () => { - let element = stream.read(); - while (element !== null) { - expect(element).to.equal(elements[i]); - t.pass(); - i++; - element = stream.read(); - } - }) - .on("error", t.end) - .on("end", t.end); -}); - -test.cb("fromArray() ends immediately if there are no array elements", t => { - t.plan(0); - fromArray([]) - .on("data", () => t.fail()) - .on("error", t.end) - .on("end", t.end); -}); - -test.cb("map() maps elements synchronously", t => { - t.plan(3); - const source = new Readable({ objectMode: true }); - const expectedElements = ["A", "B", "C"]; - let i = 0; - source - .pipe(map((element: string) => element.toUpperCase())) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("map() maps elements asynchronously", t => { - t.plan(3); - const source = new Readable({ objectMode: true }); - const expectedElements = ["A", "B", "C"]; - let i = 0; - source - .pipe( - 
map(async (element: string) => { - await Promise.resolve(); - return element.toUpperCase(); - }), - ) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("map() emits errors during synchronous mapping", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - source - .pipe( - map((element: string) => { - if (element !== "a") { - throw new Error("Failed mapping"); - } - return element.toUpperCase(); - }), - ) - .resume() - .on("error", err => { - expect(err.message).to.equal("Failed mapping"); - t.pass(); - }) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test("map() emits errors during asynchronous mapping", t => { - t.plan(1); - return new Promise((resolve, reject) => { - const source = new Readable({ objectMode: true }); - source - .pipe( - map(async (element: string) => { - await Promise.resolve(); - if (element !== "a") { - throw new Error("Failed mapping"); - } - return element.toUpperCase(); - }), - ) - .resume() - .on("error", err => { - expect(err.message).to.equal("Failed mapping"); - t.pass(); - resolve(); - }) - .on("end", () => { - t.fail(); - }); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); - }); -}); - -test.cb("flatMap() maps elements synchronously", t => { - t.plan(6); - const source = new Readable({ objectMode: true }); - const expectedElements = ["a", "A", "b", "B", "c", "C"]; - let i = 0; - source - .pipe(flatMap((element: string) => [element, element.toUpperCase()])) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("flatMap() maps elements asynchronously", t => { - t.plan(6); - const source = new Readable({ objectMode: true }); - const expectedElements = ["a", "A", "b", "B", "c", "C"]; - let i = 0; - source - .pipe( - flatMap(async (element: string) => { - await Promise.resolve(); - return [element, element.toUpperCase()]; - }), - ) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("flatMap() emits errors during synchronous mapping", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - source - .pipe( - flatMap((element: string) => { - if (element !== "a") { - throw new Error("Failed mapping"); - } - return [element, element.toUpperCase()]; - }), - ) - .resume() - .on("error", err => { - expect(err.message).to.equal("Failed mapping"); - t.pass(); - }) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("flatMap() emits errors during asynchronous mapping", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - source - .pipe( - flatMap(async (element: string) => { - await Promise.resolve(); - if (element !== "a") { - throw new Error("Failed mapping"); - } - return [element, element.toUpperCase()]; - }), - ) - .resume() - .on("error", err => { - expect(err.message).to.equal("Failed mapping"); - t.pass(); - }) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - 
source.push(null); -}); - -test.cb("filter() filters elements synchronously", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - const expectedElements = ["a", "c"]; - let i = 0; - source - .pipe(filter((element: string) => element !== "b")) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("filter() filters elements asynchronously", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - const expectedElements = ["a", "c"]; - let i = 0; - source - .pipe( - filter(async (element: string) => { - await Promise.resolve(); - return element !== "b"; - }), - ) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("filter() emits errors during synchronous filtering", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - source - .pipe( - filter((element: string) => { - if (element !== "a") { - throw new Error("Failed filtering"); - } - return true; - }), - ) - .resume() - .on("error", err => { - expect(err.message).to.equal("Failed filtering"); - t.pass(); - }) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("filter() emits errors during asynchronous filtering", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - source - .pipe( - filter(async (element: string) => { - await Promise.resolve(); - if (element !== "a") { - throw new Error("Failed filtering"); - } - return true; - }), - ) - .resume() - .on("error", err => { - expect(err.message).to.equal("Failed filtering"); - t.pass(); - }) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("reduce() reduces elements synchronously", t => { - t.plan(1); - const source = new Readable({ objectMode: true }); - const expectedValue = 6; - source - .pipe(reduce((acc: number, element: string) => acc + element.length, 0)) - .on("data", (element: string) => { - expect(element).to.equal(expectedValue); - t.pass(); - }) - .on("error", t.end) - .on("end", t.end); - - source.push("ab"); - source.push("cd"); - source.push("ef"); - source.push(null); -}); - -test.cb("reduce() reduces elements asynchronously", t => { - t.plan(1); - const source = new Readable({ objectMode: true }); - const expectedValue = 6; - source - .pipe( - reduce(async (acc: number, element: string) => { - await Promise.resolve(); - return acc + element.length; - }, 0), - ) - .on("data", (element: string) => { - expect(element).to.equal(expectedValue); - t.pass(); - }) - .on("error", t.end) - .on("end", t.end); - - source.push("ab"); - source.push("cd"); - source.push("ef"); - source.push(null); -}); - -test.cb("reduce() emits errors during synchronous reduce", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - source - .pipe( - reduce((acc: number, element: string) => { - if (element !== "ab") { - throw new Error("Failed reduce"); - } - return acc + element.length; - }, 0), - ) - .resume() - .on("error", err => { - expect(err.message).to.equal("Failed reduce"); - t.pass(); - }) - .on("end", t.end); - - source.push("ab"); - source.push("cd"); - 
source.push("ef"); - source.push(null); -}); - -test.cb("reduce() emits errors during asynchronous reduce", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - source - .pipe( - reduce(async (acc: number, element: string) => { - await Promise.resolve(); - if (element !== "ab") { - throw new Error("Failed mapping"); - } - return acc + element.length; - }, 0), - ) - .resume() - .on("error", err => { - expect(err.message).to.equal("Failed mapping"); - t.pass(); - }) - .on("end", t.end); - - source.push("ab"); - source.push("cd"); - source.push("ef"); - source.push(null); -}); - -test.cb("split() splits chunks using the default separator (\\n)", t => { - t.plan(5); - const source = new Readable({ objectMode: true }); - const expectedParts = ["ab", "c", "d", "ef", ""]; - let i = 0; - source - .pipe(split()) - .on("data", part => { - expect(part).to.equal(expectedParts[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("ab\n"); - source.push("c"); - source.push("\n"); - source.push("d"); - source.push("\nef\n"); - source.push(null); -}); - -test.cb("split() splits chunks using the specified separator", t => { - t.plan(6); - const source = new Readable({ objectMode: true }); - const expectedParts = ["ab", "c", "d", "e", "f", ""]; - let i = 0; - source - .pipe(split("|")) - .on("data", (part: string) => { - expect(part).to.equal(expectedParts[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("ab|"); - source.push("c|d"); - source.push("|"); - source.push("e"); - source.push("|f|"); - source.push(null); -}); - -test.cb( - "split() splits utf8 encoded buffers using the specified separator", - t => { - t.plan(3); - const expectedElements = ["a", "b", "c"]; - let i = 0; - const through = split(","); - const buf = Buffer.from("a,b,c"); - through - .on("data", element => { - expect(element).to.equal(expectedElements[i]); - i++; - t.pass(); - }) - .on("error", t.end) - .on("end", t.end); - - for (let j = 0; j < buf.length; ++j) { - through.write(buf.slice(j, j + 1)); - } - through.end(); - }, -); - -test.cb( - "split() splits utf8 encoded buffers with multi-byte characters using the specified separator", - t => { - t.plan(3); - const expectedElements = ["一", "一", "一"]; - let i = 0; - const through = split(","); - const buf = Buffer.from("一,一,一"); // Those spaces are multi-byte utf8 characters (code: 4E00) - through - .on("data", element => { - expect(element).to.equal(expectedElements[i]); - i++; - t.pass(); - }) - .on("error", t.end) - .on("end", t.end); - - for (let j = 0; j < buf.length; ++j) { - through.write(buf.slice(j, j + 1)); - } - through.end(); - }, -); - -test.cb("join() joins chunks using the specified separator", t => { - t.plan(9); - const source = new Readable({ objectMode: true }); - const expectedParts = ["ab|", "|", "c|d", "|", "|", "|", "e", "|", "|f|"]; - let i = 0; - source - .pipe(join("|")) - .on("data", part => { - expect(part).to.equal(expectedParts[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("ab|"); - source.push("c|d"); - source.push("|"); - source.push("e"); - source.push("|f|"); - source.push(null); -}); - -test.cb( - "join() joins chunks using the specified separator without breaking up multi-byte characters " + - "spanning multiple chunks", - t => { - t.plan(5); - const source = new Readable({ objectMode: true }); - const expectedParts = ["ø", "|", "ö", "|", "一"]; - let i = 0; - source - .pipe(join("|")) - .on("data", part => { - 
expect(part).to.equal(expectedParts[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push(Buffer.from("ø").slice(0, 1)); // 2-byte character spanning two chunks - source.push(Buffer.from("ø").slice(1, 2)); - source.push(Buffer.from("ö").slice(0, 1)); // 2-byte character spanning two chunks - source.push(Buffer.from("ö").slice(1, 2)); - source.push(Buffer.from("一").slice(0, 1)); // 3-byte character spanning three chunks - source.push(Buffer.from("一").slice(1, 2)); - source.push(Buffer.from("一").slice(2, 3)); - source.push(null); - }, -); - -test.cb( - "replace() replaces occurrences of the given string in the streamed elements with the specified " + - "replacement string", - t => { - t.plan(3); - const source = new Readable({ objectMode: true }); - const expectedElements = ["abc", "xyf", "ghi"]; - let i = 0; - source - .pipe(replace("de", "xy")) - .on("data", part => { - expect(part).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("abc"); - source.push("def"); - source.push("ghi"); - source.push(null); - }, -); - -test.cb( - "replace() replaces occurrences of the given regular expression in the streamed elements with " + - "the specified replacement string", - t => { - t.plan(3); - const source = new Readable({ objectMode: true }); - const expectedElements = ["abc", "xyz", "ghi"]; - let i = 0; - source - .pipe(replace(/^def$/, "xyz")) - .on("data", part => { - expect(part).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("abc"); - source.push("def"); - source.push("ghi"); - source.push(null); - }, -); - -test.cb( - "replace() replaces occurrences of the given multi-byte character even if it spans multiple chunks", - t => { - t.plan(3); - const source = new Readable({ objectMode: true }); - const expectedElements = ["ø", "O", "a"]; - let i = 0; - source - .pipe(replace("ö", "O")) - .on("data", part => { - expect(part).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push(Buffer.from("ø").slice(0, 1)); // 2-byte character spanning two chunks - source.push(Buffer.from("ø").slice(1, 2)); - source.push(Buffer.from("ö").slice(0, 1)); // 2-byte character spanning two chunks - source.push(Buffer.from("ö").slice(1, 2)); - source.push("a"); - source.push(null); - }, -); - -test.cb("parse() parses the streamed elements as JSON", t => { - t.plan(3); - const source = new Readable({ objectMode: true }); - const expectedElements = ["abc", {}, []]; - let i = 0; - source - .pipe(parse()) - .on("data", part => { - expect(part).to.deep.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push('"abc"'); - source.push("{}"); - source.push("[]"); - source.push(null); -}); - -test.cb("parse() emits errors on invalid JSON", t => { - t.plan(2); - const source = new Readable({ objectMode: true }); - source - .pipe(parse()) - .resume() - .on("error", () => t.pass()) - .on("end", t.end); - - source.push("{}"); - source.push({}); - source.push([]); - source.push(null); -}); - -test.cb("stringify() stringifies the streamed elements as JSON", t => { - t.plan(4); - const source = new Readable({ objectMode: true }); - const expectedElements = [ - '"abc"', - "0", - '{"a":"a","b":"b","c":"c"}', - '["a","b","c"]', - ]; - let i = 0; - source - .pipe(stringify()) - .on("data", part => { - expect(part).to.deep.equal(expectedElements[i]); - t.pass(); - i++; 
- }) - .on("error", t.end) - .on("end", t.end); - - source.push("abc"); - source.push(0); - source.push({ a: "a", b: "b", c: "c" }); - source.push(["a", "b", "c"]); - source.push(null); -}); - -test.cb( - "stringify() stringifies the streamed elements as pretty-printed JSON", - t => { - t.plan(4); - const source = new Readable({ objectMode: true }); - const expectedElements = [ - '"abc"', - "0", - '{\n "a": "a",\n "b": "b",\n "c": "c"\n}', - '[\n "a",\n "b",\n "c"\n]', - ]; - let i = 0; - source - .pipe(stringify({ pretty: true })) - .on("data", part => { - expect(part).to.deep.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("abc"); - source.push(0); - source.push({ a: "a", b: "b", c: "c" }); - source.push(["a", "b", "c"]); - source.push(null); - }, -); - -test.cb( - "collect() collects streamed elements into an array (object, flowing mode)", - t => { - t.plan(1); - const source = new Readable({ objectMode: true }); - - source - .pipe(collect({ objectMode: true })) - .on("data", collected => { - expect(collected).to.deep.equal(["a", "b", "c"]); - t.pass(); - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); - }, -); - -test.cb( - "collect() collects streamed elements into an array (object, paused mode)", - t => { - t.plan(1); - const source = new Readable({ objectMode: true }); - const collector = source.pipe(collect({ objectMode: true })); - - collector - .on("readable", () => { - let collected = collector.read(); - while (collected !== null) { - expect(collected).to.deep.equal(["a", "b", "c"]); - t.pass(); - collected = collector.read(); - } - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); - }, -); - -test.cb( - "collect() collects streamed bytes into a buffer (non-object, flowing mode)", - t => { - t.plan(1); - const source = new Readable({ objectMode: false }); - - source - .pipe(collect()) - .on("data", collected => { - expect(collected).to.deep.equal(Buffer.from("abc")); - t.pass(); - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); - }, -); - -test.cb( - "collect() collects streamed bytes into a buffer (non-object, paused mode)", - t => { - t.plan(1); - const source = new Readable({ objectMode: false }); - const collector = source.pipe(collect({ objectMode: false })); - collector - .on("readable", () => { - let collected = collector.read(); - while (collected !== null) { - expect(collected).to.deep.equal(Buffer.from("abc")); - t.pass(); - collected = collector.read(); - } - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); - }, -); - -test.cb( - "collect() emits an empty array if the source was empty (object mode)", - t => { - t.plan(1); - const source = new Readable({ objectMode: true }); - const collector = source.pipe(collect({ objectMode: true })); - collector - .on("data", collected => { - expect(collected).to.deep.equal([]); - t.pass(); - }) - .on("error", t.end) - .on("end", t.end); - - source.push(null); - }, -); - -test.cb( - "collect() emits nothing if the source was empty (non-object mode)", - t => { - t.plan(0); - const source = new Readable({ objectMode: false }); - const collector = source.pipe(collect({ objectMode: false })); - collector - .on("data", () => t.fail()) - .on("error", t.end) - 
.on("end", t.end); - - source.push(null); - }, -); - -test.cb( - "concat() concatenates multiple readable streams (object, flowing mode)", - t => { - t.plan(6); - const source1 = new Readable({ objectMode: true }); - const source2 = new Readable({ objectMode: true }); - const expectedElements = ["a", "b", "c", "d", "e", "f"]; - let i = 0; - concat(source1, source2) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source1.push("a"); - source2.push("d"); - source1.push("b"); - source2.push("e"); - source1.push("c"); - source2.push("f"); - source2.push(null); - source1.push(null); - }, -); - -test.cb( - "concat() concatenates multiple readable streams (object, paused mode)", - t => { - t.plan(6); - const source1 = new Readable({ objectMode: true }); - const source2 = new Readable({ objectMode: true }); - const expectedElements = ["a", "b", "c", "d", "e", "f"]; - let i = 0; - const concatenation = concat(source1, source2) - .on("readable", () => { - let element = concatenation.read(); - while (element !== null) { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - element = concatenation.read(); - } - }) - .on("error", t.end) - .on("end", t.end); - - source1.push("a"); - source2.push("d"); - source1.push("b"); - source2.push("e"); - source1.push("c"); - source2.push("f"); - source2.push(null); - source1.push(null); - }, -); - -test.cb( - "concat() concatenates multiple readable streams (non-object, flowing mode)", - t => { - t.plan(6); - const source1 = new Readable({ objectMode: false }); - const source2 = new Readable({ objectMode: false }); - const expectedElements = ["a", "b", "c", "d", "e", "f"]; - let i = 0; - concat(source1, source2) - .on("data", (element: string) => { - expect(element).to.deep.equal(Buffer.from(expectedElements[i])); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source1.push("a"); - source2.push("d"); - source1.push("b"); - source2.push("e"); - source1.push("c"); - source2.push("f"); - source2.push(null); - source1.push(null); - }, -); - -test.cb( - "concat() concatenates multiple readable streams (non-object, paused mode)", - t => { - t.plan(6); - const source1 = new Readable({ objectMode: false, read: () => ({}) }); - const source2 = new Readable({ objectMode: false, read: () => ({}) }); - const expectedElements = ["a", "b", "c", "d", "e", "f"]; - let i = 0; - const concatenation = concat(source1, source2) - .on("readable", () => { - let element = concatenation.read(); - while (element !== null) { - expect(element).to.deep.equal( - Buffer.from(expectedElements[i]), - ); - t.pass(); - i++; - element = concatenation.read(); - } - }) - .on("error", t.end) - .on("end", t.end); - - source1.push("a"); - setTimeout(() => source2.push("d"), 10); - setTimeout(() => source1.push("b"), 20); - setTimeout(() => source2.push("e"), 30); - setTimeout(() => source1.push("c"), 40); - setTimeout(() => source2.push("f"), 50); - setTimeout(() => source2.push(null), 60); - setTimeout(() => source1.push(null), 70); - }, -); - -test.cb("concat() concatenates a single readable stream (object mode)", t => { - t.plan(3); - const source = new Readable({ objectMode: true }); - const expectedElements = ["a", "b", "c", "d", "e", "f"]; - let i = 0; - concat(source) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - 
source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb( - "concat() concatenates a single readable stream (non-object mode)", - t => { - t.plan(3); - const source = new Readable({ objectMode: false }); - const expectedElements = ["a", "b", "c", "d", "e", "f"]; - let i = 0; - concat(source) - .on("data", (element: string) => { - expect(element).to.deep.equal(Buffer.from(expectedElements[i])); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); - }, -); - -test.cb("concat() concatenates empty list of readable streams", t => { - t.plan(0); - concat() - .pipe(collect()) - .on("data", _ => { - t.fail(); - }) - .on("error", t.end) - .on("end", t.end); -}); - -test.cb( - "merge() merges multiple readable streams in chunk arrival order", - t => { - t.plan(6); - const source1 = new Readable({ objectMode: true, read: () => ({}) }); - const source2 = new Readable({ objectMode: true, read: () => ({}) }); - const expectedElements = ["a", "d", "b", "e", "c", "f"]; - let i = 0; - merge(source1, source2) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source1.push("a"); - setTimeout(() => source2.push("d"), 10); - setTimeout(() => source1.push("b"), 20); - setTimeout(() => source2.push("e"), 30); - setTimeout(() => source1.push("c"), 40); - setTimeout(() => source2.push("f"), 50); - setTimeout(() => source2.push(null), 60); - setTimeout(() => source1.push(null), 70); - }, -); - -test.cb("merge() merges a readable stream", t => { - t.plan(3); - const source = new Readable({ objectMode: true, read: () => ({}) }); - const expectedElements = ["a", "b", "c"]; - let i = 0; - merge(source) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("merge() merges an empty list of readable streams", t => { - t.plan(0); - merge() - .on("data", () => t.pass()) - .on("error", t.end) - .on("end", t.end); -}); - -test.cb( - "duplex() combines a writable and readable stream into a ReadWrite stream", - t => { - t.plan(1); - const source = new Readable(); - const catProcess = cp.exec("cat"); - let out = ""; - source - .pipe(duplex(catProcess.stdin, catProcess.stdout)) - .on("data", chunk => (out += chunk)) - .on("error", t.end) - .on("end", () => { - expect(out).to.equal("abcdef"); - t.pass(); - t.end(); - }); - source.push("ab"); - source.push("cd"); - source.push("ef"); - source.push(null); - }, -); - -test.cb( - "child() allows easily writing to child process stdin and reading from its stdout", - t => { - t.plan(1); - const source = new Readable(); - const catProcess = cp.exec("cat"); - let out = ""; - source - .pipe(child(catProcess)) - .on("data", chunk => (out += chunk)) - .on("error", t.end) - .on("end", () => { - expect(out).to.equal("abcdef"); - t.pass(); - t.end(); - }); - source.push("ab"); - source.push("cd"); - source.push("ef"); - source.push(null); - }, -); - -test("last() resolves to the last chunk streamed by the given readable stream", async t => { - const source = new Readable({ objectMode: true }); - const lastPromise = last(source); - source.push("ab"); - source.push("cd"); - source.push("ef"); - source.push(null); - const lastChunk = await lastPromise; - expect(lastChunk).to.equal("ef"); 
-}); - -test.cb("batch() batches chunks together", t => { - t.plan(3); - const source = new Readable({ objectMode: true }); - const expectedElements = [["a", "b", "c"], ["d", "e", "f"], ["g"]]; - let i = 0; - source - .pipe(batch(3)) - .on("data", (element: string[]) => { - expect(element).to.deep.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push("d"); - source.push("e"); - source.push("f"); - source.push("g"); - source.push(null); -}); - -test.cb("batch() yields a batch after the timeout", t => { - t.plan(3); - const source = new Readable({ - objectMode: true, - read(size: number) {}, - }); - const expectedElements = [["a", "b"], ["c"], ["d"]]; - let i = 0; - source - .pipe(batch(3)) - .on("data", (element: string[]) => { - console.error("DATA", element); - expect(element).to.deep.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.fail) - .on("end", t.end); - - source.push("a"); - source.push("b"); - setTimeout(() => { - source.push("c"); - }, 600); - setTimeout(() => { - source.push("d"); - source.push(null); - }, 600 * 2); -}); - -test.cb("unbatch() unbatches", t => { - t.plan(3); - const source = new Readable({ objectMode: true }); - const expectedElements = ["a", "b", "c"]; - let i = 0; - source - .pipe(batch(3)) - .pipe(unbatch()) - .on("data", (element: string) => { - expect(element).to.equal(expectedElements[i]); - t.pass(); - i++; - }) - .on("error", t.end) - .on("end", t.end); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push(null); -}); - -test.cb("rate() sends data at desired rate", t => { - t.plan(9); - const fastRate = 150; - const medRate = 50; - const slowRate = 1; - const sourceFast = new Readable({ objectMode: true }); - const sourceMed = new Readable({ objectMode: true }); - const sourceSlow = new Readable({ objectMode: true }); - const expectedElements = ["a", "b", "c"]; - const start = performance.now(); - let i = 0; - let j = 0; - let k = 0; - - sourceFast - .pipe(rate(fastRate)) - .on("data", (element: string[]) => { - const currentRate = (i / (performance.now() - start)) * 1000; - expect(element).to.deep.equal(expectedElements[i]); - expect(currentRate).lessThan(fastRate); - t.pass(); - i++; - }) - .on("error", t.end); - - sourceMed - .pipe(rate(medRate)) - .on("data", (element: string[]) => { - const currentRate = (j / (performance.now() - start)) * 1000; - expect(element).to.deep.equal(expectedElements[j]); - expect(currentRate).lessThan(medRate); - t.pass(); - j++; - }) - .on("error", t.end); - - sourceSlow - .pipe(rate(slowRate, 1)) - .on("data", (element: string[]) => { - const currentRate = (k / (performance.now() - start)) * 1000; - expect(element).to.deep.equal(expectedElements[k]); - expect(currentRate).lessThan(slowRate); - t.pass(); - k++; - }) - .on("error", t.end) - .on("end", t.end); - - sourceFast.push("a"); - sourceFast.push("b"); - sourceFast.push("c"); - sourceFast.push(null); - sourceMed.push("a"); - sourceMed.push("b"); - sourceMed.push("c"); - sourceMed.push(null); - sourceSlow.push("a"); - sourceSlow.push("b"); - sourceSlow.push("c"); - sourceSlow.push(null); -}); - -test.cb("parallel() parallel mapping", t => { - t.plan(6); - const offset = 50; - const source = new Readable({ objectMode: true }); - const expectedElements = [ - "a_processed", - "b_processed", - "c_processed", - "d_processed", - "e_processed", - "f_processed", - ]; - interface IPerfData { - start: number; - output?: 
string; - finish?: number; - } - const orderedResults: IPerfData[] = []; - source - .pipe( - parallelMap(async (data: any) => { - const perfData: IPerfData = { start: performance.now() }; - const c = data + "_processed"; - perfData.output = c; - await sleep(offset); - perfData.finish = performance.now(); - orderedResults.push(perfData); - return c; - }, 2), - ) - .on("data", (element: string) => { - t.true(expectedElements.includes(element)); - }) - .on("error", t.end) - .on("end", async () => { - expect(orderedResults[0].finish).to.be.lessThan( - orderedResults[2].start, - ); - expect(orderedResults[1].finish).to.be.lessThan( - orderedResults[3].start, - ); - expect(orderedResults[2].finish).to.be.lessThan( - orderedResults[4].start, - ); - expect(orderedResults[3].finish).to.be.lessThan( - orderedResults[5].start, - ); - expect(orderedResults[0].start).to.be.lessThan( - orderedResults[2].start + offset, - ); - expect(orderedResults[1].start).to.be.lessThan( - orderedResults[3].start + offset, - ); - expect(orderedResults[2].start).to.be.lessThan( - orderedResults[4].start + offset, - ); - expect(orderedResults[3].start).to.be.lessThan( - orderedResults[5].start + offset, - ); - t.end(); - }); - - source.push("a"); - source.push("b"); - source.push("c"); - source.push("d"); - source.push("e"); - source.push("f"); - source.push(null); -}); - -test.cb("accumulator() rolling", t => { - t.plan(3); - let chunkIndex = 0; - interface TestObject { - ts: number; - key: string; - } - const source = new Readable({ objectMode: true }); - const firstFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; - const secondFlush = [{ ts: 2, key: "d" }, { ts: 3, key: "e" }]; - const thirdFlush = [{ ts: 4, key: "f" }]; - const flushes = [firstFlush, secondFlush, thirdFlush]; - - source - .pipe(accumulator(2, undefined, FlushStrategy.rolling)) - .on("data", (flush: TestObject[]) => { - t.deepEqual(flush, flushes[chunkIndex]); - chunkIndex++; - }) - .on("error", (e: any) => { - t.end(e); - }) - .on("end", () => { - t.end(); - }); - [...firstFlush, ...secondFlush, ...thirdFlush].forEach(item => { - source.push(item); - }); - source.push(null); -}); - -test.cb("accumulator() rolling with key", t => { - t.plan(2); - let chunkIndex = 0; - interface TestObject { - ts: number; - key: string; - } - const source = new Readable({ objectMode: true }); - const firstFlush = [ - { ts: 0, key: "a" }, - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - { ts: 2, key: "d" }, - ]; - const secondFlush = [{ ts: 3, key: "e" }]; - const flushes = [firstFlush, secondFlush]; - - source - .pipe(accumulator(3, undefined, FlushStrategy.rolling, "ts")) - .on("data", (flush: TestObject[]) => { - t.deepEqual(flush, flushes[chunkIndex]); - chunkIndex++; - }) - .on("error", (e: any) => { - t.end(e); - }) - .on("end", () => { - t.end(); - }); - [...firstFlush, ...secondFlush].forEach(item => { - source.push(item); - }); - source.push(null); -}); - -test.cb("accumulatorBy() rolling", t => { - t.plan(2); - let chunkIndex = 0; - interface TestObject { - ts: number; - key: string; - } - const source = new Readable({ objectMode: true }); - const firstFlush = [ - { ts: 0, key: "a" }, - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - { ts: 2, key: "d" }, - ]; - const secondFlush = [{ ts: 3, key: "e" }]; - const flushes = [firstFlush, secondFlush]; - - source - .pipe( - accumulatorBy( - undefined, - FlushStrategy.rolling, - (event: TestObject, bufferChunk: TestObject) => { - return bufferChunk.ts + 3 <= event.ts; - }, - ), - ) - .on("data", (flush: 
TestObject[]) => { - t.deepEqual(flush, flushes[chunkIndex]); - chunkIndex++; - }) - .on("error", (e: any) => { - t.end(e); - }) - .on("end", () => { - t.end(); - }); - [...firstFlush, ...secondFlush].forEach(item => { - source.push(item); - }); - source.push(null); -}); - -test.cb("accumulator() sliding", t => { - t.plan(4); - let chunkIndex = 0; - interface TestObject { - ts: number; - key: string; - } - const source = new Readable({ objectMode: true }); - const input = [ - { ts: 0, key: "a" }, - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - { ts: 4, key: "d" }, - ]; - const firstFlush = [{ ts: 0, key: "a" }]; - const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; - const thirdFlush = [ - { ts: 0, key: "a" }, - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - ]; - const fourthFlush = [ - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - { ts: 4, key: "d" }, - ]; - - const flushes = [firstFlush, secondFlush, thirdFlush, fourthFlush]; - source - .pipe(accumulator(3, undefined, FlushStrategy.sliding)) - .on("data", (flush: TestObject[]) => { - t.deepEqual(flush, flushes[chunkIndex]); - chunkIndex++; - }) - .on("error", (e: any) => { - t.end(e); - }) - .on("end", () => { - t.end(); - }); - input.forEach(item => { - source.push(item); - }); - source.push(null); -}); - -test.cb("accumulator() sliding with key", t => { - t.plan(6); - let chunkIndex = 0; - interface TestObject { - ts: number; - key: string; - } - const source = new Readable({ objectMode: true }); - const input = [ - { ts: 0, key: "a" }, - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - { ts: 3, key: "d" }, - { ts: 5, key: "f" }, - { ts: 6, key: "g" }, - ]; - const firstFlush = [{ ts: 0, key: "a" }]; - const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; - const thirdFlush = [ - { ts: 0, key: "a" }, - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - ]; - const fourthFlush = [ - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - { ts: 3, key: "d" }, - ]; - const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }]; - const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }]; - - const flushes = [ - firstFlush, - secondFlush, - thirdFlush, - fourthFlush, - fifthFlush, - sixthFlush, - ]; - source - .pipe(accumulator(3, undefined, FlushStrategy.sliding, "ts")) - .on("data", (flush: TestObject[]) => { - t.deepEqual(flush, flushes[chunkIndex]); - chunkIndex++; - }) - .on("error", (e: any) => { - t.end(e); - }) - .on("end", () => { - t.end(); - }); - input.forEach(item => { - source.push(item); - }); - source.push(null); -}); - -test.cb("accumulatorBy() sliding", t => { - t.plan(6); - let chunkIndex = 0; - interface TestObject { - ts: number; - key: string; - } - const source = new Readable({ objectMode: true }); - const input = [ - { ts: 0, key: "a" }, - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - { ts: 3, key: "d" }, - { ts: 5, key: "f" }, - { ts: 6, key: "g" }, - ]; - const firstFlush = [{ ts: 0, key: "a" }]; - const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }]; - const thirdFlush = [ - { ts: 0, key: "a" }, - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - ]; - const fourthFlush = [ - { ts: 1, key: "b" }, - { ts: 2, key: "c" }, - { ts: 3, key: "d" }, - ]; - const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }]; - const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }]; - - const flushes = [ - firstFlush, - secondFlush, - thirdFlush, - fourthFlush, - fifthFlush, - sixthFlush, - ]; - source - .pipe( - accumulatorBy( - undefined, - FlushStrategy.sliding, - (event: TestObject, bufferChunk: 
TestObject) => { - return bufferChunk.ts + 3 <= event.ts ? true : false; - }, - ), - ) - .on("data", (flush: TestObject[]) => { - t.deepEqual(flush, flushes[chunkIndex]); - chunkIndex++; - }) - .on("error", (e: any) => { - t.end(e); - }) - .on("end", () => { - t.end(); - }); - input.forEach(item => { - source.push(item); - }); - source.push(null); -}); diff --git a/src/functions/functions.ts b/src/functions/functions.ts deleted file mode 100644 index 49eea1f..0000000 --- a/src/functions/functions.ts +++ /dev/null @@ -1,765 +0,0 @@ -import { Transform, Readable, Writable, Duplex } from "stream"; -import { performance } from "perf_hooks"; -import { ChildProcess } from "child_process"; -import { StringDecoder } from "string_decoder"; -import { - TransformOptions, - ThroughOptions, - WithEncoding, - SerializationFormats, - JsonValue, - JsonParseOptions, - FlushStrategy, - AccumulatorByIteratee, -} from "./definitions"; -import { sleep } from "../helpers"; - -/** - * Convert an array into a Readable stream of its elements - * @param array Array of elements to stream - */ -export function fromArray(array: any[]): NodeJS.ReadableStream { - let cursor = 0; - return new Readable({ - objectMode: true, - read() { - if (cursor < array.length) { - this.push(array[cursor]); - cursor++; - } else { - this.push(null); - } - }, - }); -} - -/** - * Return a ReadWrite stream that maps streamed chunks - * @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such) - * @param options - * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects - * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects - */ -export function map( - mapper: (chunk: T, encoding: string) => R, - options: TransformOptions = { - readableObjectMode: true, - writableObjectMode: true, - }, -): NodeJS.ReadWriteStream { - return new Transform({ - ...options, - async transform(chunk: T, encoding, callback) { - try { - const mapped = await mapper(chunk, encoding); - this.push(mapped); - callback(); - } catch (err) { - callback(err); - } - }, - }); -} - -/** - * Return a ReadWrite stream that flat maps streamed chunks - * @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such) - * @param options - * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects - * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects - */ -export function flatMap( - mapper: - | ((chunk: T, encoding: string) => R[]) - | ((chunk: T, encoding: string) => Promise), - options: TransformOptions = { - readableObjectMode: true, - writableObjectMode: true, - }, -): NodeJS.ReadWriteStream { - return new Transform({ - ...options, - async transform(chunk: T, encoding, callback) { - let isPromise = false; - try { - const mapped = mapper(chunk, encoding); - isPromise = mapped instanceof Promise; - (await mapped).forEach(c => this.push(c)); - callback(); - } catch (err) { - if (isPromise) { - // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly - this.emit("error", err); - callback(); - } else { - callback(err); - } - } - }, - }); -} - -/** - * Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold - * @param predicate Predicate with which to filter scream chunks - * @param options - * @param options.objectMode Whether this 
stream should behave as a stream of objects - */ -export function filter( - predicate: - | ((chunk: T, encoding: string) => boolean) - | ((chunk: T, encoding: string) => Promise), - options: ThroughOptions = { - objectMode: true, - }, -) { - return new Transform({ - readableObjectMode: options.objectMode, - writableObjectMode: options.objectMode, - async transform(chunk: T, encoding, callback) { - let isPromise = false; - try { - const result = predicate(chunk, encoding); - isPromise = result instanceof Promise; - if (!!(await result)) { - callback(undefined, chunk); - } else { - callback(); - } - } catch (err) { - if (isPromise) { - // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly - this.emit("error", err); - callback(); - } else { - callback(err); - } - } - }, - }); -} - -/** - * Return a ReadWrite stream that reduces streamed chunks down to a single value and yield that - * value - * @param iteratee Reducer function to apply on each streamed chunk - * @param initialValue Initial value - * @param options - * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects - * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects - */ -export function reduce( - iteratee: - | ((previousValue: R, chunk: T, encoding: string) => R) - | ((previousValue: R, chunk: T, encoding: string) => Promise), - initialValue: R, - options: TransformOptions = { - readableObjectMode: true, - writableObjectMode: true, - }, -) { - let value = initialValue; - return new Transform({ - readableObjectMode: options.readableObjectMode, - writableObjectMode: options.writableObjectMode, - async transform(chunk: T, encoding, callback) { - let isPromise = false; - try { - const result = iteratee(value, chunk, encoding); - isPromise = result instanceof Promise; - value = await result; - callback(); - } catch (err) { - if (isPromise) { - // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly - this.emit("error", err); - callback(); - } else { - callback(err); - } - } - }, - flush(callback) { - // Best effort attempt at yielding the final value (will throw if e.g. 
yielding an object and - // downstream doesn't expect objects) - try { - callback(undefined, value); - } catch (err) { - try { - this.emit("error", err); - } catch { - // Best effort was made - } - } - }, - }); -} - -/** - * Return a ReadWrite stream that splits streamed chunks using the given separator - * @param separator Separator to split by, defaulting to "\n" - * @param options - * @param options.encoding Encoding written chunks are assumed to use - */ -export function split( - separator: string | RegExp = "\n", - options: WithEncoding = { encoding: "utf8" }, -): NodeJS.ReadWriteStream { - let buffered = ""; - const decoder = new StringDecoder(options.encoding); - - return new Transform({ - readableObjectMode: true, - transform(chunk: Buffer, encoding, callback) { - const asString = decoder.write(chunk); - const splitted = asString.split(separator); - if (splitted.length > 1) { - splitted[0] = buffered.concat(splitted[0]); - buffered = ""; - } - buffered += splitted[splitted.length - 1]; - splitted.slice(0, -1).forEach((part: string) => this.push(part)); - callback(); - }, - flush(callback) { - callback(undefined, buffered + decoder.end()); - }, - }); -} - -/** - * Return a ReadWrite stream that joins streamed chunks using the given separator - * @param separator Separator to join with - * @param options - * @param options.encoding Encoding written chunks are assumed to use - */ -export function join( - separator: string, - options: WithEncoding = { encoding: "utf8" }, -): NodeJS.ReadWriteStream { - let isFirstChunk = true; - const decoder = new StringDecoder(options.encoding); - return new Transform({ - readableObjectMode: true, - async transform(chunk: Buffer, encoding, callback) { - const asString = decoder.write(chunk); - // Take care not to break up multi-byte characters spanning multiple chunks - if (asString !== "" || chunk.length === 0) { - if (!isFirstChunk) { - this.push(separator); - } - this.push(asString); - isFirstChunk = false; - } - callback(); - }, - }); -} - -/** - * Return a ReadWrite stream that replaces occurrences of the given string or regular expression in - * the streamed chunks with the specified replacement string - * @param searchValue Search string to use - * @param replaceValue Replacement string to use - * @param options - * @param options.encoding Encoding written chunks are assumed to use - */ -export function replace( - searchValue: string | RegExp, - replaceValue: string, - options: WithEncoding = { encoding: "utf8" }, -): NodeJS.ReadWriteStream { - const decoder = new StringDecoder(options.encoding); - return new Transform({ - readableObjectMode: true, - transform(chunk: Buffer, encoding, callback) { - const asString = decoder.write(chunk); - // Take care not to break up multi-byte characters spanning multiple chunks - if (asString !== "" || chunk.length === 0) { - callback( - undefined, - asString.replace(searchValue, replaceValue), - ); - } else { - callback(); - } - }, - }); -} - -/** - * Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk - * must be a fully defined JSON string. - * @param format Format of serialized data, only utf8 supported. 
- */ -export function parse( - format: SerializationFormats = SerializationFormats.utf8, -): NodeJS.ReadWriteStream { - const decoder = new StringDecoder(format); - return new Transform({ - readableObjectMode: true, - writableObjectMode: true, - async transform(chunk: Buffer, encoding, callback) { - try { - const asString = decoder.write(chunk); - // Using await causes parsing errors to be emitted - callback(undefined, await JSON.parse(asString)); - } catch (err) { - callback(err); - } - }, - }); -} - -/** - * Return a ReadWrite stream that stringifies the streamed chunks to JSON - */ -export function stringify( - options: JsonParseOptions = { pretty: false }, -): NodeJS.ReadWriteStream { - return new Transform({ - readableObjectMode: true, - writableObjectMode: true, - transform(chunk: JsonValue, encoding, callback) { - callback( - undefined, - options.pretty - ? JSON.stringify(chunk, null, 2) - : JSON.stringify(chunk), - ); - }, - }); -} - -/** - * Return a ReadWrite stream that collects streamed chunks into an array or buffer - * @param options - * @param options.objectMode Whether this stream should behave as a stream of objects - */ -export function collect( - options: ThroughOptions = { objectMode: false }, -): NodeJS.ReadWriteStream { - const collected: any[] = []; - return new Transform({ - readableObjectMode: options.objectMode, - writableObjectMode: options.objectMode, - transform(data, encoding, callback) { - collected.push(data); - callback(); - }, - flush(callback) { - this.push( - options.objectMode ? collected : Buffer.concat(collected), - ); - callback(); - }, - }); -} - -/** - * Return a Readable stream of readable streams concatenated together - * @param streams Readable streams to concatenate - */ -export function concat( - ...streams: NodeJS.ReadableStream[] -): NodeJS.ReadableStream { - let isStarted = false; - let currentStreamIndex = 0; - const startCurrentStream = () => { - if (currentStreamIndex >= streams.length) { - wrapper.push(null); - } else { - streams[currentStreamIndex] - .on("data", chunk => { - if (!wrapper.push(chunk)) { - streams[currentStreamIndex].pause(); - } - }) - .on("error", err => wrapper.emit("error", err)) - .on("end", () => { - currentStreamIndex++; - startCurrentStream(); - }); - } - }; - - const wrapper = new Readable({ - objectMode: true, - read() { - if (!isStarted) { - isStarted = true; - startCurrentStream(); - } - if (currentStreamIndex < streams.length) { - streams[currentStreamIndex].resume(); - } - }, - }); - return wrapper; -} - -/** - * Return a Readable stream of readable streams merged together in chunk arrival order - * @param streams Readable streams to merge - */ -export function merge( - ...streams: NodeJS.ReadableStream[] -): NodeJS.ReadableStream { - let isStarted = false; - let streamEndedCount = 0; - return new Readable({ - objectMode: true, - read() { - if (streamEndedCount >= streams.length) { - this.push(null); - } else if (!isStarted) { - isStarted = true; - streams.forEach(stream => - stream - .on("data", chunk => { - if (!this.push(chunk)) { - streams.forEach(s => s.pause()); - } - }) - .on("error", err => this.emit("error", err)) - .on("end", () => { - streamEndedCount++; - if (streamEndedCount === streams.length) { - this.push(null); - } - }), - ); - } else { - streams.forEach(s => s.resume()); - } - }, - }); -} - -/** - * Return a Duplex stream from a writable stream that is assumed to somehow, when written to, - * cause the given readable stream to yield chunks - * @param writable Writable stream assumed to 
cause the readable stream to yield chunks when written to - * @param readable Readable stream assumed to yield chunks when the writable stream is written to - */ -export function duplex(writable: Writable, readable: Readable) { - const wrapper = new Duplex({ - readableObjectMode: true, - writableObjectMode: true, - read() { - readable.resume(); - }, - write(chunk, encoding, callback) { - return writable.write(chunk, encoding, callback); - }, - final(callback) { - writable.end(callback); - }, - }); - readable - .on("data", chunk => { - if (!wrapper.push(chunk)) { - readable.pause(); - } - }) - .on("error", err => wrapper.emit("error", err)) - .on("end", () => wrapper.push(null)); - writable.on("drain", () => wrapper.emit("drain")); - writable.on("error", err => wrapper.emit("error", err)); - return wrapper; -} - -/** - * Return a Duplex stream from a child process' stdin and stdout - * @param childProcess Child process from which to create duplex stream - */ -export function child(childProcess: ChildProcess) { - return duplex(childProcess.stdin, childProcess.stdout); -} - -/** - * Return a Promise resolving to the last streamed chunk of the given readable stream, after it has - * ended - * @param readable Readable stream to wait on - */ -export function last(readable: Readable): Promise { - let lastChunk: T | null = null; - return new Promise((resolve, reject) => { - readable - .on("data", chunk => (lastChunk = chunk)) - .on("end", () => resolve(lastChunk)); - }); -} - -/** - * Stores chunks of data internally in array and batches when batchSize is reached. - * - * @param batchSize Size of the batches - * @param maxBatchAge Max lifetime of a batch - */ -export function batch(batchSize: number = 1000, maxBatchAge: number = 500) { - let buffer: any[] = []; - let timer: NodeJS.Timer | null = null; - let sendChunk = (self: Transform) => { - timer && clearTimeout(timer); - timer = null; - self.push(buffer); - buffer = []; - }; - return new Transform({ - objectMode: true, - transform(chunk, encoding, callback) { - buffer.push(chunk); - if (buffer.length === batchSize) { - sendChunk(this); - } else { - if (timer === null) { - timer = setInterval(() => { - sendChunk(this); - }, maxBatchAge); - } - } - callback(); - }, - flush(callback) { - sendChunk(this); - callback(); - }, - }); -} - -/** - * Unbatches and sends individual chunks of data - */ -export function unbatch() { - return new Transform({ - objectMode: true, - transform(data, encoding, callback) { - for (const d of data) { - this.push(d); - } - callback(); - }, - }); -} - -/** - * Limits date of data transferred into stream. - * @param targetRate Desired rate in ms - * @param period Period to sleep for when rate is above or equal to targetRate - */ -export function rate(targetRate: number = 50, period: number = 2) { - const deltaMS = ((1 / targetRate) * 1000) / period; // Skip half a period - let total = 0; - const start = performance.now(); - return new Transform({ - objectMode: true, - async transform(data, encoding, callback) { - const currentRate = (total / (performance.now() - start)) * 1000; - if (targetRate && currentRate > targetRate) { - await sleep(deltaMS); - } - total += 1; - callback(undefined, data); - }, - }); -} - -/** - * Limits number of parallel processes in flight. - * @param parallel Max number of parallel processes. - * @param func Function to execute on each data chunk - * @param pause Amount of time to pause processing when max number of parallel processes are executing. 
- */ -export function parallelMap( - mapper: (data: T) => R, - parallel: number = 10, - sleepTime: number = 5, -) { - let inflight = 0; - return new Transform({ - objectMode: true, - async transform(data, encoding, callback) { - while (parallel <= inflight) { - await sleep(sleepTime); - } - inflight += 1; - callback(); - try { - const res = await mapper(data); - this.push(res); - } catch (e) { - this.emit(e); - } finally { - inflight -= 1; - } - }, - async flush(callback) { - while (inflight > 0) { - await sleep(sleepTime); - } - callback(); - }, - }); -} - -function _accumulator( - accumulateBy: (data: T, buffer: T[], stream: Transform) => void, - shouldFlush: boolean = true, -) { - const buffer: T[] = []; - return new Transform({ - objectMode: true, - async transform(data: any, encoding, callback) { - accumulateBy(data, buffer, this); - callback(); - }, - flush(callback) { - if (shouldFlush) { - this.push(buffer); - } - callback(); - }, - }); -} - -function _sliding( - windowLength: number, - rate: number | undefined, - key?: string, -): (event: T, buffer: T[], stream: Transform) => void { - return (event: T, buffer: T[], stream: Transform) => { - if (key) { - let index = 0; - while ( - index < buffer.length && - buffer[index][key] + windowLength <= event[key] - ) { - index++; - } - buffer.splice(0, index); - } else if (buffer.length === windowLength) { - buffer.shift(); - } - buffer.push(event); - stream.push(buffer); - }; -} - -function _slidingByFunction( - rate: number | undefined, - iteratee: AccumulatorByIteratee, -): (event: T, buffer: T[], stream: Transform) => void { - return (event: T, buffer: T[], stream: Transform) => { - let index = 0; - while (index < buffer.length && iteratee(event, buffer[index])) { - index++; - } - buffer.splice(0, index); - buffer.push(event); - stream.push(buffer); - }; -} - -function _rollingByFunction( - rate: number | undefined, - iteratee: AccumulatorByIteratee, -): (event: T, buffer: T[], stream: Transform) => void { - return (event: T, buffer: T[], stream: Transform) => { - if (iteratee) { - if (buffer.length > 0 && iteratee(event, buffer[0])) { - stream.push(buffer.slice(0)); - buffer.length = 0; - } - } - buffer.push(event); - }; -} - -function _rolling( - windowLength: number, - rate: number | undefined, - key?: string, -): (event: T, buffer: T[], stream: Transform) => void { - return (event: T, buffer: T[], stream: Transform) => { - if (key) { - if (event[key] === undefined) { - stream.emit( - "error", - new Error( - `Key is missing in event: (${key}, ${JSON.stringify( - event, - )})`, - ), - ); - } else if ( - buffer.length > 0 && - buffer[0][key] + windowLength <= event[key] - ) { - stream.push(buffer.slice(0)); - buffer.length = 0; - } - } else if (buffer.length === windowLength) { - stream.push(buffer.slice(0)); - buffer.length = 0; - } - buffer.push(event); - }; -} - -export function accumulator( - batchSize: number, - batchRate: number | undefined, - flushStrategy: FlushStrategy, - keyBy?: string, -): Transform { - if (flushStrategy === FlushStrategy.sliding) { - return sliding(batchSize, batchRate, keyBy); - } else if (flushStrategy === FlushStrategy.rolling) { - return rolling(batchSize, batchRate, keyBy); - } else { - return batch(batchSize, batchRate); - } -} - -export function accumulatorBy( - batchRate: number | undefined, - flushStrategy: S, - iteratee: AccumulatorByIteratee, -): Transform { - if (flushStrategy === FlushStrategy.sliding) { - return slidingBy(batchRate, iteratee); - } else { - return rollingBy(batchRate, 
iteratee); - } -} - -export function sliding( - windowLength: number, - rate: number | undefined, - key?: string, -): Transform { - return _accumulator(_sliding(windowLength, rate, key), false); -} - -export function slidingBy( - rate: number | undefined, - iteratee: AccumulatorByIteratee, -): Transform { - return _accumulator(_slidingByFunction(rate, iteratee), false); -} - -export function rolling( - windowLength: number, - rate: number | undefined, - key?: string, -): Transform { - return _accumulator(_rolling(windowLength, rate, key)); -} - -export function rollingBy( - rate: number | undefined, - iteratee: AccumulatorByIteratee, -): Transform { - return _accumulator(_rollingByFunction(rate, iteratee)); -} diff --git a/src/functions/index.ts b/src/functions/index.ts index 2c511c0..2100353 100644 --- a/src/functions/index.ts +++ b/src/functions/index.ts @@ -1,6 +1,6 @@ -import { Readable, Writable } from "stream"; +import { Readable, Writable, Transform } from "stream"; import { ChildProcess } from "child_process"; -import * as baseFunctions from "./functions"; +import * as baseFunctions from "./baseFunctions"; import { ThroughOptions, @@ -29,7 +29,7 @@ export function fromArray(array: any[]): NodeJS.ReadableStream { export function map( mapper: (chunk: T, encoding?: string) => R, options?: TransformOptions, -): NodeJS.ReadWriteStream { +): Transform { return baseFunctions.map(mapper, options); } @@ -207,10 +207,7 @@ export function last(readable: Readable): Promise { * @param batchSize Size of the batches, defaults to 1000. * @param maxBatchAge? Max lifetime of a batch, defaults to 500 */ -export function batch( - batchSize: number, - maxBatchAge?: number, -): NodeJS.ReadWriteStream { +export function batch(batchSize: number, maxBatchAge?: number): Transform { return baseFunctions.batch(batchSize, maxBatchAge); } diff --git a/src/functions/join/index.ts b/src/functions/join/index.ts new file mode 100644 index 0000000..0bd22d3 --- /dev/null +++ b/src/functions/join/index.ts @@ -0,0 +1,31 @@ +import { Transform } from "stream"; +import { StringDecoder } from "string_decoder"; +import { WithEncoding } from "../definitions"; +/** + * Return a ReadWrite stream that joins streamed chunks using the given separator + * @param separator Separator to join with + * @param options + * @param options.encoding Encoding written chunks are assumed to use + */ +export function join( + separator: string, + options: WithEncoding = { encoding: "utf8" }, +): NodeJS.ReadWriteStream { + let isFirstChunk = true; + const decoder = new StringDecoder(options.encoding); + return new Transform({ + readableObjectMode: true, + async transform(chunk: Buffer, encoding, callback) { + const asString = decoder.write(chunk); + // Take care not to break up multi-byte characters spanning multiple chunks + if (asString !== "" || chunk.length === 0) { + if (!isFirstChunk) { + this.push(separator); + } + this.push(asString); + isFirstChunk = false; + } + callback(); + }, + }); +} diff --git a/src/functions/join/join.spec.ts b/src/functions/join/join.spec.ts new file mode 100644 index 0000000..fc9d5b7 --- /dev/null +++ b/src/functions/join/join.spec.ts @@ -0,0 +1,56 @@ +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { join } from "."; + +test.cb("join() joins chunks using the specified separator", t => { + t.plan(9); + const source = new Readable({ objectMode: true }); + const expectedParts = ["ab|", "|", "c|d", "|", "|", "|", "e", "|", "|f|"]; + let i = 0; + source + 
.pipe(join("|")) + .on("data", part => { + expect(part).to.equal(expectedParts[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("ab|"); + source.push("c|d"); + source.push("|"); + source.push("e"); + source.push("|f|"); + source.push(null); +}); + +test.cb( + "join() joins chunks using the specified separator without breaking up multi-byte characters " + + "spanning multiple chunks", + t => { + t.plan(5); + const source = new Readable({ objectMode: true }); + const expectedParts = ["ø", "|", "ö", "|", "一"]; + let i = 0; + source + .pipe(join("|")) + .on("data", part => { + expect(part).to.equal(expectedParts[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push(Buffer.from("ø").slice(0, 1)); // 2-byte character spanning two chunks + source.push(Buffer.from("ø").slice(1, 2)); + source.push(Buffer.from("ö").slice(0, 1)); // 2-byte character spanning two chunks + source.push(Buffer.from("ö").slice(1, 2)); + source.push(Buffer.from("一").slice(0, 1)); // 3-byte character spanning three chunks + source.push(Buffer.from("一").slice(1, 2)); + source.push(Buffer.from("一").slice(2, 3)); + source.push(null); + }, +); diff --git a/src/functions/last/index.ts b/src/functions/last/index.ts new file mode 100644 index 0000000..baf7440 --- /dev/null +++ b/src/functions/last/index.ts @@ -0,0 +1,14 @@ +import { Readable } from "stream"; +/** + * Return a Promise resolving to the last streamed chunk of the given readable stream, after it has + * ended + * @param readable Readable stream to wait on + */ +export function last(readable: Readable): Promise { + let lastChunk: T | null = null; + return new Promise((resolve, _) => { + readable + .on("data", chunk => (lastChunk = chunk)) + .on("end", () => resolve(lastChunk)); + }); +} diff --git a/src/functions/map/index.ts b/src/functions/map/index.ts new file mode 100644 index 0000000..7ddfbed --- /dev/null +++ b/src/functions/map/index.ts @@ -0,0 +1,29 @@ +import { Transform } from "stream"; +import { TransformOptions } from "../definitions"; +/** + * Return a ReadWrite stream that maps streamed chunks + * @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such) + * @param options + * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects + * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects + */ +export function map( + mapper: (chunk: T, encoding: string) => R, + options: TransformOptions = { + readableObjectMode: true, + writableObjectMode: true, + }, +): Transform { + return new Transform({ + ...options, + async transform(chunk: T, encoding, callback) { + try { + const mapped = await mapper(chunk, encoding); + this.push(mapped); + callback(); + } catch (err) { + callback(err); + } + }, + }); +} diff --git a/src/functions/map/map.spec.ts b/src/functions/map/map.spec.ts new file mode 100644 index 0000000..2812503 --- /dev/null +++ b/src/functions/map/map.spec.ts @@ -0,0 +1,107 @@ +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { map } from "."; + +test.cb("map() maps elements synchronously", t => { + t.plan(3); + const source = new Readable({ objectMode: true }); + const expectedElements = ["A", "B", "C"]; + let i = 0; + source + .pipe(map((element: string) => element.toUpperCase())) + .on("data", (element: string) => { + expect(element).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + 
.on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); +}); + +test.cb("map() maps elements asynchronously", t => { + t.plan(3); + const source = new Readable({ objectMode: true }); + const expectedElements = ["A", "B", "C"]; + let i = 0; + source + .pipe( + map(async (element: string) => { + await Promise.resolve(); + return element.toUpperCase(); + }), + ) + .on("data", (element: string) => { + expect(element).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); +}); + +test.cb("map() emits errors during synchronous mapping", t => { + t.plan(2); + const source = new Readable({ objectMode: true }); + source + .pipe( + map((element: string) => { + if (element !== "a") { + throw new Error("Failed mapping"); + } + return element.toUpperCase(); + }), + ) + .resume() + .on("error", err => { + expect(err.message).to.equal("Failed mapping"); + t.pass(); + }) + .on("end", t.end); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); +}); + +test("map() emits errors during asynchronous mapping", t => { + t.plan(1); + return new Promise((resolve, reject) => { + const source = new Readable({ objectMode: true }); + source + .pipe( + map(async (element: string) => { + await Promise.resolve(); + if (element !== "a") { + throw new Error("Failed mapping"); + } + return element.toUpperCase(); + }), + ) + .resume() + .on("error", err => { + expect(err.message).to.equal("Failed mapping"); + t.pass(); + resolve(); + }) + .on("end", () => { + t.fail(); + }); + + source.push("a"); + source.push("b"); + source.push("c"); + source.push(null); + }); +}); diff --git a/src/functions/merge/index.ts b/src/functions/merge/index.ts new file mode 100644 index 0000000..7166006 --- /dev/null +++ b/src/functions/merge/index.ts @@ -0,0 +1,36 @@ +import { Readable } from "stream"; +/** + * Return a Readable stream of readable streams merged together in chunk arrival order + * @param streams Readable streams to merge + */ +export function merge(...streams: Readable[]): Readable { + let isStarted = false; + let streamEndedCount = 0; + return new Readable({ + objectMode: true, + read() { + if (streamEndedCount >= streams.length) { + this.push(null); + } else if (!isStarted) { + isStarted = true; + streams.forEach(stream => + stream + .on("data", chunk => { + if (!this.push(chunk)) { + streams.forEach(s => s.pause()); + } + }) + .on("error", err => this.emit("error", err)) + .on("end", () => { + streamEndedCount++; + if (streamEndedCount === streams.length) { + this.push(null); + } + }), + ); + } else { + streams.forEach(s => s.resume()); + } + }, + }); +} diff --git a/src/functions/parallelMap/index.ts b/src/functions/parallelMap/index.ts new file mode 100644 index 0000000..ec82f35 --- /dev/null +++ b/src/functions/parallelMap/index.ts @@ -0,0 +1,44 @@ +import { Transform } from "stream"; +import { sleep } from "../../helpers"; +import { TransformOptions } from "../definitions"; +/** + * Limits number of parallel processes in flight. + * @param parallel Max number of parallel processes. + * @param func Function to execute on each data chunk + * @param pause Amount of time to pause processing when max number of parallel processes are executing. 
+ */
+export function parallelMap<T, R>(
+    mapper: (data: T) => R,
+    parallel: number = 10,
+    sleepTime: number = 5,
+    options: TransformOptions = {
+        readableObjectMode: true,
+        writableObjectMode: true,
+    },
+) {
+    let inflight = 0;
+    return new Transform({
+        ...options,
+        async transform(data, encoding, callback) {
+            while (parallel <= inflight) {
+                await sleep(sleepTime);
+            }
+            inflight += 1;
+            callback();
+            try {
+                const res = await mapper(data);
+                this.push(res);
+            } catch (e) {
+                this.emit("error", e);
+            } finally {
+                inflight -= 1;
+            }
+        },
+        async flush(callback) {
+            while (inflight > 0) {
+                await sleep(sleepTime);
+            }
+            callback();
+        },
+    });
+}
diff --git a/src/functions/parse/index.ts b/src/functions/parse/index.ts
new file mode 100644
index 0000000..1e32cb2
--- /dev/null
+++ b/src/functions/parse/index.ts
@@ -0,0 +1,26 @@
+import { Transform } from "stream";
+import { StringDecoder } from "string_decoder";
+import { SerializationFormats } from "../definitions";
+/**
+ * Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk
+ * must be a fully defined JSON string.
+ * @param format Format of serialized data, only utf8 supported.
+ */
+export function parse(
+    format: SerializationFormats = SerializationFormats.utf8,
+): NodeJS.ReadWriteStream {
+    const decoder = new StringDecoder(format);
+    return new Transform({
+        readableObjectMode: true,
+        writableObjectMode: true,
+        async transform(chunk: Buffer, encoding, callback) {
+            try {
+                const asString = decoder.write(chunk);
+                // Using await causes parsing errors to be emitted
+                callback(undefined, await JSON.parse(asString));
+            } catch (err) {
+                callback(err);
+            }
+        },
+    });
+}
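// Usage sketch for parse(): every written chunk must already be one complete
// JSON document (pair it with split("\n") upstream for newline-delimited JSON).
import { Readable } from "stream";
import { parse } from ".";

const jsonSource = new Readable({ read() {} }); // binary mode: chunks arrive as Buffers
jsonSource
    .pipe(parse())
    .on("data", obj => console.log(obj)); // { n: 1 }, then { n: 2 }
jsonSource.push('{"n":1}');
jsonSource.push('{"n":2}');
jsonSource.push(null);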
diff --git a/src/functions/rate/index.ts b/src/functions/rate/index.ts
new file mode 100644
index 0000000..5e88950
--- /dev/null
+++ b/src/functions/rate/index.ts
@@ -0,0 +1,32 @@
+import { Transform } from "stream";
+import { performance } from "perf_hooks";
+import { sleep } from "../../helpers";
+import { TransformOptions } from "../definitions";
+/**
+ * Limits the rate of data transferred through the stream.
+ * @param targetRate Target rate in chunks per second
+ * @param period Period to sleep for when the rate is at or above targetRate
+ */
+export function rate(
+    targetRate: number = 50,
+    period: number = 2,
+    options: TransformOptions = {
+        readableObjectMode: true,
+        writableObjectMode: true,
+    },
+) {
+    const deltaMS = ((1 / targetRate) * 1000) / period; // Skip half a period
+    let total = 0;
+    const start = performance.now();
+    return new Transform({
+        ...options,
+        async transform(data, encoding, callback) {
+            const currentRate = (total / (performance.now() - start)) * 1000;
+            if (targetRate && currentRate > targetRate) {
+                await sleep(deltaMS);
+            }
+            total += 1;
+            callback(undefined, data);
+        },
+    });
+}
diff --git a/src/functions/reduce/index.ts b/src/functions/reduce/index.ts
new file mode 100644
index 0000000..f7654fb
--- /dev/null
+++ b/src/functions/reduce/index.ts
@@ -0,0 +1,57 @@
+import { Transform } from "stream";
+import { TransformOptions } from "../definitions";
+/**
+ * Return a ReadWrite stream that reduces streamed chunks down to a single value and yields that
+ * value
+ * @param iteratee Reducer function to apply on each streamed chunk
+ * @param initialValue Initial value
+ * @param options
+ * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects
+ * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects
+ */
+export function reduce<T, R>(
+    iteratee:
+        | ((previousValue: R, chunk: T, encoding: string) => R)
+        | ((previousValue: R, chunk: T, encoding: string) => Promise<R>),
+    initialValue: R,
+    options: TransformOptions = {
+        readableObjectMode: true,
+        writableObjectMode: true,
+    },
+) {
+    let value = initialValue;
+    return new Transform({
+        readableObjectMode: options.readableObjectMode,
+        writableObjectMode: options.writableObjectMode,
+        async transform(chunk: T, encoding, callback) {
+            let isPromise = false;
+            try {
+                const result = iteratee(value, chunk, encoding);
+                isPromise = result instanceof Promise;
+                value = await result;
+                callback();
+            } catch (err) {
+                if (isPromise) {
+                    // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly
+                    this.emit("error", err);
+                    callback();
+                } else {
+                    callback(err);
+                }
+            }
+        },
+        flush(callback) {
+            // Best effort attempt at yielding the final value (will throw if e.g.
yielding an object and + // downstream doesn't expect objects) + try { + callback(undefined, value); + } catch (err) { + try { + this.emit("error", err); + } catch { + // Best effort was made + } + } + }, + }); +} diff --git a/src/functions/reduce/reduce.spec.ts b/src/functions/reduce/reduce.spec.ts new file mode 100644 index 0000000..c01a51e --- /dev/null +++ b/src/functions/reduce/reduce.spec.ts @@ -0,0 +1,98 @@ +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { reduce } from "."; + +test.cb("reduce() reduces elements synchronously", t => { + t.plan(1); + const source = new Readable({ objectMode: true }); + const expectedValue = 6; + source + .pipe(reduce((acc: number, element: string) => acc + element.length, 0)) + .on("data", (element: string) => { + expect(element).to.equal(expectedValue); + t.pass(); + }) + .on("error", t.end) + .on("end", t.end); + + source.push("ab"); + source.push("cd"); + source.push("ef"); + source.push(null); +}); + +test.cb("reduce() reduces elements asynchronously", t => { + t.plan(1); + const source = new Readable({ objectMode: true }); + const expectedValue = 6; + source + .pipe( + reduce(async (acc: number, element: string) => { + await Promise.resolve(); + return acc + element.length; + }, 0), + ) + .on("data", (element: string) => { + expect(element).to.equal(expectedValue); + t.pass(); + }) + .on("error", t.end) + .on("end", t.end); + + source.push("ab"); + source.push("cd"); + source.push("ef"); + source.push(null); +}); + +test.cb("reduce() emits errors during synchronous reduce", t => { + t.plan(2); + const source = new Readable({ objectMode: true }); + source + .pipe( + reduce((acc: number, element: string) => { + if (element !== "ab") { + throw new Error("Failed reduce"); + } + return acc + element.length; + }, 0), + ) + .resume() + .on("error", err => { + expect(err.message).to.equal("Failed reduce"); + t.pass(); + }) + .on("end", t.end); + + source.push("ab"); + source.push("cd"); + source.push("ef"); + source.push(null); +}); + +test.cb("reduce() emits errors during asynchronous reduce", t => { + t.plan(2); + const source = new Readable({ objectMode: true }); + source + .pipe( + reduce(async (acc: number, element: string) => { + await Promise.resolve(); + if (element !== "ab") { + throw new Error("Failed mapping"); + } + return acc + element.length; + }, 0), + ) + .resume() + .on("error", err => { + expect(err.message).to.equal("Failed mapping"); + t.pass(); + }) + .on("end", t.end); + + source.push("ab"); + source.push("cd"); + source.push("ef"); + source.push(null); +}); diff --git a/src/functions/replace/index.ts b/src/functions/replace/index.ts new file mode 100644 index 0000000..462103c --- /dev/null +++ b/src/functions/replace/index.ts @@ -0,0 +1,33 @@ +import { Transform } from "stream"; +import { StringDecoder } from "string_decoder"; +import { WithEncoding } from "../definitions"; +/** + * Return a ReadWrite stream that replaces occurrences of the given string or regular expression in + * the streamed chunks with the specified replacement string + * @param searchValue Search string to use + * @param replaceValue Replacement string to use + * @param options + * @param options.encoding Encoding written chunks are assumed to use + */ +export function replace( + searchValue: string | RegExp, + replaceValue: string, + options: WithEncoding = { encoding: "utf8" }, +): NodeJS.ReadWriteStream { + const decoder = new StringDecoder(options.encoding); + return new Transform({ + 
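// Usage sketch for reduce(): only the accumulator is held in memory, and a
// single value is pushed downstream on flush, as in the specs above.
import { Readable } from "stream";
import { reduce } from ".";

const reduceSource = new Readable({ objectMode: true, read() {} });
reduceSource
    .pipe(reduce((acc: number, s: string) => acc + s.length, 0))
    .on("data", total => console.log(total)); // 6
["ab", "cd", "ef"].forEach(chunk => reduceSource.push(chunk));
reduceSource.push(null);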
readableObjectMode: true, + transform(chunk: Buffer, encoding, callback) { + const asString = decoder.write(chunk); + // Take care not to break up multi-byte characters spanning multiple chunks + if (asString !== "" || chunk.length === 0) { + callback( + undefined, + asString.replace(searchValue, replaceValue), + ); + } else { + callback(); + } + }, + }); +} diff --git a/src/functions/replace/replace.spec.ts b/src/functions/replace/replace.spec.ts new file mode 100644 index 0000000..a36642c --- /dev/null +++ b/src/functions/replace/replace.spec.ts @@ -0,0 +1,80 @@ +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { replace } from "."; + +test.cb( + "replace() replaces occurrences of the given string in the streamed elements with the specified " + + "replacement string", + t => { + t.plan(3); + const source = new Readable({ objectMode: true }); + const expectedElements = ["abc", "xyf", "ghi"]; + let i = 0; + source + .pipe(replace("de", "xy")) + .on("data", part => { + expect(part).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("abc"); + source.push("def"); + source.push("ghi"); + source.push(null); + }, +); + +test.cb( + "replace() replaces occurrences of the given regular expression in the streamed elements with " + + "the specified replacement string", + t => { + t.plan(3); + const source = new Readable({ objectMode: true }); + const expectedElements = ["abc", "xyz", "ghi"]; + let i = 0; + source + .pipe(replace(/^def$/, "xyz")) + .on("data", part => { + expect(part).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("abc"); + source.push("def"); + source.push("ghi"); + source.push(null); + }, +); + +test.cb( + "replace() replaces occurrences of the given multi-byte character even if it spans multiple chunks", + t => { + t.plan(3); + const source = new Readable({ objectMode: true }); + const expectedElements = ["ø", "O", "a"]; + let i = 0; + source + .pipe(replace("ö", "O")) + .on("data", part => { + expect(part).to.equal(expectedElements[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push(Buffer.from("ø").slice(0, 1)); // 2-byte character spanning two chunks + source.push(Buffer.from("ø").slice(1, 2)); + source.push(Buffer.from("ö").slice(0, 1)); // 2-byte character spanning two chunks + source.push(Buffer.from("ö").slice(1, 2)); + source.push("a"); + source.push(null); + }, +); diff --git a/src/functions/split/index.ts b/src/functions/split/index.ts new file mode 100644 index 0000000..fb0f319 --- /dev/null +++ b/src/functions/split/index.ts @@ -0,0 +1,34 @@ +import { Transform } from "stream"; +import { StringDecoder } from "string_decoder"; +import { WithEncoding } from "../definitions"; +/** + * Return a ReadWrite stream that splits streamed chunks using the given separator + * @param separator Separator to split by, defaulting to "\n" + * @param options + * @param options.encoding Encoding written chunks are assumed to use + */ +export function split( + separator: string | RegExp = "\n", + options: WithEncoding = { encoding: "utf8" }, +): NodeJS.ReadWriteStream { + let buffered = ""; + const decoder = new StringDecoder(options.encoding); + + return new Transform({ + readableObjectMode: true, + transform(chunk: Buffer, encoding, callback) { + const asString = decoder.write(chunk); + const splitted = asString.split(separator); + if (splitted.length > 1) { + splitted[0] = 
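// Usage sketch for replace(); StringDecoder keeps multi-byte characters that
// straddle chunk boundaries intact, while the search runs against each decoded chunk.
import { Readable } from "stream";
import { replace } from ".";

const replaceSource = new Readable({ objectMode: true, read() {} });
replaceSource
    .pipe(replace("colour", "color"))
    .on("data", chunk => console.log(chunk)); // "color me surprised"
replaceSource.push("colour me surprised");
replaceSource.push(null);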
buffered.concat(splitted[0]); + buffered = ""; + } + buffered += splitted[splitted.length - 1]; + splitted.slice(0, -1).forEach((part: string) => this.push(part)); + callback(); + }, + flush(callback) { + callback(undefined, buffered + decoder.end()); + }, + }); +} diff --git a/src/functions/split/split.spec.ts b/src/functions/split/split.spec.ts new file mode 100644 index 0000000..9e909f3 --- /dev/null +++ b/src/functions/split/split.spec.ts @@ -0,0 +1,98 @@ +import { Readable } from "stream"; +import test from "ava"; +import { expect } from "chai"; +import { split } from "."; + +test.cb("split() splits chunks using the default separator (\\n)", t => { + t.plan(5); + const source = new Readable({ objectMode: true }); + const expectedParts = ["ab", "c", "d", "ef", ""]; + let i = 0; + source + .pipe(split()) + .on("data", part => { + expect(part).to.equal(expectedParts[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("ab\n"); + source.push("c"); + source.push("\n"); + source.push("d"); + source.push("\nef\n"); + source.push(null); +}); + +test.cb("split() splits chunks using the specified separator", t => { + t.plan(6); + const source = new Readable({ objectMode: true }); + const expectedParts = ["ab", "c", "d", "e", "f", ""]; + let i = 0; + source + .pipe(split("|")) + .on("data", (part: string) => { + expect(part).to.equal(expectedParts[i]); + t.pass(); + i++; + }) + .on("error", t.end) + .on("end", t.end); + + source.push("ab|"); + source.push("c|d"); + source.push("|"); + source.push("e"); + source.push("|f|"); + source.push(null); +}); + +test.cb( + "split() splits utf8 encoded buffers using the specified separator", + t => { + t.plan(3); + const expectedElements = ["a", "b", "c"]; + let i = 0; + const through = split(","); + const buf = Buffer.from("a,b,c"); + through + .on("data", element => { + expect(element).to.equal(expectedElements[i]); + i++; + t.pass(); + }) + .on("error", t.end) + .on("end", t.end); + + for (let j = 0; j < buf.length; ++j) { + through.write(buf.slice(j, j + 1)); + } + through.end(); + }, +); + +test.cb( + "split() splits utf8 encoded buffers with multi-byte characters using the specified separator", + t => { + t.plan(3); + const expectedElements = ["一", "一", "一"]; + let i = 0; + const through = split(","); + const buf = Buffer.from("一,一,一"); // Those spaces are multi-byte utf8 characters (code: 4E00) + through + .on("data", element => { + expect(element).to.equal(expectedElements[i]); + i++; + t.pass(); + }) + .on("error", t.end) + .on("end", t.end); + + for (let j = 0; j < buf.length; ++j) { + through.write(buf.slice(j, j + 1)); + } + through.end(); + }, +); diff --git a/src/functions/stringify/index.ts b/src/functions/stringify/index.ts new file mode 100644 index 0000000..3ac5a6f --- /dev/null +++ b/src/functions/stringify/index.ts @@ -0,0 +1,22 @@ +import { Transform } from "stream"; +import { JsonValue, JsonParseOptions } from "../definitions"; + +/** + * Return a ReadWrite stream that stringifies the streamed chunks to JSON + */ +export function stringify( + options: JsonParseOptions = { pretty: false }, +): NodeJS.ReadWriteStream { + return new Transform({ + readableObjectMode: true, + writableObjectMode: true, + transform(chunk: JsonValue, encoding, callback) { + callback( + undefined, + options.pretty + ? 
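// Usage sketch for split(): the text after the last separator is buffered and
// prepended to the next chunk, so no partial part is ever emitted.
import { Readable } from "stream";
import { split } from ".";

const splitSource = new Readable({ objectMode: true, read() {} });
splitSource
    .pipe(split(","))
    .on("data", part => console.log(part)); // "a", "b", then "c" on flush
splitSource.push("a,b");
splitSource.push(",c");
splitSource.push(null);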
JSON.stringify(chunk, null, 2) + : JSON.stringify(chunk), + ); + }, + }); +} diff --git a/src/functions/unbatch/index.ts b/src/functions/unbatch/index.ts new file mode 100644 index 0000000..b0dd51c --- /dev/null +++ b/src/functions/unbatch/index.ts @@ -0,0 +1,21 @@ +import { Transform } from "stream"; +import { TransformOptions } from "../definitions"; +/** + * Unbatches and sends individual chunks of data + */ +export function unbatch( + options: TransformOptions = { + readableObjectMode: true, + writableObjectMode: true, + }, +) { + return new Transform({ + ...options, + transform(data, encoding, callback) { + for (const d of data) { + this.push(d); + } + callback(); + }, + }); +} diff --git a/tsconfig.json b/tsconfig.json index 8df64fa..4a3d25c 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -3,14 +3,14 @@ "noImplicitAny": true, "strictNullChecks": true, "noImplicitReturns": true, - "noUnusedLocals": true, + "noUnusedLocals": false, "noImplicitThis": true, "forceConsistentCasingInFileNames": true, "suppressImplicitAnyIndexErrors": true, "outDir": "./dist", "module": "commonjs", "target": "es5", - "lib": ["es2016"], + "lib": ["es2016", "es2019"], "sourceMap": true, "declaration": true }, diff --git a/yarn.lock b/yarn.lock index e0b7aca..ee57991 100644 --- a/yarn.lock +++ b/yarn.lock @@ -37,41 +37,41 @@ imurmurhash "^0.1.4" slide "^1.1.5" -"@babel/code-frame@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.0.0.tgz#06e2ab19bdb535385559aabb5ba59729482800f8" - integrity sha512-OfC2uemaknXr87bdLUkWog7nYuliM9Ij5HUcajsVcMCpQrcLmtxRbVFTIqmcSkSeYRBFBRxs2FiUqFJDLdiebA== +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.5.5": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.5.5.tgz#bc0782f6d69f7b7d49531219699b988f669a8f9d" + integrity sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw== dependencies: "@babel/highlight" "^7.0.0" "@babel/core@^7.4.0": - version "7.5.0" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.5.0.tgz#6ed6a2881ad48a732c5433096d96d1b0ee5eb734" - integrity sha512-6Isr4X98pwXqHvtigw71CKgmhL1etZjPs5A67jL/w0TkLM9eqmFR40YrnJvEc1WnMZFsskjsmid8bHZyxKEAnw== + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.5.5.tgz#17b2686ef0d6bc58f963dddd68ab669755582c30" + integrity sha512-i4qoSr2KTtce0DmkuuQBV4AuQgGPUcPXMr9L5MyYAtk06z068lQ10a4O009fe5OB/DfNV+h+qqT7ddNV8UnRjg== dependencies: - "@babel/code-frame" "^7.0.0" - "@babel/generator" "^7.5.0" - "@babel/helpers" "^7.5.0" - "@babel/parser" "^7.5.0" + "@babel/code-frame" "^7.5.5" + "@babel/generator" "^7.5.5" + "@babel/helpers" "^7.5.5" + "@babel/parser" "^7.5.5" "@babel/template" "^7.4.4" - "@babel/traverse" "^7.5.0" - "@babel/types" "^7.5.0" + "@babel/traverse" "^7.5.5" + "@babel/types" "^7.5.5" convert-source-map "^1.1.0" debug "^4.1.0" json5 "^2.1.0" - lodash "^4.17.11" + lodash "^4.17.13" resolve "^1.3.2" semver "^5.4.1" source-map "^0.5.0" -"@babel/generator@^7.0.0", "@babel/generator@^7.4.0", "@babel/generator@^7.5.0": - version "7.5.0" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.5.0.tgz#f20e4b7a91750ee8b63656073d843d2a736dca4a" - integrity sha512-1TTVrt7J9rcG5PMjvO7VEG3FrEoEJNHxumRq66GemPmzboLWtIjjcJgk8rokuAS7IiRSpgVSu5Vb9lc99iJkOA== +"@babel/generator@^7.0.0", "@babel/generator@^7.4.0", "@babel/generator@^7.5.5": + version "7.5.5" + resolved 
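// Usage sketch for stringify(), the write-side complement of parse(); the pretty
// flag switches to two-space-indented output.
import { Readable } from "stream";
import { stringify } from ".";

const objSource = new Readable({ objectMode: true, read() {} });
objSource
    .pipe(stringify({ pretty: true }))
    .on("data", json => console.log(json)); // '{\n  "a": 1\n}'
objSource.push({ a: 1 });
objSource.push(null);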
"https://registry.yarnpkg.com/@babel/generator/-/generator-7.5.5.tgz#873a7f936a3c89491b43536d12245b626664e3cf" + integrity sha512-ETI/4vyTSxTzGnU2c49XHv2zhExkv9JHLTwDAFz85kmcwuShvYG2H08FwgIguQf4JC75CBnXAUM5PqeF4fj0nQ== dependencies: - "@babel/types" "^7.5.0" + "@babel/types" "^7.5.5" jsesc "^2.5.1" - lodash "^4.17.11" + lodash "^4.17.13" source-map "^0.5.0" trim-right "^1.0.1" @@ -122,16 +122,16 @@ "@babel/types" "^7.0.0" "@babel/helper-module-transforms@^7.4.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.4.4.tgz#96115ea42a2f139e619e98ed46df6019b94414b8" - integrity sha512-3Z1yp8TVQf+B4ynN7WoHPKS8EkdTbgAEy0nU0rs/1Kw4pDgmvYH3rz3aI11KgxKCba2cn7N+tqzV1mY2HMN96w== + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.5.5.tgz#f84ff8a09038dcbca1fd4355661a500937165b4a" + integrity sha512-jBeCvETKuJqeiaCdyaheF40aXnnU1+wkSiUs/IQg3tB85up1LyL8x77ClY8qJpuRJUcXQo+ZtdNESmZl4j56Pw== dependencies: "@babel/helper-module-imports" "^7.0.0" "@babel/helper-simple-access" "^7.1.0" "@babel/helper-split-export-declaration" "^7.4.4" "@babel/template" "^7.4.4" - "@babel/types" "^7.4.4" - lodash "^4.17.11" + "@babel/types" "^7.5.5" + lodash "^4.17.13" "@babel/helper-plugin-utils@^7.0.0": version "7.0.0" @@ -139,11 +139,11 @@ integrity sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA== "@babel/helper-regex@^7.4.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.4.4.tgz#a47e02bc91fb259d2e6727c2a30013e3ac13c4a2" - integrity sha512-Y5nuB/kESmR3tKjU8Nkn1wMGEx1tjJX076HBMeL3XLQCu6vA/YRzuTW0bbb+qRnXvQGn+d6Rx953yffl8vEy7Q== + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.5.5.tgz#0aa6824f7100a2e0e89c1527c23936c152cab351" + integrity sha512-CkCYQLkfkiugbRDO8eZn6lRuR8kzZoGXCg3149iTk5se7g6qykSpy3+hELSwquhu+TgHn8nkLiBwHvNX8Hofcw== dependencies: - lodash "^4.17.11" + lodash "^4.17.13" "@babel/helper-remap-async-to-generator@^7.1.0": version "7.1.0" @@ -181,14 +181,14 @@ "@babel/traverse" "^7.1.0" "@babel/types" "^7.2.0" -"@babel/helpers@^7.5.0": - version "7.5.1" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.5.1.tgz#65407c741a56ddd59dd86346cd112da3de912db3" - integrity sha512-rVOTDv8sH8kNI72Unenusxw6u+1vEepZgLxeV+jHkhsQlYhzVhzL1EpfoWT7Ub3zpWSv2WV03V853dqsnyoQzA== +"@babel/helpers@^7.5.5": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.5.5.tgz#63908d2a73942229d1e6685bc2a0e730dde3b75e" + integrity sha512-nRq2BUhxZFnfEn/ciJuhklHvFOqjJUD5wpx+1bxUF2axL9C+v4DE/dmp5sT2dKnpOs4orZWzpAZqlCy8QqE/7g== dependencies: "@babel/template" "^7.4.4" - "@babel/traverse" "^7.5.0" - "@babel/types" "^7.5.0" + "@babel/traverse" "^7.5.5" + "@babel/types" "^7.5.5" "@babel/highlight@^7.0.0": version "7.5.0" @@ -199,10 +199,10 @@ esutils "^2.0.2" js-tokens "^4.0.0" -"@babel/parser@^7.0.0", "@babel/parser@^7.4.4", "@babel/parser@^7.5.0": - version "7.5.0" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.5.0.tgz#3e0713dff89ad6ae37faec3b29dcfc5c979770b7" - integrity sha512-I5nW8AhGpOXGCCNYGc+p7ExQIBxRFnS2fd/d862bNOKvmoEPjYPcfIjsfdy0ujagYOIYPczKgD9l3FsgTkAzKA== +"@babel/parser@^7.0.0", "@babel/parser@^7.4.4", "@babel/parser@^7.5.5": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.5.5.tgz#02f077ac8817d3df4a832ef59de67565e71cca4b" + integrity 
sha512-E5BN68cqR7dhKan1SfqgPGhQ178bkVKpXTPEXnFJBrEt8/DKRZlybmy+IgYLTeN7tp1R5Ccmbm2rBk17sHYU3g== "@babel/plugin-proposal-async-generator-functions@^7.0.0": version "7.2.0" @@ -214,9 +214,9 @@ "@babel/plugin-syntax-async-generators" "^7.2.0" "@babel/plugin-proposal-object-rest-spread@^7.0.0": - version "7.5.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.5.1.tgz#5788ab097c63135e4236548b4f112bfce09dd394" - integrity sha512-PVGXx5LYHcT7L4MdoE+rM5uq68IKlvU9lljVQ4OXY6aUEnGvezcGbM4VNY57Ug+3R2Zg/nYHlEdiWoIBoRA0mw== + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.5.5.tgz#61939744f71ba76a3ae46b5eea18a54c16d22e58" + integrity sha512-F2DxJJSQ7f64FyTVl5cw/9MWn6naXGdk3Q3UhDbFEEHv+EilCPoeRD3Zh/Utx1CJz4uyKlQ4uH+bJPbEhMV7Zw== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@babel/plugin-syntax-object-rest-spread" "^7.2.0" @@ -295,28 +295,28 @@ "@babel/parser" "^7.4.4" "@babel/types" "^7.4.4" -"@babel/traverse@^7.1.0", "@babel/traverse@^7.5.0": - version "7.5.0" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.5.0.tgz#4216d6586854ef5c3c4592dab56ec7eb78485485" - integrity sha512-SnA9aLbyOCcnnbQEGwdfBggnc142h/rbqqsXcaATj2hZcegCl903pUD/lfpsNBlBSuWow/YDfRyJuWi2EPR5cg== +"@babel/traverse@^7.1.0", "@babel/traverse@^7.5.5": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.5.5.tgz#f664f8f368ed32988cd648da9f72d5ca70f165bb" + integrity sha512-MqB0782whsfffYfSjH4TM+LMjrJnhCNEDMDIjeTpl+ASaUvxcjoiVCo/sM1GhS1pHOXYfWVCYneLjMckuUxDaQ== dependencies: - "@babel/code-frame" "^7.0.0" - "@babel/generator" "^7.5.0" + "@babel/code-frame" "^7.5.5" + "@babel/generator" "^7.5.5" "@babel/helper-function-name" "^7.1.0" "@babel/helper-split-export-declaration" "^7.4.4" - "@babel/parser" "^7.5.0" - "@babel/types" "^7.5.0" + "@babel/parser" "^7.5.5" + "@babel/types" "^7.5.5" debug "^4.1.0" globals "^11.1.0" - lodash "^4.17.11" + lodash "^4.17.13" -"@babel/types@^7.0.0", "@babel/types@^7.2.0", "@babel/types@^7.4.4", "@babel/types@^7.5.0": - version "7.5.0" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.5.0.tgz#e47d43840c2e7f9105bc4d3a2c371b4d0c7832ab" - integrity sha512-UFpDVqRABKsW01bvw7/wSUe56uy6RXM5+VJibVVAybDGxEW25jdwiFJEf7ASvSaC7sN7rbE/l3cLp2izav+CtQ== +"@babel/types@^7.0.0", "@babel/types@^7.2.0", "@babel/types@^7.4.4", "@babel/types@^7.5.5": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.5.5.tgz#97b9f728e182785909aa4ab56264f090a028d18a" + integrity sha512-s63F9nJioLqOlW3UkyMd+BYhXt44YuaFm/VV0VwuteqjYwRrObkU7ra9pY4wAJR3oXi8hJrMcrcJdO/HH33vtw== dependencies: esutils "^2.0.2" - lodash "^4.17.11" + lodash "^4.17.13" to-fast-properties "^2.0.0" "@concordance/react@^2.0.0": @@ -327,9 +327,9 @@ arrify "^1.0.1" "@types/chai@^4.1.7": - version "4.1.7" - resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.1.7.tgz#1b8e33b61a8c09cbe1f85133071baa0dbf9fa71a" - integrity sha512-2Y8uPt0/jwjhQ6EiluT0XCri1Dbplr0ZxfFXUz+ye13gaqE8u5gL5ppao1JrUYr9cIip5S6MvQzBS7Kke7U9VA== + version "4.2.0" + resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.2.0.tgz#2478260021408dec32c123a7cad3414beb811a07" + integrity sha512-zw8UvoBEImn392tLjxoavuonblX/4Yb9ha4KBU10FirCfwgzhKO0dvyJSF9ByxV1xK1r2AgnAi/tvQaLgxQqxA== "@types/events@*": version "3.0.0" @@ -350,22 +350,10 @@ resolved 
"https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d" integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== -"@types/node@*": - version "12.0.12" - resolved "https://registry.yarnpkg.com/@types/node/-/node-12.0.12.tgz#cc791b402360db1eaf7176479072f91ee6c6c7ca" - integrity sha512-Uy0PN4R5vgBUXFoJrKryf5aTk3kJ8Rv3PdlHjl6UaX+Cqp1QE0yPQ68MPXGrZOfG7gZVNDIJZYyot0B9ubXUrQ== - -"@types/node@^10.12.10": - version "10.14.12" - resolved "https://registry.yarnpkg.com/@types/node/-/node-10.14.12.tgz#0eec3155a46e6c4db1f27c3e588a205f767d622f" - integrity sha512-QcAKpaO6nhHLlxWBvpc4WeLrTvPqlHOvaj0s5GriKkA1zq+bsFBPpfYCvQhLqLgYlIko8A9YrPdaMHCo5mBcpg== - -"@types/typescript@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@types/typescript/-/typescript-2.0.0.tgz#c433539c98bae28682b307eaa7a0fd2115b83c28" - integrity sha1-xDNTnJi64oaCswfqp6D9IRW4PCg= - dependencies: - typescript "*" +"@types/node@*", "@types/node@^12.7.2": + version "12.7.2" + resolved "https://registry.yarnpkg.com/@types/node/-/node-12.7.2.tgz#c4e63af5e8823ce9cc3f0b34f7b998c2171f0c44" + integrity sha512-dyYO+f6ihZEtNPDcWNR1fkoTDf3zAK3lAABDze3mz6POyIercH0lEUawUFXlG8xaQZmm1yEBON/4TsYv/laDYg== abbrev@1: version "1.1.1" @@ -427,6 +415,11 @@ are-we-there-yet@~1.1.2: delegates "^1.0.0" readable-stream "^2.0.6" +arg@^4.1.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.1.tgz#485f8e7c390ce4c5f78257dbea80d4be11feda4c" + integrity sha512-SlmP3fEA88MBv0PypnXZ8ZfJhwmDeIE3SP71j37AiXQBXYosPV0x6uISAaHYSlSVhmHOVkomen0tbGk6Anlebw== + argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" @@ -678,7 +671,7 @@ braces@^2.3.1, braces@^2.3.2: split-string "^3.0.2" to-regex "^3.0.1" -buffer-from@^1.0.0, buffer-from@^1.1.0: +buffer-from@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== @@ -818,9 +811,9 @@ class-utils@^0.3.5: static-extend "^0.1.1" clean-stack@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.1.0.tgz#9e7fec7f3f8340a2ab4f127c80273085e8fbbdd0" - integrity sha512-uQWrpRm+iZZUCAp7ZZJQbd4Za9I3AjR/3YTjmcnAtkauaIm/T5CT6U8zVI6e60T6OANqBFAzuR9/HB3NzuZCRA== + version "2.2.0" + resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" + integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== clean-yaml-object@^0.1.0: version "0.1.0" @@ -1120,11 +1113,16 @@ detect-libc@^1.0.2: resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= -diff@^3.1.0, diff@^3.2.0: +diff@^3.2.0: version "3.5.0" resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12" integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA== +diff@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.1.tgz#0c667cb467ebbb5cea7f14f135cc2dba7780a8ff" + integrity sha512-s2+XdvhPCOF01LRQBC8hf4vhbVmI2CGS5aZnxLJlT5FtdhPCDFq80q++zK2KlrVorVDdL5BOGZ/VfLrVtYNF+Q== + dir-glob@^2.0.0: version "2.2.2" resolved 
"https://registry.yarnpkg.com/dir-glob/-/dir-glob-2.2.2.tgz#fa09f0694153c8918b18ba0deafae94769fc50c4" @@ -1215,14 +1213,14 @@ espurify@^1.6.0: core-js "^2.0.0" estraverse@^4.0.0, estraverse@^4.1.1: - version "4.2.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13" - integrity sha1-De4/7TH81GlhjOc0IJn8GvoL2xM= + version "4.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== esutils@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b" - integrity sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs= + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== execa@^0.7.0: version "0.7.0" @@ -1459,9 +1457,9 @@ got@^6.7.1: url-parse-lax "^1.0.0" graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2: - version "4.2.0" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.0.tgz#8d8fdc73977cb04104721cb53666c1ca64cd328b" - integrity sha512-jpSvDPV4Cq/bgtpndIWbI5hmYxhQGHPC4d4cqBPb4DLniCfhJokdXhwhaDuLBGLQdvvRum/UiX6ECVIPvDXqdg== + version "4.2.2" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.2.tgz#6f0952605d0140c1cfdb138ed005775b92d67b02" + integrity sha512-IItsdsea19BoLC7ELy13q1iJFNmd7ofZH5+X/pJr90/nRoPEX0DJo1dHDbgtYWOhJhcCgMDTOw84RZ72q6lB+Q== has-flag@^3.0.0: version "3.0.0" @@ -1517,9 +1515,9 @@ hasha@^3.0.0: is-stream "^1.0.1" hosted-git-info@^2.1.4: - version "2.7.1" - resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.7.1.tgz#97f236977bd6e125408930ff6de3eec6281ec047" - integrity sha512-7T/BxH19zbcCTa8XkMlbK5lTo1WtgkFi3GvdWEyNuc4Vex7/9Dqbnpsf4JMydcfj9HCg4zUWFTL3Za6lapg5/w== + version "2.8.4" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.4.tgz#44119abaf4bc64692a16ace34700fed9c03e2546" + integrity sha512-pzXIvANXEFrc5oFFXRMkbLPQ2rXRoDERwDLyrcUxGhaZhgP54BBSl9Oheh7Vv0T090cszWBxPjkQQ5Sq1PbBRQ== iconv-lite@^0.4.4: version "0.4.24" @@ -2009,14 +2007,14 @@ lodash.islength@^4.0.1: integrity sha1-Tpho1FJXXXUK/9NYyXlUPcIO1Xc= lodash.merge@^4.6.1: - version "4.6.1" - resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.1.tgz#adc25d9cb99b9391c59624f379fbba60d7111d54" - integrity sha512-AOYza4+Hf5z1/0Hztxpm2/xiPZgi/cjMqdnKTUWTBSKchJlxXXuUSxCCl8rJlf4g6yww/j6mA8nC8Hw/EZWxKQ== + version "4.6.2" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== -lodash@^4.17.11: - version "4.17.11" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.11.tgz#b39ea6229ef607ecd89e2c8df12536891cac9b8d" - integrity sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg== +lodash@^4.17.13: + version "4.17.15" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548" + integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A== log-symbols@^2.2.0: version "2.2.0" @@ -2702,7 +2700,7 @@ redent@^2.0.0: indent-string 
"^3.0.0" strip-indent "^2.0.0" -regenerate-unicode-properties@^8.0.2: +regenerate-unicode-properties@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz#ef51e0f0ea4ad424b77bf7cb41f3e015c70a3f0e" integrity sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA== @@ -2723,12 +2721,12 @@ regex-not@^1.0.0, regex-not@^1.0.2: safe-regex "^1.1.0" regexpu-core@^4.5.4: - version "4.5.4" - resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.5.4.tgz#080d9d02289aa87fe1667a4f5136bc98a6aebaae" - integrity sha512-BtizvGtFQKGPUcTy56o3nk1bGRp4SZOTYrDtGNlqCQufptV5IkkLN6Emw+yunAJjzf+C9FQFtvq7IoA3+oMYHQ== + version "4.5.5" + resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.5.5.tgz#aaffe61c2af58269b3e516b61a73790376326411" + integrity sha512-FpI67+ky9J+cDizQUJlIlNZFKual/lUkFr1AG6zOCpwZ9cLrg8UUVakyUQJD7fCDIe9Z2nwTQJNPyonatNmDFQ== dependencies: regenerate "^1.4.0" - regenerate-unicode-properties "^8.0.2" + regenerate-unicode-properties "^8.1.0" regjsgen "^0.5.0" regjsparser "^0.6.0" unicode-match-property-ecmascript "^1.0.4" @@ -2806,9 +2804,9 @@ resolve-url@^0.2.1: integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= resolve@^1.10.0, resolve@^1.3.2: - version "1.11.1" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.11.1.tgz#ea10d8110376982fef578df8fc30b9ac30a07a3e" - integrity sha512-vIpgF6wfuJOZI7KKKSP+HmiKggadPQAdsp5HiC1mvqnfp0gF1vdwgBWZIdrVft9pgqoMFQN+R7BSWZiBxx+BBw== + version "1.12.0" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.12.0.tgz#3fc644a35c84a48554609ff26ec52b66fa577df6" + integrity sha512-B/dOmuoAik5bKcD6s6nXDCjzUKnaDvdkRyAk6rsmsKLipWj4797iothd7jmmUhWTfinVMU+wc56rYKsit2Qy4w== dependencies: path-parse "^1.0.6" @@ -2826,9 +2824,9 @@ ret@~0.1.10: integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== rimraf@^2.6.1, rimraf@^2.6.3: - version "2.6.3" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab" - integrity sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA== + version "2.7.1" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" + integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== dependencies: glob "^7.1.3" @@ -2867,9 +2865,9 @@ semver-diff@^2.0.0: semver "^5.0.3" "semver@2 || 3 || 4 || 5", semver@^5.0.3, semver@^5.1.0, semver@^5.3.0, semver@^5.4.1, semver@^5.5.1, semver@^5.6.0: - version "5.7.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.0.tgz#790a7cf6fea5459bac96110b29b60412dc8ff96b" - integrity sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA== + version "5.7.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" + integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== serialize-error@^2.1.0: version "2.1.0" @@ -2971,18 +2969,10 @@ source-map-resolve@^0.5.0: source-map-url "^0.4.0" urix "^0.1.0" -source-map-support@^0.5.11: - version "0.5.12" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.12.tgz#b4f3b10d51857a5af0138d3ce8003b201613d599" - integrity sha512-4h2Pbvyy15EE02G+JOZpUCmqWJuqrs+sEkzewTm++BPi7Hvn/HwcqLAcNxYAyI0x13CpPPn+kMjl+hplXMHITQ== - 
dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - -source-map-support@^0.5.6: - version "0.5.9" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.9.tgz#41bc953b2534267ea2d605bccfa7bfa3111ced5f" - integrity sha512-gR6Rw4MvUlYy83vP0vxoVNzM6t8MUXqNuRsuBmBHQDu1Fh6X015FrLdgoDKcNdkwGubozq0P4N0Q37UyFVr1EA== +source-map-support@^0.5.11, source-map-support@^0.5.6: + version "0.5.13" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932" + integrity sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w== dependencies: buffer-from "^1.0.0" source-map "^0.6.0" @@ -3024,9 +3014,9 @@ spdx-expression-parse@^3.0.0: spdx-license-ids "^3.0.0" spdx-license-ids@^3.0.0: - version "3.0.4" - resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.4.tgz#75ecd1a88de8c184ef015eafb51b5b48bfd11bb1" - integrity sha512-7j8LYJLeY/Yb6ACbQ7F76qy5jHkp0U6jgBfJsk97bwWlVUnUWsAgpyaCvo17h0/RQGnQ036tVDomiwoI4pDkQA== + version "3.0.5" + resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz#3694b5804567a458d3c8045842a6358632f62654" + integrity sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q== split-string@^3.0.1, split-string@^3.0.2: version "3.1.0" @@ -3235,26 +3225,18 @@ trim-right@^1.0.1: resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003" integrity sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM= -ts-node@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-7.0.1.tgz#9562dc2d1e6d248d24bc55f773e3f614337d9baf" - integrity sha512-BVwVbPJRspzNh2yfslyT1PSbl5uIk03EZlb493RKHN4qej/D06n1cEhjlOJG69oFsE7OT8XjpTUcYf6pKTLMhw== +ts-node@^8.3.0: + version "8.3.0" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-8.3.0.tgz#e4059618411371924a1fb5f3b125915f324efb57" + integrity sha512-dyNS/RqyVTDcmNM4NIBAeDMpsAdaQ+ojdf0GOLqE6nwJOgzEkdRNzJywhDfwnuvB10oa6NLVG1rUJQCpRN7qoQ== dependencies: - arrify "^1.0.0" - buffer-from "^1.1.0" - diff "^3.1.0" + arg "^4.1.0" + diff "^4.0.1" make-error "^1.1.1" - minimist "^1.2.0" - mkdirp "^0.5.1" source-map-support "^0.5.6" - yn "^2.0.0" + yn "^3.0.0" -tslib@^1.7.1: - version "1.9.3" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.3.tgz#d7e4dd79245d85428c4d7e4822a79917954ca286" - integrity sha512-4krF8scpejhaOgqzBEcGM7yDIEfi0/8+8zDRZhNZZ2kjmHJ4hv3zCbQWxoJGz1iw5U0Jl0nma13xzHXcncMavQ== - -tslib@^1.8.0, tslib@^1.8.1: +tslib@^1.7.1, tslib@^1.8.0, tslib@^1.8.1: version "1.10.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.10.0.tgz#c3c19f95973fb0a62973fb09d90d961ee43e5c8a" integrity sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ== @@ -3309,11 +3291,6 @@ type-fest@^0.3.0: resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.3.1.tgz#63d00d204e059474fe5e1b7c011112bbd1dc29e1" integrity sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ== -typescript@*: - version "3.5.2" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.2.tgz#a09e1dc69bc9551cadf17dba10ee42cf55e5d56c" - integrity sha512-7KxJovlYhTX5RaRbUdkAXN1KUZ8PwWlTzQdHV6xNqvuFOs7+WBo10TQUqT19Q/Jz2hk5v9TQDIhyLhhJY4p5AA== - typescript@^3.5.3: version "3.5.3" resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" @@ -3490,9 
+3467,9 @@ xdg-basedir@^3.0.0: integrity sha1-SWsswQnsqNus/i3HK2A8F8WHCtQ= xtend@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af" - integrity sha1-pcbVMr5lbiPbgg77lDofBJmNY68= + version "4.0.2" + resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" + integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== yallist@^2.1.2: version "2.1.2" @@ -3511,7 +3488,7 @@ yargs-parser@^10.0.0: dependencies: camelcase "^4.1.0" -yn@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/yn/-/yn-2.0.0.tgz#e5adabc8acf408f6385fc76495684c88e6af689a" - integrity sha1-5a2ryKz0CPY4X8dklWhMiOavaJo= +yn@^3.0.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" + integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==
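[Editor's note: a companion sketch for the unbatch() and stringify() transforms added in this patch, under the same assumption that they are re-exported through the package index; import paths are illustrative only. It shows a batched object stream being flattened back into individual chunks and serialized.]

    import { Readable } from "stream";
    // Illustrative import paths; consumers would import from the published package.
    import { stringify } from "./src/functions/stringify";
    import { unbatch } from "./src/functions/unbatch";

    const batches = new Readable({ objectMode: true, read() {} });
    batches
        .pipe(unbatch())                   // [{...}, {...}] -> {...}, {...}
        .pipe(stringify({ pretty: true })) // each object -> indented JSON string
        .on("data", (json: string) => console.log(json));

    batches.push([{ id: 1 }, { id: 2 }]); // one batched chunk yields two outputs
    batches.push(null);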