Jerry Kurian 2019-08-15 11:54:50 -04:00
parent 3a1fbf44d7
commit a40b1bf38c
38 changed files with 1981 additions and 2616 deletions

View File

@@ -23,6 +23,7 @@
},
"scripts": {
"test": "ava",
+"test:debug": "NODE_PATH=src node inspect node_modules/ava/profile.js",
"lint": "tslint -p tsconfig.json",
"validate:tslint": "tslint-config-prettier-check ./tslint.json",
"prepublishOnly": "yarn lint && yarn test && yarn tsc"
@@ -30,13 +31,12 @@
"dependencies": {},
"devDependencies": {
"@types/chai": "^4.1.7",
-"@types/node": "^10.12.10",
-"@types/typescript": "^2.0.0",
+"@types/node": "^12.7.2",
"ava": "^1.0.0-rc.2",
"chai": "^4.2.0",
"mhysa": "./",
"prettier": "^1.14.3",
-"ts-node": "^7.0.1",
+"ts-node": "^8.3.0",
"tslint": "^5.11.0",
"tslint-config-prettier": "^1.16.0",
"tslint-plugin-prettier": "^2.0.1",

View File

@@ -0,0 +1,323 @@
import test from "ava";
import { expect } from "chai";
import { Readable } from "stream";
import { accumulator, accumulatorBy } from ".";
import { FlushStrategy } from "./definitions";
test.cb("accumulator() rolling", t => {
t.plan(3);
let chunkIndex = 0;
interface TestObject {
ts: number;
key: string;
}
const source = new Readable({ objectMode: true });
const firstFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
const secondFlush = [{ ts: 2, key: "d" }, { ts: 3, key: "e" }];
const thirdFlush = [{ ts: 4, key: "f" }];
const flushes = [firstFlush, secondFlush, thirdFlush];
source
.pipe(accumulator(2, undefined, FlushStrategy.rolling))
.on("data", (flush: TestObject[]) => {
t.deepEqual(flush, flushes[chunkIndex]);
chunkIndex++;
})
.on("error", (e: any) => {
t.end(e);
})
.on("end", () => {
t.end();
});
[...firstFlush, ...secondFlush, ...thirdFlush].forEach(item => {
source.push(item);
});
source.push(null);
});
test.cb("accumulator() rolling with key", t => {
t.plan(2);
let chunkIndex = 0;
interface TestObject {
ts: number;
key: string;
}
const source = new Readable({ objectMode: true });
const firstFlush = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
{ ts: 2, key: "d" },
];
const secondFlush = [{ ts: 3, key: "e" }];
const flushes = [firstFlush, secondFlush];
source
.pipe(accumulator(3, undefined, FlushStrategy.rolling, "ts"))
.on("data", (flush: TestObject[]) => {
t.deepEqual(flush, flushes[chunkIndex]);
chunkIndex++;
})
.on("error", (e: any) => {
t.end(e);
})
.on("end", () => {
t.end();
});
[...firstFlush, ...secondFlush].forEach(item => {
source.push(item);
});
source.push(null);
});
test.cb("accumulatorBy() rolling", t => {
t.plan(2);
let chunkIndex = 0;
interface TestObject {
ts: number;
key: string;
}
const source = new Readable({ objectMode: true });
const firstFlush = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
{ ts: 2, key: "d" },
];
const secondFlush = [{ ts: 3, key: "e" }];
const flushes = [firstFlush, secondFlush];
source
.pipe(
accumulatorBy(
undefined,
FlushStrategy.rolling,
(event: TestObject, bufferChunk: TestObject) => {
return bufferChunk.ts + 3 <= event.ts;
},
),
)
.on("data", (flush: TestObject[]) => {
t.deepEqual(flush, flushes[chunkIndex]);
chunkIndex++;
})
.on("error", (e: any) => {
t.end(e);
})
.on("end", () => {
t.end();
});
[...firstFlush, ...secondFlush].forEach(item => {
source.push(item);
});
source.push(null);
});
test.cb("accumulator() sliding", t => {
t.plan(4);
let chunkIndex = 0;
interface TestObject {
ts: number;
key: string;
}
const source = new Readable({ objectMode: true });
const input = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
{ ts: 4, key: "d" },
];
const firstFlush = [{ ts: 0, key: "a" }];
const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
const thirdFlush = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
];
const fourthFlush = [
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
{ ts: 4, key: "d" },
];
const flushes = [firstFlush, secondFlush, thirdFlush, fourthFlush];
source
.pipe(accumulator(3, undefined, FlushStrategy.sliding))
.on("data", (flush: TestObject[]) => {
t.deepEqual(flush, flushes[chunkIndex]);
chunkIndex++;
})
.on("error", (e: any) => {
t.end(e);
})
.on("end", () => {
t.end();
});
input.forEach(item => {
source.push(item);
});
source.push(null);
});
test.cb("accumulator() sliding with key", t => {
t.plan(6);
let chunkIndex = 0;
interface TestObject {
ts: number;
key: string;
}
const source = new Readable({ objectMode: true });
const input = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
{ ts: 3, key: "d" },
{ ts: 5, key: "f" },
{ ts: 6, key: "g" },
];
const firstFlush = [{ ts: 0, key: "a" }];
const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
const thirdFlush = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
];
const fourthFlush = [
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
{ ts: 3, key: "d" },
];
const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }];
const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }];
const flushes = [
firstFlush,
secondFlush,
thirdFlush,
fourthFlush,
fifthFlush,
sixthFlush,
];
source
.pipe(accumulator(3, undefined, FlushStrategy.sliding, "ts"))
.on("data", (flush: TestObject[]) => {
t.deepEqual(flush, flushes[chunkIndex]);
chunkIndex++;
})
.on("error", (e: any) => {
t.end(e);
})
.on("end", () => {
t.end();
});
input.forEach(item => {
source.push(item);
});
source.push(null);
});
test.cb("accumulatorBy() sliding", t => {
t.plan(6);
let chunkIndex = 0;
interface TestObject {
ts: number;
key: string;
}
const source = new Readable({ objectMode: true });
const input = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
{ ts: 3, key: "d" },
{ ts: 5, key: "f" },
{ ts: 6, key: "g" },
];
const firstFlush = [{ ts: 0, key: "a" }];
const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
const thirdFlush = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
];
const fourthFlush = [
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
{ ts: 3, key: "d" },
];
const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }];
const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }];
const flushes = [
firstFlush,
secondFlush,
thirdFlush,
fourthFlush,
fifthFlush,
sixthFlush,
];
source
.pipe(
accumulatorBy(
undefined,
FlushStrategy.sliding,
(event: TestObject, bufferChunk: TestObject) => {
return bufferChunk.ts + 3 <= event.ts;
},
),
)
.on("data", (flush: TestObject[]) => {
t.deepEqual(flush, flushes[chunkIndex]);
chunkIndex++;
})
.on("error", (e: any) => {
t.end(e);
})
.on("end", () => {
t.end();
});
input.forEach(item => {
source.push(item);
});
source.push(null);
});
test.cb("accumulatorBy() sliding should throw", t => {
t.plan(2);
interface TestObject {
ts: number;
key: string;
}
const source = new Readable({ objectMode: true });
const input = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
{ ts: 3, key: "d" },
];
const accumulatorStream = accumulatorBy(
undefined,
FlushStrategy.sliding,
(event: TestObject, bufferChunk: TestObject) => {
if (event.key !== "a" && event.key !== "b") {
throw new Error("Failed mapping");
}
return bufferChunk.ts + 3 <= event.ts;
},
);
source
.pipe(accumulatorStream)
.on("error", (err: any) => {
source.pipe(accumulatorStream);
accumulatorStream.resume();
expect(err.message).to.equal("Failed mapping");
t.pass();
})
.on("end", () => {
t.end();
});
input.forEach(item => {
source.push(item);
});
source.push(null);
});

View File

@@ -0,0 +1,6 @@
export enum FlushStrategy {
rolling = "rolling",
sliding = "sliding",
}
export type AccumulatorByIteratee<T> = (event: T, bufferChunk: T) => boolean;

View File

@@ -0,0 +1,169 @@
import { Transform } from "stream";
import { AccumulatorByIteratee, FlushStrategy } from "./definitions";
import { batch } from "../../index";
function _accumulator<T>(
accumulateBy: (data: T, buffer: T[], stream: Transform) => void,
shouldFlush: boolean = true,
) {
const buffer: T[] = [];
return new Transform({
readableObjectMode: true,
writableObjectMode: true,
transform(data: any, encoding, callback) {
try {
accumulateBy(data, buffer, this);
callback();
} catch (err) {
callback(err);
}
},
flush(callback) {
if (shouldFlush) {
this.push(buffer);
}
callback();
},
});
}
function _sliding<T>(
windowLength: number,
rate: number | undefined,
key?: string,
): (event: T, buffer: T[], stream: Transform) => void {
return (event: T, buffer: T[], stream: Transform) => {
if (key) {
let index = 0;
while (
index < buffer.length &&
buffer[index][key] + windowLength <= event[key]
) {
index++;
}
buffer.splice(0, index);
} else if (buffer.length === windowLength) {
buffer.shift();
}
buffer.push(event);
stream.push(buffer);
};
}
function _slidingByFunction<T>(
rate: number | undefined,
iteratee: AccumulatorByIteratee<T>,
): (event: T, buffer: T[], stream: Transform) => void {
return (event: T, buffer: T[], stream: Transform) => {
let index = 0;
while (index < buffer.length && iteratee(event, buffer[index])) {
index++;
}
buffer.splice(0, index);
buffer.push(event);
stream.push(buffer);
};
}
function _rollingByFunction<T>(
rate: number | undefined,
iteratee: AccumulatorByIteratee<T>,
): (event: T, buffer: T[], stream: Transform) => void {
return (event: T, buffer: T[], stream: Transform) => {
if (iteratee) {
if (buffer.length > 0 && iteratee(event, buffer[0])) {
stream.push(buffer.slice(0));
buffer.length = 0;
}
}
buffer.push(event);
};
}
function _rolling<T>(
windowLength: number,
rate: number | undefined,
key?: string,
): (event: T, buffer: T[], stream: Transform) => void {
return (event: T, buffer: T[], stream: Transform) => {
if (key) {
if (event[key] === undefined) {
stream.emit(
"error",
new Error(
`Key is missing in event: (${key}, ${JSON.stringify(
event,
)})`,
),
);
} else if (
buffer.length > 0 &&
buffer[0][key] + windowLength <= event[key]
) {
stream.push(buffer.slice(0));
buffer.length = 0;
}
} else if (buffer.length === windowLength) {
stream.push(buffer.slice(0));
buffer.length = 0;
}
buffer.push(event);
};
}
export function accumulator(
batchSize: number,
batchRate: number | undefined,
flushStrategy: FlushStrategy,
keyBy?: string,
): Transform {
if (flushStrategy === FlushStrategy.sliding) {
return sliding(batchSize, batchRate, keyBy);
} else if (flushStrategy === FlushStrategy.rolling) {
return rolling(batchSize, batchRate, keyBy);
} else {
return batch(batchSize, batchRate);
}
}
export function accumulatorBy<T, S extends FlushStrategy>(
batchRate: number | undefined,
flushStrategy: S,
iteratee: AccumulatorByIteratee<T>,
): Transform {
if (flushStrategy === FlushStrategy.sliding) {
return slidingBy(batchRate, iteratee);
} else {
return rollingBy(batchRate, iteratee);
}
}
function sliding(
windowLength: number,
rate: number | undefined,
key?: string,
): Transform {
return _accumulator(_sliding(windowLength, rate, key), false);
}
function slidingBy<T>(
rate: number | undefined,
iteratee: AccumulatorByIteratee<T>,
): Transform {
return _accumulator(_slidingByFunction(rate, iteratee), false);
}
function rolling(
windowLength: number,
rate: number | undefined,
key?: string,
): Transform {
return _accumulator(_rolling(windowLength, rate, key));
}
function rollingBy<T>(
rate: number | undefined,
iteratee: AccumulatorByIteratee<T>,
): Transform {
return _accumulator(_rollingByFunction(rate, iteratee));
}
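A minimal usage sketch (illustrative only, not part of this commit; it assumes accumulator is imported from the "mhysa" package entry and that FlushStrategy is re-exported alongside it, which this commit's index does not yet do):
import { Readable } from "stream";
import { accumulator, FlushStrategy } from "mhysa";

const source = new Readable({ objectMode: true, read() {} });
source
    .pipe(accumulator(2, undefined, FlushStrategy.rolling))
    .on("data", (window: Array<{ ts: number }>) => {
        console.log(window); // [{ ts: 0 }, { ts: 1 }], then [{ ts: 2 }] on end
    });
[{ ts: 0 }, { ts: 1 }, { ts: 2 }].forEach(event => source.push(event));
source.push(null);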

View File

@@ -0,0 +1,20 @@
export { accumulator, accumulatorBy } from "./accumulator";
export { batch } from "./batch";
export { child } from "./child";
export { collect } from "./collect";
export { concat } from "./concat";
export { duplex } from "./duplex";
export { filter } from "./filter";
export { flatMap } from "./flatMap";
export { fromArray } from "./fromArray";
export { join } from "./join";
export { last } from "./last";
export { map } from "./map";
export { merge } from "./merge";
export { parallelMap } from "./parallelMap";
export { parse } from "./parse";
export { rate } from "./rate";
export { reduce } from "./reduce";
export { split } from "./split";
export { stringify } from "./stringify";
export { unbatch } from "./unbatch";

View File

@@ -0,0 +1,47 @@
import { Transform } from "stream";
import { TransformOptions } from "../definitions";
/**
* Stores chunks of data internally in an array and pushes the batch downstream
* when batchSize is reached or the batch has been alive for maxBatchAge milliseconds.
*
* @param batchSize Size of each batch, defaults to 1000
* @param maxBatchAge Maximum lifetime of a batch in milliseconds, defaults to 500
*/
export function batch(
batchSize: number = 1000,
maxBatchAge: number = 500,
options: TransformOptions = {
readableObjectMode: true,
writableObjectMode: true,
},
): Transform {
let buffer: any[] = [];
let timer: NodeJS.Timer | null = null;
const sendChunk = (self: Transform) => {
if (timer) {
clearInterval(timer);
}
timer = null;
self.push(buffer);
buffer = [];
};
return new Transform({
...options,
transform(chunk, encoding, callback) {
buffer.push(chunk);
if (buffer.length === batchSize) {
sendChunk(this);
} else {
if (timer === null) {
timer = setInterval(() => {
sendChunk(this);
}, maxBatchAge);
}
}
callback();
},
flush(callback) {
sendChunk(this);
callback();
},
});
}
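A usage sketch (illustrative only, not part of this commit; assumes batch is exported from the "mhysa" package entry):
import { Readable } from "stream";
import { batch } from "mhysa";

const source = new Readable({ objectMode: true, read() {} });
source.pipe(batch(2)).on("data", (chunk: string[]) => {
    console.log(chunk); // ["a", "b"], then ["c"] on flush
});
["a", "b", "c"].forEach(letter => source.push(letter));
source.push(null);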

View File

@@ -0,0 +1,14 @@
import { ChildProcess } from "child_process";
import { duplex } from "../duplex"; // assumed relative path to the duplex helper in this tree
/**
* Return a Duplex stream from a child process' stdin and stdout
* @param childProcess Child process from which to create duplex stream
*/
export function child(childProcess: ChildProcess) {
if (childProcess.stdin === null) {
throw new Error("childProcess.stdin is null");
} else if (childProcess.stdout === null) {
throw new Error("childProcess.stdout is null");
}
return duplex(childProcess.stdin, childProcess.stdout);
}
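A usage sketch (illustrative only, not part of this commit; assumes a Unix-like system where cat exists and that child is exported from the "mhysa" entry): wrapping cat yields a stream that echoes writes back as reads.
import { spawn } from "child_process";
import { child } from "mhysa";

const echo = child(spawn("cat"));
echo.on("data", chunk => console.log(chunk.toString())); // "hello"
echo.write("hello");
echo.end();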

View File

@@ -0,0 +1,26 @@
import { Transform } from "stream";
import { ThroughOptions } from "../definitions";
/**
* Return a ReadWrite stream that collects streamed chunks into an array or buffer
* @param options
* @param options.objectMode Whether this stream should behave as a stream of objects
*/
export function collect(
options: ThroughOptions = { objectMode: false },
): NodeJS.ReadWriteStream {
const collected: any[] = [];
return new Transform({
readableObjectMode: options.objectMode,
writableObjectMode: options.objectMode,
transform(data, encoding, callback) {
collected.push(data);
callback();
},
flush(callback) {
this.push(
options.objectMode ? collected : Buffer.concat(collected),
);
callback();
},
});
}
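A usage sketch (illustrative only, not part of this commit; assumes collect is exported from the "mhysa" entry):
import { Readable } from "stream";
import { collect } from "mhysa";

const source = new Readable({ objectMode: true, read() {} });
source
    .pipe(collect({ objectMode: true }))
    .on("data", (collected: string[]) => console.log(collected)); // ["a", "b", "c"]
["a", "b", "c"].forEach(letter => source.push(letter));
source.push(null);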

View File

@@ -0,0 +1,40 @@
import { Readable } from "stream";
/**
* Return a Readable stream of readable streams concatenated together
* @param streams Readable streams to concatenate
*/
export function concat(...streams: Readable[]): Readable {
let isStarted = false;
let currentStreamIndex = 0;
const startCurrentStream = () => {
if (currentStreamIndex >= streams.length) {
wrapper.push(null);
} else {
streams[currentStreamIndex]
.on("data", chunk => {
if (!wrapper.push(chunk)) {
streams[currentStreamIndex].pause();
}
})
.on("error", err => wrapper.emit("error", err))
.on("end", () => {
currentStreamIndex++;
startCurrentStream();
});
}
};
const wrapper = new Readable({
objectMode: true,
read() {
if (!isStarted) {
isStarted = true;
startCurrentStream();
}
if (currentStreamIndex < streams.length) {
streams[currentStreamIndex].resume();
}
},
});
return wrapper;
}
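A usage sketch (illustrative only, not part of this commit; assumes concat is exported from the "mhysa" entry): the second stream is only read once the first has ended.
import { Readable } from "stream";
import { concat } from "mhysa";

const first = new Readable({ objectMode: true, read() {} });
const second = new Readable({ objectMode: true, read() {} });
concat(first, second).on("data", chunk => console.log(chunk)); // "a", then "b"
first.push("a");
first.push(null);
second.push("b");
second.push(null);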

View File

@@ -0,0 +1,33 @@
import { Duplex, Writable, Readable } from "stream";
/**
* Return a Duplex stream from a writable stream that is assumed to somehow, when written to,
* cause the given readable stream to yield chunks
* @param writable Writable stream assumed to cause the readable stream to yield chunks when written to
* @param readable Readable stream assumed to yield chunks when the writable stream is written to
*/
export function duplex(writable: Writable, readable: Readable) {
const wrapper = new Duplex({
readableObjectMode: true,
writableObjectMode: true,
read() {
readable.resume();
},
write(chunk, encoding, callback) {
return writable.write(chunk, encoding, callback);
},
final(callback) {
writable.end(callback);
},
});
readable
.on("data", chunk => {
if (!wrapper.push(chunk)) {
readable.pause();
}
})
.on("error", err => wrapper.emit("error", err))
.on("end", () => wrapper.push(null));
writable.on("drain", () => wrapper.emit("drain"));
writable.on("error", err => wrapper.emit("error", err));
return wrapper;
}
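A usage sketch (illustrative only, not part of this commit; assumes duplex is exported from the "mhysa" entry): a single PassThrough stands in for both halves so writes come straight back out. In real use the writable and readable would be the two ends of some external process or pipeline.
import { PassThrough } from "stream";
import { duplex } from "mhysa";

const channel = new PassThrough({ objectMode: true });
const wrapped = duplex(channel, channel);
wrapped.on("data", chunk => console.log(chunk)); // "a"
wrapped.write("a");
wrapped.end();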

View File

@@ -0,0 +1,102 @@
import test from "ava";
import { expect } from "chai";
import { Readable } from "stream";
import { filter } from ".";
test.cb("filter() filters elements synchronously", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
const expectedElements = ["a", "c"];
let i = 0;
source
.pipe(filter((element: string) => element !== "b"))
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test.cb("filter() filters elements asynchronously", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
const expectedElements = ["a", "c"];
let i = 0;
source
.pipe(
filter(async (element: string) => {
await Promise.resolve();
return element !== "b";
}),
)
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test.cb("filter() emits errors during synchronous filtering", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
source
.pipe(
filter((element: string) => {
if (element !== "a") {
throw new Error("Failed filtering");
}
return true;
}),
)
.resume()
.on("error", err => {
expect(err.message).to.equal("Failed filtering");
t.pass();
})
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test.cb("filter() emits errors during asynchronous filtering", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
source
.pipe(
filter(async (element: string) => {
await Promise.resolve();
if (element !== "a") {
throw new Error("Failed filtering");
}
return true;
}),
)
.resume()
.on("error", err => {
expect(err.message).to.equal("Failed filtering");
t.pass();
})
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});

View File

@@ -0,0 +1,41 @@
import { Transform } from "stream";
import { ThroughOptions } from "../definitions";
/**
* Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold
* @param predicate Predicate with which to filter stream chunks
* @param options
* @param options.objectMode Whether this stream should behave as a stream of objects
*/
export function filter<T>(
predicate:
| ((chunk: T, encoding: string) => boolean)
| ((chunk: T, encoding: string) => Promise<boolean>),
options: ThroughOptions = {
objectMode: true,
},
) {
return new Transform({
readableObjectMode: options.objectMode,
writableObjectMode: options.objectMode,
async transform(chunk: T, encoding, callback) {
let isPromise = false;
try {
const result = predicate(chunk, encoding);
isPromise = result instanceof Promise;
if (!!(await result)) {
callback(undefined, chunk);
} else {
callback();
}
} catch (err) {
if (isPromise) {
// Calling the callback asynchronously with an error wouldn't emit the error, so emit directly
this.emit("error", err);
callback();
} else {
callback(err);
}
}
},
});
}
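A usage sketch (illustrative only, not part of this commit; assumes filter is exported from the "mhysa" entry), using an async predicate to show that promises are supported:
import { Readable } from "stream";
import { filter } from "mhysa";

const source = new Readable({ objectMode: true, read() {} });
source
    .pipe(filter(async (n: number) => n % 2 === 0))
    .on("data", (n: number) => console.log(n)); // 2, then 4
[1, 2, 3, 4].forEach(n => source.push(n));
source.push(null);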

View File

@@ -0,0 +1,100 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { flatMap } from ".";
test.cb("flatMap() maps elements synchronously", t => {
t.plan(6);
const source = new Readable({ objectMode: true });
const expectedElements = ["a", "A", "b", "B", "c", "C"];
let i = 0;
source
.pipe(flatMap((element: string) => [element, element.toUpperCase()]))
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test.cb("flatMap() maps elements asynchronously", t => {
t.plan(6);
const source = new Readable({ objectMode: true });
const expectedElements = ["a", "A", "b", "B", "c", "C"];
let i = 0;
source
.pipe(
flatMap(async (element: string) => {
await Promise.resolve();
return [element, element.toUpperCase()];
}),
)
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test.cb("flatMap() emits errors during synchronous mapping", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
source
.pipe(
flatMap((element: string) => {
if (element !== "a") {
throw new Error("Failed mapping");
}
return [element, element.toUpperCase()];
}),
)
.resume()
.on("error", err => {
expect(err.message).to.equal("Failed mapping");
t.pass();
})
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test.cb("flatMap() emits errors during asynchronous mapping", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
source
.pipe(
flatMap(async (element: string) => {
await Promise.resolve();
if (element !== "a") {
throw new Error("Failed mapping");
}
return [element, element.toUpperCase()];
}),
)
.resume()
.on("error", err => {
expect(err.message).to.equal("Failed mapping");
t.pass();
})
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});

View File

@@ -0,0 +1,39 @@
import { Transform } from "stream";
import { TransformOptions } from "../definitions";
/**
* Return a ReadWrite stream that flat maps streamed chunks
* @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such)
* @param options
* @param options.readableObjectMode Whether this stream should behave as a readable stream of objects
* @param options.writableObjectMode Whether this stream should behave as a writable stream of objects
*/
export function flatMap<T, R>(
mapper:
| ((chunk: T, encoding: string) => R[])
| ((chunk: T, encoding: string) => Promise<R[]>),
options: TransformOptions = {
readableObjectMode: true,
writableObjectMode: true,
},
): NodeJS.ReadWriteStream {
return new Transform({
...options,
async transform(chunk: T, encoding, callback) {
let isPromise = false;
try {
const mapped = mapper(chunk, encoding);
isPromise = mapped instanceof Promise;
(await mapped).forEach(c => this.push(c));
callback();
} catch (err) {
if (isPromise) {
// Calling the callback asynchronously with an error wouldn't emit the error, so emit directly
this.emit("error", err);
callback();
} else {
callback(err);
}
}
},
});
}
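A usage sketch (illustrative only, not part of this commit; assumes flatMap is exported from the "mhysa" entry): each incoming line fans out into one chunk per word.
import { Readable } from "stream";
import { flatMap } from "mhysa";

const source = new Readable({ objectMode: true, read() {} });
source
    .pipe(flatMap((line: string) => line.split(" ")))
    .on("data", (word: string) => console.log(word)); // "hello", "world", "again"
source.push("hello world");
source.push("again");
source.push(null);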

View File

@@ -0,0 +1,45 @@
import test from "ava";
import { expect } from "chai";
import { fromArray } from ".";
test.cb("fromArray() streams array elements in flowing mode", t => {
t.plan(3);
const elements = ["a", "b", "c"];
const stream = fromArray(elements);
let i = 0;
stream
.on("data", (element: string) => {
expect(element).to.equal(elements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
});
test.cb("fromArray() ends immediately if there are no array elements", t => {
t.plan(0);
fromArray([])
.on("data", () => t.fail())
.on("error", t.end)
.on("end", t.end);
});
test.cb("fromArray() streams array elements in paused mode", t => {
t.plan(3);
const elements = ["a", "b", "c"];
const stream = fromArray(elements);
let i = 0;
stream
.on("readable", () => {
let element = stream.read();
while (element !== null) {
expect(element).to.equal(elements[i]);
t.pass();
i++;
element = stream.read();
}
})
.on("error", t.end)
.on("end", t.end);
});

View File

@@ -0,0 +1,19 @@
import { Readable } from "stream";
/**
* Convert an array into a Readable stream of its elements
* @param array Array of elements to stream
*/
export function fromArray(array: any[]): NodeJS.ReadableStream {
let cursor = 0;
return new Readable({
objectMode: true,
read() {
if (cursor < array.length) {
this.push(array[cursor]);
cursor++;
} else {
this.push(null);
}
},
});
}
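A usage sketch (illustrative only, not part of this commit; assumes fromArray and map are exported from the "mhysa" entry): fromArray pairs naturally with the other operators as a pipeline source.
import { fromArray, map } from "mhysa";

fromArray(["a", "b", "c"])
    .pipe(map((letter: string) => letter.toUpperCase()))
    .on("data", console.log); // "A", "B", "C"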

File diff suppressed because it is too large

View File

@@ -1,765 +0,0 @@
import { Transform, Readable, Writable, Duplex } from "stream";
import { performance } from "perf_hooks";
import { ChildProcess } from "child_process";
import { StringDecoder } from "string_decoder";
import {
TransformOptions,
ThroughOptions,
WithEncoding,
SerializationFormats,
JsonValue,
JsonParseOptions,
FlushStrategy,
AccumulatorByIteratee,
} from "./definitions";
import { sleep } from "../helpers";
/**
* Convert an array into a Readable stream of its elements
* @param array Array of elements to stream
*/
export function fromArray(array: any[]): NodeJS.ReadableStream {
let cursor = 0;
return new Readable({
objectMode: true,
read() {
if (cursor < array.length) {
this.push(array[cursor]);
cursor++;
} else {
this.push(null);
}
},
});
}
/**
* Return a ReadWrite stream that maps streamed chunks
* @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such)
* @param options
* @param options.readableObjectMode Whether this stream should behave as a readable stream of objects
* @param options.writableObjectMode Whether this stream should behave as a writable stream of objects
*/
export function map<T, R>(
mapper: (chunk: T, encoding: string) => R,
options: TransformOptions = {
readableObjectMode: true,
writableObjectMode: true,
},
): NodeJS.ReadWriteStream {
return new Transform({
...options,
async transform(chunk: T, encoding, callback) {
try {
const mapped = await mapper(chunk, encoding);
this.push(mapped);
callback();
} catch (err) {
callback(err);
}
},
});
}
/**
* Return a ReadWrite stream that flat maps streamed chunks
* @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such)
* @param options
* @param options.readableObjectMode Whether this stream should behave as a readable stream of objects
* @param options.writableObjectMode Whether this stream should behave as a writable stream of objects
*/
export function flatMap<T, R>(
mapper:
| ((chunk: T, encoding: string) => R[])
| ((chunk: T, encoding: string) => Promise<R[]>),
options: TransformOptions = {
readableObjectMode: true,
writableObjectMode: true,
},
): NodeJS.ReadWriteStream {
return new Transform({
...options,
async transform(chunk: T, encoding, callback) {
let isPromise = false;
try {
const mapped = mapper(chunk, encoding);
isPromise = mapped instanceof Promise;
(await mapped).forEach(c => this.push(c));
callback();
} catch (err) {
if (isPromise) {
// Calling the callback asynchronously with an error wouldn't emit the error, so emit directly
this.emit("error", err);
callback();
} else {
callback(err);
}
}
},
});
}
/**
* Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold
* @param predicate Predicate with which to filter stream chunks
* @param options
* @param options.objectMode Whether this stream should behave as a stream of objects
*/
export function filter<T>(
predicate:
| ((chunk: T, encoding: string) => boolean)
| ((chunk: T, encoding: string) => Promise<boolean>),
options: ThroughOptions = {
objectMode: true,
},
) {
return new Transform({
readableObjectMode: options.objectMode,
writableObjectMode: options.objectMode,
async transform(chunk: T, encoding, callback) {
let isPromise = false;
try {
const result = predicate(chunk, encoding);
isPromise = result instanceof Promise;
if (!!(await result)) {
callback(undefined, chunk);
} else {
callback();
}
} catch (err) {
if (isPromise) {
// Calling the callback asynchronously with an error wouldn't emit the error, so emit directly
this.emit("error", err);
callback();
} else {
callback(err);
}
}
},
});
}
/**
* Return a ReadWrite stream that reduces streamed chunks down to a single value and yield that
* value
* @param iteratee Reducer function to apply on each streamed chunk
* @param initialValue Initial value
* @param options
* @param options.readableObjectMode Whether this stream should behave as a readable stream of objects
* @param options.writableObjectMode Whether this stream should behave as a writable stream of objects
*/
export function reduce<T, R>(
iteratee:
| ((previousValue: R, chunk: T, encoding: string) => R)
| ((previousValue: R, chunk: T, encoding: string) => Promise<R>),
initialValue: R,
options: TransformOptions = {
readableObjectMode: true,
writableObjectMode: true,
},
) {
let value = initialValue;
return new Transform({
readableObjectMode: options.readableObjectMode,
writableObjectMode: options.writableObjectMode,
async transform(chunk: T, encoding, callback) {
let isPromise = false;
try {
const result = iteratee(value, chunk, encoding);
isPromise = result instanceof Promise;
value = await result;
callback();
} catch (err) {
if (isPromise) {
// Calling the callback asynchronously with an error wouldn't emit the error, so emit directly
this.emit("error", err);
callback();
} else {
callback(err);
}
}
},
flush(callback) {
// Best effort attempt at yielding the final value (will throw if e.g. yielding an object and
// downstream doesn't expect objects)
try {
callback(undefined, value);
} catch (err) {
try {
this.emit("error", err);
} catch {
// Best effort was made
}
}
},
});
}
/**
* Return a ReadWrite stream that splits streamed chunks using the given separator
* @param separator Separator to split by, defaulting to "\n"
* @param options
* @param options.encoding Encoding written chunks are assumed to use
*/
export function split(
separator: string | RegExp = "\n",
options: WithEncoding = { encoding: "utf8" },
): NodeJS.ReadWriteStream {
let buffered = "";
const decoder = new StringDecoder(options.encoding);
return new Transform({
readableObjectMode: true,
transform(chunk: Buffer, encoding, callback) {
const asString = decoder.write(chunk);
const splitted = asString.split(separator);
if (splitted.length > 1) {
splitted[0] = buffered.concat(splitted[0]);
buffered = "";
}
buffered += splitted[splitted.length - 1];
splitted.slice(0, -1).forEach((part: string) => this.push(part));
callback();
},
flush(callback) {
callback(undefined, buffered + decoder.end());
},
});
}
/**
* Return a ReadWrite stream that joins streamed chunks using the given separator
* @param separator Separator to join with
* @param options
* @param options.encoding Encoding written chunks are assumed to use
*/
export function join(
separator: string,
options: WithEncoding = { encoding: "utf8" },
): NodeJS.ReadWriteStream {
let isFirstChunk = true;
const decoder = new StringDecoder(options.encoding);
return new Transform({
readableObjectMode: true,
async transform(chunk: Buffer, encoding, callback) {
const asString = decoder.write(chunk);
// Take care not to break up multi-byte characters spanning multiple chunks
if (asString !== "" || chunk.length === 0) {
if (!isFirstChunk) {
this.push(separator);
}
this.push(asString);
isFirstChunk = false;
}
callback();
},
});
}
/**
* Return a ReadWrite stream that replaces occurrences of the given string or regular expression in
* the streamed chunks with the specified replacement string
* @param searchValue Search string to use
* @param replaceValue Replacement string to use
* @param options
* @param options.encoding Encoding written chunks are assumed to use
*/
export function replace(
searchValue: string | RegExp,
replaceValue: string,
options: WithEncoding = { encoding: "utf8" },
): NodeJS.ReadWriteStream {
const decoder = new StringDecoder(options.encoding);
return new Transform({
readableObjectMode: true,
transform(chunk: Buffer, encoding, callback) {
const asString = decoder.write(chunk);
// Take care not to break up multi-byte characters spanning multiple chunks
if (asString !== "" || chunk.length === 0) {
callback(
undefined,
asString.replace(searchValue, replaceValue),
);
} else {
callback();
}
},
});
}
/**
* Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk
* must be a fully defined JSON string.
* @param format Format of serialized data, only utf8 supported.
*/
export function parse(
format: SerializationFormats = SerializationFormats.utf8,
): NodeJS.ReadWriteStream {
const decoder = new StringDecoder(format);
return new Transform({
readableObjectMode: true,
writableObjectMode: true,
async transform(chunk: Buffer, encoding, callback) {
try {
const asString = decoder.write(chunk);
// Using await causes parsing errors to be emitted
callback(undefined, await JSON.parse(asString));
} catch (err) {
callback(err);
}
},
});
}
/**
* Return a ReadWrite stream that stringifies the streamed chunks to JSON
*/
export function stringify(
options: JsonParseOptions = { pretty: false },
): NodeJS.ReadWriteStream {
return new Transform({
readableObjectMode: true,
writableObjectMode: true,
transform(chunk: JsonValue, encoding, callback) {
callback(
undefined,
options.pretty
? JSON.stringify(chunk, null, 2)
: JSON.stringify(chunk),
);
},
});
}
/**
* Return a ReadWrite stream that collects streamed chunks into an array or buffer
* @param options
* @param options.objectMode Whether this stream should behave as a stream of objects
*/
export function collect(
options: ThroughOptions = { objectMode: false },
): NodeJS.ReadWriteStream {
const collected: any[] = [];
return new Transform({
readableObjectMode: options.objectMode,
writableObjectMode: options.objectMode,
transform(data, encoding, callback) {
collected.push(data);
callback();
},
flush(callback) {
this.push(
options.objectMode ? collected : Buffer.concat(collected),
);
callback();
},
});
}
/**
* Return a Readable stream of readable streams concatenated together
* @param streams Readable streams to concatenate
*/
export function concat(
...streams: NodeJS.ReadableStream[]
): NodeJS.ReadableStream {
let isStarted = false;
let currentStreamIndex = 0;
const startCurrentStream = () => {
if (currentStreamIndex >= streams.length) {
wrapper.push(null);
} else {
streams[currentStreamIndex]
.on("data", chunk => {
if (!wrapper.push(chunk)) {
streams[currentStreamIndex].pause();
}
})
.on("error", err => wrapper.emit("error", err))
.on("end", () => {
currentStreamIndex++;
startCurrentStream();
});
}
};
const wrapper = new Readable({
objectMode: true,
read() {
if (!isStarted) {
isStarted = true;
startCurrentStream();
}
if (currentStreamIndex < streams.length) {
streams[currentStreamIndex].resume();
}
},
});
return wrapper;
}
/**
* Return a Readable stream of readable streams merged together in chunk arrival order
* @param streams Readable streams to merge
*/
export function merge(
...streams: NodeJS.ReadableStream[]
): NodeJS.ReadableStream {
let isStarted = false;
let streamEndedCount = 0;
return new Readable({
objectMode: true,
read() {
if (streamEndedCount >= streams.length) {
this.push(null);
} else if (!isStarted) {
isStarted = true;
streams.forEach(stream =>
stream
.on("data", chunk => {
if (!this.push(chunk)) {
streams.forEach(s => s.pause());
}
})
.on("error", err => this.emit("error", err))
.on("end", () => {
streamEndedCount++;
if (streamEndedCount === streams.length) {
this.push(null);
}
}),
);
} else {
streams.forEach(s => s.resume());
}
},
});
}
/**
* Return a Duplex stream from a writable stream that is assumed to somehow, when written to,
* cause the given readable stream to yield chunks
* @param writable Writable stream assumed to cause the readable stream to yield chunks when written to
* @param readable Readable stream assumed to yield chunks when the writable stream is written to
*/
export function duplex(writable: Writable, readable: Readable) {
const wrapper = new Duplex({
readableObjectMode: true,
writableObjectMode: true,
read() {
readable.resume();
},
write(chunk, encoding, callback) {
return writable.write(chunk, encoding, callback);
},
final(callback) {
writable.end(callback);
},
});
readable
.on("data", chunk => {
if (!wrapper.push(chunk)) {
readable.pause();
}
})
.on("error", err => wrapper.emit("error", err))
.on("end", () => wrapper.push(null));
writable.on("drain", () => wrapper.emit("drain"));
writable.on("error", err => wrapper.emit("error", err));
return wrapper;
}
/**
* Return a Duplex stream from a child process' stdin and stdout
* @param childProcess Child process from which to create duplex stream
*/
export function child(childProcess: ChildProcess) {
return duplex(childProcess.stdin, childProcess.stdout);
}
/**
* Return a Promise resolving to the last streamed chunk of the given readable stream, after it has
* ended
* @param readable Readable stream to wait on
*/
export function last<T>(readable: Readable): Promise<T | null> {
let lastChunk: T | null = null;
return new Promise((resolve, reject) => {
readable
.on("data", chunk => (lastChunk = chunk))
.on("end", () => resolve(lastChunk));
});
}
/**
* Stores chunks of data internally in array and batches when batchSize is reached.
*
* @param batchSize Size of the batches
* @param maxBatchAge Max lifetime of a batch
*/
export function batch(batchSize: number = 1000, maxBatchAge: number = 500) {
let buffer: any[] = [];
let timer: NodeJS.Timer | null = null;
let sendChunk = (self: Transform) => {
timer && clearTimeout(timer);
timer = null;
self.push(buffer);
buffer = [];
};
return new Transform({
objectMode: true,
transform(chunk, encoding, callback) {
buffer.push(chunk);
if (buffer.length === batchSize) {
sendChunk(this);
} else {
if (timer === null) {
timer = setInterval(() => {
sendChunk(this);
}, maxBatchAge);
}
}
callback();
},
flush(callback) {
sendChunk(this);
callback();
},
});
}
/**
* Unbatches and sends individual chunks of data
*/
export function unbatch() {
return new Transform({
objectMode: true,
transform(data, encoding, callback) {
for (const d of data) {
this.push(d);
}
callback();
},
});
}
/**
* Limits the rate of data transferred through the stream.
* @param targetRate Desired rate in chunks per second
* @param period Period to sleep for when rate is above or equal to targetRate
*/
export function rate(targetRate: number = 50, period: number = 2) {
const deltaMS = ((1 / targetRate) * 1000) / period; // Skip half a period
let total = 0;
const start = performance.now();
return new Transform({
objectMode: true,
async transform(data, encoding, callback) {
const currentRate = (total / (performance.now() - start)) * 1000;
if (targetRate && currentRate > targetRate) {
await sleep(deltaMS);
}
total += 1;
callback(undefined, data);
},
});
}
/**
* Limits number of parallel processes in flight.
* @param parallel Max number of parallel processes.
* @param func Function to execute on each data chunk
* @param pause Amount of time to pause processing when max number of parallel processes are executing.
*/
export function parallelMap<T, R>(
mapper: (data: T) => R,
parallel: number = 10,
sleepTime: number = 5,
) {
let inflight = 0;
return new Transform({
objectMode: true,
async transform(data, encoding, callback) {
while (parallel <= inflight) {
await sleep(sleepTime);
}
inflight += 1;
callback();
try {
const res = await mapper(data);
this.push(res);
} catch (e) {
this.emit(e);
} finally {
inflight -= 1;
}
},
async flush(callback) {
while (inflight > 0) {
await sleep(sleepTime);
}
callback();
},
});
}
function _accumulator<T>(
accumulateBy: (data: T, buffer: T[], stream: Transform) => void,
shouldFlush: boolean = true,
) {
const buffer: T[] = [];
return new Transform({
objectMode: true,
async transform(data: any, encoding, callback) {
accumulateBy(data, buffer, this);
callback();
},
flush(callback) {
if (shouldFlush) {
this.push(buffer);
}
callback();
},
});
}
function _sliding<T>(
windowLength: number,
rate: number | undefined,
key?: string,
): (event: T, buffer: T[], stream: Transform) => void {
return (event: T, buffer: T[], stream: Transform) => {
if (key) {
let index = 0;
while (
index < buffer.length &&
buffer[index][key] + windowLength <= event[key]
) {
index++;
}
buffer.splice(0, index);
} else if (buffer.length === windowLength) {
buffer.shift();
}
buffer.push(event);
stream.push(buffer);
};
}
function _slidingByFunction<T>(
rate: number | undefined,
iteratee: AccumulatorByIteratee<T>,
): (event: T, buffer: T[], stream: Transform) => void {
return (event: T, buffer: T[], stream: Transform) => {
let index = 0;
while (index < buffer.length && iteratee(event, buffer[index])) {
index++;
}
buffer.splice(0, index);
buffer.push(event);
stream.push(buffer);
};
}
function _rollingByFunction<T>(
rate: number | undefined,
iteratee: AccumulatorByIteratee<T>,
): (event: T, buffer: T[], stream: Transform) => void {
return (event: T, buffer: T[], stream: Transform) => {
if (iteratee) {
if (buffer.length > 0 && iteratee(event, buffer[0])) {
stream.push(buffer.slice(0));
buffer.length = 0;
}
}
buffer.push(event);
};
}
function _rolling<T>(
windowLength: number,
rate: number | undefined,
key?: string,
): (event: T, buffer: T[], stream: Transform) => void {
return (event: T, buffer: T[], stream: Transform) => {
if (key) {
if (event[key] === undefined) {
stream.emit(
"error",
new Error(
`Key is missing in event: (${key}, ${JSON.stringify(
event,
)})`,
),
);
} else if (
buffer.length > 0 &&
buffer[0][key] + windowLength <= event[key]
) {
stream.push(buffer.slice(0));
buffer.length = 0;
}
} else if (buffer.length === windowLength) {
stream.push(buffer.slice(0));
buffer.length = 0;
}
buffer.push(event);
};
}
export function accumulator(
batchSize: number,
batchRate: number | undefined,
flushStrategy: FlushStrategy,
keyBy?: string,
): Transform {
if (flushStrategy === FlushStrategy.sliding) {
return sliding(batchSize, batchRate, keyBy);
} else if (flushStrategy === FlushStrategy.rolling) {
return rolling(batchSize, batchRate, keyBy);
} else {
return batch(batchSize, batchRate);
}
}
export function accumulatorBy<T, S extends FlushStrategy>(
batchRate: number | undefined,
flushStrategy: S,
iteratee: AccumulatorByIteratee<T>,
): Transform {
if (flushStrategy === FlushStrategy.sliding) {
return slidingBy(batchRate, iteratee);
} else {
return rollingBy(batchRate, iteratee);
}
}
export function sliding(
windowLength: number,
rate: number | undefined,
key?: string,
): Transform {
return _accumulator(_sliding(windowLength, rate, key), false);
}
export function slidingBy<T>(
rate: number | undefined,
iteratee: AccumulatorByIteratee<T>,
): Transform {
return _accumulator(_slidingByFunction(rate, iteratee), false);
}
export function rolling(
windowLength: number,
rate: number | undefined,
key?: string,
): Transform {
return _accumulator(_rolling(windowLength, rate, key));
}
export function rollingBy<T>(
rate: number | undefined,
iteratee: AccumulatorByIteratee<T>,
): Transform {
return _accumulator(_rollingByFunction(rate, iteratee));
}

View File

@@ -1,6 +1,6 @@
-import { Readable, Writable } from "stream";
+import { Readable, Writable, Transform } from "stream";
import { ChildProcess } from "child_process";
-import * as baseFunctions from "./functions";
+import * as baseFunctions from "./baseFunctions";
import {
ThroughOptions,
@@ -29,7 +29,7 @@ export function fromArray(array: any[]): NodeJS.ReadableStream {
export function map<T, R>(
mapper: (chunk: T, encoding?: string) => R,
options?: TransformOptions,
-): NodeJS.ReadWriteStream {
+): Transform {
return baseFunctions.map(mapper, options);
}
@@ -207,10 +207,7 @@ export function last<T>(readable: Readable): Promise<T | null> {
* @param batchSize Size of the batches, defaults to 1000.
* @param maxBatchAge? Max lifetime of a batch, defaults to 500
*/
-export function batch(
-batchSize: number,
-maxBatchAge?: number,
-): NodeJS.ReadWriteStream {
+export function batch(batchSize: number, maxBatchAge?: number): Transform {
return baseFunctions.batch(batchSize, maxBatchAge);
}

View File

@@ -0,0 +1,31 @@
import { Transform } from "stream";
import { StringDecoder } from "string_decoder";
import { WithEncoding } from "../definitions";
/**
* Return a ReadWrite stream that joins streamed chunks using the given separator
* @param separator Separator to join with
* @param options
* @param options.encoding Encoding written chunks are assumed to use
*/
export function join(
separator: string,
options: WithEncoding = { encoding: "utf8" },
): NodeJS.ReadWriteStream {
let isFirstChunk = true;
const decoder = new StringDecoder(options.encoding);
return new Transform({
readableObjectMode: true,
async transform(chunk: Buffer, encoding, callback) {
const asString = decoder.write(chunk);
// Take care not to break up multi-byte characters spanning multiple chunks
if (asString !== "" || chunk.length === 0) {
if (!isFirstChunk) {
this.push(separator);
}
this.push(asString);
isFirstChunk = false;
}
callback();
},
});
}

View File

@@ -0,0 +1,56 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { join } from ".";
test.cb("join() joins chunks using the specified separator", t => {
t.plan(9);
const source = new Readable({ objectMode: true });
const expectedParts = ["ab|", "|", "c|d", "|", "|", "|", "e", "|", "|f|"];
let i = 0;
source
.pipe(join("|"))
.on("data", part => {
expect(part).to.equal(expectedParts[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("ab|");
source.push("c|d");
source.push("|");
source.push("e");
source.push("|f|");
source.push(null);
});
test.cb(
"join() joins chunks using the specified separator without breaking up multi-byte characters " +
"spanning multiple chunks",
t => {
t.plan(5);
const source = new Readable({ objectMode: true });
const expectedParts = ["ø", "|", "ö", "|", "一"];
let i = 0;
source
.pipe(join("|"))
.on("data", part => {
expect(part).to.equal(expectedParts[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push(Buffer.from("ø").slice(0, 1)); // 2-byte character spanning two chunks
source.push(Buffer.from("ø").slice(1, 2));
source.push(Buffer.from("ö").slice(0, 1)); // 2-byte character spanning two chunks
source.push(Buffer.from("ö").slice(1, 2));
source.push(Buffer.from("一").slice(0, 1)); // 3-byte character spanning three chunks
source.push(Buffer.from("一").slice(1, 2));
source.push(Buffer.from("一").slice(2, 3));
source.push(null);
},
);

View File

@@ -0,0 +1,14 @@
import { Readable } from "stream";
/**
* Return a Promise resolving to the last streamed chunk of the given readable stream, after it has
* ended
* @param readable Readable stream to wait on
*/
export function last<T>(readable: Readable): Promise<T | null> {
let lastChunk: T | null = null;
return new Promise((resolve, _) => {
readable
.on("data", chunk => (lastChunk = chunk))
.on("end", () => resolve(lastChunk));
});
}
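A usage sketch (illustrative only, not part of this commit; assumes last is exported from the "mhysa" entry): await the final chunk after the stream ends.
import { Readable } from "stream";
import { last } from "mhysa";

async function main() {
    const source = new Readable({ objectMode: true, read() {} });
    const lastChunk = last<string>(source);
    ["a", "b", "c"].forEach(letter => source.push(letter));
    source.push(null);
    console.log(await lastChunk); // "c"
}
main();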

View File

@@ -0,0 +1,29 @@
import { Transform } from "stream";
import { TransformOptions } from "../definitions";
/**
* Return a ReadWrite stream that maps streamed chunks
* @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such)
* @param options
* @param options.readableObjectMode Whether this stream should behave as a readable stream of objects
* @param options.writableObjectMode Whether this stream should behave as a writable stream of objects
*/
export function map<T, R>(
mapper: (chunk: T, encoding: string) => R,
options: TransformOptions = {
readableObjectMode: true,
writableObjectMode: true,
},
): Transform {
return new Transform({
...options,
async transform(chunk: T, encoding, callback) {
try {
const mapped = await mapper(chunk, encoding);
this.push(mapped);
callback();
} catch (err) {
callback(err);
}
},
});
}

View File

@@ -0,0 +1,107 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { map } from ".";
test.cb("map() maps elements synchronously", t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const expectedElements = ["A", "B", "C"];
let i = 0;
source
.pipe(map((element: string) => element.toUpperCase()))
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test.cb("map() maps elements asynchronously", t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const expectedElements = ["A", "B", "C"];
let i = 0;
source
.pipe(
map(async (element: string) => {
await Promise.resolve();
return element.toUpperCase();
}),
)
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test.cb("map() emits errors during synchronous mapping", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
source
.pipe(
map((element: string) => {
if (element !== "a") {
throw new Error("Failed mapping");
}
return element.toUpperCase();
}),
)
.resume()
.on("error", err => {
expect(err.message).to.equal("Failed mapping");
t.pass();
})
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test("map() emits errors during asynchronous mapping", t => {
t.plan(1);
return new Promise((resolve, reject) => {
const source = new Readable({ objectMode: true });
source
.pipe(
map(async (element: string) => {
await Promise.resolve();
if (element !== "a") {
throw new Error("Failed mapping");
}
return element.toUpperCase();
}),
)
.resume()
.on("error", err => {
expect(err.message).to.equal("Failed mapping");
t.pass();
resolve();
})
.on("end", () => {
t.fail();
});
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
});

View File

@@ -0,0 +1,36 @@
import { Readable } from "stream";
/**
* Return a Readable stream of readable streams merged together in chunk arrival order
* @param streams Readable streams to merge
*/
export function merge(...streams: Readable[]): Readable {
let isStarted = false;
let streamEndedCount = 0;
return new Readable({
objectMode: true,
read() {
if (streamEndedCount >= streams.length) {
this.push(null);
} else if (!isStarted) {
isStarted = true;
streams.forEach(stream =>
stream
.on("data", chunk => {
if (!this.push(chunk)) {
streams.forEach(s => s.pause());
}
})
.on("error", err => this.emit("error", err))
.on("end", () => {
streamEndedCount++;
if (streamEndedCount === streams.length) {
this.push(null);
}
}),
);
} else {
streams.forEach(s => s.resume());
}
},
});
}
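A usage sketch (illustrative only, not part of this commit; assumes merge is exported from the "mhysa" entry): unlike concat, chunks from all sources interleave in arrival order.
import { Readable } from "stream";
import { merge } from "mhysa";

const first = new Readable({ objectMode: true, read() {} });
const second = new Readable({ objectMode: true, read() {} });
merge(first, second).on("data", chunk => console.log(chunk)); // "a", "1" interleaved
first.push("a");
second.push("1");
first.push(null);
second.push(null);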

View File

@@ -0,0 +1,44 @@
import { Transform } from "stream";
import { sleep } from "../../helpers";
import { TransformOptions } from "../definitions";
/**
* Limits number of parallel processes in flight.
* @param mapper Function to execute on each data chunk
* @param parallel Max number of parallel processes, defaults to 10
* @param sleepTime Time in milliseconds to wait before re-checking when the limit is reached, defaults to 5
*/
export function parallelMap<T, R>(
mapper: (data: T) => R,
parallel: number = 10,
sleepTime: number = 5,
options: TransformOptions = {
readableObjectMode: true,
writableObjectMode: true,
},
) {
let inflight = 0;
return new Transform({
...options,
async transform(data, encoding, callback) {
while (parallel <= inflight) {
await sleep(sleepTime);
}
inflight += 1;
callback();
try {
const res = await mapper(data);
this.push(res);
} catch (e) {
this.emit("error", e);
} finally {
inflight -= 1;
}
},
async flush(callback) {
while (inflight > 0) {
await sleep(sleepTime);
}
callback();
},
});
}
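A usage sketch (illustrative only, not part of this commit; assumes parallelMap is exported from the "mhysa" entry): run at most two slow lookups at a time; note that results arrive in completion order, not input order.
import { Readable } from "stream";
import { parallelMap } from "mhysa";

const source = new Readable({ objectMode: true, read() {} });
source
    .pipe(parallelMap(async (id: number) => ({ id, fetched: true }), 2))
    .on("data", result => console.log(result));
[1, 2, 3, 4].forEach(id => source.push(id));
source.push(null);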

View File

@@ -0,0 +1,26 @@
import { Transform } from "stream";
import { StringDecoder } from "string_decoder";
import { SerializationFormats } from "../definitions";
/**
* Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk
* must be a fully defined JSON string.
* @param format Format of serialized data, only utf8 supported.
*/
export function parse(
format: SerializationFormats = SerializationFormats.utf8,
): NodeJS.ReadWriteStream {
const decoder = new StringDecoder(format);
return new Transform({
readableObjectMode: true,
writableObjectMode: true,
async transform(chunk: Buffer, encoding, callback) {
try {
const asString = decoder.write(chunk);
// Using await causes parsing errors to be emitted
callback(undefined, await JSON.parse(asString));
} catch (err) {
callback(err);
}
},
});
}
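A usage sketch (illustrative only, not part of this commit; assumes parse is exported from the "mhysa" entry): each written chunk must be a complete JSON document.
import { Readable } from "stream";
import { parse } from "mhysa";

const source = new Readable({ objectMode: true, read() {} });
source.pipe(parse()).on("data", obj => console.log(obj)); // { a: 1 }, then [ 1, 2 ]
source.push(Buffer.from('{"a":1}'));
source.push(Buffer.from("[1,2]"));
source.push(null);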

View File

@@ -0,0 +1,31 @@
import { Transform } from "stream";
import { performance } from "perf_hooks";
import { sleep } from "../../helpers";
import { TransformOptions } from "../definitions";
/**
* Limits the rate of data transferred through the stream.
* @param targetRate Desired rate in chunks per second
* @param period Period to sleep for when rate is above or equal to targetRate
*/
export function rate(
targetRate: number = 50,
period: number = 2,
options: TransformOptions = {
readableObjectMode: true,
writableObjectMode: true,
},
) {
const deltaMS = ((1 / targetRate) * 1000) / period; // Skip half a period
let total = 0;
const start = performance.now();
return new Transform({
...options,
async transform(data, encoding, callback) {
const currentRate = (total / (performance.now() - start)) * 1000;
if (targetRate && currentRate > targetRate) {
await sleep(deltaMS);
}
total += 1;
callback(undefined, data);
},
});
}
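A usage sketch (illustrative only, not part of this commit; assumes rate is exported from the "mhysa" entry): cap throughput at roughly 10 chunks per second.
import { Readable } from "stream";
import { rate } from "mhysa";

const source = new Readable({ objectMode: true, read() {} });
source.pipe(rate(10)).on("data", chunk => console.log(Date.now(), chunk));
for (let i = 0; i < 25; i++) {
    source.push(`chunk-${i}`);
}
source.push(null);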

View File

@@ -0,0 +1,57 @@
import { Transform } from "stream";
import { TransformOptions } from "../definitions";
/**
* Return a ReadWrite stream that reduces streamed chunks down to a single value and yield that
* value
* @param iteratee Reducer function to apply on each streamed chunk
* @param initialValue Initial value
* @param options
* @param options.readableObjectMode Whether this stream should behave as a readable stream of objects
* @param options.writableObjectMode Whether this stream should behave as a writable stream of objects
*/
export function reduce<T, R>(
iteratee:
| ((previousValue: R, chunk: T, encoding: string) => R)
| ((previousValue: R, chunk: T, encoding: string) => Promise<R>),
initialValue: R,
options: TransformOptions = {
readableObjectMode: true,
writableObjectMode: true,
},
) {
let value = initialValue;
return new Transform({
readableObjectMode: options.readableObjectMode,
writableObjectMode: options.writableObjectMode,
async transform(chunk: T, encoding, callback) {
let isPromise = false;
try {
const result = iteratee(value, chunk, encoding);
isPromise = result instanceof Promise;
value = await result;
callback();
} catch (err) {
if (isPromise) {
// Calling the callback asynchronously with an error wouldn't emit the error, so emit directly
this.emit("error", err);
callback();
} else {
callback(err);
}
}
},
flush(callback) {
// Best effort attempt at yielding the final value (will throw if e.g. yielding an object and
// downstream doesn't expect objects)
try {
callback(undefined, value);
} catch (err) {
try {
this.emit("error", err);
} catch {
// Best effort was made
}
}
},
});
}

View File

@@ -0,0 +1,98 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { reduce } from ".";

test.cb("reduce() reduces elements synchronously", t => {
    t.plan(1);
    const source = new Readable({ objectMode: true });
    const expectedValue = 6;
    source
        .pipe(reduce((acc: number, element: string) => acc + element.length, 0))
        .on("data", (element: number) => {
            expect(element).to.equal(expectedValue);
            t.pass();
        })
        .on("error", t.end)
        .on("end", t.end);
    source.push("ab");
    source.push("cd");
    source.push("ef");
    source.push(null);
});

test.cb("reduce() reduces elements asynchronously", t => {
    t.plan(1);
    const source = new Readable({ objectMode: true });
    const expectedValue = 6;
    source
        .pipe(
            reduce(async (acc: number, element: string) => {
                await Promise.resolve();
                return acc + element.length;
            }, 0),
        )
        .on("data", (element: number) => {
            expect(element).to.equal(expectedValue);
            t.pass();
        })
        .on("error", t.end)
        .on("end", t.end);
    source.push("ab");
    source.push("cd");
    source.push("ef");
    source.push(null);
});

test.cb("reduce() emits errors during synchronous reduce", t => {
    t.plan(2);
    const source = new Readable({ objectMode: true });
    source
        .pipe(
            reduce((acc: number, element: string) => {
                if (element !== "ab") {
                    throw new Error("Failed reduce");
                }
                return acc + element.length;
            }, 0),
        )
        .resume()
        .on("error", err => {
            expect(err.message).to.equal("Failed reduce");
            t.pass();
        })
        .on("end", t.end);
    source.push("ab");
    source.push("cd");
    source.push("ef");
    source.push(null);
});

test.cb("reduce() emits errors during asynchronous reduce", t => {
    t.plan(2);
    const source = new Readable({ objectMode: true });
    source
        .pipe(
            reduce(async (acc: number, element: string) => {
                await Promise.resolve();
                if (element !== "ab") {
                    throw new Error("Failed reduce");
                }
                return acc + element.length;
            }, 0),
        )
        .resume()
        .on("error", err => {
            expect(err.message).to.equal("Failed reduce");
            t.pass();
        })
        .on("end", t.end);
    source.push("ab");
    source.push("cd");
    source.push("ef");
    source.push(null);
});

View File

@ -0,0 +1,33 @@
import { Transform } from "stream";
import { StringDecoder } from "string_decoder";
import { WithEncoding } from "../definitions";
/**
 * Return a ReadWrite stream that replaces occurrences of the given string or regular expression in
 * the streamed chunks with the specified replacement string
 * @param searchValue Search string to use
 * @param replaceValue Replacement string to use
 * @param options
 * @param options.encoding Encoding written chunks are assumed to use
 */
export function replace(
    searchValue: string | RegExp,
    replaceValue: string,
    options: WithEncoding = { encoding: "utf8" },
): NodeJS.ReadWriteStream {
    const decoder = new StringDecoder(options.encoding);
    return new Transform({
        readableObjectMode: true,
        transform(chunk: Buffer, encoding, callback) {
            const asString = decoder.write(chunk);
            // Take care not to break up multi-byte characters spanning multiple chunks
            if (asString !== "" || chunk.length === 0) {
                callback(
                    undefined,
                    asString.replace(searchValue, replaceValue),
                );
            } else {
                callback();
            }
        },
        flush(callback) {
            // Flush any bytes still held by the decoder (e.g. a trailing incomplete
            // multi-byte character) so they are not silently dropped
            const remaining = decoder.end();
            if (remaining !== "") {
                this.push(remaining.replace(searchValue, replaceValue));
            }
            callback();
        },
    });
}
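
Note that matching is per-chunk: the StringDecoder only buffers incomplete multi-byte characters, so a search value split across two chunks will not be found. A usage sketch (the `mhysa` package name is assumed, as elsewhere in the repository):

import { replace } from "mhysa";

// Upgrades the first "http://" occurrence in each chunk
const upgrade = replace("http://", "https://");
upgrade.on("data", (part: string) => console.log(part));
upgrade.write("see http://example.com for details");
upgrade.end();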

View File

@ -0,0 +1,80 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { replace } from ".";

test.cb(
    "replace() replaces occurrences of the given string in the streamed elements with the specified " +
        "replacement string",
    t => {
        t.plan(3);
        const source = new Readable({ objectMode: true });
        const expectedElements = ["abc", "xyf", "ghi"];
        let i = 0;
        source
            .pipe(replace("de", "xy"))
            .on("data", part => {
                expect(part).to.equal(expectedElements[i]);
                t.pass();
                i++;
            })
            .on("error", t.end)
            .on("end", t.end);
        source.push("abc");
        source.push("def");
        source.push("ghi");
        source.push(null);
    },
);

test.cb(
    "replace() replaces occurrences of the given regular expression in the streamed elements with " +
        "the specified replacement string",
    t => {
        t.plan(3);
        const source = new Readable({ objectMode: true });
        const expectedElements = ["abc", "xyz", "ghi"];
        let i = 0;
        source
            .pipe(replace(/^def$/, "xyz"))
            .on("data", part => {
                expect(part).to.equal(expectedElements[i]);
                t.pass();
                i++;
            })
            .on("error", t.end)
            .on("end", t.end);
        source.push("abc");
        source.push("def");
        source.push("ghi");
        source.push(null);
    },
);

test.cb(
    "replace() replaces occurrences of the given multi-byte character even if it spans multiple chunks",
    t => {
        t.plan(3);
        const source = new Readable({ objectMode: true });
        const expectedElements = ["ø", "O", "a"];
        let i = 0;
        source
            .pipe(replace("ö", "O"))
            .on("data", part => {
                expect(part).to.equal(expectedElements[i]);
                t.pass();
                i++;
            })
            .on("error", t.end)
            .on("end", t.end);
        source.push(Buffer.from("ø").slice(0, 1)); // 2-byte character spanning two chunks
        source.push(Buffer.from("ø").slice(1, 2));
        source.push(Buffer.from("ö").slice(0, 1)); // 2-byte character spanning two chunks
        source.push(Buffer.from("ö").slice(1, 2));
        source.push("a");
        source.push(null);
    },
);

View File

@ -0,0 +1,34 @@
import { Transform } from "stream";
import { StringDecoder } from "string_decoder";
import { WithEncoding } from "../definitions";
/**
 * Return a ReadWrite stream that splits streamed chunks using the given separator
 * @param separator Separator to split by, defaulting to "\n"
 * @param options
 * @param options.encoding Encoding written chunks are assumed to use
 */
export function split(
    separator: string | RegExp = "\n",
    options: WithEncoding = { encoding: "utf8" },
): NodeJS.ReadWriteStream {
    let buffered = "";
    const decoder = new StringDecoder(options.encoding);
    return new Transform({
        readableObjectMode: true,
        transform(chunk: Buffer, encoding, callback) {
            const asString = decoder.write(chunk);
            const splitted = asString.split(separator);
            // Prepend any partial part buffered from the previous chunk
            if (splitted.length > 1) {
                splitted[0] = buffered.concat(splitted[0]);
                buffered = "";
            }
            // The last part may still be incomplete; hold it back until the next chunk
            buffered += splitted[splitted.length - 1];
            splitted.slice(0, -1).forEach((part: string) => this.push(part));
            callback();
        },
        flush(callback) {
            callback(undefined, buffered + decoder.end());
        },
    });
}
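
For instance, a hypothetical line-by-line reader over a file (the file name is illustrative, not part of this commit):

import { createReadStream } from "fs";
import { split } from "mhysa";

// Emit each line of the file as a separate chunk; "app.log" is a hypothetical path
createReadStream("app.log")
    .pipe(split("\n"))
    .on("data", (line: string) => console.log("line:", line));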

View File

@ -0,0 +1,98 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { split } from ".";

test.cb("split() splits chunks using the default separator (\\n)", t => {
    t.plan(5);
    const source = new Readable({ objectMode: true });
    const expectedParts = ["ab", "c", "d", "ef", ""];
    let i = 0;
    source
        .pipe(split())
        .on("data", part => {
            expect(part).to.equal(expectedParts[i]);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);
    source.push("ab\n");
    source.push("c");
    source.push("\n");
    source.push("d");
    source.push("\nef\n");
    source.push(null);
});

test.cb("split() splits chunks using the specified separator", t => {
    t.plan(6);
    const source = new Readable({ objectMode: true });
    const expectedParts = ["ab", "c", "d", "e", "f", ""];
    let i = 0;
    source
        .pipe(split("|"))
        .on("data", (part: string) => {
            expect(part).to.equal(expectedParts[i]);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);
    source.push("ab|");
    source.push("c|d");
    source.push("|");
    source.push("e");
    source.push("|f|");
    source.push(null);
});

test.cb(
    "split() splits utf8 encoded buffers using the specified separator",
    t => {
        t.plan(3);
        const expectedElements = ["a", "b", "c"];
        let i = 0;
        const through = split(",");
        const buf = Buffer.from("a,b,c");
        through
            .on("data", element => {
                expect(element).to.equal(expectedElements[i]);
                i++;
                t.pass();
            })
            .on("error", t.end)
            .on("end", t.end);
        for (let j = 0; j < buf.length; ++j) {
            through.write(buf.slice(j, j + 1));
        }
        through.end();
    },
);

test.cb(
    "split() splits utf8 encoded buffers with multi-byte characters using the specified separator",
    t => {
        t.plan(3);
        const expectedElements = ["一", "一", "一"];
        let i = 0;
        const through = split(",");
        const buf = Buffer.from("一,一,一"); // "一" is a multi-byte utf8 character (U+4E00)
        through
            .on("data", element => {
                expect(element).to.equal(expectedElements[i]);
                i++;
                t.pass();
            })
            .on("error", t.end)
            .on("end", t.end);
        for (let j = 0; j < buf.length; ++j) {
            through.write(buf.slice(j, j + 1));
        }
        through.end();
    },
);

View File

@ -0,0 +1,22 @@
import { Transform } from "stream";
import { JsonValue, JsonParseOptions } from "../definitions";
/**
 * Return a ReadWrite stream that stringifies the streamed chunks to JSON
 * @param options
 * @param options.pretty Whether to pretty-print the output with two-space indentation
 */
export function stringify(
    options: JsonParseOptions = { pretty: false },
): NodeJS.ReadWriteStream {
    return new Transform({
        readableObjectMode: true,
        writableObjectMode: true,
        transform(chunk: JsonValue, encoding, callback) {
            callback(
                undefined,
                options.pretty
                    ? JSON.stringify(chunk, null, 2)
                    : JSON.stringify(chunk),
            );
        },
    });
}
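
No separator is written between chunks, so consecutive JSON documents come out back to back. A usage sketch (assuming the `mhysa` package name, as above):

import { stringify } from "mhysa";

const toJson = stringify({ pretty: true });
toJson.on("data", (json: string) => console.log(json));
toJson.write({ name: "mhysa", streams: true });
toJson.end();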

View File

@ -0,0 +1,21 @@
import { Transform } from "stream";
import { TransformOptions } from "../definitions";
/**
 * Return a ReadWrite stream that unbatches: each streamed array is broken up and its elements are
 * pushed downstream as individual chunks
 * @param options
 * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects
 * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects
 */
export function unbatch(
    options: TransformOptions = {
        readableObjectMode: true,
        writableObjectMode: true,
    },
) {
    return new Transform({
        ...options,
        transform(data, encoding, callback) {
            // Each incoming chunk is expected to be an array (a batch)
            for (const d of data) {
                this.push(d);
            }
            callback();
        },
    });
}
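
A usage sketch (again assuming the `mhysa` package name): arrays written in become individual chunks out.

import { Readable } from "stream";
import { unbatch } from "mhysa";

const batches = new Readable({ objectMode: true, read() {} });
batches.push([1, 2]);
batches.push([3]);
batches.push(null);

// Logs 1, 2, 3 as three separate chunks
batches.pipe(unbatch()).on("data", n => console.log(n));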

View File

@ -3,14 +3,14 @@
     "noImplicitAny": true,
     "strictNullChecks": true,
     "noImplicitReturns": true,
-    "noUnusedLocals": true,
+    "noUnusedLocals": false,
     "noImplicitThis": true,
     "forceConsistentCasingInFileNames": true,
     "suppressImplicitAnyIndexErrors": true,
     "outDir": "./dist",
     "module": "commonjs",
     "target": "es5",
-    "lib": ["es2016"],
+    "lib": ["es2016", "es2019"],
     "sourceMap": true,
     "declaration": true
   },

yarn.lock
View File

@ -37,41 +37,41 @@
     imurmurhash "^0.1.4"
     slide "^1.1.5"

-"@babel/code-frame@^7.0.0":
-  version "7.0.0"
-  resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.0.0.tgz#06e2ab19bdb535385559aabb5ba59729482800f8"
-  integrity sha512-OfC2uemaknXr87bdLUkWog7nYuliM9Ij5HUcajsVcMCpQrcLmtxRbVFTIqmcSkSeYRBFBRxs2FiUqFJDLdiebA==
+"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.5.5":
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.5.5.tgz#bc0782f6d69f7b7d49531219699b988f669a8f9d"
+  integrity sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw==
   dependencies:
     "@babel/highlight" "^7.0.0"

 "@babel/core@^7.4.0":
-  version "7.5.0"
-  resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.5.0.tgz#6ed6a2881ad48a732c5433096d96d1b0ee5eb734"
-  integrity sha512-6Isr4X98pwXqHvtigw71CKgmhL1etZjPs5A67jL/w0TkLM9eqmFR40YrnJvEc1WnMZFsskjsmid8bHZyxKEAnw==
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.5.5.tgz#17b2686ef0d6bc58f963dddd68ab669755582c30"
+  integrity sha512-i4qoSr2KTtce0DmkuuQBV4AuQgGPUcPXMr9L5MyYAtk06z068lQ10a4O009fe5OB/DfNV+h+qqT7ddNV8UnRjg==
   dependencies:
-    "@babel/code-frame" "^7.0.0"
-    "@babel/generator" "^7.5.0"
-    "@babel/helpers" "^7.5.0"
-    "@babel/parser" "^7.5.0"
+    "@babel/code-frame" "^7.5.5"
+    "@babel/generator" "^7.5.5"
+    "@babel/helpers" "^7.5.5"
+    "@babel/parser" "^7.5.5"
     "@babel/template" "^7.4.4"
-    "@babel/traverse" "^7.5.0"
-    "@babel/types" "^7.5.0"
+    "@babel/traverse" "^7.5.5"
+    "@babel/types" "^7.5.5"
     convert-source-map "^1.1.0"
     debug "^4.1.0"
     json5 "^2.1.0"
-    lodash "^4.17.11"
+    lodash "^4.17.13"
     resolve "^1.3.2"
     semver "^5.4.1"
     source-map "^0.5.0"

-"@babel/generator@^7.0.0", "@babel/generator@^7.4.0", "@babel/generator@^7.5.0":
-  version "7.5.0"
-  resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.5.0.tgz#f20e4b7a91750ee8b63656073d843d2a736dca4a"
-  integrity sha512-1TTVrt7J9rcG5PMjvO7VEG3FrEoEJNHxumRq66GemPmzboLWtIjjcJgk8rokuAS7IiRSpgVSu5Vb9lc99iJkOA==
+"@babel/generator@^7.0.0", "@babel/generator@^7.4.0", "@babel/generator@^7.5.5":
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.5.5.tgz#873a7f936a3c89491b43536d12245b626664e3cf"
+  integrity sha512-ETI/4vyTSxTzGnU2c49XHv2zhExkv9JHLTwDAFz85kmcwuShvYG2H08FwgIguQf4JC75CBnXAUM5PqeF4fj0nQ==
   dependencies:
-    "@babel/types" "^7.5.0"
+    "@babel/types" "^7.5.5"
     jsesc "^2.5.1"
-    lodash "^4.17.11"
+    lodash "^4.17.13"
     source-map "^0.5.0"
     trim-right "^1.0.1"
@ -122,16 +122,16 @@
     "@babel/types" "^7.0.0"

 "@babel/helper-module-transforms@^7.4.4":
-  version "7.4.4"
-  resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.4.4.tgz#96115ea42a2f139e619e98ed46df6019b94414b8"
-  integrity sha512-3Z1yp8TVQf+B4ynN7WoHPKS8EkdTbgAEy0nU0rs/1Kw4pDgmvYH3rz3aI11KgxKCba2cn7N+tqzV1mY2HMN96w==
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.5.5.tgz#f84ff8a09038dcbca1fd4355661a500937165b4a"
+  integrity sha512-jBeCvETKuJqeiaCdyaheF40aXnnU1+wkSiUs/IQg3tB85up1LyL8x77ClY8qJpuRJUcXQo+ZtdNESmZl4j56Pw==
   dependencies:
     "@babel/helper-module-imports" "^7.0.0"
     "@babel/helper-simple-access" "^7.1.0"
     "@babel/helper-split-export-declaration" "^7.4.4"
     "@babel/template" "^7.4.4"
-    "@babel/types" "^7.4.4"
-    lodash "^4.17.11"
+    "@babel/types" "^7.5.5"
+    lodash "^4.17.13"

 "@babel/helper-plugin-utils@^7.0.0":
   version "7.0.0"
@ -139,11 +139,11 @@
   integrity sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA==

 "@babel/helper-regex@^7.4.4":
-  version "7.4.4"
-  resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.4.4.tgz#a47e02bc91fb259d2e6727c2a30013e3ac13c4a2"
-  integrity sha512-Y5nuB/kESmR3tKjU8Nkn1wMGEx1tjJX076HBMeL3XLQCu6vA/YRzuTW0bbb+qRnXvQGn+d6Rx953yffl8vEy7Q==
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.5.5.tgz#0aa6824f7100a2e0e89c1527c23936c152cab351"
+  integrity sha512-CkCYQLkfkiugbRDO8eZn6lRuR8kzZoGXCg3149iTk5se7g6qykSpy3+hELSwquhu+TgHn8nkLiBwHvNX8Hofcw==
   dependencies:
-    lodash "^4.17.11"
+    lodash "^4.17.13"

 "@babel/helper-remap-async-to-generator@^7.1.0":
   version "7.1.0"
@ -181,14 +181,14 @@
     "@babel/traverse" "^7.1.0"
     "@babel/types" "^7.2.0"

-"@babel/helpers@^7.5.0":
-  version "7.5.1"
-  resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.5.1.tgz#65407c741a56ddd59dd86346cd112da3de912db3"
-  integrity sha512-rVOTDv8sH8kNI72Unenusxw6u+1vEepZgLxeV+jHkhsQlYhzVhzL1EpfoWT7Ub3zpWSv2WV03V853dqsnyoQzA==
+"@babel/helpers@^7.5.5":
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.5.5.tgz#63908d2a73942229d1e6685bc2a0e730dde3b75e"
+  integrity sha512-nRq2BUhxZFnfEn/ciJuhklHvFOqjJUD5wpx+1bxUF2axL9C+v4DE/dmp5sT2dKnpOs4orZWzpAZqlCy8QqE/7g==
   dependencies:
     "@babel/template" "^7.4.4"
-    "@babel/traverse" "^7.5.0"
-    "@babel/types" "^7.5.0"
+    "@babel/traverse" "^7.5.5"
+    "@babel/types" "^7.5.5"

 "@babel/highlight@^7.0.0":
   version "7.5.0"
@ -199,10 +199,10 @@
     esutils "^2.0.2"
     js-tokens "^4.0.0"

-"@babel/parser@^7.0.0", "@babel/parser@^7.4.4", "@babel/parser@^7.5.0":
-  version "7.5.0"
-  resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.5.0.tgz#3e0713dff89ad6ae37faec3b29dcfc5c979770b7"
-  integrity sha512-I5nW8AhGpOXGCCNYGc+p7ExQIBxRFnS2fd/d862bNOKvmoEPjYPcfIjsfdy0ujagYOIYPczKgD9l3FsgTkAzKA==
+"@babel/parser@^7.0.0", "@babel/parser@^7.4.4", "@babel/parser@^7.5.5":
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.5.5.tgz#02f077ac8817d3df4a832ef59de67565e71cca4b"
+  integrity sha512-E5BN68cqR7dhKan1SfqgPGhQ178bkVKpXTPEXnFJBrEt8/DKRZlybmy+IgYLTeN7tp1R5Ccmbm2rBk17sHYU3g==

 "@babel/plugin-proposal-async-generator-functions@^7.0.0":
   version "7.2.0"
@ -214,9 +214,9 @@
     "@babel/plugin-syntax-async-generators" "^7.2.0"

 "@babel/plugin-proposal-object-rest-spread@^7.0.0":
-  version "7.5.1"
-  resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.5.1.tgz#5788ab097c63135e4236548b4f112bfce09dd394"
-  integrity sha512-PVGXx5LYHcT7L4MdoE+rM5uq68IKlvU9lljVQ4OXY6aUEnGvezcGbM4VNY57Ug+3R2Zg/nYHlEdiWoIBoRA0mw==
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.5.5.tgz#61939744f71ba76a3ae46b5eea18a54c16d22e58"
+  integrity sha512-F2DxJJSQ7f64FyTVl5cw/9MWn6naXGdk3Q3UhDbFEEHv+EilCPoeRD3Zh/Utx1CJz4uyKlQ4uH+bJPbEhMV7Zw==
   dependencies:
     "@babel/helper-plugin-utils" "^7.0.0"
     "@babel/plugin-syntax-object-rest-spread" "^7.2.0"
@ -295,28 +295,28 @@
     "@babel/parser" "^7.4.4"
     "@babel/types" "^7.4.4"

-"@babel/traverse@^7.1.0", "@babel/traverse@^7.5.0":
-  version "7.5.0"
-  resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.5.0.tgz#4216d6586854ef5c3c4592dab56ec7eb78485485"
-  integrity sha512-SnA9aLbyOCcnnbQEGwdfBggnc142h/rbqqsXcaATj2hZcegCl903pUD/lfpsNBlBSuWow/YDfRyJuWi2EPR5cg==
+"@babel/traverse@^7.1.0", "@babel/traverse@^7.5.5":
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.5.5.tgz#f664f8f368ed32988cd648da9f72d5ca70f165bb"
+  integrity sha512-MqB0782whsfffYfSjH4TM+LMjrJnhCNEDMDIjeTpl+ASaUvxcjoiVCo/sM1GhS1pHOXYfWVCYneLjMckuUxDaQ==
   dependencies:
-    "@babel/code-frame" "^7.0.0"
-    "@babel/generator" "^7.5.0"
+    "@babel/code-frame" "^7.5.5"
+    "@babel/generator" "^7.5.5"
     "@babel/helper-function-name" "^7.1.0"
     "@babel/helper-split-export-declaration" "^7.4.4"
-    "@babel/parser" "^7.5.0"
-    "@babel/types" "^7.5.0"
+    "@babel/parser" "^7.5.5"
+    "@babel/types" "^7.5.5"
     debug "^4.1.0"
     globals "^11.1.0"
-    lodash "^4.17.11"
+    lodash "^4.17.13"

-"@babel/types@^7.0.0", "@babel/types@^7.2.0", "@babel/types@^7.4.4", "@babel/types@^7.5.0":
-  version "7.5.0"
-  resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.5.0.tgz#e47d43840c2e7f9105bc4d3a2c371b4d0c7832ab"
-  integrity sha512-UFpDVqRABKsW01bvw7/wSUe56uy6RXM5+VJibVVAybDGxEW25jdwiFJEf7ASvSaC7sN7rbE/l3cLp2izav+CtQ==
+"@babel/types@^7.0.0", "@babel/types@^7.2.0", "@babel/types@^7.4.4", "@babel/types@^7.5.5":
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.5.5.tgz#97b9f728e182785909aa4ab56264f090a028d18a"
+  integrity sha512-s63F9nJioLqOlW3UkyMd+BYhXt44YuaFm/VV0VwuteqjYwRrObkU7ra9pY4wAJR3oXi8hJrMcrcJdO/HH33vtw==
   dependencies:
     esutils "^2.0.2"
-    lodash "^4.17.11"
+    lodash "^4.17.13"
     to-fast-properties "^2.0.0"

 "@concordance/react@^2.0.0":
@ -327,9 +327,9 @@
     arrify "^1.0.1"

 "@types/chai@^4.1.7":
-  version "4.1.7"
-  resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.1.7.tgz#1b8e33b61a8c09cbe1f85133071baa0dbf9fa71a"
-  integrity sha512-2Y8uPt0/jwjhQ6EiluT0XCri1Dbplr0ZxfFXUz+ye13gaqE8u5gL5ppao1JrUYr9cIip5S6MvQzBS7Kke7U9VA==
+  version "4.2.0"
+  resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.2.0.tgz#2478260021408dec32c123a7cad3414beb811a07"
+  integrity sha512-zw8UvoBEImn392tLjxoavuonblX/4Yb9ha4KBU10FirCfwgzhKO0dvyJSF9ByxV1xK1r2AgnAi/tvQaLgxQqxA==

 "@types/events@*":
   version "3.0.0"
@ -350,22 +350,10 @@
   resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d"
   integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA==

-"@types/node@*":
-  version "12.0.12"
-  resolved "https://registry.yarnpkg.com/@types/node/-/node-12.0.12.tgz#cc791b402360db1eaf7176479072f91ee6c6c7ca"
-  integrity sha512-Uy0PN4R5vgBUXFoJrKryf5aTk3kJ8Rv3PdlHjl6UaX+Cqp1QE0yPQ68MPXGrZOfG7gZVNDIJZYyot0B9ubXUrQ==
-
-"@types/node@^10.12.10":
-  version "10.14.12"
-  resolved "https://registry.yarnpkg.com/@types/node/-/node-10.14.12.tgz#0eec3155a46e6c4db1f27c3e588a205f767d622f"
-  integrity sha512-QcAKpaO6nhHLlxWBvpc4WeLrTvPqlHOvaj0s5GriKkA1zq+bsFBPpfYCvQhLqLgYlIko8A9YrPdaMHCo5mBcpg==
-
-"@types/typescript@^2.0.0":
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/@types/typescript/-/typescript-2.0.0.tgz#c433539c98bae28682b307eaa7a0fd2115b83c28"
-  integrity sha1-xDNTnJi64oaCswfqp6D9IRW4PCg=
-  dependencies:
-    typescript "*"
+"@types/node@*", "@types/node@^12.7.2":
+  version "12.7.2"
+  resolved "https://registry.yarnpkg.com/@types/node/-/node-12.7.2.tgz#c4e63af5e8823ce9cc3f0b34f7b998c2171f0c44"
+  integrity sha512-dyYO+f6ihZEtNPDcWNR1fkoTDf3zAK3lAABDze3mz6POyIercH0lEUawUFXlG8xaQZmm1yEBON/4TsYv/laDYg==

 abbrev@1:
   version "1.1.1"
@ -427,6 +415,11 @@ are-we-there-yet@~1.1.2:
     delegates "^1.0.0"
     readable-stream "^2.0.6"

+arg@^4.1.0:
+  version "4.1.1"
+  resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.1.tgz#485f8e7c390ce4c5f78257dbea80d4be11feda4c"
+  integrity sha512-SlmP3fEA88MBv0PypnXZ8ZfJhwmDeIE3SP71j37AiXQBXYosPV0x6uISAaHYSlSVhmHOVkomen0tbGk6Anlebw==
+
 argparse@^1.0.7:
   version "1.0.10"
   resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"
@ -678,7 +671,7 @@ braces@^2.3.1, braces@^2.3.2:
     split-string "^3.0.2"
     to-regex "^3.0.1"

-buffer-from@^1.0.0, buffer-from@^1.1.0:
+buffer-from@^1.0.0:
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef"
   integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==
@ -818,9 +811,9 @@ class-utils@^0.3.5:
     static-extend "^0.1.1"

 clean-stack@^2.0.0:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.1.0.tgz#9e7fec7f3f8340a2ab4f127c80273085e8fbbdd0"
-  integrity sha512-uQWrpRm+iZZUCAp7ZZJQbd4Za9I3AjR/3YTjmcnAtkauaIm/T5CT6U8zVI6e60T6OANqBFAzuR9/HB3NzuZCRA==
+  version "2.2.0"
+  resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b"
+  integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==

 clean-yaml-object@^0.1.0:
   version "0.1.0"
@ -1120,11 +1113,16 @@ detect-libc@^1.0.2:
   resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b"
   integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=

-diff@^3.1.0, diff@^3.2.0:
+diff@^3.2.0:
   version "3.5.0"
   resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12"
   integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==

+diff@^4.0.1:
+  version "4.0.1"
+  resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.1.tgz#0c667cb467ebbb5cea7f14f135cc2dba7780a8ff"
+  integrity sha512-s2+XdvhPCOF01LRQBC8hf4vhbVmI2CGS5aZnxLJlT5FtdhPCDFq80q++zK2KlrVorVDdL5BOGZ/VfLrVtYNF+Q==
+
 dir-glob@^2.0.0:
   version "2.2.2"
   resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-2.2.2.tgz#fa09f0694153c8918b18ba0deafae94769fc50c4"
@ -1215,14 +1213,14 @@ espurify@^1.6.0:
     core-js "^2.0.0"

 estraverse@^4.0.0, estraverse@^4.1.1:
-  version "4.2.0"
-  resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13"
-  integrity sha1-De4/7TH81GlhjOc0IJn8GvoL2xM=
+  version "4.3.0"
+  resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d"
+  integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==

 esutils@^2.0.2:
-  version "2.0.2"
-  resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b"
-  integrity sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs=
+  version "2.0.3"
+  resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64"
+  integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==

 execa@^0.7.0:
   version "0.7.0"
@ -1459,9 +1457,9 @@ got@^6.7.1:
     url-parse-lax "^1.0.0"

 graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2:
-  version "4.2.0"
-  resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.0.tgz#8d8fdc73977cb04104721cb53666c1ca64cd328b"
-  integrity sha512-jpSvDPV4Cq/bgtpndIWbI5hmYxhQGHPC4d4cqBPb4DLniCfhJokdXhwhaDuLBGLQdvvRum/UiX6ECVIPvDXqdg==
+  version "4.2.2"
+  resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.2.tgz#6f0952605d0140c1cfdb138ed005775b92d67b02"
+  integrity sha512-IItsdsea19BoLC7ELy13q1iJFNmd7ofZH5+X/pJr90/nRoPEX0DJo1dHDbgtYWOhJhcCgMDTOw84RZ72q6lB+Q==

 has-flag@^3.0.0:
   version "3.0.0"
@ -1517,9 +1515,9 @@ hasha@^3.0.0:
     is-stream "^1.0.1"

 hosted-git-info@^2.1.4:
-  version "2.7.1"
-  resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.7.1.tgz#97f236977bd6e125408930ff6de3eec6281ec047"
-  integrity sha512-7T/BxH19zbcCTa8XkMlbK5lTo1WtgkFi3GvdWEyNuc4Vex7/9Dqbnpsf4JMydcfj9HCg4zUWFTL3Za6lapg5/w==
+  version "2.8.4"
+  resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.4.tgz#44119abaf4bc64692a16ace34700fed9c03e2546"
+  integrity sha512-pzXIvANXEFrc5oFFXRMkbLPQ2rXRoDERwDLyrcUxGhaZhgP54BBSl9Oheh7Vv0T090cszWBxPjkQQ5Sq1PbBRQ==

 iconv-lite@^0.4.4:
   version "0.4.24"
@ -2009,14 +2007,14 @@ lodash.islength@^4.0.1:
   integrity sha1-Tpho1FJXXXUK/9NYyXlUPcIO1Xc=

 lodash.merge@^4.6.1:
-  version "4.6.1"
-  resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.1.tgz#adc25d9cb99b9391c59624f379fbba60d7111d54"
-  integrity sha512-AOYza4+Hf5z1/0Hztxpm2/xiPZgi/cjMqdnKTUWTBSKchJlxXXuUSxCCl8rJlf4g6yww/j6mA8nC8Hw/EZWxKQ==
+  version "4.6.2"
+  resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a"
+  integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==

-lodash@^4.17.11:
-  version "4.17.11"
-  resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.11.tgz#b39ea6229ef607ecd89e2c8df12536891cac9b8d"
-  integrity sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg==
+lodash@^4.17.13:
+  version "4.17.15"
+  resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548"
+  integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==

 log-symbols@^2.2.0:
   version "2.2.0"
@ -2702,7 +2700,7 @@ redent@^2.0.0:
     indent-string "^3.0.0"
     strip-indent "^2.0.0"

-regenerate-unicode-properties@^8.0.2:
+regenerate-unicode-properties@^8.1.0:
   version "8.1.0"
   resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz#ef51e0f0ea4ad424b77bf7cb41f3e015c70a3f0e"
   integrity sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA==
@ -2723,12 +2721,12 @@ regex-not@^1.0.0, regex-not@^1.0.2:
     safe-regex "^1.1.0"

 regexpu-core@^4.5.4:
-  version "4.5.4"
-  resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.5.4.tgz#080d9d02289aa87fe1667a4f5136bc98a6aebaae"
-  integrity sha512-BtizvGtFQKGPUcTy56o3nk1bGRp4SZOTYrDtGNlqCQufptV5IkkLN6Emw+yunAJjzf+C9FQFtvq7IoA3+oMYHQ==
+  version "4.5.5"
+  resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.5.5.tgz#aaffe61c2af58269b3e516b61a73790376326411"
+  integrity sha512-FpI67+ky9J+cDizQUJlIlNZFKual/lUkFr1AG6zOCpwZ9cLrg8UUVakyUQJD7fCDIe9Z2nwTQJNPyonatNmDFQ==
   dependencies:
     regenerate "^1.4.0"
-    regenerate-unicode-properties "^8.0.2"
+    regenerate-unicode-properties "^8.1.0"
     regjsgen "^0.5.0"
     regjsparser "^0.6.0"
     unicode-match-property-ecmascript "^1.0.4"
@ -2806,9 +2804,9 @@ resolve-url@^0.2.1:
   integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=

 resolve@^1.10.0, resolve@^1.3.2:
-  version "1.11.1"
-  resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.11.1.tgz#ea10d8110376982fef578df8fc30b9ac30a07a3e"
-  integrity sha512-vIpgF6wfuJOZI7KKKSP+HmiKggadPQAdsp5HiC1mvqnfp0gF1vdwgBWZIdrVft9pgqoMFQN+R7BSWZiBxx+BBw==
+  version "1.12.0"
+  resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.12.0.tgz#3fc644a35c84a48554609ff26ec52b66fa577df6"
+  integrity sha512-B/dOmuoAik5bKcD6s6nXDCjzUKnaDvdkRyAk6rsmsKLipWj4797iothd7jmmUhWTfinVMU+wc56rYKsit2Qy4w==
   dependencies:
     path-parse "^1.0.6"
@ -2826,9 +2824,9 @@ ret@~0.1.10:
   integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==

 rimraf@^2.6.1, rimraf@^2.6.3:
-  version "2.6.3"
-  resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab"
-  integrity sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==
+  version "2.7.1"
+  resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec"
+  integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==
   dependencies:
     glob "^7.1.3"
@ -2867,9 +2865,9 @@ semver-diff@^2.0.0:
     semver "^5.0.3"

 "semver@2 || 3 || 4 || 5", semver@^5.0.3, semver@^5.1.0, semver@^5.3.0, semver@^5.4.1, semver@^5.5.1, semver@^5.6.0:
-  version "5.7.0"
-  resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.0.tgz#790a7cf6fea5459bac96110b29b60412dc8ff96b"
-  integrity sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA==
+  version "5.7.1"
+  resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
+  integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==

 serialize-error@^2.1.0:
   version "2.1.0"
@ -2971,18 +2969,10 @@ source-map-resolve@^0.5.0:
     source-map-url "^0.4.0"
     urix "^0.1.0"

-source-map-support@^0.5.11:
-  version "0.5.12"
-  resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.12.tgz#b4f3b10d51857a5af0138d3ce8003b201613d599"
-  integrity sha512-4h2Pbvyy15EE02G+JOZpUCmqWJuqrs+sEkzewTm++BPi7Hvn/HwcqLAcNxYAyI0x13CpPPn+kMjl+hplXMHITQ==
-  dependencies:
-    buffer-from "^1.0.0"
-    source-map "^0.6.0"
-
-source-map-support@^0.5.6:
-  version "0.5.9"
-  resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.9.tgz#41bc953b2534267ea2d605bccfa7bfa3111ced5f"
-  integrity sha512-gR6Rw4MvUlYy83vP0vxoVNzM6t8MUXqNuRsuBmBHQDu1Fh6X015FrLdgoDKcNdkwGubozq0P4N0Q37UyFVr1EA==
+source-map-support@^0.5.11, source-map-support@^0.5.6:
+  version "0.5.13"
+  resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932"
+  integrity sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==
   dependencies:
     buffer-from "^1.0.0"
     source-map "^0.6.0"
@ -3024,9 +3014,9 @@ spdx-expression-parse@^3.0.0:
     spdx-license-ids "^3.0.0"

 spdx-license-ids@^3.0.0:
-  version "3.0.4"
-  resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.4.tgz#75ecd1a88de8c184ef015eafb51b5b48bfd11bb1"
-  integrity sha512-7j8LYJLeY/Yb6ACbQ7F76qy5jHkp0U6jgBfJsk97bwWlVUnUWsAgpyaCvo17h0/RQGnQ036tVDomiwoI4pDkQA==
+  version "3.0.5"
+  resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz#3694b5804567a458d3c8045842a6358632f62654"
+  integrity sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==

 split-string@^3.0.1, split-string@^3.0.2:
   version "3.1.0"
@ -3235,26 +3225,18 @@ trim-right@^1.0.1:
   resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003"
   integrity sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM=

-ts-node@^7.0.1:
-  version "7.0.1"
-  resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-7.0.1.tgz#9562dc2d1e6d248d24bc55f773e3f614337d9baf"
-  integrity sha512-BVwVbPJRspzNh2yfslyT1PSbl5uIk03EZlb493RKHN4qej/D06n1cEhjlOJG69oFsE7OT8XjpTUcYf6pKTLMhw==
+ts-node@^8.3.0:
+  version "8.3.0"
+  resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-8.3.0.tgz#e4059618411371924a1fb5f3b125915f324efb57"
+  integrity sha512-dyNS/RqyVTDcmNM4NIBAeDMpsAdaQ+ojdf0GOLqE6nwJOgzEkdRNzJywhDfwnuvB10oa6NLVG1rUJQCpRN7qoQ==
   dependencies:
-    arrify "^1.0.0"
-    buffer-from "^1.1.0"
-    diff "^3.1.0"
+    arg "^4.1.0"
+    diff "^4.0.1"
     make-error "^1.1.1"
-    minimist "^1.2.0"
-    mkdirp "^0.5.1"
     source-map-support "^0.5.6"
-    yn "^2.0.0"
+    yn "^3.0.0"

-tslib@^1.7.1:
-  version "1.9.3"
-  resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.3.tgz#d7e4dd79245d85428c4d7e4822a79917954ca286"
-  integrity sha512-4krF8scpejhaOgqzBEcGM7yDIEfi0/8+8zDRZhNZZ2kjmHJ4hv3zCbQWxoJGz1iw5U0Jl0nma13xzHXcncMavQ==
-
-tslib@^1.8.0, tslib@^1.8.1:
+tslib@^1.7.1, tslib@^1.8.0, tslib@^1.8.1:
   version "1.10.0"
   resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.10.0.tgz#c3c19f95973fb0a62973fb09d90d961ee43e5c8a"
   integrity sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ==
@ -3309,11 +3291,6 @@ type-fest@^0.3.0:
   resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.3.1.tgz#63d00d204e059474fe5e1b7c011112bbd1dc29e1"
   integrity sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ==

-typescript@*:
-  version "3.5.2"
-  resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.2.tgz#a09e1dc69bc9551cadf17dba10ee42cf55e5d56c"
-  integrity sha512-7KxJovlYhTX5RaRbUdkAXN1KUZ8PwWlTzQdHV6xNqvuFOs7+WBo10TQUqT19Q/Jz2hk5v9TQDIhyLhhJY4p5AA==
-
 typescript@^3.5.3:
   version "3.5.3"
   resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977"
@ -3490,9 +3467,9 @@ xdg-basedir@^3.0.0:
   integrity sha1-SWsswQnsqNus/i3HK2A8F8WHCtQ=

 xtend@^4.0.0:
-  version "4.0.1"
-  resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af"
-  integrity sha1-pcbVMr5lbiPbgg77lDofBJmNY68=
+  version "4.0.2"
+  resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54"
+  integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==

 yallist@^2.1.2:
   version "2.1.2"
@ -3511,7 +3488,7 @@ yargs-parser@^10.0.0:
   dependencies:
     camelcase "^4.1.0"

-yn@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/yn/-/yn-2.0.0.tgz#e5adabc8acf408f6385fc76495684c88e6af689a"
-  integrity sha1-5a2ryKz0CPY4X8dklWhMiOavaJo=
+yn@^3.0.0:
+  version "3.1.1"
+  resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50"
+  integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==