Compare commits
75 Commits
feature/ba... → feature/Ob...
| Author | SHA1 | Date |
|---|---|---|
| | 107bc17bd4 | |
| | 1b3be03db3 | |
| | fddaa03300 | |
| | 57645c68da | |
| | a45a144854 | |
| | 7ab8541cf6 | |
| | a85054fd82 | |
| | c690185ab7 | |
| | 9b31479406 | |
| | 298a8b328d | |
| | 1794910b64 | |
| | e08558ca88 | |
| | ac21fb7ea6 | |
| | 9e14d8c044 | |
| | 4f80d44ed8 | |
| | b8bd69eb01 | |
| | f6e3a03eb7 | |
| | f177f95f52 | |
| | a11aa10d16 | |
| | 70edee51c4 | |
| | 158475183a | |
| | 48a231d61c | |
| | 4c7e9ceb7e | |
| | 517e281ce5 | |
| | 586f618e95 | |
| | 65c36a8f22 | |
| | ce19c5e987 | |
| | f06cb1c33e | |
| | dcfd6fe4c2 | |
| | 9d280b1662 | |
| | ee3d9b9ded | |
| | 83ef6e9734 | |
| | 7aeea4815a | |
| | d33d8dcad3 | |
| | eed36a4fe9 | |
| | ea2ffdb38c | |
| | 0067ba6a7c | |
| | 599ba16d48 | |
| | 2cbeae38e7 | |
| | ae7c9d6b09 | |
| | cd10649d44 | |
| | d5f3fd8bd8 | |
| | 2ee04a2d79 | |
| | fe0e53147c | |
| | 2524d51aa7 | |
| | 9765e6cb49 | |
| | 685215bee6 | |
| | 9b09a3f949 | |
| | c7903376e9 | |
| | f35f025dbc | |
| | 1d0e15890c | |
| | d097fa6aa5 | |
| | 1e7fad2403 | |
| | 6581e1d745 | |
| | 7394b6ef84 | |
| | 50f6886b4b | |
| | 4e80e48fa4 | |
| | 047ff66ee1 | |
| | faac6134af | |
| | 505fefeeb5 | |
| | d6d974ee0d | |
| | 5a9fcc94a6 | |
| | 27b4b2427b | |
| | 6a9f6ff919 | |
| | a40b1bf38c | |
| | 3a1fbf44d7 | |
| | c72ecaf219 | |
| | e8d672d903 | |
| | 5112ee9540 | |
| | e932adde67 | |
| | fdcc5bafc6 | |
| | c1ef5fec4b | |
| | a60b23496b | |
| | d918d8ca10 | |
| | af9293ab52 | |
.gitignore (vendored, +11)
@@ -4,3 +4,14 @@ dist
 sample_output
 yarn-error.log
 TODO.md
+
+#VIM
+## Swap
+[._]*.s[a-v][a-z]
+!*.svg # comment out if you don't need vector files
+[._]*.sw[a-p]
+[._]s[a-rt-v][a-z]
+[._]ss[a-gi-z]
+[._]sw[a-p]
+*.orig
package.json (36 lines changed)
@@ -1,6 +1,6 @@
 {
-    "name": "mhysa",
-    "version": "1.0.2",
+    "name": "@jogogo/mhysa",
+    "version": "0.0.1-beta.4",
     "description": "Streams and event emitter utils for Node.js",
     "keywords": [
         "promise",
@@ -11,40 +11,56 @@
     "author": {
         "name": "Wenzil"
     },
+    "contributors": [
+        {
+            "name": "jerry",
+            "email": "jerry@jogogo.co"
+        },
+        {
+            "name": "lewis",
+            "email": "lewis@jogogo.co"
+        }
+    ],
     "license": "MIT",
     "main": "dist/index.js",
     "types": "dist/index.d.ts",
     "files": [
         "dist"
     ],
+    "publishConfig": {
+        "registry": "https://npm.dev.jogogo.co/"
+    },
     "repository": {
-        "url": "git@github.com:Wenzil/Mhysa.git",
+        "url": "git@github.com:Jogogoplay/mhysa.git",
         "type": "git"
     },
     "scripts": {
-        "test": "ava",
+        "test": "NODE_PATH=src node node_modules/.bin/ava 'tests/*.spec.ts' -e",
+        "test:debug": "NODE_PATH=src node inspect node_modules/ava/profile.js",
+        "test:all": "NODE_PATH=src node node_modules/.bin/ava",
         "lint": "tslint -p tsconfig.json",
        "validate:tslint": "tslint-config-prettier-check ./tslint.json",
-        "prepublishOnly": "yarn lint && yarn test && yarn tsc"
+        "prepublishOnly": "yarn lint && yarn test && yarn tsc -d"
     },
     "dependencies": {},
     "devDependencies": {
         "@types/chai": "^4.1.7",
-        "@types/node": "^10.12.10",
-        "@types/typescript": "^2.0.0",
+        "@types/node": "^12.7.2",
+        "@types/sinon": "^7.0.13",
         "ava": "^1.0.0-rc.2",
         "chai": "^4.2.0",
         "mhysa": "./",
         "prettier": "^1.14.3",
-        "ts-node": "^7.0.1",
+        "sinon": "^7.4.2",
+        "ts-node": "^8.3.0",
         "tslint": "^5.11.0",
         "tslint-config-prettier": "^1.16.0",
         "tslint-plugin-prettier": "^2.0.1",
-        "typescript": "^3.1.6"
+        "typescript": "^3.5.3"
     },
     "ava": {
         "files": [
-            "src/**/*.spec.ts"
+            "tests/*.spec.ts"
         ],
         "sources": [
             "src/**/*.ts"
src/functions/accumulator.ts (new file, +180)
@@ -0,0 +1,180 @@
import { Transform, TransformOptions } from "stream";

export enum FlushStrategy {
    rolling = "rolling",
    sliding = "sliding",
}

export type AccumulatorByIteratee<T> = (event: T, bufferChunk: T) => boolean;

function _accumulator<T>(
    accumulateBy: (data: T, buffer: T[], stream: Transform) => void,
    shouldFlush: boolean = true,
    options: TransformOptions = {},
) {
    const buffer: T[] = [];
    return new Transform({
        ...options,
        transform(data: T, encoding, callback) {
            accumulateBy(data, buffer, this);
            callback();
        },
        flush(callback) {
            if (shouldFlush) {
                this.push(buffer);
            }
            callback();
        },
    });
}

function _sliding<T>(
    windowLength: number,
    key?: string,
): (event: T, buffer: T[], stream: Transform) => void {
    return (event: T, buffer: T[], stream: Transform) => {
        if (key) {
            let index = 0;
            if (event[key] === undefined) {
                stream.emit(
                    "error",
                    new Error(
                        `Key is missing in event: (${key}, ${JSON.stringify(
                            event,
                        )})`,
                    ),
                );
                stream.resume();
                return;
            }
            while (
                index < buffer.length &&
                buffer[index][key] + windowLength <= event[key]
            ) {
                index++;
            }
            buffer.splice(0, index);
        } else if (buffer.length === windowLength) {
            buffer.shift();
        }
        buffer.push(event);
        stream.push([...buffer]);
    };
}

function _slidingByFunction<T>(
    iteratee: AccumulatorByIteratee<T>,
): (event: T, buffer: T[], stream: Transform) => void {
    return (event: T, buffer: T[], stream: Transform) => {
        let index = 0;
        while (index < buffer.length && iteratee(event, buffer[index])) {
            index++;
        }
        buffer.splice(0, index);
        buffer.push(event);
        stream.push([...buffer]);
    };
}

function _rollingByFunction<T>(
    iteratee: AccumulatorByIteratee<T>,
): (event: T, buffer: T[], stream: Transform) => void {
    return (event: T, buffer: T[], stream: Transform) => {
        if (iteratee) {
            if (buffer.length > 0 && iteratee(event, buffer[0])) {
                stream.push(buffer.slice(0));
                buffer.length = 0;
            }
        }
        buffer.push(event);
    };
}

function _rolling<T>(
    windowLength: number,
    key?: string,
): (event: T, buffer: T[], stream: Transform) => void {
    return (event: T, buffer: T[], stream: Transform) => {
        if (key) {
            if (event[key] === undefined) {
                stream.emit(
                    "error",
                    new Error(
                        `Key is missing in event: (${key}, ${JSON.stringify(
                            event,
                        )})`,
                    ),
                );
                stream.resume();
                return;
            } else if (
                buffer.length > 0 &&
                buffer[0][key] + windowLength <= event[key]
            ) {
                stream.push(buffer.slice(0));
                buffer.length = 0;
            }
        } else if (buffer.length === windowLength) {
            stream.push(buffer.slice(0));
            buffer.length = 0;
        }
        buffer.push(event);
    };
}

export function accumulator(
    flushStrategy: FlushStrategy,
    batchSize: number,
    keyBy?: string,
    options?: TransformOptions,
): Transform {
    switch (flushStrategy) {
        case FlushStrategy.sliding:
            return sliding(batchSize, keyBy, options);
        case FlushStrategy.rolling:
            return rolling(batchSize, keyBy, options);
    }
}

export function accumulatorBy<T>(
    flushStrategy: FlushStrategy,
    iteratee: AccumulatorByIteratee<T>,
    options?: TransformOptions,
): Transform {
    switch (flushStrategy) {
        case FlushStrategy.sliding:
            return slidingBy(iteratee, options);
        case FlushStrategy.rolling:
            return rollingBy(iteratee, options);
    }
}

function sliding(
    windowLength: number,
    key?: string,
    options?: TransformOptions,
): Transform {
    return _accumulator(_sliding(windowLength, key), false, options);
}

function slidingBy<T>(
    iteratee: AccumulatorByIteratee<T>,
    options?: TransformOptions,
): Transform {
    return _accumulator(_slidingByFunction(iteratee), false, options);
}

function rolling(
    windowLength: number,
    key?: string,
    options?: TransformOptions,
): Transform {
    return _accumulator(_rolling(windowLength, key), true, options);
}

function rollingBy<T>(
    iteratee: AccumulatorByIteratee<T>,
    options?: TransformOptions,
): Transform {
    return _accumulator(_rollingByFunction(iteratee), true, options);
}
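A minimal usage sketch for the new accumulator (hypothetical events; assumes the sibling fromArray module added later in this branch, and objectMode since array chunks are pushed):

```typescript
import { accumulator, FlushStrategy } from "./accumulator";
import { fromArray } from "./fromArray";

// Roll events into windows of width 2 on the "ts" key; a window is
// flushed once an incoming event falls outside it.
fromArray([{ ts: 0 }, { ts: 1 }, { ts: 2 }, { ts: 3 }])
    .pipe(accumulator(FlushStrategy.rolling, 2, "ts", { objectMode: true }))
    .on("data", (window: Array<{ ts: number }>) => console.log(window));
// logs [{ts: 0}, {ts: 1}], then [{ts: 2}, {ts: 3}] on the final flush
```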
src/functions/definitions.ts
@@ -1,12 +1,3 @@
-export interface ThroughOptions {
-    objectMode?: boolean;
-}
-
-export interface TransformOptions {
-    readableObjectMode?: boolean;
-    writableObjectMode?: boolean;
-}
-
 export interface WithEncoding {
     encoding: string;
 }
@@ -21,4 +12,3 @@ export type JsonValue = JsonPrimitive | JsonPrimitive[];
 export interface JsonParseOptions {
     pretty: boolean;
 }
-
src/functions/batch.ts (new file, +38)
@@ -0,0 +1,38 @@
import { Transform, TransformOptions } from "stream";

export function batch(
    batchSize: number = 1000,
    maxBatchAge: number = 500,
    options: TransformOptions = {},
): Transform {
    let buffer: any[] = [];
    let timer: NodeJS.Timer | null = null;
    const sendChunk = (self: Transform) => {
        if (timer) {
            clearTimeout(timer);
        }
        timer = null;
        self.push(buffer);
        buffer = [];
    };
    return new Transform({
        ...options,
        transform(chunk, encoding, callback) {
            buffer.push(chunk);
            if (buffer.length === batchSize) {
                sendChunk(this);
            } else {
                if (timer === null) {
                    timer = setInterval(() => {
                        sendChunk(this);
                    }, maxBatchAge);
                }
            }
            callback();
        },
        flush(callback) {
            sendChunk(this);
            callback();
        },
    });
}
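A minimal usage sketch: batches of up to two chunks, flushed early after 100 ms of inactivity (hypothetical values; fromArray is the sibling module added below):

```typescript
import { batch } from "./batch";
import { fromArray } from "./fromArray";

fromArray(["a", "b", "c"])
    .pipe(batch(2, 100, { objectMode: true }))
    .on("data", (group: string[]) => console.log(group));
// ["a", "b"] when the size cap is hit, then ["c"] on end-of-stream flush
```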
src/functions/child.ts (new file, +11)
@@ -0,0 +1,11 @@
import { ChildProcess } from "child_process";
import { duplex } from "./duplex";

export function child(childProcess: ChildProcess) {
    if (childProcess.stdin === null) {
        throw new Error("childProcess.stdin is null");
    } else if (childProcess.stdout === null) {
        throw new Error("childProcess.stdout is null");
    }
    return duplex(childProcess.stdin, childProcess.stdout);
}
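A minimal usage sketch (assumes a POSIX grep on PATH; any stdin/stdout process works):

```typescript
import { spawn } from "child_process";
import { child } from "./child";

// Wrap the process's stdin/stdout pair as one duplex stream.
const grepStream = child(spawn("grep", ["foo"]));
grepStream.on("data", chunk => console.log(chunk.toString()));
grepStream.write("foo bar\n");
grepStream.write("baz\n");
grepStream.end(); // only "foo bar" is echoed back
```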
src/functions/collect.ts (new file, +18)
@@ -0,0 +1,18 @@
import { Transform, TransformOptions } from "stream";

export function collect(options: TransformOptions = {}): Transform {
    const collected: any[] = [];
    return new Transform({
        ...options,
        transform(data, encoding, callback) {
            collected.push(data);
            callback();
        },
        flush(callback) {
            this.push(
                options.objectMode ? collected : Buffer.concat(collected),
            );
            callback();
        },
    });
}
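A minimal usage sketch; in object mode the chunks come back as one array when the source ends, otherwise they are concatenated into a single Buffer:

```typescript
import { collect } from "./collect";
import { fromArray } from "./fromArray";

fromArray([1, 2, 3])
    .pipe(collect({ objectMode: true }))
    .on("data", all => console.log(all)); // [1, 2, 3]
```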
src/functions/compose.ts (new file, +106)
@@ -0,0 +1,106 @@
import { pipeline, Duplex, DuplexOptions } from "stream";

export function compose(
    streams: Array<
        NodeJS.ReadableStream | NodeJS.ReadWriteStream | NodeJS.WritableStream
    >,
    options?: DuplexOptions,
): Compose {
    if (streams.length < 2) {
        throw new Error("At least two streams are required to compose");
    }

    return new Compose(streams, options);
}

enum EventSubscription {
    Last = 0,
    First,
    All,
    Self,
}

const eventsTarget = {
    close: EventSubscription.Last,
    data: EventSubscription.Last,
    drain: EventSubscription.Self,
    end: EventSubscription.Last,
    error: EventSubscription.Self,
    finish: EventSubscription.Last,
    pause: EventSubscription.Last,
    pipe: EventSubscription.First,
    readable: EventSubscription.Last,
    resume: EventSubscription.Last,
    unpipe: EventSubscription.First,
};

type AllStreams =
    | NodeJS.ReadableStream
    | NodeJS.ReadWriteStream
    | NodeJS.WritableStream;

export class Compose extends Duplex {
    private first: AllStreams;
    private last: AllStreams;
    private streams: AllStreams[];

    constructor(streams: AllStreams[], options?: DuplexOptions) {
        super(options);
        this.first = streams[0];
        this.last = streams[streams.length - 1];
        this.streams = streams;
        pipeline(streams, (err: any) => {
            this.emit("error", err);
        });
    }

    public pipe<T extends NodeJS.WritableStream>(dest: T) {
        return (this.last as NodeJS.ReadableStream).pipe(dest);
    }

    public _write(chunk: any, encoding: string, cb: any) {
        (this.first as NodeJS.WritableStream).write(chunk, encoding, cb);
    }

    public bubble(...events: string[]) {
        this.streams.forEach(s => {
            events.forEach(e => {
                s.on(e, (...args) => super.emit(e, ...args));
            });
        });
    }

    public on(event: string, cb: any) {
        switch (eventsTarget[event]) {
            case EventSubscription.First:
                this.first.on(event, cb);
                break;
            case EventSubscription.Last:
                this.last.on(event, cb);
                break;
            case EventSubscription.All:
                this.streams.forEach(s => s.on(event, cb));
                break;
            default:
                super.on(event, cb);
        }
        return this;
    }

    public once(event: string, cb: any) {
        switch (eventsTarget[event]) {
            case EventSubscription.First:
                this.first.once(event, cb);
                break;
            case EventSubscription.Last:
                this.last.once(event, cb);
                break;
            case EventSubscription.All:
                this.streams.forEach(s => s.once(event, cb));
                break;
            default:
                super.once(event, cb);
        }
        return this;
    }
}
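A minimal usage sketch, assuming the sibling map module (its diff is not shown in this excerpt) keeps a (mapper, options) signature; two transforms become one writable/readable unit, with writes delegated to the first stream and "data" to the last:

```typescript
import { compose } from "./compose";
import { map } from "./map";

const double = map((n: number) => n * 2, { objectMode: true });
const label = map((n: number) => `n=${n}`, { objectMode: true });
const combined = compose([double, label], { objectMode: true });

combined.on("data", console.log);
combined.write(1); // logs n=2
combined.write(2); // logs n=4
```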
src/functions/concat.ts (new file, +37)
@@ -0,0 +1,37 @@
import { Readable } from "stream";

export function concat(...streams: NodeJS.ReadableStream[]): Readable {
    let isStarted = false;
    let currentStreamIndex = 0;
    const startCurrentStream = () => {
        if (currentStreamIndex >= streams.length) {
            wrapper.push(null);
        } else {
            streams[currentStreamIndex]
                .on("data", chunk => {
                    if (!wrapper.push(chunk)) {
                        streams[currentStreamIndex].pause();
                    }
                })
                .on("error", err => wrapper.emit("error", err))
                .on("end", () => {
                    currentStreamIndex++;
                    startCurrentStream();
                });
        }
    };

    const wrapper = new Readable({
        objectMode: true,
        read() {
            if (!isStarted) {
                isStarted = true;
                startCurrentStream();
            }
            if (currentStreamIndex < streams.length) {
                streams[currentStreamIndex].resume();
            }
        },
    });
    return wrapper;
}
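A minimal usage sketch: the second source is only consumed after the first ends:

```typescript
import { concat } from "./concat";
import { fromArray } from "./fromArray";

concat(fromArray([1, 2]), fromArray([3, 4])).on("data", console.log);
// 1, 2, 3, 4
```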
src/functions/demux.ts (new file, +99)
@@ -0,0 +1,99 @@
import { WritableOptions, Writable } from "stream";

enum EventSubscription {
    Last = 0,
    First,
    All,
    Self,
    Unhandled,
}

const eventsTarget = {
    close: EventSubscription.Self,
    data: EventSubscription.All,
    drain: EventSubscription.Self,
    end: EventSubscription.Self,
    error: EventSubscription.Self,
    finish: EventSubscription.Self,
    pause: EventSubscription.Self,
    pipe: EventSubscription.Self,
    readable: EventSubscription.Self,
    resume: EventSubscription.Self,
    unpipe: EventSubscription.Self,
};

type DemuxStreams = NodeJS.WritableStream | NodeJS.ReadWriteStream;

export function demux(
    construct: (destKey?: string) => DemuxStreams,
    demuxBy: string | ((chunk: any) => string),
    options?: WritableOptions,
): Writable {
    return new Demux(construct, demuxBy, options);
}

// @TODO handle pipe event ie) Multiplex
class Demux extends Writable {
    private streamsByKey: {
        [key: string]: DemuxStreams;
    };
    private demuxer: (chunk: any) => string;
    private construct: (destKey?: string) => DemuxStreams;
    constructor(
        construct: (destKey?: string) => DemuxStreams,
        demuxBy: string | ((chunk: any) => string),
        options: WritableOptions = {},
    ) {
        super(options);
        this.demuxer =
            typeof demuxBy === "string" ? chunk => chunk[demuxBy] : demuxBy;
        this.construct = construct;
        this.streamsByKey = {};
    }

    public async _write(chunk: any, encoding: any, cb: any) {
        const destKey = this.demuxer(chunk);
        if (this.streamsByKey[destKey] === undefined) {
            this.streamsByKey[destKey] = await this.construct(destKey);
        }
        if (!this.streamsByKey[destKey].write(chunk, encoding)) {
            this.streamsByKey[destKey].once("drain", () => {
                cb();
            });
        } else {
            cb();
        }
    }

    public on(event: string, cb: any) {
        switch (eventsTarget[event]) {
            case EventSubscription.Self:
                super.on(event, cb);
                break;
            case EventSubscription.All:
                Object.keys(this.streamsByKey).forEach(key =>
                    this.streamsByKey[key].on(event, cb),
                );
                break;
            default:
                super.on(event, cb);
        }
        return this;
    }

    public once(event: string, cb: any) {
        switch (eventsTarget[event]) {
            case EventSubscription.Self:
                super.once(event, cb);
                break;
            case EventSubscription.All:
                Object.keys(this.streamsByKey).forEach(key =>
                    this.streamsByKey[key].once(event, cb),
                );
                break;
            default:
                super.once(event, cb);
        }
        return this;
    }
}
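A minimal usage sketch with a hypothetical sink factory: one writable is constructed lazily per distinct key, and each chunk is routed to its key's sink:

```typescript
import { Writable } from "stream";
import { demux } from "./demux";

// Hypothetical factory: builds a console-logging sink per destination key.
const sinkFor = (destKey?: string) =>
    new Writable({
        objectMode: true,
        write(chunk, _encoding, done) {
            console.log(`[${destKey}]`, chunk);
            done();
        },
    });

const router = demux(sinkFor, "user", { objectMode: true });
router.write({ user: "a", n: 1 }); // logs [a] { user: 'a', n: 1 }
router.write({ user: "b", n: 2 }); // logs [b] { user: 'b', n: 2 }
router.end();
```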
src/functions/duplex.ts (new file, +31)
@@ -0,0 +1,31 @@
import { Duplex } from "stream";

export function duplex(
    writable: NodeJS.WritableStream,
    readable: NodeJS.ReadableStream,
) {
    const wrapper = new Duplex({
        readableObjectMode: true,
        writableObjectMode: true,
        read() {
            readable.resume();
        },
        write(chunk, encoding, callback) {
            return writable.write(chunk, encoding, callback);
        },
        final(callback) {
            writable.end(callback);
        },
    });
    readable
        .on("data", chunk => {
            if (!wrapper.push(chunk)) {
                readable.pause();
            }
        })
        .on("error", err => wrapper.emit("error", err))
        .on("end", () => wrapper.push(null));
    writable.on("drain", () => wrapper.emit("drain"));
    writable.on("error", err => wrapper.emit("error", err));
    return wrapper;
}
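A minimal usage sketch; a single PassThrough plays both roles here, so writes surface back on the readable side:

```typescript
import { PassThrough } from "stream";
import { duplex } from "./duplex";

const channel = new PassThrough({ objectMode: true });
const wrapped = duplex(channel, channel);
wrapped.on("data", console.log);
wrapped.write("hello"); // logs hello
wrapped.end();
```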
src/functions/filter.ts (new file, +20)
@@ -0,0 +1,20 @@
import { Transform, TransformOptions } from "stream";

export function filter<T>(
    predicate:
        | ((chunk: T, encoding: string) => boolean)
        | ((chunk: T, encoding: string) => Promise<boolean>),
    options?: TransformOptions,
) {
    return new Transform({
        ...options,
        async transform(chunk: T, encoding?: any, callback?: any) {
            const result = await predicate(chunk, encoding);
            if (result === true) {
                callback(null, chunk);
            } else {
                callback();
            }
        },
    });
}
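A minimal usage sketch; the predicate may be sync or async, and chunks resolving to true pass through:

```typescript
import { filter } from "./filter";
import { fromArray } from "./fromArray";

fromArray([1, 2, 3, 4])
    .pipe(filter((n: number) => n % 2 === 0, { objectMode: true }))
    .on("data", console.log); // 2, 4
```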
src/functions/flatMap.ts (new file, +16)
@@ -0,0 +1,16 @@
import { Transform, TransformOptions } from "stream";

export function flatMap<T, R>(
    mapper:
        | ((chunk: T, encoding: string) => R[])
        | ((chunk: T, encoding: string) => Promise<R[]>),
    options?: TransformOptions,
): Transform {
    return new Transform({
        ...options,
        async transform(chunk: T, encoding, callback) {
            (await mapper(chunk, encoding)).forEach(c => this.push(c));
            callback();
        },
    });
}
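A minimal usage sketch; each input chunk can fan out into zero or more output chunks:

```typescript
import { flatMap } from "./flatMap";
import { fromArray } from "./fromArray";

fromArray(["ab", "cd"])
    .pipe(flatMap((s: string) => s.split(""), { objectMode: true }))
    .on("data", console.log); // a, b, c, d
```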
src/functions/fromArray.ts (new file, +16)
@@ -0,0 +1,16 @@
import { Readable } from "stream";

export function fromArray(array: any[]): Readable {
    let cursor = 0;
    return new Readable({
        objectMode: true,
        read() {
            if (cursor < array.length) {
                this.push(array[cursor]);
                cursor++;
            } else {
                this.push(null);
            }
        },
    });
}
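A minimal usage sketch: a one-shot object-mode source that ends (pushes null) once the array is drained:

```typescript
import { fromArray } from "./fromArray";

fromArray(["x", "y"])
    .on("data", console.log) // x, y
    .on("end", () => console.log("done"));
```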
src/functions/functions.ts (deleted file, -604; diff collapsed on the page as too large)
@@ -1,604 +0,0 @@
import { Transform, Readable, Writable, Duplex } from "stream";
import { performance } from "perf_hooks";
import { ChildProcess } from "child_process";
import { StringDecoder } from "string_decoder";

import {
    TransformOptions,
    ThroughOptions,
    WithEncoding,
    SerializationFormats,
    JsonValue,
    JsonParseOptions,
} from "./definitions";
import { sleep } from "../helpers";

/**
 * Convert an array into a Readable stream of its elements
 * @param array Array of elements to stream
 */
export function fromArray(array: any[]): NodeJS.ReadableStream {
    let cursor = 0;
    return new Readable({
        objectMode: true,
        read() {
            if (cursor < array.length) {
                this.push(array[cursor]);
                cursor++;
            } else {
                this.push(null);
            }
        },
    });
}

/**
 * Return a ReadWrite stream that maps streamed chunks
 * @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such)
 * @param options
 * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects
 * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects
 */
export function map<T, R>(
    mapper: (chunk: T, encoding: string) => R,
    options: TransformOptions = {
        readableObjectMode: true,
        writableObjectMode: true,
    },
): NodeJS.ReadWriteStream {
    return new Transform({
        ...options,
        async transform(chunk: T, encoding, callback) {
            try {
                const mapped = await mapper(chunk, encoding);
                this.push(mapped);
                callback();
            } catch (err) {
                callback(err);
            }
        },
    });
}

/**
 * Return a ReadWrite stream that flat maps streamed chunks
 * @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such)
 * @param options
 * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects
 * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects
 */
export function flatMap<T, R>(
    mapper:
        | ((chunk: T, encoding: string) => R[])
        | ((chunk: T, encoding: string) => Promise<R[]>),
    options: TransformOptions = {
        readableObjectMode: true,
        writableObjectMode: true,
    },
): NodeJS.ReadWriteStream {
    return new Transform({
        ...options,
        async transform(chunk: T, encoding, callback) {
            let isPromise = false;
            try {
                const mapped = mapper(chunk, encoding);
                isPromise = mapped instanceof Promise;
                (await mapped).forEach(c => this.push(c));
                callback();
            } catch (err) {
                if (isPromise) {
                    // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly
                    this.emit("error", err);
                    callback();
                } else {
                    callback(err);
                }
            }
        },
    });
}

/**
 * Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold
 * @param predicate Predicate with which to filter scream chunks
 * @param options
 * @param options.objectMode Whether this stream should behave as a stream of objects
 */
export function filter<T>(
    predicate:
        | ((chunk: T, encoding: string) => boolean)
        | ((chunk: T, encoding: string) => Promise<boolean>),
    options: ThroughOptions = {
        objectMode: true,
    },
) {
    return new Transform({
        readableObjectMode: options.objectMode,
        writableObjectMode: options.objectMode,
        async transform(chunk: T, encoding, callback) {
            let isPromise = false;
            try {
                const result = predicate(chunk, encoding);
                isPromise = result instanceof Promise;
                if (!!(await result)) {
                    callback(undefined, chunk);
                } else {
                    callback();
                }
            } catch (err) {
                if (isPromise) {
                    // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly
                    this.emit("error", err);
                    callback();
                } else {
                    callback(err);
                }
            }
        },
    });
}

/**
 * Return a ReadWrite stream that reduces streamed chunks down to a single value and yield that
 * value
 * @param iteratee Reducer function to apply on each streamed chunk
 * @param initialValue Initial value
 * @param options
 * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects
 * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects
 */
export function reduce<T, R>(
    iteratee:
        | ((previousValue: R, chunk: T, encoding: string) => R)
        | ((previousValue: R, chunk: T, encoding: string) => Promise<R>),
    initialValue: R,
    options: TransformOptions = {
        readableObjectMode: true,
        writableObjectMode: true,
    },
) {
    let value = initialValue;
    return new Transform({
        readableObjectMode: options.readableObjectMode,
        writableObjectMode: options.writableObjectMode,
        async transform(chunk: T, encoding, callback) {
            let isPromise = false;
            try {
                const result = iteratee(value, chunk, encoding);
                isPromise = result instanceof Promise;
                value = await result;
                callback();
            } catch (err) {
                if (isPromise) {
                    // Calling the callback asynchronously with an error wouldn't emit the error, so emit directly
                    this.emit("error", err);
                    callback();
                } else {
                    callback(err);
                }
            }
        },
        flush(callback) {
            // Best effort attempt at yielding the final value (will throw if e.g. yielding an object and
            // downstream doesn't expect objects)
            try {
                callback(undefined, value);
            } catch (err) {
                try {
                    this.emit("error", err);
                } catch {
                    // Best effort was made
                }
            }
        },
    });
}

/**
 * Return a ReadWrite stream that splits streamed chunks using the given separator
 * @param separator Separator to split by, defaulting to "\n"
 * @param options
 * @param options.encoding Encoding written chunks are assumed to use
 */
export function split(
    separator: string | RegExp = "\n",
    options: WithEncoding = { encoding: "utf8" },
): NodeJS.ReadWriteStream {
    let buffered = "";
    const decoder = new StringDecoder(options.encoding);

    return new Transform({
        readableObjectMode: true,
        transform(chunk: Buffer, encoding, callback) {
            const asString = decoder.write(chunk);
            const splitted = asString.split(separator);
            if (splitted.length > 1) {
                splitted[0] = buffered.concat(splitted[0]);
                buffered = "";
            }
            buffered += splitted[splitted.length - 1];
            splitted.slice(0, -1).forEach((part: string) => this.push(part));
            callback();
        },
        flush(callback) {
            callback(undefined, buffered + decoder.end());
        },
    });
}

/**
 * Return a ReadWrite stream that joins streamed chunks using the given separator
 * @param separator Separator to join with
 * @param options
 * @param options.encoding Encoding written chunks are assumed to use
 */
export function join(
    separator: string,
    options: WithEncoding = { encoding: "utf8" },
): NodeJS.ReadWriteStream {
    let isFirstChunk = true;
    const decoder = new StringDecoder(options.encoding);
    return new Transform({
        readableObjectMode: true,
        async transform(chunk: Buffer, encoding, callback) {
            const asString = decoder.write(chunk);
            // Take care not to break up multi-byte characters spanning multiple chunks
            if (asString !== "" || chunk.length === 0) {
                if (!isFirstChunk) {
                    this.push(separator);
                }
                this.push(asString);
                isFirstChunk = false;
            }
            callback();
        },
    });
}

/**
 * Return a ReadWrite stream that replaces occurrences of the given string or regular expression in
 * the streamed chunks with the specified replacement string
 * @param searchValue Search string to use
 * @param replaceValue Replacement string to use
 * @param options
 * @param options.encoding Encoding written chunks are assumed to use
 */
export function replace(
    searchValue: string | RegExp,
    replaceValue: string,
    options: WithEncoding = { encoding: "utf8" },
): NodeJS.ReadWriteStream {
    const decoder = new StringDecoder(options.encoding);
    return new Transform({
        readableObjectMode: true,
        transform(chunk: Buffer, encoding, callback) {
            const asString = decoder.write(chunk);
            // Take care not to break up multi-byte characters spanning multiple chunks
            if (asString !== "" || chunk.length === 0) {
                callback(
                    undefined,
                    asString.replace(searchValue, replaceValue),
                );
            } else {
                callback();
            }
        },
    });
}

/**
 * Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk
 * must be a fully defined JSON string.
 * @param format Format of serialized data, only utf8 supported.
 */
export function parse(
    format: SerializationFormats = SerializationFormats.utf8,
): NodeJS.ReadWriteStream {
    const decoder = new StringDecoder(format);
    return new Transform({
        readableObjectMode: true,
        writableObjectMode: true,
        async transform(chunk: Buffer, encoding, callback) {
            try {
                const asString = decoder.write(chunk);
                // Using await causes parsing errors to be emitted
                callback(undefined, await JSON.parse(asString));
            } catch (err) {
                callback(err);
            }
        },
    });
}

/**
 * Return a ReadWrite stream that stringifies the streamed chunks to JSON
 */
export function stringify(
    options: JsonParseOptions = { pretty: false },
): NodeJS.ReadWriteStream {
    return new Transform({
        readableObjectMode: true,
        writableObjectMode: true,
        transform(chunk: JsonValue, encoding, callback) {
            callback(
                undefined,
                options.pretty
                    ? JSON.stringify(chunk, null, 2)
                    : JSON.stringify(chunk),
            );
        },
    });
}

/**
 * Return a ReadWrite stream that collects streamed chunks into an array or buffer
 * @param options
 * @param options.objectMode Whether this stream should behave as a stream of objects
 */
export function collect(
    options: ThroughOptions = { objectMode: false },
): NodeJS.ReadWriteStream {
    const collected: any[] = [];
    return new Transform({
        readableObjectMode: options.objectMode,
        writableObjectMode: options.objectMode,
        transform(data, encoding, callback) {
            collected.push(data);
            callback();
        },
        flush(callback) {
            this.push(
                options.objectMode ? collected : Buffer.concat(collected),
            );
            callback();
        },
    });
}

/**
 * Return a Readable stream of readable streams concatenated together
 * @param streams Readable streams to concatenate
 */
export function concat(
    ...streams: NodeJS.ReadableStream[]
): NodeJS.ReadableStream {
    let isStarted = false;
    let currentStreamIndex = 0;
    const startCurrentStream = () => {
        if (currentStreamIndex >= streams.length) {
            wrapper.push(null);
        } else {
            streams[currentStreamIndex]
                .on("data", chunk => {
                    if (!wrapper.push(chunk)) {
                        streams[currentStreamIndex].pause();
                    }
                })
                .on("error", err => wrapper.emit("error", err))
                .on("end", () => {
                    currentStreamIndex++;
                    startCurrentStream();
                });
        }
    };

    const wrapper = new Readable({
        objectMode: true,
        read() {
            if (!isStarted) {
                isStarted = true;
                startCurrentStream();
            }
            if (currentStreamIndex < streams.length) {
                streams[currentStreamIndex].resume();
            }
        },
    });
    return wrapper;
}

/**
 * Return a Readable stream of readable streams merged together in chunk arrival order
 * @param streams Readable streams to merge
 */
export function merge(
    ...streams: NodeJS.ReadableStream[]
): NodeJS.ReadableStream {
    let isStarted = false;
    let streamEndedCount = 0;
    return new Readable({
        objectMode: true,
        read() {
            if (streamEndedCount >= streams.length) {
                this.push(null);
            } else if (!isStarted) {
                isStarted = true;
                streams.forEach(stream =>
                    stream
                        .on("data", chunk => {
                            if (!this.push(chunk)) {
                                streams.forEach(s => s.pause());
                            }
                        })
                        .on("error", err => this.emit("error", err))
                        .on("end", () => {
                            streamEndedCount++;
                            if (streamEndedCount === streams.length) {
                                this.push(null);
                            }
                        }),
                );
            } else {
                streams.forEach(s => s.resume());
            }
        },
    });
}

/**
 * Return a Duplex stream from a writable stream that is assumed to somehow, when written to,
 * cause the given readable stream to yield chunks
 * @param writable Writable stream assumed to cause the readable stream to yield chunks when written to
 * @param readable Readable stream assumed to yield chunks when the writable stream is written to
 */
export function duplex(writable: Writable, readable: Readable) {
    const wrapper = new Duplex({
        readableObjectMode: true,
        writableObjectMode: true,
        read() {
            readable.resume();
        },
        write(chunk, encoding, callback) {
            return writable.write(chunk, encoding, callback);
        },
        final(callback) {
            writable.end(callback);
        },
    });
    readable
        .on("data", chunk => {
            if (!wrapper.push(chunk)) {
                readable.pause();
            }
        })
        .on("error", err => wrapper.emit("error", err))
        .on("end", () => wrapper.push(null));
    writable.on("drain", () => wrapper.emit("drain"));
    writable.on("error", err => wrapper.emit("error", err));
    return wrapper;
}

/**
 * Return a Duplex stream from a child process' stdin and stdout
 * @param childProcess Child process from which to create duplex stream
 */
export function child(childProcess: ChildProcess) {
    return duplex(childProcess.stdin, childProcess.stdout);
}

/**
 * Return a Promise resolving to the last streamed chunk of the given readable stream, after it has
 * ended
 * @param readable Readable stream to wait on
 */
export function last<T>(readable: Readable): Promise<T | null> {
    let lastChunk: T | null = null;
    return new Promise((resolve, reject) => {
        readable
            .on("data", chunk => (lastChunk = chunk))
            .on("end", () => resolve(lastChunk));
    });
}

/**
 * Stores chunks of data internally in array and batches when batchSize is reached.
 *
 * @param batchSize Size of the batches
 * @param maxBatchAge Max lifetime of a batch
 */
export function batch(batchSize: number = 1000, maxBatchAge: number = 500) {
    let buffer: any[] = [];
    let timer: NodeJS.Timer | null = null;
    let sendChunk = (self: Transform) => {
        timer && clearTimeout(timer);
        timer = null;
        self.push(buffer);
        buffer = [];
    };
    return new Transform({
        objectMode: true,
        transform(chunk, encoding, callback) {
            buffer.push(chunk);
            if (buffer.length === batchSize) {
                sendChunk(this);
            } else {
                if (timer === null) {
                    timer = setInterval(() => {
                        sendChunk(this);
                    }, maxBatchAge);
                }
            }
            callback();
        },
        flush(callback) {
            console.error("flushing");
            sendChunk(this);
            callback();
        },
    });
}

/**
 * Unbatches and sends individual chunks of data
 */
export function unbatch() {
    return new Transform({
        objectMode: true,
        transform(data, encoding, callback) {
            for (const d of data) {
                this.push(d);
            }
            callback();
        },
    });
}

/**
 * Limits date of data transferred into stream.
 * @param targetRate Desired rate in ms
 * @param period Period to sleep for when rate is above or equal to targetRate
 */
export function rate(targetRate: number = 50, period: number = 2) {
    const deltaMS = ((1 / targetRate) * 1000) / period; // Skip half a period
    let total = 0;
    const start = performance.now();
    return new Transform({
        objectMode: true,
        async transform(data, encoding, callback) {
            const currentRate = (total / (performance.now() - start)) * 1000;
            if (targetRate && currentRate > targetRate) {
                await sleep(deltaMS);
            }
            total += 1;
            callback(undefined, data);
        },
    });
}

/**
 * Limits number of parallel processes in flight.
 * @param parallel Max number of parallel processes.
 * @param func Function to execute on each data chunk
 * @param pause Amount of time to pause processing when max number of parallel processes are executing.
 */
export function parallelMap<T, R>(
    mapper: (data: T) => R,
    parallel: number = 10,
    sleepTime: number = 5,
) {
    let inflight = 0;
    return new Transform({
        objectMode: true,
        async transform(data, encoding, callback) {
            while (parallel <= inflight) {
                await sleep(sleepTime);
            }
            inflight += 1;
            callback();
            try {
                const res = await mapper(data);
                this.push(res);
            } catch (e) {
                this.emit(e);
            } finally {
                inflight -= 1;
            }
        },
        async flush(callback) {
            while (inflight > 0) {
                await sleep(sleepTime);
            }
            callback();
        },
    });
}
src/functions/index.ts
@@ -1,21 +1,59 @@
-import { Readable, Writable } from "stream";
-import { ChildProcess } from "child_process";
-import * as baseFunctions from "./functions";
-
 import {
-    ThroughOptions,
-    TransformOptions,
-    WithEncoding,
-    JsonParseOptions,
-} from "./definitions";
+    Transform,
+    TransformOptions,
+    WritableOptions,
+    ReadableOptions,
+} from "stream";
+import { accumulator, accumulatorBy } from "./accumulator";
+import { batch } from "./batch";
+import { child } from "./child";
+import { collect } from "./collect";
+import { concat } from "./concat";
+import { duplex } from "./duplex";
+import { filter } from "./filter";
+import { flatMap } from "./flatMap";
+import { fromArray } from "./fromArray";
+import { join } from "./join";
+import { last } from "./last";
+import { map } from "./map";
+import { merge } from "./merge";
+import { parallelMap } from "./parallelMap";
+import { parse } from "./parse";
+import { rate } from "./rate";
+import { reduce } from "./reduce";
+import { replace } from "./replace";
+import { split } from "./split";
+import { stringify } from "./stringify";
+import { unbatch } from "./unbatch";
+import { compose } from "./compose";
+import { demux } from "./demux";
+
+export default function mhysa(defaultOptions?: TransformOptions) {
+    function withDefaultOptions<T extends any[], R>(
+        n: number,
+        fn: (...args: T) => R,
+    ): (...args: T) => R {
+        return (...args) => {
+            const options = {
+                ...defaultOptions,
+                ...((args[n] || {}) as TransformOptions | {}),
+            };
+            const provided = args.slice(0, n);
+            const nextArgs = [
+                ...provided,
+                ...Array(n - provided.length).fill(undefined),
+                options,
+            ] as T;
+            return fn(...nextArgs) as R;
+        };
+    }
+
+    return {
     /**
      * Convert an array into a Readable stream of its elements
      * @param array Array of elements to stream
      */
-export function fromArray(array: any[]): NodeJS.ReadableStream {
-    return baseFunctions.fromArray(array);
-}
+        fromArray,
 
     /**
      * Return a ReadWrite stream that maps streamed chunks
@@ -24,12 +62,7 @@
      * @param options.readableObjectMode? Whether this stream should behave as a readable stream of objects
      * @param options.writableObjectMode? Whether this stream should behave as a writable stream of objects
      */
-export function map<T, R>(
-    mapper: (chunk: T, encoding?: string) => R,
-    options?: TransformOptions,
-): NodeJS.ReadWriteStream {
-    return baseFunctions.map(mapper, options);
-}
+        map: withDefaultOptions(1, map),
 
     /**
      * Return a ReadWrite stream that flat maps streamed chunks
@@ -38,14 +71,7 @@
      * @param options.readableObjectMode? Whether this stream should behave as a readable stream of objects
      * @param options.writableObjectMode? Whether this stream should behave as a writable stream of objects
      */
-export function flatMap<T, R>(
-    mapper:
-        | ((chunk: T, encoding: string) => R[])
-        | ((chunk: T, encoding: string) => Promise<R[]>),
-    options?: TransformOptions,
-): NodeJS.ReadWriteStream {
-    return baseFunctions.flatMap(mapper, options);
-}
+        flatMap: withDefaultOptions(1, flatMap),
 
     /**
      * Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold
@@ -53,14 +79,7 @@
      * @param options?
      * @param options.objectMode? Whether this stream should behave as a stream of objects.
      */
-export function filter<T>(
-    mapper:
-        | ((chunk: T, encoding: string) => boolean)
-        | ((chunk: T, encoding: string) => Promise<boolean>),
-    options?: ThroughOptions,
-): NodeJS.ReadWriteStream {
-    return baseFunctions.filter(mapper, options);
-}
+        filter: withDefaultOptions(1, filter),
 
     /**
      * Return a ReadWrite stream that reduces streamed chunks down to a single value and yield that
@@ -71,15 +90,7 @@
      * @param options.readableObjectMode? Whether this stream should behave as a readable stream of objects
      * @param options.writableObjectMode? Whether this stream should behave as a writable stream of objects
      */
-export function reduce<T, R>(
-    iteratee:
-        | ((previousValue: R, chunk: T, encoding: string) => R)
-        | ((previousValue: R, chunk: T, encoding: string) => Promise<R>),
-    initialValue: R,
-    options?: TransformOptions,
-): NodeJS.ReadWriteStream {
-    return baseFunctions.reduce(iteratee, initialValue, options);
-}
+        reduce: withDefaultOptions(2, reduce),
 
     /**
      * Return a ReadWrite stream that splits streamed chunks using the given separator
@@ -87,12 +98,7 @@
      * @param options? Defaults to encoding: utf8
      * @param options.encoding? Encoding written chunks are assumed to use
      */
-export function split(
-    separator?: string | RegExp,
-    options?: WithEncoding,
-): NodeJS.ReadWriteStream {
-    return baseFunctions.split(separator, options);
-}
+        split,
 
     /**
      * Return a ReadWrite stream that joins streamed chunks using the given separator
@@ -100,12 +106,7 @@
      * @param options? Defaults to encoding: utf8
      * @param options.encoding? Encoding written chunks are assumed to use
      */
-export function join(
-    separator: string,
-    options?: WithEncoding,
-): NodeJS.ReadWriteStream {
-    return baseFunctions.join(separator, options);
-}
+        join: withDefaultOptions(1, join),
 
     /**
      * Return a ReadWrite stream that replaces occurrences of the given string or regular expression in
@@ -115,21 +116,13 @@
      * @param options? Defaults to encoding: utf8
      * @param options.encoding Encoding written chunks are assumed to use
      */
-export function replace(
-    searchValue: string | RegExp,
-    replaceValue: string,
-    options?: WithEncoding,
-): NodeJS.ReadWriteStream {
-    return baseFunctions.replace(searchValue, replaceValue, options);
-}
+        replace,
 
     /**
      * Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk
      * must be a fully defined JSON string in utf8.
      */
-export function parse(): NodeJS.ReadWriteStream {
-    return baseFunctions.parse();
-}
+        parse,
 
     /**
      * Return a ReadWrite stream that stringifies the streamed chunks to JSON
@@ -137,38 +130,26 @@
      * @param options.pretty If true, whitespace is inserted into the stringified chunks.
      *
      */
-export function stringify(options?: JsonParseOptions): NodeJS.ReadWriteStream {
-    return baseFunctions.stringify(options);
-}
+        stringify,
 
     /**
      * Return a ReadWrite stream that collects streamed chunks into an array or buffer
      * @param options?
      * @param options.objectMode? Whether this stream should behave as a stream of objects
      */
-export function collect(options?: ThroughOptions): NodeJS.ReadWriteStream {
-    return baseFunctions.collect(options);
-}
+        collect: withDefaultOptions(0, collect),
 
     /**
      * Return a Readable stream of readable streams concatenated together
      * @param streams Readable streams to concatenate
      */
-export function concat(
-    ...streams: NodeJS.ReadableStream[]
-): NodeJS.ReadableStream {
-    return baseFunctions.concat(...streams);
-}
+        concat,
 
     /**
      * Return a Readable stream of readable streams concatenated together
      * @param streams Readable streams to merge
      */
-export function merge(
-    ...streams: NodeJS.ReadableStream[]
-): NodeJS.ReadableStream {
-    return baseFunctions.merge(...streams);
-}
+        merge,
 
     /**
      * Return a Duplex stream from a writable stream that is assumed to somehow, when written to,
@@ -176,66 +157,106 @@
      * @param writable Writable stream assumed to cause the readable stream to yield chunks when written to
      * @param readable Readable stream assumed to yield chunks when the writable stream is written to
      */
-export function duplex(
-    writable: Writable,
-    readable: Readable,
-): NodeJS.ReadWriteStream {
-    return baseFunctions.duplex(writable, readable);
-}
+        duplex,
 
     /**
      * Return a Duplex stream from a child process' stdin and stdout
      * @param childProcess Child process from which to create duplex stream
      */
-export function child(childProcess: ChildProcess): NodeJS.ReadWriteStream {
-    return baseFunctions.child(childProcess);
-}
+        child,
 
     /**
      * Return a Promise resolving to the last streamed chunk of the given readable stream, after it has
|
||||||
* ended
|
* ended
|
||||||
* @param readable Readable stream to wait on
|
* @param readable Readable stream to wait on
|
||||||
*/
|
*/
|
||||||
export function last<T>(readable: Readable): Promise<T | null> {
|
last,
|
||||||
return baseFunctions.last(readable);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Stores chunks of data internally in array and batches when batchSize is reached.
|
* Stores chunks of data internally in array and batches when batchSize is reached.
|
||||||
* @param batchSize Size of the batches, defaults to 1000.
|
* @param batchSize Size of the batches, defaults to 1000.
|
||||||
* @param maxBatchAge? Max lifetime of a batch, defaults to 500
|
* @param maxBatchAge? Max lifetime of a batch, defaults to 500
|
||||||
*/
|
|
||||||
export function batch(batchSize: number, maxBatchAge?: number): NodeJS.ReadWriteStream {
|
|
||||||
return baseFunctions.batch(batchSize, maxBatchAge);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Unbatches and sends individual chunks of data
|
|
||||||
*/
|
|
||||||
export function unbatch(): NodeJS.ReadWriteStream {
|
|
||||||
return baseFunctions.unbatch();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Limits date of data transferred into stream.
|
|
||||||
* @param options?
|
* @param options?
|
||||||
* @param targetRate? Desired rate in ms
|
* @param options.objectMode? Whether this stream should behave as a stream of objects
|
||||||
* @param period? Period to sleep for when rate is above or equal to targetRate
|
|
||||||
*/
|
*/
|
||||||
export function rate(targetRate?: number, period?: number): NodeJS.ReadWriteStream {
|
batch: withDefaultOptions(2, batch),
|
||||||
return baseFunctions.rate(targetRate, period);
|
|
||||||
}
|
/**
|
||||||
|
* Unbatches and sends individual chunks of data.
|
||||||
|
* @param options?
|
||||||
|
* @param options.objectMode? Whether this stream should behave as a stream of objects
|
||||||
|
*/
|
||||||
|
unbatch: withDefaultOptions(0, unbatch),
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Limits rate of data transferred into stream.
|
||||||
|
* @param targetRate? Desired rate in ms.
|
||||||
|
* @param period? Period to sleep for when rate is above or equal to targetRate.
|
||||||
|
* @param options?
|
||||||
|
*/
|
||||||
|
rate: withDefaultOptions(2, rate),
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Limits number of parallel processes in flight.
|
* Limits number of parallel processes in flight.
|
||||||
* @param parallel Max number of parallel processes.
|
* @param parallel Max number of parallel processes.
|
||||||
* @param func Function to execute on each data chunk
|
* @param func Function to execute on each data chunk.
|
||||||
* @param pause Amount of time to pause processing when max number of parallel processes are executing.
|
* @param pause Amount of time to pause processing when max number of parallel processes are executing.
|
||||||
*/
|
*/
|
||||||
export function parallelMap<T, R>(
|
parallelMap: withDefaultOptions(3, parallelMap),
|
||||||
mapper: (chunk: T) => R,
|
|
||||||
parallel?: number,
|
/**
|
||||||
sleepTime?: number,
|
* Accummulates and sends batches of data. Each chunk that flows into the stream is checked against items
|
||||||
) {
|
* in the buffer. How the buffer is mutated is based on 1 of 2 possible buffering strategies:
|
||||||
return baseFunctions.parallelMap(mapper, parallel, sleepTime);
|
* 1. Sliding
|
||||||
|
* - If the buffer is larger than the batchSize, the front of the buffer is popped to maintain
|
||||||
|
* the batchSize. When no key is provided, the batchSize is effectively the buffer length. When
|
||||||
|
* a key is provided, the batchSize is based on the value at that key. For example, given a key
|
||||||
|
* of `timestamp` and a batchSize of 3000, each item in the buffer will be guaranteed to be
|
||||||
|
* within 3000 timestamp units from the first element. This means that with a key, multiple elements
|
||||||
|
* may be spliced off the front of the buffer. The buffer is then pushed into the stream.
|
||||||
|
* 2. Rolling
|
||||||
|
* - If the buffer is larger than the batchSize, the buffer is cleared and pushed into the stream.
|
||||||
|
* When no key is provided, the batchSize is the buffer length. When a key is provided, the batchSize
|
||||||
|
* is based on the value at that key. For example, given a key of `timestamp` and a batchSize of 3000,
|
||||||
|
* each item in the buffer will be guaranteed to be within 3000 timestamp units from the first element.
|
||||||
|
* @param flushStrategy Buffering strategy to use.
|
||||||
|
* @param batchSize Size of the batch (in units of buffer length or value at key).
|
||||||
|
* @param keyBy Key to determine if element fits into buffer or items need to be cleared from buffer.
|
||||||
|
* @param options Transform stream options
|
||||||
|
*/
|
||||||
|
accumulator: withDefaultOptions(3, accumulator),
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Accummulates and sends batches of data. Each chunk that flows into the stream is checked against items
|
||||||
|
* in the buffer. How the buffer is mutated is based on 1 of 2 possible buffering strategies:
|
||||||
|
* 1. Sliding
|
||||||
|
* - If the iteratee returns false, the front of the buffer is popped until iteratee returns true. The
|
||||||
|
* item is pushed into the buffer and buffer is pushed into stream.
|
||||||
|
* 2. Rolling
|
||||||
|
* - If the iteratee returns false, the buffer is cleared and pushed into stream. The item is
|
||||||
|
* then pushed into the buffer.
|
||||||
|
* @param flushStrategy Buffering strategy to use.
|
||||||
|
* @param iteratee Function applied to buffer when a chunk of data enters stream to determine if element fits into
|
||||||
|
* or items need to be cleared from buffer.
|
||||||
|
* @param options Transform stream options
|
||||||
|
*/
|
||||||
|
accumulatorBy: withDefaultOptions(2, accumulatorBy),
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Composes multiple streams together. Writing occurs on first stream, piping occurs from last stream.
|
||||||
|
* @param streams Array of streams to compose. Minimum of two.
|
||||||
|
* @param options Transform stream options
|
||||||
|
*/
|
||||||
|
compose: withDefaultOptions(1, compose),
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Composes multiple streams together. Writing occurs on first stream, piping occurs from last stream.
|
||||||
|
* @param construct Constructor for new output source. Should return a Writable or ReadWrite stream.
|
||||||
|
* @param demuxBy
|
||||||
|
* @param demuxBy.key? Key to fetch value from source chunks to demultiplex source.
|
||||||
|
* @param demuxBy.keyBy? Function to fetch value from source chunks to demultiplex source.
|
||||||
|
* @param options Writable stream options
|
||||||
|
*/
|
||||||
|
demux: withDefaultOptions(2, demux),
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
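For orientation, here is a minimal usage sketch of the factory-style API this refactor introduces, written to match how the test files below consume it (the relative import path and the option-merging behaviour of withDefaultOptions are taken from those tests; this sketch is illustrative, not part of the diff):

import { Readable } from "stream";
import mhysa from "../src";

// Options passed to the factory become the defaults that withDefaultOptions
// merges into every wrapped function.
const { map, batch } = mhysa({ objectMode: true });

Readable.from(["a", "b", "c"])
    .pipe(map((s: string) => s.toUpperCase()))
    .pipe(batch(2))
    .on("data", console.log); // ["A", "B"], then ["C"]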
26 src/functions/join.ts Normal file
@@ -0,0 +1,26 @@
import { Transform, TransformOptions } from "stream";
import { StringDecoder } from "string_decoder";
import { WithEncoding } from "./baseDefinitions";

export function join(
    separator: string,
    options: WithEncoding & TransformOptions = { encoding: "utf8" },
): Transform {
    let isFirstChunk = true;
    const decoder = new StringDecoder(options.encoding);
    return new Transform({
        readableObjectMode: true,
        async transform(chunk: Buffer, encoding, callback) {
            const asString = decoder.write(chunk);
            // Take care not to break up multi-byte characters spanning multiple chunks
            if (asString !== "" || chunk.length === 0) {
                if (!isFirstChunk) {
                    this.push(separator);
                }
                this.push(asString);
                isFirstChunk = false;
            }
            callback();
        },
    });
}
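A quick usage sketch for join(), following the non-object source style used throughout the tests below (strings pushed to a non-object Readable arrive as Buffers, which the StringDecoder handles; import path assumed for illustration):

import { Readable } from "stream";
import { join } from "./join";

const source = new Readable();
let out = "";
source.pipe(join(",")).on("data", part => (out += part));
// out === "a,b,c" once the stream ends
source.push("a");
source.push("b");
source.push("c");
source.push(null);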
8 src/functions/last.ts Normal file
@@ -0,0 +1,8 @@
export function last<T>(readable: NodeJS.ReadableStream): Promise<T | null> {
    let lastChunk: T | null = null;
    return new Promise((resolve, _) => {
        readable
            .on("data", chunk => (lastChunk = chunk))
            .on("end", () => resolve(lastChunk));
    });
}
13 src/functions/map.ts Normal file
@@ -0,0 +1,13 @@
import { Transform, TransformOptions } from "stream";

export function map<T, R>(
    mapper: (chunk: T, encoding: string) => R,
    options: TransformOptions = { objectMode: true },
): Transform {
    return new Transform({
        ...options,
        async transform(chunk: T, encoding, callback) {
            callback(null, await mapper(chunk, encoding));
        },
    });
}
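map() is the basic building block here; because the transform awaits the mapper's result, async mappers work too. A small sketch (values hypothetical):

import { Readable } from "stream";
import { map } from "./map";

// Doubles each chunk; objectMode is the default for map().
Readable.from([1, 2, 3])
    .pipe(map((n: number) => n * 2))
    .on("data", console.log); // 2, 4, 6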
33 src/functions/merge.ts Normal file
@@ -0,0 +1,33 @@
import { Readable } from "stream";

export function merge(...streams: Readable[]): Readable {
    let isStarted = false;
    let streamEndedCount = 0;
    return new Readable({
        objectMode: true,
        read() {
            if (streamEndedCount >= streams.length) {
                this.push(null);
            } else if (!isStarted) {
                isStarted = true;
                streams.forEach(stream =>
                    stream
                        .on("data", chunk => {
                            if (!this.push(chunk)) {
                                streams.forEach(s => s.pause());
                            }
                        })
                        .on("error", err => this.emit("error", err))
                        .on("end", () => {
                            streamEndedCount++;
                            if (streamEndedCount === streams.length) {
                                this.push(null);
                            }
                        }),
                );
            } else {
                streams.forEach(s => s.resume());
            }
        },
    });
}
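Note the backpressure handling above: when any pushed chunk is rejected (this.push returns false), every source is paused, and the next read() call resumes them all. A sketch of interleaved consumption (hypothetical sources):

import { Readable } from "stream";
import { merge } from "./merge";

const a = Readable.from(["a1", "a2"]);
const b = Readable.from(["b1", "b2"]);

// Chunks from a and b are emitted in whatever order they arrive.
merge(a, b).on("data", console.log);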
30 src/functions/parallelMap.ts Normal file
@@ -0,0 +1,30 @@
import { Transform, TransformOptions } from "stream";
import { sleep } from "../helpers";

export function parallelMap<T, R>(
    mapper: (data: T) => R,
    parallel: number = 10,
    sleepTime: number = 1,
    options?: TransformOptions,
) {
    let inflight = 0;
    return new Transform({
        ...options,
        async transform(data, encoding, callback) {
            while (parallel <= inflight) {
                await sleep(sleepTime);
            }
            inflight += 1;
            callback();
            const res = await mapper(data);
            this.push(res);
            inflight -= 1;
        },
        async flush(callback) {
            while (inflight > 0) {
                await sleep(sleepTime);
            }
            callback();
        },
    });
}
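parallelMap() invokes callback() before the mapper resolves, so up to `parallel` mapper calls can be in flight at once while `inflight` tracks them, and flush() waits for stragglers before ending the stream. A consequence of this design is that output order is not guaranteed. A sketch with a slow async mapper (values hypothetical):

import { Readable } from "stream";
import { parallelMap } from "./parallelMap";

const slowDouble = async (n: number) => {
    await new Promise(res => setTimeout(res, 100));
    return n * 2;
};

// At most 2 mapper calls run concurrently; results may arrive out of order.
Readable.from([1, 2, 3, 4])
    .pipe(parallelMap(slowDouble, 2, 1, { objectMode: true }))
    .on("data", console.log);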
22 src/functions/parse.ts Normal file
@@ -0,0 +1,22 @@
import { Transform } from "stream";
import { StringDecoder } from "string_decoder";
import { SerializationFormats } from "./baseDefinitions";

export function parse(
    format: SerializationFormats = SerializationFormats.utf8,
): Transform {
    const decoder = new StringDecoder(format);
    return new Transform({
        readableObjectMode: true,
        writableObjectMode: true,
        async transform(chunk: Buffer, encoding, callback) {
            try {
                const asString = decoder.write(chunk);
                // Using await causes parsing errors to be emitted
                callback(undefined, await JSON.parse(asString));
            } catch (err) {
                callback(err);
            }
        },
    });
}
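Each written chunk must be one complete JSON document; the try/catch routes parse failures through callback(err), so malformed input surfaces as a stream error rather than an unhandled rejection. A minimal sketch:

import { Readable } from "stream";
import { parse } from "./parse";

const source = new Readable();
source
    .pipe(parse())
    .on("data", obj => console.log(obj.a)) // 1
    .on("error", err => console.error("bad JSON:", err));
source.push('{ "a": 1 }');
source.push(null);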
24 src/functions/rate.ts Normal file
@@ -0,0 +1,24 @@
import { Transform, TransformOptions } from "stream";
import { performance } from "perf_hooks";
import { sleep } from "../helpers";

export function rate(
    targetRate: number = 50,
    period: number = 1,
    options?: TransformOptions,
): Transform {
    const deltaMS = ((1 / targetRate) * 1000) / period; // Skip a full period
    let total = 0;
    const start = performance.now();
    return new Transform({
        ...options,
        async transform(data, encoding, callback) {
            const currentRate = (total / (performance.now() - start)) * 1000;
            if (targetRate && currentRate > targetRate) {
                await sleep(deltaMS);
            }
            total += 1;
            callback(undefined, data);
        },
    });
}
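A worked example of the deltaMS formula above: with the defaults targetRate = 50 (chunks per second) and period = 1, deltaMS = ((1 / 50) * 1000) / 1 = 20, so the transform sleeps 20 ms whenever the observed rate exceeds the target. Usage sketch (values hypothetical):

import { Readable } from "stream";
import { rate } from "./rate";

// Throttle an object stream to roughly 2 chunks per second.
Readable.from(["a", "b", "c"])
    .pipe(rate(2, 1, { objectMode: true }))
    .on("data", console.log);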
31 src/functions/reduce.ts Normal file
@@ -0,0 +1,31 @@
import { Transform, TransformOptions } from "stream";

export function reduce<T, R>(
    iteratee:
        | ((previousValue: R, chunk: T, encoding: string) => R)
        | ((previousValue: R, chunk: T, encoding: string) => Promise<R>),
    initialValue: R,
    options?: TransformOptions,
) {
    let value = initialValue;
    return new Transform({
        ...options,
        async transform(chunk: T, encoding, callback) {
            value = await iteratee(value, chunk, encoding);
            callback();
        },
        flush(callback) {
            // Best effort attempt at yielding the final value (will throw if e.g. yielding an object and
            // downstream doesn't expect objects)
            try {
                callback(undefined, value);
            } catch (err) {
                try {
                    this.emit("error", err);
                } catch {
                    // Best effort was made
                }
            }
        },
    });
}
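reduce() emits exactly once, in flush(), after the source ends; until then every chunk only updates the accumulator. A minimal sketch:

import { Readable } from "stream";
import { reduce } from "./reduce";

// Sums the stream and emits the total once the source ends.
Readable.from([1, 2, 3])
    .pipe(reduce((sum: number, n: number) => sum + n, 0, { objectMode: true }))
    .on("data", total => console.log(total)); // 6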
25 src/functions/replace.ts Normal file
@@ -0,0 +1,25 @@
import { Transform } from "stream";
import { StringDecoder } from "string_decoder";
import { WithEncoding } from "./baseDefinitions";

export function replace(
    searchValue: string | RegExp,
    replaceValue: string,
    options: WithEncoding = { encoding: "utf8" },
): Transform {
    const decoder = new StringDecoder(options.encoding);
    return new Transform({
        readableObjectMode: true,
        transform(chunk: Buffer, encoding, callback) {
            const asString = decoder.write(chunk);
            // Take care not to break up multi-byte characters spanning multiple chunks
            if (asString !== "" || chunk.length === 0) {
                callback(
                    undefined,
                    asString.replace(searchValue, replaceValue),
                );
            } else {
                callback();
            }
        },
    });
}
29 src/functions/split.ts Normal file
@@ -0,0 +1,29 @@
import { Transform } from "stream";
import { StringDecoder } from "string_decoder";
import { WithEncoding } from "./baseDefinitions";

export function split(
    separator: string | RegExp = "\n",
    options: WithEncoding = { encoding: "utf8" },
): Transform {
    let buffered = "";
    const decoder = new StringDecoder(options.encoding);

    return new Transform({
        readableObjectMode: true,
        transform(chunk: Buffer, encoding, callback) {
            const asString = decoder.write(chunk);
            const splitted = asString.split(separator);
            if (splitted.length > 1) {
                splitted[0] = buffered.concat(splitted[0]);
                buffered = "";
            }
            buffered += splitted[splitted.length - 1];
            splitted.slice(0, -1).forEach((part: string) => this.push(part));
            callback();
        },
        flush(callback) {
            callback(undefined, buffered + decoder.end());
        },
    });
}
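split() buffers the partial tail after the last separator and only flushes it (plus any bytes still held by the decoder) when the source ends, so separators may fall anywhere across chunk boundaries. Sketch:

import { Readable } from "stream";
import { split } from "./split";

const source = new Readable();
// Emits "a", "b", "c" even though the newlines straddle chunk boundaries.
source.pipe(split("\n")).on("data", line => console.log(line));
source.push("a\nb");
source.push("\nc");
source.push(null);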
19 src/functions/stringify.ts Normal file
@@ -0,0 +1,19 @@
import { Transform } from "stream";
import { JsonValue, JsonParseOptions } from "./baseDefinitions";

export function stringify(
    options: JsonParseOptions = { pretty: false },
): Transform {
    return new Transform({
        readableObjectMode: true,
        writableObjectMode: true,
        transform(chunk: JsonValue, encoding, callback) {
            callback(
                undefined,
                options.pretty
                    ? JSON.stringify(chunk, null, 2)
                    : JSON.stringify(chunk),
            );
        },
    });
}
13 src/functions/unbatch.ts Normal file
@@ -0,0 +1,13 @@
import { Transform, TransformOptions } from "stream";

export function unbatch(options?: TransformOptions) {
    return new Transform({
        ...options,
        transform(data, encoding, callback) {
            for (const d of data) {
                this.push(d);
            }
            callback();
        },
    });
}
24 src/index.ts
@@ -1,22 +1,2 @@
-export {
-    fromArray,
-    map,
-    flatMap,
-    filter,
-    reduce,
-    split,
-    join,
-    replace,
-    parse,
-    stringify,
-    collect,
-    concat,
-    merge,
-    duplex,
-    child,
-    last,
-    batch,
-    unbatch,
-    rate,
-    parallelMap,
-} from "./functions";
+import mhysa from "./functions";
+export default mhysa;
557 tests/accumulator.spec.ts Normal file
@@ -0,0 +1,557 @@
import test from "ava";
import { expect } from "chai";
import { Readable } from "stream";
import mhysa from "../src";
import { FlushStrategy } from "../src/functions/accumulator";
import { performance } from "perf_hooks";
const { accumulator, accumulatorBy } = mhysa({ objectMode: true });

test.cb("accumulator() rolling", t => {
    t.plan(3);
    let chunkIndex = 0;
    interface TestObject {
        ts: number;
        key: string;
    }
    const source = new Readable({ objectMode: true });
    const firstFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
    const secondFlush = [{ ts: 2, key: "d" }, { ts: 3, key: "e" }];
    const thirdFlush = [{ ts: 4, key: "f" }];
    const flushes = [firstFlush, secondFlush, thirdFlush];

    source
        .pipe(
            accumulator(FlushStrategy.rolling, 2, undefined, {
                objectMode: true,
            }),
        )
        .on("data", (flush: TestObject[]) => {
            t.deepEqual(flush, flushes[chunkIndex]);
            chunkIndex++;
        })
        .on("error", (e: any) => {
            t.end(e);
        })
        .on("end", t.end);
    [...firstFlush, ...secondFlush, ...thirdFlush].forEach(item => {
        source.push(item);
    });
    source.push(null);
});

test.cb("accumulator() rolling with key", t => {
    t.plan(2);
    let chunkIndex = 0;
    interface TestObject {
        ts: number;
        key: string;
    }
    const source = new Readable({ objectMode: true });
    const firstFlush = [
        { ts: 0, key: "a" },
        { ts: 1, key: "b" },
        { ts: 2, key: "c" },
        { ts: 2, key: "d" },
    ];
    const secondFlush = [{ ts: 3, key: "e" }];
    const flushes = [firstFlush, secondFlush];

    source
        .pipe(accumulator(FlushStrategy.rolling, 3, "ts", { objectMode: true }))
        .on("data", (flush: TestObject[]) => {
            t.deepEqual(flush, flushes[chunkIndex]);
            chunkIndex++;
        })
        .on("error", (e: any) => {
            t.end(e);
        })
        .on("end", t.end);
    [...firstFlush, ...secondFlush].forEach(item => {
        source.push(item);
    });
    source.push(null);
});

test.cb(
    "accumulator() rolling should emit error and ignore chunk when the key is missing",
    t => {
        t.plan(2);
        let index = 0;
        interface TestObject {
            ts: number;
            key: string;
        }
        const source = new Readable({ objectMode: true });
        const accumulatorStream = accumulator(
            FlushStrategy.rolling,
            3,
            "nonExistingKey",
            { objectMode: true },
        );
        const input = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];

        source
            .pipe(accumulatorStream)
            .on("data", (flush: TestObject[]) => {
                // No valid data output
                expect(flush).to.deep.equal([]);
            })
            .on("error", (err: any) => {
                source.pipe(accumulatorStream);
                accumulatorStream.resume();
                expect(err.message).to.equal(
                    `Key is missing in event: (nonExistingKey, ${JSON.stringify(
                        input[index],
                    )})`,
                );
                index++;
                t.pass();
            })
            .on("end", t.end);
        input.forEach(item => {
            source.push(item);
        });
        source.push(null);
    },
);

test.cb(
    "accumulator() rolling should emit error, ignore chunk when key is missing and continue processing chunks correctly",
    t => {
        t.plan(3);
        let chunkIndex = 0;
        interface TestObject {
            ts: number;
            key: string;
        }
        const source = new Readable({ objectMode: true });
        const accumulatorStream = accumulator(FlushStrategy.rolling, 3, "ts", {
            objectMode: true,
        });
        const input = [
            { ts: 0, key: "a" },
            { ts: 1, key: "b" },
            { ts: 2, key: "c" },
            { key: "d" },
            { ts: 3, key: "e" },
        ];
        const firstFlush = [
            { ts: 0, key: "a" },
            { ts: 1, key: "b" },
            { ts: 2, key: "c" },
        ];
        const secondFlush = [{ ts: 3, key: "e" }];
        const flushes = [firstFlush, secondFlush];

        source
            .pipe(accumulatorStream)
            .on("data", (flush: TestObject[]) => {
                t.deepEqual(flush, flushes[chunkIndex]);
                chunkIndex++;
            })
            .on("error", (err: any) => {
                source.pipe(accumulatorStream);
                accumulatorStream.resume();
                expect(err.message).to.equal(
                    `Key is missing in event: (ts, ${JSON.stringify(
                        input[3],
                    )})`,
                );
                t.pass();
            })
            .on("end", t.end);
        input.forEach(item => {
            source.push(item);
        });
        source.push(null);
    },
);

test.cb("accumulator() sliding", t => {
    t.plan(4);
    let chunkIndex = 0;
    interface TestObject {
        ts: number;
        key: string;
    }
    const source = new Readable({ objectMode: true });
    const input = [
        { ts: 0, key: "a" },
        { ts: 1, key: "b" },
        { ts: 2, key: "c" },
        { ts: 4, key: "d" },
    ];
    const firstFlush = [{ ts: 0, key: "a" }];
    const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
    const thirdFlush = [
        { ts: 0, key: "a" },
        { ts: 1, key: "b" },
        { ts: 2, key: "c" },
    ];
    const fourthFlush = [
        { ts: 1, key: "b" },
        { ts: 2, key: "c" },
        { ts: 4, key: "d" },
    ];

    const flushes = [firstFlush, secondFlush, thirdFlush, fourthFlush];
    source
        .pipe(
            accumulator(FlushStrategy.sliding, 3, undefined, {
                objectMode: true,
            }),
        )
        .on("data", (flush: TestObject[]) => {
            t.deepEqual(flush, flushes[chunkIndex]);
            chunkIndex++;
        })
        .on("error", (e: any) => {
            t.end(e);
        })
        .on("end", t.end);
    input.forEach(item => {
        source.push(item);
    });
    source.push(null);
});

test.cb("accumulator() sliding with key", t => {
    t.plan(6);
    let chunkIndex = 0;
    interface TestObject {
        ts: number;
        key: string;
    }
    const source = new Readable({ objectMode: true });
    const input = [
        { ts: 0, key: "a" },
        { ts: 1, key: "b" },
        { ts: 2, key: "c" },
        { ts: 3, key: "d" },
        { ts: 5, key: "f" },
        { ts: 6, key: "g" },
    ];
    const firstFlush = [{ ts: 0, key: "a" }];
    const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
    const thirdFlush = [
        { ts: 0, key: "a" },
        { ts: 1, key: "b" },
        { ts: 2, key: "c" },
    ];
    const fourthFlush = [
        { ts: 1, key: "b" },
        { ts: 2, key: "c" },
        { ts: 3, key: "d" },
    ];
    const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }];
    const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }];

    const flushes = [
        firstFlush,
        secondFlush,
        thirdFlush,
        fourthFlush,
        fifthFlush,
        sixthFlush,
    ];
    source
        .pipe(accumulator(FlushStrategy.sliding, 3, "ts", { objectMode: true }))
        .on("data", (flush: TestObject[]) => {
            t.deepEqual(flush, flushes[chunkIndex]);
            chunkIndex++;
        })
        .on("error", (e: any) => {
            t.end(e);
        })
        .on("end", t.end);
    input.forEach(item => {
        source.push(item);
    });
    source.push(null);
});

test.cb(
    "accumulator() sliding should emit error and ignore chunk when key is missing",
    t => {
        t.plan(2);
        let index = 0;
        interface TestObject {
            ts: number;
            key: string;
        }
        const source = new Readable({ objectMode: true });
        const accumulatorStream = accumulator(
            FlushStrategy.sliding,
            3,
            "nonExistingKey",
            { objectMode: true },
        );
        const input = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];

        source
            .pipe(accumulatorStream)
            .on("data", (flush: TestObject[]) => {
                expect(flush).to.deep.equal([]);
            })
            .on("error", (err: any) => {
                source.pipe(accumulatorStream);
                accumulatorStream.resume();
                expect(err.message).to.equal(
                    `Key is missing in event: (nonExistingKey, ${JSON.stringify(
                        input[index],
                    )})`,
                );
                index++;
                t.pass();
            })
            .on("end", t.end);
        input.forEach(item => {
            source.push(item);
        });
        source.push(null);
    },
);

test.cb(
    "accumulator() sliding should emit error, ignore chunk when key is missing and continue processing chunks correctly",
    t => {
        t.plan(6);
        let chunkIndex = 0;
        interface TestObject {
            ts: number;
            key: string;
        }
        const source = new Readable({ objectMode: true });
        const accumulatorStream = accumulator(FlushStrategy.sliding, 3, "ts", {
            objectMode: true,
        });
        const input = [
            { ts: 0, key: "a" },
            { key: "b" },
            { ts: 2, key: "c" },
            { ts: 3, key: "d" },
            { ts: 5, key: "f" },
            { ts: 6, key: "g" },
        ];
        const firstFlush = [{ ts: 0, key: "a" }];
        const secondFlush = [{ ts: 0, key: "a" }, { ts: 2, key: "c" }];
        const thirdFlush = [{ ts: 2, key: "c" }, { ts: 3, key: "d" }];
        const fourthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }];
        const fifthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }];

        const flushes = [
            firstFlush,
            secondFlush,
            thirdFlush,
            fourthFlush,
            fifthFlush,
        ];
        source
            .pipe(accumulatorStream)
            .on("data", (flush: TestObject[]) => {
                t.deepEqual(flush, flushes[chunkIndex]);
                chunkIndex++;
            })
            .on("error", (err: any) => {
                source.pipe(accumulatorStream);
                accumulatorStream.resume();
                expect(err.message).to.equal(
                    `Key is missing in event: (ts, ${JSON.stringify(
                        input[1],
                    )})`,
                );
                t.pass();
            })
            .on("end", t.end);
        input.forEach(item => {
            source.push(item);
        });
        source.push(null);
    },
);

test.cb("accumulatorBy() rolling", t => {
    t.plan(2);
    let chunkIndex = 0;
    interface TestObject {
        ts: number;
        key: string;
    }
    const source = new Readable({ objectMode: true });
    const firstFlush = [
        { ts: 0, key: "a" },
        { ts: 1, key: "b" },
        { ts: 2, key: "c" },
        { ts: 2, key: "d" },
    ];
    const secondFlush = [{ ts: 3, key: "e" }];
    const flushes = [firstFlush, secondFlush];

    source
        .pipe(
            accumulatorBy(
                FlushStrategy.rolling,
                (event: TestObject, bufferChunk: TestObject) => {
                    return bufferChunk.ts + 3 <= event.ts;
                },
                { objectMode: true },
            ),
        )
        .on("data", (flush: TestObject[]) => {
            t.deepEqual(flush, flushes[chunkIndex]);
            chunkIndex++;
        })
        .on("error", (e: any) => {
            t.end(e);
        })
        .on("end", t.end);
    [...firstFlush, ...secondFlush].forEach(item => {
        source.push(item);
    });
    source.push(null);
});

test.cb.skip(
    "accumulatorBy() rolling should emit error when key iteratee throws",
    t => {
        t.plan(2);
        interface TestObject {
            ts: number;
            key: string;
        }
        const source = new Readable({ objectMode: true });
        const input = [
            { ts: 0, key: "a" },
            { ts: 1, key: "b" },
            { ts: 2, key: "c" },
        ];
        const accumulatorStream = accumulatorBy(
            FlushStrategy.rolling,
            (event: TestObject, bufferChunk: TestObject) => {
                if (event.key !== "a") {
                    throw new Error("Failed mapping");
                }
                return bufferChunk.ts + 3 <= event.ts;
            },
            { objectMode: true },
        );
        source
            .pipe(accumulatorStream)
            .on("error", (err: any) => {
                source.pipe(accumulatorStream);
                accumulatorStream.resume();
                expect(err.message).to.equal("Failed mapping");
                t.pass();
            })
            .on("end", t.end);

        input.forEach(item => {
            source.push(item);
        });
        source.push(null);
    },
);

test.cb("accumulatorBy() sliding", t => {
    t.plan(6);
    let chunkIndex = 0;
    interface TestObject {
        ts: number;
        key: string;
    }
    const source = new Readable({ objectMode: true });
    const input = [
        { ts: 0, key: "a" },
        { ts: 1, key: "b" },
        { ts: 2, key: "c" },
        { ts: 3, key: "d" },
        { ts: 5, key: "f" },
        { ts: 6, key: "g" },
    ];
    const firstFlush = [{ ts: 0, key: "a" }];
    const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
    const thirdFlush = [
        { ts: 0, key: "a" },
        { ts: 1, key: "b" },
        { ts: 2, key: "c" },
    ];
    const fourthFlush = [
        { ts: 1, key: "b" },
        { ts: 2, key: "c" },
        { ts: 3, key: "d" },
    ];
    const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }];
    const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }];

    const flushes = [
        firstFlush,
        secondFlush,
        thirdFlush,
        fourthFlush,
        fifthFlush,
        sixthFlush,
    ];
    source
        .pipe(
            accumulatorBy(
                FlushStrategy.sliding,
                (event: TestObject, bufferChunk: TestObject) => {
                    return bufferChunk.ts + 3 <= event.ts ? true : false;
                },
                { objectMode: true },
            ),
        )
        .on("data", (flush: TestObject[]) => {
            t.deepEqual(flush, flushes[chunkIndex]);
            chunkIndex++;
        })
        .on("error", (e: any) => {
            t.end(e);
        })
        .on("end", t.end);
    input.forEach(item => {
        source.push(item);
    });
    source.push(null);
});

test.cb.skip(
    "accumulatorBy() sliding should emit error when key iteratee throws",
    t => {
        t.plan(2);
        interface TestObject {
            ts: number;
            key: string;
        }
        const source = new Readable({ objectMode: true });
        const input = [
            { ts: 0, key: "a" },
            { ts: 1, key: "b" },
            { ts: 2, key: "c" },
        ];
        const accumulatorStream = accumulatorBy(
            FlushStrategy.sliding,
            (event: TestObject, bufferChunk: TestObject) => {
                if (event.key !== "a") {
                    throw new Error("Failed mapping");
                }
                return bufferChunk.ts + 3 <= event.ts ? true : false;
            },
            { objectMode: true },
        );
        source
            .pipe(accumulatorStream)
            .on("error", (err: any) => {
                source.pipe(accumulatorStream);
                accumulatorStream.resume();
                expect(err.message).to.equal("Failed mapping");
                t.pass();
            })
            .on("end", t.end);

        input.forEach(item => {
            source.push(item);
        });
        source.push(null);
    },
);
59 tests/batch.spec.ts Normal file
@@ -0,0 +1,59 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import mhysa from "../src";
const { batch } = mhysa({ objectMode: true });

test.cb("batch() batches chunks together", t => {
    t.plan(3);
    const source = new Readable({ objectMode: true });
    const expectedElements = [["a", "b", "c"], ["d", "e", "f"], ["g"]];
    let i = 0;
    source
        .pipe(batch(3))
        .on("data", (element: string[]) => {
            t.deepEqual(element, expectedElements[i]);
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);

    source.push("a");
    source.push("b");
    source.push("c");
    source.push("d");
    source.push("e");
    source.push("f");
    source.push("g");
    source.push(null);
});

test.cb("batch() yields a batch after the timeout", t => {
    t.plan(3);
    const source = new Readable({
        objectMode: true,
        read(size: number) {
            return;
        },
    });
    const expectedElements = [["a", "b"], ["c"], ["d"]];
    let i = 0;
    source
        .pipe(batch(3))
        .on("data", (element: string[]) => {
            t.deepEqual(element, expectedElements[i]);
            i++;
        })
        .on("error", t.fail)
        .on("end", t.end);

    source.push("a");
    source.push("b");
    setTimeout(() => {
        source.push("c");
    }, 600);
    setTimeout(() => {
        source.push("d");
        source.push(null);
    }, 600 * 2);
});
29 tests/child.spec.ts Normal file
@@ -0,0 +1,29 @@
import * as cp from "child_process";
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import mhysa from "../src";
const { child } = mhysa();

test.cb(
    "child() allows easily writing to child process stdin and reading from its stdout",
    t => {
        t.plan(1);
        const source = new Readable();
        const catProcess = cp.exec("cat");
        let out = "";
        source
            .pipe(child(catProcess))
            .on("data", chunk => (out += chunk))
            .on("error", t.end)
            .on("end", () => {
                expect(out).to.equal("abcdef");
                t.pass();
                t.end();
            });
        source.push("ab");
        source.push("cd");
        source.push("ef");
        source.push(null);
    },
);
133 tests/collect.spec.ts Normal file
@@ -0,0 +1,133 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import mhysa from "../src";
const { collect } = mhysa();

test.cb(
    "collect() collects streamed elements into an array (object, flowing mode)",
    t => {
        t.plan(1);
        const source = new Readable({ objectMode: true });

        source
            .pipe(collect({ objectMode: true }))
            .on("data", collected => {
                expect(collected).to.deep.equal(["a", "b", "c"]);
                t.pass();
            })
            .on("error", t.end)
            .on("end", t.end);

        source.push("a");
        source.push("b");
        source.push("c");
        source.push(null);
    },
);

test.cb(
    "collect() collects streamed elements into an array (object, paused mode)",
    t => {
        t.plan(1);
        const source = new Readable({ objectMode: true });
        const collector = source.pipe(collect({ objectMode: true }));

        collector
            .on("readable", () => {
                let collected = collector.read();
                while (collected !== null) {
                    expect(collected).to.deep.equal(["a", "b", "c"]);
                    t.pass();
                    collected = collector.read();
                }
            })
            .on("error", t.end)
            .on("end", t.end);

        source.push("a");
        source.push("b");
        source.push("c");
        source.push(null);
    },
);

test.cb(
    "collect() collects streamed bytes into a buffer (non-object, flowing mode)",
    t => {
        t.plan(1);
        const source = new Readable({ objectMode: false });

        source
            .pipe(collect())
            .on("data", collected => {
                expect(collected).to.deep.equal(Buffer.from("abc"));
                t.pass();
            })
            .on("error", t.end)
            .on("end", t.end);

        source.push("a");
        source.push("b");
        source.push("c");
        source.push(null);
    },
);

test.cb(
    "collect() collects streamed bytes into a buffer (non-object, paused mode)",
    t => {
        t.plan(1);
        const source = new Readable({ objectMode: false });
        const collector = source.pipe(collect({ objectMode: false }));
        collector
            .on("readable", () => {
                let collected = collector.read();
                while (collected !== null) {
                    expect(collected).to.deep.equal(Buffer.from("abc"));
                    t.pass();
                    collected = collector.read();
                }
            })
            .on("error", t.end)
            .on("end", t.end);

        source.push("a");
        source.push("b");
        source.push("c");
        source.push(null);
    },
);

test.cb(
    "collect() emits an empty array if the source was empty (object mode)",
    t => {
        t.plan(1);
        const source = new Readable({ objectMode: true });
        const collector = source.pipe(collect({ objectMode: true }));
        collector
            .on("data", collected => {
                expect(collected).to.deep.equal([]);
                t.pass();
            })
            .on("error", t.end)
            .on("end", t.end);

        source.push(null);
    },
);

test.cb(
    "collect() emits nothing if the source was empty (non-object mode)",
    t => {
        t.plan(0);
        const source = new Readable({ objectMode: false });
        const collector = source.pipe(collect({ objectMode: false }));
        collector
            .on("data", () => t.fail())
            .on("error", t.end)
            .on("end", t.end);

        source.push(null);
    },
);
512
tests/compose.spec.ts
Normal file
512
tests/compose.spec.ts
Normal file
@@ -0,0 +1,512 @@
|
|||||||
|
const test = require("ava");
|
||||||
|
const { expect } = require("chai");
|
||||||
|
const { sleep } = require("../src/helpers");
|
||||||
|
import mhysa from "../src";
|
||||||
|
import { performance } from "perf_hooks";
|
||||||
|
const { compose, map } = mhysa({ objectMode: true });
|
||||||
|
|
||||||
|
test.cb("compose() chains two streams together in the correct order", t => {
|
||||||
|
t.plan(3);
|
||||||
|
interface Chunk {
|
||||||
|
visited: number[];
|
||||||
|
key: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
let i = 0;
|
||||||
|
const first = map((chunk: Chunk) => {
|
||||||
|
chunk.visited.push(1);
|
||||||
|
return chunk;
|
||||||
|
});
|
||||||
|
const second = map((chunk: Chunk) => {
|
||||||
|
chunk.visited.push(2);
|
||||||
|
return chunk;
|
||||||
|
});
|
||||||
|
|
||||||
|
const composed = compose(
|
||||||
|
[first, second],
|
||||||
|
{ objectMode: true },
|
||||||
|
);
|
||||||
|
|
||||||
|
composed.on("data", data => {
|
||||||
|
expect(data).to.deep.equal(result[i]);
|
||||||
|
t.pass();
|
||||||
|
i++;
|
||||||
|
if (i === 3) {
|
||||||
|
t.end();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
composed.on("error", err => {
|
||||||
|
t.end(err);
|
||||||
|
});
|
||||||
|
composed.on("end", () => {
|
||||||
|
t.end();
|
||||||
|
});
|
||||||
|
|
||||||
|
const input = [
|
||||||
|
{ key: "a", visited: [] },
|
||||||
|
{ key: "b", visited: [] },
|
||||||
|
{ key: "c", visited: [] },
|
||||||
|
];
|
||||||
|
const result = [
|
||||||
|
{ key: "a", visited: [1, 2] },
|
||||||
|
{ key: "b", visited: [1, 2] },
|
||||||
|
{ key: "c", visited: [1, 2] },
|
||||||
|
];
|
||||||
|
|
||||||
|
input.forEach(item => composed.write(item));
|
||||||
|
});
|
||||||
|
|
||||||
|
test.cb("piping compose() maintains correct order", t => {
|
||||||
|
t.plan(3);
|
||||||
|
interface Chunk {
|
||||||
|
visited: number[];
|
||||||
|
key: string;
|
||||||
|
}
|
||||||
|
let i = 0;
|
||||||
|
const first = map((chunk: Chunk) => {
|
||||||
|
chunk.visited.push(1);
|
||||||
|
return chunk;
|
||||||
|
});
|
||||||
|
const second = map((chunk: Chunk) => {
|
||||||
|
chunk.visited.push(2);
|
||||||
|
return chunk;
|
||||||
|
});
|
||||||
|
|
||||||
|
const composed = compose(
|
||||||
|
[first, second],
|
||||||
|
{ objectMode: true },
|
||||||
|
);
|
||||||
|
const third = map((chunk: Chunk) => {
|
||||||
|
chunk.visited.push(3);
|
||||||
|
return chunk;
|
||||||
|
});
|
||||||
|
|
||||||
|
composed.pipe(third).on("data", data => {
|
||||||
|
expect(data).to.deep.equal(result[i]);
|
||||||
|
t.pass();
|
||||||
|
i++;
|
||||||
|
if (i === 3) {
|
||||||
|
t.end();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
composed.on("error", err => {
|
||||||
|
t.end(err);
|
||||||
|
});
|
||||||
|
|
||||||
|
const input = [
|
||||||
|
{ key: "a", visited: [] },
|
||||||
|
{ key: "b", visited: [] },
|
||||||
|
{ key: "c", visited: [] },
|
||||||
|
];
|
||||||
|
const result = [
|
||||||
|
{ key: "a", visited: [1, 2, 3] },
|
||||||
|
{ key: "b", visited: [1, 2, 3] },
|
||||||
|
{ key: "c", visited: [1, 2, 3] },
|
||||||
|
];
|
||||||
|
|
||||||
|
input.forEach(item => composed.write(item));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("compose() writable length should be less than highWaterMark when handing writes", async t => {
|
||||||
|
t.plan(7);
|
||||||
|
return new Promise(async (resolve, reject) => {
|
||||||
|
interface Chunk {
|
||||||
|
key: string;
|
||||||
|
mapped: number[];
|
||||||
|
}
|
||||||
|
const first = map(
|
||||||
|
async (chunk: Chunk) => {
|
||||||
|
chunk.mapped.push(1);
|
||||||
|
return chunk;
|
||||||
|
},
|
||||||
|
{
|
||||||
|
objectMode: true,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
const second = map(
|
||||||
|
async (chunk: Chunk) => {
|
||||||
|
chunk.mapped.push(2);
|
||||||
|
return chunk;
|
||||||
|
},
|
||||||
|
{ objectMode: true },
|
||||||
|
);
|
||||||
|
|
||||||
|
const composed = compose(
|
||||||
|
[first, second],
|
||||||
|
{ objectMode: true, highWaterMark: 2 },
|
||||||
|
);
|
||||||
|
composed.on("error", err => {
|
||||||
|
reject();
|
||||||
|
});
|
||||||
|
|
||||||
|
composed.on("drain", () => {
|
||||||
|
t.pass();
|
||||||
|
expect(composed._writableState.length).to.be.equal(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
composed.on("data", (chunk: Chunk) => {
|
||||||
|
if (chunk.key === "e") {
|
||||||
|
resolve();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
const input = [
|
||||||
|
{ key: "a", mapped: [] },
|
||||||
|
{ key: "b", mapped: [] },
|
||||||
|
{ key: "c", mapped: [] },
|
||||||
|
{ key: "d", mapped: [] },
|
||||||
|
{ key: "e", mapped: [] },
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const item of input) {
|
||||||
|
const res = composed.write(item);
|
||||||
|
expect(composed._writableState.length).to.be.at.most(2);
|
||||||
|
t.pass();
|
||||||
|
if (!res) {
|
||||||
|
await sleep(10);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test("compose() should emit drain event ~rate * highWaterMark ms for every write that causes backpressure", async t => {
|
||||||
|
t.plan(7);
|
||||||
|
const _rate = 100;
|
||||||
|
const highWaterMark = 2;
|
||||||
|
return new Promise(async (resolve, reject) => {
|
||||||
|
interface Chunk {
|
||||||
|
key: string;
|
||||||
|
mapped: number[];
|
||||||
|
}
|
||||||
|
const first = map(
|
||||||
|
async (chunk: Chunk) => {
|
||||||
|
await sleep(_rate);
|
||||||
|
chunk.mapped.push(1);
|
||||||
|
return chunk;
|
||||||
|
},
|
||||||
|
{
|
||||||
|
objectMode: true,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
const second = map(
|
||||||
|
async (chunk: Chunk) => {
|
||||||
|
chunk.mapped.push(2);
|
||||||
|
return chunk;
|
||||||
|
},
|
||||||
|
{ objectMode: true },
|
||||||
|
);
|
||||||
|
|
||||||
|
const composed = compose(
|
||||||
|
[first, second],
|
||||||
|
{ objectMode: true, highWaterMark },
|
||||||
|
);
|
||||||
|
composed.on("error", err => {
|
||||||
|
reject();
|
||||||
|
});
|
||||||
|
|
||||||
|
composed.on("drain", () => {
|
||||||
|
t.pass();
|
||||||
|
expect(composed._writableState.length).to.be.equal(0);
|
||||||
|
expect(performance.now() - start).to.be.closeTo(
|
||||||
|
_rate * highWaterMark,
|
||||||
|
40,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
composed.on("data", (chunk: Chunk) => {
|
||||||
|
pendingReads--;
|
||||||
|
if (pendingReads === 0) {
|
||||||
|
resolve();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
const input = [
|
||||||
|
{ key: "a", mapped: [] },
|
||||||
|
{ key: "b", mapped: [] },
|
||||||
|
{ key: "c", mapped: [] },
|
||||||
|
{ key: "d", mapped: [] },
|
||||||
|
{ key: "e", mapped: [] },
|
||||||
|
];
|
||||||
|
|
||||||
|
let start = performance.now();
|
||||||
|
let pendingReads = input.length;
|
||||||
|
start = performance.now();
|
||||||
|
for (const item of input) {
|
||||||
|
const res = composed.write(item);
|
||||||
|
expect(composed._writableState.length).to.be.at.most(highWaterMark);
|
||||||
|
t.pass();
|
||||||
|
if (!res) {
|
||||||
|
await sleep(_rate * highWaterMark * 2);
|
||||||
|
start = performance.now();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test.cb(
|
||||||
|
"compose() should emit drain event after 500 ms when writing 5 items that take 100ms to process with a highWaterMark of 5 ",
|
||||||
|
t => {
|
||||||
|
t.plan(6);
|
||||||
|
const _rate = 100;
|
||||||
|
interface Chunk {
|
||||||
|
key: string;
|
||||||
|
mapped: number[];
|
||||||
|
}
|
||||||
|
const first = map(
|
||||||
|
async (chunk: Chunk) => {
|
||||||
|
await sleep(_rate);
|
||||||
|
chunk.mapped.push(1);
|
||||||
|
return chunk;
|
||||||
|
},
|
||||||
|
{
|
||||||
|
objectMode: true,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
const second = map(
|
||||||
|
async (chunk: Chunk) => {
|
||||||
|
chunk.mapped.push(2);
|
||||||
|
return chunk;
|
||||||
|
},
|
||||||
|
{ objectMode: true },
|
||||||
|
);
|
||||||
|
|
||||||
|
const composed = compose(
|
||||||
|
[first, second],
|
||||||
|
{ objectMode: true, highWaterMark: 5 },
|
||||||
|
);
|
||||||
|
|
||||||
|
composed.on("error", err => {
|
||||||
|
t.end(err);
|
||||||
|
});
|
||||||
|
|
||||||
|
composed.on("drain", () => {
|
||||||
|
expect(composed._writableState.length).to.be.equal(0);
|
||||||
|
expect(performance.now() - start).to.be.closeTo(
|
||||||
|
_rate * input.length,
|
||||||
|
50,
|
||||||
|
);
|
||||||
|
t.pass();
|
||||||
|
});
|
||||||
|
|
||||||
|
composed.on("data", (chunk: Chunk) => {
|
||||||
|
t.pass();
|
||||||
|
if (chunk.key === "e") {
|
||||||
|
t.end();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
const input = [
|
||||||
|
{ key: "a", mapped: [] },
|
||||||
|
{ key: "b", mapped: [] },
|
||||||
|
{ key: "c", mapped: [] },
|
||||||
|
{ key: "d", mapped: [] },
|
||||||
|
{ key: "e", mapped: [] },
|
||||||
|
];
|
||||||
|
input.forEach(item => {
|
||||||
|
composed.write(item);
|
||||||
|
});
|
||||||
|
const start = performance.now();
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
test.cb(
    "compose() should emit drain event immediately when second stream is bottleneck",
    t => {
        t.plan(6);
        const _rate = 200;
        interface Chunk {
            key: string;
            mapped: number[];
        }
        const first = map(
            (chunk: Chunk) => {
                chunk.mapped.push(1);
                return chunk;
            },
            {
                objectMode: true,
            },
        );

        const second = map(
            async (chunk: Chunk) => {
                pendingReads--;
                await sleep(_rate);
                expect(second._writableState.length).to.be.equal(1);
                expect(first._readableState.length).to.equal(pendingReads);
                chunk.mapped.push(2);
                return chunk;
            },
            { objectMode: true, highWaterMark: 1 },
        );

        const composed = compose(
            [first, second],
            { objectMode: true, highWaterMark: 5 },
        );
        composed.on("error", err => {
            t.end(err);
        });

        composed.on("drain", () => {
            expect(composed._writableState.length).to.be.equal(0);
            expect(performance.now() - start).to.be.lessThan(_rate);
            t.pass();
        });

        composed.on("data", (chunk: Chunk) => {
            expect(composed._writableState.length).to.be.equal(0);
            t.pass();
            if (chunk.key === "e") {
                t.end();
            }
        });

        const input = [
            { key: "a", mapped: [] },
            { key: "b", mapped: [] },
            { key: "c", mapped: [] },
            { key: "d", mapped: [] },
            { key: "e", mapped: [] },
        ];
        let pendingReads = input.length;

        input.forEach(item => {
            composed.write(item);
        });

        const start = performance.now();
    },
);
test.cb(
    "compose() should emit drain event and first should contain up to highWaterMark items in readable state when second is bottleneck",
    t => {
        t.plan(6);
        interface Chunk {
            index: number;
            mapped: string[];
        }
        const first = map(
            async (chunk: Chunk) => {
                expect(first._readableState.length).to.be.at.most(2);
                chunk.mapped.push("first");
                return chunk;
            },
            {
                objectMode: true,
                highWaterMark: 2,
            },
        );

        const second = map(
            async (chunk: Chunk) => {
                expect(second._writableState.length).to.be.equal(1);
                await sleep(100);
                chunk.mapped.push("second");
                return chunk;
            },
            { objectMode: true, highWaterMark: 2 },
        );

        const composed = compose(
            [first, second],
            { objectMode: true, highWaterMark: 5 },
        );
        composed.on("error", err => {
            t.end(err);
        });

        composed.on("data", (chunk: Chunk) => {
            expect(chunk.mapped.length).to.equal(2);
            expect(chunk.mapped).to.deep.equal(["first", "second"]);
            t.pass();
            if (chunk.index === 5) {
                t.end();
            }
        });

        composed.on("drain", () => {
            expect(composed._writableState.length).to.be.equal(0);
            t.pass();
        });

        const input = [
            { index: 1, mapped: [] },
            { index: 2, mapped: [] },
            { index: 3, mapped: [] },
            { index: 4, mapped: [] },
            { index: 5, mapped: [] },
        ];

        input.forEach(item => {
            composed.write(item);
        });
    },
);
test.cb(
    "compose() should not emit drain event writing 5 items to compose with a highWaterMark of 6",
    t => {
        t.plan(5);
        const _rate = 100;
        interface Chunk {
            key: string;
            mapped: number[];
        }
        const first = map(
            async (chunk: Chunk) => {
                await sleep(_rate);
                chunk.mapped.push(1);
                return chunk;
            },
            {
                objectMode: true,
            },
        );

        const second = map(
            async (chunk: Chunk) => {
                chunk.mapped.push(2);
                return chunk;
            },
            { objectMode: true },
        );

        const composed = compose(
            [first, second],
            { objectMode: true, highWaterMark: 6 },
        );

        composed.on("error", err => {
            t.end(err);
        });

        composed.on("drain", () => {
            t.end(new Error("Drain should not be emitted"));
        });

        composed.on("data", (chunk: Chunk) => {
            t.pass();
            if (chunk.key === "e") {
                t.end();
            }
        });

        const input = [
            { key: "a", mapped: [] },
            { key: "b", mapped: [] },
            { key: "c", mapped: [] },
            { key: "d", mapped: [] },
            { key: "e", mapped: [] },
        ];

        input.forEach(item => {
            composed.write(item);
        });
    },
);
181 tests/concat.spec.ts Normal file
@@ -0,0 +1,181 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import mhysa from "../src";
const { concat, collect } = mhysa();

test.cb(
    "concat() concatenates multiple readable streams (object, flowing mode)",
    t => {
        t.plan(6);
        const source1 = new Readable({ objectMode: true });
        const source2 = new Readable({ objectMode: true });
        const expectedElements = ["a", "b", "c", "d", "e", "f"];
        let i = 0;
        concat(source1, source2)
            .on("data", (element: string) => {
                expect(element).to.equal(expectedElements[i]);
                t.pass();
                i++;
            })
            .on("error", t.end)
            .on("end", t.end);

        source1.push("a");
        source2.push("d");
        source1.push("b");
        source2.push("e");
        source1.push("c");
        source2.push("f");
        source2.push(null);
        source1.push(null);
    },
);

test.cb(
    "concat() concatenates multiple readable streams (object, paused mode)",
    t => {
        t.plan(6);
        const source1 = new Readable({ objectMode: true });
        const source2 = new Readable({ objectMode: true });
        const expectedElements = ["a", "b", "c", "d", "e", "f"];
        let i = 0;
        const concatenation = concat(source1, source2)
            .on("readable", () => {
                let element = concatenation.read();
                while (element !== null) {
                    expect(element).to.equal(expectedElements[i]);
                    t.pass();
                    i++;
                    element = concatenation.read();
                }
            })
            .on("error", t.end)
            .on("end", t.end);

        source1.push("a");
        source2.push("d");
        source1.push("b");
        source2.push("e");
        source1.push("c");
        source2.push("f");
        source2.push(null);
        source1.push(null);
    },
);

test.cb(
    "concat() concatenates multiple readable streams (non-object, flowing mode)",
    t => {
        t.plan(6);
        const source1 = new Readable({ objectMode: false });
        const source2 = new Readable({ objectMode: false });
        const expectedElements = ["a", "b", "c", "d", "e", "f"];
        let i = 0;
        concat(source1, source2)
            .on("data", (element: string) => {
                expect(element).to.deep.equal(Buffer.from(expectedElements[i]));
                t.pass();
                i++;
            })
            .on("error", t.end)
            .on("end", t.end);

        source1.push("a");
        source2.push("d");
        source1.push("b");
        source2.push("e");
        source1.push("c");
        source2.push("f");
        source2.push(null);
        source1.push(null);
    },
);

test.cb(
    "concat() concatenates multiple readable streams (non-object, paused mode)",
    t => {
        t.plan(6);
        const source1 = new Readable({ objectMode: false, read: () => ({}) });
        const source2 = new Readable({ objectMode: false, read: () => ({}) });
        const expectedElements = ["a", "b", "c", "d", "e", "f"];
        let i = 0;
        const concatenation = concat(source1, source2)
            .on("readable", () => {
                let element = concatenation.read();
                while (element !== null) {
                    expect(element).to.deep.equal(
                        Buffer.from(expectedElements[i]),
                    );
                    t.pass();
                    i++;
                    element = concatenation.read();
                }
            })
            .on("error", t.end)
            .on("end", t.end);

        source1.push("a");
        setTimeout(() => source2.push("d"), 10);
        setTimeout(() => source1.push("b"), 20);
        setTimeout(() => source2.push("e"), 30);
        setTimeout(() => source1.push("c"), 40);
        setTimeout(() => source2.push("f"), 50);
        setTimeout(() => source2.push(null), 60);
        setTimeout(() => source1.push(null), 70);
    },
);

test.cb("concat() concatenates a single readable stream (object mode)", t => {
    t.plan(3);
    const source = new Readable({ objectMode: true });
    const expectedElements = ["a", "b", "c", "d", "e", "f"];
    let i = 0;
    concat(source)
        .on("data", (element: string) => {
            expect(element).to.equal(expectedElements[i]);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);

    source.push("a");
    source.push("b");
    source.push("c");
    source.push(null);
});

test.cb(
    "concat() concatenates a single readable stream (non-object mode)",
    t => {
        t.plan(3);
        const source = new Readable({ objectMode: false });
        const expectedElements = ["a", "b", "c", "d", "e", "f"];
        let i = 0;
        concat(source)
            .on("data", (element: string) => {
                expect(element).to.deep.equal(Buffer.from(expectedElements[i]));
                t.pass();
                i++;
            })
            .on("error", t.end)
            .on("end", t.end);

        source.push("a");
        source.push("b");
        source.push("c");
        source.push(null);
    },
);

test.cb("concat() concatenates empty list of readable streams", t => {
    t.plan(0);
    concat()
        .pipe(collect())
        .on("data", _ => {
            t.fail();
        })
        .on("error", t.end)
        .on("end", t.end);
});
21 tests/defaultOptions.spec.ts Normal file
@@ -0,0 +1,21 @@
import { Readable } from "stream";
import test from "ava";
import mhysa from "../src";

const withDefaultOptions = mhysa({ objectMode: true });
const withoutOptions = mhysa();

test("Mhysa instances can have default options", t => {
    let batch = withDefaultOptions.batch();
    t.true(batch._readableState.objectMode);
    t.true(batch._writableState.objectMode);
    batch = withDefaultOptions.batch(3);
    t.true(batch._readableState.objectMode);
    t.true(batch._writableState.objectMode);
    batch = withDefaultOptions.batch(3, 1);
    t.true(batch._readableState.objectMode);
    t.true(batch._writableState.objectMode);
    batch = withDefaultOptions.batch(3, 1, { objectMode: false });
    t.false(batch._readableState.objectMode);
    t.false(batch._writableState.objectMode);
});
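This spec pins down the option-merging behaviour: per-call options override instance-wide defaults. A minimal sketch of that pattern (names and the default argument values below are illustrative, not mhysa's internals):

interface Options {
    objectMode?: boolean;
}

// Illustrative factory: each instance closes over its defaults and
// shallow-merges per-call options over them, so the call site wins.
function makeInstance(defaults: Options = {}) {
    return {
        batch(size = 10, maxWait = 100, options: Options = {}) {
            const merged = { ...defaults, ...options };
            // A real implementation would construct a Transform here;
            // we just return the merged options to show precedence.
            return { size, maxWait, ...merged };
        },
    };
}

const instance = makeInstance({ objectMode: true });
instance.batch();                            // objectMode: true (from defaults)
instance.batch(3, 1, { objectMode: false }); // objectMode: false (call wins)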
657 tests/demux.spec.ts Normal file
@@ -0,0 +1,657 @@
import test from "ava";
import { expect } from "chai";
import mhysa from "../src";
import { Writable } from "stream";
const sinon = require("sinon");
const { sleep } = require("../src/helpers");
import { performance } from "perf_hooks";
const { demux, map } = mhysa();

interface Test {
    key: string;
    visited: number[];
}

test.cb("demux() constructor should be called once per key", t => {
    t.plan(1);
    const input = [
        { key: "a", visited: [] },
        { key: "b", visited: [] },
        { key: "a", visited: [] },
        { key: "c", visited: [] },
        { key: "a", visited: [] },
        { key: "b", visited: [] },
    ];
    const construct = sinon.spy((destKey: string) => {
        const dest = map((chunk: Test) => {
            chunk.visited.push(1);
            return chunk;
        });

        return dest;
    });

    const demuxed = demux(construct, "key", { objectMode: true });

    demuxed.on("finish", () => {
        expect(construct.withArgs("a").callCount).to.equal(1);
        expect(construct.withArgs("b").callCount).to.equal(1);
        expect(construct.withArgs("c").callCount).to.equal(1);
        t.pass();
        t.end();
    });

    input.forEach(event => demuxed.write(event));
    demuxed.end();
});

test.cb("demux() should send input through correct pipeline", t => {
    t.plan(6);
    const input = [
        { key: "a", visited: [] },
        { key: "b", visited: [] },
        { key: "a", visited: [] },
        { key: "c", visited: [] },
        { key: "a", visited: [] },
        { key: "b", visited: [] },
    ];
    const pipelineSpies = {};
    const construct = (destKey: string) => {
        const mapper = sinon.spy((chunk: Test) => {
            return { ...chunk, visited: [1] };
        });
        const dest = map(mapper);
        pipelineSpies[destKey] = mapper;

        return dest;
    };

    const demuxed = demux(construct, "key", { objectMode: true });

    demuxed.on("finish", () => {
        pipelineSpies["a"].getCalls().forEach(call => {
            expect(call.args[0].key).to.equal("a");
            t.pass();
        });
        pipelineSpies["b"].getCalls().forEach(call => {
            expect(call.args[0].key).to.equal("b");
            t.pass();
        });
        pipelineSpies["c"].getCalls().forEach(call => {
            expect(call.args[0].key).to.equal("c");
            t.pass();
        });
        t.end();
    });

    input.forEach(event => demuxed.write(event));
    demuxed.end();
});

test.cb("demux() constructor should be called once per key using keyBy", t => {
    t.plan(1);
    const input = [
        { key: "a", visited: [] },
        { key: "b", visited: [] },
        { key: "a", visited: [] },
        { key: "c", visited: [] },
        { key: "a", visited: [] },
        { key: "b", visited: [] },
    ];

    const construct = sinon.spy((destKey: string) => {
        const dest = map((chunk: Test) => {
            chunk.visited.push(1);
            return chunk;
        });

        return dest;
    });

    const demuxed = demux(construct, item => item.key, { objectMode: true });

    demuxed.on("finish", () => {
        expect(construct.withArgs("a").callCount).to.equal(1);
        expect(construct.withArgs("b").callCount).to.equal(1);
        expect(construct.withArgs("c").callCount).to.equal(1);
        t.pass();
        t.end();
    });

    input.forEach(event => demuxed.write(event));
    demuxed.end();
});

test.cb("demux() should send input through correct pipeline using keyBy", t => {
    t.plan(6);
    const input = [
        { key: "a", visited: [] },
        { key: "b", visited: [] },
        { key: "a", visited: [] },
        { key: "c", visited: [] },
        { key: "a", visited: [] },
        { key: "b", visited: [] },
    ];
    const pipelineSpies = {};
    const construct = (destKey: string) => {
        const mapper = sinon.spy((chunk: Test) => {
            return { ...chunk, visited: [1] };
        });
        const dest = map(mapper);
        pipelineSpies[destKey] = mapper;

        return dest;
    };

    const demuxed = demux(construct, item => item.key, { objectMode: true });

    demuxed.on("finish", () => {
        pipelineSpies["a"].getCalls().forEach(call => {
            expect(call.args[0].key).to.equal("a");
            t.pass();
        });
        pipelineSpies["b"].getCalls().forEach(call => {
            expect(call.args[0].key).to.equal("b");
            t.pass();
        });
        pipelineSpies["c"].getCalls().forEach(call => {
            expect(call.args[0].key).to.equal("c");
            t.pass();
        });
        t.end();
    });

    input.forEach(event => demuxed.write(event));
    demuxed.end();
});
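As the four specs above show, the key selector comes in two interchangeable forms, a property name or an extractor function, and both route chunks identically (reusing demux, construct, and the Test interface from this file):

// Both key-selector forms shown above route identically; `construct`
// is any per-key pipeline factory, as in the specs.
const byProperty = demux(construct, "key", { objectMode: true });
const byFunction = demux(construct, (item: Test) => item.key, { objectMode: true });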
test("demux() write should return false if it has >= highWaterMark items buffered and drain should be emitted", t => {
    return new Promise(async (resolve, reject) => {
        t.plan(7);
        interface Chunk {
            key: string;
            mapped: number[];
        }
        const input: Chunk[] = [
            { key: "a", mapped: [] },
            { key: "a", mapped: [] },
            { key: "a", mapped: [] },
            { key: "a", mapped: [] },
            { key: "a", mapped: [] },
            { key: "a", mapped: [] },
        ];
        let pendingReads = input.length;
        const highWaterMark = 5;
        const slowProcessorSpeed = 25;
        const construct = (destKey: string) => {
            const first = map(
                async (chunk: Chunk) => {
                    await sleep(slowProcessorSpeed);
                    return { ...chunk, mapped: [1] };
                },
                { highWaterMark: 1, objectMode: true },
            );

            first.on("data", chunk => {
                expect(chunk.mapped).to.deep.equal([1]);
                pendingReads--;
                if (pendingReads === 0) {
                    resolve();
                }
                t.pass();
            });

            return first;
        };

        const _demux = demux(construct, "key", {
            objectMode: true,
            highWaterMark,
        });

        _demux.on("error", err => {
            reject();
        });

        for (const item of input) {
            const res = _demux.write(item);
            expect(_demux._writableState.length).to.be.at.most(highWaterMark);
            if (!res) {
                await new Promise((resolv, rej) => {
                    _demux.once("drain", () => {
                        expect(_demux._writableState.length).to.be.equal(0);
                        t.pass();
                        resolv();
                    });
                });
            }
        }
    });
});
test("demux() should emit one drain event after slowProcessorSpeed * highWaterMark ms", t => {
    return new Promise(async (resolve, reject) => {
        t.plan(7);
        interface Chunk {
            key: string;
            mapped: number[];
        }
        const input: Chunk[] = [
            { key: "a", mapped: [] },
            { key: "a", mapped: [] },
            { key: "a", mapped: [] },
            { key: "a", mapped: [] },
            { key: "a", mapped: [] },
            { key: "a", mapped: [] },
        ];
        let pendingReads = input.length;
        const highWaterMark = 5;
        const slowProcessorSpeed = 25;

        const construct = (destKey: string) => {
            const first = map(
                async (chunk: Chunk) => {
                    await sleep(slowProcessorSpeed);
                    chunk.mapped.push(1);
                    return chunk;
                },
                { highWaterMark: 1, objectMode: true },
            );

            first.on("data", () => {
                t.pass();
                pendingReads--;
                if (pendingReads === 0) {
                    resolve();
                }
            });
            return first;
        };
        const _demux = demux(construct, "key", {
            objectMode: true,
            highWaterMark,
        });
        _demux.on("error", err => {
            reject();
        });

        const start = performance.now();
        for (const item of input) {
            const res = _demux.write(item);
            if (!res) {
                await new Promise((resolv, rej) => {
                    // This event should be received after all items in demux are processed
                    _demux.once("drain", () => {
                        expect(performance.now() - start).to.be.greaterThan(
                            slowProcessorSpeed * highWaterMark,
                        );
                        t.pass();
                        resolv();
                    });
                });
            }
        }
    });
});
test("demux() should emit one drain event when writing 6 items with highWaterMark of 5", t => {
    return new Promise(async (resolve, reject) => {
        t.plan(7);
        interface Chunk {
            key: string;
            mapped: number[];
        }
        const highWaterMark = 5;
        const input = [
            { key: "a", mapped: [] },
            { key: "a", mapped: [] },
            { key: "a", mapped: [] },
            { key: "a", mapped: [] },
            { key: "a", mapped: [] },
            { key: "a", mapped: [] },
        ];
        let pendingReads = input.length;
        const construct = (destKey: string) => {
            const first = map(
                async (chunk: Chunk) => {
                    await sleep(50);
                    chunk.mapped.push(2);
                    return chunk;
                },
                { highWaterMark: 1, objectMode: true },
            );

            first.on("data", () => {
                pendingReads--;
                t.pass();
                if (pendingReads === 0) {
                    resolve();
                }
            });
            return first;
        };
        const _demux = demux(construct, "key", {
            objectMode: true,
            highWaterMark: 5,
        });

        _demux.on("error", err => {
            reject();
        });

        for (const item of input) {
            const res = _demux.write(item);
            expect(_demux._writableState.length).to.be.at.most(highWaterMark);
            if (!res) {
                await new Promise(_resolve => {
                    _demux.once("drain", () => {
                        _resolve();
                        expect(_demux._writableState.length).to.be.equal(0);
                        t.pass();
                    });
                });
            }
        }
    });
});
test.cb.only(
    "demux() should emit drain event when third stream is bottleneck",
    t => {
        t.plan(8);
        const slowProcessorSpeed = 100;
        const highWaterMark = 5;
        interface Chunk {
            key: string;
            mapped: number[];
        }
        const sink = new Writable({
            objectMode: true,
            write(chunk, encoding, cb) {
                expect(chunk.mapped).to.deep.equal([1, 2]);
                t.pass();
                pendingReads--;
                if (pendingReads === 0) {
                    t.end();
                }
                cb();
            },
        });
        const construct = (destKey: string) => {
            const first = map(
                (chunk: Chunk) => {
                    chunk.mapped.push(1);
                    return chunk;
                },
                { objectMode: true, highWaterMark: 1 },
            );

            const second = map(
                async (chunk: Chunk) => {
                    await sleep(slowProcessorSpeed);
                    chunk.mapped.push(2);
                    return chunk;
                },
                { objectMode: true, highWaterMark: 1 },
            );

            first.pipe(second).pipe(sink);
            return first;
        };
        const _demux = demux(construct, () => "a", {
            objectMode: true,
            highWaterMark,
        });
        _demux.on("error", err => {
            t.end(err);
        });

        // This event should be received after at least 5 * slowProcessorSpeed (two are read immediately by first and second, 5 remaining in demux before drain event)
        _demux.on("drain", () => {
            expect(_demux._writableState.length).to.be.equal(0);
            expect(performance.now() - start).to.be.greaterThan(
                slowProcessorSpeed * (input.length - 2),
            );
            t.pass();
        });

        const input = [
            { key: "a", mapped: [] },
            { key: "b", mapped: [] },
            { key: "c", mapped: [] },
            { key: "d", mapped: [] },
            { key: "e", mapped: [] },
            { key: "f", mapped: [] },
            { key: "g", mapped: [] },
        ];
        let pendingReads = input.length;

        const start = performance.now();
        input.forEach(item => {
            _demux.write(item);
        });
    },
);
test.cb(
    "demux() should emit drain event when second stream is bottleneck",
    t => {
        t.plan(8);
        const slowProcessorSpeed = 100;
        const highWaterMark = 5;
        interface Chunk {
            key: string;
            mapped: number[];
        }
        const sink = new Writable({
            objectMode: true,
            write(chunk, encoding, cb) {
                expect(chunk.mapped).to.deep.equal([1, 2, 3]);
                t.pass();
                pendingReads--;
                if (pendingReads === 0) {
                    t.end();
                }
                cb();
            },
        });
        const construct = (destKey: string) => {
            const first = map(
                (chunk: Chunk) => {
                    chunk.mapped.push(1);
                    return chunk;
                },
                { objectMode: true, highWaterMark: 1 },
            );
            const second = map(
                (chunk: Chunk) => {
                    chunk.mapped.push(2);
                    return chunk;
                },
                { objectMode: true, highWaterMark: 1 },
            );

            const third = map(
                async (chunk: Chunk) => {
                    await sleep(slowProcessorSpeed);
                    chunk.mapped.push(3);
                    return chunk;
                },
                { objectMode: true, highWaterMark: 1 },
            );

            first
                .pipe(second)
                .pipe(third)
                .pipe(sink);
            return first;
        };
        const _demux = demux(construct, () => "a", {
            objectMode: true,
            highWaterMark,
        });
        _demux.on("error", err => {
            t.end(err);
        });

        // This event should be received after at least 3 * slowProcessorSpeed (two are read immediately by first and second, 3 remaining in demux before drain event)
        _demux.on("drain", () => {
            expect(_demux._writableState.length).to.be.equal(0);
            expect(performance.now() - start).to.be.greaterThan(
                slowProcessorSpeed * (input.length - 4),
            );
            t.pass();
        });

        const input = [
            { key: "a", mapped: [] },
            { key: "b", mapped: [] },
            { key: "c", mapped: [] },
            { key: "d", mapped: [] },
            { key: "e", mapped: [] },
            { key: "f", mapped: [] },
            { key: "g", mapped: [] },
        ];
        let pendingReads = input.length;

        const start = performance.now();
        input.forEach(item => {
            _demux.write(item);
        });
    },
);
test("demux() should be blocked by slowest pipeline", t => {
    t.plan(1);
    const slowProcessorSpeed = 100;
    interface Chunk {
        key: string;
        mapped: number[];
    }
    return new Promise(async (resolve, reject) => {
        const construct = (destKey: string) => {
            const first = map(
                async (chunk: Chunk) => {
                    await sleep(slowProcessorSpeed);
                    chunk.mapped.push(1);
                    return chunk;
                },
                { objectMode: true, highWaterMark: 1 },
            );

            first.on("data", chunk => {
                pendingReads--;
                if (chunk.key === "b") {
                    expect(performance.now() - start).to.be.greaterThan(
                        slowProcessorSpeed * totalItems,
                    );
                    t.pass();
                    expect(pendingReads).to.equal(0);
                    resolve();
                }
            });
            return first;
        };
        const _demux = demux(construct, "key", {
            objectMode: true,
            highWaterMark: 1,
        });
        _demux.on("error", err => {
            reject(err);
        });

        const input = [
            { key: "a", mapped: [] },
            { key: "a", mapped: [] },
            { key: "c", mapped: [] },
            { key: "c", mapped: [] },
            { key: "c", mapped: [] },
            { key: "b", mapped: [] },
        ];

        let pendingReads = input.length;
        const totalItems = input.length;
        const start = performance.now();
        for (const item of input) {
            if (!_demux.write(item)) {
                await new Promise(_resolve => {
                    _demux.once("drain", () => {
                        _resolve();
                    });
                });
            }
        }
    });
});
test("demux() should emit drain event when second stream in pipeline is bottleneck", t => {
    t.plan(5);
    const highWaterMark = 3;
    return new Promise(async (resolve, reject) => {
        interface Chunk {
            key: string;
            mapped: number[];
        }
        const sink = new Writable({
            objectMode: true,
            write(chunk, encoding, cb) {
                expect(chunk.mapped).to.deep.equal([1, 2]);
                t.pass();
                cb();
                if (pendingReads === 0) {
                    resolve();
                }
            },
        });

        const construct = (destKey: string) => {
            const first = map(
                (chunk: Chunk) => {
                    expect(first._readableState.length).to.be.at.most(2);
                    chunk.mapped.push(1);
                    return chunk;
                },
                { objectMode: true, highWaterMark: 2 },
            );

            const second = map(
                async (chunk: Chunk) => {
                    await sleep(100);
                    chunk.mapped.push(2);
                    expect(second._writableState.length).to.be.equal(1);
                    pendingReads--;
                    return chunk;
                },
                { objectMode: true, highWaterMark: 1 },
            );

            first.pipe(second).pipe(sink);
            return first;
        };

        const _demux = demux(construct, "key", {
            objectMode: true,
            highWaterMark,
        });
        _demux.on("error", err => {
            reject();
        });

        _demux.on("drain", () => {
            expect(_demux._writableState.length).to.be.equal(0);
            t.pass();
        });

        const input = [
            { key: "a", mapped: [] },
            { key: "a", mapped: [] },
            { key: "a", mapped: [] },
            { key: "a", mapped: [] },
        ];
        let pendingReads = input.length;

        input.forEach(item => {
            _demux.write(item);
        });
    });
});
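Taken together, these specs describe the demux contract: one pipeline is lazily constructed per distinct key, chunks are routed to their key's pipeline, and backpressure from any pipeline propagates back to the demux writable. A stripped-down sketch of that routing idea using only Node core (an illustration of the tested behaviour, not mhysa's implementation):

import { Writable } from "stream";

// Minimal demux sketch: lazily create one destination per key and
// forward each chunk, honouring the destination's backpressure.
function demuxSketch<T>(
    construct: (key: string) => Writable,
    keyBy: (chunk: T) => string,
): Writable {
    const destinations = new Map<string, Writable>();
    return new Writable({
        objectMode: true,
        write(chunk: T, _enc, callback) {
            const key = keyBy(chunk);
            let dest = destinations.get(key);
            if (dest === undefined) {
                dest = construct(key); // constructor runs once per key
                destinations.set(key, dest);
            }
            // If the destination's buffer is full, delay our callback
            // until it drains, so backpressure reaches our writer too.
            if (dest.write(chunk)) {
                callback();
            } else {
                dest.once("drain", callback);
            }
        },
        final(callback) {
            for (const dest of destinations.values()) {
                dest.end();
            }
            callback();
        },
    });
}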
29 tests/duplex.spec.ts Normal file
@@ -0,0 +1,29 @@
import * as cp from "child_process";
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import mhysa from "../src";
const { duplex } = mhysa();

test.cb(
    "duplex() combines a writable and readable stream into a ReadWrite stream",
    t => {
        t.plan(1);
        const source = new Readable();
        const catProcess = cp.exec("cat");
        let out = "";
        source
            .pipe(duplex(catProcess.stdin!, catProcess.stdout!))
            .on("data", chunk => (out += chunk))
            .on("error", t.end)
            .on("end", () => {
                expect(out).to.equal("abcdef");
                t.pass();
                t.end();
            });
        source.push("ab");
        source.push("cd");
        source.push("ef");
        source.push(null);
    },
);
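The helper tested here glues an external process's stdin and stdout into a single stream. A minimal sketch of that combination with Node core's Duplex (a simplified take on the idea, not mhysa's implementation):

import { Duplex, Readable, Writable } from "stream";

// Combine a writable and a readable into one Duplex:
// writes go to `writable`, reads come from `readable`.
function duplexSketch(writable: Writable, readable: Readable): Duplex {
    const stream = new Duplex({
        write(chunk, encoding, callback) {
            writable.write(chunk, encoding, callback);
        },
        final(callback) {
            writable.end(callback);
        },
        read() {
            readable.resume(); // resume flow when the consumer wants data
        },
    });
    readable
        .on("data", chunk => {
            if (!stream.push(chunk)) {
                readable.pause(); // respect downstream backpressure
            }
        })
        .on("end", () => stream.push(null));
    return stream;
}

With cp.exec("cat"), duplexSketch(catProcess.stdin!, catProcess.stdout!) would behave like the spec's duplex(...) call above.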
117 tests/filter.spec.ts Normal file
@@ -0,0 +1,117 @@
import test from "ava";
import { expect } from "chai";
import { Readable } from "stream";
import mhysa from "../src";
const { filter } = mhysa();

test.cb("filter() filters elements synchronously", t => {
    t.plan(2);
    const source = new Readable({ objectMode: true });
    const expectedElements = ["a", "c"];
    let i = 0;
    source
        .pipe(
            filter((element: string) => element !== "b", {
                readableObjectMode: true,
                writableObjectMode: true,
            }),
        )
        .on("data", (element: string) => {
            expect(element).to.equal(expectedElements[i]);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);

    source.push("a");
    source.push("b");
    source.push("c");
    source.push(null);
});

test.cb("filter() filters elements asynchronously", t => {
    t.plan(2);
    const source = new Readable({ objectMode: true });
    const expectedElements = ["a", "c"];
    let i = 0;
    source
        .pipe(
            filter(
                async (element: string) => {
                    await Promise.resolve();
                    return element !== "b";
                },
                { readableObjectMode: true, writableObjectMode: true },
            ),
        )
        .on("data", (element: string) => {
            expect(element).to.equal(expectedElements[i]);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);

    source.push("a");
    source.push("b");
    source.push("c");
    source.push(null);
});

test.cb.skip("filter() emits errors during synchronous filtering", t => {
    t.plan(2);
    const source = new Readable({ objectMode: true });
    source
        .pipe(
            filter(
                (element: string) => {
                    if (element !== "a") {
                        throw new Error("Failed filtering");
                    }
                    return true;
                },
                { readableObjectMode: true, writableObjectMode: true },
            ),
        )
        .resume()
        .on("error", err => {
            expect(err.message).to.equal("Failed filtering");
            t.pass();
        })
        .on("end", t.end);

    source.push("a");
    source.push("b");
    source.push("c");
    source.push(null);
});

test.cb.skip("filter() emits errors during asynchronous filtering", t => {
    t.plan(2);
    const source = new Readable({ objectMode: true });
    source
        .pipe(
            filter(
                async (element: string) => {
                    await Promise.resolve();
                    if (element !== "a") {
                        throw new Error("Failed filtering");
                    }
                    return true;
                },
                { readableObjectMode: true, writableObjectMode: true },
            ),
        )
        .resume()
        .on("error", err => {
            expect(err.message).to.equal("Failed filtering");
            t.pass();
        })
        .on("end", t.end);

    source.push("a");
    source.push("b");
    source.push("c");
    source.push(null);
});
101 tests/flatMap.spec.ts Normal file
@@ -0,0 +1,101 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import mhysa from "../src";
const { flatMap } = mhysa({ objectMode: true });

test.cb("flatMap() maps elements synchronously", t => {
    t.plan(6);
    const source = new Readable({ objectMode: true });
    const expectedElements = ["a", "A", "b", "B", "c", "C"];
    let i = 0;
    source
        .pipe(flatMap((element: string) => [element, element.toUpperCase()]))
        .on("data", (element: string) => {
            expect(element).to.equal(expectedElements[i]);
            t.pass();
            i++;
        })
        .on("end", t.end);

    source.push("a");
    source.push("b");
    source.push("c");
    source.push(null);
});

test.cb("flatMap() maps elements asynchronously", t => {
    t.plan(6);
    const source = new Readable({ objectMode: true });
    const expectedElements = ["a", "A", "b", "B", "c", "C"];
    let i = 0;
    source
        .pipe(
            flatMap(async (element: string) => {
                await Promise.resolve();
                return [element, element.toUpperCase()];
            }),
        )
        .on("data", (element: string) => {
            expect(element).to.equal(expectedElements[i]);
            t.pass();
            i++;
        })
        .on("end", t.end);

    source.push("a");
    source.push("b");
    source.push("c");
    source.push(null);
});

test.cb.skip("flatMap() emits errors during synchronous mapping", t => {
    t.plan(2);
    const source = new Readable({ objectMode: true });
    source
        .pipe(
            flatMap((element: string) => {
                if (element !== "a") {
                    throw new Error("Failed mapping");
                }
                return [element, element.toUpperCase()];
            }),
        )
        .resume()
        .on("error", err => {
            expect(err.message).to.equal("Failed mapping");
            t.pass();
        })
        .on("end", t.end);

    source.push("a");
    source.push("b");
    source.push("c");
    source.push(null);
});

test.cb.skip("flatMap() emits errors during asynchronous mapping", t => {
    t.plan(2);
    const source = new Readable({ objectMode: true });
    source
        .pipe(
            flatMap(async (element: string) => {
                await Promise.resolve();
                if (element !== "a") {
                    throw new Error("Failed mapping");
                }
                return [element, element.toUpperCase()];
            }),
        )
        .resume()
        .on("error", err => {
            expect(err.message).to.equal("Failed mapping");
            t.pass();
        })
        .on("end", t.end);

    source.push("a");
    source.push("b");
    source.push("c");
    source.push(null);
});
46 tests/fromArray.spec.ts Normal file
@@ -0,0 +1,46 @@
import test from "ava";
import { expect } from "chai";
import mhysa from "../src";
const { fromArray } = mhysa();

test.cb("fromArray() streams array elements in flowing mode", t => {
    t.plan(3);
    const elements = ["a", "b", "c"];
    const stream = fromArray(elements);
    let i = 0;
    stream
        .on("data", (element: string) => {
            expect(element).to.equal(elements[i]);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);
});

test.cb("fromArray() ends immediately if there are no array elements", t => {
    t.plan(0);
    fromArray([])
        .on("data", () => t.fail())
        .on("error", t.end)
        .on("end", t.end);
});

test.cb("fromArray() streams array elements in paused mode", t => {
    t.plan(3);
    const elements = ["a", "b", "c"];
    const stream = fromArray(elements);
    let i = 0;
    stream
        .on("readable", () => {
            let element = stream.read();
            while (element !== null) {
                expect(element).to.equal(elements[i]);
                t.pass();
                i++;
                element = stream.read();
            }
        })
        .on("error", t.end)
        .on("end", t.end);
});
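For comparison, modern Node (12.3+) ships Readable.from, which covers the same array-to-stream case these specs pin down:

import { Readable } from "stream";

// Readable.from yields each array element as one object-mode chunk
// and ends the stream automatically, like fromArray() above.
const stream = Readable.from(["a", "b", "c"]);
stream.on("data", element => console.log(element)); // a, b, c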
57 tests/join.spec.ts Normal file
@@ -0,0 +1,57 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import mhysa from "../src";
const { join } = mhysa();

test.cb("join() joins chunks using the specified separator", t => {
    t.plan(9);
    const source = new Readable({ objectMode: true });
    const expectedParts = ["ab|", "|", "c|d", "|", "|", "|", "e", "|", "|f|"];
    let i = 0;
    source
        .pipe(join("|"))
        .on("data", part => {
            expect(part).to.equal(expectedParts[i]);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);

    source.push("ab|");
    source.push("c|d");
    source.push("|");
    source.push("e");
    source.push("|f|");
    source.push(null);
});

test.cb(
    "join() joins chunks using the specified separator without breaking up multi-byte characters " +
        "spanning multiple chunks",
    t => {
        t.plan(5);
        const source = new Readable({ objectMode: true });
        const expectedParts = ["ø", "|", "ö", "|", "一"];
        let i = 0;
        source
            .pipe(join("|"))
            .on("data", part => {
                expect(part).to.equal(expectedParts[i]);
                t.pass();
                i++;
            })
            .on("error", t.end)
            .on("end", t.end);

        source.push(Buffer.from("ø").slice(0, 1)); // 2-byte character spanning two chunks
        source.push(Buffer.from("ø").slice(1, 2));
        source.push(Buffer.from("ö").slice(0, 1)); // 2-byte character spanning two chunks
        source.push(Buffer.from("ö").slice(1, 2));
        source.push(Buffer.from("一").slice(0, 1)); // 3-byte character spanning three chunks
        source.push(Buffer.from("一").slice(1, 2));
        source.push(Buffer.from("一").slice(2, 3));
        source.push(null);
    },
);
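The multi-byte case is the interesting one: a naive chunk.toString() would split ø, ö, or 一 across chunk boundaries. Node's string_decoder module handles exactly this by buffering trailing partial code points, which is presumably the kind of machinery a join() like the one tested above needs internally:

import { StringDecoder } from "string_decoder";

const decoder = new StringDecoder("utf8");
const bytes = Buffer.from("一"); // 3-byte UTF-8 character

// Feed the character one byte at a time; write() returns only
// complete characters and buffers the incomplete tail.
console.log(decoder.write(bytes.slice(0, 1))); // ""
console.log(decoder.write(bytes.slice(1, 2))); // ""
console.log(decoder.write(bytes.slice(2, 3))); // "一"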
16 tests/last.spec.ts Normal file
@@ -0,0 +1,16 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import mhysa from "../src";
const { last } = mhysa();

test("last() resolves to the last chunk streamed by the given readable stream", async t => {
    const source = new Readable({ objectMode: true });
    const lastPromise = last(source);
    source.push("ab");
    source.push("cd");
    source.push("ef");
    source.push(null);
    const lastChunk = await lastPromise;
    expect(lastChunk).to.equal("ef");
});
57 tests/map.spec.ts Normal file
@@ -0,0 +1,57 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import mhysa from "../src";
const { map } = mhysa();

test.cb("map() maps elements synchronously", t => {
    t.plan(3);
    const source = new Readable({ objectMode: true });
    const mapStream = map((element: string) => element.toUpperCase(), {
        objectMode: true,
    });
    const expectedElements = ["A", "B", "C"];
    let i = 0;
    source
        .pipe(mapStream)
        .on("data", (element: string) => {
            expect(element).to.equal(expectedElements[i]);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);

    source.push("a");
    source.push("b");
    source.push("c");
    source.push(null);
});

test.cb("map() maps elements asynchronously", t => {
    t.plan(3);
    const source = new Readable({ objectMode: true });
    const mapStream = map(
        async (element: string) => {
            await Promise.resolve();
            return element.toUpperCase();
        },
        { objectMode: true },
    );
    const expectedElements = ["A", "B", "C"];
    let i = 0;
    source
        .pipe(mapStream)
        .on("data", (element: string) => {
            expect(element).to.equal(expectedElements[i]);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);

    source.push("a");
    source.push("b");
    source.push("c");
    source.push(null);
});
61 tests/merge.spec.ts Normal file
@@ -0,0 +1,61 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import mhysa from "../src";
const { merge } = mhysa();

test.cb(
    "merge() merges multiple readable streams in chunk arrival order",
    t => {
        t.plan(6);
        const source1 = new Readable({ objectMode: true, read: () => ({}) });
        const source2 = new Readable({ objectMode: true, read: () => ({}) });
        const expectedElements = ["a", "d", "b", "e", "c", "f"];
        let i = 0;
        merge(source1, source2)
            .on("data", (element: string) => {
                expect(element).to.equal(expectedElements[i]);
                t.pass();
                i++;
            })
            .on("error", t.end)
            .on("end", t.end);

        source1.push("a");
        setTimeout(() => source2.push("d"), 10);
        setTimeout(() => source1.push("b"), 20);
        setTimeout(() => source2.push("e"), 30);
        setTimeout(() => source1.push("c"), 40);
        setTimeout(() => source2.push("f"), 50);
        setTimeout(() => source2.push(null), 60);
        setTimeout(() => source1.push(null), 70);
    },
);

test.cb("merge() merges a readable stream", t => {
    t.plan(3);
    const source = new Readable({ objectMode: true, read: () => ({}) });
    const expectedElements = ["a", "b", "c"];
    let i = 0;
    merge(source)
        .on("data", (element: string) => {
            expect(element).to.equal(expectedElements[i]);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);

    source.push("a");
    source.push("b");
    source.push("c");
    source.push(null);
});

test.cb("merge() merges an empty list of readable streams", t => {
    t.plan(0);
    merge()
        .on("data", () => t.pass())
        .on("error", t.end)
        .on("end", t.end);
});
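Read against tests/concat.spec.ts above, these specs fix the semantic difference between the two combinators: concat(a, b) drains a completely before emitting anything from b, while merge(a, b) interleaves chunks in arrival order. That is why the same staggered pushes yield "a, b, c, d, e, f" in the concat specs and "a, d, b, e, c, f" here.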
78 tests/parallelMap.spec.ts Normal file
@@ -0,0 +1,78 @@
import { Readable } from "stream";
import { performance } from "perf_hooks";
import test from "ava";
import { expect } from "chai";
import mhysa from "../src";
import { sleep } from "../src/helpers";
const { parallelMap } = mhysa({ objectMode: true });

test.cb("parallelMap() parallel mapping", t => {
    t.plan(6);
    const offset = 50;
    const source = new Readable({ objectMode: true });
    const expectedElements = [
        "a_processed",
        "b_processed",
        "c_processed",
        "d_processed",
        "e_processed",
        "f_processed",
    ];
    interface IPerfData {
        start: number;
        output?: string;
        finish?: number;
    }
    const orderedResults: IPerfData[] = [];
    source
        .pipe(
            parallelMap(async (data: any) => {
                const perfData: IPerfData = { start: performance.now() };
                const c = data + "_processed";
                perfData.output = c;
                await sleep(offset);
                perfData.finish = performance.now();
                orderedResults.push(perfData);
                return c;
            }, 2),
        )
        .on("data", (element: string) => {
            t.true(expectedElements.includes(element));
        })
        .on("error", t.end)
        .on("end", async () => {
            expect(orderedResults[0].finish).to.be.lessThan(
                orderedResults[2].start,
            );
            expect(orderedResults[1].finish).to.be.lessThan(
                orderedResults[3].start,
            );
            expect(orderedResults[2].finish).to.be.lessThan(
                orderedResults[4].start,
            );
            expect(orderedResults[3].finish).to.be.lessThan(
                orderedResults[5].start,
            );
            expect(orderedResults[0].start).to.be.lessThan(
                orderedResults[2].start + offset,
            );
            expect(orderedResults[1].start).to.be.lessThan(
                orderedResults[3].start + offset,
            );
            expect(orderedResults[2].start).to.be.lessThan(
                orderedResults[4].start + offset,
            );
            expect(orderedResults[3].start).to.be.lessThan(
                orderedResults[5].start + offset,
            );
            t.end();
        });

    source.push("a");
    source.push("b");
    source.push("c");
    source.push("d");
    source.push("e");
    source.push("f");
    source.push(null);
});
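The assertions encode the scheduling guarantee: with a concurrency of 2, element k may only start after element k-2 has finished. A generic sketch of such a concurrency-limited async map, independent of streams (the function name and shape are illustrative, not mhysa's internals):

// Run `fn` over `items` with at most `limit` invocations in flight.
async function mapLimit<T, R>(
    items: T[],
    limit: number,
    fn: (item: T) => Promise<R>,
): Promise<R[]> {
    const results: R[] = new Array(items.length);
    let next = 0;
    // Each worker repeatedly claims the next unclaimed index; since JS is
    // single-threaded, the synchronous `next++` claim cannot race.
    const workers = Array.from({ length: limit }, async () => {
        while (next < items.length) {
            const index = next++;
            results[index] = await fn(items[index]);
        }
    });
    await Promise.all(workers);
    return results;
}

// Usage: at most two mappings run concurrently, mirroring parallelMap(fn, 2).
mapLimit(["a", "b", "c", "d"], 2, async s => s + "_processed");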
41 tests/parse.spec.ts Normal file
@@ -0,0 +1,41 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import mhysa from "../src";
const { parse } = mhysa();

test.cb("parse() parses the streamed elements as JSON", t => {
    t.plan(3);
    const source = new Readable({ objectMode: true });
    const expectedElements = ["abc", {}, []];
    let i = 0;
    source
        .pipe(parse())
        .on("data", part => {
            expect(part).to.deep.equal(expectedElements[i]);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);

    source.push('"abc"');
    source.push("{}");
    source.push("[]");
    source.push(null);
});

test.cb("parse() emits errors on invalid JSON", t => {
    t.plan(2);
    const source = new Readable({ objectMode: true });
    source
        .pipe(parse())
        .resume()
        .on("error", () => t.pass())
        .on("end", t.end);

    source.push("{}");
    source.push({});
    source.push([]);
    source.push(null);
});
90 tests/rate.spec.ts Normal file
@@ -0,0 +1,90 @@
import { Readable } from "stream";
import { performance } from "perf_hooks";
import test from "ava";
import { expect } from "chai";
import mhysa from "../src";
const { rate } = mhysa({ objectMode: true });

test.cb("rate() sends data at a rate of 150", t => {
    t.plan(5);
    const targetRate = 150;
    const source = new Readable({ objectMode: true });
    const expectedElements = ["a", "b", "c", "d", "e"];
    const start = performance.now();
    let i = 0;

    source
        .pipe(rate(targetRate))
        .on("data", (element: string[]) => {
            const currentRate = (i / (performance.now() - start)) * 1000;
            expect(element).to.deep.equal(expectedElements[i]);
            expect(currentRate).lessThan(targetRate);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);

    source.push("a");
    source.push("b");
    source.push("c");
    source.push("d");
    source.push("e");
    source.push(null);
});

test.cb("rate() sends data at a rate of 50", t => {
    t.plan(5);
    const targetRate = 50;
    const source = new Readable({ objectMode: true });
    const expectedElements = ["a", "b", "c", "d", "e"];
    const start = performance.now();
    let i = 0;

    source
        .pipe(rate(targetRate))
        .on("data", (element: string[]) => {
            const currentRate = (i / (performance.now() - start)) * 1000;
            expect(element).to.deep.equal(expectedElements[i]);
            expect(currentRate).lessThan(targetRate);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);

    source.push("a");
    source.push("b");
    source.push("c");
    source.push("d");
    source.push("e");
    source.push(null);
});

test.cb("rate() sends data at a rate of 1", t => {
    t.plan(5);
    const targetRate = 1;
    const source = new Readable({ objectMode: true });
    const expectedElements = ["a", "b", "c", "d", "e"];
    const start = performance.now();
    let i = 0;

    source
        .pipe(rate(targetRate))
        .on("data", (element: string[]) => {
            const currentRate = (i / (performance.now() - start)) * 1000;
            expect(element).to.deep.equal(expectedElements[i]);
            expect(currentRate).lessThan(targetRate);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);

    source.push("a");
    source.push("b");
    source.push("c");
    source.push("d");
    source.push("e");
    source.push(null);
});
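The assertion i / elapsed < targetRate suggests the operator delays chunks so the running average rate never exceeds the target. A minimal sketch of that idea as a Node Transform (an illustration of the tested contract, not mhysa's implementation):

import { Transform } from "stream";

// Emit at most `targetRate` chunks per second by delaying each chunk
// until the running average rate falls to or below the target.
function rateLimitSketch(targetRate: number): Transform {
    const start = Date.now();
    let count = 0;
    return new Transform({
        objectMode: true,
        async transform(chunk, _enc, done) {
            count++;
            // Earliest time (ms since start) at which `count` emissions
            // keep the average at or under targetRate per second.
            const minElapsed = (count / targetRate) * 1000;
            const wait = minElapsed - (Date.now() - start);
            if (wait > 0) {
                await new Promise(resolve => setTimeout(resolve, wait));
            }
            done(null, chunk);
        },
    });
}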
99 tests/reduce.spec.ts Normal file
@@ -0,0 +1,99 @@
import { Readable } from "stream";
|
||||||
|
import test from "ava";
|
||||||
|
import { expect } from "chai";
|
||||||
|
import mhysa from "../src";
|
||||||
|
const { reduce } = mhysa({ objectMode: true });
|
||||||
|
|
||||||
|
test.cb("reduce() reduces elements synchronously", t => {
|
||||||
|
t.plan(1);
|
||||||
|
const source = new Readable({ objectMode: true });
|
||||||
|
const expectedValue = 6;
|
||||||
|
source
|
||||||
|
.pipe(reduce((acc: number, element: string) => acc + element.length, 0))
|
||||||
|
.on("data", (element: string) => {
|
||||||
|
expect(element).to.equal(expectedValue);
|
||||||
|
t.pass();
|
||||||
|
})
|
||||||
|
.on("error", t.end)
|
||||||
|
.on("end", t.end);
|
||||||
|
|
||||||
|
source.push("ab");
|
||||||
|
source.push("cd");
|
||||||
|
source.push("ef");
|
||||||
|
source.push(null);
|
||||||
|
});
|
||||||
|
|
||||||
|
test.cb("reduce() reduces elements asynchronously", t => {
|
||||||
|
t.plan(1);
|
||||||
|
const source = new Readable({ objectMode: true });
|
||||||
|
const expectedValue = 6;
|
||||||
|
source
|
||||||
|
.pipe(
|
||||||
|
reduce(async (acc: number, element: string) => {
|
||||||
|
await Promise.resolve();
|
||||||
|
return acc + element.length;
|
||||||
|
}, 0),
|
||||||
|
)
|
||||||
|
.on("data", (element: string) => {
|
||||||
|
expect(element).to.equal(expectedValue);
|
||||||
|
t.pass();
|
||||||
|
})
|
||||||
|
.on("error", t.end)
|
||||||
|
.on("end", t.end);
|
||||||
|
|
||||||
|
source.push("ab");
|
||||||
|
source.push("cd");
|
||||||
|
source.push("ef");
|
||||||
|
source.push(null);
|
||||||
|
});
|
||||||
|
|
||||||
|
test.cb.skip("reduce() emits errors during synchronous reduce", t => {
|
||||||
|
t.plan(2);
|
||||||
|
const source = new Readable({ objectMode: true });
|
||||||
|
source
|
||||||
|
.pipe(
|
||||||
|
reduce((acc: number, element: string) => {
|
||||||
|
if (element !== "ab") {
|
||||||
|
throw new Error("Failed reduce");
|
||||||
|
}
|
||||||
|
return acc + element.length;
|
||||||
|
}, 0),
|
||||||
|
)
|
||||||
|
.resume()
|
||||||
|
.on("error", err => {
|
||||||
|
expect(err.message).to.equal("Failed reduce");
|
||||||
|
t.pass();
|
||||||
|
})
|
||||||
|
.on("end", t.end);
|
||||||
|
|
||||||
|
source.push("ab");
|
||||||
|
source.push("cd");
|
||||||
|
source.push("ef");
|
||||||
|
source.push(null);
|
||||||
|
});
|
||||||
|
|
||||||
|
test.cb.skip("reduce() emits errors during asynchronous reduce", t => {
|
||||||
|
t.plan(2);
|
||||||
|
const source = new Readable({ objectMode: true });
|
||||||
|
source
|
||||||
|
.pipe(
|
||||||
|
reduce(async (acc: number, element: string) => {
|
||||||
|
await Promise.resolve();
|
||||||
|
if (element !== "ab") {
|
||||||
|
throw new Error("Failed mapping");
|
||||||
|
}
|
||||||
|
return acc + element.length;
|
||||||
|
}, 0),
|
||||||
|
)
|
||||||
|
.resume()
|
||||||
|
.on("error", err => {
|
||||||
|
expect(err.message).to.equal("Failed mapping");
|
||||||
|
t.pass();
|
||||||
|
})
|
||||||
|
.on("end", t.end);
|
||||||
|
|
||||||
|
source.push("ab");
|
||||||
|
source.push("cd");
|
||||||
|
source.push("ef");
|
||||||
|
source.push(null);
|
||||||
|
});
|
||||||
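For context, a standalone sketch of reduce(): it applies an (accumulator, element) reducer, synchronous or async, starting from a seed value, and emits the final accumulator as a single element once the source ends. Only the reducer signature and seed come from the tests; the surrounding wiring is illustrative.

import { Readable } from "stream";
import mhysa from "../src";

const { reduce } = mhysa({ objectMode: true });

const source = new Readable({ objectMode: true, read() {} });

// Logs 6 once the source ends (2 + 2 + 2 characters).
source
    .pipe(reduce((acc: number, element: string) => acc + element.length, 0))
    .on("data", (total: number) => console.log(total));

["ab", "cd", "ef"].forEach(x => source.push(x));
source.push(null);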
81 tests/replace.spec.ts Normal file
@@ -0,0 +1,81 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import mhysa from "../src";
const { replace } = mhysa();

test.cb(
    "replace() replaces occurrences of the given string in the streamed elements with the specified " +
        "replacement string",
    t => {
        t.plan(3);
        const source = new Readable({ objectMode: true });
        const expectedElements = ["abc", "xyf", "ghi"];
        let i = 0;
        source
            .pipe(replace("de", "xy"))
            .on("data", part => {
                expect(part).to.equal(expectedElements[i]);
                t.pass();
                i++;
            })
            .on("error", t.end)
            .on("end", t.end);

        source.push("abc");
        source.push("def");
        source.push("ghi");
        source.push(null);
    },
);

test.cb(
    "replace() replaces occurrences of the given regular expression in the streamed elements with " +
        "the specified replacement string",
    t => {
        t.plan(3);
        const source = new Readable({ objectMode: true });
        const expectedElements = ["abc", "xyz", "ghi"];
        let i = 0;
        source
            .pipe(replace(/^def$/, "xyz"))
            .on("data", part => {
                expect(part).to.equal(expectedElements[i]);
                t.pass();
                i++;
            })
            .on("error", t.end)
            .on("end", t.end);

        source.push("abc");
        source.push("def");
        source.push("ghi");
        source.push(null);
    },
);

test.cb(
    "replace() replaces occurrences of the given multi-byte character even if it spans multiple chunks",
    t => {
        t.plan(3);
        const source = new Readable({ objectMode: true });
        const expectedElements = ["ø", "O", "a"];
        let i = 0;
        source
            .pipe(replace("ö", "O"))
            .on("data", part => {
                expect(part).to.equal(expectedElements[i]);
                t.pass();
                i++;
            })
            .on("error", t.end)
            .on("end", t.end);

        source.push(Buffer.from("ø").slice(0, 1)); // 2-byte character spanning two chunks
        source.push(Buffer.from("ø").slice(1, 2));
        source.push(Buffer.from("ö").slice(0, 1)); // 2-byte character spanning two chunks
        source.push(Buffer.from("ö").slice(1, 2));
        source.push("a");
        source.push(null);
    },
);
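A standalone sketch of replace(), which per the tests accepts either a string or a RegExp as the pattern and copes with multi-byte characters split across chunks. The default mhysa() factory (no objectMode) follows the tests; the toString() on the emitted part is an illustrative assumption about the output encoding.

import { Readable } from "stream";
import mhysa from "../src";

const { replace } = mhysa();

const source = new Readable({ read() {} });

// "def" comes out as "xyf"; a RegExp such as /^def$/ works the same way.
source.pipe(replace("de", "xy")).on("data", part => console.log(part.toString()));

source.push("abc");
source.push("def");
source.push(null);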
99 tests/split.spec.ts Normal file
@@ -0,0 +1,99 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import mhysa from "../src";
const { split } = mhysa();

test.cb("split() splits chunks using the default separator (\\n)", t => {
    t.plan(5);
    const source = new Readable({ objectMode: true });
    const expectedParts = ["ab", "c", "d", "ef", ""];
    let i = 0;
    source
        .pipe(split())
        .on("data", part => {
            expect(part).to.equal(expectedParts[i]);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);

    source.push("ab\n");
    source.push("c");
    source.push("\n");
    source.push("d");
    source.push("\nef\n");
    source.push(null);
});

test.cb("split() splits chunks using the specified separator", t => {
    t.plan(6);
    const source = new Readable({ objectMode: true });
    const expectedParts = ["ab", "c", "d", "e", "f", ""];
    let i = 0;
    source
        .pipe(split("|"))
        .on("data", (part: string) => {
            expect(part).to.equal(expectedParts[i]);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);

    source.push("ab|");
    source.push("c|d");
    source.push("|");
    source.push("e");
    source.push("|f|");
    source.push(null);
});

test.cb(
    "split() splits utf8 encoded buffers using the specified separator",
    t => {
        t.plan(3);
        const expectedElements = ["a", "b", "c"];
        let i = 0;
        const through = split(",");
        const buf = Buffer.from("a,b,c");
        through
            .on("data", element => {
                expect(element).to.equal(expectedElements[i]);
                i++;
                t.pass();
            })
            .on("error", t.end)
            .on("end", t.end);

        for (let j = 0; j < buf.length; ++j) {
            through.write(buf.slice(j, j + 1));
        }
        through.end();
    },
);

test.cb(
    "split() splits utf8 encoded buffers with multi-byte characters using the specified separator",
    t => {
        t.plan(3);
        const expectedElements = ["一", "一", "一"];
        let i = 0;
        const through = split(",");
        const buf = Buffer.from("一,一,一"); // "一" (U+4E00) is a multi-byte utf8 character
        through
            .on("data", element => {
                expect(element).to.equal(expectedElements[i]);
                i++;
                t.pass();
            })
            .on("error", t.end)
            .on("end", t.end);

        for (let j = 0; j < buf.length; ++j) {
            through.write(buf.slice(j, j + 1));
        }
        through.end();
    },
);
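A standalone sketch of split(): the transform returned by split(separator) is a regular duplex stream, so it can be written to directly, exactly as the buffer-based tests above do. Only the call shape is taken from the tests; the inputs here are illustrative.

import mhysa from "../src";

const { split } = mhysa();

const through = split(",");
through.on("data", part => console.log(part)); // logs "a", then "b", then "c"

// Separators may arrive split across writes; split() reassembles the parts.
through.write("a,b");
through.write(",c");
through.end();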
62 tests/stringify.spec.ts Normal file
@@ -0,0 +1,62 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import mhysa from "../src";
const { stringify } = mhysa();

test.cb("stringify() stringifies the streamed elements as JSON", t => {
    t.plan(4);
    const source = new Readable({ objectMode: true });
    const expectedElements = [
        '"abc"',
        "0",
        '{"a":"a","b":"b","c":"c"}',
        '["a","b","c"]',
    ];
    let i = 0;
    source
        .pipe(stringify())
        .on("data", part => {
            expect(part).to.deep.equal(expectedElements[i]);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);

    source.push("abc");
    source.push(0);
    source.push({ a: "a", b: "b", c: "c" });
    source.push(["a", "b", "c"]);
    source.push(null);
});

test.cb(
    "stringify() stringifies the streamed elements as pretty-printed JSON",
    t => {
        t.plan(4);
        const source = new Readable({ objectMode: true });
        const expectedElements = [
            '"abc"',
            "0",
            '{\n "a": "a",\n "b": "b",\n "c": "c"\n}',
            '[\n "a",\n "b",\n "c"\n]',
        ];
        let i = 0;
        source
            .pipe(stringify({ pretty: true }))
            .on("data", part => {
                expect(part).to.deep.equal(expectedElements[i]);
                t.pass();
                i++;
            })
            .on("error", t.end)
            .on("end", t.end);

        source.push("abc");
        source.push(0);
        source.push({ a: "a", b: "b", c: "c" });
        source.push(["a", "b", "c"]);
        source.push(null);
    },
);
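A standalone sketch of stringify(), which JSON-encodes each streamed element; passing { pretty: true } switches to indented output, as the second test above shows. The source wiring here is illustrative.

import { Readable } from "stream";
import mhysa from "../src";

const { stringify } = mhysa();

const source = new Readable({ objectMode: true, read() {} });

// Logs '{"a":"a"}'; stringify({ pretty: true }) would indent it instead.
source.pipe(stringify()).on("data", json => console.log(json));

source.push({ a: "a" });
source.push(null);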
27 tests/unbatch.spec.ts Normal file
@@ -0,0 +1,27 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import mhysa from "../src";
const { unbatch, batch } = mhysa({ objectMode: true });

test.cb("unbatch() unbatches", t => {
    t.plan(3);
    const source = new Readable({ objectMode: true });
    const expectedElements = ["a", "b", "c"];
    let i = 0;
    source
        .pipe(batch(3))
        .pipe(unbatch())
        .on("data", (element: string) => {
            expect(element).to.equal(expectedElements[i]);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);

    source.push("a");
    source.push("b");
    source.push("c");
    source.push(null);
});
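A standalone sketch of the batch()/unbatch() pair the test above round-trips: per the test, batch(3) groups three elements into one array and unbatch() flattens it back into individual elements. The batch size and source are illustrative.

import { Readable } from "stream";
import mhysa from "../src";

const { batch, unbatch } = mhysa({ objectMode: true });

const source = new Readable({ objectMode: true, read() {} });

// Emits "a", "b", "c" individually after grouping them into one array of three.
source
    .pipe(batch(3))
    .pipe(unbatch())
    .on("data", (element: string) => console.log(element));

["a", "b", "c"].forEach(x => source.push(x));
source.push(null);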
tsconfig.json
@@ -3,16 +3,19 @@
         "noImplicitAny": true,
         "strictNullChecks": true,
         "noImplicitReturns": true,
-        "noUnusedLocals": true,
+        "noUnusedLocals": false,
         "noImplicitThis": true,
         "forceConsistentCasingInFileNames": true,
         "suppressImplicitAnyIndexErrors": true,
         "outDir": "./dist",
-        "module": "commonjs",
-        "target": "es5",
-        "lib": ["es2016"],
-        "sourceMap": true,
-        "declaration": true
+        "module": "commonjs"
     },
-    "include": ["src/**/*.ts"]
+    "target": "es5",
+    "lib": [
+        "es2016"
+    ],
+    "sourceMap": true,
+    "declaration": true,
+    "include": ["src/**/*"],
+    "exclude": ["tests", "node_modules"]
 }
tslint.json
@@ -9,6 +9,7 @@
         "no-implicit-dependencies": [true, "dev"],
         "prettier": [true, ".prettierrc"],
         "ordered-imports": false,
-        "interface-name": false
+        "interface-name": false,
+        "object-literal-sort-keys": false
     }
 }
379 yarn.lock
@@ -37,41 +37,41 @@
     imurmurhash "^0.1.4"
     slide "^1.1.5"
 
-"@babel/code-frame@^7.0.0":
-  version "7.0.0"
-  resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.0.0.tgz#06e2ab19bdb535385559aabb5ba59729482800f8"
-  integrity sha512-OfC2uemaknXr87bdLUkWog7nYuliM9Ij5HUcajsVcMCpQrcLmtxRbVFTIqmcSkSeYRBFBRxs2FiUqFJDLdiebA==
+"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.5.5":
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.5.5.tgz#bc0782f6d69f7b7d49531219699b988f669a8f9d"
+  integrity sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw==
   dependencies:
     "@babel/highlight" "^7.0.0"
 
 "@babel/core@^7.4.0":
-  version "7.5.0"
-  resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.5.0.tgz#6ed6a2881ad48a732c5433096d96d1b0ee5eb734"
-  integrity sha512-6Isr4X98pwXqHvtigw71CKgmhL1etZjPs5A67jL/w0TkLM9eqmFR40YrnJvEc1WnMZFsskjsmid8bHZyxKEAnw==
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.5.5.tgz#17b2686ef0d6bc58f963dddd68ab669755582c30"
+  integrity sha512-i4qoSr2KTtce0DmkuuQBV4AuQgGPUcPXMr9L5MyYAtk06z068lQ10a4O009fe5OB/DfNV+h+qqT7ddNV8UnRjg==
   dependencies:
-    "@babel/code-frame" "^7.0.0"
-    "@babel/generator" "^7.5.0"
-    "@babel/helpers" "^7.5.0"
-    "@babel/parser" "^7.5.0"
+    "@babel/code-frame" "^7.5.5"
+    "@babel/generator" "^7.5.5"
+    "@babel/helpers" "^7.5.5"
+    "@babel/parser" "^7.5.5"
     "@babel/template" "^7.4.4"
-    "@babel/traverse" "^7.5.0"
-    "@babel/types" "^7.5.0"
+    "@babel/traverse" "^7.5.5"
+    "@babel/types" "^7.5.5"
     convert-source-map "^1.1.0"
     debug "^4.1.0"
     json5 "^2.1.0"
-    lodash "^4.17.11"
+    lodash "^4.17.13"
     resolve "^1.3.2"
     semver "^5.4.1"
     source-map "^0.5.0"
 
-"@babel/generator@^7.0.0", "@babel/generator@^7.4.0", "@babel/generator@^7.5.0":
-  version "7.5.0"
-  resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.5.0.tgz#f20e4b7a91750ee8b63656073d843d2a736dca4a"
-  integrity sha512-1TTVrt7J9rcG5PMjvO7VEG3FrEoEJNHxumRq66GemPmzboLWtIjjcJgk8rokuAS7IiRSpgVSu5Vb9lc99iJkOA==
+"@babel/generator@^7.0.0", "@babel/generator@^7.4.0", "@babel/generator@^7.5.5":
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.5.5.tgz#873a7f936a3c89491b43536d12245b626664e3cf"
+  integrity sha512-ETI/4vyTSxTzGnU2c49XHv2zhExkv9JHLTwDAFz85kmcwuShvYG2H08FwgIguQf4JC75CBnXAUM5PqeF4fj0nQ==
   dependencies:
-    "@babel/types" "^7.5.0"
+    "@babel/types" "^7.5.5"
     jsesc "^2.5.1"
-    lodash "^4.17.11"
+    lodash "^4.17.13"
     source-map "^0.5.0"
     trim-right "^1.0.1"
 
@@ -122,16 +122,16 @@
     "@babel/types" "^7.0.0"
 
 "@babel/helper-module-transforms@^7.4.4":
-  version "7.4.4"
-  resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.4.4.tgz#96115ea42a2f139e619e98ed46df6019b94414b8"
-  integrity sha512-3Z1yp8TVQf+B4ynN7WoHPKS8EkdTbgAEy0nU0rs/1Kw4pDgmvYH3rz3aI11KgxKCba2cn7N+tqzV1mY2HMN96w==
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.5.5.tgz#f84ff8a09038dcbca1fd4355661a500937165b4a"
+  integrity sha512-jBeCvETKuJqeiaCdyaheF40aXnnU1+wkSiUs/IQg3tB85up1LyL8x77ClY8qJpuRJUcXQo+ZtdNESmZl4j56Pw==
   dependencies:
     "@babel/helper-module-imports" "^7.0.0"
     "@babel/helper-simple-access" "^7.1.0"
     "@babel/helper-split-export-declaration" "^7.4.4"
     "@babel/template" "^7.4.4"
-    "@babel/types" "^7.4.4"
-    lodash "^4.17.11"
+    "@babel/types" "^7.5.5"
+    lodash "^4.17.13"
 
 "@babel/helper-plugin-utils@^7.0.0":
   version "7.0.0"
@@ -139,11 +139,11 @@
   integrity sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA==
 
 "@babel/helper-regex@^7.4.4":
-  version "7.4.4"
-  resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.4.4.tgz#a47e02bc91fb259d2e6727c2a30013e3ac13c4a2"
-  integrity sha512-Y5nuB/kESmR3tKjU8Nkn1wMGEx1tjJX076HBMeL3XLQCu6vA/YRzuTW0bbb+qRnXvQGn+d6Rx953yffl8vEy7Q==
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.5.5.tgz#0aa6824f7100a2e0e89c1527c23936c152cab351"
+  integrity sha512-CkCYQLkfkiugbRDO8eZn6lRuR8kzZoGXCg3149iTk5se7g6qykSpy3+hELSwquhu+TgHn8nkLiBwHvNX8Hofcw==
   dependencies:
-    lodash "^4.17.11"
+    lodash "^4.17.13"
 
 "@babel/helper-remap-async-to-generator@^7.1.0":
   version "7.1.0"
@@ -181,14 +181,14 @@
     "@babel/traverse" "^7.1.0"
     "@babel/types" "^7.2.0"
 
-"@babel/helpers@^7.5.0":
-  version "7.5.1"
-  resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.5.1.tgz#65407c741a56ddd59dd86346cd112da3de912db3"
-  integrity sha512-rVOTDv8sH8kNI72Unenusxw6u+1vEepZgLxeV+jHkhsQlYhzVhzL1EpfoWT7Ub3zpWSv2WV03V853dqsnyoQzA==
+"@babel/helpers@^7.5.5":
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.5.5.tgz#63908d2a73942229d1e6685bc2a0e730dde3b75e"
+  integrity sha512-nRq2BUhxZFnfEn/ciJuhklHvFOqjJUD5wpx+1bxUF2axL9C+v4DE/dmp5sT2dKnpOs4orZWzpAZqlCy8QqE/7g==
   dependencies:
     "@babel/template" "^7.4.4"
-    "@babel/traverse" "^7.5.0"
-    "@babel/types" "^7.5.0"
+    "@babel/traverse" "^7.5.5"
+    "@babel/types" "^7.5.5"
 
 "@babel/highlight@^7.0.0":
   version "7.5.0"
@@ -199,10 +199,10 @@
     esutils "^2.0.2"
     js-tokens "^4.0.0"
 
-"@babel/parser@^7.0.0", "@babel/parser@^7.4.4", "@babel/parser@^7.5.0":
-  version "7.5.0"
-  resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.5.0.tgz#3e0713dff89ad6ae37faec3b29dcfc5c979770b7"
-  integrity sha512-I5nW8AhGpOXGCCNYGc+p7ExQIBxRFnS2fd/d862bNOKvmoEPjYPcfIjsfdy0ujagYOIYPczKgD9l3FsgTkAzKA==
+"@babel/parser@^7.0.0", "@babel/parser@^7.4.4", "@babel/parser@^7.5.5":
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.5.5.tgz#02f077ac8817d3df4a832ef59de67565e71cca4b"
+  integrity sha512-E5BN68cqR7dhKan1SfqgPGhQ178bkVKpXTPEXnFJBrEt8/DKRZlybmy+IgYLTeN7tp1R5Ccmbm2rBk17sHYU3g==
 
 "@babel/plugin-proposal-async-generator-functions@^7.0.0":
   version "7.2.0"
@@ -214,9 +214,9 @@
     "@babel/plugin-syntax-async-generators" "^7.2.0"
 
 "@babel/plugin-proposal-object-rest-spread@^7.0.0":
-  version "7.5.1"
-  resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.5.1.tgz#5788ab097c63135e4236548b4f112bfce09dd394"
-  integrity sha512-PVGXx5LYHcT7L4MdoE+rM5uq68IKlvU9lljVQ4OXY6aUEnGvezcGbM4VNY57Ug+3R2Zg/nYHlEdiWoIBoRA0mw==
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.5.5.tgz#61939744f71ba76a3ae46b5eea18a54c16d22e58"
+  integrity sha512-F2DxJJSQ7f64FyTVl5cw/9MWn6naXGdk3Q3UhDbFEEHv+EilCPoeRD3Zh/Utx1CJz4uyKlQ4uH+bJPbEhMV7Zw==
   dependencies:
     "@babel/helper-plugin-utils" "^7.0.0"
     "@babel/plugin-syntax-object-rest-spread" "^7.2.0"
@@ -295,28 +295,28 @@
     "@babel/parser" "^7.4.4"
     "@babel/types" "^7.4.4"
 
-"@babel/traverse@^7.1.0", "@babel/traverse@^7.5.0":
-  version "7.5.0"
-  resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.5.0.tgz#4216d6586854ef5c3c4592dab56ec7eb78485485"
-  integrity sha512-SnA9aLbyOCcnnbQEGwdfBggnc142h/rbqqsXcaATj2hZcegCl903pUD/lfpsNBlBSuWow/YDfRyJuWi2EPR5cg==
+"@babel/traverse@^7.1.0", "@babel/traverse@^7.5.5":
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.5.5.tgz#f664f8f368ed32988cd648da9f72d5ca70f165bb"
+  integrity sha512-MqB0782whsfffYfSjH4TM+LMjrJnhCNEDMDIjeTpl+ASaUvxcjoiVCo/sM1GhS1pHOXYfWVCYneLjMckuUxDaQ==
   dependencies:
-    "@babel/code-frame" "^7.0.0"
-    "@babel/generator" "^7.5.0"
+    "@babel/code-frame" "^7.5.5"
+    "@babel/generator" "^7.5.5"
     "@babel/helper-function-name" "^7.1.0"
     "@babel/helper-split-export-declaration" "^7.4.4"
-    "@babel/parser" "^7.5.0"
-    "@babel/types" "^7.5.0"
+    "@babel/parser" "^7.5.5"
+    "@babel/types" "^7.5.5"
     debug "^4.1.0"
     globals "^11.1.0"
-    lodash "^4.17.11"
+    lodash "^4.17.13"
 
-"@babel/types@^7.0.0", "@babel/types@^7.2.0", "@babel/types@^7.4.4", "@babel/types@^7.5.0":
-  version "7.5.0"
-  resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.5.0.tgz#e47d43840c2e7f9105bc4d3a2c371b4d0c7832ab"
-  integrity sha512-UFpDVqRABKsW01bvw7/wSUe56uy6RXM5+VJibVVAybDGxEW25jdwiFJEf7ASvSaC7sN7rbE/l3cLp2izav+CtQ==
+"@babel/types@^7.0.0", "@babel/types@^7.2.0", "@babel/types@^7.4.4", "@babel/types@^7.5.5":
+  version "7.5.5"
+  resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.5.5.tgz#97b9f728e182785909aa4ab56264f090a028d18a"
+  integrity sha512-s63F9nJioLqOlW3UkyMd+BYhXt44YuaFm/VV0VwuteqjYwRrObkU7ra9pY4wAJR3oXi8hJrMcrcJdO/HH33vtw==
   dependencies:
     esutils "^2.0.2"
-    lodash "^4.17.11"
+    lodash "^4.17.13"
     to-fast-properties "^2.0.0"
 
 "@concordance/react@^2.0.0":
@@ -326,10 +326,39 @@
   dependencies:
     arrify "^1.0.1"
 
+"@sinonjs/commons@^1", "@sinonjs/commons@^1.3.0", "@sinonjs/commons@^1.4.0":
+  version "1.6.0"
+  resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.6.0.tgz#ec7670432ae9c8eb710400d112c201a362d83393"
+  integrity sha512-w4/WHG7C4WWFyE5geCieFJF6MZkbW4VAriol5KlmQXpAQdxvV0p26sqNZOW6Qyw6Y0l9K4g+cHvvczR2sEEpqg==
+  dependencies:
+    type-detect "4.0.8"
+
+"@sinonjs/formatio@^3.2.1":
+  version "3.2.1"
+  resolved "https://registry.yarnpkg.com/@sinonjs/formatio/-/formatio-3.2.1.tgz#52310f2f9bcbc67bdac18c94ad4901b95fde267e"
+  integrity sha512-tsHvOB24rvyvV2+zKMmPkZ7dXX6LSLKZ7aOtXY6Edklp0uRcgGpOsQTTGTcWViFyx4uhWc6GV8QdnALbIbIdeQ==
+  dependencies:
+    "@sinonjs/commons" "^1"
+    "@sinonjs/samsam" "^3.1.0"
+
+"@sinonjs/samsam@^3.1.0", "@sinonjs/samsam@^3.3.3":
+  version "3.3.3"
+  resolved "https://registry.yarnpkg.com/@sinonjs/samsam/-/samsam-3.3.3.tgz#46682efd9967b259b81136b9f120fd54585feb4a"
+  integrity sha512-bKCMKZvWIjYD0BLGnNrxVuw4dkWCYsLqFOUWw8VgKF/+5Y+mE7LfHWPIYoDXowH+3a9LsWDMo0uAP8YDosPvHQ==
+  dependencies:
+    "@sinonjs/commons" "^1.3.0"
+    array-from "^2.1.1"
+    lodash "^4.17.15"
+
+"@sinonjs/text-encoding@^0.7.1":
+  version "0.7.1"
+  resolved "https://registry.yarnpkg.com/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz#8da5c6530915653f3a1f38fd5f101d8c3f8079c5"
+  integrity sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ==
+
 "@types/chai@^4.1.7":
-  version "4.1.7"
-  resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.1.7.tgz#1b8e33b61a8c09cbe1f85133071baa0dbf9fa71a"
-  integrity sha512-2Y8uPt0/jwjhQ6EiluT0XCri1Dbplr0ZxfFXUz+ye13gaqE8u5gL5ppao1JrUYr9cIip5S6MvQzBS7Kke7U9VA==
+  version "4.2.0"
+  resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.2.0.tgz#2478260021408dec32c123a7cad3414beb811a07"
+  integrity sha512-zw8UvoBEImn392tLjxoavuonblX/4Yb9ha4KBU10FirCfwgzhKO0dvyJSF9ByxV1xK1r2AgnAi/tvQaLgxQqxA==
 
 "@types/events@*":
   version "3.0.0"
@@ -350,22 +379,15 @@
   resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d"
   integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA==
 
-"@types/node@*":
-  version "12.0.12"
-  resolved "https://registry.yarnpkg.com/@types/node/-/node-12.0.12.tgz#cc791b402360db1eaf7176479072f91ee6c6c7ca"
-  integrity sha512-Uy0PN4R5vgBUXFoJrKryf5aTk3kJ8Rv3PdlHjl6UaX+Cqp1QE0yPQ68MPXGrZOfG7gZVNDIJZYyot0B9ubXUrQ==
+"@types/node@*", "@types/node@^12.7.2":
+  version "12.7.2"
+  resolved "https://registry.yarnpkg.com/@types/node/-/node-12.7.2.tgz#c4e63af5e8823ce9cc3f0b34f7b998c2171f0c44"
+  integrity sha512-dyYO+f6ihZEtNPDcWNR1fkoTDf3zAK3lAABDze3mz6POyIercH0lEUawUFXlG8xaQZmm1yEBON/4TsYv/laDYg==
 
-"@types/node@^10.12.10":
-  version "10.14.12"
-  resolved "https://registry.yarnpkg.com/@types/node/-/node-10.14.12.tgz#0eec3155a46e6c4db1f27c3e588a205f767d622f"
-  integrity sha512-QcAKpaO6nhHLlxWBvpc4WeLrTvPqlHOvaj0s5GriKkA1zq+bsFBPpfYCvQhLqLgYlIko8A9YrPdaMHCo5mBcpg==
+"@types/sinon@^7.0.13":
+  version "7.0.13"
+  resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-7.0.13.tgz#ca039c23a9e27ebea53e0901ef928ea2a1a6d313"
+  integrity sha512-d7c/C/+H/knZ3L8/cxhicHUiTDxdgap0b/aNJfsmLwFu/iOP17mdgbQsbHA3SJmrzsjD0l3UEE5SN4xxuz5ung==
 
-"@types/typescript@^2.0.0":
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/@types/typescript/-/typescript-2.0.0.tgz#c433539c98bae28682b307eaa7a0fd2115b83c28"
-  integrity sha1-xDNTnJi64oaCswfqp6D9IRW4PCg=
-  dependencies:
-    typescript "*"
-
 abbrev@1:
   version "1.1.1"
@@ -427,6 +449,11 @@ are-we-there-yet@~1.1.2:
     delegates "^1.0.0"
     readable-stream "^2.0.6"
 
+arg@^4.1.0:
+  version "4.1.1"
+  resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.1.tgz#485f8e7c390ce4c5f78257dbea80d4be11feda4c"
+  integrity sha512-SlmP3fEA88MBv0PypnXZ8ZfJhwmDeIE3SP71j37AiXQBXYosPV0x6uISAaHYSlSVhmHOVkomen0tbGk6Anlebw==
+
 argparse@^1.0.7:
   version "1.0.10"
   resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"
@@ -459,6 +486,11 @@ array-find-index@^1.0.1:
   resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1"
   integrity sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E=
 
+array-from@^2.1.1:
+  version "2.1.1"
+  resolved "https://registry.yarnpkg.com/array-from/-/array-from-2.1.1.tgz#cfe9d8c26628b9dc5aecc62a9f5d8f1f352c1195"
+  integrity sha1-z+nYwmYoudxa7MYqn12PHzUsEZU=
+
 array-union@^1.0.1, array-union@^1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39"
@@ -678,7 +710,7 @@ braces@^2.3.1, braces@^2.3.2:
     split-string "^3.0.2"
     to-regex "^3.0.1"
 
-buffer-from@^1.0.0, buffer-from@^1.1.0:
+buffer-from@^1.0.0:
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef"
   integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==
@@ -818,9 +850,9 @@ class-utils@^0.3.5:
     static-extend "^0.1.1"
 
 clean-stack@^2.0.0:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.1.0.tgz#9e7fec7f3f8340a2ab4f127c80273085e8fbbdd0"
-  integrity sha512-uQWrpRm+iZZUCAp7ZZJQbd4Za9I3AjR/3YTjmcnAtkauaIm/T5CT6U8zVI6e60T6OANqBFAzuR9/HB3NzuZCRA==
+  version "2.2.0"
+  resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b"
+  integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==
 
 clean-yaml-object@^0.1.0:
   version "0.1.0"
@@ -1120,11 +1152,16 @@ detect-libc@^1.0.2:
   resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b"
   integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=
 
-diff@^3.1.0, diff@^3.2.0:
+diff@^3.2.0, diff@^3.5.0:
   version "3.5.0"
   resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12"
   integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==
 
+diff@^4.0.1:
+  version "4.0.1"
+  resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.1.tgz#0c667cb467ebbb5cea7f14f135cc2dba7780a8ff"
+  integrity sha512-s2+XdvhPCOF01LRQBC8hf4vhbVmI2CGS5aZnxLJlT5FtdhPCDFq80q++zK2KlrVorVDdL5BOGZ/VfLrVtYNF+Q==
+
 dir-glob@^2.0.0:
   version "2.2.2"
   resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-2.2.2.tgz#fa09f0694153c8918b18ba0deafae94769fc50c4"
@@ -1215,14 +1252,14 @@ espurify@^1.6.0:
     core-js "^2.0.0"
 
 estraverse@^4.0.0, estraverse@^4.1.1:
-  version "4.2.0"
-  resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13"
-  integrity sha1-De4/7TH81GlhjOc0IJn8GvoL2xM=
+  version "4.3.0"
+  resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d"
+  integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==
 
 esutils@^2.0.2:
-  version "2.0.2"
-  resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b"
-  integrity sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs=
+  version "2.0.3"
+  resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64"
+  integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==
 
 execa@^0.7.0:
   version "0.7.0"
@@ -1459,9 +1496,9 @@ got@^6.7.1:
     url-parse-lax "^1.0.0"
 
 graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2:
-  version "4.2.0"
-  resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.0.tgz#8d8fdc73977cb04104721cb53666c1ca64cd328b"
-  integrity sha512-jpSvDPV4Cq/bgtpndIWbI5hmYxhQGHPC4d4cqBPb4DLniCfhJokdXhwhaDuLBGLQdvvRum/UiX6ECVIPvDXqdg==
+  version "4.2.2"
+  resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.2.tgz#6f0952605d0140c1cfdb138ed005775b92d67b02"
+  integrity sha512-IItsdsea19BoLC7ELy13q1iJFNmd7ofZH5+X/pJr90/nRoPEX0DJo1dHDbgtYWOhJhcCgMDTOw84RZ72q6lB+Q==
 
 has-flag@^3.0.0:
   version "3.0.0"
@@ -1517,9 +1554,9 @@ hasha@^3.0.0:
     is-stream "^1.0.1"
 
 hosted-git-info@^2.1.4:
-  version "2.7.1"
-  resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.7.1.tgz#97f236977bd6e125408930ff6de3eec6281ec047"
-  integrity sha512-7T/BxH19zbcCTa8XkMlbK5lTo1WtgkFi3GvdWEyNuc4Vex7/9Dqbnpsf4JMydcfj9HCg4zUWFTL3Za6lapg5/w==
+  version "2.8.4"
+  resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.4.tgz#44119abaf4bc64692a16ace34700fed9c03e2546"
+  integrity sha512-pzXIvANXEFrc5oFFXRMkbLPQ2rXRoDERwDLyrcUxGhaZhgP54BBSl9Oheh7Vv0T090cszWBxPjkQQ5Sq1PbBRQ==
 
 iconv-lite@^0.4.4:
   version "0.4.24"
@@ -1828,6 +1865,11 @@ is-windows@^1.0.2:
   resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d"
   integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==
 
+isarray@0.0.1:
+  version "0.0.1"
+  resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf"
+  integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=
+
 isarray@1.0.0, isarray@~1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
@@ -1895,6 +1937,11 @@ json5@^2.1.0:
   dependencies:
     minimist "^1.2.0"
 
+just-extend@^4.0.2:
+  version "4.0.2"
+  resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-4.0.2.tgz#f3f47f7dfca0f989c55410a7ebc8854b07108afc"
+  integrity sha512-FrLwOgm+iXrPV+5zDU6Jqu4gCRXbWEQg2O3SKONsWE4w7AXFRkryS53bpWdaL9cNol+AmR3AEYz6kn+o0fCPnw==
+
 kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0:
   version "3.2.2"
   resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64"
@@ -2009,14 +2056,14 @@ lodash.islength@^4.0.1:
   integrity sha1-Tpho1FJXXXUK/9NYyXlUPcIO1Xc=
 
 lodash.merge@^4.6.1:
-  version "4.6.1"
-  resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.1.tgz#adc25d9cb99b9391c59624f379fbba60d7111d54"
-  integrity sha512-AOYza4+Hf5z1/0Hztxpm2/xiPZgi/cjMqdnKTUWTBSKchJlxXXuUSxCCl8rJlf4g6yww/j6mA8nC8Hw/EZWxKQ==
+  version "4.6.2"
+  resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a"
+  integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==
 
-lodash@^4.17.11:
-  version "4.17.11"
-  resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.11.tgz#b39ea6229ef607ecd89e2c8df12536891cac9b8d"
-  integrity sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg==
+lodash@^4.17.13, lodash@^4.17.15:
+  version "4.17.15"
+  resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548"
+  integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==
 
 log-symbols@^2.2.0:
   version "2.2.0"
@@ -2025,6 +2072,11 @@ log-symbols@^2.2.0:
   dependencies:
     chalk "^2.0.1"
 
+lolex@^4.1.0, lolex@^4.2.0:
+  version "4.2.0"
+  resolved "https://registry.yarnpkg.com/lolex/-/lolex-4.2.0.tgz#ddbd7f6213ca1ea5826901ab1222b65d714b3cd7"
+  integrity sha512-gKO5uExCXvSm6zbF562EvM+rd1kQDnB9AZBbiQVzf1ZmdDpxUSvpnAaVOP83N/31mRK8Ml8/VE8DMvsAZQ+7wg==
+
 loud-rejection@^1.0.0, loud-rejection@^1.2.0:
   version "1.6.0"
   resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f"
@@ -2255,6 +2307,17 @@ needle@^2.2.1:
     iconv-lite "^0.4.4"
     sax "^1.2.4"
 
+nise@^1.5.2:
+  version "1.5.2"
+  resolved "https://registry.yarnpkg.com/nise/-/nise-1.5.2.tgz#b6d29af10e48b321b307e10e065199338eeb2652"
+  integrity sha512-/6RhOUlicRCbE9s+94qCUsyE+pKlVJ5AhIv+jEE7ESKwnbXqulKZ1FYU+XAtHHWE9TinYvAxDUJAb912PwPoWA==
+  dependencies:
+    "@sinonjs/formatio" "^3.2.1"
+    "@sinonjs/text-encoding" "^0.7.1"
+    just-extend "^4.0.2"
+    lolex "^4.1.0"
+    path-to-regexp "^1.7.0"
+
 node-pre-gyp@^0.12.0:
   version "0.12.0"
   resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.12.0.tgz#39ba4bb1439da030295f899e3b520b7785766149"
@@ -2547,6 +2610,13 @@ path-parse@^1.0.6:
   resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c"
   integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==
 
+path-to-regexp@^1.7.0:
+  version "1.7.0"
+  resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.7.0.tgz#59fde0f435badacba103a84e9d3bc64e96b9937d"
+  integrity sha1-Wf3g9DW62suhA6hOnTvGTpa5k30=
+  dependencies:
+    isarray "0.0.1"
+
 path-type@^3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/path-type/-/path-type-3.0.0.tgz#cef31dc8e0a1a3bb0d105c0cd97cf3bf47f4e36f"
@@ -2702,7 +2772,7 @@ redent@^2.0.0:
     indent-string "^3.0.0"
     strip-indent "^2.0.0"
 
-regenerate-unicode-properties@^8.0.2:
+regenerate-unicode-properties@^8.1.0:
   version "8.1.0"
   resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz#ef51e0f0ea4ad424b77bf7cb41f3e015c70a3f0e"
   integrity sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA==
@@ -2723,12 +2793,12 @@ regex-not@^1.0.0, regex-not@^1.0.2:
     safe-regex "^1.1.0"
 
 regexpu-core@^4.5.4:
-  version "4.5.4"
-  resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.5.4.tgz#080d9d02289aa87fe1667a4f5136bc98a6aebaae"
-  integrity sha512-BtizvGtFQKGPUcTy56o3nk1bGRp4SZOTYrDtGNlqCQufptV5IkkLN6Emw+yunAJjzf+C9FQFtvq7IoA3+oMYHQ==
+  version "4.5.5"
+  resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.5.5.tgz#aaffe61c2af58269b3e516b61a73790376326411"
+  integrity sha512-FpI67+ky9J+cDizQUJlIlNZFKual/lUkFr1AG6zOCpwZ9cLrg8UUVakyUQJD7fCDIe9Z2nwTQJNPyonatNmDFQ==
   dependencies:
     regenerate "^1.4.0"
-    regenerate-unicode-properties "^8.0.2"
+    regenerate-unicode-properties "^8.1.0"
     regjsgen "^0.5.0"
     regjsparser "^0.6.0"
     unicode-match-property-ecmascript "^1.0.4"
@@ -2806,9 +2876,9 @@ resolve-url@^0.2.1:
   integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=
 
 resolve@^1.10.0, resolve@^1.3.2:
-  version "1.11.1"
-  resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.11.1.tgz#ea10d8110376982fef578df8fc30b9ac30a07a3e"
-  integrity sha512-vIpgF6wfuJOZI7KKKSP+HmiKggadPQAdsp5HiC1mvqnfp0gF1vdwgBWZIdrVft9pgqoMFQN+R7BSWZiBxx+BBw==
+  version "1.12.0"
+  resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.12.0.tgz#3fc644a35c84a48554609ff26ec52b66fa577df6"
+  integrity sha512-B/dOmuoAik5bKcD6s6nXDCjzUKnaDvdkRyAk6rsmsKLipWj4797iothd7jmmUhWTfinVMU+wc56rYKsit2Qy4w==
   dependencies:
     path-parse "^1.0.6"
 
@@ -2826,9 +2896,9 @@ ret@~0.1.10:
   integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==
 
 rimraf@^2.6.1, rimraf@^2.6.3:
-  version "2.6.3"
-  resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab"
-  integrity sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==
+  version "2.7.1"
+  resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec"
+  integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==
   dependencies:
     glob "^7.1.3"
 
@@ -2867,9 +2937,9 @@ semver-diff@^2.0.0:
     semver "^5.0.3"
 
 "semver@2 || 3 || 4 || 5", semver@^5.0.3, semver@^5.1.0, semver@^5.3.0, semver@^5.4.1, semver@^5.5.1, semver@^5.6.0:
-  version "5.7.0"
-  resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.0.tgz#790a7cf6fea5459bac96110b29b60412dc8ff96b"
-  integrity sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA==
+  version "5.7.1"
+  resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
+  integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==
 
 serialize-error@^2.1.0:
   version "2.1.0"
@@ -2908,6 +2978,19 @@ signal-exit@^3.0.0, signal-exit@^3.0.2:
   resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d"
   integrity sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=
 
+sinon@^7.4.2:
+  version "7.4.2"
+  resolved "https://registry.yarnpkg.com/sinon/-/sinon-7.4.2.tgz#ecd54158fef2fcfbdb231a3fa55140e8cb02ad6c"
+  integrity sha512-pY5RY99DKelU3pjNxcWo6XqeB1S118GBcVIIdDi6V+h6hevn1izcg2xv1hTHW/sViRXU7sUOxt4wTUJ3gsW2CQ==
+  dependencies:
+    "@sinonjs/commons" "^1.4.0"
+    "@sinonjs/formatio" "^3.2.1"
+    "@sinonjs/samsam" "^3.3.3"
+    diff "^3.5.0"
+    lolex "^4.2.0"
+    nise "^1.5.2"
+    supports-color "^5.5.0"
+
 slash@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/slash/-/slash-1.0.0.tgz#c41f2f6c39fc16d1cd17ad4b5d896114ae470d55"
@@ -2971,18 +3054,10 @@ source-map-resolve@^0.5.0:
     source-map-url "^0.4.0"
     urix "^0.1.0"
 
-source-map-support@^0.5.11:
-  version "0.5.12"
-  resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.12.tgz#b4f3b10d51857a5af0138d3ce8003b201613d599"
-  integrity sha512-4h2Pbvyy15EE02G+JOZpUCmqWJuqrs+sEkzewTm++BPi7Hvn/HwcqLAcNxYAyI0x13CpPPn+kMjl+hplXMHITQ==
-  dependencies:
-    buffer-from "^1.0.0"
-    source-map "^0.6.0"
-
-source-map-support@^0.5.6:
-  version "0.5.9"
-  resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.9.tgz#41bc953b2534267ea2d605bccfa7bfa3111ced5f"
-  integrity sha512-gR6Rw4MvUlYy83vP0vxoVNzM6t8MUXqNuRsuBmBHQDu1Fh6X015FrLdgoDKcNdkwGubozq0P4N0Q37UyFVr1EA==
+source-map-support@^0.5.11, source-map-support@^0.5.6:
+  version "0.5.13"
+  resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932"
+  integrity sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==
   dependencies:
     buffer-from "^1.0.0"
     source-map "^0.6.0"
@@ -3024,9 +3099,9 @@ spdx-expression-parse@^3.0.0:
     spdx-license-ids "^3.0.0"
 
 spdx-license-ids@^3.0.0:
-  version "3.0.4"
-  resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.4.tgz#75ecd1a88de8c184ef015eafb51b5b48bfd11bb1"
-  integrity sha512-7j8LYJLeY/Yb6ACbQ7F76qy5jHkp0U6jgBfJsk97bwWlVUnUWsAgpyaCvo17h0/RQGnQ036tVDomiwoI4pDkQA==
+  version "3.0.5"
+  resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz#3694b5804567a458d3c8045842a6358632f62654"
+  integrity sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==
 
 split-string@^3.0.1, split-string@^3.0.2:
   version "3.1.0"
@@ -3136,7 +3211,7 @@ supertap@^1.0.0:
     serialize-error "^2.1.0"
     strip-ansi "^4.0.0"
 
-supports-color@^5.3.0:
+supports-color@^5.3.0, supports-color@^5.5.0:
   version "5.5.0"
   resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f"
   integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==
@@ -3235,26 +3310,18 @@ trim-right@^1.0.1:
   resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003"
   integrity sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM=
 
-ts-node@^7.0.1:
-  version "7.0.1"
-  resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-7.0.1.tgz#9562dc2d1e6d248d24bc55f773e3f614337d9baf"
-  integrity sha512-BVwVbPJRspzNh2yfslyT1PSbl5uIk03EZlb493RKHN4qej/D06n1cEhjlOJG69oFsE7OT8XjpTUcYf6pKTLMhw==
+ts-node@^8.3.0:
+  version "8.3.0"
+  resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-8.3.0.tgz#e4059618411371924a1fb5f3b125915f324efb57"
+  integrity sha512-dyNS/RqyVTDcmNM4NIBAeDMpsAdaQ+ojdf0GOLqE6nwJOgzEkdRNzJywhDfwnuvB10oa6NLVG1rUJQCpRN7qoQ==
   dependencies:
-    arrify "^1.0.0"
-    buffer-from "^1.1.0"
-    diff "^3.1.0"
+    arg "^4.1.0"
+    diff "^4.0.1"
     make-error "^1.1.1"
-    minimist "^1.2.0"
-    mkdirp "^0.5.1"
     source-map-support "^0.5.6"
-    yn "^2.0.0"
+    yn "^3.0.0"
 
-tslib@^1.7.1:
-  version "1.9.3"
-  resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.3.tgz#d7e4dd79245d85428c4d7e4822a79917954ca286"
-  integrity sha512-4krF8scpejhaOgqzBEcGM7yDIEfi0/8+8zDRZhNZZ2kjmHJ4hv3zCbQWxoJGz1iw5U0Jl0nma13xzHXcncMavQ==
-
-tslib@^1.8.0, tslib@^1.8.1:
+tslib@^1.7.1, tslib@^1.8.0, tslib@^1.8.1:
   version "1.10.0"
   resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.10.0.tgz#c3c19f95973fb0a62973fb09d90d961ee43e5c8a"
   integrity sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ==
@@ -3299,7 +3366,7 @@ tsutils@^2.29.0:
   dependencies:
     tslib "^1.8.1"
 
-type-detect@^4.0.0, type-detect@^4.0.5:
+type-detect@4.0.8, type-detect@^4.0.0, type-detect@^4.0.5:
   version "4.0.8"
   resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c"
   integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==
@@ -3309,10 +3376,10 @@ type-fest@^0.3.0:
   resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.3.1.tgz#63d00d204e059474fe5e1b7c011112bbd1dc29e1"
  integrity sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ==
 
-typescript@*, typescript@^3.1.6:
-  version "3.5.2"
-  resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.2.tgz#a09e1dc69bc9551cadf17dba10ee42cf55e5d56c"
-  integrity sha512-7KxJovlYhTX5RaRbUdkAXN1KUZ8PwWlTzQdHV6xNqvuFOs7+WBo10TQUqT19Q/Jz2hk5v9TQDIhyLhhJY4p5AA==
+typescript@^3.5.3:
+  version "3.5.3"
+  resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977"
+  integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g==
 
 uid2@0.0.3:
   version "0.0.3"
@@ -3485,9 +3552,9 @@ xdg-basedir@^3.0.0:
   integrity sha1-SWsswQnsqNus/i3HK2A8F8WHCtQ=
 
 xtend@^4.0.0:
-  version "4.0.1"
-  resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af"
-  integrity sha1-pcbVMr5lbiPbgg77lDofBJmNY68=
+  version "4.0.2"
+  resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54"
+  integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==
 
 yallist@^2.1.2:
   version "2.1.2"
@@ -3506,7 +3573,7 @@ yargs-parser@^10.0.0:
   dependencies:
     camelcase "^4.1.0"
 
-yn@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/yn/-/yn-2.0.0.tgz#e5adabc8acf408f6385fc76495684c88e6af689a"
-  integrity sha1-5a2ryKz0CPY4X8dklWhMiOavaJo=
+yn@^3.0.0:
+  version "3.1.1"
+  resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50"
+  integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==