Merge pull request #2 from Jogogoplay/feature/accumulator
batch, unbatch, rate, parallel, accumulator with rolling / sliding windows, demultiplexer
This commit is contained in:
commit
e08558ca88
33
package.json
33
package.json
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "mhysa",
|
||||
"version": "1.0.2",
|
||||
"name": "@jogogo/mhysa",
|
||||
"version": "0.0.1-alpha.1",
|
||||
"description": "Streams and event emitter utils for Node.js",
|
||||
"keywords": [
|
||||
"promise",
|
||||
@ -11,18 +11,33 @@
|
||||
"author": {
|
||||
"name": "Wenzil"
|
||||
},
|
||||
"contributors": [
|
||||
{
|
||||
"name": "jerry",
|
||||
"email": "jerry@jogogo.co"
|
||||
},
|
||||
{
|
||||
"name": "lewis",
|
||||
"email": "lewis@jogogo.co"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"publishConfig": {
|
||||
"registry": "https://npm.dev.jogogo.co/"
|
||||
},
|
||||
"repository": {
|
||||
"url": "git@github.com:Wenzil/Mhysa.git",
|
||||
"url": "git@github.com:Jogogoplay/mhysa.git",
|
||||
"type": "git"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "ava",
|
||||
"test": "NODE_PATH=src node node_modules/.bin/ava 'tests/*.spec.ts' -e",
|
||||
"test:debug": "NODE_PATH=src node inspect node_modules/ava/profile.js",
|
||||
"test:all": "NODE_PATH=src node node_modules/.bin/ava",
|
||||
"lint": "tslint -p tsconfig.json",
|
||||
"validate:tslint": "tslint-config-prettier-check ./tslint.json",
|
||||
"prepublishOnly": "yarn lint && yarn test && yarn tsc"
|
||||
@ -30,20 +45,22 @@
|
||||
"dependencies": {},
|
||||
"devDependencies": {
|
||||
"@types/chai": "^4.1.7",
|
||||
"@types/node": "^10.12.10",
|
||||
"@types/node": "^12.7.2",
|
||||
"@types/sinon": "^7.0.13",
|
||||
"ava": "^1.0.0-rc.2",
|
||||
"chai": "^4.2.0",
|
||||
"mhysa": "./",
|
||||
"prettier": "^1.14.3",
|
||||
"ts-node": "^7.0.1",
|
||||
"sinon": "^7.4.2",
|
||||
"ts-node": "^8.3.0",
|
||||
"tslint": "^5.11.0",
|
||||
"tslint-config-prettier": "^1.16.0",
|
||||
"tslint-plugin-prettier": "^2.0.1",
|
||||
"typescript": "^3.1.6"
|
||||
"typescript": "^3.5.3"
|
||||
},
|
||||
"ava": {
|
||||
"files": [
|
||||
"src/**/*.spec.ts"
|
||||
"tests/*.spec.ts"
|
||||
],
|
||||
"sources": [
|
||||
"src/**/*.ts"
|
||||
|
181
src/functions/accumulator.ts
Normal file
181
src/functions/accumulator.ts
Normal file
@ -0,0 +1,181 @@
|
||||
import { Transform, TransformOptions } from "stream";
|
||||
import { batch } from ".";
|
||||
|
||||
export enum FlushStrategy {
|
||||
rolling = "rolling",
|
||||
sliding = "sliding",
|
||||
}
|
||||
|
||||
export type AccumulatorByIteratee<T> = (event: T, bufferChunk: T) => boolean;
|
||||
|
||||
function _accumulator<T>(
|
||||
accumulateBy: (data: T, buffer: T[], stream: Transform) => void,
|
||||
shouldFlush: boolean = true,
|
||||
options: TransformOptions = {},
|
||||
) {
|
||||
const buffer: T[] = [];
|
||||
return new Transform({
|
||||
...options,
|
||||
transform(data: T, encoding, callback) {
|
||||
accumulateBy(data, buffer, this);
|
||||
callback();
|
||||
},
|
||||
flush(callback) {
|
||||
if (shouldFlush) {
|
||||
this.push(buffer);
|
||||
}
|
||||
callback();
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
function _sliding<T>(
|
||||
windowLength: number,
|
||||
key?: string,
|
||||
): (event: T, buffer: T[], stream: Transform) => void {
|
||||
return (event: T, buffer: T[], stream: Transform) => {
|
||||
if (key) {
|
||||
let index = 0;
|
||||
if (event[key] === undefined) {
|
||||
stream.emit(
|
||||
"error",
|
||||
new Error(
|
||||
`Key is missing in event: (${key}, ${JSON.stringify(
|
||||
event,
|
||||
)})`,
|
||||
),
|
||||
);
|
||||
stream.resume();
|
||||
return;
|
||||
}
|
||||
while (
|
||||
index < buffer.length &&
|
||||
buffer[index][key] + windowLength <= event[key]
|
||||
) {
|
||||
index++;
|
||||
}
|
||||
buffer.splice(0, index);
|
||||
} else if (buffer.length === windowLength) {
|
||||
buffer.shift();
|
||||
}
|
||||
buffer.push(event);
|
||||
stream.push(buffer);
|
||||
};
|
||||
}
|
||||
|
||||
function _slidingByFunction<T>(
|
||||
iteratee: AccumulatorByIteratee<T>,
|
||||
): (event: T, buffer: T[], stream: Transform) => void {
|
||||
return (event: T, buffer: T[], stream: Transform) => {
|
||||
let index = 0;
|
||||
while (index < buffer.length && iteratee(event, buffer[index])) {
|
||||
index++;
|
||||
}
|
||||
buffer.splice(0, index);
|
||||
buffer.push(event);
|
||||
stream.push(buffer);
|
||||
};
|
||||
}
|
||||
|
||||
function _rollingByFunction<T>(
|
||||
iteratee: AccumulatorByIteratee<T>,
|
||||
): (event: T, buffer: T[], stream: Transform) => void {
|
||||
return (event: T, buffer: T[], stream: Transform) => {
|
||||
if (iteratee) {
|
||||
if (buffer.length > 0 && iteratee(event, buffer[0])) {
|
||||
stream.push(buffer.slice(0));
|
||||
buffer.length = 0;
|
||||
}
|
||||
}
|
||||
buffer.push(event);
|
||||
};
|
||||
}
|
||||
|
||||
function _rolling<T>(
|
||||
windowLength: number,
|
||||
key?: string,
|
||||
): (event: T, buffer: T[], stream: Transform) => void {
|
||||
return (event: T, buffer: T[], stream: Transform) => {
|
||||
if (key) {
|
||||
if (event[key] === undefined) {
|
||||
stream.emit(
|
||||
"error",
|
||||
new Error(
|
||||
`Key is missing in event: (${key}, ${JSON.stringify(
|
||||
event,
|
||||
)})`,
|
||||
),
|
||||
);
|
||||
stream.resume();
|
||||
return;
|
||||
} else if (
|
||||
buffer.length > 0 &&
|
||||
buffer[0][key] + windowLength <= event[key]
|
||||
) {
|
||||
stream.push(buffer.slice(0));
|
||||
buffer.length = 0;
|
||||
}
|
||||
} else if (buffer.length === windowLength) {
|
||||
stream.push(buffer.slice(0));
|
||||
buffer.length = 0;
|
||||
}
|
||||
buffer.push(event);
|
||||
};
|
||||
}
|
||||
|
||||
export function accumulator(
|
||||
flushStrategy: FlushStrategy,
|
||||
batchSize: number,
|
||||
keyBy?: string,
|
||||
options?: TransformOptions,
|
||||
): Transform {
|
||||
if (flushStrategy === FlushStrategy.sliding) {
|
||||
return sliding(batchSize, keyBy, options);
|
||||
} else if (flushStrategy === FlushStrategy.rolling) {
|
||||
return rolling(batchSize, keyBy, options);
|
||||
} else {
|
||||
return batch(batchSize);
|
||||
}
|
||||
}
|
||||
|
||||
export function accumulatorBy<T, S extends FlushStrategy>(
|
||||
flushStrategy: S,
|
||||
iteratee: AccumulatorByIteratee<T>,
|
||||
options?: TransformOptions,
|
||||
): Transform {
|
||||
if (flushStrategy === FlushStrategy.sliding) {
|
||||
return slidingBy(iteratee, options);
|
||||
} else {
|
||||
return rollingBy(iteratee, options);
|
||||
}
|
||||
}
|
||||
|
||||
function sliding(
|
||||
windowLength: number,
|
||||
key?: string,
|
||||
options?: TransformOptions,
|
||||
): Transform {
|
||||
return _accumulator(_sliding(windowLength, key), false, options);
|
||||
}
|
||||
|
||||
function slidingBy<T>(
|
||||
iteratee: AccumulatorByIteratee<T>,
|
||||
options?: TransformOptions,
|
||||
): Transform {
|
||||
return _accumulator(_slidingByFunction(iteratee), false, options);
|
||||
}
|
||||
|
||||
function rolling(
|
||||
windowLength: number,
|
||||
key?: string,
|
||||
options?: TransformOptions,
|
||||
): Transform {
|
||||
return _accumulator(_rolling(windowLength, key), true, options);
|
||||
}
|
||||
|
||||
function rollingBy<T>(
|
||||
iteratee: AccumulatorByIteratee<T>,
|
||||
options?: TransformOptions,
|
||||
): Transform {
|
||||
return _accumulator(_rollingByFunction(iteratee), true, options);
|
||||
}
|
14
src/functions/baseDefinitions.ts
Normal file
14
src/functions/baseDefinitions.ts
Normal file
@ -0,0 +1,14 @@
|
||||
export interface WithEncoding {
|
||||
encoding: string;
|
||||
}
|
||||
|
||||
export enum SerializationFormats {
|
||||
utf8 = "utf8",
|
||||
}
|
||||
|
||||
type JsonPrimitive = string | number | object;
|
||||
export type JsonValue = JsonPrimitive | JsonPrimitive[];
|
||||
|
||||
export interface JsonParseOptions {
|
||||
pretty: boolean;
|
||||
}
|
23
src/functions/baseFunctions.ts
Normal file
23
src/functions/baseFunctions.ts
Normal file
@ -0,0 +1,23 @@
|
||||
export { accumulator, accumulatorBy } from "./accumulator";
|
||||
export { batch } from "./batch";
|
||||
export { child } from "./child";
|
||||
export { collect } from "./collect";
|
||||
export { concat } from "./concat";
|
||||
export { duplex } from "./duplex";
|
||||
export { filter } from "./filter";
|
||||
export { flatMap } from "./flatMap";
|
||||
export { fromArray } from "./fromArray";
|
||||
export { join } from "./join";
|
||||
export { last } from "./last";
|
||||
export { map } from "./map";
|
||||
export { merge } from "./merge";
|
||||
export { parallelMap } from "./parallelMap";
|
||||
export { parse } from "./parse";
|
||||
export { rate } from "./rate";
|
||||
export { reduce } from "./reduce";
|
||||
export { replace } from "./replace";
|
||||
export { split } from "./split";
|
||||
export { stringify } from "./stringify";
|
||||
export { unbatch } from "./unbatch";
|
||||
export { compose } from "./compose";
|
||||
export { demux } from "./demux";
|
40
src/functions/batch.ts
Normal file
40
src/functions/batch.ts
Normal file
@ -0,0 +1,40 @@
|
||||
import { Transform, TransformOptions } from "stream";
|
||||
|
||||
export function batch(
|
||||
batchSize: number = 1000,
|
||||
maxBatchAge: number = 500,
|
||||
options: TransformOptions = {
|
||||
objectMode: true,
|
||||
},
|
||||
): Transform {
|
||||
let buffer: any[] = [];
|
||||
let timer: NodeJS.Timer | null = null;
|
||||
const sendChunk = (self: Transform) => {
|
||||
if (timer) {
|
||||
clearTimeout(timer);
|
||||
}
|
||||
timer = null;
|
||||
self.push(buffer);
|
||||
buffer = [];
|
||||
};
|
||||
return new Transform({
|
||||
...options,
|
||||
transform(chunk, encoding, callback) {
|
||||
buffer.push(chunk);
|
||||
if (buffer.length === batchSize) {
|
||||
sendChunk(this);
|
||||
} else {
|
||||
if (timer === null) {
|
||||
timer = setInterval(() => {
|
||||
sendChunk(this);
|
||||
}, maxBatchAge);
|
||||
}
|
||||
}
|
||||
callback();
|
||||
},
|
||||
flush(callback) {
|
||||
sendChunk(this);
|
||||
callback();
|
||||
},
|
||||
});
|
||||
}
|
11
src/functions/child.ts
Normal file
11
src/functions/child.ts
Normal file
@ -0,0 +1,11 @@
|
||||
import { ChildProcess } from "child_process";
|
||||
import { duplex } from "./baseFunctions";
|
||||
|
||||
export function child(childProcess: ChildProcess) {
|
||||
if (childProcess.stdin === null) {
|
||||
throw new Error("childProcess.stdin is null");
|
||||
} else if (childProcess.stdout === null) {
|
||||
throw new Error("childProcess.stdout is null");
|
||||
}
|
||||
return duplex(childProcess.stdin, childProcess.stdout);
|
||||
}
|
18
src/functions/collect.ts
Normal file
18
src/functions/collect.ts
Normal file
@ -0,0 +1,18 @@
|
||||
import { Transform, TransformOptions } from "stream";
|
||||
|
||||
export function collect(options: TransformOptions = {}): Transform {
|
||||
const collected: any[] = [];
|
||||
return new Transform({
|
||||
...options,
|
||||
transform(data, encoding, callback) {
|
||||
collected.push(data);
|
||||
callback();
|
||||
},
|
||||
flush(callback) {
|
||||
this.push(
|
||||
options.objectMode ? collected : Buffer.concat(collected),
|
||||
);
|
||||
callback();
|
||||
},
|
||||
});
|
||||
}
|
106
src/functions/compose.ts
Normal file
106
src/functions/compose.ts
Normal file
@ -0,0 +1,106 @@
|
||||
import { pipeline, Duplex, DuplexOptions } from "stream";
|
||||
|
||||
export function compose(
|
||||
streams: Array<
|
||||
NodeJS.ReadableStream | NodeJS.ReadWriteStream | NodeJS.WritableStream
|
||||
>,
|
||||
options?: DuplexOptions,
|
||||
): Compose {
|
||||
if (streams.length < 2) {
|
||||
throw new Error("At least two streams are required to compose");
|
||||
}
|
||||
|
||||
return new Compose(streams, options);
|
||||
}
|
||||
|
||||
enum EventSubscription {
|
||||
Last = 0,
|
||||
First,
|
||||
All,
|
||||
Self,
|
||||
}
|
||||
|
||||
const eventsTarget = {
|
||||
close: EventSubscription.Last,
|
||||
data: EventSubscription.Last,
|
||||
drain: EventSubscription.Self,
|
||||
end: EventSubscription.Last,
|
||||
error: EventSubscription.Self,
|
||||
finish: EventSubscription.Last,
|
||||
pause: EventSubscription.Last,
|
||||
pipe: EventSubscription.First,
|
||||
readable: EventSubscription.Last,
|
||||
resume: EventSubscription.Last,
|
||||
unpipe: EventSubscription.First,
|
||||
};
|
||||
|
||||
type AllStreams =
|
||||
| NodeJS.ReadableStream
|
||||
| NodeJS.ReadWriteStream
|
||||
| NodeJS.WritableStream;
|
||||
|
||||
export class Compose extends Duplex {
|
||||
private first: AllStreams;
|
||||
private last: AllStreams;
|
||||
private streams: AllStreams[];
|
||||
|
||||
constructor(streams: AllStreams[], options?: DuplexOptions) {
|
||||
super(options);
|
||||
this.first = streams[0];
|
||||
this.last = streams[streams.length - 1];
|
||||
this.streams = streams;
|
||||
pipeline(streams, (err: any) => {
|
||||
this.emit("error", err);
|
||||
});
|
||||
}
|
||||
|
||||
public pipe<T extends NodeJS.WritableStream>(dest: T) {
|
||||
return (this.last as NodeJS.ReadableStream).pipe(dest);
|
||||
}
|
||||
|
||||
public _write(chunk: any, encoding: string, cb: any) {
|
||||
(this.first as NodeJS.WritableStream).write(chunk, encoding, cb);
|
||||
}
|
||||
|
||||
public bubble(...events: string[]) {
|
||||
this.streams.forEach(s => {
|
||||
events.forEach(e => {
|
||||
s.on(e, (...args) => super.emit(e, ...args));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
public on(event: string, cb: any) {
|
||||
switch (eventsTarget[event]) {
|
||||
case EventSubscription.First:
|
||||
this.first.on(event, cb);
|
||||
break;
|
||||
case EventSubscription.Last:
|
||||
this.last.on(event, cb);
|
||||
break;
|
||||
case EventSubscription.All:
|
||||
this.streams.forEach(s => s.on(event, cb));
|
||||
break;
|
||||
default:
|
||||
super.on(event, cb);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public once(event: string, cb: any) {
|
||||
switch (eventsTarget[event]) {
|
||||
case EventSubscription.First:
|
||||
this.first.once(event, cb);
|
||||
break;
|
||||
case EventSubscription.Last:
|
||||
this.last.once(event, cb);
|
||||
break;
|
||||
case EventSubscription.All:
|
||||
this.streams.forEach(s => s.once(event, cb));
|
||||
break;
|
||||
default:
|
||||
super.once(event, cb);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
}
|
37
src/functions/concat.ts
Normal file
37
src/functions/concat.ts
Normal file
@ -0,0 +1,37 @@
|
||||
import { Readable } from "stream";
|
||||
|
||||
export function concat(...streams: NodeJS.ReadableStream[]): Readable {
|
||||
let isStarted = false;
|
||||
let currentStreamIndex = 0;
|
||||
const startCurrentStream = () => {
|
||||
if (currentStreamIndex >= streams.length) {
|
||||
wrapper.push(null);
|
||||
} else {
|
||||
streams[currentStreamIndex]
|
||||
.on("data", chunk => {
|
||||
if (!wrapper.push(chunk)) {
|
||||
streams[currentStreamIndex].pause();
|
||||
}
|
||||
})
|
||||
.on("error", err => wrapper.emit("error", err))
|
||||
.on("end", () => {
|
||||
currentStreamIndex++;
|
||||
startCurrentStream();
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const wrapper = new Readable({
|
||||
objectMode: true,
|
||||
read() {
|
||||
if (!isStarted) {
|
||||
isStarted = true;
|
||||
startCurrentStream();
|
||||
}
|
||||
if (currentStreamIndex < streams.length) {
|
||||
streams[currentStreamIndex].resume();
|
||||
}
|
||||
},
|
||||
});
|
||||
return wrapper;
|
||||
}
|
99
src/functions/demux.ts
Normal file
99
src/functions/demux.ts
Normal file
@ -0,0 +1,99 @@
|
||||
import { WritableOptions, Writable } from "stream";
|
||||
|
||||
enum EventSubscription {
|
||||
Last = 0,
|
||||
First,
|
||||
All,
|
||||
Self,
|
||||
Unhandled,
|
||||
}
|
||||
|
||||
const eventsTarget = {
|
||||
close: EventSubscription.Self,
|
||||
data: EventSubscription.All,
|
||||
drain: EventSubscription.Self,
|
||||
end: EventSubscription.Self,
|
||||
error: EventSubscription.Self,
|
||||
finish: EventSubscription.Self,
|
||||
pause: EventSubscription.Self,
|
||||
pipe: EventSubscription.Self,
|
||||
readable: EventSubscription.Self,
|
||||
resume: EventSubscription.Self,
|
||||
unpipe: EventSubscription.Self,
|
||||
};
|
||||
|
||||
type DemuxStreams = NodeJS.WritableStream | NodeJS.ReadWriteStream;
|
||||
|
||||
export function demux(
|
||||
construct: () => DemuxStreams,
|
||||
demuxBy: string | ((chunk: any) => string),
|
||||
options?: WritableOptions,
|
||||
): Writable {
|
||||
return new Demux(construct, demuxBy, options);
|
||||
}
|
||||
|
||||
// @TODO handle pipe event ie) Multiplex
|
||||
class Demux extends Writable {
|
||||
private streamsByKey: {
|
||||
[key: string]: DemuxStreams;
|
||||
};
|
||||
private demuxer: (chunk: any) => string;
|
||||
private construct: (destKey?: string) => DemuxStreams;
|
||||
constructor(
|
||||
construct: (destKey?: string) => DemuxStreams,
|
||||
demuxBy: string | ((chunk: any) => string),
|
||||
options: WritableOptions = {},
|
||||
) {
|
||||
super(options);
|
||||
this.demuxer =
|
||||
typeof demuxBy === "string" ? chunk => chunk[demuxBy] : demuxBy;
|
||||
this.construct = construct;
|
||||
this.streamsByKey = {};
|
||||
}
|
||||
|
||||
public _write(chunk: any, encoding: any, cb: any) {
|
||||
const destKey = this.demuxer(chunk);
|
||||
if (this.streamsByKey[destKey] === undefined) {
|
||||
this.streamsByKey[destKey] = this.construct(destKey);
|
||||
}
|
||||
if (!this.streamsByKey[destKey].write(chunk, encoding)) {
|
||||
this.streamsByKey[destKey].once("drain", () => {
|
||||
cb();
|
||||
});
|
||||
} else {
|
||||
cb();
|
||||
}
|
||||
}
|
||||
|
||||
public on(event: string, cb: any) {
|
||||
switch (eventsTarget[event]) {
|
||||
case EventSubscription.Self:
|
||||
super.on(event, cb);
|
||||
break;
|
||||
case EventSubscription.All:
|
||||
Object.keys(this.streamsByKey).forEach(key =>
|
||||
this.streamsByKey[key].on(event, cb),
|
||||
);
|
||||
break;
|
||||
default:
|
||||
super.on(event, cb);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public once(event: string, cb: any) {
|
||||
switch (eventsTarget[event]) {
|
||||
case EventSubscription.Self:
|
||||
super.once(event, cb);
|
||||
break;
|
||||
case EventSubscription.All:
|
||||
Object.keys(this.streamsByKey).forEach(key =>
|
||||
this.streamsByKey[key].once(event, cb),
|
||||
);
|
||||
break;
|
||||
default:
|
||||
super.once(event, cb);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
}
|
31
src/functions/duplex.ts
Normal file
31
src/functions/duplex.ts
Normal file
@ -0,0 +1,31 @@
|
||||
import { Duplex } from "stream";
|
||||
|
||||
export function duplex(
|
||||
writable: NodeJS.WritableStream,
|
||||
readable: NodeJS.ReadableStream,
|
||||
) {
|
||||
const wrapper = new Duplex({
|
||||
readableObjectMode: true,
|
||||
writableObjectMode: true,
|
||||
read() {
|
||||
readable.resume();
|
||||
},
|
||||
write(chunk, encoding, callback) {
|
||||
return writable.write(chunk, encoding, callback);
|
||||
},
|
||||
final(callback) {
|
||||
writable.end(callback);
|
||||
},
|
||||
});
|
||||
readable
|
||||
.on("data", chunk => {
|
||||
if (!wrapper.push(chunk)) {
|
||||
readable.pause();
|
||||
}
|
||||
})
|
||||
.on("error", err => wrapper.emit("error", err))
|
||||
.on("end", () => wrapper.push(null));
|
||||
writable.on("drain", () => wrapper.emit("drain"));
|
||||
writable.on("error", err => wrapper.emit("error", err));
|
||||
return wrapper;
|
||||
}
|
20
src/functions/filter.ts
Normal file
20
src/functions/filter.ts
Normal file
@ -0,0 +1,20 @@
|
||||
import { Transform, TransformOptions } from "stream";
|
||||
|
||||
export function filter<T>(
|
||||
predicate:
|
||||
| ((chunk: T, encoding: string) => boolean)
|
||||
| ((chunk: T, encoding: string) => Promise<boolean>),
|
||||
options?: TransformOptions,
|
||||
) {
|
||||
return new Transform({
|
||||
...options,
|
||||
async transform(chunk: T, encoding?: any, callback?: any) {
|
||||
const result = await predicate(chunk, encoding);
|
||||
if (result === true) {
|
||||
callback(null, chunk);
|
||||
} else {
|
||||
callback();
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
18
src/functions/flatMap.ts
Normal file
18
src/functions/flatMap.ts
Normal file
@ -0,0 +1,18 @@
|
||||
import { Transform, TransformOptions } from "stream";
|
||||
|
||||
export function flatMap<T, R>(
|
||||
mapper:
|
||||
| ((chunk: T, encoding: string) => R[])
|
||||
| ((chunk: T, encoding: string) => Promise<R[]>),
|
||||
options: TransformOptions = {
|
||||
objectMode: true,
|
||||
},
|
||||
): Transform {
|
||||
return new Transform({
|
||||
...options,
|
||||
async transform(chunk: T, encoding, callback) {
|
||||
(await mapper(chunk, encoding)).forEach(c => this.push(c));
|
||||
callback();
|
||||
},
|
||||
});
|
||||
}
|
16
src/functions/fromArray.ts
Normal file
16
src/functions/fromArray.ts
Normal file
@ -0,0 +1,16 @@
|
||||
import { Readable } from "stream";
|
||||
|
||||
export function fromArray(array: any[]): Readable {
|
||||
let cursor = 0;
|
||||
return new Readable({
|
||||
objectMode: true,
|
||||
read() {
|
||||
if (cursor < array.length) {
|
||||
this.push(array[cursor]);
|
||||
cursor++;
|
||||
} else {
|
||||
this.push(null);
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
214
src/functions/index.ts
Normal file
214
src/functions/index.ts
Normal file
@ -0,0 +1,214 @@
|
||||
import { Transform } from "stream";
|
||||
import * as baseFunctions from "./baseFunctions";
|
||||
|
||||
/**
|
||||
* Convert an array into a Readable stream of its elements
|
||||
* @param array Array of elements to stream
|
||||
*/
|
||||
export const fromArray = baseFunctions.fromArray;
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that maps streamed chunks
|
||||
* @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such)
|
||||
* @param options?
|
||||
* @param options.readableObjectMode? Whether this stream should behave as a readable stream of objects
|
||||
* @param options.writableObjectMode? Whether this stream should behave as a writable stream of objects
|
||||
*/
|
||||
export const map = baseFunctions.map;
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that flat maps streamed chunks
|
||||
* @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such)
|
||||
* @param options?
|
||||
* @param options.readableObjectMode? Whether this stream should behave as a readable stream of objects
|
||||
* @param options.writableObjectMode? Whether this stream should behave as a writable stream of objects
|
||||
*/
|
||||
export const flatMap = baseFunctions.flatMap;
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold
|
||||
* @param predicate Predicate with which to filter scream chunks
|
||||
* @param options?
|
||||
* @param options.objectMode? Whether this stream should behave as a stream of objects.
|
||||
*/
|
||||
export const filter = baseFunctions.filter;
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that reduces streamed chunks down to a single value and yield that
|
||||
* value
|
||||
* @param iteratee Reducer function to apply on each streamed chunk
|
||||
* @param initialValue Initial value
|
||||
* @param options?
|
||||
* @param options.readableObjectMode? Whether this stream should behave as a readable stream of objects
|
||||
* @param options.writableObjectMode? Whether this stream should behave as a writable stream of objects
|
||||
*/
|
||||
export const reduce = baseFunctions.reduce;
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that splits streamed chunks using the given separator
|
||||
* @param separator? Separator to split by, defaulting to "\n"
|
||||
* @param options? Defaults to encoding: utf8
|
||||
* @param options.encoding? Encoding written chunks are assumed to use
|
||||
*/
|
||||
export const split = baseFunctions.split;
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that joins streamed chunks using the given separator
|
||||
* @param separator Separator to join with
|
||||
* @param options? Defaults to encoding: utf8
|
||||
* @param options.encoding? Encoding written chunks are assumed to use
|
||||
*/
|
||||
export const join = baseFunctions.join;
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that replaces occurrences of the given string or regular expression in
|
||||
* the streamed chunks with the specified replacement string
|
||||
* @param searchValue Search string to use
|
||||
* @param replaceValue Replacement string to use
|
||||
* @param options? Defaults to encoding: utf8
|
||||
* @param options.encoding Encoding written chunks are assumed to use
|
||||
*/
|
||||
export const replace = baseFunctions.replace;
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk
|
||||
* must be a fully defined JSON string in utf8.
|
||||
*/
|
||||
export const parse = baseFunctions.parse;
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that stringifies the streamed chunks to JSON
|
||||
* @param options?
|
||||
* @param options.pretty If true, whitespace is inserted into the stringified chunks.
|
||||
*
|
||||
*/
|
||||
export const stringify = baseFunctions.stringify;
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that collects streamed chunks into an array or buffer
|
||||
* @param options?
|
||||
* @param options.objectMode? Whether this stream should behave as a stream of objects
|
||||
*/
|
||||
export const collect = baseFunctions.collect;
|
||||
|
||||
/**
|
||||
* Return a Readable stream of readable streams concatenated together
|
||||
* @param streams Readable streams to concatenate
|
||||
*/
|
||||
export const concat = baseFunctions.concat;
|
||||
|
||||
/**
|
||||
* Return a Readable stream of readable streams concatenated together
|
||||
* @param streams Readable streams to merge
|
||||
*/
|
||||
export const merge = baseFunctions.merge;
|
||||
|
||||
/**
|
||||
* Return a Duplex stream from a writable stream that is assumed to somehow, when written to,
|
||||
* cause the given readable stream to yield chunks
|
||||
* @param writable Writable stream assumed to cause the readable stream to yield chunks when written to
|
||||
* @param readable Readable stream assumed to yield chunks when the writable stream is written to
|
||||
*/
|
||||
export const duplex = baseFunctions.duplex;
|
||||
|
||||
/**
|
||||
* Return a Duplex stream from a child process' stdin and stdout
|
||||
* @param childProcess Child process from which to create duplex stream
|
||||
*/
|
||||
export const child = baseFunctions.child;
|
||||
|
||||
/**
|
||||
* Return a Promise resolving to the last streamed chunk of the given readable stream, after it has
|
||||
* ended
|
||||
* @param readable Readable stream to wait on
|
||||
*/
|
||||
export const last = baseFunctions.last;
|
||||
|
||||
/**
|
||||
* Stores chunks of data internally in array and batches when batchSize is reached.
|
||||
* @param batchSize Size of the batches, defaults to 1000.
|
||||
* @param maxBatchAge? Max lifetime of a batch, defaults to 500
|
||||
*/
|
||||
export function batch(batchSize?: number, maxBatchAge?: number): Transform {
|
||||
return baseFunctions.batch(batchSize, maxBatchAge);
|
||||
}
|
||||
|
||||
/**
|
||||
* Unbatches and sends individual chunks of data.
|
||||
*/
|
||||
export const unbatch = baseFunctions.unbatch;
|
||||
|
||||
/**
|
||||
* Limits rate of data transferred into stream.
|
||||
* @param options?
|
||||
* @param targetRate? Desired rate in ms.
|
||||
* @param period? Period to sleep for when rate is above or equal to targetRate.
|
||||
*/
|
||||
export function rate(targetRate?: number, period?: number): Transform {
|
||||
return baseFunctions.rate(targetRate, period);
|
||||
}
|
||||
|
||||
/**
|
||||
* Limits number of parallel processes in flight.
|
||||
* @param parallel Max number of parallel processes.
|
||||
* @param func Function to execute on each data chunk.
|
||||
* @param pause Amount of time to pause processing when max number of parallel processes are executing.
|
||||
*/
|
||||
export const parallelMap = baseFunctions.parallelMap;
|
||||
|
||||
/**
|
||||
* Accummulates and sends batches of data. Each chunk that flows into the stream is checked against items
|
||||
* in the buffer. How the buffer is mutated is based on 1 of 2 possible buffering strategies:
|
||||
* 1. Sliding
|
||||
* - If the buffer is larger than the batchSize, the front of the buffer is popped to maintain
|
||||
* the batchSize. When no key is provided, the batchSize is effectively the buffer length. When
|
||||
* a key is provided, the batchSize is based on the value at that key. For example, given a key
|
||||
* of `timestamp` and a batchSize of 3000, each item in the buffer will be guaranteed to be
|
||||
* within 3000 timestamp units from the first element. This means that with a key, multiple elements
|
||||
* may be spliced off the front of the buffer. The buffer is then pushed into the stream.
|
||||
* 2. Rolling
|
||||
* - If the buffer is larger than the batchSize, the buffer is cleared and pushed into the stream.
|
||||
* When no key is provided, the batchSize is the buffer length. When a key is provided, the batchSize
|
||||
* is based on the value at that key. For example, given a key of `timestamp` and a batchSize of 3000,
|
||||
* each item in the buffer will be guaranteed to be within 3000 timestamp units from the first element.
|
||||
* @param flushStrategy Buffering strategy to use.
|
||||
* @param batchSize Size of the batch (in units of buffer length or value at key).
|
||||
* @param batchRate Desired rate of data transfer to next stream.
|
||||
* @param keyBy Key to determine if element fits into buffer or items need to be cleared from buffer.
|
||||
* @param options Transform stream options
|
||||
*/
|
||||
export const accumulator = baseFunctions.accumulator;
|
||||
|
||||
/**
|
||||
* Accummulates and sends batches of data. Each chunk that flows into the stream is checked against items
|
||||
* in the buffer. How the buffer is mutated is based on 1 of 2 possible buffering strategies:
|
||||
* 1. Sliding
|
||||
* - If the iteratee returns false, the front of the buffer is popped until iteratee returns true. The
|
||||
* item is pushed into the buffer and buffer is pushed into stream.
|
||||
* 2. Rolling
|
||||
* - If the iteratee returns false, the buffer is cleared and pushed into stream. The item is
|
||||
* then pushed into the buffer.
|
||||
* @param flushStrategy Buffering strategy to use.
|
||||
* @param iteratee Function applied to buffer when a chunk of data enters stream to determine if element fits into
|
||||
* or items need to be cleared from buffer.
|
||||
* @param batchRate Desired rate of data transfer to next stream.
|
||||
* @param options Transform stream options
|
||||
*/
|
||||
export const accumulatorBy = baseFunctions.accumulatorBy;
|
||||
|
||||
/**
|
||||
* Composes multiple streams together. Writing occurs on first stream, piping occurs from last stream.
|
||||
* @param streams Array of streams to compose. Minimum of two.
|
||||
* @param options Transform stream options
|
||||
*/
|
||||
export const compose = baseFunctions.compose;
|
||||
|
||||
/**
|
||||
* Composes multiple streams together. Writing occurs on first stream, piping occurs from last stream.
|
||||
* @param construct Constructor for new output source. Should return a Writable or ReadWrite stream.
|
||||
* @param demuxBy
|
||||
* @param demuxBy.key? Key to fetch value from source chunks to demultiplex source.
|
||||
* @param demuxBy.keyBy? Function to fetch value from source chunks to demultiplex source.
|
||||
* @param options Writable stream options
|
||||
*/
|
||||
export const demux = baseFunctions.demux;
|
26
src/functions/join.ts
Normal file
26
src/functions/join.ts
Normal file
@ -0,0 +1,26 @@
|
||||
import { Transform } from "stream";
|
||||
import { StringDecoder } from "string_decoder";
|
||||
import { WithEncoding } from "./baseDefinitions";
|
||||
|
||||
export function join(
|
||||
separator: string,
|
||||
options: WithEncoding = { encoding: "utf8" },
|
||||
): Transform {
|
||||
let isFirstChunk = true;
|
||||
const decoder = new StringDecoder(options.encoding);
|
||||
return new Transform({
|
||||
readableObjectMode: true,
|
||||
async transform(chunk: Buffer, encoding, callback) {
|
||||
const asString = decoder.write(chunk);
|
||||
// Take care not to break up multi-byte characters spanning multiple chunks
|
||||
if (asString !== "" || chunk.length === 0) {
|
||||
if (!isFirstChunk) {
|
||||
this.push(separator);
|
||||
}
|
||||
this.push(asString);
|
||||
isFirstChunk = false;
|
||||
}
|
||||
callback();
|
||||
},
|
||||
});
|
||||
}
|
8
src/functions/last.ts
Normal file
8
src/functions/last.ts
Normal file
@ -0,0 +1,8 @@
|
||||
export function last<T>(readable: NodeJS.ReadableStream): Promise<T | null> {
|
||||
let lastChunk: T | null = null;
|
||||
return new Promise((resolve, _) => {
|
||||
readable
|
||||
.on("data", chunk => (lastChunk = chunk))
|
||||
.on("end", () => resolve(lastChunk));
|
||||
});
|
||||
}
|
13
src/functions/map.ts
Normal file
13
src/functions/map.ts
Normal file
@ -0,0 +1,13 @@
|
||||
import { Transform, TransformOptions } from "stream";
|
||||
|
||||
export function map<T, R>(
|
||||
mapper: (chunk: T, encoding: string) => R,
|
||||
options: TransformOptions = { objectMode: true },
|
||||
): Transform {
|
||||
return new Transform({
|
||||
...options,
|
||||
async transform(chunk: T, encoding, callback) {
|
||||
callback(null, await mapper(chunk, encoding));
|
||||
},
|
||||
});
|
||||
}
|
33
src/functions/merge.ts
Normal file
33
src/functions/merge.ts
Normal file
@ -0,0 +1,33 @@
|
||||
import { Readable } from "stream";
|
||||
|
||||
export function merge(...streams: Readable[]): Readable {
|
||||
let isStarted = false;
|
||||
let streamEndedCount = 0;
|
||||
return new Readable({
|
||||
objectMode: true,
|
||||
read() {
|
||||
if (streamEndedCount >= streams.length) {
|
||||
this.push(null);
|
||||
} else if (!isStarted) {
|
||||
isStarted = true;
|
||||
streams.forEach(stream =>
|
||||
stream
|
||||
.on("data", chunk => {
|
||||
if (!this.push(chunk)) {
|
||||
streams.forEach(s => s.pause());
|
||||
}
|
||||
})
|
||||
.on("error", err => this.emit("error", err))
|
||||
.on("end", () => {
|
||||
streamEndedCount++;
|
||||
if (streamEndedCount === streams.length) {
|
||||
this.push(null);
|
||||
}
|
||||
}),
|
||||
);
|
||||
} else {
|
||||
streams.forEach(s => s.resume());
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
32
src/functions/parallelMap.ts
Normal file
32
src/functions/parallelMap.ts
Normal file
@ -0,0 +1,32 @@
|
||||
import { Transform, TransformOptions } from "stream";
|
||||
import { sleep } from "../helpers";
|
||||
|
||||
export function parallelMap<T, R>(
|
||||
mapper: (data: T) => R,
|
||||
parallel: number = 10,
|
||||
sleepTime: number = 5,
|
||||
options: TransformOptions = {
|
||||
objectMode: true,
|
||||
},
|
||||
) {
|
||||
let inflight = 0;
|
||||
return new Transform({
|
||||
...options,
|
||||
async transform(data, encoding, callback) {
|
||||
while (parallel <= inflight) {
|
||||
await sleep(sleepTime);
|
||||
}
|
||||
inflight += 1;
|
||||
callback();
|
||||
const res = await mapper(data);
|
||||
this.push(res);
|
||||
inflight -= 1;
|
||||
},
|
||||
async flush(callback) {
|
||||
while (inflight > 0) {
|
||||
await sleep(sleepTime);
|
||||
}
|
||||
callback();
|
||||
},
|
||||
});
|
||||
}
|
22
src/functions/parse.ts
Normal file
22
src/functions/parse.ts
Normal file
@ -0,0 +1,22 @@
|
||||
import { Transform } from "stream";
|
||||
import { StringDecoder } from "string_decoder";
|
||||
import { SerializationFormats } from "./baseDefinitions";
|
||||
|
||||
export function parse(
|
||||
format: SerializationFormats = SerializationFormats.utf8,
|
||||
): Transform {
|
||||
const decoder = new StringDecoder(format);
|
||||
return new Transform({
|
||||
readableObjectMode: true,
|
||||
writableObjectMode: true,
|
||||
async transform(chunk: Buffer, encoding, callback) {
|
||||
try {
|
||||
const asString = decoder.write(chunk);
|
||||
// Using await causes parsing errors to be emitted
|
||||
callback(undefined, await JSON.parse(asString));
|
||||
} catch (err) {
|
||||
callback(err);
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
26
src/functions/rate.ts
Normal file
26
src/functions/rate.ts
Normal file
@ -0,0 +1,26 @@
|
||||
import { Transform, TransformOptions } from "stream";
|
||||
import { performance } from "perf_hooks";
|
||||
import { sleep } from "../helpers";
|
||||
|
||||
export function rate(
|
||||
targetRate: number = 50,
|
||||
period: number = 1,
|
||||
options: TransformOptions = {
|
||||
objectMode: true,
|
||||
},
|
||||
): Transform {
|
||||
const deltaMS = ((1 / targetRate) * 1000) / period; // Skip a full period
|
||||
let total = 0;
|
||||
const start = performance.now();
|
||||
return new Transform({
|
||||
...options,
|
||||
async transform(data, encoding, callback) {
|
||||
const currentRate = (total / (performance.now() - start)) * 1000;
|
||||
if (targetRate && currentRate > targetRate) {
|
||||
await sleep(deltaMS);
|
||||
}
|
||||
total += 1;
|
||||
callback(undefined, data);
|
||||
},
|
||||
});
|
||||
}
|
33
src/functions/reduce.ts
Normal file
33
src/functions/reduce.ts
Normal file
@ -0,0 +1,33 @@
|
||||
import { Transform, TransformOptions } from "stream";
|
||||
|
||||
export function reduce<T, R>(
|
||||
iteratee:
|
||||
| ((previousValue: R, chunk: T, encoding: string) => R)
|
||||
| ((previousValue: R, chunk: T, encoding: string) => Promise<R>),
|
||||
initialValue: R,
|
||||
options: TransformOptions = {
|
||||
objectMode: true,
|
||||
},
|
||||
) {
|
||||
let value = initialValue;
|
||||
return new Transform({
|
||||
...options,
|
||||
async transform(chunk: T, encoding, callback) {
|
||||
value = await iteratee(value, chunk, encoding);
|
||||
callback();
|
||||
},
|
||||
flush(callback) {
|
||||
// Best effort attempt at yielding the final value (will throw if e.g. yielding an object and
|
||||
// downstream doesn't expect objects)
|
||||
try {
|
||||
callback(undefined, value);
|
||||
} catch (err) {
|
||||
try {
|
||||
this.emit("error", err);
|
||||
} catch {
|
||||
// Best effort was made
|
||||
}
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
26
src/functions/replace.ts
Normal file
26
src/functions/replace.ts
Normal file
@ -0,0 +1,26 @@
|
||||
import { Transform } from "stream";
|
||||
import { StringDecoder } from "string_decoder";
|
||||
import { WithEncoding } from "./baseDefinitions";
|
||||
|
||||
export function replace(
|
||||
searchValue: string | RegExp,
|
||||
replaceValue: string,
|
||||
options: WithEncoding = { encoding: "utf8" },
|
||||
): Transform {
|
||||
const decoder = new StringDecoder(options.encoding);
|
||||
return new Transform({
|
||||
readableObjectMode: true,
|
||||
transform(chunk: Buffer, encoding, callback) {
|
||||
const asString = decoder.write(chunk);
|
||||
// Take care not to break up multi-byte characters spanning multiple chunks
|
||||
if (asString !== "" || chunk.length === 0) {
|
||||
callback(
|
||||
undefined,
|
||||
asString.replace(searchValue, replaceValue),
|
||||
);
|
||||
} else {
|
||||
callback();
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
29
src/functions/split.ts
Normal file
29
src/functions/split.ts
Normal file
@ -0,0 +1,29 @@
|
||||
import { Transform } from "stream";
|
||||
import { StringDecoder } from "string_decoder";
|
||||
import { WithEncoding } from "./baseDefinitions";
|
||||
|
||||
export function split(
|
||||
separator: string | RegExp = "\n",
|
||||
options: WithEncoding = { encoding: "utf8" },
|
||||
): Transform {
|
||||
let buffered = "";
|
||||
const decoder = new StringDecoder(options.encoding);
|
||||
|
||||
return new Transform({
|
||||
readableObjectMode: true,
|
||||
transform(chunk: Buffer, encoding, callback) {
|
||||
const asString = decoder.write(chunk);
|
||||
const splitted = asString.split(separator);
|
||||
if (splitted.length > 1) {
|
||||
splitted[0] = buffered.concat(splitted[0]);
|
||||
buffered = "";
|
||||
}
|
||||
buffered += splitted[splitted.length - 1];
|
||||
splitted.slice(0, -1).forEach((part: string) => this.push(part));
|
||||
callback();
|
||||
},
|
||||
flush(callback) {
|
||||
callback(undefined, buffered + decoder.end());
|
||||
},
|
||||
});
|
||||
}
|
19
src/functions/stringify.ts
Normal file
19
src/functions/stringify.ts
Normal file
@ -0,0 +1,19 @@
|
||||
import { Transform } from "stream";
|
||||
import { JsonValue, JsonParseOptions } from "./baseDefinitions";
|
||||
|
||||
export function stringify(
|
||||
options: JsonParseOptions = { pretty: false },
|
||||
): Transform {
|
||||
return new Transform({
|
||||
readableObjectMode: true,
|
||||
writableObjectMode: true,
|
||||
transform(chunk: JsonValue, encoding, callback) {
|
||||
callback(
|
||||
undefined,
|
||||
options.pretty
|
||||
? JSON.stringify(chunk, null, 2)
|
||||
: JSON.stringify(chunk),
|
||||
);
|
||||
},
|
||||
});
|
||||
}
|
17
src/functions/unbatch.ts
Normal file
17
src/functions/unbatch.ts
Normal file
@ -0,0 +1,17 @@
|
||||
import { Transform, TransformOptions } from "stream";
|
||||
|
||||
export function unbatch(
|
||||
options: TransformOptions = {
|
||||
objectMode: true,
|
||||
},
|
||||
) {
|
||||
return new Transform({
|
||||
...options,
|
||||
transform(data, encoding, callback) {
|
||||
for (const d of data) {
|
||||
this.push(d);
|
||||
}
|
||||
callback();
|
||||
},
|
||||
});
|
||||
}
|
3
src/helpers.ts
Normal file
3
src/helpers.ts
Normal file
@ -0,0 +1,3 @@
|
||||
export async function sleep(time: number): Promise<{} | null> {
|
||||
return time > 0 ? new Promise(resolve => setTimeout(resolve, time)) : null;
|
||||
}
|
1182
src/index.spec.ts
1182
src/index.spec.ts
File diff suppressed because it is too large
Load Diff
527
src/index.ts
527
src/index.ts
@ -1,501 +1,26 @@
|
||||
import { Transform, Readable, Writable, Duplex } from "stream";
|
||||
import { ChildProcess } from "child_process";
|
||||
import { StringDecoder } from "string_decoder";
|
||||
|
||||
export interface ThroughOptions {
|
||||
objectMode?: boolean;
|
||||
}
|
||||
export interface TransformOptions {
|
||||
readableObjectMode?: boolean;
|
||||
writableObjectMode?: boolean;
|
||||
}
|
||||
export interface WithEncoding {
|
||||
encoding: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert an array into a Readable stream of its elements
|
||||
* @param array Array of elements to stream
|
||||
*/
|
||||
export function fromArray(array: any[]): NodeJS.ReadableStream {
|
||||
let cursor = 0;
|
||||
return new Readable({
|
||||
objectMode: true,
|
||||
read() {
|
||||
if (cursor < array.length) {
|
||||
this.push(array[cursor]);
|
||||
cursor++;
|
||||
} else {
|
||||
this.push(null);
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that maps streamed chunks
|
||||
* @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such)
|
||||
* @param options
|
||||
* @param options.readableObjectMode Whether this stream should behave as a readable stream of objects
|
||||
* @param options.writableObjectMode Whether this stream should behave as a writable stream of objects
|
||||
*/
|
||||
export function map<T, R>(
|
||||
mapper: (chunk: T, encoding: string) => R,
|
||||
options: TransformOptions = {
|
||||
readableObjectMode: true,
|
||||
writableObjectMode: true,
|
||||
},
|
||||
): NodeJS.ReadWriteStream {
|
||||
return new Transform({
|
||||
...options,
|
||||
async transform(chunk: T, encoding, callback) {
|
||||
let isPromise = false;
|
||||
try {
|
||||
const mapped = mapper(chunk, encoding);
|
||||
isPromise = mapped instanceof Promise;
|
||||
callback(undefined, await mapped);
|
||||
} catch (err) {
|
||||
if (isPromise) {
|
||||
// Calling the callback asynchronously with an error wouldn't emit the error, so emit directly
|
||||
this.emit("error", err);
|
||||
callback();
|
||||
} else {
|
||||
callback(err);
|
||||
}
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that flat maps streamed chunks
|
||||
* @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such)
|
||||
* @param options
|
||||
* @param options.readableObjectMode Whether this stream should behave as a readable stream of objects
|
||||
* @param options.writableObjectMode Whether this stream should behave as a writable stream of objects
|
||||
*/
|
||||
export function flatMap<T, R>(
|
||||
mapper:
|
||||
| ((chunk: T, encoding: string) => R[])
|
||||
| ((chunk: T, encoding: string) => Promise<R[]>),
|
||||
options: TransformOptions = {
|
||||
readableObjectMode: true,
|
||||
writableObjectMode: true,
|
||||
},
|
||||
): NodeJS.ReadWriteStream {
|
||||
return new Transform({
|
||||
...options,
|
||||
async transform(chunk: T, encoding, callback) {
|
||||
let isPromise = false;
|
||||
try {
|
||||
const mapped = mapper(chunk, encoding);
|
||||
isPromise = mapped instanceof Promise;
|
||||
(await mapped).forEach(c => this.push(c));
|
||||
callback();
|
||||
} catch (err) {
|
||||
if (isPromise) {
|
||||
// Calling the callback asynchronously with an error wouldn't emit the error, so emit directly
|
||||
this.emit("error", err);
|
||||
callback();
|
||||
} else {
|
||||
callback(err);
|
||||
}
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold
|
||||
* @param predicate Predicate with which to filter scream chunks
|
||||
* @param options
|
||||
* @param options.objectMode Whether this stream should behave as a stream of objects
|
||||
*/
|
||||
export function filter<T>(
|
||||
predicate:
|
||||
| ((chunk: T, encoding: string) => boolean)
|
||||
| ((chunk: T, encoding: string) => Promise<boolean>),
|
||||
options: ThroughOptions = {
|
||||
objectMode: true,
|
||||
},
|
||||
) {
|
||||
return new Transform({
|
||||
readableObjectMode: options.objectMode,
|
||||
writableObjectMode: options.objectMode,
|
||||
async transform(chunk: T, encoding, callback) {
|
||||
let isPromise = false;
|
||||
try {
|
||||
const result = predicate(chunk, encoding);
|
||||
isPromise = result instanceof Promise;
|
||||
if (!!(await result)) {
|
||||
callback(undefined, chunk);
|
||||
} else {
|
||||
callback();
|
||||
}
|
||||
} catch (err) {
|
||||
if (isPromise) {
|
||||
// Calling the callback asynchronously with an error wouldn't emit the error, so emit directly
|
||||
this.emit("error", err);
|
||||
callback();
|
||||
} else {
|
||||
callback(err);
|
||||
}
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that reduces streamed chunks down to a single value and yield that
|
||||
* value
|
||||
* @param iteratee Reducer function to apply on each streamed chunk
|
||||
* @param initialValue Initial value
|
||||
* @param options
|
||||
* @param options.readableObjectMode Whether this stream should behave as a readable stream of objects
|
||||
* @param options.writableObjectMode Whether this stream should behave as a writable stream of objects
|
||||
*/
|
||||
export function reduce<T, R>(
|
||||
iteratee:
|
||||
| ((previousValue: R, chunk: T, encoding: string) => R)
|
||||
| ((previousValue: R, chunk: T, encoding: string) => Promise<R>),
|
||||
initialValue: R,
|
||||
options: TransformOptions = {
|
||||
readableObjectMode: true,
|
||||
writableObjectMode: true,
|
||||
},
|
||||
) {
|
||||
let value = initialValue;
|
||||
return new Transform({
|
||||
readableObjectMode: options.readableObjectMode,
|
||||
writableObjectMode: options.writableObjectMode,
|
||||
async transform(chunk: T, encoding, callback) {
|
||||
let isPromise = false;
|
||||
try {
|
||||
const result = iteratee(value, chunk, encoding);
|
||||
isPromise = result instanceof Promise;
|
||||
value = await result;
|
||||
callback();
|
||||
} catch (err) {
|
||||
if (isPromise) {
|
||||
// Calling the callback asynchronously with an error wouldn't emit the error, so emit directly
|
||||
this.emit("error", err);
|
||||
callback();
|
||||
} else {
|
||||
callback(err);
|
||||
}
|
||||
}
|
||||
},
|
||||
flush(callback) {
|
||||
// Best effort attempt at yielding the final value (will throw if e.g. yielding an object and
|
||||
// downstream doesn't expect objects)
|
||||
try {
|
||||
callback(undefined, value);
|
||||
} catch (err) {
|
||||
try {
|
||||
this.emit("error", err);
|
||||
} catch {
|
||||
// Best effort was made
|
||||
}
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that splits streamed chunks using the given separator
|
||||
* @param separator Separator to split by, defaulting to "\n"
|
||||
* @param options
|
||||
* @param options.encoding Encoding written chunks are assumed to use
|
||||
*/
|
||||
export function split(
|
||||
separator: string | RegExp = "\n",
|
||||
options: WithEncoding = { encoding: "utf8" },
|
||||
): NodeJS.ReadWriteStream {
|
||||
let buffered = "";
|
||||
const decoder = new StringDecoder(options.encoding);
|
||||
|
||||
return new Transform({
|
||||
readableObjectMode: true,
|
||||
transform(chunk: Buffer, encoding, callback) {
|
||||
const asString = decoder.write(chunk);
|
||||
const splitted = asString.split(separator);
|
||||
if (splitted.length > 1) {
|
||||
splitted[0] = buffered.concat(splitted[0]);
|
||||
buffered = "";
|
||||
}
|
||||
buffered += splitted[splitted.length - 1];
|
||||
splitted.slice(0, -1).forEach((part: string) => this.push(part));
|
||||
callback();
|
||||
},
|
||||
flush(callback) {
|
||||
callback(undefined, buffered + decoder.end());
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that joins streamed chunks using the given separator
|
||||
* @param separator Separator to join with
|
||||
* @param options
|
||||
* @param options.encoding Encoding written chunks are assumed to use
|
||||
*/
|
||||
export function join(
|
||||
separator: string,
|
||||
options: WithEncoding = { encoding: "utf8" },
|
||||
): NodeJS.ReadWriteStream {
|
||||
let isFirstChunk = true;
|
||||
const decoder = new StringDecoder(options.encoding);
|
||||
return new Transform({
|
||||
readableObjectMode: true,
|
||||
async transform(chunk: Buffer, encoding, callback) {
|
||||
const asString = decoder.write(chunk);
|
||||
// Take care not to break up multi-byte characters spanning multiple chunks
|
||||
if (asString !== "" || chunk.length === 0) {
|
||||
if (!isFirstChunk) {
|
||||
this.push(separator);
|
||||
}
|
||||
this.push(asString);
|
||||
isFirstChunk = false;
|
||||
}
|
||||
callback();
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that replaces occurrences of the given string or regular expression in
|
||||
* the streamed chunks with the specified replacement string
|
||||
* @param searchValue Search string to use
|
||||
* @param replaceValue Replacement string to use
|
||||
* @param options
|
||||
* @param options.encoding Encoding written chunks are assumed to use
|
||||
*/
|
||||
export function replace(
|
||||
searchValue: string | RegExp,
|
||||
replaceValue: string,
|
||||
options: WithEncoding = { encoding: "utf8" },
|
||||
): NodeJS.ReadWriteStream {
|
||||
const decoder = new StringDecoder(options.encoding);
|
||||
return new Transform({
|
||||
readableObjectMode: true,
|
||||
transform(chunk: Buffer, encoding, callback) {
|
||||
const asString = decoder.write(chunk);
|
||||
// Take care not to break up multi-byte characters spanning multiple chunks
|
||||
if (asString !== "" || chunk.length === 0) {
|
||||
callback(
|
||||
undefined,
|
||||
asString.replace(searchValue, replaceValue),
|
||||
);
|
||||
} else {
|
||||
callback();
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk
|
||||
* must be a fully defined JSON string.
|
||||
*/
|
||||
export function parse(): NodeJS.ReadWriteStream {
|
||||
const decoder = new StringDecoder("utf8"); // JSON must be utf8
|
||||
return new Transform({
|
||||
readableObjectMode: true,
|
||||
writableObjectMode: true,
|
||||
async transform(chunk: Buffer, encoding, callback) {
|
||||
try {
|
||||
const asString = decoder.write(chunk);
|
||||
// Using await causes parsing errors to be emitted
|
||||
callback(undefined, await JSON.parse(asString));
|
||||
} catch (err) {
|
||||
callback(err);
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
type JsonPrimitive = string | number | object;
|
||||
type JsonValue = JsonPrimitive | JsonPrimitive[];
|
||||
interface JsonParseOptions {
|
||||
pretty: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that stringifies the streamed chunks to JSON
|
||||
*/
|
||||
export function stringify(
|
||||
options: JsonParseOptions = { pretty: false },
|
||||
): NodeJS.ReadWriteStream {
|
||||
return new Transform({
|
||||
readableObjectMode: true,
|
||||
writableObjectMode: true,
|
||||
transform(chunk: JsonValue, encoding, callback) {
|
||||
callback(
|
||||
undefined,
|
||||
options.pretty
|
||||
? JSON.stringify(chunk, null, 2)
|
||||
: JSON.stringify(chunk),
|
||||
);
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a ReadWrite stream that collects streamed chunks into an array or buffer
|
||||
* @param options
|
||||
* @param options.objectMode Whether this stream should behave as a stream of objects
|
||||
*/
|
||||
export function collect(
|
||||
options: ThroughOptions = { objectMode: false },
|
||||
): NodeJS.ReadWriteStream {
|
||||
const collected: any[] = [];
|
||||
return new Transform({
|
||||
readableObjectMode: options.objectMode,
|
||||
writableObjectMode: options.objectMode,
|
||||
transform(data, encoding, callback) {
|
||||
collected.push(data);
|
||||
callback();
|
||||
},
|
||||
flush(callback) {
|
||||
this.push(
|
||||
options.objectMode ? collected : Buffer.concat(collected),
|
||||
);
|
||||
callback();
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a Readable stream of readable streams concatenated together
|
||||
* @param streams Readable streams to concatenate
|
||||
*/
|
||||
export function concat(
|
||||
...streams: NodeJS.ReadableStream[]
|
||||
): NodeJS.ReadableStream {
|
||||
let isStarted = false;
|
||||
let currentStreamIndex = 0;
|
||||
const startCurrentStream = () => {
|
||||
if (currentStreamIndex >= streams.length) {
|
||||
wrapper.push(null);
|
||||
} else {
|
||||
streams[currentStreamIndex]
|
||||
.on("data", chunk => {
|
||||
if (!wrapper.push(chunk)) {
|
||||
streams[currentStreamIndex].pause();
|
||||
}
|
||||
})
|
||||
.on("error", err => wrapper.emit("error", err))
|
||||
.on("end", () => {
|
||||
currentStreamIndex++;
|
||||
startCurrentStream();
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const wrapper = new Readable({
|
||||
objectMode: true,
|
||||
read() {
|
||||
if (!isStarted) {
|
||||
isStarted = true;
|
||||
startCurrentStream();
|
||||
}
|
||||
if (currentStreamIndex < streams.length) {
|
||||
streams[currentStreamIndex].resume();
|
||||
}
|
||||
},
|
||||
});
|
||||
return wrapper;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a Readable stream of readable streams merged together in chunk arrival order
|
||||
* @param streams Readable streams to merge
|
||||
*/
|
||||
export function merge(
|
||||
...streams: NodeJS.ReadableStream[]
|
||||
): NodeJS.ReadableStream {
|
||||
let isStarted = false;
|
||||
let streamEndedCount = 0;
|
||||
return new Readable({
|
||||
objectMode: true,
|
||||
read() {
|
||||
if (streamEndedCount >= streams.length) {
|
||||
this.push(null);
|
||||
} else if (!isStarted) {
|
||||
isStarted = true;
|
||||
streams.forEach(stream =>
|
||||
stream
|
||||
.on("data", chunk => {
|
||||
if (!this.push(chunk)) {
|
||||
streams.forEach(s => s.pause());
|
||||
}
|
||||
})
|
||||
.on("error", err => this.emit("error", err))
|
||||
.on("end", () => {
|
||||
streamEndedCount++;
|
||||
if (streamEndedCount === streams.length) {
|
||||
this.push(null);
|
||||
}
|
||||
}),
|
||||
);
|
||||
} else {
|
||||
streams.forEach(s => s.resume());
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a Duplex stream from a writable stream that is assumed to somehow, when written to,
|
||||
* cause the given readable stream to yield chunks
|
||||
* @param writable Writable stream assumed to cause the readable stream to yield chunks when written to
|
||||
* @param readable Readable stream assumed to yield chunks when the writable stream is written to
|
||||
*/
|
||||
export function duplex(writable: Writable, readable: Readable) {
|
||||
const wrapper = new Duplex({
|
||||
readableObjectMode: true,
|
||||
writableObjectMode: true,
|
||||
read() {
|
||||
readable.resume();
|
||||
},
|
||||
write(chunk, encoding, callback) {
|
||||
return writable.write(chunk, encoding, callback);
|
||||
},
|
||||
final(callback) {
|
||||
writable.end(callback);
|
||||
},
|
||||
});
|
||||
readable
|
||||
.on("data", chunk => {
|
||||
if (!wrapper.push(chunk)) {
|
||||
readable.pause();
|
||||
}
|
||||
})
|
||||
.on("error", err => wrapper.emit("error", err))
|
||||
.on("end", () => wrapper.push(null));
|
||||
writable.on("drain", () => wrapper.emit("drain"));
|
||||
writable.on("error", err => wrapper.emit("error", err));
|
||||
return wrapper;
|
||||
}
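A hedged usage sketch of duplex(), mirroring the duplex() test further down: the writable side is a child process's stdin and the readable side its stdout:

import * as cp from "child_process";
import { Readable } from "stream";
import { duplex } from "@jogogo/mhysa"; // assumed package entry point

// Everything written to the wrapper goes to `cat`'s stdin; whatever `cat` prints is read back out.
const catProcess = cp.exec("cat");
const source = new Readable({ read() {} });
source
    .pipe(duplex(catProcess.stdin!, catProcess.stdout!))
    .on("data", chunk => process.stdout.write(chunk))
    .on("error", console.error);
source.push("hello\n");
source.push(null);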
|
||||
|
||||
/**
|
||||
* Return a Duplex stream from a child process' stdin and stdout
|
||||
* @param childProcess Child process from which to create duplex stream
|
||||
*/
|
||||
export function child(childProcess: ChildProcess) {
|
||||
return duplex(childProcess.stdin, childProcess.stdout);
|
||||
}
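A hedged usage sketch of child(); `sort` is used here instead of `cat` so the round trip is visible in the output:

import * as cp from "child_process";
import { Readable } from "stream";
import { child } from "@jogogo/mhysa"; // assumed package entry point

// `sort` only prints once its stdin closes, i.e. after source.push(null) propagates through the wrapper.
const sortProcess = cp.exec("sort");
const source = new Readable({ read() {} });
source
    .pipe(child(sortProcess))
    .on("data", chunk => process.stdout.write(chunk)) // "apple" before "banana"
    .on("error", console.error);
source.push("banana\n");
source.push("apple\n");
source.push(null);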
|
||||
|
||||
/**
|
||||
* Return a Promise resolving to the last streamed chunk of the given readable stream, after it has
|
||||
* ended
|
||||
* @param readable Readable stream to wait on
|
||||
*/
|
||||
export function last<T>(readable: Readable): Promise<T | null> {
|
||||
let lastChunk: T | null = null;
|
||||
return new Promise((resolve, reject) => {
|
||||
readable
|
||||
.on("data", chunk => (lastChunk = chunk))
|
||||
.on("end", () => resolve(lastChunk));
|
||||
});
|
||||
}
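A hedged usage sketch of last(); the import specifier is assumed from package.json:

import { Readable } from "stream";
import { last } from "@jogogo/mhysa"; // assumed package entry point

// Resolves with the final chunk ("c") once the source has ended.
async function main() {
    const source = new Readable({ objectMode: true, read() {} });
    source.push("a");
    source.push("b");
    source.push("c");
    source.push(null);
    console.log(await last<string>(source));
}
main().catch(console.error);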
|
||||
export {
|
||||
fromArray,
|
||||
map,
|
||||
flatMap,
|
||||
filter,
|
||||
reduce,
|
||||
split,
|
||||
join,
|
||||
replace,
|
||||
parse,
|
||||
stringify,
|
||||
collect,
|
||||
concat,
|
||||
merge,
|
||||
duplex,
|
||||
child,
|
||||
last,
|
||||
batch,
|
||||
unbatch,
|
||||
rate,
|
||||
parallelMap,
|
||||
accumulator,
|
||||
accumulatorBy,
|
||||
compose,
|
||||
demux,
|
||||
} from "./functions";
|
||||
|
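Since the exported helpers share the same object-mode stream interface, they chain directly with pipe(); a hedged end-to-end sketch using only signatures that appear in the tests below (import specifier assumed from package.json):

import { Readable } from "stream";
import { map, collect } from "@jogogo/mhysa"; // assumed package entry point

// Uppercase each chunk, then gather the results into one array.
const source = new Readable({ objectMode: true, read() {} });
source
    .pipe(map((s: string) => s.toUpperCase(), { objectMode: true }))
    .pipe(collect({ objectMode: true }))
    .on("data", result => console.log(result)) // ["A", "B"]
    .on("error", console.error);
source.push("a");
source.push("b");
source.push(null);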
556
tests/accumulator.spec.ts
Normal file
@ -0,0 +1,556 @@
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { Readable } from "stream";
|
||||
import { accumulator, accumulatorBy } from "../src";
|
||||
import { FlushStrategy } from "../src/functions/accumulator";
|
||||
import { performance } from "perf_hooks";
|
||||
|
||||
test.cb("accumulator() rolling", t => {
|
||||
t.plan(3);
|
||||
let chunkIndex = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const firstFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
|
||||
const secondFlush = [{ ts: 2, key: "d" }, { ts: 3, key: "e" }];
|
||||
const thirdFlush = [{ ts: 4, key: "f" }];
|
||||
const flushes = [firstFlush, secondFlush, thirdFlush];
|
||||
|
||||
source
|
||||
.pipe(
|
||||
accumulator(FlushStrategy.rolling, 2, undefined, {
|
||||
objectMode: true,
|
||||
}),
|
||||
)
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
t.deepEqual(flush, flushes[chunkIndex]);
|
||||
chunkIndex++;
|
||||
})
|
||||
.on("error", (e: any) => {
|
||||
t.end(e);
|
||||
})
|
||||
.on("end", t.end);
|
||||
[...firstFlush, ...secondFlush, ...thirdFlush].forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb("accumulator() rolling with key", t => {
|
||||
t.plan(2);
|
||||
let chunkIndex = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const firstFlush = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ ts: 2, key: "d" },
|
||||
];
|
||||
const secondFlush = [{ ts: 3, key: "e" }];
|
||||
const flushes = [firstFlush, secondFlush];
|
||||
|
||||
source
|
||||
.pipe(accumulator(FlushStrategy.rolling, 3, "ts", { objectMode: true }))
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
t.deepEqual(flush, flushes[chunkIndex]);
|
||||
chunkIndex++;
|
||||
})
|
||||
.on("error", (e: any) => {
|
||||
t.end(e);
|
||||
})
|
||||
.on("end", t.end);
|
||||
[...firstFlush, ...secondFlush].forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb(
|
||||
"accumulator() rolling should emit error and ignore chunk when its missing key",
|
||||
t => {
|
||||
t.plan(2);
|
||||
let index = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const accumulatorStream = accumulator(
|
||||
FlushStrategy.rolling,
|
||||
3,
|
||||
"nonExistingKey",
|
||||
{ objectMode: true },
|
||||
);
|
||||
const input = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
|
||||
|
||||
source
|
||||
.pipe(accumulatorStream)
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
// No valid data output
|
||||
expect(flush).to.deep.equal([]);
|
||||
})
|
||||
.on("error", (err: any) => {
|
||||
source.pipe(accumulatorStream);
|
||||
accumulatorStream.resume();
|
||||
expect(err.message).to.equal(
|
||||
`Key is missing in event: (nonExistingKey, ${JSON.stringify(
|
||||
input[index],
|
||||
)})`,
|
||||
);
|
||||
index++;
|
||||
t.pass();
|
||||
})
|
||||
.on("end", t.end);
|
||||
input.forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"accumulator() rolling should emit error, ignore chunk when key is missing and continue processing chunks correctly",
|
||||
t => {
|
||||
t.plan(3);
|
||||
let chunkIndex = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const accumulatorStream = accumulator(FlushStrategy.rolling, 3, "ts", {
|
||||
objectMode: true,
|
||||
});
|
||||
const input = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ key: "d" },
|
||||
{ ts: 3, key: "e" },
|
||||
];
|
||||
const firstFlush = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
];
|
||||
const secondFlush = [{ ts: 3, key: "e" }];
|
||||
const flushes = [firstFlush, secondFlush];
|
||||
|
||||
source
|
||||
.pipe(accumulatorStream)
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
t.deepEqual(flush, flushes[chunkIndex]);
|
||||
chunkIndex++;
|
||||
})
|
||||
.on("error", (err: any) => {
|
||||
source.pipe(accumulatorStream);
|
||||
accumulatorStream.resume();
|
||||
expect(err.message).to.equal(
|
||||
`Key is missing in event: (ts, ${JSON.stringify(
|
||||
input[3],
|
||||
)})`,
|
||||
);
|
||||
t.pass();
|
||||
})
|
||||
.on("end", t.end);
|
||||
input.forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb("accumulator() sliding", t => {
|
||||
t.plan(4);
|
||||
let chunkIndex = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const input = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ ts: 4, key: "d" },
|
||||
];
|
||||
const firstFlush = [{ ts: 0, key: "a" }];
|
||||
const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
|
||||
const thirdFlush = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
];
|
||||
const fourthFlush = [
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ ts: 4, key: "d" },
|
||||
];
|
||||
|
||||
const flushes = [firstFlush, secondFlush, thirdFlush, fourthFlush];
|
||||
source
|
||||
.pipe(
|
||||
accumulator(FlushStrategy.sliding, 3, undefined, {
|
||||
objectMode: true,
|
||||
}),
|
||||
)
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
t.deepEqual(flush, flushes[chunkIndex]);
|
||||
chunkIndex++;
|
||||
})
|
||||
.on("error", (e: any) => {
|
||||
t.end(e);
|
||||
})
|
||||
.on("end", t.end);
|
||||
input.forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb("accumulator() sliding with key", t => {
|
||||
t.plan(6);
|
||||
let chunkIndex = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const input = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ ts: 3, key: "d" },
|
||||
{ ts: 5, key: "f" },
|
||||
{ ts: 6, key: "g" },
|
||||
];
|
||||
const firstFlush = [{ ts: 0, key: "a" }];
|
||||
const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
|
||||
const thirdFlush = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
];
|
||||
const fourthFlush = [
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ ts: 3, key: "d" },
|
||||
];
|
||||
const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }];
|
||||
const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }];
|
||||
|
||||
const flushes = [
|
||||
firstFlush,
|
||||
secondFlush,
|
||||
thirdFlush,
|
||||
fourthFlush,
|
||||
fifthFlush,
|
||||
sixthFlush,
|
||||
];
|
||||
source
|
||||
.pipe(accumulator(FlushStrategy.sliding, 3, "ts", { objectMode: true }))
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
t.deepEqual(flush, flushes[chunkIndex]);
|
||||
chunkIndex++;
|
||||
})
|
||||
.on("error", (e: any) => {
|
||||
t.end(e);
|
||||
})
|
||||
.on("end", t.end);
|
||||
input.forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb(
|
||||
"accumulator() sliding should emit error and ignore chunk when key is missing",
|
||||
t => {
|
||||
t.plan(2);
|
||||
let index = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const accumulatorStream = accumulator(
|
||||
FlushStrategy.sliding,
|
||||
3,
|
||||
"nonExistingKey",
|
||||
{ objectMode: true },
|
||||
);
|
||||
const input = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
|
||||
|
||||
source
|
||||
.pipe(accumulatorStream)
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
expect(flush).to.deep.equal([]);
|
||||
})
|
||||
.on("error", (err: any) => {
|
||||
source.pipe(accumulatorStream);
|
||||
accumulatorStream.resume();
|
||||
expect(err.message).to.equal(
|
||||
`Key is missing in event: (nonExistingKey, ${JSON.stringify(
|
||||
input[index],
|
||||
)})`,
|
||||
);
|
||||
index++;
|
||||
t.pass();
|
||||
})
|
||||
.on("end", t.end);
|
||||
input.forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"accumulator() sliding should emit error, ignore chunk when key is missing and continue processing chunks correctly",
|
||||
t => {
|
||||
t.plan(6);
|
||||
let chunkIndex = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const accumulatorStream = accumulator(FlushStrategy.sliding, 3, "ts", {
|
||||
objectMode: true,
|
||||
});
|
||||
const input = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ ts: 3, key: "d" },
|
||||
{ ts: 5, key: "f" },
|
||||
{ ts: 6, key: "g" },
|
||||
];
|
||||
const firstFlush = [{ ts: 0, key: "a" }];
|
||||
const secondFlush = [{ ts: 0, key: "a" }, { ts: 2, key: "c" }];
|
||||
const thirdFlush = [{ ts: 2, key: "c" }, { ts: 3, key: "d" }];
|
||||
const fourthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }];
|
||||
const fifthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }];
|
||||
|
||||
const flushes = [
|
||||
firstFlush,
|
||||
secondFlush,
|
||||
thirdFlush,
|
||||
fourthFlush,
|
||||
fifthFlush,
|
||||
];
|
||||
source
|
||||
.pipe(accumulatorStream)
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
t.deepEqual(flush, flushes[chunkIndex]);
|
||||
chunkIndex++;
|
||||
})
|
||||
.on("error", (err: any) => {
|
||||
source.pipe(accumulatorStream);
|
||||
accumulatorStream.resume();
|
||||
expect(err.message).to.equal(
|
||||
`Key is missing in event: (ts, ${JSON.stringify(
|
||||
input[1],
|
||||
)})`,
|
||||
);
|
||||
t.pass();
|
||||
})
|
||||
.on("end", t.end);
|
||||
input.forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb("accumulatorBy() rolling", t => {
|
||||
t.plan(2);
|
||||
let chunkIndex = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const firstFlush = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ ts: 2, key: "d" },
|
||||
];
|
||||
const secondFlush = [{ ts: 3, key: "e" }];
|
||||
const flushes = [firstFlush, secondFlush];
|
||||
|
||||
source
|
||||
.pipe(
|
||||
accumulatorBy(
|
||||
FlushStrategy.rolling,
|
||||
(event: TestObject, bufferChunk: TestObject) => {
|
||||
return bufferChunk.ts + 3 <= event.ts;
|
||||
},
|
||||
{ objectMode: true },
|
||||
),
|
||||
)
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
t.deepEqual(flush, flushes[chunkIndex]);
|
||||
chunkIndex++;
|
||||
})
|
||||
.on("error", (e: any) => {
|
||||
t.end(e);
|
||||
})
|
||||
.on("end", t.end);
|
||||
[...firstFlush, ...secondFlush].forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb.skip(
|
||||
"accumulatorBy() rolling should emit error when key iteratee throws",
|
||||
t => {
|
||||
t.plan(2);
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const input = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
];
|
||||
const accumulaterStream = accumulatorBy(
|
||||
FlushStrategy.rolling,
|
||||
(event: TestObject, bufferChunk: TestObject) => {
|
||||
if (event.key !== "a") {
|
||||
throw new Error("Failed mapping");
|
||||
}
|
||||
return bufferChunk.ts + 3 <= event.ts;
|
||||
},
|
||||
{ objectMode: true },
|
||||
);
|
||||
source
|
||||
.pipe(accumulaterStream)
|
||||
.on("error", (err: any) => {
|
||||
source.pipe(accumulaterStream);
|
||||
accumulaterStream.resume();
|
||||
expect(err.message).to.equal("Failed mapping");
|
||||
t.pass();
|
||||
})
|
||||
.on("end", t.end);
|
||||
|
||||
input.forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb("accumulatorBy() sliding", t => {
|
||||
t.plan(6);
|
||||
let chunkIndex = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const input = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ ts: 3, key: "d" },
|
||||
{ ts: 5, key: "f" },
|
||||
{ ts: 6, key: "g" },
|
||||
];
|
||||
const firstFlush = [{ ts: 0, key: "a" }];
|
||||
const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
|
||||
const thirdFlush = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
];
|
||||
const fourthFlush = [
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ ts: 3, key: "d" },
|
||||
];
|
||||
const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }];
|
||||
const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }];
|
||||
|
||||
const flushes = [
|
||||
firstFlush,
|
||||
secondFlush,
|
||||
thirdFlush,
|
||||
fourthFlush,
|
||||
fifthFlush,
|
||||
sixthFlush,
|
||||
];
|
||||
source
|
||||
.pipe(
|
||||
accumulatorBy(
|
||||
FlushStrategy.sliding,
|
||||
(event: TestObject, bufferChunk: TestObject) => {
|
||||
return bufferChunk.ts + 3 <= event.ts;
|
||||
},
|
||||
{ objectMode: true },
|
||||
),
|
||||
)
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
t.deepEqual(flush, flushes[chunkIndex]);
|
||||
chunkIndex++;
|
||||
})
|
||||
.on("error", (e: any) => {
|
||||
t.end(e);
|
||||
})
|
||||
.on("end", t.end);
|
||||
input.forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb.skip(
|
||||
"accumulatorBy() sliding should emit error when key iteratee throws",
|
||||
t => {
|
||||
t.plan(2);
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const input = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
];
|
||||
const accumulaterStream = accumulatorBy(
|
||||
FlushStrategy.sliding,
|
||||
(event: TestObject, bufferChunk: TestObject) => {
|
||||
if (event.key !== "a") {
|
||||
throw new Error("Failed mapping");
|
||||
}
|
||||
return bufferChunk.ts + 3 <= event.ts;
|
||||
},
|
||||
{ objectMode: true },
|
||||
);
|
||||
source
|
||||
.pipe(accumulaterStream)
|
||||
.on("error", (err: any) => {
|
||||
source.pipe(accumulaterStream);
|
||||
accumulaterStream.resume();
|
||||
expect(err.message).to.equal("Failed mapping");
|
||||
t.pass();
|
||||
})
|
||||
.on("end", t.end);
|
||||
|
||||
input.forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
},
|
||||
);
|
56
tests/batch.spec.ts
Normal file
@ -0,0 +1,56 @@
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { batch } from "../src";
|
||||
|
||||
test.cb("batch() batches chunks together", t => {
|
||||
t.plan(3);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const expectedElements = [["a", "b", "c"], ["d", "e", "f"], ["g"]];
|
||||
let i = 0;
|
||||
source
|
||||
.pipe(batch(3))
|
||||
.on("data", (element: string[]) => {
|
||||
t.deepEqual(element, expectedElements[i]);
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push("d");
|
||||
source.push("e");
|
||||
source.push("f");
|
||||
source.push("g");
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb("batch() yields a batch after the timeout", t => {
|
||||
t.plan(3);
|
||||
const source = new Readable({
|
||||
objectMode: true,
|
||||
read(size: number) {},
|
||||
});
|
||||
const expectedElements = [["a", "b"], ["c"], ["d"]];
|
||||
let i = 0;
|
||||
source
|
||||
.pipe(batch(3))
|
||||
.on("data", (element: string[]) => {
|
||||
t.deepEqual(element, expectedElements[i]);
|
||||
i++;
|
||||
})
|
||||
.on("error", t.fail)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
setTimeout(() => {
|
||||
source.push("c");
|
||||
}, 600);
|
||||
setTimeout(() => {
|
||||
source.push("d");
|
||||
source.push(null);
|
||||
}, 600 * 2);
|
||||
});
|
28
tests/child.spec.ts
Normal file
@ -0,0 +1,28 @@
|
||||
import * as cp from "child_process";
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { child } from "../src";
|
||||
|
||||
test.cb(
|
||||
"child() allows easily writing to child process stdin and reading from its stdout",
|
||||
t => {
|
||||
t.plan(1);
|
||||
const source = new Readable();
|
||||
const catProcess = cp.exec("cat");
|
||||
let out = "";
|
||||
source
|
||||
.pipe(child(catProcess))
|
||||
.on("data", chunk => (out += chunk))
|
||||
.on("error", t.end)
|
||||
.on("end", () => {
|
||||
expect(out).to.equal("abcdef");
|
||||
t.pass();
|
||||
t.end();
|
||||
});
|
||||
source.push("ab");
|
||||
source.push("cd");
|
||||
source.push("ef");
|
||||
source.push(null);
|
||||
},
|
||||
);
|
132
tests/collect.spec.ts
Normal file
@ -0,0 +1,132 @@
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { collect } from "../src";
|
||||
|
||||
test.cb(
|
||||
"collect() collects streamed elements into an array (object, flowing mode)",
|
||||
t => {
|
||||
t.plan(1);
|
||||
const source = new Readable({ objectMode: true });
|
||||
|
||||
source
|
||||
.pipe(collect({ objectMode: true }))
|
||||
.on("data", collected => {
|
||||
expect(collected).to.deep.equal(["a", "b", "c"]);
|
||||
t.pass();
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"collect() collects streamed elements into an array (object, paused mode)",
|
||||
t => {
|
||||
t.plan(1);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const collector = source.pipe(collect({ objectMode: true }));
|
||||
|
||||
collector
|
||||
.on("readable", () => {
|
||||
let collected = collector.read();
|
||||
while (collected !== null) {
|
||||
expect(collected).to.deep.equal(["a", "b", "c"]);
|
||||
t.pass();
|
||||
collected = collector.read();
|
||||
}
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"collect() collects streamed bytes into a buffer (non-object, flowing mode)",
|
||||
t => {
|
||||
t.plan(1);
|
||||
const source = new Readable({ objectMode: false });
|
||||
|
||||
source
|
||||
.pipe(collect())
|
||||
.on("data", collected => {
|
||||
expect(collected).to.deep.equal(Buffer.from("abc"));
|
||||
t.pass();
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"collect() collects streamed bytes into a buffer (non-object, paused mode)",
|
||||
t => {
|
||||
t.plan(1);
|
||||
const source = new Readable({ objectMode: false });
|
||||
const collector = source.pipe(collect({ objectMode: false }));
|
||||
collector
|
||||
.on("readable", () => {
|
||||
let collected = collector.read();
|
||||
while (collected !== null) {
|
||||
expect(collected).to.deep.equal(Buffer.from("abc"));
|
||||
t.pass();
|
||||
collected = collector.read();
|
||||
}
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"collect() emits an empty array if the source was empty (object mode)",
|
||||
t => {
|
||||
t.plan(1);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const collector = source.pipe(collect({ objectMode: true }));
|
||||
collector
|
||||
.on("data", collected => {
|
||||
expect(collected).to.deep.equal([]);
|
||||
t.pass();
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"collect() emits nothing if the source was empty (non-object mode)",
|
||||
t => {
|
||||
t.plan(0);
|
||||
const source = new Readable({ objectMode: false });
|
||||
const collector = source.pipe(collect({ objectMode: false }));
|
||||
collector
|
||||
.on("data", () => t.fail())
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push(null);
|
||||
},
|
||||
);
|
511
tests/compose.spec.ts
Normal file
@ -0,0 +1,511 @@
|
||||
const test = require("ava");
|
||||
const { expect } = require("chai");
|
||||
const { compose, map } = require("../src");
|
||||
const { sleep } = require("../src/helpers");
|
||||
import { performance } from "perf_hooks";
|
||||
|
||||
test.cb("compose() chains two streams together in the correct order", t => {
|
||||
t.plan(3);
|
||||
interface Chunk {
|
||||
visited: number[];
|
||||
key: string;
|
||||
}
|
||||
|
||||
let i = 0;
|
||||
const first = map((chunk: Chunk) => {
|
||||
chunk.visited.push(1);
|
||||
return chunk;
|
||||
});
|
||||
const second = map((chunk: Chunk) => {
|
||||
chunk.visited.push(2);
|
||||
return chunk;
|
||||
});
|
||||
|
||||
const composed = compose(
|
||||
[first, second],
|
||||
{ objectMode: true },
|
||||
);
|
||||
|
||||
composed.on("data", data => {
|
||||
expect(data).to.deep.equal(result[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
if (i === 3) {
|
||||
t.end();
|
||||
}
|
||||
});
|
||||
composed.on("error", err => {
|
||||
t.end(err);
|
||||
});
|
||||
composed.on("end", () => {
|
||||
t.end();
|
||||
});
|
||||
|
||||
const input = [
|
||||
{ key: "a", visited: [] },
|
||||
{ key: "b", visited: [] },
|
||||
{ key: "c", visited: [] },
|
||||
];
|
||||
const result = [
|
||||
{ key: "a", visited: [1, 2] },
|
||||
{ key: "b", visited: [1, 2] },
|
||||
{ key: "c", visited: [1, 2] },
|
||||
];
|
||||
|
||||
input.forEach(item => composed.write(item));
|
||||
});
|
||||
|
||||
test.cb("piping compose() maintains correct order", t => {
|
||||
t.plan(3);
|
||||
interface Chunk {
|
||||
visited: number[];
|
||||
key: string;
|
||||
}
|
||||
let i = 0;
|
||||
const first = map((chunk: Chunk) => {
|
||||
chunk.visited.push(1);
|
||||
return chunk;
|
||||
});
|
||||
const second = map((chunk: Chunk) => {
|
||||
chunk.visited.push(2);
|
||||
return chunk;
|
||||
});
|
||||
|
||||
const composed = compose(
|
||||
[first, second],
|
||||
{ objectMode: true },
|
||||
);
|
||||
const third = map((chunk: Chunk) => {
|
||||
chunk.visited.push(3);
|
||||
return chunk;
|
||||
});
|
||||
|
||||
composed.pipe(third).on("data", data => {
|
||||
expect(data).to.deep.equal(result[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
if (i === 3) {
|
||||
t.end();
|
||||
}
|
||||
});
|
||||
|
||||
composed.on("error", err => {
|
||||
t.end(err);
|
||||
});
|
||||
|
||||
const input = [
|
||||
{ key: "a", visited: [] },
|
||||
{ key: "b", visited: [] },
|
||||
{ key: "c", visited: [] },
|
||||
];
|
||||
const result = [
|
||||
{ key: "a", visited: [1, 2, 3] },
|
||||
{ key: "b", visited: [1, 2, 3] },
|
||||
{ key: "c", visited: [1, 2, 3] },
|
||||
];
|
||||
|
||||
input.forEach(item => composed.write(item));
|
||||
});
|
||||
|
||||
test("compose() writable length should be less than highWaterMark when handing writes", async t => {
|
||||
t.plan(7);
|
||||
return new Promise(async (resolve, reject) => {
|
||||
interface Chunk {
|
||||
key: string;
|
||||
mapped: number[];
|
||||
}
|
||||
const first = map(
|
||||
async (chunk: Chunk) => {
|
||||
chunk.mapped.push(1);
|
||||
return chunk;
|
||||
},
|
||||
{
|
||||
objectMode: true,
|
||||
},
|
||||
);
|
||||
|
||||
const second = map(
|
||||
async (chunk: Chunk) => {
|
||||
chunk.mapped.push(2);
|
||||
return chunk;
|
||||
},
|
||||
{ objectMode: true },
|
||||
);
|
||||
|
||||
const composed = compose(
|
||||
[first, second],
|
||||
{ objectMode: true, highWaterMark: 2 },
|
||||
);
|
||||
composed.on("error", err => {
|
||||
reject();
|
||||
});
|
||||
|
||||
composed.on("drain", () => {
|
||||
t.pass();
|
||||
expect(composed._writableState.length).to.be.equal(0);
|
||||
});
|
||||
|
||||
composed.on("data", (chunk: Chunk) => {
|
||||
if (chunk.key === "e") {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
|
||||
const input = [
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "b", mapped: [] },
|
||||
{ key: "c", mapped: [] },
|
||||
{ key: "d", mapped: [] },
|
||||
{ key: "e", mapped: [] },
|
||||
];
|
||||
|
||||
for (const item of input) {
|
||||
const res = composed.write(item);
|
||||
expect(composed._writableState.length).to.be.at.most(2);
|
||||
t.pass();
|
||||
if (!res) {
|
||||
await sleep(10);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
test("compose() should emit drain event ~rate * highWaterMark ms for every write that causes backpressure", async t => {
|
||||
t.plan(7);
|
||||
const _rate = 100;
|
||||
const highWaterMark = 2;
|
||||
return new Promise(async (resolve, reject) => {
|
||||
interface Chunk {
|
||||
key: string;
|
||||
mapped: number[];
|
||||
}
|
||||
const first = map(
|
||||
async (chunk: Chunk) => {
|
||||
await sleep(_rate);
|
||||
chunk.mapped.push(1);
|
||||
return chunk;
|
||||
},
|
||||
{
|
||||
objectMode: true,
|
||||
},
|
||||
);
|
||||
|
||||
const second = map(
|
||||
async (chunk: Chunk) => {
|
||||
chunk.mapped.push(2);
|
||||
return chunk;
|
||||
},
|
||||
{ objectMode: true },
|
||||
);
|
||||
|
||||
const composed = compose(
|
||||
[first, second],
|
||||
{ objectMode: true, highWaterMark },
|
||||
);
|
||||
composed.on("error", err => {
|
||||
reject();
|
||||
});
|
||||
|
||||
composed.on("drain", () => {
|
||||
t.pass();
|
||||
expect(composed._writableState.length).to.be.equal(0);
|
||||
expect(performance.now() - start).to.be.closeTo(
|
||||
_rate * highWaterMark,
|
||||
10,
|
||||
);
|
||||
});
|
||||
|
||||
composed.on("data", (chunk: Chunk) => {
|
||||
pendingReads--;
|
||||
if (pendingReads === 0) {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
|
||||
const input = [
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "b", mapped: [] },
|
||||
{ key: "c", mapped: [] },
|
||||
{ key: "d", mapped: [] },
|
||||
{ key: "e", mapped: [] },
|
||||
];
|
||||
|
||||
let start = performance.now();
|
||||
let pendingReads = input.length;
|
||||
start = performance.now();
|
||||
for (const item of input) {
|
||||
const res = composed.write(item);
|
||||
expect(composed._writableState.length).to.be.at.most(highWaterMark);
|
||||
t.pass();
|
||||
if (!res) {
|
||||
await sleep(_rate * highWaterMark * 2);
|
||||
start = performance.now();
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
test.cb(
|
||||
"compose() should emit drain event after 500 ms when writing 5 items that take 100ms to process with a highWaterMark of 5 ",
|
||||
t => {
|
||||
t.plan(6);
|
||||
const _rate = 100;
|
||||
interface Chunk {
|
||||
key: string;
|
||||
mapped: number[];
|
||||
}
|
||||
const first = map(
|
||||
async (chunk: Chunk) => {
|
||||
await sleep(_rate);
|
||||
chunk.mapped.push(1);
|
||||
return chunk;
|
||||
},
|
||||
{
|
||||
objectMode: true,
|
||||
},
|
||||
);
|
||||
|
||||
const second = map(
|
||||
async (chunk: Chunk) => {
|
||||
chunk.mapped.push(2);
|
||||
return chunk;
|
||||
},
|
||||
{ objectMode: true },
|
||||
);
|
||||
|
||||
const composed = compose(
|
||||
[first, second],
|
||||
{ objectMode: true, highWaterMark: 5 },
|
||||
);
|
||||
|
||||
composed.on("error", err => {
|
||||
t.end(err);
|
||||
});
|
||||
|
||||
composed.on("drain", () => {
|
||||
expect(composed._writableState.length).to.be.equal(0);
|
||||
expect(performance.now() - start).to.be.closeTo(
|
||||
_rate * input.length,
|
||||
25,
|
||||
);
|
||||
t.pass();
|
||||
});
|
||||
|
||||
composed.on("data", (chunk: Chunk) => {
|
||||
t.pass();
|
||||
if (chunk.key === "e") {
|
||||
t.end();
|
||||
}
|
||||
});
|
||||
|
||||
const input = [
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "b", mapped: [] },
|
||||
{ key: "c", mapped: [] },
|
||||
{ key: "d", mapped: [] },
|
||||
{ key: "e", mapped: [] },
|
||||
];
|
||||
input.forEach(item => {
|
||||
composed.write(item);
|
||||
});
|
||||
const start = performance.now();
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"compose() should emit drain event immediately when second stream is bottleneck",
|
||||
t => {
|
||||
t.plan(6);
|
||||
const _rate = 200;
|
||||
interface Chunk {
|
||||
key: string;
|
||||
mapped: number[];
|
||||
}
|
||||
const first = map(
|
||||
(chunk: Chunk) => {
|
||||
chunk.mapped.push(1);
|
||||
return chunk;
|
||||
},
|
||||
{
|
||||
objectMode: true,
|
||||
},
|
||||
);
|
||||
|
||||
const second = map(
|
||||
async (chunk: Chunk) => {
|
||||
pendingReads--;
|
||||
await sleep(_rate);
|
||||
expect(second._writableState.length).to.be.equal(1);
|
||||
expect(first._readableState.length).to.equal(pendingReads);
|
||||
chunk.mapped.push(2);
|
||||
return chunk;
|
||||
},
|
||||
{ objectMode: true, highWaterMark: 1 },
|
||||
);
|
||||
|
||||
const composed = compose(
|
||||
[first, second],
|
||||
{ objectMode: true, highWaterMark: 5 },
|
||||
);
|
||||
composed.on("error", err => {
|
||||
t.end(err);
|
||||
});
|
||||
|
||||
composed.on("drain", () => {
|
||||
expect(composed._writableState.length).to.be.equal(0);
|
||||
expect(performance.now() - start).to.be.lessThan(_rate);
|
||||
t.pass();
|
||||
});
|
||||
|
||||
composed.on("data", (chunk: Chunk) => {
|
||||
expect(composed._writableState.length).to.be.equal(0);
|
||||
t.pass();
|
||||
if (chunk.key === "e") {
|
||||
t.end();
|
||||
}
|
||||
});
|
||||
|
||||
const input = [
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "b", mapped: [] },
|
||||
{ key: "c", mapped: [] },
|
||||
{ key: "d", mapped: [] },
|
||||
{ key: "e", mapped: [] },
|
||||
];
|
||||
let pendingReads = input.length;
|
||||
|
||||
input.forEach(item => {
|
||||
composed.write(item);
|
||||
});
|
||||
|
||||
const start = performance.now();
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"compose() should emit drain event and first should contain up to highWaterMark items in readable state when second is bottleneck",
|
||||
t => {
|
||||
t.plan(6);
|
||||
interface Chunk {
|
||||
index: number;
|
||||
mapped: string[];
|
||||
}
|
||||
const first = map(
|
||||
async (chunk: Chunk) => {
|
||||
expect(first._readableState.length).to.be.at.most(2);
|
||||
chunk.mapped.push("first");
|
||||
return chunk;
|
||||
},
|
||||
{
|
||||
objectMode: true,
|
||||
highWaterMark: 2,
|
||||
},
|
||||
);
|
||||
|
||||
const second = map(
|
||||
async (chunk: Chunk) => {
|
||||
expect(second._writableState.length).to.be.equal(1);
|
||||
await sleep(100);
|
||||
chunk.mapped.push("second");
|
||||
return chunk;
|
||||
},
|
||||
{ objectMode: true, highWaterMark: 2 },
|
||||
);
|
||||
|
||||
const composed = compose(
|
||||
[first, second],
|
||||
{ objectMode: true, highWaterMark: 5 },
|
||||
);
|
||||
composed.on("error", err => {
|
||||
t.end(err);
|
||||
});
|
||||
|
||||
composed.on("data", (chunk: Chunk) => {
|
||||
expect(chunk.mapped.length).to.equal(2);
|
||||
expect(chunk.mapped).to.deep.equal(["first", "second"]);
|
||||
t.pass();
|
||||
if (chunk.index === 5) {
|
||||
t.end();
|
||||
}
|
||||
});
|
||||
|
||||
composed.on("drain", () => {
|
||||
expect(composed._writableState.length).to.be.equal(0);
|
||||
t.pass();
|
||||
});
|
||||
|
||||
const input = [
|
||||
{ index: 1, mapped: [] },
|
||||
{ index: 2, mapped: [] },
|
||||
{ index: 3, mapped: [] },
|
||||
{ index: 4, mapped: [] },
|
||||
{ index: 5, mapped: [] },
|
||||
];
|
||||
|
||||
input.forEach(item => {
|
||||
composed.write(item);
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"compose() should not emit drain event writing 5 items to compose with a highWaterMark of 6",
|
||||
t => {
|
||||
t.plan(5);
|
||||
const _rate = 100;
|
||||
interface Chunk {
|
||||
key: string;
|
||||
mapped: number[];
|
||||
}
|
||||
const first = map(
|
||||
async (chunk: Chunk) => {
|
||||
await sleep(_rate);
|
||||
chunk.mapped.push(1);
|
||||
return chunk;
|
||||
},
|
||||
{
|
||||
objectMode: true,
|
||||
},
|
||||
);
|
||||
|
||||
const second = map(
|
||||
async (chunk: Chunk) => {
|
||||
chunk.mapped.push(2);
|
||||
return chunk;
|
||||
},
|
||||
{ objectMode: true },
|
||||
);
|
||||
|
||||
const composed = compose(
|
||||
[first, second],
|
||||
{ objectMode: true, highWaterMark: 6 },
|
||||
);
|
||||
|
||||
composed.on("error", err => {
|
||||
t.end(err);
|
||||
});
|
||||
|
||||
composed.on("drain", () => {
|
||||
t.end(new Error("Drain should not be emitted"));
|
||||
});
|
||||
|
||||
composed.on("data", (chunk: Chunk) => {
|
||||
t.pass();
|
||||
if (chunk.key === "e") {
|
||||
t.end();
|
||||
}
|
||||
});
|
||||
|
||||
const input = [
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "b", mapped: [] },
|
||||
{ key: "c", mapped: [] },
|
||||
{ key: "d", mapped: [] },
|
||||
{ key: "e", mapped: [] },
|
||||
];
|
||||
|
||||
input.forEach(item => {
|
||||
composed.write(item);
|
||||
});
|
||||
},
|
||||
);
|
180
tests/concat.spec.ts
Normal file
@ -0,0 +1,180 @@
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { concat, collect } from "../src";
|
||||
|
||||
test.cb(
|
||||
"concat() concatenates multiple readable streams (object, flowing mode)",
|
||||
t => {
|
||||
t.plan(6);
|
||||
const source1 = new Readable({ objectMode: true });
|
||||
const source2 = new Readable({ objectMode: true });
|
||||
const expectedElements = ["a", "b", "c", "d", "e", "f"];
|
||||
let i = 0;
|
||||
concat(source1, source2)
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source1.push("a");
|
||||
source2.push("d");
|
||||
source1.push("b");
|
||||
source2.push("e");
|
||||
source1.push("c");
|
||||
source2.push("f");
|
||||
source2.push(null);
|
||||
source1.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"concat() concatenates multiple readable streams (object, paused mode)",
|
||||
t => {
|
||||
t.plan(6);
|
||||
const source1 = new Readable({ objectMode: true });
|
||||
const source2 = new Readable({ objectMode: true });
|
||||
const expectedElements = ["a", "b", "c", "d", "e", "f"];
|
||||
let i = 0;
|
||||
const concatenation = concat(source1, source2)
|
||||
.on("readable", () => {
|
||||
let element = concatenation.read();
|
||||
while (element !== null) {
|
||||
expect(element).to.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
element = concatenation.read();
|
||||
}
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source1.push("a");
|
||||
source2.push("d");
|
||||
source1.push("b");
|
||||
source2.push("e");
|
||||
source1.push("c");
|
||||
source2.push("f");
|
||||
source2.push(null);
|
||||
source1.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"concat() concatenates multiple readable streams (non-object, flowing mode)",
|
||||
t => {
|
||||
t.plan(6);
|
||||
const source1 = new Readable({ objectMode: false });
|
||||
const source2 = new Readable({ objectMode: false });
|
||||
const expectedElements = ["a", "b", "c", "d", "e", "f"];
|
||||
let i = 0;
|
||||
concat(source1, source2)
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.deep.equal(Buffer.from(expectedElements[i]));
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source1.push("a");
|
||||
source2.push("d");
|
||||
source1.push("b");
|
||||
source2.push("e");
|
||||
source1.push("c");
|
||||
source2.push("f");
|
||||
source2.push(null);
|
||||
source1.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"concat() concatenates multiple readable streams (non-object, paused mode)",
|
||||
t => {
|
||||
t.plan(6);
|
||||
const source1 = new Readable({ objectMode: false, read: () => ({}) });
|
||||
const source2 = new Readable({ objectMode: false, read: () => ({}) });
|
||||
const expectedElements = ["a", "b", "c", "d", "e", "f"];
|
||||
let i = 0;
|
||||
const concatenation = concat(source1, source2)
|
||||
.on("readable", () => {
|
||||
let element = concatenation.read();
|
||||
while (element !== null) {
|
||||
expect(element).to.deep.equal(
|
||||
Buffer.from(expectedElements[i]),
|
||||
);
|
||||
t.pass();
|
||||
i++;
|
||||
element = concatenation.read();
|
||||
}
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source1.push("a");
|
||||
setTimeout(() => source2.push("d"), 10);
|
||||
setTimeout(() => source1.push("b"), 20);
|
||||
setTimeout(() => source2.push("e"), 30);
|
||||
setTimeout(() => source1.push("c"), 40);
|
||||
setTimeout(() => source2.push("f"), 50);
|
||||
setTimeout(() => source2.push(null), 60);
|
||||
setTimeout(() => source1.push(null), 70);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb("concat() concatenates a single readable stream (object mode)", t => {
|
||||
t.plan(3);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const expectedElements = ["a", "b", "c", "d", "e", "f"];
|
||||
let i = 0;
|
||||
concat(source)
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb(
|
||||
"concat() concatenates a single readable stream (non-object mode)",
|
||||
t => {
|
||||
t.plan(3);
|
||||
const source = new Readable({ objectMode: false });
|
||||
const expectedElements = ["a", "b", "c", "d", "e", "f"];
|
||||
let i = 0;
|
||||
concat(source)
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.deep.equal(Buffer.from(expectedElements[i]));
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb("concat() concatenates empty list of readable streams", t => {
|
||||
t.plan(0);
|
||||
concat()
|
||||
.pipe(collect())
|
||||
.on("data", _ => {
|
||||
t.fail();
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
});
|
656
tests/demux.spec.ts
Normal file
@ -0,0 +1,656 @@
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
const { demux, map } = require("../src");
|
||||
import { Writable } from "stream";
|
||||
const sinon = require("sinon");
|
||||
const { sleep } = require("../src/helpers");
|
||||
import { performance } from "perf_hooks";
|
||||
|
||||
interface Test {
|
||||
key: string;
|
||||
visited: number[];
|
||||
}
|
||||
|
||||
test.cb("demux() constructor should be called once per key", t => {
|
||||
t.plan(1);
|
||||
const input = [
|
||||
{ key: "a", visited: [] },
|
||||
{ key: "b", visited: [] },
|
||||
{ key: "a", visited: [] },
|
||||
{ key: "c", visited: [] },
|
||||
{ key: "a", visited: [] },
|
||||
{ key: "b", visited: [] },
|
||||
];
|
||||
const construct = sinon.spy((destKey: string) => {
|
||||
const dest = map((chunk: Test) => {
|
||||
chunk.visited.push(1);
|
||||
return chunk;
|
||||
});
|
||||
|
||||
return dest;
|
||||
});
|
||||
|
||||
const demuxed = demux(construct, "key", { objectMode: true });
|
||||
|
||||
demuxed.on("finish", () => {
|
||||
expect(construct.withArgs("a").callCount).to.equal(1);
|
||||
expect(construct.withArgs("b").callCount).to.equal(1);
|
||||
expect(construct.withArgs("c").callCount).to.equal(1);
|
||||
t.pass();
|
||||
t.end();
|
||||
});
|
||||
|
||||
input.forEach(event => demuxed.write(event));
|
||||
demuxed.end();
|
||||
});
|
||||
|
||||
test.cb("demux() should send input through correct pipeline", t => {
|
||||
t.plan(6);
|
||||
const input = [
|
||||
{ key: "a", visited: [] },
|
||||
{ key: "b", visited: [] },
|
||||
{ key: "a", visited: [] },
|
||||
{ key: "c", visited: [] },
|
||||
{ key: "a", visited: [] },
|
||||
{ key: "b", visited: [] },
|
||||
];
|
||||
const pipelineSpies = {};
|
||||
const construct = (destKey: string) => {
|
||||
const mapper = sinon.spy((chunk: Test) => {
|
||||
return { ...chunk, visited: [1] };
|
||||
});
|
||||
const dest = map(mapper);
|
||||
pipelineSpies[destKey] = mapper;
|
||||
|
||||
return dest;
|
||||
};
|
||||
|
||||
const demuxed = demux(construct, "key", { objectMode: true });
|
||||
|
||||
demuxed.on("finish", () => {
|
||||
pipelineSpies["a"].getCalls().forEach(call => {
|
||||
expect(call.args[0].key).to.equal("a");
|
||||
t.pass();
|
||||
});
|
||||
pipelineSpies["b"].getCalls().forEach(call => {
|
||||
expect(call.args[0].key).to.equal("b");
|
||||
t.pass();
|
||||
});
|
||||
pipelineSpies["c"].getCalls().forEach(call => {
|
||||
expect(call.args[0].key).to.equal("c");
|
||||
t.pass();
|
||||
});
|
||||
t.end();
|
||||
});
|
||||
|
||||
input.forEach(event => demuxed.write(event));
|
||||
demuxed.end();
|
||||
});
|
||||
|
||||
test.cb("demux() constructor should be called once per key using keyBy", t => {
|
||||
t.plan(1);
|
||||
const input = [
|
||||
{ key: "a", visited: [] },
|
||||
{ key: "b", visited: [] },
|
||||
{ key: "a", visited: [] },
|
||||
{ key: "c", visited: [] },
|
||||
{ key: "a", visited: [] },
|
||||
{ key: "b", visited: [] },
|
||||
];
|
||||
|
||||
const construct = sinon.spy((destKey: string) => {
|
||||
const dest = map((chunk: Test) => {
|
||||
chunk.visited.push(1);
|
||||
return chunk;
|
||||
});
|
||||
|
||||
return dest;
|
||||
});
|
||||
|
||||
const demuxed = demux(construct, item => item.key, { objectMode: true });
|
||||
|
||||
demuxed.on("finish", () => {
|
||||
expect(construct.withArgs("a").callCount).to.equal(1);
|
||||
expect(construct.withArgs("b").callCount).to.equal(1);
|
||||
expect(construct.withArgs("c").callCount).to.equal(1);
|
||||
t.pass();
|
||||
t.end();
|
||||
});
|
||||
|
||||
input.forEach(event => demuxed.write(event));
|
||||
demuxed.end();
|
||||
});
|
||||
|
||||
test.cb("demux() should send input through correct pipeline using keyBy", t => {
|
||||
t.plan(6);
|
||||
const input = [
|
||||
{ key: "a", visited: [] },
|
||||
{ key: "b", visited: [] },
|
||||
{ key: "a", visited: [] },
|
||||
{ key: "c", visited: [] },
|
||||
{ key: "a", visited: [] },
|
||||
{ key: "b", visited: [] },
|
||||
];
|
||||
const pipelineSpies = {};
|
||||
const construct = (destKey: string) => {
|
||||
const mapper = sinon.spy((chunk: Test) => {
|
||||
return { ...chunk, visited: [1] };
|
||||
});
|
||||
const dest = map(mapper);
|
||||
pipelineSpies[destKey] = mapper;
|
||||
|
||||
return dest;
|
||||
};
|
||||
|
||||
const demuxed = demux(construct, item => item.key, { objectMode: true });
|
||||
|
||||
demuxed.on("finish", () => {
|
||||
pipelineSpies["a"].getCalls().forEach(call => {
|
||||
expect(call.args[0].key).to.equal("a");
|
||||
t.pass();
|
||||
});
|
||||
pipelineSpies["b"].getCalls().forEach(call => {
|
||||
expect(call.args[0].key).to.equal("b");
|
||||
t.pass();
|
||||
});
|
||||
pipelineSpies["c"].getCalls().forEach(call => {
|
||||
expect(call.args[0].key).to.equal("c");
|
||||
t.pass();
|
||||
});
|
||||
t.end();
|
||||
});
|
||||
|
||||
input.forEach(event => demuxed.write(event));
|
||||
demuxed.end();
|
||||
});
|
||||
|
||||
test("demux() write should return false after if it has >= highWaterMark items buffered and drain should be emitted", t => {
|
||||
return new Promise(async (resolve, reject) => {
|
||||
t.plan(7);
|
||||
interface Chunk {
|
||||
key: string;
|
||||
mapped: number[];
|
||||
}
|
||||
const input: Chunk[] = [
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "a", mapped: [] },
|
||||
];
|
||||
let pendingReads = input.length;
|
||||
const highWaterMark = 5;
|
||||
const slowProcessorSpeed = 25;
|
||||
const construct = (destKey: string) => {
|
||||
const first = map(
|
||||
async (chunk: Chunk) => {
|
||||
await sleep(slowProcessorSpeed);
|
||||
return { ...chunk, mapped: [1] };
|
||||
},
|
||||
{ highWaterMark: 1, objectMode: true },
|
||||
);
|
||||
|
||||
first.on("data", chunk => {
|
||||
expect(chunk.mapped).to.deep.equal([1]);
|
||||
pendingReads--;
|
||||
if (pendingReads === 0) {
|
||||
resolve();
|
||||
}
|
||||
t.pass();
|
||||
});
|
||||
|
||||
return first;
|
||||
};
|
||||
|
||||
const _demux = demux(construct, "key", {
|
||||
objectMode: true,
|
||||
highWaterMark,
|
||||
});
|
||||
|
||||
_demux.on("error", err => {
|
||||
reject();
|
||||
});
|
||||
|
||||
for (const item of input) {
|
||||
const res = _demux.write(item);
|
||||
expect(_demux._writableState.length).to.be.at.most(highWaterMark);
|
||||
if (!res) {
|
||||
await new Promise((resolv, rej) => {
|
||||
_demux.once("drain", () => {
|
||||
expect(_demux._writableState.length).to.be.equal(0);
|
||||
t.pass();
|
||||
resolv();
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
test("demux() should emit one drain event after slowProcessorSpeed * highWaterMark ms", t => {
|
||||
return new Promise(async (resolve, reject) => {
|
||||
t.plan(7);
|
||||
interface Chunk {
|
||||
key: string;
|
||||
mapped: number[];
|
||||
}
|
||||
const input: Chunk[] = [
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "a", mapped: [] },
|
||||
];
|
||||
let pendingReads = input.length;
|
||||
const highWaterMark = 5;
|
||||
const slowProcessorSpeed = 25;
|
||||
|
||||
const construct = (destKey: string) => {
|
||||
const first = map(
|
||||
async (chunk: Chunk) => {
|
||||
await sleep(slowProcessorSpeed);
|
||||
chunk.mapped.push(1);
|
||||
return chunk;
|
||||
},
|
||||
{ highWaterMark: 1, objectMode: true },
|
||||
);
|
||||
|
||||
first.on("data", () => {
|
||||
t.pass();
|
||||
pendingReads--;
|
||||
if (pendingReads === 0) {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
return first;
|
||||
};
|
||||
const _demux = demux(construct, "key", {
|
||||
objectMode: true,
|
||||
highWaterMark,
|
||||
});
|
||||
_demux.on("error", err => {
|
||||
reject();
|
||||
});
|
||||
|
||||
const start = performance.now();
|
||||
for (const item of input) {
|
||||
const res = _demux.write(item);
|
||||
if (!res) {
|
||||
await new Promise((resolv, rej) => {
|
||||
// This event should be received after all items in demux are processed
|
||||
_demux.once("drain", () => {
|
||||
expect(performance.now() - start).to.be.greaterThan(
|
||||
slowProcessorSpeed * highWaterMark,
|
||||
);
|
||||
t.pass();
|
||||
resolv();
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
test("demux() should emit one drain event when writing 6 items with highWaterMark of 5", t => {
|
||||
return new Promise(async (resolve, reject) => {
|
||||
t.plan(7);
|
||||
interface Chunk {
|
||||
key: string;
|
||||
mapped: number[];
|
||||
}
|
||||
const highWaterMark = 5;
|
||||
const input = [
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "a", mapped: [] },
|
||||
];
|
||||
let pendingReads = input.length;
|
||||
const construct = (destKey: string) => {
|
||||
const first = map(
|
||||
async (chunk: Chunk) => {
|
||||
await sleep(50);
|
||||
chunk.mapped.push(2);
|
||||
return chunk;
|
||||
},
|
||||
{ highWaterMark: 1, objectMode: true },
|
||||
);
|
||||
|
||||
first.on("data", () => {
|
||||
pendingReads--;
|
||||
t.pass();
|
||||
if (pendingReads === 0) {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
return first;
|
||||
};
|
||||
const _demux = demux(construct, "key", {
|
||||
objectMode: true,
|
||||
highWaterMark: 5,
|
||||
});
|
||||
|
||||
_demux.on("error", err => {
|
||||
reject();
|
||||
});
|
||||
|
||||
for (const item of input) {
|
||||
const res = _demux.write(item);
|
||||
expect(_demux._writableState.length).to.be.at.most(highWaterMark);
|
||||
if (!res) {
|
||||
await new Promise(_resolve => {
|
||||
_demux.once("drain", () => {
|
||||
_resolve();
|
||||
expect(_demux._writableState.length).to.be.equal(0);
|
||||
t.pass();
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
test.cb(
|
||||
"demux() should emit drain event when third stream is bottleneck",
|
||||
t => {
|
||||
t.plan(8);
|
||||
const slowProcessorSpeed = 100;
|
||||
const highWaterMark = 5;
|
||||
interface Chunk {
|
||||
key: string;
|
||||
mapped: number[];
|
||||
}
|
||||
const sink = new Writable({
|
||||
objectMode: true,
|
||||
write(chunk, encoding, cb) {
|
||||
expect(chunk.mapped).to.deep.equal([1, 2]);
|
||||
t.pass();
|
||||
pendingReads--;
|
||||
if (pendingReads === 0) {
|
||||
t.end();
|
||||
}
|
||||
cb();
|
||||
},
|
||||
});
|
||||
const construct = (destKey: string) => {
|
||||
const first = map(
|
||||
(chunk: Chunk) => {
|
||||
chunk.mapped.push(1);
|
||||
return chunk;
|
||||
},
|
||||
{ objectMode: true, highWaterMark: 1 },
|
||||
);
|
||||
|
||||
const second = map(
|
||||
async (chunk: Chunk) => {
|
||||
await sleep(slowProcessorSpeed);
|
||||
chunk.mapped.push(2);
|
||||
return chunk;
|
||||
},
|
||||
{ objectMode: true, highWaterMark: 1 },
|
||||
);
|
||||
|
||||
first.pipe(second).pipe(sink);
|
||||
return first;
|
||||
};
|
||||
const _demux = demux(construct, () => "a", {
|
||||
objectMode: true,
|
||||
highWaterMark,
|
||||
});
|
||||
_demux.on("error", err => {
|
||||
t.end(err);
|
||||
});
|
||||
|
||||
// This event should be received after at least 5 * slowProcessorSpeed (two are read immediately by first and second, 5 remaining in demux before drain event)
|
||||
_demux.on("drain", () => {
|
||||
expect(_demux._writableState.length).to.be.equal(0);
|
||||
expect(performance.now() - start).to.be.greaterThan(
|
||||
slowProcessorSpeed * (input.length - 2),
|
||||
);
|
||||
t.pass();
|
||||
});
|
||||
|
||||
const input = [
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "b", mapped: [] },
|
||||
{ key: "c", mapped: [] },
|
||||
{ key: "d", mapped: [] },
|
||||
{ key: "e", mapped: [] },
|
||||
{ key: "f", mapped: [] },
|
||||
{ key: "g", mapped: [] },
|
||||
];
|
||||
let pendingReads = input.length;
|
||||
|
||||
const start = performance.now();
|
||||
input.forEach(item => {
|
||||
_demux.write(item);
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"demux() should emit drain event when second stream is bottleneck",
|
||||
t => {
|
||||
t.plan(8);
|
||||
const slowProcessorSpeed = 100;
|
||||
const highWaterMark = 5;
|
||||
interface Chunk {
|
||||
key: string;
|
||||
mapped: number[];
|
||||
}
|
||||
const sink = new Writable({
|
||||
objectMode: true,
|
||||
write(chunk, encoding, cb) {
|
||||
expect(chunk.mapped).to.deep.equal([1, 2]);
|
||||
t.pass();
|
||||
pendingReads--;
|
||||
if (pendingReads === 0) {
|
||||
t.end();
|
||||
}
|
||||
cb();
|
||||
},
|
||||
});
|
||||
const construct = (destKey: string) => {
|
||||
const first = map(
|
||||
(chunk: Chunk) => {
|
||||
chunk.mapped.push(1);
|
||||
return chunk;
|
||||
},
|
||||
{ objectMode: true, highWaterMark: 1 },
|
||||
);
|
||||
const second = map(
|
||||
(chunk: Chunk) => {
|
||||
chunk.mapped.push(2);
|
||||
return chunk;
|
||||
},
|
||||
{ objectMode: true, highWaterMark: 1 },
|
||||
);
|
||||
|
||||
const third = map(
|
||||
async (chunk: Chunk) => {
|
||||
await sleep(slowProcessorSpeed);
|
||||
chunk.mapped.push(3);
|
||||
return chunk;
|
||||
},
|
||||
{ objectMode: true, highWaterMark: 1 },
|
||||
);
|
||||
|
||||
first
|
||||
.pipe(second)
|
||||
.pipe(third)
|
||||
.pipe(sink);
|
||||
return first;
|
||||
};
|
||||
const _demux = demux(construct, () => "a", {
|
||||
objectMode: true,
|
||||
highWaterMark,
|
||||
});
|
||||
_demux.on("error", err => {
|
||||
t.end(err);
|
||||
});
|
||||
|
||||
// This event should be received after at least 3 * slowProcessorSpeed (two are read immediately by first and second, 3 remaining in demux before drain event)
|
||||
_demux.on("drain", () => {
|
||||
expect(_demux._writableState.length).to.be.equal(0);
|
||||
expect(performance.now() - start).to.be.greaterThan(
|
||||
slowProcessorSpeed * (input.length - 4),
|
||||
);
|
||||
t.pass();
|
||||
});
|
||||
|
||||
const input = [
|
||||
{ key: "a", mapped: [] },
|
||||
{ key: "b", mapped: [] },
|
||||
{ key: "c", mapped: [] },
|
||||
{ key: "d", mapped: [] },
|
||||
{ key: "e", mapped: [] },
|
||||
{ key: "f", mapped: [] },
|
||||
{ key: "g", mapped: [] },
|
||||
];
|
||||
let pendingReads = input.length;
|
||||
|
||||
const start = performance.now();
input.forEach(item => {
_demux.write(item);
});
},
);

test("demux() should be blocked by slowest pipeline", t => {
t.plan(1);
const slowProcessorSpeed = 100;
interface Chunk {
key: string;
mapped: number[];
}
return new Promise(async (resolve, reject) => {
const construct = (destKey: string) => {
const first = map(
async (chunk: Chunk) => {
await sleep(slowProcessorSpeed);
chunk.mapped.push(1);
return chunk;
},
{ objectMode: true, highWaterMark: 1 },
);

first.on("data", chunk => {
pendingReads--;
if (chunk.key === "b") {
expect(performance.now() - start).to.be.greaterThan(
slowProcessorSpeed * totalItems,
);
t.pass();
expect(pendingReads).to.equal(0);
resolve();
}
});
return first;
};
const _demux = demux(construct, "key", {
objectMode: true,
highWaterMark: 1,
});
_demux.on("error", err => {
reject(err);
});

const input = [
{ key: "a", mapped: [] },
{ key: "a", mapped: [] },
{ key: "c", mapped: [] },
{ key: "c", mapped: [] },
{ key: "c", mapped: [] },
{ key: "b", mapped: [] },
];

let pendingReads = input.length;
const totalItems = input.length;
const start = performance.now();
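// Respect backpressure: when write() returns false, wait for the demux drain event before writing the next chunk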
for (const item of input) {
if (!_demux.write(item)) {
await new Promise(_resolve => {
_demux.once("drain", () => {
_resolve();
});
});
}
}
});
});

test("demux() should emit drain event when second stream in pipeline is bottleneck", t => {
t.plan(5);
const highWaterMark = 3;
return new Promise(async (resolve, reject) => {
interface Chunk {
key: string;
mapped: number[];
}
const sink = new Writable({
objectMode: true,
write(chunk, encoding, cb) {
expect(chunk.mapped).to.deep.equal([1, 2]);
t.pass();
cb();
if (pendingReads === 0) {
resolve();
}
},
});

const construct = (destKey: string) => {
const first = map(
(chunk: Chunk) => {
expect(first._readableState.length).to.be.at.most(2);
chunk.mapped.push(1);
return chunk;
},
{ objectMode: true, highWaterMark: 2 },
);

const second = map(
async (chunk: Chunk) => {
await sleep(100);
chunk.mapped.push(2);
expect(second._writableState.length).to.be.equal(1);
pendingReads--;
return chunk;
},
{ objectMode: true, highWaterMark: 1 },
);

first.pipe(second).pipe(sink);
return first;
};

const _demux = demux(construct, "key", {
objectMode: true,
highWaterMark,
});
_demux.on("error", err => {
|
||||
reject();
|
||||
});

_demux.on("drain", () => {
expect(_demux._writableState.length).to.be.equal(0);
t.pass();
});

const input = [
{ key: "a", mapped: [] },
{ key: "a", mapped: [] },
{ key: "a", mapped: [] },
{ key: "a", mapped: [] },
];
let pendingReads = input.length;
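// Four writes against a highWaterMark of 3 overflow the demux buffer, so exactly one drain event is expected once it flushes downstream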
input.forEach(item => {
_demux.write(item);
});
});
});

28
tests/duplex.spec.ts
Normal file
@ -0,0 +1,28 @@
import * as cp from "child_process";
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { duplex } from "../src";

test.cb(
"duplex() combines a writable and readable stream into a ReadWrite stream",
t => {
t.plan(1);
const source = new Readable();
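// `cat` echoes stdin back to stdout, so the combined duplex stream should emit exactly what is written below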
const catProcess = cp.exec("cat");
let out = "";
source
.pipe(duplex(catProcess.stdin!, catProcess.stdout!))
.on("data", chunk => (out += chunk))
.on("error", t.end)
.on("end", () => {
expect(out).to.equal("abcdef");
t.pass();
t.end();
});
source.push("ab");
source.push("cd");
source.push("ef");
source.push(null);
},
);
116
tests/filter.spec.ts
Normal file
@ -0,0 +1,116 @@
import test from "ava";
import { expect } from "chai";
import { Readable } from "stream";
import { filter } from "../src";

test.cb("filter() filters elements synchronously", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
const expectedElements = ["a", "c"];
let i = 0;
source
.pipe(
filter((element: string) => element !== "b", {
readableObjectMode: true,
writableObjectMode: true,
}),
)
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);

source.push("a");
source.push("b");
source.push("c");
source.push(null);
});

test.cb("filter() filters elements asynchronously", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
const expectedElements = ["a", "c"];
let i = 0;
source
.pipe(
filter(
async (element: string) => {
await Promise.resolve();
return element !== "b";
},
{ readableObjectMode: true, writableObjectMode: true },
),
)
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);

source.push("a");
source.push("b");
source.push("c");
source.push(null);
});

test.cb.skip("filter() emits errors during synchronous filtering", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
source
.pipe(
filter(
(element: string) => {
if (element !== "a") {
throw new Error("Failed filtering");
}
return true;
},
{ readableObjectMode: true, writableObjectMode: true },
),
)
.resume()
.on("error", err => {
expect(err.message).to.equal("Failed filtering");
t.pass();
})
.on("end", t.end);

source.push("a");
source.push("b");
source.push("c");
source.push(null);
});

test.cb.skip("filter() emits errors during asynchronous filtering", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
source
.pipe(
filter(
async (element: string) => {
await Promise.resolve();
if (element !== "a") {
throw new Error("Failed filtering");
}
return true;
},
{ readableObjectMode: true, writableObjectMode: true },
),
)
.resume()
.on("error", err => {
expect(err.message).to.equal("Failed filtering");
t.pass();
})
.on("end", t.end);

source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
100
tests/flatMap.spec.ts
Normal file
@ -0,0 +1,100 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { flatMap } from "../src";

test.cb("flatMap() maps elements synchronously", t => {
t.plan(6);
const source = new Readable({ objectMode: true });
const expectedElements = ["a", "A", "b", "B", "c", "C"];
let i = 0;
source
.pipe(flatMap((element: string) => [element, element.toUpperCase()]))
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("end", t.end);

source.push("a");
source.push("b");
source.push("c");
source.push(null);
});

test.cb("flatMap() maps elements asynchronously", t => {
t.plan(6);
const source = new Readable({ objectMode: true });
const expectedElements = ["a", "A", "b", "B", "c", "C"];
let i = 0;
source
.pipe(
flatMap(async (element: string) => {
await Promise.resolve();
return [element, element.toUpperCase()];
}),
)
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("end", t.end);

source.push("a");
source.push("b");
source.push("c");
source.push(null);
});

test.cb.skip("flatMap() emits errors during synchronous mapping", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
source
.pipe(
flatMap((element: string) => {
if (element !== "a") {
throw new Error("Failed mapping");
}
return [element, element.toUpperCase()];
}),
)
.resume()
.on("error", err => {
expect(err.message).to.equal("Failed mapping");
t.pass();
})
.on("end", t.end);

source.push("a");
source.push("b");
source.push("c");
source.push(null);
});

test.cb.skip("flatMap() emits errors during asynchronous mapping", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
source
.pipe(
flatMap(async (element: string) => {
await Promise.resolve();
if (element !== "a") {
throw new Error("Failed mapping");
}
return [element, element.toUpperCase()];
}),
)
.resume()
.on("error", err => {
expect(err.message).to.equal("Failed mapping");
t.pass();
})
.on("end", t.end);

source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
45
tests/fromArray.spec.ts
Normal file
@ -0,0 +1,45 @@
import test from "ava";
import { expect } from "chai";
import { fromArray } from "../src";

test.cb("fromArray() streams array elements in flowing mode", t => {
t.plan(3);
const elements = ["a", "b", "c"];
const stream = fromArray(elements);
let i = 0;
stream
.on("data", (element: string) => {
expect(element).to.equal(elements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
});

test.cb("fromArray() ends immediately if there are no array elements", t => {
t.plan(0);
fromArray([])
.on("data", () => t.fail())
.on("error", t.end)
.on("end", t.end);
});

test.cb("fromArray() streams array elements in paused mode", t => {
t.plan(3);
const elements = ["a", "b", "c"];
const stream = fromArray(elements);
let i = 0;
stream
.on("readable", () => {
let element = stream.read();
while (element !== null) {
expect(element).to.equal(elements[i]);
t.pass();
i++;
element = stream.read();
}
})
.on("error", t.end)
.on("end", t.end);
});
56
tests/join.spec.ts
Normal file
@ -0,0 +1,56 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { join } from "../src";

test.cb("join() joins chunks using the specified separator", t => {
t.plan(9);
const source = new Readable({ objectMode: true });
const expectedParts = ["ab|", "|", "c|d", "|", "|", "|", "e", "|", "|f|"];
let i = 0;
source
.pipe(join("|"))
.on("data", part => {
expect(part).to.equal(expectedParts[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);

source.push("ab|");
source.push("c|d");
source.push("|");
source.push("e");
source.push("|f|");
source.push(null);
});

test.cb(
"join() joins chunks using the specified separator without breaking up multi-byte characters " +
"spanning multiple chunks",
t => {
t.plan(5);
const source = new Readable({ objectMode: true });
const expectedParts = ["ø", "|", "ö", "|", "一"];
let i = 0;
source
.pipe(join("|"))
.on("data", part => {
expect(part).to.equal(expectedParts[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);

source.push(Buffer.from("ø").slice(0, 1)); // 2-byte character spanning two chunks
source.push(Buffer.from("ø").slice(1, 2));
source.push(Buffer.from("ö").slice(0, 1)); // 2-byte character spanning two chunks
source.push(Buffer.from("ö").slice(1, 2));
source.push(Buffer.from("一").slice(0, 1)); // 3-byte character spanning three chunks
source.push(Buffer.from("一").slice(1, 2));
source.push(Buffer.from("一").slice(2, 3));
source.push(null);
},
);
15
tests/last.spec.ts
Normal file
@ -0,0 +1,15 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { last } from "../src";

test("last() resolves to the last chunk streamed by the given readable stream", async t => {
const source = new Readable({ objectMode: true });
const lastPromise = last(source);
source.push("ab");
source.push("cd");
source.push("ef");
source.push(null);
const lastChunk = await lastPromise;
expect(lastChunk).to.equal("ef");
});
51
tests/map.spec.ts
Normal file
@ -0,0 +1,51 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { map } from "../src";

test.cb("map() maps elements synchronously", t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const mapStream = map((element: string) => element.toUpperCase());
const expectedElements = ["A", "B", "C"];
let i = 0;
source
.pipe(mapStream)
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);

source.push("a");
source.push("b");
source.push("c");
source.push(null);
});

test.cb("map() maps elements asynchronously", t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const mapStream = map(async (element: string) => {
await Promise.resolve();
return element.toUpperCase();
});
const expectedElements = ["A", "B", "C"];
let i = 0;
source
.pipe(mapStream)
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);

source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
60
tests/merge.spec.ts
Normal file
@ -0,0 +1,60 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { merge } from "../src";

test.cb(
"merge() merges multiple readable streams in chunk arrival order",
t => {
t.plan(6);
const source1 = new Readable({ objectMode: true, read: () => ({}) });
const source2 = new Readable({ objectMode: true, read: () => ({}) });
const expectedElements = ["a", "d", "b", "e", "c", "f"];
let i = 0;
merge(source1, source2)
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source1.push("a");
setTimeout(() => source2.push("d"), 10);
setTimeout(() => source1.push("b"), 20);
setTimeout(() => source2.push("e"), 30);
setTimeout(() => source1.push("c"), 40);
setTimeout(() => source2.push("f"), 50);
setTimeout(() => source2.push(null), 60);
setTimeout(() => source1.push(null), 70);
},
);

test.cb("merge() merges a readable stream", t => {
t.plan(3);
const source = new Readable({ objectMode: true, read: () => ({}) });
const expectedElements = ["a", "b", "c"];
let i = 0;
merge(source)
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);

source.push("a");
source.push("b");
source.push("c");
source.push(null);
});

test.cb("merge() merges an empty list of readable streams", t => {
t.plan(0);
merge()
.on("data", () => t.pass())
.on("error", t.end)
.on("end", t.end);
});
77
tests/parallelMap.spec.ts
Normal file
@ -0,0 +1,77 @@
import { Readable } from "stream";
import { performance } from "perf_hooks";
import test from "ava";
import { expect } from "chai";
import { parallelMap } from "../src";
import { sleep } from "../src/helpers";

test.cb("parallelMap() parallel mapping", t => {
t.plan(6);
const offset = 50;
const source = new Readable({ objectMode: true });
const expectedElements = [
"a_processed",
"b_processed",
"c_processed",
"d_processed",
"e_processed",
"f_processed",
];
interface IPerfData {
start: number;
output?: string;
finish?: number;
}
const orderedResults: IPerfData[] = [];
source
.pipe(
parallelMap(async (data: any) => {
const perfData: IPerfData = { start: performance.now() };
const c = data + "_processed";
perfData.output = c;
await sleep(offset);
perfData.finish = performance.now();
orderedResults.push(perfData);
return c;
}, 2),
)
.on("data", (element: string) => {
t.true(expectedElements.includes(element));
})
.on("error", t.end)
.on("end", async () => {
expect(orderedResults[0].finish).to.be.lessThan(
orderedResults[2].start,
);
expect(orderedResults[1].finish).to.be.lessThan(
orderedResults[3].start,
);
expect(orderedResults[2].finish).to.be.lessThan(
orderedResults[4].start,
);
expect(orderedResults[3].finish).to.be.lessThan(
orderedResults[5].start,
);
expect(orderedResults[0].start).to.be.lessThan(
orderedResults[2].start + offset,
);
expect(orderedResults[1].start).to.be.lessThan(
orderedResults[3].start + offset,
);
expect(orderedResults[2].start).to.be.lessThan(
orderedResults[4].start + offset,
);
expect(orderedResults[3].start).to.be.lessThan(
orderedResults[5].start + offset,
);
t.end();
});

source.push("a");
source.push("b");
source.push("c");
source.push("d");
source.push("e");
source.push("f");
source.push(null);
});
40
tests/parse.spec.ts
Normal file
@ -0,0 +1,40 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { parse } from "../src";

test.cb("parse() parses the streamed elements as JSON", t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const expectedElements = ["abc", {}, []];
let i = 0;
source
.pipe(parse())
.on("data", part => {
expect(part).to.deep.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);

source.push('"abc"');
source.push("{}");
source.push("[]");
source.push(null);
});

test.cb("parse() emits errors on invalid JSON", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
source
.pipe(parse())
.resume()
.on("error", () => t.pass())
.on("end", t.end);
source.push("{}");
source.push({});
source.push([]);
source.push(null);
});
89
tests/rate.spec.ts
Normal file
@ -0,0 +1,89 @@
import { Readable } from "stream";
import { performance } from "perf_hooks";
import test from "ava";
import { expect } from "chai";
import { rate } from "../src";

test.cb("rate() sends data at a rate of 150", t => {
t.plan(5);
const targetRate = 150;
const source = new Readable({ objectMode: true });
const expectedElements = ["a", "b", "c", "d", "e"];
const start = performance.now();
let i = 0;

source
.pipe(rate(targetRate))
.on("data", (element: string[]) => {
const currentRate = (i / (performance.now() - start)) * 1000;
expect(element).to.deep.equal(expectedElements[i]);
expect(currentRate).lessThan(targetRate);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);

source.push("a");
source.push("b");
source.push("c");
source.push("d");
source.push("e");
source.push(null);
});

test.cb("rate() sends data at a rate of 50", t => {
t.plan(5);
const targetRate = 50;
const source = new Readable({ objectMode: true });
const expectedElements = ["a", "b", "c", "d", "e"];
const start = performance.now();
let i = 0;

source
.pipe(rate(targetRate))
.on("data", (element: string[]) => {
const currentRate = (i / (performance.now() - start)) * 1000;
expect(element).to.deep.equal(expectedElements[i]);
expect(currentRate).lessThan(targetRate);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);

source.push("a");
source.push("b");
source.push("c");
source.push("d");
source.push("e");
source.push(null);
});

test.cb("rate() sends data at a rate of 1", t => {
t.plan(5);
const targetRate = 1;
const source = new Readable({ objectMode: true });
const expectedElements = ["a", "b", "c", "d", "e"];
const start = performance.now();
let i = 0;

source
.pipe(rate(targetRate))
.on("data", (element: string[]) => {
const currentRate = (i / (performance.now() - start)) * 1000;
expect(element).to.deep.equal(expectedElements[i]);
expect(currentRate).lessThan(targetRate);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);

source.push("a");
source.push("b");
source.push("c");
source.push("d");
source.push("e");
source.push(null);
});
98
tests/reduce.spec.ts
Normal file
@ -0,0 +1,98 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { reduce } from "../src";

test.cb("reduce() reduces elements synchronously", t => {
t.plan(1);
const source = new Readable({ objectMode: true });
const expectedValue = 6;
source
.pipe(reduce((acc: number, element: string) => acc + element.length, 0))
.on("data", (element: string) => {
expect(element).to.equal(expectedValue);
t.pass();
})
.on("error", t.end)
.on("end", t.end);

source.push("ab");
source.push("cd");
source.push("ef");
source.push(null);
});

test.cb("reduce() reduces elements asynchronously", t => {
t.plan(1);
const source = new Readable({ objectMode: true });
const expectedValue = 6;
source
.pipe(
reduce(async (acc: number, element: string) => {
await Promise.resolve();
return acc + element.length;
}, 0),
)
.on("data", (element: string) => {
expect(element).to.equal(expectedValue);
t.pass();
})
.on("error", t.end)
.on("end", t.end);

source.push("ab");
source.push("cd");
source.push("ef");
source.push(null);
});

test.cb.skip("reduce() emits errors during synchronous reduce", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
source
.pipe(
reduce((acc: number, element: string) => {
if (element !== "ab") {
throw new Error("Failed reduce");
}
return acc + element.length;
}, 0),
)
.resume()
.on("error", err => {
expect(err.message).to.equal("Failed reduce");
t.pass();
})
.on("end", t.end);

source.push("ab");
source.push("cd");
source.push("ef");
source.push(null);
});

test.cb.skip("reduce() emits errors during asynchronous reduce", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
source
.pipe(
reduce(async (acc: number, element: string) => {
await Promise.resolve();
if (element !== "ab") {
throw new Error("Failed mapping");
}
return acc + element.length;
}, 0),
)
.resume()
.on("error", err => {
expect(err.message).to.equal("Failed mapping");
t.pass();
})
.on("end", t.end);

source.push("ab");
source.push("cd");
source.push("ef");
source.push(null);
});
80
tests/replace.spec.ts
Normal file
@ -0,0 +1,80 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { replace } from "../src";

test.cb(
"replace() replaces occurrences of the given string in the streamed elements with the specified " +
"replacement string",
t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const expectedElements = ["abc", "xyf", "ghi"];
let i = 0;
source
.pipe(replace("de", "xy"))
.on("data", part => {
expect(part).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);

source.push("abc");
source.push("def");
source.push("ghi");
source.push(null);
},
);

test.cb(
"replace() replaces occurrences of the given regular expression in the streamed elements with " +
"the specified replacement string",
t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const expectedElements = ["abc", "xyz", "ghi"];
let i = 0;
source
.pipe(replace(/^def$/, "xyz"))
.on("data", part => {
expect(part).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);

source.push("abc");
source.push("def");
source.push("ghi");
source.push(null);
},
);

test.cb(
"replace() replaces occurrences of the given multi-byte character even if it spans multiple chunks",
t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const expectedElements = ["ø", "O", "a"];
let i = 0;
source
.pipe(replace("ö", "O"))
.on("data", part => {
expect(part).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);

source.push(Buffer.from("ø").slice(0, 1)); // 2-byte character spanning two chunks
source.push(Buffer.from("ø").slice(1, 2));
source.push(Buffer.from("ö").slice(0, 1)); // 2-byte character spanning two chunks
source.push(Buffer.from("ö").slice(1, 2));
source.push("a");
source.push(null);
},
);
98
tests/split.spec.ts
Normal file
@ -0,0 +1,98 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { split } from "../src";

test.cb("split() splits chunks using the default separator (\n)", t => {
t.plan(5);
const source = new Readable({ objectMode: true });
const expectedParts = ["ab", "c", "d", "ef", ""];
let i = 0;
source
.pipe(split())
.on("data", part => {
expect(part).to.equal(expectedParts[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);

source.push("ab\n");
source.push("c");
source.push("\n");
source.push("d");
source.push("\nef\n");
source.push(null);
});

test.cb("split() splits chunks using the specified separator", t => {
t.plan(6);
const source = new Readable({ objectMode: true });
const expectedParts = ["ab", "c", "d", "e", "f", ""];
let i = 0;
source
.pipe(split("|"))
.on("data", (part: string) => {
expect(part).to.equal(expectedParts[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);

source.push("ab|");
source.push("c|d");
source.push("|");
source.push("e");
source.push("|f|");
source.push(null);
});

test.cb(
"split() splits utf8 encoded buffers using the specified separator",
t => {
t.plan(3);
const expectedElements = ["a", "b", "c"];
let i = 0;
const through = split(",");
const buf = Buffer.from("a,b,c");
through
.on("data", element => {
expect(element).to.equal(expectedElements[i]);
i++;
t.pass();
})
.on("error", t.end)
.on("end", t.end);

for (let j = 0; j < buf.length; ++j) {
through.write(buf.slice(j, j + 1));
}
through.end();
},
);

test.cb(
"split() splits utf8 encoded buffers with multi-byte characters using the specified separator",
t => {
t.plan(3);
const expectedElements = ["一", "一", "一"];
let i = 0;
const through = split(",");
const buf = Buffer.from("一,一,一"); // 一 is a multi-byte utf8 character (code point U+4E00)
through
.on("data", element => {
expect(element).to.equal(expectedElements[i]);
i++;
t.pass();
})
.on("error", t.end)
.on("end", t.end);

for (let j = 0; j < buf.length; ++j) {
through.write(buf.slice(j, j + 1));
}
through.end();
},
);
61
tests/stringify.spec.ts
Normal file
@ -0,0 +1,61 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { stringify } from "../src";

test.cb("stringify() stringifies the streamed elements as JSON", t => {
t.plan(4);
const source = new Readable({ objectMode: true });
const expectedElements = [
'"abc"',
"0",
'{"a":"a","b":"b","c":"c"}',
'["a","b","c"]',
];
let i = 0;
source
.pipe(stringify())
.on("data", part => {
expect(part).to.deep.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);

source.push("abc");
source.push(0);
source.push({ a: "a", b: "b", c: "c" });
source.push(["a", "b", "c"]);
source.push(null);
});

test.cb(
"stringify() stringifies the streamed elements as pretty-printed JSON",
t => {
t.plan(4);
const source = new Readable({ objectMode: true });
const expectedElements = [
'"abc"',
"0",
'{\n "a": "a",\n "b": "b",\n "c": "c"\n}',
'[\n "a",\n "b",\n "c"\n]',
];
let i = 0;
source
.pipe(stringify({ pretty: true }))
.on("data", part => {
expect(part).to.deep.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);

source.push("abc");
source.push(0);
source.push({ a: "a", b: "b", c: "c" });
source.push(["a", "b", "c"]);
source.push(null);
},
);
26
tests/unbatch.spec.ts
Normal file
@ -0,0 +1,26 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { unbatch, batch } from "../src";

test.cb("unbatch() unbatches", t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const expectedElements = ["a", "b", "c"];
let i = 0;
source
.pipe(batch(3))
.pipe(unbatch())
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);

source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
tsconfig.json
@ -3,16 +3,19 @@
"noImplicitAny": true,
"strictNullChecks": true,
"noImplicitReturns": true,
"noUnusedLocals": true,
"noUnusedLocals": false,
"noImplicitThis": true,
"forceConsistentCasingInFileNames": true,
"suppressImplicitAnyIndexErrors": true,
"outDir": "./dist",
"module": "commonjs",
"target": "es5",
"lib": ["es2016"],
"sourceMap": true,
"declaration": true
"module": "commonjs"
},
"include": ["src/**/*.ts"]
"target": "es5",
"lib": [
"es2016"
],
"sourceMap": true,
"declaration": true,
"include": ["src/**/*"],
"exclude": ["tests", "node_modules"]
}
tslint.json
@ -7,8 +7,9 @@
"rules": {
"no-console": false,
"no-implicit-dependencies": [true, "dev"],
"prettier": true,
"prettier": [true, ".prettierrc"],
"ordered-imports": false,
"interface-name": false
"interface-name": false,
"object-literal-sort-keys": false
}
}