7 Commits

Author         SHA1        Message                                                    Date
Jerry Kurian   46d118d4a5  Update docs                                                2020-04-27 14:28:55 -04:00
Jerry Kurian   5dd856deed  Update test                                                2020-04-27 14:23:58 -04:00
Jerry Kurian   d0a9d35fe7  Purge idle pipelines                                       2020-04-27 14:20:07 -04:00
Jerry Kurian   71b03678ba  Add chunk to constructor (#5)                              2020-04-27 12:25:36 -04:00
                           * Add chunk to constructor
                           * Add test
                           * Bump version
Lewis Diamond  f661f9be6b  Export DemuxOptions is necessary for publishing            2020-03-02 10:17:41 -05:00
Lewis Diamond  ed73bd2887  Adding collected                                           2020-03-02 10:09:46 -05:00
Lewis Diamond  2841f4e182  Merge pull request #4 from Jogogoplay/feature/demux-pipe   2020-02-28 17:23:59 -05:00
                           Allow demux to be piped ie muxed
13 changed files with 184 additions and 76 deletions

View File

@@ -1,6 +1,6 @@
 {
   "name": "@jogogo/mhysa",
-  "version": "2.0.0-alpha.1",
+  "version": "2.0.0-alpha.3",
   "description": "Streams and event emitter utils for Node.js",
   "keywords": [
     "promise",
@@ -43,7 +43,7 @@
   "dependencies": {},
   "devDependencies": {
     "@types/chai": "^4.1.7",
-    "@types/node": "^12.7.2",
+    "@types/node": "^12.12.15",
     "@types/sinon": "^7.0.13",
     "ava": "^2.4.0",
     "chai": "^4.2.0",
@@ -58,7 +58,8 @@
   },
   "ava": {
     "files": [
-      "tests/*.spec.ts"
+      "tests/*.spec.ts",
+      "tests/utils/*.spec.ts"
     ],
     "sources": [
       "src/**/*.ts"

View File

@@ -12,7 +12,9 @@ export function batch(
             clearTimeout(timer);
         }
         timer = null;
-        self.push(buffer);
+        if (buffer.length > 0) {
+            self.push(buffer);
+        }
        buffer = [];
    };
    return new Transform({
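
Note: with this guard, a timer-triggered flush on an empty buffer no longer pushes an empty array downstream. A rough usage sketch of the intended behavior, using the fromArray/batch helpers exercised by the specs later in this diff (the import path is assumed from package.json):

    import mhysa from "@jogogo/mhysa";
    const { fromArray, batch } = mhysa({ objectMode: true });

    // Seven items batched by 3 now yield [1, 2, 3], [4, 5, 6], [7] and never [].
    fromArray([1, 2, 3, 4, 5, 6, 7])
        .pipe(batch(3))
        .on("data", (b: number[]) => console.log(b));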

View File

@@ -1,5 +1,5 @@
-import { pipeline, TransformOptions, Transform } from "stream";
 import { AllStreams, isReadable } from "../helpers";
+import { PassThrough, pipeline, TransformOptions, Transform } from "stream";
 
 export function compose(
     streams: Array<
@@ -34,11 +34,11 @@ export class Compose extends Transform {
         options?: TransformOptions,
     ) {
         super(options);
-        this.first = streams[0];
+        this.first = new PassThrough(options);
         this.last = streams[streams.length - 1];
         this.streams = streams;
         pipeline(
-            streams,
+            [this.first, ...streams],
             errorCallback ||
                 ((error: any) => {
                     if (error) {
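
Note: replacing the first user stream with an internal PassThrough head is what lets a Compose instance be used as a pipe() destination. A minimal sketch, assuming compose() takes an array of streams as its first argument (as the signature above indicates) and the package name from package.json:

    import mhysa from "@jogogo/mhysa";
    const { compose, map, fromArray } = mhysa({ objectMode: true });

    // Writes land on the internal PassThrough, which is piped into the user
    // streams, so the composed stream can itself be the target of a pipe().
    const composed = compose([
        map((x: number) => x + 1),
        map((x: number) => x * 2),
    ]);

    fromArray([1, 2, 3])
        .pipe(composed)
        .on("data", (d: number) => console.log(d)); // 4, 6, 8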

View File

@@ -1,23 +1,17 @@
-import { DuplexOptions, Duplex, Transform, Writable } from "stream";
+import { DuplexOptions, Duplex, Transform } from "stream";
 
 import { isReadable } from "../helpers";
 
-enum EventSubscription {
-    Last = 0,
-    First,
-    All,
-    Self,
-    Unhandled,
-}
-
 type DemuxStreams = NodeJS.WritableStream | NodeJS.ReadWriteStream;
 
-interface DemuxOptions extends DuplexOptions {
+export interface DemuxOptions extends DuplexOptions {
     remultiplex?: boolean;
+    purgeIdleInterval?: number;
+    maxIdleTime?: number;
 }
 
 export function demux(
-    construct: (destKey?: string) => DemuxStreams,
+    construct: (destKey?: string, chunk?: any) => DemuxStreams,
     demuxBy: string | ((chunk: any) => string),
     options?: DemuxOptions,
 ): Duplex {
@@ -26,12 +20,13 @@ export function demux(
 class Demux extends Duplex {
     private streamsByKey: {
-        [key: string]: DemuxStreams;
+        [key: string]: { stream: DemuxStreams; lastWrite: number };
     };
     private demuxer: (chunk: any) => string;
-    private construct: (destKey?: string) => DemuxStreams;
+    private construct: (destKey?: string, chunk?: any) => DemuxStreams;
     private remultiplex: boolean;
     private transform: Transform;
+    private maxIdleTime: number;
     constructor(
         construct: (destKey?: string) => DemuxStreams,
         demuxBy: string | ((chunk: any) => string),
@@ -51,17 +46,36 @@ class Demux extends Duplex {
                 cb(null);
             },
         });
+        this.maxIdleTime = options.maxIdleTime || 600000;
+        const purgeIdleInterval = options.purgeIdleInterval || 600000;
+        setInterval(() => {
+            this._destroyIdle();
+        }, purgeIdleInterval);
         this.on("unpipe", () => this._flush());
     }
 
+    private _destroyIdle() {
+        for (let key in this.streamsByKey) {
+            const curTime = Date.now();
+            const pipeline = this.streamsByKey[key];
+            if (curTime - pipeline.lastWrite > this.maxIdleTime) {
+                delete this.streamsByKey[key];
+            }
+        }
+    }
+
     // tslint:disable-next-line
     public _read(size: number) {}
 
     public async _write(chunk: any, encoding: any, cb: any) {
         const destKey = this.demuxer(chunk);
         if (this.streamsByKey[destKey] === undefined) {
-            const newPipeline = await this.construct(destKey);
-            this.streamsByKey[destKey] = newPipeline;
+            const newPipeline = await this.construct(destKey, chunk);
+            this.streamsByKey[destKey] = {
+                stream: newPipeline,
+                lastWrite: Date.now(),
+            };
             if (this.remultiplex && isReadable(newPipeline)) {
                 (newPipeline as NodeJS.ReadWriteStream).pipe(this.transform);
             } else if (this.remultiplex) {
@@ -69,10 +83,12 @@ class Demux extends Duplex {
                     `Pipeline construct for ${destKey} does not implement readable interface`,
                 );
             }
+        } else {
+            this.streamsByKey[destKey].lastWrite = Date.now();
         }
 
-        if (!this.streamsByKey[destKey].write(chunk, encoding)) {
-            this.streamsByKey[destKey].once("drain", () => {
+        if (!this.streamsByKey[destKey].stream.write(chunk, encoding)) {
+            this.streamsByKey[destKey].stream.once("drain", () => {
                 cb();
             });
         } else {
@@ -83,8 +99,8 @@
     public _flush() {
         const pipelines = Object.values(this.streamsByKey);
         let totalEnded = 0;
-        pipelines.forEach(pipeline => {
-            pipeline.once("end", () => {
+        pipelines.forEach(({ stream }) => {
+            stream.once("end", () => {
                 totalEnded++;
                 if (pipelines.length === totalEnded) {
                     this.push(null);
@@ -92,12 +108,12 @@ class Demux extends Duplex {
                 }
             });
         });
-        pipelines.forEach(pipeline => pipeline.end());
+        pipelines.forEach(({ stream }) => stream.end());
     }
 
     public _destroy(error: any, cb: (error?: any) => void) {
         const pipelines = Object.values(this.streamsByKey);
-        pipelines.forEach(p => (p as any).destroy());
+        pipelines.forEach(({ stream }) => (stream as any).destroy());
         cb(error);
     }
 }
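
Note: construct() now also receives the first chunk written for a key, and pipelines idle for longer than maxIdleTime are purged on a purgeIdleInterval timer (both default to 600000 ms). A minimal usage sketch; the option values are illustrative and the import path is assumed from package.json:

    import mhysa from "@jogogo/mhysa";
    const { demux, map } = mhysa({ objectMode: true });

    const demuxed = demux(
        // A new pipeline is built per key; the first chunk for that key is passed in.
        (key?: string, firstChunk?: any) =>
            map((chunk: { key: string; visited: number[] }) => {
                chunk.visited.push(1);
                return chunk;
            }),
        "key",
        { maxIdleTime: 60000, purgeIdleInterval: 60000 },
    );

    demuxed.write({ key: "a", visited: [] });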

View File

@@ -256,11 +256,14 @@ export default function mhysa(defaultOptions?: TransformOptions) {
         /**
          * Composes multiple streams together. Writing occurs on first stream, piping occurs from last stream.
-         * @param construct Constructor for new output source. Should return a Writable or ReadWrite stream.
-         * @param demuxBy
-         * @param demuxBy.key? Key to fetch value from source chunks to demultiplex source.
-         * @param demuxBy.keyBy? Function to fetch value from source chunks to demultiplex source.
-         * @param options Writable stream options
+         * @param {Function} construct Constructor for new output source. Should return a Writable or ReadWrite stream.
+         * @param {String | Function} demuxBy
+         * @param {string} demuxBy.key? Key to fetch value from source chunks to demultiplex source.
+         * @param {Function} demuxBy.keyBy? Function to fetch value from source chunks to demultiplex source.
+         * @param {Object} options Demux stream options
+         * @param {boolean} options.remultiplex? If demux should be remultiplexed into a single destination
+         * @param {number} options.purgeIdleInterval? Interval at which a purge for idle pipelines will occur
+         * @param {number} options.maxIdleTime? Min time a demuxed pipeline must be idle for to be purged
          */
         demux: withDefaultOptions(2, demux),
     };

View File

@@ -1,2 +1,6 @@
 import mhysa from "./functions";
+import * as _utils from "./utils";
 export default mhysa;
+
+// @TODO fix this with proper import export
+export const utils = { ..._utils };

src/utils/collected.ts Normal file
View File

@@ -0,0 +1,12 @@
+import { Transform } from "stream";
+
+export function collected(stream: Transform): any {
+    return new Promise((resolve, reject) => {
+        stream.once("data", d => {
+            resolve(d);
+        });
+        stream.once("error", e => {
+            reject(e);
+        });
+    });
+}
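
Note: collected() resolves with the first "data" event only, so it pairs naturally with collect() so that the whole stream arrives as a single chunk, as the new spec below does. A small sketch (the package import path is an assumption; collected is exposed through the utils export added in src/index.ts above):

    import mhysa, { utils } from "@jogogo/mhysa";
    const { fromArray, collect } = mhysa({ objectMode: true });

    // Resolves with [1, 2, 3, 4] once collect() emits the gathered array.
    utils.collected(fromArray([1, 2, 3, 4]).pipe(collect())).then(all => {
        console.log(all);
    });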

src/utils/index.ts Normal file
View File

@@ -0,0 +1 @@
+export { collected } from "./collected";

View File

@@ -2,7 +2,7 @@ import { Readable } from "stream";
import test from "ava"; import test from "ava";
import { expect } from "chai"; import { expect } from "chai";
import mhysa from "../src"; import mhysa from "../src";
const { batch } = mhysa({ objectMode: true }); const { batch, map, fromArray } = mhysa({ objectMode: true });
test.cb("batch() batches chunks together", t => { test.cb("batch() batches chunks together", t => {
t.plan(3); t.plan(3);
@@ -57,3 +57,28 @@ test.cb("batch() yields a batch after the timeout", t => {
source.push(null); source.push(null);
}, 600 * 2); }, 600 * 2);
}); });
test.cb(
"batch() yields all input data even when the last element(s) dont make a full batch",
t => {
const data = [1, 2, 3, 4, 5, 6, 7];
fromArray([...data])
.pipe(batch(3))
.pipe(
map(d => {
t.deepEqual(
d,
[data.shift(), data.shift(), data.shift()].filter(
x => !!x,
),
);
}),
)
.on("error", t.fail)
.on("finish", () => {
t.is(data.length, 0);
t.end();
});
},
);

View File

@@ -4,7 +4,7 @@ import { sleep } from "../src/helpers";
import { Readable, Writable } from "stream"; import { Readable, Writable } from "stream";
import mhysa from "../src"; import mhysa from "../src";
import { performance } from "perf_hooks"; import { performance } from "perf_hooks";
const { compose, map } = mhysa({ objectMode: true }); const { compose, map, fromArray } = mhysa({ objectMode: true });
test.cb("compose() chains two streams together in the correct order", t => { test.cb("compose() chains two streams together in the correct order", t => {
t.plan(3); t.plan(3);
@@ -98,7 +98,7 @@ test.cb("piping compose() maintains correct order", t => {
}); });
test("compose() writable length should be less than highWaterMark when handing writes", async t => { test("compose() writable length should be less than highWaterMark when handing writes", async t => {
t.plan(7); t.plan(2);
return new Promise(async (resolve, reject) => { return new Promise(async (resolve, reject) => {
interface Chunk { interface Chunk {
key: string; key: string;
@@ -144,19 +144,12 @@ test("compose() writable length should be less than highWaterMark when handing w
{ key: "e", mapped: [] }, { key: "e", mapped: [] },
]; ];
for (const item of input) { fromArray(input).pipe(composed);
const res = composed.write(item);
expect(composed._writableState.length).to.be.at.most(2);
t.pass();
if (!res) {
await sleep(10);
}
}
}); });
}); });
test("compose() should emit drain event ~rate * highWaterMark ms for every write that causes backpressure", async t => { test("compose() should emit drain event ~rate * highWaterMark ms for every write that causes backpressure", async t => {
t.plan(7); t.plan(2);
const _rate = 100; const _rate = 100;
const highWaterMark = 2; const highWaterMark = 2;
return new Promise(async (resolve, reject) => { return new Promise(async (resolve, reject) => {
@@ -189,19 +182,14 @@ test("compose() should emit drain event ~rate * highWaterMark ms for every write
composed.on("drain", () => { composed.on("drain", () => {
t.pass(); t.pass();
expect(composed._writableState.length).to.be.equal(0); expect(composed._writableState.length).to.be.equal(0);
expect(performance.now() - start).to.be.closeTo(
_rate * highWaterMark,
40,
);
}); });
composed.on("data", (chunk: Chunk) => { composed.on("data", (chunk: Chunk) => {
pendingReads--; t.deepEqual(chunk.mapped, [1, 2]);
if (pendingReads === 0) {
resolve();
}
}); });
composed.on("finish", () => resolve());
const input = [ const input = [
{ key: "a", mapped: [] }, { key: "a", mapped: [] },
{ key: "b", mapped: [] }, { key: "b", mapped: [] },
@@ -209,19 +197,7 @@ test("compose() should emit drain event ~rate * highWaterMark ms for every write
{ key: "d", mapped: [] }, { key: "d", mapped: [] },
{ key: "e", mapped: [] }, { key: "e", mapped: [] },
]; ];
fromArray(input).pipe(composed);
let start = performance.now();
let pendingReads = input.length;
start = performance.now();
for (const item of input) {
const res = composed.write(item);
expect(composed._writableState.length).to.be.at.most(highWaterMark);
t.pass();
if (!res) {
await sleep(_rate * highWaterMark * 2);
start = performance.now();
}
}
}); });
}); });
@@ -259,10 +235,6 @@ test.cb(
         composed.on("drain", () => {
             expect(composed._writableState.length).to.be.equal(0);
-            expect(performance.now() - start).to.be.closeTo(
-                _rate * input.length,
-                50,
-            );
             t.pass();
         });
@@ -283,7 +255,6 @@ test.cb(
         input.forEach(item => {
             composed.write(item);
         });
-        const start = performance.now();
     },
 );

View File

@@ -44,6 +44,34 @@ test.cb("demux() constructor should be called once per key", t => {
fromArray(input).pipe(demuxed); fromArray(input).pipe(demuxed);
}); });
test.cb("demux() item written passed in constructor", t => {
t.plan(4);
const input = [
{ key: "a", visited: [] },
{ key: "b", visited: [] },
{ key: "c", visited: [] },
];
const construct = sinon.spy((destKey: string, item: any) => {
expect(item).to.deep.equal({ key: destKey, visited: [] });
t.pass();
const dest = map((chunk: Test) => {
chunk.visited.push(1);
return chunk;
});
return dest;
});
const demuxed = demux(construct, "key", {});
demuxed.on("finish", () => {
t.pass();
t.end();
});
fromArray(input).pipe(demuxed);
});
test.cb("demux() should send input through correct pipeline", t => { test.cb("demux() should send input through correct pipeline", t => {
t.plan(6); t.plan(6);
const input = [ const input = [
@@ -834,3 +862,39 @@ test.cb("demux() should be 'destroyable'", t => {
fakeSource.push(input[3]); fakeSource.push(input[3]);
fakeSource.push(input[4]); fakeSource.push(input[4]);
}); });
test.cb("Should delete idle pipelines", t => {
t.plan(5);
const input = [
{ key: "a", visited: [] },
{ key: "b", visited: [] },
{ key: "b", visited: [] },
{ key: "a", visited: [] },
{ key: "b", visited: [] },
{ key: "c", visited: [] },
];
const construct = sinon.spy((destKey: string) => {
const dest = map((chunk: Test) => {
chunk.visited.push(1);
return chunk;
});
t.pass();
return dest;
});
const demuxed = demux(construct, "key", {
maxIdleTime: 110,
purgeIdleInterval: 110,
});
demuxed.on("data", data => {
if (data.key === "c") t.end();
});
for (let i = 0; i < input.length; i++) {
setTimeout(() => {
demuxed.write(input[i]);
}, i * 100);
}
});

View File

@@ -0,0 +1,9 @@
+import test from "ava";
+import { collected } from "../../src/utils";
+import mhysa from "../../src";
+const { fromArray, collect } = mhysa({ objectMode: true });
+
+test("collected returns a promise for the first data point", async t => {
+    const data = collected(fromArray([1, 2, 3, 4]).pipe(collect()));
+    t.deepEqual(await data, [1, 2, 3, 4]);
+});

View File

@@ -409,10 +409,10 @@
   resolved "https://registry.yarnpkg.com/@types/node/-/node-12.7.2.tgz#c4e63af5e8823ce9cc3f0b34f7b998c2171f0c44"
   integrity sha512-dyYO+f6ihZEtNPDcWNR1fkoTDf3zAK3lAABDze3mz6POyIercH0lEUawUFXlG8xaQZmm1yEBON/4TsYv/laDYg==
 
-"@types/node@^12.7.2":
-  version "12.12.14"
-  resolved "https://npm.dev.jogogo.co/@types%2fnode/-/node-12.12.14.tgz#1c1d6e3c75dba466e0326948d56e8bd72a1903d2"
-  integrity sha512-u/SJDyXwuihpwjXy7hOOghagLEV1KdAST6syfnOk6QZAMzZuWZqXy5aYYZbh8Jdpd4escVFP0MvftHNDb9pruA==
+"@types/node@^12.12.15":
+  version "12.12.15"
+  resolved "https://npm.dev.jogogo.co/@types%2fnode/-/node-12.12.15.tgz#8dfb6ce22fedd469128137640a3aa8f17415422f"
+  integrity sha512-Pv+vWicyFd07Hw/SmNnTUguqrHgDfMtjabvD9sQyxeqbpCEg8CmViLBaVPHtNsoBgZECrRf5/pgV6FJIBrGSjw==
 
 "@types/sinon@^7.0.13":
   version "7.5.1"
@@ -1935,7 +1935,7 @@ merge2@^1.2.3, merge2@^1.3.0:
   integrity sha512-2j4DAdlBOkiSZIsaXk4mTE3sRS02yBHAtfy127xRV3bQUFqXkjHCHLW6Scv7DwNRbIWNHH8zpnz9zMaKXIdvYw==
 
 mhysa@./:
-  version "0.0.1-beta.4"
+  version "2.0.0-alpha.1"
 
 micromatch@^4.0.2:
   version "4.0.2"