Update tests

This commit is contained in:
Jerry Kurian 2019-09-11 14:29:20 -04:00
parent 9d280b1662
commit dcfd6fe4c2
4 changed files with 79 additions and 211 deletions

View File

@ -31,15 +31,10 @@ export function demux(
} }
class Demux extends Writable { class Demux extends Writable {
public isWritable: boolean;
private streamsByKey: { private streamsByKey: {
[key: string]: { [key: string]: NodeJS.WritableStream | NodeJS.ReadWriteStream;
stream: NodeJS.WritableStream | NodeJS.ReadWriteStream;
writable: boolean;
};
}; };
private demuxer: (chunk: any) => string; private demuxer: (chunk: any) => string;
private nonWritableStreams: Array<string>;
private construct: ( private construct: (
destKey?: string, destKey?: string,
) => NodeJS.WritableStream | NodeJS.ReadWriteStream; ) => NodeJS.WritableStream | NodeJS.ReadWriteStream;
@ -57,26 +52,19 @@ class Demux extends Writable {
this.demuxer = demuxBy.keyBy || ((chunk: any) => chunk[demuxBy.key!]); this.demuxer = demuxBy.keyBy || ((chunk: any) => chunk[demuxBy.key!]);
this.construct = construct; this.construct = construct;
this.streamsByKey = {}; this.streamsByKey = {};
this.isWritable = true;
this.nonWritableStreams = [];
} }
// Throttles when one stream is not writable
public async _write(chunk: any, encoding: any, cb: any) { public async _write(chunk: any, encoding: any, cb: any) {
const destKey = this.demuxer(chunk); const destKey = this.demuxer(chunk);
if (this.streamsByKey[destKey] === undefined) { if (this.streamsByKey[destKey] === undefined) {
this.streamsByKey[destKey] = { this.streamsByKey[destKey] = this.construct(destKey);
stream: this.construct(destKey),
writable: true,
};
} }
if (!this.streamsByKey[destKey].stream.write(chunk, encoding, cb)) { if (!this.streamsByKey[destKey].write(chunk, encoding)) {
await new Promise((resolve, reject) => { this.streamsByKey[destKey].once("drain", () => {
this.streamsByKey[destKey].stream.once("drain", () => { cb();
resolve();
this.emit("drain");
});
}); });
} else {
cb();
} }
} }
@ -87,7 +75,7 @@ class Demux extends Writable {
break; break;
case EventSubscription.All: case EventSubscription.All:
Object.keys(this.streamsByKey).forEach(key => Object.keys(this.streamsByKey).forEach(key =>
this.streamsByKey[key].stream.on(event, cb), this.streamsByKey[key].on(event, cb),
); );
break; break;
case EventSubscription.Unhandled: case EventSubscription.Unhandled:
@ -107,7 +95,7 @@ class Demux extends Writable {
break; break;
case EventSubscription.All: case EventSubscription.All:
Object.keys(this.streamsByKey).forEach(key => Object.keys(this.streamsByKey).forEach(key =>
this.streamsByKey[key].stream.once(event, cb), this.streamsByKey[key].once(event, cb),
); );
break; break;
case EventSubscription.Unhandled: case EventSubscription.Unhandled:

View File

@ -12,12 +12,7 @@ export function map<T, R>(
return new Transform({ return new Transform({
...options, ...options,
async transform(chunk: T, encoding, callback) { async transform(chunk: T, encoding, callback) {
try { callback(null, await mapper(chunk, encoding));
const mapped = await mapper(chunk, encoding);
callback(null, mapped);
} catch (err) {
callback(err);
}
}, },
}); });
} }

View File

@ -172,63 +172,6 @@ test.cb("demux() should send input through correct pipeline using keyBy", t => {
demuxed.end(); demuxed.end();
}); });
// Probably needs to be removed
test.cb("should emit errors", t => {
t.plan(2);
let index = 0;
const input = [
{ key: "a", visited: [] },
{ key: "b", visited: [] },
{ key: "a", visited: [] },
{ key: "a", visited: [] },
];
const results = [
{ key: "a", visited: [0] },
{ key: "b", visited: [1] },
{ key: "a", visited: [2] },
{ key: "a", visited: [3] },
];
const destinationStreamKeys = [];
const sink = new Writable({
objectMode: true,
write(chunk, enc, cb) {
expect(results).to.deep.include(chunk);
expect(input).to.not.deep.include(chunk);
t.pass();
cb();
},
});
const construct = (destKey: string) => {
destinationStreamKeys.push(destKey);
const dest = map((chunk: Test) => {
if (chunk.key === "b") {
throw new Error("Caught object with key 'b'");
}
const _chunk = { ...chunk, visited: [] };
_chunk.visited.push(index);
index++;
return _chunk;
}).on("error", () => {}); // Otherwise ava complains
dest.pipe(sink);
return dest;
};
const demuxed = demux(
construct,
{ keyBy: (chunk: any) => chunk.key },
{ objectMode: true },
);
demuxed.on("error", e => {
expect(e.message).to.equal("Caught object with key 'b'");
t.pass();
t.end();
});
input.forEach(event => demuxed.write(event));
});
test("demux() when write returns false, drain event should be emitted after at least slowProcessorSpeed * highWaterMark", t => { test("demux() when write returns false, drain event should be emitted after at least slowProcessorSpeed * highWaterMark", t => {
return new Promise(async (resolve, reject) => { return new Promise(async (resolve, reject) => {
t.plan(7); t.plan(7);
@ -259,11 +202,14 @@ test("demux() when write returns false, drain event should be emitted after at l
}, },
}); });
const construct = (destKey: string) => { const construct = (destKey: string) => {
const first = map(async (chunk: Chunk) => { const first = map(
await sleep(slowProcessorSpeed); async (chunk: Chunk) => {
chunk.mapped.push(1); await sleep(slowProcessorSpeed);
return chunk; chunk.mapped.push(1);
}); return chunk;
},
{ highWaterMark: 1, objectMode: true },
);
const second = map(async (chunk: Chunk) => { const second = map(async (chunk: Chunk) => {
chunk.mapped.push(2); chunk.mapped.push(2);
@ -285,14 +231,6 @@ test("demux() when write returns false, drain event should be emitted after at l
reject(); reject();
}); });
_demux.on("drain", () => {
expect(_demux._writableState.length).to.be.equal(0);
expect(performance.now() - start).to.be.greaterThan(
slowProcessorSpeed * highWaterMark,
);
t.pass();
});
let start = null; let start = null;
for (const item of input) { for (const item of input) {
const res = _demux.write(item); const res = _demux.write(item);
@ -301,6 +239,11 @@ test("demux() when write returns false, drain event should be emitted after at l
start = performance.now(); start = performance.now();
await new Promise((resolv, rej) => { await new Promise((resolv, rej) => {
_demux.once("drain", () => { _demux.once("drain", () => {
expect(_demux._writableState.length).to.be.equal(0);
expect(performance.now() - start).to.be.greaterThan(
slowProcessorSpeed * highWaterMark,
);
t.pass();
resolv(); resolv();
}); });
}); });
@ -318,63 +261,60 @@ test("demux() should emit one drain event when writing 6 items with highWaterMar
} }
const highWaterMark = 5; const highWaterMark = 5;
const input = [ const input = [
{ key: "a", mapped: [] }, { key: "a", val: 1, mapped: [] },
{ key: "a", mapped: [] }, { key: "a", val: 2, mapped: [] },
{ key: "a", mapped: [] }, { key: "a", val: 3, mapped: [] },
{ key: "a", mapped: [] }, { key: "a", val: 4, mapped: [] },
{ key: "a", mapped: [] }, { key: "a", val: 5, mapped: [] },
{ key: "a", mapped: [] }, { key: "a", val: 6, mapped: [] },
]; ];
let pendingReads = input.length; let pendingReads = input.length;
const sink = new Writable({ const sink = new Writable({
objectMode: true, objectMode: true,
write(chunk, encoding, cb) { write(chunk, encoding, cb) {
cb(); cb();
t.pass();
pendingReads--; pendingReads--;
t.pass();
if (pendingReads === 0) { if (pendingReads === 0) {
resolve(); resolve();
} }
}, },
}); });
const construct = (destKey: string) => { const construct = (destKey: string) => {
const first = map(async (chunk: Chunk) => { const pipeline = map(
chunk.mapped.push(1); async (chunk: Chunk) => {
return chunk; await sleep(50);
}); chunk.mapped.push(2);
return chunk;
},
{ highWaterMark: 1, objectMode: true },
);
const second = map(async (chunk: Chunk) => { pipeline.pipe(sink);
chunk.mapped.push(2); return pipeline;
return chunk;
});
first.pipe(second).pipe(sink);
return first;
}; };
const _demux = demux( const _demux = demux(
construct, construct,
{ key: "key" }, { key: "key" },
{ {
objectMode: true, objectMode: true,
highWaterMark, highWaterMark: 5,
}, },
); );
_demux.on("error", err => { _demux.on("error", err => {
reject(); reject();
}); });
_demux.on("drain", () => {
expect(_demux._writableState.length).to.be.equal(0);
t.pass();
});
for (const item of input) { for (const item of input) {
const res = _demux.write(item); const res = _demux.write(item);
expect(_demux._writableState.length).to.be.at.most(highWaterMark); expect(_demux._writableState.length).to.be.at.most(highWaterMark);
if (!res) { if (!res) {
await new Promise((_resolve, _reject) => { await new Promise(_resolve => {
_demux.once("drain", () => { _demux.once("drain", () => {
_resolve(); _resolve();
expect(_demux._writableState.length).to.be.equal(0);
t.pass();
}); });
}); });
} }
@ -386,8 +326,8 @@ test.cb(
"demux() should emit drain event immediately when second stream is bottleneck", "demux() should emit drain event immediately when second stream is bottleneck",
t => { t => {
t.plan(6); t.plan(6);
const highWaterMark = 5; const slowProcessorSpeed = 100;
const slowProcessorSpeed = 200; const highWaterMark = 3;
interface Chunk { interface Chunk {
key: string; key: string;
mapped: number[]; mapped: number[];
@ -395,11 +335,13 @@ test.cb(
const sink = new Writable({ const sink = new Writable({
objectMode: true, objectMode: true,
write(chunk, encoding, cb) { write(chunk, encoding, cb) {
expect(chunk.mapped).to.deep.equal([1, 2]);
t.pass(); t.pass();
cb(); pendingReads--;
if (pendingReads === 0) { if (pendingReads === 0) {
t.end(); t.end();
} }
cb();
}, },
}); });
const construct = (destKey: string) => { const construct = (destKey: string) => {
@ -408,16 +350,13 @@ test.cb(
chunk.mapped.push(1); chunk.mapped.push(1);
return chunk; return chunk;
}, },
{ objectMode: true }, { objectMode: true, highWaterMark: 1 },
); );
const second = map( const second = map(
async (chunk: Chunk) => { async (chunk: Chunk) => {
pendingReads--;
await sleep(slowProcessorSpeed); await sleep(slowProcessorSpeed);
chunk.mapped.push(2); chunk.mapped.push(2);
expect(second._writableState.length).to.be.equal(1);
expect(first._readableState.length).to.equal(pendingReads);
return chunk; return chunk;
}, },
{ objectMode: true, highWaterMark: 1 }, { objectMode: true, highWaterMark: 1 },
@ -440,8 +379,9 @@ test.cb(
_demux.on("drain", () => { _demux.on("drain", () => {
expect(_demux._writableState.length).to.be.equal(0); expect(_demux._writableState.length).to.be.equal(0);
expect(performance.now() - start).to.be.lessThan( // Should take longer than the amount of items needed to be processed until we are under highWaterMark
slowProcessorSpeed, expect(performance.now() - start).to.be.greaterThan(
slowProcessorSpeed * (input.length - highWaterMark - 1),
); );
t.pass(); t.pass();
}); });
@ -453,18 +393,18 @@ test.cb(
{ key: "a", mapped: [] }, { key: "a", mapped: [] },
{ key: "a", mapped: [] }, { key: "a", mapped: [] },
]; ];
let pendingReads = input.length; let pendingReads = input.length;
const start = performance.now();
input.forEach(item => { input.forEach(item => {
_demux.write(item); _demux.write(item);
}); });
const start = performance.now();
}, },
); );
test.only("demux() should only emit drain event when all streams are writable", t => { test("demux() should be blocked by slowest pipeline", t => {
t.plan(1); t.plan(1);
const highWaterMark = 2; const slowProcessorSpeed = 100;
interface Chunk { interface Chunk {
key: string; key: string;
mapped: number[]; mapped: number[];
@ -476,33 +416,26 @@ test.only("demux() should only emit drain event when all streams are writable",
cb(); cb();
pendingReads--; pendingReads--;
if (chunk.key === "b") { if (chunk.key === "b") {
expect(performance.now() - start).to.be.greaterThan(150); expect(performance.now() - start).to.be.greaterThan(
slowProcessorSpeed * totalItems,
);
t.pass(); t.pass();
} expect(pendingReads).to.equal(0);
if (pendingReads === 0) {
resolve(); resolve();
} }
}, },
}); });
const construct = (destKey: string) => { const construct = (destKey: string) => {
const first = map( const first = map(
(chunk: Chunk) => { async (chunk: Chunk) => {
await sleep(slowProcessorSpeed);
chunk.mapped.push(1); chunk.mapped.push(1);
return chunk; return chunk;
}, },
{ objectMode: true, highWaterMark: 1 }, { objectMode: true, highWaterMark: 1 },
); );
const second = map( first.pipe(sink);
async (chunk: Chunk) => {
await sleep(2000);
chunk.mapped.push(2);
return chunk;
},
{ objectMode: true, highWaterMark: 1 },
);
first.pipe(second).pipe(sink);
return first; return first;
}; };
const _demux = demux( const _demux = demux(
@ -510,6 +443,7 @@ test.only("demux() should only emit drain event when all streams are writable",
{ key: "key" }, { key: "key" },
{ {
objectMode: true, objectMode: true,
highWaterMark: 1,
}, },
); );
_demux.on("error", err => { _demux.on("error", err => {
@ -521,13 +455,21 @@ test.only("demux() should only emit drain event when all streams are writable",
{ key: "a", mapped: [] }, { key: "a", mapped: [] },
{ key: "c", mapped: [] }, { key: "c", mapped: [] },
{ key: "c", mapped: [] }, { key: "c", mapped: [] },
{ key: "b", mapped: [] }, // should only be recieved after a and c become writable { key: "c", mapped: [] },
{ key: "b", mapped: [] },
]; ];
let pendingReads = input.length; let pendingReads = input.length;
const totalItems = input.length;
const start = performance.now(); const start = performance.now();
for (const item of input) { for (const item of input) {
console.log("DEMUX", _demux.write(item)); if (!_demux.write(item)) {
await new Promise(_resolve => {
_demux.once("drain", () => {
_resolve();
});
});
}
} }
}); });
}); });
@ -543,6 +485,7 @@ test("demux() should emit drain event and first should contain up to highWaterMa
const sink = new Writable({ const sink = new Writable({
objectMode: true, objectMode: true,
write(chunk, encoding, cb) { write(chunk, encoding, cb) {
expect(chunk.mapped).to.deep.equal([1, 2]);
t.pass(); t.pass();
cb(); cb();
if (pendingReads === 0) { if (pendingReads === 0) {
@ -557,7 +500,7 @@ test("demux() should emit drain event and first should contain up to highWaterMa
chunk.mapped.push(1); chunk.mapped.push(1);
return chunk; return chunk;
}, },
{ objectMode: 2, highWaterMark: 2 }, { objectMode: true, highWaterMark: 1 },
); );
const second = map( const second = map(
@ -568,7 +511,7 @@ test("demux() should emit drain event and first should contain up to highWaterMa
pendingReads--; pendingReads--;
return chunk; return chunk;
}, },
{ objectMode: 2, highWaterMark: 2 }, { objectMode: true, highWaterMark: 1 },
); );
first.pipe(second).pipe(sink); first.pipe(second).pipe(sink);

View File

@ -49,61 +49,3 @@ test.cb("map() maps elements asynchronously", t => {
source.push("c"); source.push("c");
source.push(null); source.push(null);
}); });
test.cb("map() emits errors during synchronous mapping", t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const mapStream = map((element: string) => {
if (element !== "b") {
throw new Error("Failed mapping");
}
return element.toUpperCase();
});
source
.pipe(mapStream)
.on("data", data => {
expect(data).to.equal("B");
t.pass();
})
.on("error", err => {
source.pipe(mapStream);
mapStream.resume();
expect(err.message).to.equal("Failed mapping");
t.pass();
})
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test("map() emits errors during asynchronous mapping", t => {
t.plan(1);
return new Promise((resolve, _) => {
const source = new Readable({ objectMode: true });
const mapStream = map(async (element: string) => {
await Promise.resolve();
if (element === "b") {
throw new Error("Failed mapping");
}
return element.toUpperCase();
});
source
.pipe(mapStream)
.on("error", err => {
expect(err.message).to.equal("Failed mapping");
t.pass();
resolve();
})
.on("end", () => t.fail);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
source.push(null);
source.push(null);
});
});