Refactoring
This commit is contained in:
577
tests/accumulator.spec.ts
Normal file
577
tests/accumulator.spec.ts
Normal file
@@ -0,0 +1,577 @@
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { Readable } from "stream";
|
||||
import { accumulator, accumulatorBy } from "../src";
|
||||
import { FlushStrategy } from "../src/functions/baseDefinitions";
|
||||
|
||||
test.cb("accumulator() rolling", t => {
|
||||
t.plan(3);
|
||||
let chunkIndex = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const firstFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
|
||||
const secondFlush = [{ ts: 2, key: "d" }, { ts: 3, key: "e" }];
|
||||
const thirdFlush = [{ ts: 4, key: "f" }];
|
||||
const flushes = [firstFlush, secondFlush, thirdFlush];
|
||||
|
||||
source
|
||||
.pipe(accumulator(2, undefined, FlushStrategy.rolling))
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
t.deepEqual(flush, flushes[chunkIndex]);
|
||||
chunkIndex++;
|
||||
})
|
||||
.on("error", (e: any) => {
|
||||
t.end(e);
|
||||
})
|
||||
.on("end", () => {
|
||||
t.end();
|
||||
});
|
||||
[...firstFlush, ...secondFlush, ...thirdFlush].forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb("accumulator() rolling with key", t => {
|
||||
t.plan(2);
|
||||
let chunkIndex = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const firstFlush = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ ts: 2, key: "d" },
|
||||
];
|
||||
const secondFlush = [{ ts: 3, key: "e" }];
|
||||
const flushes = [firstFlush, secondFlush];
|
||||
|
||||
source
|
||||
.pipe(accumulator(3, undefined, FlushStrategy.rolling, "ts"))
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
t.deepEqual(flush, flushes[chunkIndex]);
|
||||
chunkIndex++;
|
||||
})
|
||||
.on("error", (e: any) => {
|
||||
t.end(e);
|
||||
})
|
||||
.on("end", () => {
|
||||
t.end();
|
||||
});
|
||||
[...firstFlush, ...secondFlush].forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb(
|
||||
"accumulator() rolling should emit error and ignore chunk when its missing key",
|
||||
t => {
|
||||
t.plan(2);
|
||||
let index = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const accumulatorStream = accumulator(
|
||||
3,
|
||||
undefined,
|
||||
FlushStrategy.rolling,
|
||||
"nonExistingKey",
|
||||
);
|
||||
const input = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
|
||||
|
||||
source
|
||||
.pipe(accumulatorStream)
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
// No valid data output
|
||||
expect(flush).to.deep.equal([]);
|
||||
})
|
||||
.on("error", (err: any) => {
|
||||
source.pipe(accumulatorStream);
|
||||
accumulatorStream.resume();
|
||||
expect(err.message).to.equal(
|
||||
`Key is missing in event: (nonExistingKey, ${JSON.stringify(
|
||||
input[index],
|
||||
)})`,
|
||||
);
|
||||
index++;
|
||||
t.pass();
|
||||
})
|
||||
.on("end", () => {
|
||||
t.end();
|
||||
});
|
||||
input.forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"accumulator() rolling should emit error, ignore chunk when key is missing and continue processing chunks correctly",
|
||||
t => {
|
||||
t.plan(3);
|
||||
let chunkIndex = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const accumulatorStream = accumulator(
|
||||
3,
|
||||
undefined,
|
||||
FlushStrategy.rolling,
|
||||
"ts",
|
||||
);
|
||||
const input = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ key: "d" },
|
||||
{ ts: 3, key: "e" },
|
||||
];
|
||||
const firstFlush = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
];
|
||||
const secondFlush = [{ ts: 3, key: "e" }];
|
||||
const flushes = [firstFlush, secondFlush];
|
||||
|
||||
source
|
||||
.pipe(accumulatorStream)
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
t.deepEqual(flush, flushes[chunkIndex]);
|
||||
chunkIndex++;
|
||||
})
|
||||
.on("error", (err: any) => {
|
||||
source.pipe(accumulatorStream);
|
||||
accumulatorStream.resume();
|
||||
expect(err.message).to.equal(
|
||||
`Key is missing in event: (ts, ${JSON.stringify(
|
||||
input[3],
|
||||
)})`,
|
||||
);
|
||||
t.pass();
|
||||
})
|
||||
.on("end", () => {
|
||||
t.end();
|
||||
});
|
||||
input.forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb("accumulator() sliding", t => {
|
||||
t.plan(4);
|
||||
let chunkIndex = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const input = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ ts: 4, key: "d" },
|
||||
];
|
||||
const firstFlush = [{ ts: 0, key: "a" }];
|
||||
const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
|
||||
const thirdFlush = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
];
|
||||
const fourthFlush = [
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ ts: 4, key: "d" },
|
||||
];
|
||||
|
||||
const flushes = [firstFlush, secondFlush, thirdFlush, fourthFlush];
|
||||
source
|
||||
.pipe(accumulator(3, undefined, FlushStrategy.sliding))
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
t.deepEqual(flush, flushes[chunkIndex]);
|
||||
chunkIndex++;
|
||||
})
|
||||
.on("error", (e: any) => {
|
||||
t.end(e);
|
||||
})
|
||||
.on("end", () => {
|
||||
t.end();
|
||||
});
|
||||
input.forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb("accumulator() sliding with key", t => {
|
||||
t.plan(6);
|
||||
let chunkIndex = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const input = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ ts: 3, key: "d" },
|
||||
{ ts: 5, key: "f" },
|
||||
{ ts: 6, key: "g" },
|
||||
];
|
||||
const firstFlush = [{ ts: 0, key: "a" }];
|
||||
const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
|
||||
const thirdFlush = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
];
|
||||
const fourthFlush = [
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ ts: 3, key: "d" },
|
||||
];
|
||||
const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }];
|
||||
const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }];
|
||||
|
||||
const flushes = [
|
||||
firstFlush,
|
||||
secondFlush,
|
||||
thirdFlush,
|
||||
fourthFlush,
|
||||
fifthFlush,
|
||||
sixthFlush,
|
||||
];
|
||||
source
|
||||
.pipe(accumulator(3, undefined, FlushStrategy.sliding, "ts"))
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
t.deepEqual(flush, flushes[chunkIndex]);
|
||||
chunkIndex++;
|
||||
})
|
||||
.on("error", (e: any) => {
|
||||
t.end(e);
|
||||
})
|
||||
.on("end", () => {
|
||||
t.end();
|
||||
});
|
||||
input.forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb(
|
||||
"accumulator() sliding should emit error and ignore chunk when key is missing",
|
||||
t => {
|
||||
t.plan(2);
|
||||
let index = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const accumulatorStream = accumulator(
|
||||
3,
|
||||
undefined,
|
||||
FlushStrategy.sliding,
|
||||
"nonExistingKey",
|
||||
);
|
||||
const input = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
|
||||
|
||||
source
|
||||
.pipe(accumulatorStream)
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
expect(flush).to.deep.equal([]);
|
||||
})
|
||||
.on("error", (err: any) => {
|
||||
source.pipe(accumulatorStream);
|
||||
accumulatorStream.resume();
|
||||
expect(err.message).to.equal(
|
||||
`Key is missing in event: (nonExistingKey, ${JSON.stringify(
|
||||
input[index],
|
||||
)})`,
|
||||
);
|
||||
index++;
|
||||
t.pass();
|
||||
})
|
||||
.on("end", () => {
|
||||
t.end();
|
||||
});
|
||||
input.forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"accumulator() sliding should emit error, ignore chunk when key is missing and continue processing chunks correctly",
|
||||
t => {
|
||||
t.plan(6);
|
||||
let chunkIndex = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const accumulatorStream = accumulator(
|
||||
3,
|
||||
undefined,
|
||||
FlushStrategy.sliding,
|
||||
"ts",
|
||||
);
|
||||
const input = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ ts: 3, key: "d" },
|
||||
{ ts: 5, key: "f" },
|
||||
{ ts: 6, key: "g" },
|
||||
];
|
||||
const firstFlush = [{ ts: 0, key: "a" }];
|
||||
const secondFlush = [{ ts: 0, key: "a" }, { ts: 2, key: "c" }];
|
||||
const thirdFlush = [{ ts: 2, key: "c" }, { ts: 3, key: "d" }];
|
||||
const fourthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }];
|
||||
const fifthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }];
|
||||
|
||||
const flushes = [
|
||||
firstFlush,
|
||||
secondFlush,
|
||||
thirdFlush,
|
||||
fourthFlush,
|
||||
fifthFlush,
|
||||
];
|
||||
source
|
||||
.pipe(accumulatorStream)
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
t.deepEqual(flush, flushes[chunkIndex]);
|
||||
chunkIndex++;
|
||||
})
|
||||
.on("error", (err: any) => {
|
||||
source.pipe(accumulatorStream);
|
||||
accumulatorStream.resume();
|
||||
expect(err.message).to.equal(
|
||||
`Key is missing in event: (ts, ${JSON.stringify(
|
||||
input[1],
|
||||
)})`,
|
||||
);
|
||||
t.pass();
|
||||
})
|
||||
.on("end", () => {
|
||||
t.end();
|
||||
});
|
||||
input.forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb("accumulatorBy() rolling", t => {
|
||||
t.plan(2);
|
||||
let chunkIndex = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const firstFlush = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ ts: 2, key: "d" },
|
||||
];
|
||||
const secondFlush = [{ ts: 3, key: "e" }];
|
||||
const flushes = [firstFlush, secondFlush];
|
||||
|
||||
source
|
||||
.pipe(
|
||||
accumulatorBy(
|
||||
undefined,
|
||||
FlushStrategy.rolling,
|
||||
(event: TestObject, bufferChunk: TestObject) => {
|
||||
return bufferChunk.ts + 3 <= event.ts;
|
||||
},
|
||||
),
|
||||
)
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
t.deepEqual(flush, flushes[chunkIndex]);
|
||||
chunkIndex++;
|
||||
})
|
||||
.on("error", (e: any) => {
|
||||
t.end(e);
|
||||
})
|
||||
.on("end", () => {
|
||||
t.end();
|
||||
});
|
||||
[...firstFlush, ...secondFlush].forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb(
|
||||
"accumulatorBy() rolling should emit error when key iteratee throws",
|
||||
t => {
|
||||
t.plan(2);
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const input = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
];
|
||||
const accumulaterStream = accumulatorBy(
|
||||
undefined,
|
||||
FlushStrategy.rolling,
|
||||
(event: TestObject, bufferChunk: TestObject) => {
|
||||
if (event.key !== "a") {
|
||||
throw new Error("Failed mapping");
|
||||
}
|
||||
return bufferChunk.ts + 3 <= event.ts;
|
||||
},
|
||||
);
|
||||
source
|
||||
.pipe(accumulaterStream)
|
||||
.on("error", (err: any) => {
|
||||
source.pipe(accumulaterStream);
|
||||
accumulaterStream.resume();
|
||||
expect(err.message).to.equal("Failed mapping");
|
||||
t.pass();
|
||||
})
|
||||
.on("end", () => {
|
||||
t.end();
|
||||
});
|
||||
|
||||
input.forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb("accumulatorBy() sliding", t => {
|
||||
t.plan(6);
|
||||
let chunkIndex = 0;
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const input = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ ts: 3, key: "d" },
|
||||
{ ts: 5, key: "f" },
|
||||
{ ts: 6, key: "g" },
|
||||
];
|
||||
const firstFlush = [{ ts: 0, key: "a" }];
|
||||
const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
|
||||
const thirdFlush = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
];
|
||||
const fourthFlush = [
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
{ ts: 3, key: "d" },
|
||||
];
|
||||
const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }];
|
||||
const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }];
|
||||
|
||||
const flushes = [
|
||||
firstFlush,
|
||||
secondFlush,
|
||||
thirdFlush,
|
||||
fourthFlush,
|
||||
fifthFlush,
|
||||
sixthFlush,
|
||||
];
|
||||
source
|
||||
.pipe(
|
||||
accumulatorBy(
|
||||
undefined,
|
||||
FlushStrategy.sliding,
|
||||
(event: TestObject, bufferChunk: TestObject) => {
|
||||
return bufferChunk.ts + 3 <= event.ts ? true : false;
|
||||
},
|
||||
),
|
||||
)
|
||||
.on("data", (flush: TestObject[]) => {
|
||||
t.deepEqual(flush, flushes[chunkIndex]);
|
||||
chunkIndex++;
|
||||
})
|
||||
.on("error", (e: any) => {
|
||||
t.end(e);
|
||||
})
|
||||
.on("end", () => {
|
||||
t.end();
|
||||
});
|
||||
input.forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb(
|
||||
"accumulatorBy() sliding should emit error when key iteratee throws",
|
||||
t => {
|
||||
t.plan(2);
|
||||
interface TestObject {
|
||||
ts: number;
|
||||
key: string;
|
||||
}
|
||||
const source = new Readable({ objectMode: true });
|
||||
const input = [
|
||||
{ ts: 0, key: "a" },
|
||||
{ ts: 1, key: "b" },
|
||||
{ ts: 2, key: "c" },
|
||||
];
|
||||
const accumulaterStream = accumulatorBy(
|
||||
undefined,
|
||||
FlushStrategy.sliding,
|
||||
(event: TestObject, bufferChunk: TestObject) => {
|
||||
if (event.key !== "a") {
|
||||
throw new Error("Failed mapping");
|
||||
}
|
||||
return bufferChunk.ts + 3 <= event.ts ? true : false;
|
||||
},
|
||||
);
|
||||
source
|
||||
.pipe(accumulaterStream)
|
||||
.on("error", (err: any) => {
|
||||
source.pipe(accumulaterStream);
|
||||
accumulaterStream.resume();
|
||||
expect(err.message).to.equal("Failed mapping");
|
||||
t.pass();
|
||||
})
|
||||
.on("end", () => {
|
||||
t.end();
|
||||
});
|
||||
|
||||
input.forEach(item => {
|
||||
source.push(item);
|
||||
});
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
58
tests/batch.spec.ts
Normal file
58
tests/batch.spec.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { batch } from "../src";
|
||||
|
||||
test.cb("batch() batches chunks together", t => {
|
||||
t.plan(3);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const expectedElements = [["a", "b", "c"], ["d", "e", "f"], ["g"]];
|
||||
let i = 0;
|
||||
source
|
||||
.pipe(batch(3))
|
||||
.on("data", (element: string[]) => {
|
||||
expect(element).to.deep.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push("d");
|
||||
source.push("e");
|
||||
source.push("f");
|
||||
source.push("g");
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb("batch() yields a batch after the timeout", t => {
|
||||
t.plan(3);
|
||||
const source = new Readable({
|
||||
objectMode: true,
|
||||
read(size: number) {},
|
||||
});
|
||||
const expectedElements = [["a", "b"], ["c"], ["d"]];
|
||||
let i = 0;
|
||||
source
|
||||
.pipe(batch(3))
|
||||
.on("data", (element: string[]) => {
|
||||
expect(element).to.deep.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.fail)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
setTimeout(() => {
|
||||
source.push("c");
|
||||
}, 600);
|
||||
setTimeout(() => {
|
||||
source.push("d");
|
||||
source.push(null);
|
||||
}, 600 * 2);
|
||||
});
|
||||
28
tests/child.spec.ts
Normal file
28
tests/child.spec.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import * as cp from "child_process";
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { child } from "../src";
|
||||
|
||||
test.cb(
|
||||
"child() allows easily writing to child process stdin and reading from its stdout",
|
||||
t => {
|
||||
t.plan(1);
|
||||
const source = new Readable();
|
||||
const catProcess = cp.exec("cat");
|
||||
let out = "";
|
||||
source
|
||||
.pipe(child(catProcess))
|
||||
.on("data", chunk => (out += chunk))
|
||||
.on("error", t.end)
|
||||
.on("end", () => {
|
||||
expect(out).to.equal("abcdef");
|
||||
t.pass();
|
||||
t.end();
|
||||
});
|
||||
source.push("ab");
|
||||
source.push("cd");
|
||||
source.push("ef");
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
132
tests/collect.spec.ts
Normal file
132
tests/collect.spec.ts
Normal file
@@ -0,0 +1,132 @@
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { collect } from "../src";
|
||||
|
||||
test.cb(
|
||||
"collect() collects streamed elements into an array (object, flowing mode)",
|
||||
t => {
|
||||
t.plan(1);
|
||||
const source = new Readable({ objectMode: true });
|
||||
|
||||
source
|
||||
.pipe(collect({ objectMode: true }))
|
||||
.on("data", collected => {
|
||||
expect(collected).to.deep.equal(["a", "b", "c"]);
|
||||
t.pass();
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"collect() collects streamed elements into an array (object, paused mode)",
|
||||
t => {
|
||||
t.plan(1);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const collector = source.pipe(collect({ objectMode: true }));
|
||||
|
||||
collector
|
||||
.on("readable", () => {
|
||||
let collected = collector.read();
|
||||
while (collected !== null) {
|
||||
expect(collected).to.deep.equal(["a", "b", "c"]);
|
||||
t.pass();
|
||||
collected = collector.read();
|
||||
}
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"collect() collects streamed bytes into a buffer (non-object, flowing mode)",
|
||||
t => {
|
||||
t.plan(1);
|
||||
const source = new Readable({ objectMode: false });
|
||||
|
||||
source
|
||||
.pipe(collect())
|
||||
.on("data", collected => {
|
||||
expect(collected).to.deep.equal(Buffer.from("abc"));
|
||||
t.pass();
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"collect() collects streamed bytes into a buffer (non-object, paused mode)",
|
||||
t => {
|
||||
t.plan(1);
|
||||
const source = new Readable({ objectMode: false });
|
||||
const collector = source.pipe(collect({ objectMode: false }));
|
||||
collector
|
||||
.on("readable", () => {
|
||||
let collected = collector.read();
|
||||
while (collected !== null) {
|
||||
expect(collected).to.deep.equal(Buffer.from("abc"));
|
||||
t.pass();
|
||||
collected = collector.read();
|
||||
}
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"collect() emits an empty array if the source was empty (object mode)",
|
||||
t => {
|
||||
t.plan(1);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const collector = source.pipe(collect({ objectMode: true }));
|
||||
collector
|
||||
.on("data", collected => {
|
||||
expect(collected).to.deep.equal([]);
|
||||
t.pass();
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"collect() emits nothing if the source was empty (non-object mode)",
|
||||
t => {
|
||||
t.plan(0);
|
||||
const source = new Readable({ objectMode: false });
|
||||
const collector = source.pipe(collect({ objectMode: false }));
|
||||
collector
|
||||
.on("data", () => t.fail())
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
180
tests/concat.spec.ts
Normal file
180
tests/concat.spec.ts
Normal file
@@ -0,0 +1,180 @@
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { concat, collect } from "../src";
|
||||
|
||||
test.cb(
|
||||
"concat() concatenates multiple readable streams (object, flowing mode)",
|
||||
t => {
|
||||
t.plan(6);
|
||||
const source1 = new Readable({ objectMode: true });
|
||||
const source2 = new Readable({ objectMode: true });
|
||||
const expectedElements = ["a", "b", "c", "d", "e", "f"];
|
||||
let i = 0;
|
||||
concat(source1, source2)
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source1.push("a");
|
||||
source2.push("d");
|
||||
source1.push("b");
|
||||
source2.push("e");
|
||||
source1.push("c");
|
||||
source2.push("f");
|
||||
source2.push(null);
|
||||
source1.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"concat() concatenates multiple readable streams (object, paused mode)",
|
||||
t => {
|
||||
t.plan(6);
|
||||
const source1 = new Readable({ objectMode: true });
|
||||
const source2 = new Readable({ objectMode: true });
|
||||
const expectedElements = ["a", "b", "c", "d", "e", "f"];
|
||||
let i = 0;
|
||||
const concatenation = concat(source1, source2)
|
||||
.on("readable", () => {
|
||||
let element = concatenation.read();
|
||||
while (element !== null) {
|
||||
expect(element).to.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
element = concatenation.read();
|
||||
}
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source1.push("a");
|
||||
source2.push("d");
|
||||
source1.push("b");
|
||||
source2.push("e");
|
||||
source1.push("c");
|
||||
source2.push("f");
|
||||
source2.push(null);
|
||||
source1.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"concat() concatenates multiple readable streams (non-object, flowing mode)",
|
||||
t => {
|
||||
t.plan(6);
|
||||
const source1 = new Readable({ objectMode: false });
|
||||
const source2 = new Readable({ objectMode: false });
|
||||
const expectedElements = ["a", "b", "c", "d", "e", "f"];
|
||||
let i = 0;
|
||||
concat(source1, source2)
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.deep.equal(Buffer.from(expectedElements[i]));
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source1.push("a");
|
||||
source2.push("d");
|
||||
source1.push("b");
|
||||
source2.push("e");
|
||||
source1.push("c");
|
||||
source2.push("f");
|
||||
source2.push(null);
|
||||
source1.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"concat() concatenates multiple readable streams (non-object, paused mode)",
|
||||
t => {
|
||||
t.plan(6);
|
||||
const source1 = new Readable({ objectMode: false, read: () => ({}) });
|
||||
const source2 = new Readable({ objectMode: false, read: () => ({}) });
|
||||
const expectedElements = ["a", "b", "c", "d", "e", "f"];
|
||||
let i = 0;
|
||||
const concatenation = concat(source1, source2)
|
||||
.on("readable", () => {
|
||||
let element = concatenation.read();
|
||||
while (element !== null) {
|
||||
expect(element).to.deep.equal(
|
||||
Buffer.from(expectedElements[i]),
|
||||
);
|
||||
t.pass();
|
||||
i++;
|
||||
element = concatenation.read();
|
||||
}
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source1.push("a");
|
||||
setTimeout(() => source2.push("d"), 10);
|
||||
setTimeout(() => source1.push("b"), 20);
|
||||
setTimeout(() => source2.push("e"), 30);
|
||||
setTimeout(() => source1.push("c"), 40);
|
||||
setTimeout(() => source2.push("f"), 50);
|
||||
setTimeout(() => source2.push(null), 60);
|
||||
setTimeout(() => source1.push(null), 70);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb("concat() concatenates a single readable stream (object mode)", t => {
|
||||
t.plan(3);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const expectedElements = ["a", "b", "c", "d", "e", "f"];
|
||||
let i = 0;
|
||||
concat(source)
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb(
|
||||
"concat() concatenates a single readable stream (non-object mode)",
|
||||
t => {
|
||||
t.plan(3);
|
||||
const source = new Readable({ objectMode: false });
|
||||
const expectedElements = ["a", "b", "c", "d", "e", "f"];
|
||||
let i = 0;
|
||||
concat(source)
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.deep.equal(Buffer.from(expectedElements[i]));
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb("concat() concatenates empty list of readable streams", t => {
|
||||
t.plan(0);
|
||||
concat()
|
||||
.pipe(collect())
|
||||
.on("data", _ => {
|
||||
t.fail();
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
});
|
||||
28
tests/duplex.spec.ts
Normal file
28
tests/duplex.spec.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import * as cp from "child_process";
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { duplex } from "../src";
|
||||
|
||||
test.cb(
|
||||
"duplex() combines a writable and readable stream into a ReadWrite stream",
|
||||
t => {
|
||||
t.plan(1);
|
||||
const source = new Readable();
|
||||
const catProcess = cp.exec("cat");
|
||||
let out = "";
|
||||
source
|
||||
.pipe(duplex(catProcess.stdin!, catProcess.stdout!))
|
||||
.on("data", chunk => (out += chunk))
|
||||
.on("error", t.end)
|
||||
.on("end", () => {
|
||||
expect(out).to.equal("abcdef");
|
||||
t.pass();
|
||||
t.end();
|
||||
});
|
||||
source.push("ab");
|
||||
source.push("cd");
|
||||
source.push("ef");
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
102
tests/filter.spec.ts
Normal file
102
tests/filter.spec.ts
Normal file
@@ -0,0 +1,102 @@
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { Readable } from "stream";
|
||||
import { filter } from "../src";
|
||||
|
||||
test.cb("filter() filters elements synchronously", t => {
|
||||
t.plan(2);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const expectedElements = ["a", "c"];
|
||||
let i = 0;
|
||||
source
|
||||
.pipe(filter((element: string) => element !== "b"))
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb("filter() filters elements asynchronously", t => {
|
||||
t.plan(2);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const expectedElements = ["a", "c"];
|
||||
let i = 0;
|
||||
source
|
||||
.pipe(
|
||||
filter(async (element: string) => {
|
||||
await Promise.resolve();
|
||||
return element !== "b";
|
||||
}),
|
||||
)
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb("filter() emits errors during synchronous filtering", t => {
|
||||
t.plan(2);
|
||||
const source = new Readable({ objectMode: true });
|
||||
source
|
||||
.pipe(
|
||||
filter((element: string) => {
|
||||
if (element !== "a") {
|
||||
throw new Error("Failed filtering");
|
||||
}
|
||||
return true;
|
||||
}),
|
||||
)
|
||||
.resume()
|
||||
.on("error", err => {
|
||||
expect(err.message).to.equal("Failed filtering");
|
||||
t.pass();
|
||||
})
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb("filter() emits errors during asynchronous filtering", t => {
|
||||
t.plan(2);
|
||||
const source = new Readable({ objectMode: true });
|
||||
source
|
||||
.pipe(
|
||||
filter(async (element: string) => {
|
||||
await Promise.resolve();
|
||||
if (element !== "a") {
|
||||
throw new Error("Failed filtering");
|
||||
}
|
||||
return true;
|
||||
}),
|
||||
)
|
||||
.resume()
|
||||
.on("error", err => {
|
||||
expect(err.message).to.equal("Failed filtering");
|
||||
t.pass();
|
||||
})
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
});
|
||||
100
tests/flatMap.spec.ts
Normal file
100
tests/flatMap.spec.ts
Normal file
@@ -0,0 +1,100 @@
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { flatMap } from "../src";
|
||||
|
||||
test.cb("flatMap() maps elements synchronously", t => {
|
||||
t.plan(6);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const expectedElements = ["a", "A", "b", "B", "c", "C"];
|
||||
let i = 0;
|
||||
source
|
||||
.pipe(flatMap((element: string) => [element, element.toUpperCase()]))
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb("flatMap() maps elements asynchronously", t => {
|
||||
t.plan(6);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const expectedElements = ["a", "A", "b", "B", "c", "C"];
|
||||
let i = 0;
|
||||
source
|
||||
.pipe(
|
||||
flatMap(async (element: string) => {
|
||||
await Promise.resolve();
|
||||
return [element, element.toUpperCase()];
|
||||
}),
|
||||
)
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb("flatMap() emits errors during synchronous mapping", t => {
|
||||
t.plan(2);
|
||||
const source = new Readable({ objectMode: true });
|
||||
source
|
||||
.pipe(
|
||||
flatMap((element: string) => {
|
||||
if (element !== "a") {
|
||||
throw new Error("Failed mapping");
|
||||
}
|
||||
return [element, element.toUpperCase()];
|
||||
}),
|
||||
)
|
||||
.resume()
|
||||
.on("error", err => {
|
||||
expect(err.message).to.equal("Failed mapping");
|
||||
t.pass();
|
||||
})
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb("flatMap() emits errors during asynchronous mapping", t => {
|
||||
t.plan(2);
|
||||
const source = new Readable({ objectMode: true });
|
||||
source
|
||||
.pipe(
|
||||
flatMap(async (element: string) => {
|
||||
await Promise.resolve();
|
||||
if (element !== "a") {
|
||||
throw new Error("Failed mapping");
|
||||
}
|
||||
return [element, element.toUpperCase()];
|
||||
}),
|
||||
)
|
||||
.resume()
|
||||
.on("error", err => {
|
||||
expect(err.message).to.equal("Failed mapping");
|
||||
t.pass();
|
||||
})
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
});
|
||||
45
tests/fromArray.spec.ts
Normal file
45
tests/fromArray.spec.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { fromArray } from "../src";
|
||||
|
||||
test.cb("fromArray() streams array elements in flowing mode", t => {
|
||||
t.plan(3);
|
||||
const elements = ["a", "b", "c"];
|
||||
const stream = fromArray(elements);
|
||||
let i = 0;
|
||||
stream
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.equal(elements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
});
|
||||
|
||||
test.cb("fromArray() ends immediately if there are no array elements", t => {
|
||||
t.plan(0);
|
||||
fromArray([])
|
||||
.on("data", () => t.fail())
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
});
|
||||
|
||||
test.cb("fromArray() streams array elements in paused mode", t => {
|
||||
t.plan(3);
|
||||
const elements = ["a", "b", "c"];
|
||||
const stream = fromArray(elements);
|
||||
let i = 0;
|
||||
stream
|
||||
.on("readable", () => {
|
||||
let element = stream.read();
|
||||
while (element !== null) {
|
||||
expect(element).to.equal(elements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
element = stream.read();
|
||||
}
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
});
|
||||
56
tests/join.spec.ts
Normal file
56
tests/join.spec.ts
Normal file
@@ -0,0 +1,56 @@
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { join } from "../src";
|
||||
|
||||
test.cb("join() joins chunks using the specified separator", t => {
|
||||
t.plan(9);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const expectedParts = ["ab|", "|", "c|d", "|", "|", "|", "e", "|", "|f|"];
|
||||
let i = 0;
|
||||
source
|
||||
.pipe(join("|"))
|
||||
.on("data", part => {
|
||||
expect(part).to.equal(expectedParts[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("ab|");
|
||||
source.push("c|d");
|
||||
source.push("|");
|
||||
source.push("e");
|
||||
source.push("|f|");
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb(
|
||||
"join() joins chunks using the specified separator without breaking up multi-byte characters " +
|
||||
"spanning multiple chunks",
|
||||
t => {
|
||||
t.plan(5);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const expectedParts = ["ø", "|", "ö", "|", "一"];
|
||||
let i = 0;
|
||||
source
|
||||
.pipe(join("|"))
|
||||
.on("data", part => {
|
||||
expect(part).to.equal(expectedParts[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push(Buffer.from("ø").slice(0, 1)); // 2-byte character spanning two chunks
|
||||
source.push(Buffer.from("ø").slice(1, 2));
|
||||
source.push(Buffer.from("ö").slice(0, 1)); // 2-byte character spanning two chunks
|
||||
source.push(Buffer.from("ö").slice(1, 2));
|
||||
source.push(Buffer.from("一").slice(0, 1)); // 3-byte character spanning three chunks
|
||||
source.push(Buffer.from("一").slice(1, 2));
|
||||
source.push(Buffer.from("一").slice(2, 3));
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
15
tests/last.spec.ts
Normal file
15
tests/last.spec.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { last } from "../src";
|
||||
|
||||
test("last() resolves to the last chunk streamed by the given readable stream", async t => {
|
||||
const source = new Readable({ objectMode: true });
|
||||
const lastPromise = last(source);
|
||||
source.push("ab");
|
||||
source.push("cd");
|
||||
source.push("ef");
|
||||
source.push(null);
|
||||
const lastChunk = await lastPromise;
|
||||
expect(lastChunk).to.equal("ef");
|
||||
});
|
||||
109
tests/map.spec.ts
Normal file
109
tests/map.spec.ts
Normal file
@@ -0,0 +1,109 @@
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { map } from "../src";
|
||||
|
||||
test.cb("map() maps elements synchronously", t => {
|
||||
t.plan(3);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const mapStream = map((element: string) => element.toUpperCase());
|
||||
const expectedElements = ["A", "B", "C"];
|
||||
let i = 0;
|
||||
source
|
||||
.pipe(mapStream)
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb("map() maps elements asynchronously", t => {
|
||||
t.plan(3);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const mapStream = map(async (element: string) => {
|
||||
await Promise.resolve();
|
||||
return element.toUpperCase();
|
||||
});
|
||||
const expectedElements = ["A", "B", "C"];
|
||||
let i = 0;
|
||||
source
|
||||
.pipe(mapStream)
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb("map() emits errors during synchronous mapping", t => {
|
||||
t.plan(3);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const mapStream = map((element: string) => {
|
||||
if (element !== "b") {
|
||||
throw new Error("Failed mapping");
|
||||
}
|
||||
return element.toUpperCase();
|
||||
});
|
||||
source
|
||||
.pipe(mapStream)
|
||||
.on("data", data => {
|
||||
expect(data).to.equal("B");
|
||||
t.pass();
|
||||
})
|
||||
.on("error", err => {
|
||||
source.pipe(mapStream);
|
||||
mapStream.resume();
|
||||
expect(err.message).to.equal("Failed mapping");
|
||||
t.pass();
|
||||
})
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test("map() emits errors during asynchronous mapping", t => {
|
||||
t.plan(1);
|
||||
return new Promise((resolve, _) => {
|
||||
const source = new Readable({ objectMode: true });
|
||||
const mapStream = map(async (element: string) => {
|
||||
await Promise.resolve();
|
||||
if (element === "b") {
|
||||
throw new Error("Failed mapping");
|
||||
}
|
||||
return element.toUpperCase();
|
||||
});
|
||||
source
|
||||
.pipe(mapStream)
|
||||
.on("error", err => {
|
||||
expect(err.message).to.equal("Failed mapping");
|
||||
t.pass();
|
||||
resolve();
|
||||
})
|
||||
.on("end", () => t.fail);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
source.push(null);
|
||||
source.push(null);
|
||||
});
|
||||
});
|
||||
60
tests/merge.spec.ts
Normal file
60
tests/merge.spec.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { merge } from "../src";
|
||||
|
||||
test.cb(
|
||||
"merge() merges multiple readable streams in chunk arrival order",
|
||||
t => {
|
||||
t.plan(6);
|
||||
const source1 = new Readable({ objectMode: true, read: () => ({}) });
|
||||
const source2 = new Readable({ objectMode: true, read: () => ({}) });
|
||||
const expectedElements = ["a", "d", "b", "e", "c", "f"];
|
||||
let i = 0;
|
||||
merge(source1, source2)
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source1.push("a");
|
||||
setTimeout(() => source2.push("d"), 10);
|
||||
setTimeout(() => source1.push("b"), 20);
|
||||
setTimeout(() => source2.push("e"), 30);
|
||||
setTimeout(() => source1.push("c"), 40);
|
||||
setTimeout(() => source2.push("f"), 50);
|
||||
setTimeout(() => source2.push(null), 60);
|
||||
setTimeout(() => source1.push(null), 70);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb("merge() merges a readable stream", t => {
|
||||
t.plan(3);
|
||||
const source = new Readable({ objectMode: true, read: () => ({}) });
|
||||
const expectedElements = ["a", "b", "c"];
|
||||
let i = 0;
|
||||
merge(source)
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb("merge() merges an empty list of readable streams", t => {
|
||||
t.plan(0);
|
||||
merge()
|
||||
.on("data", () => t.pass())
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
});
|
||||
77
tests/parallelMap.spec.ts
Normal file
77
tests/parallelMap.spec.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import { Readable } from "stream";
|
||||
import { performance } from "perf_hooks";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { parallelMap } from "../src";
|
||||
import { sleep } from "../src/helpers";
|
||||
|
||||
// parallelMap() with parallelism 2: up to two mappers run concurrently, each
// sleeping `offset` ms. The per-element start/finish timestamps recorded in
// orderedResults are used at the end to prove (a) element k+2 only starts
// after element k finishes (the 2-slot window) and (b) consecutive windows
// overlap (true parallelism, not serial execution).
// NOTE(review): timing-based — could be flaky on a heavily loaded machine.
test.cb("parallelMap() parallel mapping", t => {
    t.plan(6);
    const offset = 50;
    const source = new Readable({ objectMode: true });
    const expectedElements = [
        "a_processed",
        "b_processed",
        "c_processed",
        "d_processed",
        "e_processed",
        "f_processed",
    ];
    // Per-element timing record: when the mapper started, what it produced,
    // and when it finished.
    interface IPerfData {
        start: number;
        output?: string;
        finish?: number;
    }
    const orderedResults: IPerfData[] = [];
    source
        .pipe(
            parallelMap(async (data: any) => {
                const perfData: IPerfData = { start: performance.now() };
                const c = data + "_processed";
                perfData.output = c;
                await sleep(offset);
                perfData.finish = performance.now();
                orderedResults.push(perfData);
                return c;
            }, 2),
        )
        .on("data", (element: string) => {
            // Completion order is not guaranteed, so only membership is checked.
            t.true(expectedElements.includes(element));
        })
        .on("error", t.end)
        .on("end", async () => {
            // With 2 parallel slots, element k+2 cannot start before element
            // k has finished.
            expect(orderedResults[0].finish).to.be.lessThan(
                orderedResults[2].start,
            );
            expect(orderedResults[1].finish).to.be.lessThan(
                orderedResults[3].start,
            );
            expect(orderedResults[2].finish).to.be.lessThan(
                orderedResults[4].start,
            );
            expect(orderedResults[3].finish).to.be.lessThan(
                orderedResults[5].start,
            );
            // Each element must start within `offset` ms of the element two
            // places ahead — i.e. the windows overlap, so work ran in parallel.
            expect(orderedResults[0].start).to.be.lessThan(
                orderedResults[2].start + offset,
            );
            expect(orderedResults[1].start).to.be.lessThan(
                orderedResults[3].start + offset,
            );
            expect(orderedResults[2].start).to.be.lessThan(
                orderedResults[4].start + offset,
            );
            expect(orderedResults[3].start).to.be.lessThan(
                orderedResults[5].start + offset,
            );
            t.end();
        });

    source.push("a");
    source.push("b");
    source.push("c");
    source.push("d");
    source.push("e");
    source.push("f");
    source.push(null);
});
|
||||
40
tests/parse.spec.ts
Normal file
40
tests/parse.spec.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { parse } from "../src";
|
||||
|
||||
test.cb("parse() parses the streamed elements as JSON", t => {
|
||||
t.plan(3);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const expectedElements = ["abc", {}, []];
|
||||
let i = 0;
|
||||
source
|
||||
.pipe(parse())
|
||||
.on("data", part => {
|
||||
expect(part).to.deep.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push('"abc"');
|
||||
source.push("{}");
|
||||
source.push("[]");
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb("parse() emits errors on invalid JSON", t => {
|
||||
t.plan(2);
|
||||
const source = new Readable({ objectMode: true });
|
||||
source
|
||||
.pipe(parse())
|
||||
.resume()
|
||||
.on("error", () => t.pass())
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("{}");
|
||||
source.push({});
|
||||
source.push([]);
|
||||
source.push(null);
|
||||
});
|
||||
67
tests/rate.spec.ts
Normal file
67
tests/rate.spec.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import { Readable } from "stream";
|
||||
import { performance } from "perf_hooks";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { rate } from "../src";
|
||||
|
||||
// rate() throttles throughput. Three sources are throttled at fast/medium/slow
// rates; for each, the observed elements-per-second (measured against the
// shared `start` timestamp) must stay below the configured limit.
// NOTE(review): t.end is attached only to the slow stream, presumably because
// at 1 element/s it is guaranteed to finish last — confirm against rate().
test.cb("rate() sends data at desired rate", t => {
    t.plan(9);
    const fastRate = 150;
    const medRate = 50;
    const slowRate = 1;
    const sourceFast = new Readable({ objectMode: true });
    const sourceMed = new Readable({ objectMode: true });
    const sourceSlow = new Readable({ objectMode: true });
    const expectedElements = ["a", "b", "c"];
    const start = performance.now();
    // Independent cursors into expectedElements, one per throttled stream.
    let i = 0;
    let j = 0;
    let k = 0;

    sourceFast
        .pipe(rate(fastRate, 1))
        .on("data", (element: string[]) => {
            // Elements seen so far divided by elapsed seconds = observed rate.
            const currentRate = (i / (performance.now() - start)) * 1000;
            expect(element).to.deep.equal(expectedElements[i]);
            expect(currentRate).lessThan(fastRate);
            t.pass();
            i++;
        })
        .on("error", t.end);

    sourceMed
        .pipe(rate(medRate, 1))
        .on("data", (element: string[]) => {
            const currentRate = (j / (performance.now() - start)) * 1000;
            expect(element).to.deep.equal(expectedElements[j]);
            expect(currentRate).lessThan(medRate);
            t.pass();
            j++;
        })
        .on("error", t.end);

    sourceSlow
        .pipe(rate(slowRate, 1))
        .on("data", (element: string[]) => {
            const currentRate = (k / (performance.now() - start)) * 1000;
            expect(element).to.deep.equal(expectedElements[k]);
            expect(currentRate).lessThan(slowRate);
            t.pass();
            k++;
        })
        .on("error", t.end)
        .on("end", t.end);

    sourceFast.push("a");
    sourceFast.push("b");
    sourceFast.push("c");
    sourceFast.push(null);
    sourceMed.push("a");
    sourceMed.push("b");
    sourceMed.push("c");
    sourceMed.push(null);
    sourceSlow.push("a");
    sourceSlow.push("b");
    sourceSlow.push("c");
    sourceSlow.push(null);
});
|
||||
98
tests/reduce.spec.ts
Normal file
98
tests/reduce.spec.ts
Normal file
@@ -0,0 +1,98 @@
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { reduce } from "../src";
|
||||
|
||||
test.cb("reduce() reduces elements synchronously", t => {
|
||||
t.plan(1);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const expectedValue = 6;
|
||||
source
|
||||
.pipe(reduce((acc: number, element: string) => acc + element.length, 0))
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.equal(expectedValue);
|
||||
t.pass();
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("ab");
|
||||
source.push("cd");
|
||||
source.push("ef");
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb("reduce() reduces elements asynchronously", t => {
|
||||
t.plan(1);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const expectedValue = 6;
|
||||
source
|
||||
.pipe(
|
||||
reduce(async (acc: number, element: string) => {
|
||||
await Promise.resolve();
|
||||
return acc + element.length;
|
||||
}, 0),
|
||||
)
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.equal(expectedValue);
|
||||
t.pass();
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("ab");
|
||||
source.push("cd");
|
||||
source.push("ef");
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb("reduce() emits errors during synchronous reduce", t => {
|
||||
t.plan(2);
|
||||
const source = new Readable({ objectMode: true });
|
||||
source
|
||||
.pipe(
|
||||
reduce((acc: number, element: string) => {
|
||||
if (element !== "ab") {
|
||||
throw new Error("Failed reduce");
|
||||
}
|
||||
return acc + element.length;
|
||||
}, 0),
|
||||
)
|
||||
.resume()
|
||||
.on("error", err => {
|
||||
expect(err.message).to.equal("Failed reduce");
|
||||
t.pass();
|
||||
})
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("ab");
|
||||
source.push("cd");
|
||||
source.push("ef");
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb("reduce() emits errors during asynchronous reduce", t => {
|
||||
t.plan(2);
|
||||
const source = new Readable({ objectMode: true });
|
||||
source
|
||||
.pipe(
|
||||
reduce(async (acc: number, element: string) => {
|
||||
await Promise.resolve();
|
||||
if (element !== "ab") {
|
||||
throw new Error("Failed mapping");
|
||||
}
|
||||
return acc + element.length;
|
||||
}, 0),
|
||||
)
|
||||
.resume()
|
||||
.on("error", err => {
|
||||
expect(err.message).to.equal("Failed mapping");
|
||||
t.pass();
|
||||
})
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("ab");
|
||||
source.push("cd");
|
||||
source.push("ef");
|
||||
source.push(null);
|
||||
});
|
||||
80
tests/replace.spec.ts
Normal file
80
tests/replace.spec.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { replace } from "../src";
|
||||
|
||||
test.cb(
|
||||
"replace() replaces occurrences of the given string in the streamed elements with the specified " +
|
||||
"replacement string",
|
||||
t => {
|
||||
t.plan(3);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const expectedElements = ["abc", "xyf", "ghi"];
|
||||
let i = 0;
|
||||
source
|
||||
.pipe(replace("de", "xy"))
|
||||
.on("data", part => {
|
||||
expect(part).to.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("abc");
|
||||
source.push("def");
|
||||
source.push("ghi");
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"replace() replaces occurrences of the given regular expression in the streamed elements with " +
|
||||
"the specified replacement string",
|
||||
t => {
|
||||
t.plan(3);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const expectedElements = ["abc", "xyz", "ghi"];
|
||||
let i = 0;
|
||||
source
|
||||
.pipe(replace(/^def$/, "xyz"))
|
||||
.on("data", part => {
|
||||
expect(part).to.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("abc");
|
||||
source.push("def");
|
||||
source.push("ghi");
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
|
||||
test.cb(
|
||||
"replace() replaces occurrences of the given multi-byte character even if it spans multiple chunks",
|
||||
t => {
|
||||
t.plan(3);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const expectedElements = ["ø", "O", "a"];
|
||||
let i = 0;
|
||||
source
|
||||
.pipe(replace("ö", "O"))
|
||||
.on("data", part => {
|
||||
expect(part).to.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push(Buffer.from("ø").slice(0, 1)); // 2-byte character spanning two chunks
|
||||
source.push(Buffer.from("ø").slice(1, 2));
|
||||
source.push(Buffer.from("ö").slice(0, 1)); // 2-byte character spanning two chunks
|
||||
source.push(Buffer.from("ö").slice(1, 2));
|
||||
source.push("a");
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
98
tests/split.spec.ts
Normal file
98
tests/split.spec.ts
Normal file
@@ -0,0 +1,98 @@
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { split } from "../src";
|
||||
|
||||
// split() with no argument uses "\n"; a trailing separator yields a final
// empty part ("\nef\n" -> ..., "ef", "").
test.cb("split() splits chunks using the default separator (\\n)", t => {
    t.plan(5);
    const source = new Readable({ objectMode: true });
    const expectedParts = ["ab", "c", "d", "ef", ""];
    let i = 0;
    source
        .pipe(split())
        .on("data", part => {
            expect(part).to.equal(expectedParts[i]);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);

    source.push("ab\n");
    source.push("c");
    source.push("\n");
    source.push("d");
    source.push("\nef\n");
    source.push(null);
});

// Splitting on a custom separator must work across chunk boundaries
// ("c|d" contributes to two different parts).
test.cb("split() splits chunks using the specified separator", t => {
    t.plan(6);
    const source = new Readable({ objectMode: true });
    const expectedParts = ["ab", "c", "d", "e", "f", ""];
    let i = 0;
    source
        .pipe(split("|"))
        .on("data", (part: string) => {
            expect(part).to.equal(expectedParts[i]);
            t.pass();
            i++;
        })
        .on("error", t.end)
        .on("end", t.end);

    source.push("ab|");
    source.push("c|d");
    source.push("|");
    source.push("|");
    source.push("e");
    source.push("|f|");
    source.push(null);
});

// Writing the buffer one byte at a time exercises the decoder path: single
// ascii bytes must still assemble into the right parts.
test.cb(
    "split() splits utf8 encoded buffers using the specified separator",
    t => {
        t.plan(3);
        const expectedElements = ["a", "b", "c"];
        let i = 0;
        const through = split(",");
        const buf = Buffer.from("a,b,c");
        through
            .on("data", element => {
                expect(element).to.equal(expectedElements[i]);
                i++;
                t.pass();
            })
            .on("error", t.end)
            .on("end", t.end);

        for (let j = 0; j < buf.length; ++j) {
            through.write(buf.slice(j, j + 1));
        }
        through.end();
    },
);

// Same byte-at-a-time write, but each element is a 3-byte utf8 character, so
// split() must not cut a character in half at chunk boundaries.
test.cb(
    "split() splits utf8 encoded buffers with multi-byte characters using the specified separator",
    t => {
        t.plan(3);
        const expectedElements = ["一", "一", "一"];
        let i = 0;
        const through = split(",");
        const buf = Buffer.from("一,一,一"); // "一" is a 3-byte utf8 character (code point U+4E00)
        through
            .on("data", element => {
                expect(element).to.equal(expectedElements[i]);
                i++;
                t.pass();
            })
            .on("error", t.end)
            .on("end", t.end);

        for (let j = 0; j < buf.length; ++j) {
            through.write(buf.slice(j, j + 1));
        }
        through.end();
    },
);
|
||||
61
tests/stringify.spec.ts
Normal file
61
tests/stringify.spec.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { stringify } from "../src";
|
||||
|
||||
test.cb("stringify() stringifies the streamed elements as JSON", t => {
|
||||
t.plan(4);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const expectedElements = [
|
||||
'"abc"',
|
||||
"0",
|
||||
'{"a":"a","b":"b","c":"c"}',
|
||||
'["a","b","c"]',
|
||||
];
|
||||
let i = 0;
|
||||
source
|
||||
.pipe(stringify())
|
||||
.on("data", part => {
|
||||
expect(part).to.deep.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("abc");
|
||||
source.push(0);
|
||||
source.push({ a: "a", b: "b", c: "c" });
|
||||
source.push(["a", "b", "c"]);
|
||||
source.push(null);
|
||||
});
|
||||
|
||||
test.cb(
|
||||
"stringify() stringifies the streamed elements as pretty-printed JSON",
|
||||
t => {
|
||||
t.plan(4);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const expectedElements = [
|
||||
'"abc"',
|
||||
"0",
|
||||
'{\n "a": "a",\n "b": "b",\n "c": "c"\n}',
|
||||
'[\n "a",\n "b",\n "c"\n]',
|
||||
];
|
||||
let i = 0;
|
||||
source
|
||||
.pipe(stringify({ pretty: true }))
|
||||
.on("data", part => {
|
||||
expect(part).to.deep.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("abc");
|
||||
source.push(0);
|
||||
source.push({ a: "a", b: "b", c: "c" });
|
||||
source.push(["a", "b", "c"]);
|
||||
source.push(null);
|
||||
},
|
||||
);
|
||||
26
tests/unbatch.spec.ts
Normal file
26
tests/unbatch.spec.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
import { Readable } from "stream";
|
||||
import test from "ava";
|
||||
import { expect } from "chai";
|
||||
import { unbatch, batch } from "../src";
|
||||
|
||||
test.cb("unbatch() unbatches", t => {
|
||||
t.plan(3);
|
||||
const source = new Readable({ objectMode: true });
|
||||
const expectedElements = ["a", "b", "c"];
|
||||
let i = 0;
|
||||
source
|
||||
.pipe(batch(3))
|
||||
.pipe(unbatch())
|
||||
.on("data", (element: string) => {
|
||||
expect(element).to.equal(expectedElements[i]);
|
||||
t.pass();
|
||||
i++;
|
||||
})
|
||||
.on("error", t.end)
|
||||
.on("end", t.end);
|
||||
|
||||
source.push("a");
|
||||
source.push("b");
|
||||
source.push("c");
|
||||
source.push(null);
|
||||
});
|
||||
Reference in New Issue
Block a user