Compare commits
No commits in common. "master" and "feature/ObjectModeByConfig" have entirely different histories.
master...feature/ObjectModeByConfig
README.md
@ -1,16 +1,15 @@
|
|||||||
# Strom
|
# Mhysa
|
||||||
|
|
||||||
**Dependency-free stream utils for Node.js**
|
**Dependency-free stream utils for Node.js**
|
||||||
|
|
||||||
<sub>Released under the [MIT](LICENSE) license.</sub>
|
<sub>Released under the [MIT](https://github.com/Wenzil/Mhysa/blob/master/LICENSE) license.</sub>
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
yarn add stromjs
|
yarn add mhysa
|
||||||
```
|
|
||||||
```sh
|
|
||||||
npm add stromjs
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
<sub>Tested with Node.js versions 8+</sub>
|
||||||
|
|
||||||
## fromArray(array)
|
## fromArray(array)
|
||||||
Convert an array into a `Readable` stream of its elements
|
Convert an array into a `Readable` stream of its elements
|
||||||
|
|
||||||
@ -19,14 +18,14 @@ Convert an array into a `Readable` stream of its elements
|
|||||||
| `array` | `T[]` | Array of elements to stream |
|
| `array` | `T[]` | Array of elements to stream |
|
||||||
|
|
||||||
```js
|
```js
|
||||||
strom.fromArray(["a", "b"])
|
Mhysa.fromArray(["a", "b"])
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
// ab is printed out
|
// ab is printed out
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
## map(mapper, options)
|
## map(mapper, options)
|
||||||
Returns a `ReadWrite` stream that maps streamed chunks
|
Return a `ReadWrite` stream that maps streamed chunks
|
||||||
|
|
||||||
| Param | Type | Description |
|
| Param | Type | Description |
|
||||||
| --- | --- | --- |
|
| --- | --- | --- |
|
||||||
@ -36,15 +35,15 @@ Returns a `ReadWrite` stream that maps streamed chunks
|
|||||||
| `options.writableObjectMode` | `boolean` | Whether this stream should behave as a writable stream of objects |
|
| `options.writableObjectMode` | `boolean` | Whether this stream should behave as a writable stream of objects |
|
||||||
|
|
||||||
```js
|
```js
|
||||||
strom.fromArray(["a", "b"])
|
Mhysa.fromArray(["a", "b"])
|
||||||
.pipe(strom.map(s => s.toUpperCase()))
|
.pipe(Mhysa.map(s => s.toUpperCase()))
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
// AB is printed out
|
// AB is printed out
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
## flatMap(mapper, options)
|
## flatMap(mapper, options)
|
||||||
Returns a `ReadWrite` stream that flat maps streamed chunks
|
Return a `ReadWrite` stream that flat maps streamed chunks
|
||||||
|
|
||||||
| Param | Type | Description |
|
| Param | Type | Description |
|
||||||
| --- | --- | --- |
|
| --- | --- | --- |
|
||||||
@ -54,15 +53,15 @@ Returns a `ReadWrite` stream that flat maps streamed chunks
|
|||||||
| `options.writableObjectMode` | `boolean` | Whether this stream should behave as a writable stream of objects |
|
| `options.writableObjectMode` | `boolean` | Whether this stream should behave as a writable stream of objects |
|
||||||
|
|
||||||
```js
|
```js
|
||||||
strom.fromArray(["a", "AA"])
|
Mhysa.fromArray(["a", "AA"])
|
||||||
.pipe(strom.flatMap(s => new Array(s.length).fill(s)))
|
.pipe(Mhysa.flatMap(s => new Array(s.length).fill(s)))
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
// aAAAA is printed out
|
// aAAAA is printed out
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
## filter(predicate, options)
|
## filter(predicate, options)
|
||||||
Returns a `ReadWrite` stream that filters out streamed chunks for which the predicate does not hold
|
Return a `ReadWrite` stream that filters out streamed chunks for which the predicate does not hold
|
||||||
|
|
||||||
| Param | Type | Description |
|
| Param | Type | Description |
|
||||||
| --- | --- | --- |
|
| --- | --- | --- |
|
||||||
@ -71,15 +70,15 @@ Returns a `ReadWrite` stream that filters out streamed chunks for which the pred
|
|||||||
| `options.objectMode` | `boolean` | Whether this stream should behave as a stream of objects |
|
| `options.objectMode` | `boolean` | Whether this stream should behave as a stream of objects |
|
||||||
|
|
||||||
```js
|
```js
|
||||||
strom.fromArray(["a", "b", "c"])
|
Mhysa.fromArray(["a", "b", "c"])
|
||||||
.pipe(strom.filter(s => s !== "b"))
|
.pipe(Mhysa.filter(s => s !== "b"))
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
// ac is printed out
|
// ac is printed out
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
## reduce(iteratee, initialValue, options)
|
## reduce(iteratee, initialValue, options)
|
||||||
Returns a `ReadWrite` stream that reduces streamed chunks down to a single value and yields that
|
Return a `ReadWrite` stream that reduces streamed chunks down to a single value and yield that
|
||||||
value
|
value
|
||||||
|
|
||||||
| Param | Type | Description |
|
| Param | Type | Description |
|
||||||
@ -91,16 +90,16 @@ value
|
|||||||
| `options.writableObjectMode` | `boolean` | Whether this stream should behave as a writable stream of objects |
|
| `options.writableObjectMode` | `boolean` | Whether this stream should behave as a writable stream of objects |
|
||||||
|
|
||||||
```js
|
```js
|
||||||
strom.fromArray(["a", "b", "cc"])
|
Mhysa.fromArray(["a", "b", "cc"])
|
||||||
.pipe(strom.reduce((acc, s) => ({ ...acc, [s]: s.length }), {}))
|
.pipe(Mhysa.reduce((acc, s) => ({ ...acc, [s]: s.length }), {}))
|
||||||
.pipe(strom.stringify())
|
.pipe(Mhysa.stringify())
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
// {"a":1,"b":1,"cc":2} is printed out
|
// {"a":1,"b":1,"cc":2} is printed out
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
## split(separator)
|
## split(separator)
|
||||||
Returns a `ReadWrite` stream that splits streamed chunks using the given separator
|
Return a `ReadWrite` stream that splits streamed chunks using the given separator
|
||||||
|
|
||||||
| Param | Type | Description |
|
| Param | Type | Description |
|
||||||
| --- | --- | --- |
|
| --- | --- | --- |
|
||||||
@ -109,16 +108,16 @@ Returns a `ReadWrite` stream that splits streamed chunks using the given separat
|
|||||||
| `options.encoding` | `string` | Character encoding to use for decoding chunks. Defaults to utf8 |
|
| `options.encoding` | `string` | Character encoding to use for decoding chunks. Defaults to utf8 |
|
||||||
|
|
||||||
```js
|
```js
|
||||||
strom.fromArray(["a,b", "c,d"])
|
Mhysa.fromArray(["a,b", "c,d"])
|
||||||
.pipe(strom.split(","))
|
.pipe(Mhysa.split(","))
|
||||||
.pipe(strom.join("|"))
|
.pipe(Mhysa.join("|"))
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
// a|bc|d is printed out
|
// a|bc|d is printed out
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
## join(separator)
|
## join(separator)
|
||||||
Returns a `ReadWrite` stream that joins streamed chunks using the given separator
|
Return a `ReadWrite` stream that joins streamed chunks using the given separator
|
||||||
|
|
||||||
| Param | Type | Description |
|
| Param | Type | Description |
|
||||||
| --- | --- | --- |
|
| --- | --- | --- |
|
||||||
@ -127,15 +126,15 @@ Returns a `ReadWrite` stream that joins streamed chunks using the given separato
|
|||||||
| `options.encoding` | `string` | Character encoding to use for decoding chunks. Defaults to utf8 |
|
| `options.encoding` | `string` | Character encoding to use for decoding chunks. Defaults to utf8 |
|
||||||
|
|
||||||
```js
|
```js
|
||||||
strom.fromArray(["a", "b", "c"])
|
Mhysa.fromArray(["a", "b", "c"])
|
||||||
.pipe(strom.join(","))
|
.pipe(Mhysa.join(","))
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
// a,b,c is printed out
|
// a,b,c is printed out
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
## replace(searchValue, replaceValue)
|
## replace(searchValue, replaceValue)
|
||||||
Returns a `ReadWrite` stream that replaces occurrences of the given string or regular expression in
|
Return a `ReadWrite` stream that replaces occurrences of the given string or regular expression in
|
||||||
the streamed chunks with the specified replacement string
|
the streamed chunks with the specified replacement string
|
||||||
|
|
||||||
| Param | Type | Description |
|
| Param | Type | Description |
|
||||||
@ -146,37 +145,37 @@ the streamed chunks with the specified replacement string
|
|||||||
| `options.encoding` | `string` | Character encoding to use for decoding chunks. Defaults to utf8 |
|
| `options.encoding` | `string` | Character encoding to use for decoding chunks. Defaults to utf8 |
|
||||||
|
|
||||||
```js
|
```js
|
||||||
strom.fromArray(["a1", "b22", "c333"])
|
Mhysa.fromArray(["a1", "b22", "c333"])
|
||||||
.pipe(strom.replace(/b\d+/, "B"))
|
.pipe(Mhysa.replace(/b\d+/, "B"))
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
// a1Bc333 is printed out
|
// a1Bc333 is printed out
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
## parse()
|
## parse()
|
||||||
Returns a `ReadWrite` stream that parses the streamed chunks as JSON
|
Return a `ReadWrite` stream that parses the streamed chunks as JSON
|
||||||
|
|
||||||
```js
|
```js
|
||||||
strom.fromArray(['{ "a": "b" }'])
|
Mhysa.fromArray(['{ "a": "b" }'])
|
||||||
.pipe(strom.parse())
|
.pipe(Mhysa.parse())
|
||||||
.once("data", object => console.log(object));
|
.once("data", object => console.log(object));
|
||||||
// { a: 'b' } is printed out
|
// { a: 'b' } is printed out
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
## stringify()
|
## stringify()
|
||||||
Returns a `ReadWrite` stream that stringifies the streamed chunks to JSON
|
Return a `ReadWrite` stream that stringifies the streamed chunks to JSON
|
||||||
|
|
||||||
```js
|
```js
|
||||||
strom.fromArray([{ a: "b" }])
|
Mhysa.fromArray([{ a: "b" }])
|
||||||
.pipe(strom.stringify())
|
.pipe(Mhysa.stringify())
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
// {"a":"b"} is printed out
|
// {"a":"b"} is printed out
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
## collect(options)
|
## collect(options)
|
||||||
Returns a `ReadWrite` stream that collects streamed chunks into an array or buffer
|
Return a `ReadWrite` stream that collects streamed chunks into an array or buffer
|
||||||
|
|
||||||
| Param | Type | Description |
|
| Param | Type | Description |
|
||||||
| --- | --- | --- |
|
| --- | --- | --- |
|
||||||
@ -184,15 +183,15 @@ Returns a `ReadWrite` stream that collects streamed chunks into an array or buff
|
|||||||
| `options.objectMode` | `boolean` | Whether this stream should behave as a stream of objects |
|
| `options.objectMode` | `boolean` | Whether this stream should behave as a stream of objects |
|
||||||
|
|
||||||
```js
|
```js
|
||||||
strom.fromArray(["a", "b", "c"])
|
Mhysa.fromArray(["a", "b", "c"])
|
||||||
.pipe(strom.collect({ objectMode: true }))
|
.pipe(Mhysa.collect({ objectMode: true }))
|
||||||
.once("data", object => console.log(object));
|
.once("data", object => console.log(object));
|
||||||
// [ 'a', 'b', 'c' ] is printed out
|
// [ 'a', 'b', 'c' ] is printed out
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
## concat(streams)
|
## concat(streams)
|
||||||
Returns a `Readable` stream of readable streams concatenated together
|
Return a `Readable` stream of readable streams concatenated together
|
||||||
|
|
||||||
| Param | Type | Description |
|
| Param | Type | Description |
|
||||||
| --- | --- | --- |
|
| --- | --- | --- |
|
||||||
@ -201,7 +200,7 @@ Returns a `Readable` stream of readable streams concatenated together
|
|||||||
```js
|
```js
|
||||||
const source1 = new Readable();
|
const source1 = new Readable();
|
||||||
const source2 = new Readable();
|
const source2 = new Readable();
|
||||||
strom.concat(source1, source2).pipe(process.stdout)
|
Mhysa.concat(source1, source2).pipe(process.stdout)
|
||||||
source1.push("a1 ");
|
source1.push("a1 ");
|
||||||
source2.push("c3 ");
|
source2.push("c3 ");
|
||||||
source1.push("b2 ");
|
source1.push("b2 ");
|
||||||
@ -213,7 +212,7 @@ source2.push(null);
|
|||||||
|
|
||||||
|
|
||||||
## merge(streams)
|
## merge(streams)
|
||||||
Returns a `Readable` stream of readable streams merged together in chunk arrival order
|
Return a `Readable` stream of readable streams merged together in chunk arrival order
|
||||||
|
|
||||||
| Param | Type | Description |
|
| Param | Type | Description |
|
||||||
| --- | --- | --- |
|
| --- | --- | --- |
|
||||||
@ -222,7 +221,7 @@ Returns a `Readable` stream of readable streams merged together in chunk arrival
|
|||||||
```js
|
```js
|
||||||
const source1 = new Readable({ read() {} });
|
const source1 = new Readable({ read() {} });
|
||||||
const source2 = new Readable({ read() {} });
|
const source2 = new Readable({ read() {} });
|
||||||
strom.merge(source1, source2).pipe(process.stdout);
|
Mhysa.merge(source1, source2).pipe(process.stdout);
|
||||||
source1.push("a1 ");
|
source1.push("a1 ");
|
||||||
setTimeout(() => source2.push("c3 "), 10);
|
setTimeout(() => source2.push("c3 "), 10);
|
||||||
setTimeout(() => source1.push("b2 "), 20);
|
setTimeout(() => source1.push("b2 "), 20);
|
||||||
@ -234,7 +233,7 @@ setTimeout(() => source2.push(null), 50);
|
|||||||
|
|
||||||
|
|
||||||
## duplex(writable, readable)
|
## duplex(writable, readable)
|
||||||
Returns a `Duplex` stream from a writable stream that is assumed to somehow, when written to,
|
Return a `Duplex` stream from a writable stream that is assumed to somehow, when written to,
|
||||||
cause the given readable stream to yield chunks
|
cause the given readable stream to yield chunks
|
||||||
|
|
||||||
| Param | Type | Description |
|
| Param | Type | Description |
|
||||||
@ -244,15 +243,15 @@ cause the given readable stream to yield chunks
|
|||||||
|
|
||||||
```js
|
```js
|
||||||
const catProcess = require("child_process").exec("grep -o ab");
|
const catProcess = require("child_process").exec("grep -o ab");
|
||||||
strom.fromArray(["a", "b", "c"])
|
Mhysa.fromArray(["a", "b", "c"])
|
||||||
.pipe(strom.duplex(catProcess.stdin, catProcess.stdout))
|
.pipe(Mhysa.duplex(catProcess.stdin, catProcess.stdout))
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
// ab is printed out
|
// ab is printed out
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
## child(childProcess)
|
## child(childProcess)
|
||||||
Returns a `Duplex` stream from a child process' stdin and stdout
|
Return a `Duplex` stream from a child process' stdin and stdout
|
||||||
|
|
||||||
| Param | Type | Description |
|
| Param | Type | Description |
|
||||||
| --- | --- | --- |
|
| --- | --- | --- |
|
||||||
@ -260,15 +259,15 @@ Returns a `Duplex` stream from a child process' stdin and stdout
|
|||||||
|
|
||||||
```js
|
```js
|
||||||
const catProcess = require("child_process").exec("grep -o ab");
|
const catProcess = require("child_process").exec("grep -o ab");
|
||||||
strom.fromArray(["a", "b", "c"])
|
Mhysa.fromArray(["a", "b", "c"])
|
||||||
.pipe(strom.child(catProcess))
|
.pipe(Mhysa.child(catProcess))
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
// ab is printed out
|
// ab is printed out
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
## last(readable)
|
## last(readable)
|
||||||
Returns a `Promise` resolving to the last streamed chunk of the given readable stream, after it has
|
Return a `Promise` resolving to the last streamed chunk of the given readable stream, after it has
|
||||||
ended
|
ended
|
||||||
|
|
||||||
| Param | Type | Description |
|
| Param | Type | Description |
|
||||||
@ -277,112 +276,9 @@ ended
|
|||||||
|
|
||||||
```js
|
```js
|
||||||
let f = async () => {
|
let f = async () => {
|
||||||
const source = strom.fromArray(["a", "b", "c"]);
|
const source = Mhysa.fromArray(["a", "b", "c"]);
|
||||||
console.log(await strom.last(source));
|
console.log(await Mhysa.last(source));
|
||||||
};
|
};
|
||||||
f();
|
f();
|
||||||
// c is printed out
|
// c is printed out
|
||||||
```
|
```
|
||||||
|
|
||||||
## accumulator(flushStrategy, iteratee, options)
|
|
||||||
TO BE DOCUMENTED
|
|
||||||
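Not documented here yet; based on `tests/accumulator.spec.ts` further down in this diff, `accumulator()` (and the related `accumulatorBy()`) groups incoming object chunks into arrays according to a flush strategy (`rolling` or `sliding` from `FlushStrategy`) and emits each accumulated array as a single downstream chunk. Treat the exact parameter semantics as provisional until this section is filled in.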
|
|
||||||
## batch(batchSize, maxBatchAge, options)
|
|
||||||
Returns a `Transform` stream which produces all incoming data in batches of size `batchSize`.
|
|
||||||
|
|
||||||
| Param | Type | Description |
|
|
||||||
| --- | --- | --- |
|
|
||||||
| `batchSize` | `number` | Size of the batches to be produced |
|
|
||||||
| `maxBatchAge` | `number` | Maximum number of milliseconds a message will be queued for. E.g. a batch will be produced before reaching `batchSize` if the first message queued is `maxBatchAge` ms old or more |
|
|
||||||
| `options` | `TransformOptions` | Options passed down to the Transform object |
|
|
||||||
|
|
||||||
```js
|
|
||||||
strom.fromArray(["a", "b", "c", "d"])
|
|
||||||
.pipe(strom.batch(3, 500))
|
|
||||||
.pipe(process.stdout);
|
|
||||||
// ["a","b","c"]
|
|
||||||
// ["d"] //After 500ms
|
|
||||||
```
|
|
||||||
|
|
||||||
## compose(streams, errorCb, options)
|
|
||||||
|
|
||||||
Returns a `Transform` stream which consists of all `streams` but behaves as a single stream. The returned stream can be piped into and read from transparently.
|
|
||||||
|
|
||||||
| Param | Type | Description |
|
|
||||||
| --- | --- | --- |
|
|
||||||
| `streams` | `Array` | Streams to be composed |
|
|
||||||
| `errorCb` | `(err: Error) => void` | Function called when an error occurs in any of the streams |
|
|
||||||
| `options` | `TransformOptions` | Options passed down to the Transform object |
|
|
||||||
|
|
||||||
```js
|
|
||||||
const composed = strom.compose([
|
|
||||||
strom.split(),
|
|
||||||
strom.map(data => data.trim()),
|
|
||||||
strom.filter(str => !!str),
|
|
||||||
strom.parse(),
|
|
||||||
strom.flatMap(data => data),
|
|
||||||
strom.stringify(),
|
|
||||||
]);
|
|
||||||
|
|
||||||
const data = ["[1,2,3] \n [4,5,6] ", "\n [7,8,9] \n\n"];
|
|
||||||
|
|
||||||
strom.fromArray(data).pipe(composed).pipe(process.stdout);
|
|
||||||
// 123456789
|
|
||||||
```
|
|
||||||
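The `errorCb` parameter in the table above receives any error raised inside the composed pipeline. A minimal sketch of wiring it up (the handler body is illustrative only):

```js
const composed = strom.compose(
    [strom.parse(), strom.stringify()],
    err => console.error("composed pipeline failed:", err),
);
```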
|
|
||||||
## demux(pipelineConstructor, demuxBy, options)
|
|
||||||
TO BE DOCUMENTED
|
|
||||||
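Not documented here yet; the sketch below is inferred from `src/functions/demux.ts` later in this diff, so treat the exact semantics as provisional. `demux` builds one pipeline per distinct key (here, the `user` property) via the constructor function, and by default re-multiplexes readable pipelines back into its own output:

```js
// one hypothetical per-key pipeline; demuxed by the "user" property of each chunk
const demuxed = strom.demux(
    user => strom.map(event => `${user}:${event.action} `),
    "user",
    { objectMode: true },
);

strom.fromArray([
    { user: "a", action: "login" },
    { user: "b", action: "login" },
    { user: "a", action: "logout" },
])
    .pipe(demuxed)
    .pipe(process.stdout);
// a:login b:login a:logout is printed out (ordering may vary with async pipelines)
```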
|
|
||||||
## parallelMap(mapper, parallel, sleepTime, options)
|
|
||||||
Returns a `Transform` stream which maps incoming data through the async mapper with the given parallelism.
|
|
||||||
|
|
||||||
| Param | Type | Description | Default |
|
|
||||||
| --- | --- | --- | --- |
|
|
||||||
| `mapper` | `async (chunk: T, encoding: string) => R` | Mapper function, mapping each (chunk, encoding) to a new chunk (non-async will not be parallelized) | -- |
|
|
||||||
| `parallel` | `number` | Number of concurrent executions of the mapper allowed | 10 |
|
|
||||||
| `sleepTime` | `number` | Number of milliseconds to wait before testing if more messages can be processed | 1 |
|
|
||||||
|
|
||||||
```js
|
|
||||||
function sleep(time) {
|
|
||||||
return time > 0 ? new Promise(resolve => setTimeout(resolve, time)) : null;
|
|
||||||
}
|
|
||||||
|
|
||||||
strom
|
|
||||||
.fromArray([1, 2, 3, 4, 6, 8])
|
|
||||||
.pipe(
|
|
||||||
strom.parallelMap(async d => {
|
|
||||||
await sleep(10000 - d * 1000);
|
|
||||||
return `${d}`;
|
|
||||||
}, 3),
|
|
||||||
)
|
|
||||||
.pipe(process.stdout);
|
|
||||||
|
|
||||||
// 321864
|
|
||||||
```
|
|
||||||
|
|
||||||
## rate()
|
|
||||||
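Based on `src/functions/rate.ts` later in this diff: `rate(targetRate, period, options)` returns a `Transform` stream that throttles throughput to roughly `targetRate` chunks per second (default 50). On the master branch, `options.behavior` selects whether excess chunks are buffered (`Behavior.BUFFER`, the default) or dropped (`Behavior.DROP`), and `options.window` bounds the measurement window; the example below uses the drop behavior (`behavior: 1`).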
|
|
||||||
|
|
||||||
```js
|
|
||||||
const strom = require("stromjs").strom();
|
|
||||||
|
|
||||||
function sleep(time) {
|
|
||||||
return time > 0 ? new Promise(resolve => setTimeout(resolve, time)) : null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const rate = strom.rate(2, 1, { behavior: 1 });
|
|
||||||
rate.pipe(strom.map(x => console.log(x)));
|
|
||||||
async function produce() {
|
|
||||||
rate.write(1);
|
|
||||||
await sleep(500);
|
|
||||||
rate.write(2);
|
|
||||||
await sleep(500);
|
|
||||||
rate.write(3);
|
|
||||||
rate.write(4);
|
|
||||||
rate.write(5);
|
|
||||||
await sleep(500);
|
|
||||||
rate.write(6);
|
|
||||||
}
|
|
||||||
|
|
||||||
produce();
|
|
||||||
```
|
|
||||||
|
package.json
@ -1,26 +1,24 @@
|
|||||||
{
|
{
|
||||||
"name": "stromjs",
|
"name": "@jogogo/mhysa",
|
||||||
"version": "0.5.1",
|
"version": "0.0.1-beta.4",
|
||||||
"description": "Dependency-free streams utils for Node.js",
|
"description": "Streams and event emitter utils for Node.js",
|
||||||
"keywords": [
|
"keywords": [
|
||||||
"promise",
|
"promise",
|
||||||
"stream",
|
"stream",
|
||||||
|
"event emitter",
|
||||||
"utils"
|
"utils"
|
||||||
],
|
],
|
||||||
|
"author": {
|
||||||
|
"name": "Wenzil"
|
||||||
|
},
|
||||||
"contributors": [
|
"contributors": [
|
||||||
{
|
{
|
||||||
"name": "Sami Turcotte",
|
"name": "jerry",
|
||||||
"url": "https://github.com/Wenzil"
|
"email": "jerry@jogogo.co"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "Jerry Kurian",
|
"name": "lewis",
|
||||||
"email": "jerrykurian@protonmail.com",
|
"email": "lewis@jogogo.co"
|
||||||
"url": "https://github.com/jkurian"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Lewis Diamond",
|
|
||||||
"email": "stromjs@lewisdiamond.com",
|
|
||||||
"url": "https://github.com/lewisdiamond"
|
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
@ -29,25 +27,29 @@
|
|||||||
"files": [
|
"files": [
|
||||||
"dist"
|
"dist"
|
||||||
],
|
],
|
||||||
|
"publishConfig": {
|
||||||
|
"registry": "https://npm.dev.jogogo.co/"
|
||||||
|
},
|
||||||
"repository": {
|
"repository": {
|
||||||
"url": "https://github.com/lewisdiamond/stromjs",
|
"url": "git@github.com:Jogogoplay/mhysa.git",
|
||||||
"type": "git"
|
"type": "git"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "ava",
|
"test": "NODE_PATH=src node node_modules/.bin/ava 'tests/*.spec.ts' -e",
|
||||||
|
"test:debug": "NODE_PATH=src node inspect node_modules/ava/profile.js",
|
||||||
|
"test:all": "NODE_PATH=src node node_modules/.bin/ava",
|
||||||
"lint": "tslint -p tsconfig.json",
|
"lint": "tslint -p tsconfig.json",
|
||||||
"validate:tslint": "tslint-config-prettier-check ./tslint.json",
|
"validate:tslint": "tslint-config-prettier-check ./tslint.json",
|
||||||
"prepublishOnly": "yarn lint && yarn test && yarn tsc -d",
|
"prepublishOnly": "yarn lint && yarn test && yarn tsc -d"
|
||||||
"prepare": "tsc"
|
|
||||||
},
|
},
|
||||||
"dependencies": {},
|
"dependencies": {},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/chai": "^4.1.7",
|
"@types/chai": "^4.1.7",
|
||||||
"@types/node": "^12.12.15",
|
"@types/node": "^12.7.2",
|
||||||
"@types/sinon": "^7.0.13",
|
"@types/sinon": "^7.0.13",
|
||||||
"ava": "^2.4.0",
|
"ava": "^1.0.0-rc.2",
|
||||||
"chai": "^4.2.0",
|
"chai": "^4.2.0",
|
||||||
"stromjs": "./",
|
"mhysa": "./",
|
||||||
"prettier": "^1.14.3",
|
"prettier": "^1.14.3",
|
||||||
"sinon": "^7.4.2",
|
"sinon": "^7.4.2",
|
||||||
"ts-node": "^8.3.0",
|
"ts-node": "^8.3.0",
|
||||||
@ -58,8 +60,7 @@
|
|||||||
},
|
},
|
||||||
"ava": {
|
"ava": {
|
||||||
"files": [
|
"files": [
|
||||||
"tests/*.spec.ts",
|
"tests/*.spec.ts"
|
||||||
"tests/utils/*.spec.ts"
|
|
||||||
],
|
],
|
||||||
"sources": [
|
"sources": [
|
||||||
"src/**/*.ts"
|
"src/**/*.ts"
|
||||||
|
@ -1,9 +1,8 @@
|
|||||||
const strom = require("stromjs");
|
const Mhysa = require("mhysa");
|
||||||
|
|
||||||
strom
|
Mhysa.fromArray(["a", "b", "c"])
|
||||||
.fromArray(["a", "b", "c"])
|
.pipe(Mhysa.map(s => Promise.resolve(s + s)))
|
||||||
.pipe(strom.map(s => Promise.resolve(s + s)))
|
.pipe(Mhysa.flatMap(s => Promise.resolve([s, s.toUpperCase()])))
|
||||||
.pipe(strom.flatMap(s => Promise.resolve([s, s.toUpperCase()])))
|
.pipe(Mhysa.filter(s => Promise.resolve(s !== "bb")))
|
||||||
.pipe(strom.filter(s => Promise.resolve(s !== "bb")))
|
.pipe(Mhysa.join(","))
|
||||||
.pipe(strom.join(","))
|
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
|
@ -1,7 +1,6 @@
|
|||||||
const strom = require("stromjs");
|
const Mhysa = require("mhysa");
|
||||||
const catProcess = require("child_process").exec("grep -o ab");
|
const catProcess = require("child_process").exec("grep -o ab");
|
||||||
|
|
||||||
strom
|
Mhysa.fromArray(["a", "b", "c"])
|
||||||
.fromArray(["a", "b", "c"])
|
.pipe(Mhysa.child(catProcess))
|
||||||
.pipe(strom.child(catProcess))
|
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
|
@ -1,6 +1,5 @@
|
|||||||
const strom = require("stromjs");
|
const Mhysa = require("mhysa");
|
||||||
|
|
||||||
strom
|
Mhysa.fromArray(["a", "b", "c"])
|
||||||
.fromArray(["a", "b", "c"])
|
.pipe(Mhysa.collect({ objectMode: true }))
|
||||||
.pipe(strom.collect({ objectMode: true }))
|
|
||||||
.on("data", object => console.log(object));
|
.on("data", object => console.log(object));
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
const { Readable } = require("stream");
|
const { Readable } = require("stream");
|
||||||
const strom = require("stromjs");
|
const Mhysa = require("mhysa");
|
||||||
|
|
||||||
const source1 = new Readable();
|
const source1 = new Readable();
|
||||||
const source2 = new Readable();
|
const source2 = new Readable();
|
||||||
strom.concat(source1, source2).pipe(process.stdout);
|
Mhysa.concat(source1, source2).pipe(process.stdout);
|
||||||
source1.push("a1 ");
|
source1.push("a1 ");
|
||||||
source2.push("c3 ");
|
source2.push("c3 ");
|
||||||
source1.push("b2 ");
|
source1.push("b2 ");
|
||||||
|
@ -1,7 +1,6 @@
|
|||||||
const strom = require("stromjs");
|
const Mhysa = require("mhysa");
|
||||||
const catProcess = require("child_process").exec("grep -o ab");
|
const catProcess = require("child_process").exec("grep -o ab");
|
||||||
|
|
||||||
strom
|
Mhysa.fromArray(["a", "b", "c"])
|
||||||
.fromArray(["a", "b", "c"])
|
.pipe(Mhysa.duplex(catProcess.stdin, catProcess.stdout))
|
||||||
.pipe(strom.duplex(catProcess.stdin, catProcess.stdout))
|
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
|
@ -1,7 +1,6 @@
|
|||||||
const strom = require("stromjs");
|
const Mhysa = require("mhysa");
|
||||||
|
|
||||||
strom
|
Mhysa.fromArray(["a", "b", "c"])
|
||||||
.fromArray(["a", "b", "c"])
|
.pipe(Mhysa.filter(s => s !== "b"))
|
||||||
.pipe(strom.filter(s => s !== "b"))
|
.pipe(Mhysa.join(","))
|
||||||
.pipe(strom.join(","))
|
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
|
@ -1,7 +1,6 @@
|
|||||||
const strom = require("stromjs");
|
const Mhysa = require("mhysa");
|
||||||
|
|
||||||
strom
|
Mhysa.fromArray(["a", "AA"])
|
||||||
.fromArray(["a", "AA"])
|
.pipe(Mhysa.flatMap(s => new Array(s.length).fill(s)))
|
||||||
.pipe(strom.flatMap(s => new Array(s.length).fill(s)))
|
.pipe(Mhysa.join(","))
|
||||||
.pipe(strom.join(","))
|
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
|
@ -1,6 +1,5 @@
|
|||||||
const strom = require("stromjs");
|
const Mhysa = require("mhysa");
|
||||||
|
|
||||||
strom
|
Mhysa.fromArray(["a", "b", "c"])
|
||||||
.fromArray(["a", "b", "c"])
|
.pipe(Mhysa.join(","))
|
||||||
.pipe(strom.join(","))
|
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
const strom = require("stromjs");
|
const Mhysa = require("mhysa");
|
||||||
|
|
||||||
let f = async () => {
|
let f = async () => {
|
||||||
const source = strom.fromArray(["a", "b", "c"]);
|
const source = Mhysa.fromArray(["a", "b", "c"]);
|
||||||
console.log(await strom.last(source));
|
console.log(await Mhysa.last(source));
|
||||||
};
|
};
|
||||||
f();
|
f();
|
||||||
|
@ -1,7 +1,6 @@
|
|||||||
const strom = require("stromjs");
|
const Mhysa = require("mhysa");
|
||||||
|
|
||||||
strom
|
Mhysa.fromArray(["a", "b"])
|
||||||
.fromArray(["a", "b"])
|
.pipe(Mhysa.map(s => s.toUpperCase()))
|
||||||
.pipe(strom.map(s => s.toUpperCase()))
|
.pipe(Mhysa.join(","))
|
||||||
.pipe(strom.join(","))
|
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
const { Readable } = require("stream");
|
const { Readable } = require("stream");
|
||||||
const strom = require("stromjs");
|
const Mhysa = require("mhysa");
|
||||||
|
|
||||||
const source1 = new Readable({ read() {} });
|
const source1 = new Readable({ read() {} });
|
||||||
const source2 = new Readable({ read() {} });
|
const source2 = new Readable({ read() {} });
|
||||||
strom.merge(source1, source2).pipe(process.stdout);
|
Mhysa.merge(source1, source2).pipe(process.stdout);
|
||||||
source1.push("a1 ");
|
source1.push("a1 ");
|
||||||
setTimeout(() => source2.push("c3 "), 10);
|
setTimeout(() => source2.push("c3 "), 10);
|
||||||
setTimeout(() => source1.push("b2 "), 20);
|
setTimeout(() => source1.push("b2 "), 20);
|
||||||
|
@ -1,15 +0,0 @@
|
|||||||
const strom = require("stromjs");
|
|
||||||
|
|
||||||
function sleep(time) {
|
|
||||||
return time > 0 ? new Promise(resolve => setTimeout(resolve, time)) : null;
|
|
||||||
}
|
|
||||||
|
|
||||||
strom
|
|
||||||
.fromArray([1, 2, 3, 4, 6, 8])
|
|
||||||
.pipe(
|
|
||||||
strom.parallelMap(async d => {
|
|
||||||
await sleep(10000 - d * 1000);
|
|
||||||
return `${d}`;
|
|
||||||
}, 3),
|
|
||||||
)
|
|
||||||
.pipe(process.stdout);
|
|
@ -1,6 +1,5 @@
|
|||||||
const strom = require("stromjs");
|
const Mhysa = require("mhysa");
|
||||||
|
|
||||||
strom
|
Mhysa.fromArray(['{ "a": "b" }'])
|
||||||
.fromArray(['{ "a": "b" }'])
|
.pipe(Mhysa.parse())
|
||||||
.pipe(strom.parse())
|
|
||||||
.on("data", object => console.log(object));
|
.on("data", object => console.log(object));
|
||||||
|
@ -1,21 +0,0 @@
|
|||||||
const strom = require("stromjs");
|
|
||||||
|
|
||||||
function sleep(time) {
|
|
||||||
return time > 0 ? new Promise(resolve => setTimeout(resolve, time)) : null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const rate = strom.rate(2, 1, { behavior: 1 });
|
|
||||||
rate.pipe(strom.map(x => console.log(x)));
|
|
||||||
async function produce() {
|
|
||||||
rate.write(1);
|
|
||||||
await sleep(500);
|
|
||||||
rate.write(2);
|
|
||||||
await sleep(500);
|
|
||||||
rate.write(3);
|
|
||||||
rate.write(4);
|
|
||||||
rate.write(5);
|
|
||||||
await sleep(500);
|
|
||||||
rate.write(6);
|
|
||||||
}
|
|
||||||
|
|
||||||
produce();
|
|
@ -1,7 +1,6 @@
|
|||||||
const strom = require("stromjs");
|
const Mhysa = require("mhysa");
|
||||||
|
|
||||||
strom
|
Mhysa.fromArray(["a", "b", "cc"])
|
||||||
.fromArray(["a", "b", "cc"])
|
.pipe(Mhysa.reduce((acc, s) => ({ ...acc, [s]: s.length }), {}))
|
||||||
.pipe(strom.reduce((acc, s) => ({ ...acc, [s]: s.length }), {}))
|
.pipe(Mhysa.stringify())
|
||||||
.pipe(strom.stringify())
|
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
|
@ -1,6 +1,5 @@
|
|||||||
const strom = require("stromjs");
|
const Mhysa = require("mhysa");
|
||||||
|
|
||||||
strom
|
Mhysa.fromArray(["a1", "b22", "c333"])
|
||||||
.fromArray(["a1", "b22", "c333"])
|
.pipe(Mhysa.replace(/b\d+/, "B"))
|
||||||
.pipe(strom.replace(/b\d+/, "B"))
|
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
|
@ -1,7 +1,6 @@
|
|||||||
const strom = require("stromjs");
|
const Mhysa = require("mhysa");
|
||||||
|
|
||||||
strom
|
Mhysa.fromArray(["a,b", "c,d"])
|
||||||
.fromArray(["a,b", "c,d"])
|
.pipe(Mhysa.split(","))
|
||||||
.pipe(strom.split(","))
|
.pipe(Mhysa.join("|"))
|
||||||
.pipe(strom.join("|"))
|
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
|
@ -1,6 +1,5 @@
|
|||||||
const strom = require("stromjs");
|
const Mhysa = require("mhysa");
|
||||||
|
|
||||||
strom
|
Mhysa.fromArray([{ a: "b" }])
|
||||||
.fromArray([{ a: "b" }])
|
.pipe(Mhysa.stringify())
|
||||||
.pipe(strom.stringify())
|
|
||||||
.pipe(process.stdout);
|
.pipe(process.stdout);
|
||||||
|
@ -2,7 +2,7 @@ import { Transform, TransformOptions } from "stream";
|
|||||||
|
|
||||||
export function batch(
|
export function batch(
|
||||||
batchSize: number = 1000,
|
batchSize: number = 1000,
|
||||||
maxBatchAge: number = 0,
|
maxBatchAge: number = 500,
|
||||||
options: TransformOptions = {},
|
options: TransformOptions = {},
|
||||||
): Transform {
|
): Transform {
|
||||||
let buffer: any[] = [];
|
let buffer: any[] = [];
|
||||||
@ -12,9 +12,7 @@ export function batch(
|
|||||||
clearTimeout(timer);
|
clearTimeout(timer);
|
||||||
}
|
}
|
||||||
timer = null;
|
timer = null;
|
||||||
if (buffer.length > 0) {
|
|
||||||
self.push(buffer);
|
self.push(buffer);
|
||||||
}
|
|
||||||
buffer = [];
|
buffer = [];
|
||||||
};
|
};
|
||||||
return new Transform({
|
return new Transform({
|
||||||
@ -23,7 +21,7 @@ export function batch(
|
|||||||
buffer.push(chunk);
|
buffer.push(chunk);
|
||||||
if (buffer.length === batchSize) {
|
if (buffer.length === batchSize) {
|
||||||
sendChunk(this);
|
sendChunk(this);
|
||||||
} else if (maxBatchAge) {
|
} else {
|
||||||
if (timer === null) {
|
if (timer === null) {
|
||||||
timer = setInterval(() => {
|
timer = setInterval(() => {
|
||||||
sendChunk(this);
|
sendChunk(this);
|
||||||
|
@ -1,18 +1,16 @@
|
|||||||
import { AllStreams, isReadable } from "../helpers";
|
import { pipeline, Duplex, DuplexOptions } from "stream";
|
||||||
import { PassThrough, pipeline, TransformOptions, Transform } from "stream";
|
|
||||||
|
|
||||||
export function compose(
|
export function compose(
|
||||||
streams: Array<
|
streams: Array<
|
||||||
NodeJS.ReadableStream | NodeJS.ReadWriteStream | NodeJS.WritableStream
|
NodeJS.ReadableStream | NodeJS.ReadWriteStream | NodeJS.WritableStream
|
||||||
>,
|
>,
|
||||||
errorCallback?: (err: any) => void,
|
options?: DuplexOptions,
|
||||||
options?: TransformOptions,
|
|
||||||
): Compose {
|
): Compose {
|
||||||
if (streams.length < 2) {
|
if (streams.length < 2) {
|
||||||
throw new Error("At least two streams are required to compose");
|
throw new Error("At least two streams are required to compose");
|
||||||
}
|
}
|
||||||
|
|
||||||
return new Compose(streams, errorCallback, options);
|
return new Compose(streams, options);
|
||||||
}
|
}
|
||||||
|
|
||||||
enum EventSubscription {
|
enum EventSubscription {
|
||||||
@ -22,60 +20,46 @@ enum EventSubscription {
|
|||||||
Self,
|
Self,
|
||||||
}
|
}
|
||||||
|
|
||||||
export class Compose extends Transform {
|
const eventsTarget = {
|
||||||
|
close: EventSubscription.Last,
|
||||||
|
data: EventSubscription.Last,
|
||||||
|
drain: EventSubscription.Self,
|
||||||
|
end: EventSubscription.Last,
|
||||||
|
error: EventSubscription.Self,
|
||||||
|
finish: EventSubscription.Last,
|
||||||
|
pause: EventSubscription.Last,
|
||||||
|
pipe: EventSubscription.First,
|
||||||
|
readable: EventSubscription.Last,
|
||||||
|
resume: EventSubscription.Last,
|
||||||
|
unpipe: EventSubscription.First,
|
||||||
|
};
|
||||||
|
|
||||||
|
type AllStreams =
|
||||||
|
| NodeJS.ReadableStream
|
||||||
|
| NodeJS.ReadWriteStream
|
||||||
|
| NodeJS.WritableStream;
|
||||||
|
|
||||||
|
export class Compose extends Duplex {
|
||||||
private first: AllStreams;
|
private first: AllStreams;
|
||||||
private last: AllStreams;
|
private last: AllStreams;
|
||||||
private streams: AllStreams[];
|
private streams: AllStreams[];
|
||||||
private inputStream: ReadableStream;
|
|
||||||
|
|
||||||
constructor(
|
constructor(streams: AllStreams[], options?: DuplexOptions) {
|
||||||
streams: AllStreams[],
|
|
||||||
errorCallback?: (err: any) => void,
|
|
||||||
options?: TransformOptions,
|
|
||||||
) {
|
|
||||||
super(options);
|
super(options);
|
||||||
this.first = new PassThrough(options);
|
this.first = streams[0];
|
||||||
this.last = streams[streams.length - 1];
|
this.last = streams[streams.length - 1];
|
||||||
this.streams = streams;
|
this.streams = streams;
|
||||||
pipeline(
|
pipeline(streams, (err: any) => {
|
||||||
[this.first, ...streams],
|
this.emit("error", err);
|
||||||
errorCallback ||
|
|
||||||
((error: any) => {
|
|
||||||
if (error) {
|
|
||||||
this.emit("error", error);
|
|
||||||
}
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
|
|
||||||
if (isReadable(this.last)) {
|
|
||||||
(this.last as NodeJS.ReadWriteStream).pipe(
|
|
||||||
new Transform({
|
|
||||||
...options,
|
|
||||||
transform: (d: any, encoding, cb) => {
|
|
||||||
this.push(d);
|
|
||||||
cb();
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public _transform(chunk: any, encoding: string, cb: any) {
|
|
||||||
(this.first as NodeJS.WritableStream).write(chunk, encoding, cb);
|
|
||||||
}
|
|
||||||
|
|
||||||
public _flush(cb: any) {
|
|
||||||
if (isReadable(this.first)) {
|
|
||||||
(this.first as any).push(null);
|
|
||||||
}
|
|
||||||
this.last.once("end", () => {
|
|
||||||
cb();
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
public _destroy(error: any, cb: (error?: any) => void) {
|
public pipe<T extends NodeJS.WritableStream>(dest: T) {
|
||||||
this.streams.forEach(s => (s as any).destroy());
|
return (this.last as NodeJS.ReadableStream).pipe(dest);
|
||||||
cb(error);
|
}
|
||||||
|
|
||||||
|
public _write(chunk: any, encoding: string, cb: any) {
|
||||||
|
(this.first as NodeJS.WritableStream).write(chunk, encoding, cb);
|
||||||
}
|
}
|
||||||
|
|
||||||
public bubble(...events: string[]) {
|
public bubble(...events: string[]) {
|
||||||
@ -85,4 +69,38 @@ export class Compose extends Transform {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public on(event: string, cb: any) {
|
||||||
|
switch (eventsTarget[event]) {
|
||||||
|
case EventSubscription.First:
|
||||||
|
this.first.on(event, cb);
|
||||||
|
break;
|
||||||
|
case EventSubscription.Last:
|
||||||
|
this.last.on(event, cb);
|
||||||
|
break;
|
||||||
|
case EventSubscription.All:
|
||||||
|
this.streams.forEach(s => s.on(event, cb));
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
super.on(event, cb);
|
||||||
|
}
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public once(event: string, cb: any) {
|
||||||
|
switch (eventsTarget[event]) {
|
||||||
|
case EventSubscription.First:
|
||||||
|
this.first.once(event, cb);
|
||||||
|
break;
|
||||||
|
case EventSubscription.Last:
|
||||||
|
this.last.once(event, cb);
|
||||||
|
break;
|
||||||
|
case EventSubscription.All:
|
||||||
|
this.streams.forEach(s => s.once(event, cb));
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
super.once(event, cb);
|
||||||
|
}
|
||||||
|
return this;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,131 +1,99 @@
|
|||||||
import { DuplexOptions, Duplex, Transform } from "stream";
|
import { WritableOptions, Writable } from "stream";
|
||||||
|
|
||||||
import { isReadable } from "../helpers";
|
enum EventSubscription {
|
||||||
|
Last = 0,
|
||||||
|
First,
|
||||||
|
All,
|
||||||
|
Self,
|
||||||
|
Unhandled,
|
||||||
|
}
|
||||||
|
|
||||||
|
const eventsTarget = {
|
||||||
|
close: EventSubscription.Self,
|
||||||
|
data: EventSubscription.All,
|
||||||
|
drain: EventSubscription.Self,
|
||||||
|
end: EventSubscription.Self,
|
||||||
|
error: EventSubscription.Self,
|
||||||
|
finish: EventSubscription.Self,
|
||||||
|
pause: EventSubscription.Self,
|
||||||
|
pipe: EventSubscription.Self,
|
||||||
|
readable: EventSubscription.Self,
|
||||||
|
resume: EventSubscription.Self,
|
||||||
|
unpipe: EventSubscription.Self,
|
||||||
|
};
|
||||||
|
|
||||||
type DemuxStreams = NodeJS.WritableStream | NodeJS.ReadWriteStream;
|
type DemuxStreams = NodeJS.WritableStream | NodeJS.ReadWriteStream;
|
||||||
|
|
||||||
export interface DemuxOptions extends DuplexOptions {
|
|
||||||
remultiplex?: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function demux(
|
export function demux(
|
||||||
pipelineConstructor: (
|
construct: (destKey?: string) => DemuxStreams,
|
||||||
destKey?: string,
|
|
||||||
chunk?: any,
|
|
||||||
) => DemuxStreams | DemuxStreams[],
|
|
||||||
demuxBy: string | ((chunk: any) => string),
|
demuxBy: string | ((chunk: any) => string),
|
||||||
options?: DemuxOptions,
|
options?: WritableOptions,
|
||||||
): Duplex {
|
): Writable {
|
||||||
return new Demux(pipelineConstructor, demuxBy, options);
|
return new Demux(construct, demuxBy, options);
|
||||||
}
|
}
|
||||||
|
|
||||||
class Demux extends Duplex {
|
// @TODO handle pipe event ie) Multiplex
|
||||||
|
class Demux extends Writable {
|
||||||
private streamsByKey: {
|
private streamsByKey: {
|
||||||
[key: string]: DemuxStreams[];
|
[key: string]: DemuxStreams;
|
||||||
};
|
};
|
||||||
private demuxer: (chunk: any) => string;
|
private demuxer: (chunk: any) => string;
|
||||||
private pipelineConstructor: (
|
private construct: (destKey?: string) => DemuxStreams;
|
||||||
destKey?: string,
|
|
||||||
chunk?: any,
|
|
||||||
) => DemuxStreams[];
|
|
||||||
private remultiplex: boolean;
|
|
||||||
private transform: Transform;
|
|
||||||
constructor(
|
constructor(
|
||||||
pipelineConstructor: (
|
construct: (destKey?: string) => DemuxStreams,
|
||||||
destKey?: string,
|
|
||||||
chunk?: any,
|
|
||||||
) => DemuxStreams | DemuxStreams[],
|
|
||||||
demuxBy: string | ((chunk: any) => string),
|
demuxBy: string | ((chunk: any) => string),
|
||||||
options: DemuxOptions = {},
|
options: WritableOptions = {},
|
||||||
) {
|
) {
|
||||||
super(options);
|
super(options);
|
||||||
this.demuxer =
|
this.demuxer =
|
||||||
typeof demuxBy === "string" ? chunk => chunk[demuxBy] : demuxBy;
|
typeof demuxBy === "string" ? chunk => chunk[demuxBy] : demuxBy;
|
||||||
this.pipelineConstructor = (destKey: string, chunk?: any) => {
|
this.construct = construct;
|
||||||
const pipeline = pipelineConstructor(destKey, chunk);
|
|
||||||
return Array.isArray(pipeline) ? pipeline : [pipeline];
|
|
||||||
};
|
|
||||||
this.remultiplex =
|
|
||||||
options.remultiplex === undefined ? true : options.remultiplex;
|
|
||||||
this.streamsByKey = {};
|
this.streamsByKey = {};
|
||||||
this.transform = new Transform({
|
|
||||||
...options,
|
|
||||||
transform: (d, _, cb) => {
|
|
||||||
this.push(d);
|
|
||||||
cb(null);
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
this.on("unpipe", () => this._flush());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// tslint:disable-next-line
|
|
||||||
public _read(size: number) {}
|
|
||||||
|
|
||||||
public async _write(chunk: any, encoding: any, cb: any) {
|
public async _write(chunk: any, encoding: any, cb: any) {
|
||||||
const destKey = this.demuxer(chunk);
|
const destKey = this.demuxer(chunk);
|
||||||
if (this.streamsByKey[destKey] === undefined) {
|
if (this.streamsByKey[destKey] === undefined) {
|
||||||
const newPipelines = this.pipelineConstructor(destKey, chunk);
|
this.streamsByKey[destKey] = await this.construct(destKey);
|
||||||
this.streamsByKey[destKey] = newPipelines;
|
|
||||||
|
|
||||||
newPipelines.forEach(newPipeline => {
|
|
||||||
if (this.remultiplex && isReadable(newPipeline)) {
|
|
||||||
(newPipeline as NodeJS.ReadWriteStream).pipe(
|
|
||||||
this.transform,
|
|
||||||
);
|
|
||||||
} else if (this.remultiplex) {
|
|
||||||
console.error(
|
|
||||||
`Pipeline construct for ${destKey} does not implement readable interface`,
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
if (!this.streamsByKey[destKey].write(chunk, encoding)) {
|
||||||
|
this.streamsByKey[destKey].once("drain", () => {
|
||||||
|
cb();
|
||||||
});
|
});
|
||||||
}
|
} else {
|
||||||
const pipelines = this.streamsByKey[destKey];
|
|
||||||
const pendingDrains: Array<Promise<any>> = [];
|
|
||||||
|
|
||||||
pipelines.forEach(pipeline => {
|
|
||||||
if (!pipeline.write(chunk, encoding)) {
|
|
||||||
pendingDrains.push(
|
|
||||||
new Promise(resolve => {
|
|
||||||
pipeline.once("drain", () => {
|
|
||||||
resolve();
|
|
||||||
});
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
await Promise.all(pendingDrains);
|
|
||||||
cb();
|
cb();
|
||||||
}
|
}
|
||||||
|
|
||||||
public _flush() {
|
|
||||||
const pipelines: DemuxStreams[] = Array.prototype.concat.apply(
|
|
||||||
[],
|
|
||||||
Object.values(this.streamsByKey),
|
|
||||||
);
|
|
||||||
const flushPromises: Array<Promise<void>> = [];
|
|
||||||
pipelines.forEach(pipeline => {
|
|
||||||
flushPromises.push(
|
|
||||||
new Promise(resolve => {
|
|
||||||
pipeline.once("end", () => {
|
|
||||||
resolve();
|
|
||||||
});
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
pipelines.forEach(pipeline => pipeline.end());
|
|
||||||
Promise.all(flushPromises).then(() => {
|
|
||||||
this.push(null);
|
|
||||||
this.emit("end");
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public _destroy(error: any, cb: (error?: any) => void) {
|
public on(event: string, cb: any) {
|
||||||
const pipelines: DemuxStreams[] = [].concat.apply(
|
switch (eventsTarget[event]) {
|
||||||
[],
|
case EventSubscription.Self:
|
||||||
Object.values(this.streamsByKey),
|
super.on(event, cb);
|
||||||
|
break;
|
||||||
|
case EventSubscription.All:
|
||||||
|
Object.keys(this.streamsByKey).forEach(key =>
|
||||||
|
this.streamsByKey[key].on(event, cb),
|
||||||
);
|
);
|
||||||
pipelines.forEach(p => (p as any).destroy());
|
break;
|
||||||
cb(error);
|
default:
|
||||||
|
super.on(event, cb);
|
||||||
|
}
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public once(event: string, cb: any) {
|
||||||
|
switch (eventsTarget[event]) {
|
||||||
|
case EventSubscription.Self:
|
||||||
|
super.once(event, cb);
|
||||||
|
break;
|
||||||
|
case EventSubscription.All:
|
||||||
|
Object.keys(this.streamsByKey).forEach(key =>
|
||||||
|
this.streamsByKey[key].once(event, cb),
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
super.once(event, cb);
|
||||||
|
}
|
||||||
|
return this;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -28,7 +28,7 @@ import { unbatch } from "./unbatch";
|
|||||||
import { compose } from "./compose";
|
import { compose } from "./compose";
|
||||||
import { demux } from "./demux";
|
import { demux } from "./demux";
|
||||||
|
|
||||||
export function strom(defaultOptions: TransformOptions = { objectMode: true }) {
|
export default function mhysa(defaultOptions?: TransformOptions) {
|
||||||
function withDefaultOptions<T extends any[], R>(
|
function withDefaultOptions<T extends any[], R>(
|
||||||
n: number,
|
n: number,
|
||||||
fn: (...args: T) => R,
|
fn: (...args: T) => R,
|
||||||
@ -121,10 +121,6 @@ export function strom(defaultOptions: TransformOptions = { objectMode: true }) {
|
|||||||
/**
|
/**
|
||||||
* Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk
|
* Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk
|
||||||
* must be a fully defined JSON string in utf8.
|
* must be a fully defined JSON string in utf8.
|
||||||
* @param format: @type SerializationFormats defaults SerializationFormats.utf8
|
|
||||||
* @param emitError: @type boolean Whether or not to emit an error when
|
|
||||||
* failing to parse. An error will automatically close the stream.
|
|
||||||
* Defaults to true.
|
|
||||||
*/
|
*/
|
||||||
parse,
|
parse,
|
||||||
|
|
||||||
@ -249,10 +245,9 @@ export function strom(defaultOptions: TransformOptions = { objectMode: true }) {
|
|||||||
/**
|
/**
|
||||||
* Composes multiple streams together. Writing occurs on first stream, piping occurs from last stream.
|
* Composes multiple streams together. Writing occurs on first stream, piping occurs from last stream.
|
||||||
* @param streams Array of streams to compose. Minimum of two.
|
* @param streams Array of streams to compose. Minimum of two.
|
||||||
* @param errorCallback a function that handles any error coming out of the pipeline
|
|
||||||
* @param options Transform stream options
|
* @param options Transform stream options
|
||||||
*/
|
*/
|
||||||
compose: withDefaultOptions(2, compose),
|
compose: withDefaultOptions(1, compose),
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Composes multiple streams together. Writing occurs on first stream, piping occurs from last stream.
|
* Composes multiple streams together. Writing occurs on first stream, piping occurs from last stream.
|
||||||
@ -263,10 +258,5 @@ export function strom(defaultOptions: TransformOptions = { objectMode: true }) {
|
|||||||
* @param options Writable stream options
|
* @param options Writable stream options
|
||||||
*/
|
*/
|
||||||
demux: withDefaultOptions(2, demux),
|
demux: withDefaultOptions(2, demux),
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a new strom instance overriding the defaults
|
|
||||||
*/
|
|
||||||
instance: strom,
|
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -4,7 +4,6 @@ import { SerializationFormats } from "./baseDefinitions";
|
|||||||
|
|
||||||
export function parse(
|
export function parse(
|
||||||
format: SerializationFormats = SerializationFormats.utf8,
|
format: SerializationFormats = SerializationFormats.utf8,
|
||||||
emitError: boolean = true,
|
|
||||||
): Transform {
|
): Transform {
|
||||||
const decoder = new StringDecoder(format);
|
const decoder = new StringDecoder(format);
|
||||||
return new Transform({
|
return new Transform({
|
||||||
@ -14,13 +13,9 @@ export function parse(
|
|||||||
try {
|
try {
|
||||||
const asString = decoder.write(chunk);
|
const asString = decoder.write(chunk);
|
||||||
// Using await causes parsing errors to be emitted
|
// Using await causes parsing errors to be emitted
|
||||||
callback(null, await JSON.parse(asString));
|
callback(undefined, await JSON.parse(asString));
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
if (emitError) {
|
|
||||||
callback(err);
|
callback(err);
|
||||||
} else {
|
|
||||||
callback();
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
@ -2,42 +2,21 @@ import { Transform, TransformOptions } from "stream";
|
|||||||
import { performance } from "perf_hooks";
|
import { performance } from "perf_hooks";
|
||||||
import { sleep } from "../helpers";
|
import { sleep } from "../helpers";
|
||||||
|
|
||||||
export enum Behavior {
|
|
||||||
BUFFER = 0,
|
|
||||||
DROP = 1,
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface RateOptions {
|
|
||||||
window?: number;
|
|
||||||
behavior?: Behavior;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function rate(
|
export function rate(
|
||||||
targetRate: number = 50,
|
targetRate: number = 50,
|
||||||
period: number = 1,
|
period: number = 1,
|
||||||
options?: TransformOptions & RateOptions,
|
options?: TransformOptions,
|
||||||
): Transform {
|
): Transform {
|
||||||
const deltaMS = ((1 / targetRate) * 1000) / period; // Skip a full period
|
const deltaMS = ((1 / targetRate) * 1000) / period; // Skip a full period
|
||||||
let total = 0;
|
let total = 0;
|
||||||
const window = options?.window || Infinity;
|
const start = performance.now();
|
||||||
const behavior = options?.behavior || Behavior.BUFFER;
|
|
||||||
let start = performance.now();
|
|
||||||
return new Transform({
|
return new Transform({
|
||||||
...options,
|
...options,
|
||||||
async transform(data, encoding, callback) {
|
async transform(data, encoding, callback) {
|
||||||
const now = performance.now();
|
const currentRate = (total / (performance.now() - start)) * 1000;
|
||||||
if (now - start >= window) {
|
|
||||||
start = now - window;
|
|
||||||
}
|
|
||||||
const currentRate = (total / (now - start)) * 1000;
|
|
||||||
if (targetRate && currentRate > targetRate) {
|
if (targetRate && currentRate > targetRate) {
|
||||||
if (behavior === Behavior.DROP) {
|
|
||||||
callback(undefined);
|
|
||||||
return;
|
|
||||||
} else {
|
|
||||||
await sleep(deltaMS);
|
await sleep(deltaMS);
|
||||||
}
|
}
|
||||||
}
|
|
||||||
total += 1;
|
total += 1;
|
||||||
callback(undefined, data);
|
callback(undefined, data);
|
||||||
},
|
},
|
||||||
|
@ -1,17 +1,3 @@
|
|||||||
export async function sleep(time: number): Promise<{} | null> {
|
export async function sleep(time: number): Promise<{} | null> {
|
||||||
return time > 0 ? new Promise(resolve => setTimeout(resolve, time)) : null;
|
return time > 0 ? new Promise(resolve => setTimeout(resolve, time)) : null;
|
||||||
}
|
}
|
||||||
|
|
||||||
export type AllStreams =
|
|
||||||
| NodeJS.ReadableStream
|
|
||||||
| NodeJS.ReadWriteStream
|
|
||||||
| NodeJS.WritableStream;
|
|
||||||
|
|
||||||
export function isReadable(
|
|
||||||
stream: AllStreams,
|
|
||||||
): stream is NodeJS.WritableStream {
|
|
||||||
return (
|
|
||||||
(stream as NodeJS.ReadableStream).pipe !== undefined &&
|
|
||||||
(stream as any).readable === true
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
src/index.ts
@ -1,29 +1,2 @@
|
|||||||
import { strom } from "./functions";
|
import mhysa from "./functions";
|
||||||
export * from "./utils";
|
export default mhysa;
|
||||||
export const {
|
|
||||||
fromArray,
|
|
||||||
map,
|
|
||||||
flatMap,
|
|
||||||
filter,
|
|
||||||
reduce,
|
|
||||||
split,
|
|
||||||
join,
|
|
||||||
replace,
|
|
||||||
parse,
|
|
||||||
stringify,
|
|
||||||
collect,
|
|
||||||
concat,
|
|
||||||
merge,
|
|
||||||
duplex,
|
|
||||||
child,
|
|
||||||
last,
|
|
||||||
batch,
|
|
||||||
unbatch,
|
|
||||||
rate,
|
|
||||||
parallelMap,
|
|
||||||
accumulator,
|
|
||||||
accumulatorBy,
|
|
||||||
compose,
|
|
||||||
demux,
|
|
||||||
instance,
|
|
||||||
} = strom();
|
|
||||||
|
@ -1,12 +0,0 @@
|
|||||||
import { Transform } from "stream";
|
|
||||||
|
|
||||||
export function collected(stream: Transform): any {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
stream.once("data", d => {
|
|
||||||
resolve(d);
|
|
||||||
});
|
|
||||||
stream.once("error", e => {
|
|
||||||
reject(e);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
@ -1 +0,0 @@
|
|||||||
export { collected } from "./collected";
|
|
@@ -1,9 +1,10 @@
 import test from "ava";
 import { expect } from "chai";
 import { Readable } from "stream";
-import { accumulator, accumulatorBy } from "../src";
+import mhysa from "../src";
 import { FlushStrategy } from "../src/functions/accumulator";
 import { performance } from "perf_hooks";
+const { accumulator, accumulatorBy } = mhysa({ objectMode: true });

 test.cb("accumulator() rolling", t => {
     t.plan(3);

@@ -13,14 +14,8 @@ test.cb("accumulator() rolling", t => {
     const source = new Readable({ objectMode: true });
-    const firstFlush = [
-        { ts: 0, key: "a" },
-        { ts: 1, key: "b" },
-    ];
-    const secondFlush = [
-        { ts: 2, key: "d" },
-        { ts: 3, key: "e" },
-    ];
+    const firstFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
+    const secondFlush = [{ ts: 2, key: "d" }, { ts: 3, key: "e" }];
     const thirdFlush = [{ ts: 4, key: "f" }];
     const flushes = [firstFlush, secondFlush, thirdFlush];

@@ -93,10 +88,7 @@ test.cb(
         "nonExistingKey",
         { objectMode: true },
     );
-    const input = [
-        { ts: 0, key: "a" },
-        { ts: 1, key: "b" },
-    ];
+    const input = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];

     source
         .pipe(accumulatorStream)

@@ -190,10 +182,7 @@ test.cb("accumulator() sliding", t => {
     const firstFlush = [{ ts: 0, key: "a" }];
-    const secondFlush = [
-        { ts: 0, key: "a" },
-        { ts: 1, key: "b" },
-    ];
+    const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
     const thirdFlush = [
         { ts: 0, key: "a" },
         { ts: 1, key: "b" },

@@ -243,10 +232,7 @@ test.cb("accumulator() sliding with key", t => {
     const firstFlush = [{ ts: 0, key: "a" }];
-    const secondFlush = [
-        { ts: 0, key: "a" },
-        { ts: 1, key: "b" },
-    ];
+    const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
     const thirdFlush = [
         { ts: 0, key: "a" },
         { ts: 1, key: "b" },

@@ -257,14 +243,8 @@ test.cb("accumulator() sliding with key", t => {
         { ts: 2, key: "c" },
         { ts: 3, key: "d" },
     ];
-    const fifthFlush = [
-        { ts: 3, key: "d" },
-        { ts: 5, key: "f" },
-    ];
-    const sixthFlush = [
-        { ts: 5, key: "f" },
-        { ts: 6, key: "g" },
-    ];
+    const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }];
+    const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }];

     const flushes = [
         firstFlush,

@@ -306,10 +286,7 @@ test.cb(
         "nonExistingKey",
         { objectMode: true },
     );
-    const input = [
-        { ts: 0, key: "a" },
-        { ts: 1, key: "b" },
-    ];
+    const input = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];

     source
         .pipe(accumulatorStream)

@@ -357,22 +334,10 @@ test.cb(
     const firstFlush = [{ ts: 0, key: "a" }];
-    const secondFlush = [
-        { ts: 0, key: "a" },
-        { ts: 2, key: "c" },
-    ];
-    const thirdFlush = [
-        { ts: 2, key: "c" },
-        { ts: 3, key: "d" },
-    ];
-    const fourthFlush = [
-        { ts: 3, key: "d" },
-        { ts: 5, key: "f" },
-    ];
-    const fifthFlush = [
-        { ts: 5, key: "f" },
-        { ts: 6, key: "g" },
-    ];
+    const secondFlush = [{ ts: 0, key: "a" }, { ts: 2, key: "c" }];
+    const thirdFlush = [{ ts: 2, key: "c" }, { ts: 3, key: "d" }];
+    const fourthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }];
+    const fifthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }];

     const flushes = [
         firstFlush,

@@ -504,10 +469,7 @@ test.cb("accumulatorBy() sliding", t => {
     const firstFlush = [{ ts: 0, key: "a" }];
-    const secondFlush = [
-        { ts: 0, key: "a" },
-        { ts: 1, key: "b" },
-    ];
+    const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
     const thirdFlush = [
         { ts: 0, key: "a" },
         { ts: 1, key: "b" },

@@ -518,14 +480,8 @@ test.cb("accumulatorBy() sliding", t => {
         { ts: 2, key: "c" },
         { ts: 3, key: "d" },
     ];
-    const fifthFlush = [
-        { ts: 3, key: "d" },
-        { ts: 5, key: "f" },
-    ];
-    const sixthFlush = [
-        { ts: 5, key: "f" },
-        { ts: 6, key: "g" },
-    ];
+    const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }];
+    const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }];

     const flushes = [
         firstFlush,
@@ -1,7 +1,8 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { batch, map, fromArray } from "../src";
+import mhysa from "../src";
+const { batch } = mhysa({ objectMode: true });

 test.cb("batch() batches chunks together", t => {
     t.plan(3);

@@ -38,7 +39,7 @@ test.cb("batch() yields a batch after the timeout", t => {
     const expectedElements = [["a", "b"], ["c"], ["d"]];
     let i = 0;
     source
-        .pipe(batch(3, 500))
+        .pipe(batch(3))
         .on("data", (element: string[]) => {
             t.deepEqual(element, expectedElements[i]);
             i++;

@@ -56,28 +57,3 @@ test.cb("batch() yields a batch after the timeout", t => {
         source.push(null);
     }, 600 * 2);
 });
-
-test.cb(
-    "batch() yields all input data even when the last element(s) dont make a full batch",
-    t => {
-        const data = [1, 2, 3, 4, 5, 6, 7];
-
-        fromArray([...data])
-            .pipe(batch(3))
-            .pipe(
-                map(d => {
-                    t.deepEqual(
-                        d,
-                        [data.shift(), data.shift(), data.shift()].filter(
-                            x => !!x,
-                        ),
-                    );
-                }),
-            )
-            .on("error", t.fail)
-            .on("finish", () => {
-                t.is(data.length, 0);
-                t.end();
-            });
-    },
-);
@@ -2,7 +2,8 @@ import * as cp from "child_process";
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { child } from "../src";
+import mhysa from "../src";
+const { child } = mhysa();

 test.cb(
     "child() allows easily writing to child process stdin and reading from its stdout",
@@ -1,7 +1,8 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { collect } from "../src";
+import mhysa from "../src";
+const { collect } = mhysa();

 test.cb(
     "collect() collects streamed elements into an array (object, flowing mode)",

@@ -58,7 +59,7 @@ test.cb(
     const source = new Readable({ objectMode: false });

     source
-        .pipe(collect({ objectMode: false }))
+        .pipe(collect())
        .on("data", collected => {
            expect(collected).to.deep.equal(Buffer.from("abc"));
            t.pass();
@@ -1,9 +1,9 @@
-import * as test from "ava";
-import { expect } from "chai";
-import { sleep } from "../src/helpers";
-import { Readable, Writable } from "stream";
-import { compose, map, fromArray } from "../src";
+const test = require("ava");
+const { expect } = require("chai");
+const { sleep } = require("../src/helpers");
+import mhysa from "../src";
 import { performance } from "perf_hooks";
+const { compose, map } = mhysa({ objectMode: true });

 test.cb("compose() chains two streams together in the correct order", t => {
     t.plan(3);

@@ -22,7 +22,10 @@ test.cb("compose() chains two streams together in the correct order", t => {
         return chunk;
     });

-    const composed = compose([first, second]);
+    const composed = compose(
+        [first, second],
+        { objectMode: true },
+    );

     composed.on("data", data => {
         expect(data).to.deep.equal(result[i]);

@@ -32,6 +35,12 @@ test.cb("compose() chains two streams together in the correct order", t => {
             t.end();
         }
     });
+    composed.on("error", err => {
+        t.end(err);
+    });
+    composed.on("end", () => {
+        t.end();
+    });

     const input = [
         { key: "a", visited: [] },

@@ -63,7 +72,10 @@ test.cb("piping compose() maintains correct order", t => {
         return chunk;
     });

-    const composed = compose([first, second]);
+    const composed = compose(
+        [first, second],
+        { objectMode: true },
+    );
     const third = map((chunk: Chunk) => {
         chunk.visited.push(3);
         return chunk;

@@ -97,25 +109,34 @@ test.cb("piping compose() maintains correct order", t => {
 });

 test("compose() writable length should be less than highWaterMark when handing writes", async t => {
-    t.plan(2);
+    t.plan(7);
     return new Promise(async (resolve, reject) => {
         interface Chunk {
             key: string;
             mapped: number[];
         }
-        const first = map(async (chunk: Chunk) => {
-            chunk.mapped.push(1);
-            return chunk;
-        });
-
-        const second = map(async (chunk: Chunk) => {
-            chunk.mapped.push(2);
-            return chunk;
-        });
-
-        const composed = compose([first, second], undefined, {
-            highWaterMark: 2,
-        });
+        const first = map(
+            async (chunk: Chunk) => {
+                chunk.mapped.push(1);
+                return chunk;
+            },
+            {
+                objectMode: true,
+            },
+        );
+
+        const second = map(
+            async (chunk: Chunk) => {
+                chunk.mapped.push(2);
+                return chunk;
+            },
+            { objectMode: true },
+        );
+
+        const composed = compose(
+            [first, second],
+            { objectMode: true, highWaterMark: 2 },
+        );
         composed.on("error", err => {
             reject();
         });

@@ -139,12 +160,19 @@ test("compose() writable length should be less than highWaterMark when handing writes", async t => {
             { key: "e", mapped: [] },
         ];

-        fromArray(input).pipe(composed);
+        for (const item of input) {
+            const res = composed.write(item);
+            expect(composed._writableState.length).to.be.at.most(2);
+            t.pass();
+            if (!res) {
+                await sleep(10);
+            }
+        }
     });
 });

 test("compose() should emit drain event ~rate * highWaterMark ms for every write that causes backpressure", async t => {
-    t.plan(2);
+    t.plan(7);
     const _rate = 100;
     const highWaterMark = 2;
     return new Promise(async (resolve, reject) => {

@@ -152,20 +180,29 @@ test("compose() should emit drain event ~rate * highWaterMark ms for every write
         key: string;
         mapped: number[];
     }
-        const first = map(async (chunk: Chunk) => {
-            await sleep(_rate);
-            chunk.mapped.push(1);
-            return chunk;
-        });
-
-        const second = map(async (chunk: Chunk) => {
-            chunk.mapped.push(2);
-            return chunk;
-        });
-
-        const composed = compose([first, second], undefined, {
-            highWaterMark,
-        });
+        const first = map(
+            async (chunk: Chunk) => {
+                await sleep(_rate);
+                chunk.mapped.push(1);
+                return chunk;
+            },
+            {
+                objectMode: true,
+            },
+        );
+
+        const second = map(
+            async (chunk: Chunk) => {
+                chunk.mapped.push(2);
+                return chunk;
+            },
+            { objectMode: true },
+        );
+
+        const composed = compose(
+            [first, second],
+            { objectMode: true, highWaterMark },
+        );
         composed.on("error", err => {
             reject();
         });

@@ -173,14 +210,19 @@ test("compose() should emit drain event ~rate * highWaterMark ms for every write
         composed.on("drain", () => {
             t.pass();
             expect(composed._writableState.length).to.be.equal(0);
+            expect(performance.now() - start).to.be.closeTo(
+                _rate * highWaterMark,
+                40,
+            );
         });

         composed.on("data", (chunk: Chunk) => {
-            t.deepEqual(chunk.mapped, [1, 2]);
+            pendingReads--;
+            if (pendingReads === 0) {
+                resolve();
+            }
         });
-        composed.on("finish", () => resolve());

         const input = [
             { key: "a", mapped: [] },
             { key: "b", mapped: [] },

@@ -188,7 +230,19 @@ test("compose() should emit drain event ~rate * highWaterMark ms for every write
             { key: "d", mapped: [] },
             { key: "e", mapped: [] },
         ];
-        fromArray(input).pipe(composed);
+        let start = performance.now();
+        let pendingReads = input.length;
+        start = performance.now();
+        for (const item of input) {
+            const res = composed.write(item);
+            expect(composed._writableState.length).to.be.at.most(highWaterMark);
+            t.pass();
+            if (!res) {
+                await sleep(_rate * highWaterMark * 2);
+                start = performance.now();
+            }
+        }
     });
 });

@@ -201,20 +255,29 @@ test.cb(
         key: string;
         mapped: number[];
     }
-        const first = map(async (chunk: Chunk) => {
-            await sleep(_rate);
-            chunk.mapped.push(1);
-            return chunk;
-        });
-
-        const second = map(async (chunk: Chunk) => {
-            chunk.mapped.push(2);
-            return chunk;
-        });
-
-        const composed = compose([first, second], undefined, {
-            highWaterMark: 5,
-        });
+        const first = map(
+            async (chunk: Chunk) => {
+                await sleep(_rate);
+                chunk.mapped.push(1);
+                return chunk;
+            },
+            {
+                objectMode: true,
+            },
+        );
+
+        const second = map(
+            async (chunk: Chunk) => {
+                chunk.mapped.push(2);
+                return chunk;
+            },
+            { objectMode: true },
+        );
+
+        const composed = compose(
+            [first, second],
+            { objectMode: true, highWaterMark: 5 },
+        );

         composed.on("error", err => {
             t.end(err);

@@ -222,6 +285,10 @@ test.cb(

         composed.on("drain", () => {
             expect(composed._writableState.length).to.be.equal(0);
+            expect(performance.now() - start).to.be.closeTo(
+                _rate * input.length,
+                50,
+            );
             t.pass();
         });

@@ -242,6 +309,7 @@ test.cb(
         input.forEach(item => {
             composed.write(item);
         });
+        const start = performance.now();
     },
 );

@@ -254,10 +322,15 @@ test.cb(
         key: string;
         mapped: number[];
     }
-        const first = map((chunk: Chunk) => {
-            chunk.mapped.push(1);
-            return chunk;
-        });
+        const first = map(
+            (chunk: Chunk) => {
+                chunk.mapped.push(1);
+                return chunk;
+            },
+            {
+                objectMode: true,
+            },
+        );

         const second = map(
             async (chunk: Chunk) => {

@@ -268,12 +341,13 @@ test.cb(
                 chunk.mapped.push(2);
                 return chunk;
             },
-            { highWaterMark: 1 },
+            { objectMode: true, highWaterMark: 1 },
         );

-        const composed = compose([first, second], undefined, {
-            highWaterMark: 5,
-        });
+        const composed = compose(
+            [first, second],
+            { objectMode: true, highWaterMark: 5 },
+        );
         composed.on("error", err => {
             t.end(err);
         });

@@ -324,6 +398,7 @@ test.cb(
                 return chunk;
             },
             {
+                objectMode: true,
                 highWaterMark: 2,
             },
         );

@@ -335,12 +410,13 @@ test.cb(
                 chunk.mapped.push("second");
                 return chunk;
             },
-            { highWaterMark: 2 },
+            { objectMode: true, highWaterMark: 2 },
         );

-        const composed = compose([first, second], undefined, {
-            highWaterMark: 5,
-        });
+        const composed = compose(
+            [first, second],
+            { objectMode: true, highWaterMark: 5 },
+        );
         composed.on("error", err => {
             t.end(err);
         });

@@ -382,20 +458,29 @@ test.cb(
         key: string;
         mapped: number[];
     }
-        const first = map(async (chunk: Chunk) => {
-            await sleep(_rate);
-            chunk.mapped.push(1);
-            return chunk;
-        });
-
-        const second = map(async (chunk: Chunk) => {
-            chunk.mapped.push(2);
-            return chunk;
-        });
-
-        const composed = compose([first, second], undefined, {
-            highWaterMark: 6,
-        });
+        const first = map(
+            async (chunk: Chunk) => {
+                await sleep(_rate);
+                chunk.mapped.push(1);
+                return chunk;
+            },
+            {
+                objectMode: true,
+            },
+        );
+
+        const second = map(
+            async (chunk: Chunk) => {
+                chunk.mapped.push(2);
+                return chunk;
+            },
+            { objectMode: true },
+        );
+
+        const composed = compose(
+            [first, second],
+            { objectMode: true, highWaterMark: 6 },
+        );

         composed.on("error", err => {
             t.end(err);

@@ -425,124 +510,3 @@ test.cb(
         });
     },
 );
-
-test.cb("compose() should be 'destroyable'", t => {
-    t.plan(3);
-    const _sleep = 100;
-    interface Chunk {
-        key: string;
-        mapped: number[];
-    }
-
-    const first = map(async (chunk: Chunk) => {
-        await sleep(_sleep);
-        chunk.mapped.push(1);
-        return chunk;
-    });
-
-    const second = map(async (chunk: Chunk) => {
-        chunk.mapped.push(2);
-        return chunk;
-    });
-
-    const composed = compose([first, second], (err: any) => {
-        t.pass();
-    });
-
-    const fakeSource = new Readable({
-        objectMode: true,
-        read() {
-            return;
-        },
-    });
-
-    const fakeSink = new Writable({
-        objectMode: true,
-        write(data, enc, cb) {
-            const cur = input.shift();
-            t.is(cur.key, data.key);
-            t.deepEqual(cur.mapped, [1, 2]);
-            if (cur.key === "a") {
-                composed.destroy();
-            }
-            cb();
-        },
-    });
-
-    composed.on("close", t.end);
-    fakeSource.pipe(composed).pipe(fakeSink);
-
-    const input = [
-        { key: "a", mapped: [] },
-        { key: "b", mapped: [] },
-        { key: "c", mapped: [] },
-        { key: "d", mapped: [] },
-        { key: "e", mapped: [] },
-    ];
-    fakeSource.push(input[0]);
-    fakeSource.push(input[1]);
-    fakeSource.push(input[2]);
-    fakeSource.push(input[3]);
-    fakeSource.push(input[4]);
-});
-
-test.cb("compose() `finish` and `end` propagates", t => {
-    interface Chunk {
-        key: string;
-        mapped: number[];
-    }
-
-    t.plan(8);
-    const first = map(async (chunk: Chunk) => {
-        chunk.mapped.push(1);
-        return chunk;
-    });
-
-    const second = map(async (chunk: Chunk) => {
-        chunk.mapped.push(2);
-        return chunk;
-    });
-
-    const composed = compose([first, second], undefined, {
-        highWaterMark: 3,
-    });
-
-    const fakeSource = new Readable({
-        objectMode: true,
-        read() {
-            return;
-        },
-    });
-    const sink = map((d: Chunk) => {
-        const curr = input.shift();
-        t.is(curr.key, d.key);
-        t.deepEqual(d.mapped, [1, 2]);
-    });
-
-    fakeSource.pipe(composed).pipe(sink);
-
-    fakeSource.on("end", () => {
-        t.pass();
-    });
-    composed.on("finish", () => {
-        t.pass();
-    });
-    composed.on("end", () => {
-        t.pass();
-        t.end();
-    });
-    sink.on("finish", () => {
-        t.pass();
-    });
-
-    const input = [
-        { key: "a", mapped: [] },
-        { key: "b", mapped: [] },
-        { key: "c", mapped: [] },
-        { key: "d", mapped: [] },
-        { key: "e", mapped: [] },
-    ];
-    fakeSource.push(input[0]);
-    fakeSource.push(input[1]);
-    fakeSource.push(null);
-});
@@ -1,7 +1,8 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { concat, collect } from "../src";
+import mhysa from "../src";
+const { concat, collect } = mhysa();

 test.cb(
     "concat() concatenates multiple readable streams (object, flowing mode)",

@@ -171,7 +172,7 @@ test.cb(
 test.cb("concat() concatenates empty list of readable streams", t => {
     t.plan(0);
     concat()
-        .pipe(collect({ objectMode: false }))
+        .pipe(collect())
         .on("data", _ => {
             t.fail();
         })
@@ -1,30 +1,21 @@
 import { Readable } from "stream";
 import test from "ava";
-import { batch as _batch, instance as strom } from "../src";
+import mhysa from "../src";

-const withDefaultOptions = strom({ objectMode: false });
+const withDefaultOptions = mhysa({ objectMode: true });
+const withoutOptions = mhysa();

-test("strom instances can have default options", t => {
+test("Mhysa instances can have default options", t => {
     let batch = withDefaultOptions.batch();
-    t.false(batch._readableState.objectMode);
-    t.false(batch._writableState.objectMode);
+    t.true(batch._readableState.objectMode);
+    t.true(batch._writableState.objectMode);
     batch = withDefaultOptions.batch(3);
-    t.false(batch._readableState.objectMode);
-    t.false(batch._writableState.objectMode);
+    t.true(batch._readableState.objectMode);
+    t.true(batch._writableState.objectMode);
     batch = withDefaultOptions.batch(3, 1);
-    t.false(batch._readableState.objectMode);
-    t.false(batch._writableState.objectMode);
-    batch = withDefaultOptions.batch(3, 1, { objectMode: true });
     t.true(batch._readableState.objectMode);
     t.true(batch._writableState.objectMode);
-    batch = _batch(3);
-    t.true(batch._readableState.objectMode);
-    t.true(batch._writableState.objectMode);
-    batch = _batch(3, 1);
-    t.true(batch._readableState.objectMode);
-    t.true(batch._writableState.objectMode);
-    batch = _batch(3, 1, { objectMode: false });
+    batch = withDefaultOptions.batch(3, 1, { objectMode: false });
     t.false(batch._readableState.objectMode);
     t.false(batch._writableState.objectMode);
 });
@@ -1,10 +1,11 @@
 import test from "ava";
 import { expect } from "chai";
-import { demux, map, fromArray } from "../src";
-import { Writable, Readable } from "stream";
-import * as sinon from "sinon";
-import { sleep } from "../src/helpers";
+import mhysa from "../src";
+import { Writable } from "stream";
+const sinon = require("sinon");
+const { sleep } = require("../src/helpers");
 import { performance } from "perf_hooks";
+const { demux, map } = mhysa();

 interface Test {
     key: string;

@@ -30,7 +31,7 @@ test.cb("demux() constructor should be called once per key", t => {
         return dest;
     });

-    const demuxed = demux(construct, "key", {});
+    const demuxed = demux(construct, "key", { objectMode: true });

     demuxed.on("finish", () => {
         expect(construct.withArgs("a").callCount).to.equal(1);

@@ -40,35 +41,8 @@ test.cb("demux() constructor should be called once per key", t => {
         t.end();
     });

-    fromArray(input).pipe(demuxed);
-});
-
-test.cb("demux() item written passed in constructor", t => {
-    t.plan(4);
-    const input = [
-        { key: "a", visited: [] },
-        { key: "b", visited: [] },
-        { key: "c", visited: [] },
-    ];
-    const construct = sinon.spy((destKey: string, item: any) => {
-        expect(item).to.deep.equal({ key: destKey, visited: [] });
-        t.pass();
-        const dest = map((chunk: Test) => {
-            chunk.visited.push(1);
-            return chunk;
-        });
-
-        return dest;
-    });
-
-    const demuxed = demux(construct, "key", {});
-
-    demuxed.on("finish", () => {
-        t.pass();
-        t.end();
-    });
-
-    fromArray(input).pipe(demuxed);
+    input.forEach(event => demuxed.write(event));
+    demuxed.end();
 });

 test.cb("demux() should send input through correct pipeline", t => {

@@ -92,7 +66,7 @@ test.cb("demux() should send input through correct pipeline", t => {
         return dest;
     };

-    const demuxed = demux(construct, "key", {});
+    const demuxed = demux(construct, "key", { objectMode: true });

     demuxed.on("finish", () => {
         pipelineSpies["a"].getCalls().forEach(call => {

@@ -110,7 +84,8 @@ test.cb("demux() should send input through correct pipeline", t => {
         t.end();
     });

-    fromArray(input).pipe(demuxed);
+    input.forEach(event => demuxed.write(event));
+    demuxed.end();
 });

 test.cb("demux() constructor should be called once per key using keyBy", t => {

@@ -133,7 +108,7 @@ test.cb("demux() constructor should be called once per key using keyBy", t => {
         return dest;
     });

-    const demuxed = demux(construct, item => item.key, {});
+    const demuxed = demux(construct, item => item.key, { objectMode: true });

     demuxed.on("finish", () => {
         expect(construct.withArgs("a").callCount).to.equal(1);

@@ -143,7 +118,8 @@ test.cb("demux() constructor should be called once per key using keyBy", t => {
         t.end();
     });

-    fromArray(input).pipe(demuxed);
+    input.forEach(event => demuxed.write(event));
+    demuxed.end();
 });

 test.cb("demux() should send input through correct pipeline using keyBy", t => {

@@ -167,7 +143,7 @@ test.cb("demux() should send input through correct pipeline using keyBy", t => {
         return dest;
     };

-    const demuxed = demux(construct, item => item.key, {});
+    const demuxed = demux(construct, item => item.key, { objectMode: true });

     demuxed.on("finish", () => {
         pipelineSpies["a"].getCalls().forEach(call => {

@@ -185,10 +161,11 @@ test.cb("demux() should send input through correct pipeline using keyBy", t => {
         t.end();
     });

-    fromArray(input).pipe(demuxed);
+    input.forEach(event => demuxed.write(event));
+    demuxed.end();
 });

-test("demux() write should return false and emit drain if more than highWaterMark items are buffered", t => {
+test("demux() write should return false after if it has >= highWaterMark items buffered and drain should be emitted", t => {
     return new Promise(async (resolve, reject) => {
         t.plan(7);
         interface Chunk {

@@ -212,7 +189,7 @@ test("demux() write should return false and emit drain if more than highWaterMark items are buffered", t => {
                 await sleep(slowProcessorSpeed);
                 return { ...chunk, mapped: [1] };
             },
-            { highWaterMark: 1 },
+            { highWaterMark: 1, objectMode: true },
         );

         first.on("data", chunk => {

@@ -228,6 +205,7 @@ test("demux() write should return false and emit drain if more than highWaterMark items are buffered", t => {
         };

         const _demux = demux(construct, "key", {
+            objectMode: true,
             highWaterMark,
         });

@@ -251,7 +229,7 @@ test("demux() write should return false and emit drain if more than highWaterMark items are buffered", t => {
         });
     });
 });

-test("demux() should emit one drain event after slowProcessorSpeed * highWaterMark ms when first stream is bottleneck", t => {
+test("demux() should emit one drain event after slowProcessorSpeed * highWaterMark ms", t => {
     return new Promise(async (resolve, reject) => {
         t.plan(7);
         interface Chunk {

@@ -277,7 +255,7 @@ test("demux() should emit one drain event after slowProcessorSpeed * highWaterMark ms", t => {
                 chunk.mapped.push(1);
                 return chunk;
             },
-            { highWaterMark: 1 },
+            { highWaterMark: 1, objectMode: true },
         );

         first.on("data", () => {

@@ -290,6 +268,7 @@ test("demux() should emit one drain event after slowProcessorSpeed * highWaterMark ms", t => {
             return first;
         };
         const _demux = demux(construct, "key", {
+            objectMode: true,
             highWaterMark,
         });
         _demux.on("error", err => {

@@ -317,7 +296,7 @@ test("demux() should emit one drain event after slowProcessorSpeed * highWaterMark ms", t => {

 test("demux() should emit one drain event when writing 6 items with highWaterMark of 5", t => {
     return new Promise(async (resolve, reject) => {
-        t.plan(1);
+        t.plan(7);
         interface Chunk {
             key: string;
             mapped: number[];

@@ -339,11 +318,12 @@ test("demux() should emit one drain event when writing 6 items with highWaterMark of 5", t => {
                 chunk.mapped.push(2);
                 return chunk;
             },
-            { highWaterMark: 1 },
+            { highWaterMark: 1, objectMode: true },
         );

         first.on("data", () => {
             pendingReads--;
+            t.pass();
             if (pendingReads === 0) {
                 resolve();
             }

@@ -351,6 +331,7 @@ test("demux() should emit one drain event when writing 6 items with highWaterMark of 5", t => {
             return first;
         };
         const _demux = demux(construct, "key", {
+            objectMode: true,
             highWaterMark: 5,
         });

@@ -374,10 +355,9 @@ test("demux() should emit one drain event when writing 6 items with highWaterMark of 5", t => {
         });
     });
 });

-test.cb(
-    "demux() should emit drain event when second stream is bottleneck after (highWaterMark - 2) * slowProcessorSpeed ms",
+test.cb.only(
+    "demux() should emit drain event when third stream is bottleneck",
     t => {
-        // ie) first two items are pushed directly into first and second streams (highWaterMark - 2 remain in demux)
         t.plan(8);
         const slowProcessorSpeed = 100;
         const highWaterMark = 5;

@@ -403,7 +383,7 @@ test.cb(
                 chunk.mapped.push(1);
                 return chunk;
             },
-            { highWaterMark: 1 },
+            { objectMode: true, highWaterMark: 1 },
         );

         const second = map(

@@ -412,23 +392,25 @@ test.cb(
                 chunk.mapped.push(2);
                 return chunk;
             },
-            { highWaterMark: 1 },
+            { objectMode: true, highWaterMark: 1 },
         );

         first.pipe(second).pipe(sink);
         return first;
     };
     const _demux = demux(construct, () => "a", {
+        objectMode: true,
         highWaterMark,
     });
     _demux.on("error", err => {
         t.end(err);
     });

+    // This event should be received after at least 5 * slowProcessorSpeed (two are read immediately by first and second, 5 remaining in demux before drain event)
     _demux.on("drain", () => {
         expect(_demux._writableState.length).to.be.equal(0);
         expect(performance.now() - start).to.be.greaterThan(
-            slowProcessorSpeed * 3,
+            slowProcessorSpeed * (input.length - 2),
         );
         t.pass();
     });

@@ -445,14 +427,15 @@ test.cb(
         let pendingReads = input.length;

         const start = performance.now();
-        fromArray(input).pipe(_demux);
+        input.forEach(item => {
+            _demux.write(item);
+        });
     },
 );

 test.cb(
-    "demux() should emit drain event when third stream is bottleneck",
+    "demux() should emit drain event when second stream is bottleneck",
     t => {
-        // @TODO investigate why drain is emitted after slowProcessorSpeed
         t.plan(8);
         const slowProcessorSpeed = 100;
         const highWaterMark = 5;

@@ -463,7 +446,7 @@ test.cb(
         const sink = new Writable({
             objectMode: true,
             write(chunk, encoding, cb) {
-                expect(chunk.mapped).to.deep.equal([1, 2, 3]);
+                expect(chunk.mapped).to.deep.equal([1, 2]);
                 t.pass();
                 pendingReads--;
                 if (pendingReads === 0) {

@@ -478,14 +461,14 @@ test.cb(
                 chunk.mapped.push(1);
                 return chunk;
             },
-            { highWaterMark: 1 },
+            { objectMode: true, highWaterMark: 1 },
         );
         const second = map(
             (chunk: Chunk) => {
                 chunk.mapped.push(2);
                 return chunk;
             },
-            { highWaterMark: 1 },
+            { objectMode: true, highWaterMark: 1 },
         );

         const third = map(

@@ -494,7 +477,7 @@ test.cb(
                 chunk.mapped.push(3);
                 return chunk;
             },
-            { highWaterMark: 1 },
+            { objectMode: true, highWaterMark: 1 },
         );

         first

@@ -504,16 +487,18 @@ test.cb(
         return first;
     };
     const _demux = demux(construct, () => "a", {
+        objectMode: true,
         highWaterMark,
     });
     _demux.on("error", err => {
         t.end(err);
     });

+    // This event should be received after at least 3 * slowProcessorSpeed (two are read immediately by first and second, 3 remaining in demux before drain event)
     _demux.on("drain", () => {
         expect(_demux._writableState.length).to.be.equal(0);
         expect(performance.now() - start).to.be.greaterThan(
-            slowProcessorSpeed,
+            slowProcessorSpeed * (input.length - 4),
         );
         t.pass();
     });

@@ -530,7 +515,9 @@ test.cb(
         let pendingReads = input.length;

         const start = performance.now();
-        fromArray(input).pipe(_demux);
+        input.forEach(item => {
+            _demux.write(item);
+        });
     },
 );

@@ -549,21 +536,10 @@ test("demux() should be blocked by slowest pipeline", t => {
                 chunk.mapped.push(1);
                 return chunk;
             },
-            { highWaterMark: 1 },
+            { objectMode: true, highWaterMark: 1 },
         );

-        return first;
-    };
-
-    const _demux = demux(construct, "key", {
-        highWaterMark: 1,
-    });
-
-    _demux.on("error", err => {
-        reject(err);
-    });
-
-    _demux.on("data", async chunk => {
+        first.on("data", chunk => {
             pendingReads--;
             if (chunk.key === "b") {
                 expect(performance.now() - start).to.be.greaterThan(

@@ -574,12 +550,22 @@ test("demux() should be blocked by slowest pipeline", t => {
                 resolve();
             }
         });
+        return first;
+    };
+    const _demux = demux(construct, "key", {
+        objectMode: true,
+        highWaterMark: 1,
+    });
+    _demux.on("error", err => {
+        reject(err);
+    });

     const input = [
         { key: "a", mapped: [] },
         { key: "a", mapped: [] },
         { key: "c", mapped: [] },
         { key: "c", mapped: [] },
+        { key: "c", mapped: [] },
         { key: "b", mapped: [] },
     ];

@@ -598,266 +584,74 @@ test("demux() should be blocked by slowest pipeline", t => {
     });
 });

-test.cb("Demux should remux to sink", t => {
-    t.plan(6);
-    let i = 0;
-    const input = [
-        { key: "a", visited: [] },
-        { key: "b", visited: [] },
-        { key: "a", visited: [] },
-        { key: "c", visited: [] },
-        { key: "a", visited: [] },
-        { key: "b", visited: [] },
-    ];
-    const result = [
-        { key: "a", visited: ["a"] },
-        { key: "b", visited: ["b"] },
-        { key: "a", visited: ["a"] },
-        { key: "c", visited: ["c"] },
-        { key: "a", visited: ["a"] },
-        { key: "b", visited: ["b"] },
-    ];
-    const construct = (destKey: string) => {
-        const dest = map((chunk: any) => {
-            chunk.visited.push(destKey);
-            return chunk;
-        });
-
-        return dest;
-    };
-
-    const sink = map(d => {
-        t.deepEqual(d, result[i]);
-        i++;
-        if (i === input.length) {
-            t.end();
-        }
-    });
-
-    const demuxed = demux(construct, "key", {});
-
-    fromArray(input)
-        .pipe(demuxed)
-        .pipe(sink);
-});
-
-test.cb("Demux should send data events", t => {
-    t.plan(6);
-    let i = 0;
-    const input = [
-        { key: "a", visited: [] },
-        { key: "b", visited: [] },
-        { key: "a", visited: [] },
-        { key: "c", visited: [] },
-        { key: "a", visited: [] },
-        { key: "b", visited: [] },
-    ];
-    const result = [
-        { key: "a", visited: ["a"] },
-        { key: "b", visited: ["b"] },
-        { key: "a", visited: ["a"] },
-        { key: "c", visited: ["c"] },
-        { key: "a", visited: ["a"] },
-        { key: "b", visited: ["b"] },
-    ];
-    const construct = (destKey: string) => {
-        const dest = map((chunk: any) => {
-            chunk.visited.push(destKey);
-            return chunk;
-        });
-
-        return dest;
-    };
-
-    const demuxed = demux(construct, "key", {});
-
-    fromArray(input).pipe(demuxed);
-
-    demuxed.on("data", d => {
-        t.deepEqual(d, result[i]);
-        i++;
-        if (i === input.length) {
-            t.end();
-        }
-    });
-});
-
-test.cb("demux() `finish` and `end` propagates", t => {
-    interface Chunk {
-        key: string;
-        mapped: number[];
-    }
-    t.plan(9);
-
-    const construct = (destKey: string) => {
-        const dest = map((chunk: any) => {
-            chunk.mapped.push(destKey);
-            return chunk;
-        });
-        return dest;
-    };
-
-    const _demux = demux(construct, "key", {
-        highWaterMark: 3,
-    });
-
-    const fakeSource = new Readable({
-        objectMode: true,
-        read() {
-            return;
-        },
-    });
-
-    const sink = map((d: any) => {
-        const curr = input.shift();
-        t.is(curr.key, d.key);
-        t.deepEqual(d.mapped, [d.key]);
-    });
-
-    fakeSource.pipe(_demux).pipe(sink);
-
-    fakeSource.on("end", () => {
-        t.pass();
-    });
-    _demux.on("finish", () => {
-        t.pass();
-    });
-    _demux.on("unpipe", () => {
-        t.pass();
-    });
-    _demux.on("end", () => {
-        t.pass();
-        t.end();
-    });
-    sink.on("finish", () => {
-        t.pass();
-    });
-
-    const input = [
-        { key: "a", mapped: [] },
-        { key: "b", mapped: [] },
-        { key: "a", mapped: [] },
-        { key: "a", mapped: [] },
-        { key: "b", mapped: [] },
-    ];
-    fakeSource.push(input[0]);
-    fakeSource.push(input[1]);
-    fakeSource.push(null);
-});
-
-test.cb("demux() `unpipe` propagates", t => {
-    interface Chunk {
-        key: string;
-        mapped: number[];
-    }
-
-    t.plan(7);
-
-    const construct = (destKey: string) => {
-        const dest = map((chunk: any) => {
-            chunk.mapped.push(destKey);
-            return chunk;
-        });
-        return dest;
-    };
-
-    const _demux = demux(construct, "key", {
-        highWaterMark: 3,
-    });
-
-    const fakeSource = new Readable({
-        objectMode: true,
-        read() {
-            return;
-        },
-    });
-
-    const sink = map((d: any) => {
-        const curr = input.shift();
-        t.is(curr.key, d.key);
-        t.deepEqual(d.mapped, [d.key]);
-    });
-
-    fakeSource.pipe(_demux).pipe(sink);
-
-    _demux.on("unpipe", () => {
-        t.pass();
-    });
-
-    sink.on("unpipe", () => {
-        t.pass();
-    });
-
-    sink.on("finish", () => {
-        t.pass();
-        t.end();
-    });
-
-    const input = [
-        { key: "a", mapped: [] },
-        { key: "b", mapped: [] },
-        { key: "a", mapped: [] },
-        { key: "a", mapped: [] },
-        { key: "b", mapped: [] },
-    ];
-    fakeSource.push(input[0]);
-    fakeSource.push(input[1]);
-    fakeSource.push(null);
-});
-
-test.cb("demux() should be 'destroyable'", t => {
-    t.plan(2);
-    const _sleep = 100;
-    interface Chunk {
-        key: string;
-        mapped: string[];
-    }
-
-    const construct = (destKey: string) => {
-        const first = map(async (chunk: Chunk) => {
-            await sleep(_sleep);
-            chunk.mapped.push(destKey);
-            return chunk;
-        });
-
-        return first;
-    };
-
-    const _demux = demux(construct, "key");
-
-    const fakeSource = new Readable({
-        objectMode: true,
-        read() {
-            return;
-        },
-    });
-
-    const fakeSink = new Writable({
-        objectMode: true,
-        write(data, enc, cb) {
-            const cur = input.shift();
-            t.is(cur.key, data.key);
-            t.deepEqual(cur.mapped, ["a"]);
-            if (cur.key === "a") {
-                _demux.destroy();
-            }
-            cb();
-        },
-    });
-
-    _demux.on("close", t.end);
-    fakeSource.pipe(_demux).pipe(fakeSink);
-
-    const input = [
-        { key: "a", mapped: [] },
-        { key: "b", mapped: [] },
-        { key: "c", mapped: [] },
-        { key: "d", mapped: [] },
-        { key: "e", mapped: [] },
-    ];
-    fakeSource.push(input[0]);
-    fakeSource.push(input[1]);
-    fakeSource.push(input[2]);
-    fakeSource.push(input[3]);
-    fakeSource.push(input[4]);
-});
+test("demux() should emit drain event when second stream in pipeline is bottleneck", t => {
+    t.plan(5);
+    const highWaterMark = 3;
+    return new Promise(async (resolve, reject) => {
+        interface Chunk {
+            key: string;
+            mapped: number[];
+        }
+        const sink = new Writable({
+            objectMode: true,
+            write(chunk, encoding, cb) {
+                expect(chunk.mapped).to.deep.equal([1, 2]);
+                t.pass();
+                cb();
+                if (pendingReads === 0) {
+                    resolve();
+                }
+            },
+        });
+
+        const construct = (destKey: string) => {
+            const first = map(
+                (chunk: Chunk) => {
+                    expect(first._readableState.length).to.be.at.most(2);
+                    chunk.mapped.push(1);
+                    return chunk;
+                },
+                { objectMode: true, highWaterMark: 2 },
+            );
+
+            const second = map(
+                async (chunk: Chunk) => {
+                    await sleep(100);
+                    chunk.mapped.push(2);
+                    expect(second._writableState.length).to.be.equal(1);
+                    pendingReads--;
+                    return chunk;
+                },
+                { objectMode: true, highWaterMark: 1 },
+            );
+
+            first.pipe(second).pipe(sink);
+            return first;
+        };
+        const _demux = demux(construct, "key", {
+            objectMode: true,
+            highWaterMark,
+        });
+        _demux.on("error", err => {
+            reject();
+        });
+
+        _demux.on("drain", () => {
+            expect(_demux._writableState.length).to.be.equal(0);
+            t.pass();
+        });
+
+        const input = [
+            { key: "a", mapped: [] },
+            { key: "a", mapped: [] },
+            { key: "a", mapped: [] },
+            { key: "a", mapped: [] },
+        ];
+        let pendingReads = input.length;
+
+        input.forEach(item => {
+            _demux.write(item);
+        });
+    });
+});
@@ -2,7 +2,8 @@ import * as cp from "child_process";
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { duplex } from "../src";
+import mhysa from "../src";
+const { duplex } = mhysa();

 test.cb(
     "duplex() combines a writable and readable stream into a ReadWrite stream",
@@ -1,7 +1,8 @@
 import test from "ava";
 import { expect } from "chai";
 import { Readable } from "stream";
-import { filter } from "../src";
+import mhysa from "../src";
+const { filter } = mhysa();

 test.cb("filter() filters elements synchronously", t => {
     t.plan(2);
@@ -1,7 +1,8 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { flatMap } from "../src";
+import mhysa from "../src";
+const { flatMap } = mhysa({ objectMode: true });

 test.cb("flatMap() maps elements synchronously", t => {
     t.plan(6);
@@ -1,6 +1,7 @@
 import test from "ava";
 import { expect } from "chai";
-import { fromArray } from "../src";
+import mhysa from "../src";
+const { fromArray } = mhysa();

 test.cb("fromArray() streams array elements in flowing mode", t => {
     t.plan(3);
@@ -1,7 +1,8 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { join } from "../src";
+import mhysa from "../src";
+const { join } = mhysa();

 test.cb("join() joins chunks using the specified separator", t => {
     t.plan(9);
@@ -1,7 +1,8 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { last } from "../src";
+import mhysa from "../src";
+const { last } = mhysa();

 test("last() resolves to the last chunk streamed by the given readable stream", async t => {
     const source = new Readable({ objectMode: true });
@@ -1,7 +1,8 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { map } from "../src";
+import mhysa from "../src";
+const { map } = mhysa();

 test.cb("map() maps elements synchronously", t => {
     t.plan(3);
@@ -1,7 +1,8 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { merge } from "../src";
+import mhysa from "../src";
+const { merge } = mhysa();

 test.cb(
     "merge() merges multiple readable streams in chunk arrival order",
|
@ -2,8 +2,9 @@ import { Readable } from "stream";
|
|||||||
import { performance } from "perf_hooks";
|
import { performance } from "perf_hooks";
|
||||||
import test from "ava";
|
import test from "ava";
|
||||||
import { expect } from "chai";
|
import { expect } from "chai";
|
||||||
import { parallelMap } from "../src";
|
import mhysa from "../src";
|
||||||
import { sleep } from "../src/helpers";
|
import { sleep } from "../src/helpers";
|
||||||
|
const { parallelMap } = mhysa({ objectMode: true });
|
||||||
|
|
||||||
test.cb("parallelMap() parallel mapping", t => {
|
test.cb("parallelMap() parallel mapping", t => {
|
||||||
t.plan(6);
|
t.plan(6);
|
||||||
|
@@ -1,7 +1,8 @@
-import { Readable, finished } from "stream";
+import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { parse } from "../src";
+import mhysa from "../src";
+
+const { parse } = mhysa();
 
 test.cb("parse() parses the streamed elements as JSON", t => {
     t.plan(3);
@@ -25,17 +26,13 @@ test.cb("parse() parses the streamed elements as JSON", t => {
 });
 
 test.cb("parse() emits errors on invalid JSON", t => {
-    t.plan(1);
+    t.plan(2);
     const source = new Readable({ objectMode: true });
 
     source
         .pipe(parse())
         .resume()
-        .on("error", (d: any) => {
-            t.pass();
-            t.end();
-        })
-        .on("end", t.fail);
+        .on("error", () => t.pass())
+        .on("end", t.end);
 
     source.push("{}");
     source.push({});
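The rewritten test expects two assertions: `parse()` should emit an `error` for the non-string chunk, and the pipeline should still reach `end`. A sketch of that consumption pattern, mirroring the updated test body (a no-op `read()` is added here so the sketch runs standalone, and the log messages are illustrative):

```ts
import { Readable } from "stream";
import mhysa from "../src";

const { parse } = mhysa();

const source = new Readable({ objectMode: true, read: () => undefined });

source
    .pipe(parse())
    .resume()
    .on("error", (err: Error) => console.error("invalid JSON:", err)) // fires for the {} chunk
    .on("end", () => console.log("done"));

source.push("{}");   // valid JSON string, parses cleanly
source.push({});     // not a string, triggers the error path
source.push(null);
```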
@@ -1,11 +1,12 @@
 import { Readable } from "stream";
 import { performance } from "perf_hooks";
 import test from "ava";
-import { rate } from "../src";
-import { sleep } from "../src/helpers";
+import { expect } from "chai";
+import mhysa from "../src";
+
+const { rate } = mhysa({ objectMode: true });
 
 test.cb("rate() sends data at a rate of 150", t => {
-    t.plan(15);
+    t.plan(5);
     const targetRate = 150;
     const source = new Readable({ objectMode: true });
     const expectedElements = ["a", "b", "c", "d", "e"];
@@ -14,10 +15,10 @@ test.cb("rate() sends data at a rate of 150", t => {
 
     source
         .pipe(rate(targetRate))
-        .on("data", (element: string) => {
+        .on("data", (element: string[]) => {
             const currentRate = (i / (performance.now() - start)) * 1000;
-            t.is(element, expectedElements[i]);
-            t.true(currentRate <= targetRate);
+            expect(element).to.deep.equal(expectedElements[i]);
+            expect(currentRate).lessThan(targetRate);
             t.pass();
             i++;
         })
@@ -33,7 +34,7 @@ test.cb("rate() sends data at a rate of 150", t => {
 });
 
 test.cb("rate() sends data at a rate of 50", t => {
-    t.plan(15);
+    t.plan(5);
     const targetRate = 50;
     const source = new Readable({ objectMode: true });
     const expectedElements = ["a", "b", "c", "d", "e"];
@@ -42,10 +43,10 @@ test.cb("rate() sends data at a rate of 50", t => {
 
     source
         .pipe(rate(targetRate))
-        .on("data", (element: string) => {
+        .on("data", (element: string[]) => {
             const currentRate = (i / (performance.now() - start)) * 1000;
-            t.is(element, expectedElements[i]);
-            t.true(currentRate <= targetRate);
+            expect(element).to.deep.equal(expectedElements[i]);
+            expect(currentRate).lessThan(targetRate);
             t.pass();
             i++;
         })
@@ -61,7 +62,7 @@ test.cb("rate() sends data at a rate of 50", t => {
 });
 
 test.cb("rate() sends data at a rate of 1", t => {
-    t.plan(15);
+    t.plan(5);
     const targetRate = 1;
     const source = new Readable({ objectMode: true });
     const expectedElements = ["a", "b", "c", "d", "e"];
@@ -70,10 +71,10 @@ test.cb("rate() sends data at a rate of 1", t => {
 
     source
         .pipe(rate(targetRate))
-        .on("data", (element: string) => {
+        .on("data", (element: string[]) => {
             const currentRate = (i / (performance.now() - start)) * 1000;
-            t.is(element, expectedElements[i]);
-            t.true(currentRate <= targetRate);
+            expect(element).to.deep.equal(expectedElements[i]);
+            expect(currentRate).lessThan(targetRate);
             t.pass();
             i++;
         })
@@ -87,41 +88,3 @@ test.cb("rate() sends data at a rate of 1", t => {
     source.push("e");
     source.push(null);
 });
-
-test("rate() sends data at a rate of 1 and drops extra messages", async t => {
-    t.plan(9);
-    const targetRate = 1;
-    const source = new Readable({
-        objectMode: true,
-        read: () => {
-            return;
-        },
-    });
-    const expectedElements = ["a", "b", "e"];
-    const start = performance.now();
-    let i = 0;
-
-    let plan = 0;
-    source
-        .pipe(rate(targetRate, 1, { behavior: 1 }))
-        .on("data", (element: string) => {
-            const currentRate = (i / (performance.now() - start)) * 1000;
-            t.is(element, expectedElements[i]);
-            t.true(currentRate <= targetRate);
-            plan++;
-            t.pass();
-            i++;
-        })
-        .on("error", t.fail)
-        .on("end", t.fail);
-
-    source.push("a");
-    await sleep(1000);
-    source.push("b");
-    source.push("c");
-    source.push("d");
-    await sleep(1000);
-    source.push("e");
-    await sleep(1000);
-    source.push(null);
-});
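Across the three remaining rate() tests the assertions move from AVA's `t.is`/`t.true` to chai's `expect`, while the throughput check itself is unchanged. A condensed sketch of that check, using the same names as the test code above (console output stands in for the assertions, and a no-op `read()` keeps the sketch self-contained):

```ts
import { Readable } from "stream";
import { performance } from "perf_hooks";
import mhysa from "../src";

const { rate } = mhysa({ objectMode: true });

const targetRate = 50;
const expectedElements = ["a", "b", "c", "d", "e"];
const source = new Readable({ objectMode: true, read: () => undefined });
const start = performance.now();
let i = 0;

source
    .pipe(rate(targetRate))
    .on("data", (element: string) => {
        // elements per second observed so far
        const currentRate = (i / (performance.now() - start)) * 1000;
        console.log(element === expectedElements[i], currentRate <= targetRate);
        i++;
    });

expectedElements.forEach(e => source.push(e));
source.push(null);
```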
@@ -1,7 +1,8 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { reduce } from "../src";
+import mhysa from "../src";
+
+const { reduce } = mhysa({ objectMode: true });
 
 test.cb("reduce() reduces elements synchronously", t => {
     t.plan(1);
@@ -1,7 +1,8 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { replace } from "../src";
+import mhysa from "../src";
+
+const { replace } = mhysa();
 
 test.cb(
     "replace() replaces occurrences of the given string in the streamed elements with the specified " +
@@ -1,7 +1,8 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { split } from "../src";
+import mhysa from "../src";
+
+const { split } = mhysa();
 
 test.cb("split() splits chunks using the default separator (\\n)", t => {
     t.plan(5);
@@ -1,7 +1,8 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { stringify } from "../src";
+import mhysa from "../src";
+
+const { stringify } = mhysa();
 
 test.cb("stringify() stringifies the streamed elements as JSON", t => {
     t.plan(4);
@@ -1,7 +1,8 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { unbatch, batch } from "../src";
+import mhysa from "../src";
+
+const { unbatch, batch } = mhysa({ objectMode: true });
 
 test.cb("unbatch() unbatches", t => {
     t.plan(3);
@@ -1,8 +0,0 @@
-import test from "ava";
-import { collected } from "../../src/utils";
-import { fromArray, collect } from "../../src";
-
-test("collected returns a promise for the first data point", async t => {
-    const data = collected(fromArray([1, 2, 3, 4]).pipe(collect()));
-    t.deepEqual(await data, [1, 2, 3, 4]);
-});