Refactoring

Jerry Kurian
2019-08-16 09:02:54 -04:00
parent 505fefeeb5
commit faac6134af
48 changed files with 84 additions and 72 deletions

View File

@@ -1,7 +1,10 @@
import { Transform } from "stream";
-import { AccumulatorByIteratee, FlushStrategy } from "./definitions";
-import { TransformOptions } from "../baseDefinitions";
-import { batch } from "../../index";
+import {
+AccumulatorByIteratee,
+FlushStrategy,
+TransformOptions,
+} from "./baseDefinitions";
+import { batch } from ".";
function _accumulator<T>(
accumulateBy: (data: T, buffer: T[], stream: Transform) => void,
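For context, a minimal sketch of driving the refactored accumulator(), condensed from the rolling-flush test removed below; the relative import paths assume a file alongside the package index:

import { Readable } from "stream";
import { accumulator } from ".";
import { FlushStrategy } from "./baseDefinitions";

const source = new Readable({ objectMode: true });
source
    // Flush the internal buffer every 2 chunks (rolling strategy)
    .pipe(accumulator(2, undefined, FlushStrategy.rolling))
    .on("data", flush => console.log(flush)); // two flushes: 2 chunks, then 1
[{ ts: 0, key: "a" }, { ts: 1, key: "b" }, { ts: 2, key: "c" }].forEach(item => {
    source.push(item);
});
source.push(null);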

View File

@@ -1,577 +0,0 @@
import test from "ava";
import { expect } from "chai";
import { Readable } from "stream";
import { accumulator, accumulatorBy } from ".";
import { FlushStrategy } from "./definitions";
test.cb("accumulator() rolling", t => {
t.plan(3);
let chunkIndex = 0;
interface TestObject {
ts: number;
key: string;
}
const source = new Readable({ objectMode: true });
const firstFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
const secondFlush = [{ ts: 2, key: "d" }, { ts: 3, key: "e" }];
const thirdFlush = [{ ts: 4, key: "f" }];
const flushes = [firstFlush, secondFlush, thirdFlush];
source
.pipe(accumulator(2, undefined, FlushStrategy.rolling))
.on("data", (flush: TestObject[]) => {
t.deepEqual(flush, flushes[chunkIndex]);
chunkIndex++;
})
.on("error", (e: any) => {
t.end(e);
})
.on("end", () => {
t.end();
});
[...firstFlush, ...secondFlush, ...thirdFlush].forEach(item => {
source.push(item);
});
source.push(null);
});
test.cb("accumulator() rolling with key", t => {
t.plan(2);
let chunkIndex = 0;
interface TestObject {
ts: number;
key: string;
}
const source = new Readable({ objectMode: true });
const firstFlush = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
{ ts: 2, key: "d" },
];
const secondFlush = [{ ts: 3, key: "e" }];
const flushes = [firstFlush, secondFlush];
source
.pipe(accumulator(3, undefined, FlushStrategy.rolling, "ts"))
.on("data", (flush: TestObject[]) => {
t.deepEqual(flush, flushes[chunkIndex]);
chunkIndex++;
})
.on("error", (e: any) => {
t.end(e);
})
.on("end", () => {
t.end();
});
[...firstFlush, ...secondFlush].forEach(item => {
source.push(item);
});
source.push(null);
});
test.cb(
"accumulator() rolling should emit error and ignore chunk when its missing key",
t => {
t.plan(2);
let index = 0;
interface TestObject {
ts: number;
key: string;
}
const source = new Readable({ objectMode: true });
const accumulatorStream = accumulator(
3,
undefined,
FlushStrategy.rolling,
"nonExistingKey",
);
const input = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
source
.pipe(accumulatorStream)
.on("data", (flush: TestObject[]) => {
// No valid data output
expect(flush).to.deep.equal([]);
})
.on("error", (err: any) => {
source.pipe(accumulatorStream);
accumulatorStream.resume();
expect(err.message).to.equal(
`Key is missing in event: (nonExistingKey, ${JSON.stringify(
input[index],
)})`,
);
index++;
t.pass();
})
.on("end", () => {
t.end();
});
input.forEach(item => {
source.push(item);
});
source.push(null);
},
);
test.cb(
"accumulator() rolling should emit error, ignore chunk when key is missing and continue processing chunks correctly",
t => {
t.plan(3);
let chunkIndex = 0;
interface TestObject {
ts: number;
key: string;
}
const source = new Readable({ objectMode: true });
const accumulatorStream = accumulator(
3,
undefined,
FlushStrategy.rolling,
"ts",
);
const input = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
{ key: "d" },
{ ts: 3, key: "e" },
];
const firstFlush = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
];
const secondFlush = [{ ts: 3, key: "e" }];
const flushes = [firstFlush, secondFlush];
source
.pipe(accumulatorStream)
.on("data", (flush: TestObject[]) => {
t.deepEqual(flush, flushes[chunkIndex]);
chunkIndex++;
})
.on("error", (err: any) => {
source.pipe(accumulatorStream);
accumulatorStream.resume();
expect(err.message).to.equal(
`Key is missing in event: (ts, ${JSON.stringify(
input[3],
)})`,
);
t.pass();
})
.on("end", () => {
t.end();
});
input.forEach(item => {
source.push(item);
});
source.push(null);
},
);
test.cb("accumulator() sliding", t => {
t.plan(4);
let chunkIndex = 0;
interface TestObject {
ts: number;
key: string;
}
const source = new Readable({ objectMode: true });
const input = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
{ ts: 4, key: "d" },
];
const firstFlush = [{ ts: 0, key: "a" }];
const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
const thirdFlush = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
];
const fourthFlush = [
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
{ ts: 4, key: "d" },
];
const flushes = [firstFlush, secondFlush, thirdFlush, fourthFlush];
source
.pipe(accumulator(3, undefined, FlushStrategy.sliding))
.on("data", (flush: TestObject[]) => {
t.deepEqual(flush, flushes[chunkIndex]);
chunkIndex++;
})
.on("error", (e: any) => {
t.end(e);
})
.on("end", () => {
t.end();
});
input.forEach(item => {
source.push(item);
});
source.push(null);
});
test.cb("accumulator() sliding with key", t => {
t.plan(6);
let chunkIndex = 0;
interface TestObject {
ts: number;
key: string;
}
const source = new Readable({ objectMode: true });
const input = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
{ ts: 3, key: "d" },
{ ts: 5, key: "f" },
{ ts: 6, key: "g" },
];
const firstFlush = [{ ts: 0, key: "a" }];
const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
const thirdFlush = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
];
const fourthFlush = [
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
{ ts: 3, key: "d" },
];
const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }];
const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }];
const flushes = [
firstFlush,
secondFlush,
thirdFlush,
fourthFlush,
fifthFlush,
sixthFlush,
];
source
.pipe(accumulator(3, undefined, FlushStrategy.sliding, "ts"))
.on("data", (flush: TestObject[]) => {
t.deepEqual(flush, flushes[chunkIndex]);
chunkIndex++;
})
.on("error", (e: any) => {
t.end(e);
})
.on("end", () => {
t.end();
});
input.forEach(item => {
source.push(item);
});
source.push(null);
});
test.cb(
"accumulator() sliding should emit error and ignore chunk when key is missing",
t => {
t.plan(2);
let index = 0;
interface TestObject {
ts: number;
key: string;
}
const source = new Readable({ objectMode: true });
const accumulatorStream = accumulator(
3,
undefined,
FlushStrategy.sliding,
"nonExistingKey",
);
const input = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
source
.pipe(accumulatorStream)
.on("data", (flush: TestObject[]) => {
expect(flush).to.deep.equal([]);
})
.on("error", (err: any) => {
source.pipe(accumulatorStream);
accumulatorStream.resume();
expect(err.message).to.equal(
`Key is missing in event: (nonExistingKey, ${JSON.stringify(
input[index],
)})`,
);
index++;
t.pass();
})
.on("end", () => {
t.end();
});
input.forEach(item => {
source.push(item);
});
source.push(null);
},
);
test.cb(
"accumulator() sliding should emit error, ignore chunk when key is missing and continue processing chunks correctly",
t => {
t.plan(6);
let chunkIndex = 0;
interface TestObject {
ts: number;
key: string;
}
const source = new Readable({ objectMode: true });
const accumulatorStream = accumulator(
3,
undefined,
FlushStrategy.sliding,
"ts",
);
const input = [
{ ts: 0, key: "a" },
{ key: "b" },
{ ts: 2, key: "c" },
{ ts: 3, key: "d" },
{ ts: 5, key: "f" },
{ ts: 6, key: "g" },
];
const firstFlush = [{ ts: 0, key: "a" }];
const secondFlush = [{ ts: 0, key: "a" }, { ts: 2, key: "c" }];
const thirdFlush = [{ ts: 2, key: "c" }, { ts: 3, key: "d" }];
const fourthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }];
const fifthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }];
const flushes = [
firstFlush,
secondFlush,
thirdFlush,
fourthFlush,
fifthFlush,
];
source
.pipe(accumulatorStream)
.on("data", (flush: TestObject[]) => {
t.deepEqual(flush, flushes[chunkIndex]);
chunkIndex++;
})
.on("error", (err: any) => {
source.pipe(accumulatorStream);
accumulatorStream.resume();
expect(err.message).to.equal(
`Key is missing in event: (ts, ${JSON.stringify(
input[1],
)})`,
);
t.pass();
})
.on("end", () => {
t.end();
});
input.forEach(item => {
source.push(item);
});
source.push(null);
},
);
test.cb("accumulatorBy() rolling", t => {
t.plan(2);
let chunkIndex = 0;
interface TestObject {
ts: number;
key: string;
}
const source = new Readable({ objectMode: true });
const firstFlush = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
{ ts: 2, key: "d" },
];
const secondFlush = [{ ts: 3, key: "e" }];
const flushes = [firstFlush, secondFlush];
source
.pipe(
accumulatorBy(
undefined,
FlushStrategy.rolling,
(event: TestObject, bufferChunk: TestObject) => {
return bufferChunk.ts + 3 <= event.ts;
},
),
)
.on("data", (flush: TestObject[]) => {
t.deepEqual(flush, flushes[chunkIndex]);
chunkIndex++;
})
.on("error", (e: any) => {
t.end(e);
})
.on("end", () => {
t.end();
});
[...firstFlush, ...secondFlush].forEach(item => {
source.push(item);
});
source.push(null);
});
test.cb(
"accumulatorBy() rolling should emit error when key iteratee throws",
t => {
t.plan(2);
interface TestObject {
ts: number;
key: string;
}
const source = new Readable({ objectMode: true });
const input = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
];
const accumulaterStream = accumulatorBy(
undefined,
FlushStrategy.rolling,
(event: TestObject, bufferChunk: TestObject) => {
if (event.key !== "a") {
throw new Error("Failed mapping");
}
return bufferChunk.ts + 3 <= event.ts;
},
);
source
.pipe(accumulaterStream)
.on("error", (err: any) => {
source.pipe(accumulaterStream);
accumulaterStream.resume();
expect(err.message).to.equal("Failed mapping");
t.pass();
})
.on("end", () => {
t.end();
});
input.forEach(item => {
source.push(item);
});
source.push(null);
},
);
test.cb("accumulatorBy() sliding", t => {
t.plan(6);
let chunkIndex = 0;
interface TestObject {
ts: number;
key: string;
}
const source = new Readable({ objectMode: true });
const input = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
{ ts: 3, key: "d" },
{ ts: 5, key: "f" },
{ ts: 6, key: "g" },
];
const firstFlush = [{ ts: 0, key: "a" }];
const secondFlush = [{ ts: 0, key: "a" }, { ts: 1, key: "b" }];
const thirdFlush = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
];
const fourthFlush = [
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
{ ts: 3, key: "d" },
];
const fifthFlush = [{ ts: 3, key: "d" }, { ts: 5, key: "f" }];
const sixthFlush = [{ ts: 5, key: "f" }, { ts: 6, key: "g" }];
const flushes = [
firstFlush,
secondFlush,
thirdFlush,
fourthFlush,
fifthFlush,
sixthFlush,
];
source
.pipe(
accumulatorBy(
undefined,
FlushStrategy.sliding,
(event: TestObject, bufferChunk: TestObject) => {
return bufferChunk.ts + 3 <= event.ts ? true : false;
},
),
)
.on("data", (flush: TestObject[]) => {
t.deepEqual(flush, flushes[chunkIndex]);
chunkIndex++;
})
.on("error", (e: any) => {
t.end(e);
})
.on("end", () => {
t.end();
});
input.forEach(item => {
source.push(item);
});
source.push(null);
});
test.cb(
"accumulatorBy() sliding should emit error when key iteratee throws",
t => {
t.plan(2);
interface TestObject {
ts: number;
key: string;
}
const source = new Readable({ objectMode: true });
const input = [
{ ts: 0, key: "a" },
{ ts: 1, key: "b" },
{ ts: 2, key: "c" },
];
const accumulaterStream = accumulatorBy(
undefined,
FlushStrategy.sliding,
(event: TestObject, bufferChunk: TestObject) => {
if (event.key !== "a") {
throw new Error("Failed mapping");
}
return bufferChunk.ts + 3 <= event.ts ? true : false;
},
);
source
.pipe(accumulaterStream)
.on("error", (err: any) => {
source.pipe(accumulaterStream);
accumulaterStream.resume();
expect(err.message).to.equal("Failed mapping");
t.pass();
})
.on("end", () => {
t.end();
});
input.forEach(item => {
source.push(item);
});
source.push(null);
},
);

View File

@@ -1,6 +0,0 @@
export enum FlushStrategy {
rolling = "rolling",
sliding = "sliding",
}
export type AccumulatorByIteratee<T> = (event: T, bufferChunk: T) => boolean;

View File

@@ -21,3 +21,9 @@ export type JsonValue = JsonPrimitive | JsonPrimitive[];
export interface JsonParseOptions {
pretty: boolean;
}
+export enum FlushStrategy {
+rolling = "rolling",
+sliding = "sliding",
+}
+export type AccumulatorByIteratee<T> = (event: T, bufferChunk: T) => boolean;
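A sketch of the AccumulatorByIteratee contract that now lives in baseDefinitions, mirroring the windowing predicate exercised by the accumulatorBy() tests above:

import { AccumulatorByIteratee } from "./baseDefinitions";

interface TestObject {
    ts: number;
    key: string;
}

// Signal a flush once the incoming event is at least 3 ticks newer
// than the buffered chunk it is compared against
const windowClosed: AccumulatorByIteratee<TestObject> = (event, bufferChunk) =>
    bufferChunk.ts + 3 <= event.ts;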

View File

@@ -1,5 +1,5 @@
import { Transform } from "stream";
-import { TransformOptions } from "../baseDefinitions";
+import { TransformOptions } from "./baseDefinitions";
/**
* Stores chunks of data internally in an array and batches when batchSize is reached.
*
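A minimal usage sketch of batch(), condensed from the batch() test removed below (the "." import assumes the module's directory index):

import { Readable } from "stream";
import { batch } from ".";

const source = new Readable({ objectMode: true });
source
    .pipe(batch(3)) // groups up to 3 chunks, fewer on timeout or stream end
    .on("data", (group: string[]) => console.log(group)); // ["a", "b", "c"], ["d"]
["a", "b", "c", "d"].forEach(chunk => source.push(chunk));
source.push(null);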

View File

@@ -1,58 +0,0 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { batch } from ".";
test.cb("batch() batches chunks together", t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const expectedElements = [["a", "b", "c"], ["d", "e", "f"], ["g"]];
let i = 0;
source
.pipe(batch(3))
.on("data", (element: string[]) => {
expect(element).to.deep.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push("d");
source.push("e");
source.push("f");
source.push("g");
source.push(null);
});
test.cb("batch() yields a batch after the timeout", t => {
t.plan(3);
const source = new Readable({
objectMode: true,
read(size: number) {},
});
const expectedElements = [["a", "b"], ["c"], ["d"]];
let i = 0;
source
.pipe(batch(3))
.on("data", (element: string[]) => {
expect(element).to.deep.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.fail)
.on("end", t.end);
source.push("a");
source.push("b");
setTimeout(() => {
source.push("c");
}, 600);
setTimeout(() => {
source.push("d");
source.push(null);
}, 600 * 2);
});

View File

@@ -1,5 +1,5 @@
import { ChildProcess } from "child_process";
-import { duplex } from "../baseFunctions";
+import { duplex } from "./baseFunctions";
/**
* Return a Duplex stream from a child process' stdin and stdout
* @param childProcess Child process from which to create duplex stream
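A sketch of child() in use, following the cat-based test removed below:

import * as cp from "child_process";
import { Readable } from "stream";
import { child } from ".";

const source = new Readable();
const catProcess = cp.exec("cat");
// Upstream chunks are written to cat's stdin; its stdout is re-emitted downstream
source
    .pipe(child(catProcess))
    .on("data", chunk => console.log(chunk.toString())); // logs the chunks echoed back by cat
source.push("abc");
source.push("def");
source.push(null);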

View File

@@ -1,28 +0,0 @@
import * as cp from "child_process";
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { child } from ".";
test.cb(
"child() allows easily writing to child process stdin and reading from its stdout",
t => {
t.plan(1);
const source = new Readable();
const catProcess = cp.exec("cat");
let out = "";
source
.pipe(child(catProcess))
.on("data", chunk => (out += chunk))
.on("error", t.end)
.on("end", () => {
expect(out).to.equal("abcdef");
t.pass();
t.end();
});
source.push("ab");
source.push("cd");
source.push("ef");
source.push(null);
},
);

View File

@@ -1,5 +1,5 @@
import { Transform } from "stream";
-import { ThroughOptions } from "../baseDefinitions";
+import { ThroughOptions } from "./baseDefinitions";
/**
* Return a ReadWrite stream that collects streamed chunks into an array or buffer
* @param options
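A minimal sketch of collect() in object mode, condensed from the flowing-mode test removed below:

import { Readable } from "stream";
import { collect } from ".";

const source = new Readable({ objectMode: true });
source
    .pipe(collect({ objectMode: true }))
    .on("data", collected => console.log(collected)); // ["a", "b", "c"]
["a", "b", "c"].forEach(chunk => source.push(chunk));
source.push(null);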

View File

@@ -1,132 +0,0 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { collect } from ".";
test.cb(
"collect() collects streamed elements into an array (object, flowing mode)",
t => {
t.plan(1);
const source = new Readable({ objectMode: true });
source
.pipe(collect({ objectMode: true }))
.on("data", collected => {
expect(collected).to.deep.equal(["a", "b", "c"]);
t.pass();
})
.on("error", t.end)
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
},
);
test.cb(
"collect() collects streamed elements into an array (object, paused mode)",
t => {
t.plan(1);
const source = new Readable({ objectMode: true });
const collector = source.pipe(collect({ objectMode: true }));
collector
.on("readable", () => {
let collected = collector.read();
while (collected !== null) {
expect(collected).to.deep.equal(["a", "b", "c"]);
t.pass();
collected = collector.read();
}
})
.on("error", t.end)
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
},
);
test.cb(
"collect() collects streamed bytes into a buffer (non-object, flowing mode)",
t => {
t.plan(1);
const source = new Readable({ objectMode: false });
source
.pipe(collect())
.on("data", collected => {
expect(collected).to.deep.equal(Buffer.from("abc"));
t.pass();
})
.on("error", t.end)
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
},
);
test.cb(
"collect() collects streamed bytes into a buffer (non-object, paused mode)",
t => {
t.plan(1);
const source = new Readable({ objectMode: false });
const collector = source.pipe(collect({ objectMode: false }));
collector
.on("readable", () => {
let collected = collector.read();
while (collected !== null) {
expect(collected).to.deep.equal(Buffer.from("abc"));
t.pass();
collected = collector.read();
}
})
.on("error", t.end)
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
},
);
test.cb(
"collect() emits an empty array if the source was empty (object mode)",
t => {
t.plan(1);
const source = new Readable({ objectMode: true });
const collector = source.pipe(collect({ objectMode: true }));
collector
.on("data", collected => {
expect(collected).to.deep.equal([]);
t.pass();
})
.on("error", t.end)
.on("end", t.end);
source.push(null);
},
);
test.cb(
"collect() emits nothing if the source was empty (non-object mode)",
t => {
t.plan(0);
const source = new Readable({ objectMode: false });
const collector = source.pipe(collect({ objectMode: false }));
collector
.on("data", () => t.fail())
.on("error", t.end)
.on("end", t.end);
source.push(null);
},
);

View File

@@ -1,180 +0,0 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { concat, collect } from "../baseFunctions";
test.cb(
"concat() concatenates multiple readable streams (object, flowing mode)",
t => {
t.plan(6);
const source1 = new Readable({ objectMode: true });
const source2 = new Readable({ objectMode: true });
const expectedElements = ["a", "b", "c", "d", "e", "f"];
let i = 0;
concat(source1, source2)
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source1.push("a");
source2.push("d");
source1.push("b");
source2.push("e");
source1.push("c");
source2.push("f");
source2.push(null);
source1.push(null);
},
);
test.cb(
"concat() concatenates multiple readable streams (object, paused mode)",
t => {
t.plan(6);
const source1 = new Readable({ objectMode: true });
const source2 = new Readable({ objectMode: true });
const expectedElements = ["a", "b", "c", "d", "e", "f"];
let i = 0;
const concatenation = concat(source1, source2)
.on("readable", () => {
let element = concatenation.read();
while (element !== null) {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
element = concatenation.read();
}
})
.on("error", t.end)
.on("end", t.end);
source1.push("a");
source2.push("d");
source1.push("b");
source2.push("e");
source1.push("c");
source2.push("f");
source2.push(null);
source1.push(null);
},
);
test.cb(
"concat() concatenates multiple readable streams (non-object, flowing mode)",
t => {
t.plan(6);
const source1 = new Readable({ objectMode: false });
const source2 = new Readable({ objectMode: false });
const expectedElements = ["a", "b", "c", "d", "e", "f"];
let i = 0;
concat(source1, source2)
.on("data", (element: string) => {
expect(element).to.deep.equal(Buffer.from(expectedElements[i]));
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source1.push("a");
source2.push("d");
source1.push("b");
source2.push("e");
source1.push("c");
source2.push("f");
source2.push(null);
source1.push(null);
},
);
test.cb(
"concat() concatenates multiple readable streams (non-object, paused mode)",
t => {
t.plan(6);
const source1 = new Readable({ objectMode: false, read: () => ({}) });
const source2 = new Readable({ objectMode: false, read: () => ({}) });
const expectedElements = ["a", "b", "c", "d", "e", "f"];
let i = 0;
const concatenation = concat(source1, source2)
.on("readable", () => {
let element = concatenation.read();
while (element !== null) {
expect(element).to.deep.equal(
Buffer.from(expectedElements[i]),
);
t.pass();
i++;
element = concatenation.read();
}
})
.on("error", t.end)
.on("end", t.end);
source1.push("a");
setTimeout(() => source2.push("d"), 10);
setTimeout(() => source1.push("b"), 20);
setTimeout(() => source2.push("e"), 30);
setTimeout(() => source1.push("c"), 40);
setTimeout(() => source2.push("f"), 50);
setTimeout(() => source2.push(null), 60);
setTimeout(() => source1.push(null), 70);
},
);
test.cb("concat() concatenates a single readable stream (object mode)", t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const expectedElements = ["a", "b", "c", "d", "e", "f"];
let i = 0;
concat(source)
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test.cb(
"concat() concatenates a single readable stream (non-object mode)",
t => {
t.plan(3);
const source = new Readable({ objectMode: false });
const expectedElements = ["a", "b", "c", "d", "e", "f"];
let i = 0;
concat(source)
.on("data", (element: string) => {
expect(element).to.deep.equal(Buffer.from(expectedElements[i]));
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
},
);
test.cb("concat() concatenates empty list of readable streams", t => {
t.plan(0);
concat()
.pipe(collect())
.on("data", _ => {
t.fail();
})
.on("error", t.end)
.on("end", t.end);
});

View File

@@ -1,28 +0,0 @@
import * as cp from "child_process";
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { duplex } from "../baseFunctions";
test.cb(
"duplex() combines a writable and readable stream into a ReadWrite stream",
t => {
t.plan(1);
const source = new Readable();
const catProcess = cp.exec("cat");
let out = "";
source
.pipe(duplex(catProcess.stdin!, catProcess.stdout!))
.on("data", chunk => (out += chunk))
.on("error", t.end)
.on("end", () => {
expect(out).to.equal("abcdef");
t.pass();
t.end();
});
source.push("ab");
source.push("cd");
source.push("ef");
source.push(null);
},
);

View File

@@ -1,5 +1,5 @@
import { Transform } from "stream";
-import { ThroughOptions } from "../baseDefinitions";
+import { ThroughOptions } from "./baseDefinitions";
/**
* Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold
* @param predicate Predicate with which to filter stream chunks
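A minimal sketch of filter(), condensed from the synchronous test removed below:

import { Readable } from "stream";
import { filter } from ".";

const source = new Readable({ objectMode: true });
source
    .pipe(filter((element: string) => element !== "b")) // drop "b"
    .on("data", element => console.log(element)); // "a", "c"
["a", "b", "c"].forEach(chunk => source.push(chunk));
source.push(null);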

View File

@@ -1,102 +0,0 @@
import test from "ava";
import { expect } from "chai";
import { Readable } from "stream";
import { filter } from ".";
test.cb("filter() filters elements synchronously", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
const expectedElements = ["a", "c"];
let i = 0;
source
.pipe(filter((element: string) => element !== "b"))
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test.cb("filter() filters elements asynchronously", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
const expectedElements = ["a", "c"];
let i = 0;
source
.pipe(
filter(async (element: string) => {
await Promise.resolve();
return element !== "b";
}),
)
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test.cb("filter() emits errors during synchronous filtering", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
source
.pipe(
filter((element: string) => {
if (element !== "a") {
throw new Error("Failed filtering");
}
return true;
}),
)
.resume()
.on("error", err => {
expect(err.message).to.equal("Failed filtering");
t.pass();
})
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test.cb("filter() emits errors during asynchronous filtering", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
source
.pipe(
filter(async (element: string) => {
await Promise.resolve();
if (element !== "a") {
throw new Error("Failed filtering");
}
return true;
}),
)
.resume()
.on("error", err => {
expect(err.message).to.equal("Failed filtering");
t.pass();
})
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});

View File

@@ -1,5 +1,5 @@
import { Transform } from "stream";
-import { TransformOptions } from "../baseDefinitions";
+import { TransformOptions } from "./baseDefinitions";
/**
* Return a ReadWrite stream that flat maps streamed chunks
* @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such)
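A minimal sketch of flatMap(), condensed from the synchronous test removed below:

import { Readable } from "stream";
import { flatMap } from ".";

const source = new Readable({ objectMode: true });
source
    // Each chunk fans out into several downstream chunks
    .pipe(flatMap((element: string) => [element, element.toUpperCase()]))
    .on("data", element => console.log(element)); // "a", "A", "b", "B"
["a", "b"].forEach(chunk => source.push(chunk));
source.push(null);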

View File

@@ -1,100 +0,0 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { flatMap } from ".";
test.cb("flatMap() maps elements synchronously", t => {
t.plan(6);
const source = new Readable({ objectMode: true });
const expectedElements = ["a", "A", "b", "B", "c", "C"];
let i = 0;
source
.pipe(flatMap((element: string) => [element, element.toUpperCase()]))
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test.cb("flatMap() maps elements asynchronously", t => {
t.plan(6);
const source = new Readable({ objectMode: true });
const expectedElements = ["a", "A", "b", "B", "c", "C"];
let i = 0;
source
.pipe(
flatMap(async (element: string) => {
await Promise.resolve();
return [element, element.toUpperCase()];
}),
)
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test.cb("flatMap() emits errors during synchronous mapping", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
source
.pipe(
flatMap((element: string) => {
if (element !== "a") {
throw new Error("Failed mapping");
}
return [element, element.toUpperCase()];
}),
)
.resume()
.on("error", err => {
expect(err.message).to.equal("Failed mapping");
t.pass();
})
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test.cb("flatMap() emits errors during asynchronous mapping", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
source
.pipe(
flatMap(async (element: string) => {
await Promise.resolve();
if (element !== "a") {
throw new Error("Failed mapping");
}
return [element, element.toUpperCase()];
}),
)
.resume()
.on("error", err => {
expect(err.message).to.equal("Failed mapping");
t.pass();
})
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});

View File

@@ -1,45 +0,0 @@
import test from "ava";
import { expect } from "chai";
import { fromArray } from ".";
test.cb("fromArray() streams array elements in flowing mode", t => {
t.plan(3);
const elements = ["a", "b", "c"];
const stream = fromArray(elements);
let i = 0;
stream
.on("data", (element: string) => {
expect(element).to.equal(elements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
});
test.cb("fromArray() ends immediately if there are no array elements", t => {
t.plan(0);
fromArray([])
.on("data", () => t.fail())
.on("error", t.end)
.on("end", t.end);
});
test.cb("fromArray() streams array elements in paused mode", t => {
t.plan(3);
const elements = ["a", "b", "c"];
const stream = fromArray(elements);
let i = 0;
stream
.on("readable", () => {
let element = stream.read();
while (element !== null) {
expect(element).to.equal(elements[i]);
t.pass();
i++;
element = stream.read();
}
})
.on("error", t.end)
.on("end", t.end);
});

View File

@@ -7,12 +7,9 @@ import {
TransformOptions,
WithEncoding,
JsonParseOptions,
-} from "./baseDefinitions";
-import {
FlushStrategy,
AccumulatorByIteratee,
-} from "./accumulator/definitions";
+} from "./baseDefinitions";
/**
* Convert an array into a Readable stream of its elements
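A one-line sketch of fromArray(), matching its tests removed above (import assumed from the package index):

import { fromArray } from ".";

fromArray(["a", "b", "c"]).on("data", element => console.log(element)); // "a", "b", "c"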

View File

@@ -1,6 +1,6 @@
import { Transform } from "stream";
import { StringDecoder } from "string_decoder";
-import { WithEncoding } from "../baseDefinitions";
+import { WithEncoding } from "./baseDefinitions";
/**
* Return a ReadWrite stream that joins streamed chunks using the given separator
* @param separator Separator to join with
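A minimal sketch of join(); as the removed test below shows, the stream re-emits chunks with the separator interleaved:

import { Readable } from "stream";
import { join } from ".";

const source = new Readable({ objectMode: true });
source
    .pipe(join("|"))
    .on("data", part => console.log(part)); // "ab", "|", "cd"
source.push("ab");
source.push("cd");
source.push(null);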

View File

@@ -1,56 +0,0 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { join } from ".";
test.cb("join() joins chunks using the specified separator", t => {
t.plan(9);
const source = new Readable({ objectMode: true });
const expectedParts = ["ab|", "|", "c|d", "|", "|", "|", "e", "|", "|f|"];
let i = 0;
source
.pipe(join("|"))
.on("data", part => {
expect(part).to.equal(expectedParts[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("ab|");
source.push("c|d");
source.push("|");
source.push("e");
source.push("|f|");
source.push(null);
});
test.cb(
"join() joins chunks using the specified separator without breaking up multi-byte characters " +
"spanning multiple chunks",
t => {
t.plan(5);
const source = new Readable({ objectMode: true });
const expectedParts = ["ø", "|", "ö", "|", "一"];
let i = 0;
source
.pipe(join("|"))
.on("data", part => {
expect(part).to.equal(expectedParts[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push(Buffer.from("ø").slice(0, 1)); // 2-byte character spanning two chunks
source.push(Buffer.from("ø").slice(1, 2));
source.push(Buffer.from("ö").slice(0, 1)); // 2-byte character spanning two chunks
source.push(Buffer.from("ö").slice(1, 2));
source.push(Buffer.from("一").slice(0, 1)); // 3-byte character spanning three chunks
source.push(Buffer.from("一").slice(1, 2));
source.push(Buffer.from("一").slice(2, 3));
source.push(null);
},
);

View File

@@ -1,15 +0,0 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { last } from "../baseFunctions";
test("last() resolves to the last chunk streamed by the given readable stream", async t => {
const source = new Readable({ objectMode: true });
const lastPromise = last(source);
source.push("ab");
source.push("cd");
source.push("ef");
source.push(null);
const lastChunk = await lastPromise;
expect(lastChunk).to.equal("ef");
});

View File

@@ -1,5 +1,5 @@
import { Transform } from "stream";
-import { TransformOptions } from "../baseDefinitions";
+import { TransformOptions } from "./baseDefinitions";
/**
* Return a ReadWrite stream that maps streamed chunks
* @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such)
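A minimal sketch of map(), condensed from the synchronous test removed below:

import { Readable } from "stream";
import { map } from ".";

const source = new Readable({ objectMode: true });
source
    .pipe(map((element: string) => element.toUpperCase()))
    .on("data", element => console.log(element)); // "A", "B", "C"
["a", "b", "c"].forEach(chunk => source.push(chunk));
source.push(null);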

View File

@@ -1,109 +0,0 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { map } from ".";
test.cb("map() maps elements synchronously", t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const mapStream = map((element: string) => element.toUpperCase());
const expectedElements = ["A", "B", "C"];
let i = 0;
source
.pipe(mapStream)
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test.cb("map() maps elements asynchronously", t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const mapStream = map(async (element: string) => {
await Promise.resolve();
return element.toUpperCase();
});
const expectedElements = ["A", "B", "C"];
let i = 0;
source
.pipe(mapStream)
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test.cb("map() emits errors during synchronous mapping", t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const mapStream = map((element: string) => {
if (element !== "b") {
throw new Error("Failed mapping");
}
return element.toUpperCase();
});
source
.pipe(mapStream)
.on("data", data => {
expect(data).to.equal("B");
t.pass();
})
.on("error", err => {
source.pipe(mapStream);
mapStream.resume();
expect(err.message).to.equal("Failed mapping");
t.pass();
})
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test("map() emits errors during asynchronous mapping", t => {
t.plan(1);
return new Promise((resolve, _) => {
const source = new Readable({ objectMode: true });
const mapStream = map(async (element: string) => {
await Promise.resolve();
if (element === "b") {
throw new Error("Failed mapping");
}
return element.toUpperCase();
});
source
.pipe(mapStream)
.on("error", err => {
expect(err.message).to.equal("Failed mapping");
t.pass();
resolve();
})
.on("end", () => t.fail);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
});

View File

@@ -1,60 +0,0 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { merge } from "../baseFunctions";
test.cb(
"merge() merges multiple readable streams in chunk arrival order",
t => {
t.plan(6);
const source1 = new Readable({ objectMode: true, read: () => ({}) });
const source2 = new Readable({ objectMode: true, read: () => ({}) });
const expectedElements = ["a", "d", "b", "e", "c", "f"];
let i = 0;
merge(source1, source2)
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source1.push("a");
setTimeout(() => source2.push("d"), 10);
setTimeout(() => source1.push("b"), 20);
setTimeout(() => source2.push("e"), 30);
setTimeout(() => source1.push("c"), 40);
setTimeout(() => source2.push("f"), 50);
setTimeout(() => source2.push(null), 60);
setTimeout(() => source1.push(null), 70);
},
);
test.cb("merge() merges a readable stream", t => {
t.plan(3);
const source = new Readable({ objectMode: true, read: () => ({}) });
const expectedElements = ["a", "b", "c"];
let i = 0;
merge(source)
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});
test.cb("merge() merges an empty list of readable streams", t => {
t.plan(0);
merge()
.on("data", () => t.pass())
.on("error", t.end)
.on("end", t.end);
});

View File

@@ -1,6 +1,6 @@
import { Transform } from "stream";
-import { sleep } from "../../helpers";
-import { TransformOptions } from "../baseDefinitions";
+import { sleep } from "../helpers";
+import { TransformOptions } from "./baseDefinitions";
/**
* Limits number of parallel processes in flight.
* @param parallel Max number of parallel processes.
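A sketch of parallelMap() with a concurrency of 2, condensed from the removed test below, with an inline stand-in for the repo's sleep helper:

import { Readable } from "stream";
import { parallelMap } from ".";

// Inline stand-in for the repo's sleep() helper
const sleep = (ms: number) => new Promise(resolve => setTimeout(resolve, ms));

const source = new Readable({ objectMode: true });
source
    // At most 2 mapper invocations are in flight at any time
    .pipe(
        parallelMap(async (data: string) => {
            await sleep(50);
            return data + "_processed";
        }, 2),
    )
    .on("data", element => console.log(element));
["a", "b", "c", "d"].forEach(chunk => source.push(chunk));
source.push(null);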

View File

@@ -1,77 +0,0 @@
import { Readable } from "stream";
import { performance } from "perf_hooks";
import test from "ava";
import { expect } from "chai";
import { parallelMap } from "../baseFunctions";
import { sleep } from "../../helpers";
test.cb("parallelMap() parallel mapping", t => {
t.plan(6);
const offset = 50;
const source = new Readable({ objectMode: true });
const expectedElements = [
"a_processed",
"b_processed",
"c_processed",
"d_processed",
"e_processed",
"f_processed",
];
interface IPerfData {
start: number;
output?: string;
finish?: number;
}
const orderedResults: IPerfData[] = [];
source
.pipe(
parallelMap(async (data: any) => {
const perfData: IPerfData = { start: performance.now() };
const c = data + "_processed";
perfData.output = c;
await sleep(offset);
perfData.finish = performance.now();
orderedResults.push(perfData);
return c;
}, 2),
)
.on("data", (element: string) => {
t.true(expectedElements.includes(element));
})
.on("error", t.end)
.on("end", async () => {
expect(orderedResults[0].finish).to.be.lessThan(
orderedResults[2].start,
);
expect(orderedResults[1].finish).to.be.lessThan(
orderedResults[3].start,
);
expect(orderedResults[2].finish).to.be.lessThan(
orderedResults[4].start,
);
expect(orderedResults[3].finish).to.be.lessThan(
orderedResults[5].start,
);
expect(orderedResults[0].start).to.be.lessThan(
orderedResults[2].start + offset,
);
expect(orderedResults[1].start).to.be.lessThan(
orderedResults[3].start + offset,
);
expect(orderedResults[2].start).to.be.lessThan(
orderedResults[4].start + offset,
);
expect(orderedResults[3].start).to.be.lessThan(
orderedResults[5].start + offset,
);
t.end();
});
source.push("a");
source.push("b");
source.push("c");
source.push("d");
source.push("e");
source.push("f");
source.push(null);
});

View File

@@ -1,6 +1,6 @@
import { Transform } from "stream";
import { StringDecoder } from "string_decoder";
-import { SerializationFormats } from "../baseDefinitions";
+import { SerializationFormats } from "./baseDefinitions";
/**
* Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk
* must be a fully defined JSON string.
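A minimal sketch of parse(), condensed from the removed test below:

import { Readable } from "stream";
import { parse } from ".";

const source = new Readable({ objectMode: true });
source
    .pipe(parse())
    .on("data", parsed => console.log(parsed)); // "abc", {}, []
source.push('"abc"');
source.push("{}");
source.push("[]");
source.push(null);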

View File

@@ -1,40 +0,0 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { parse } from "../baseFunctions";
test.cb("parse() parses the streamed elements as JSON", t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const expectedElements = ["abc", {}, []];
let i = 0;
source
.pipe(parse())
.on("data", part => {
expect(part).to.deep.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push('"abc"');
source.push("{}");
source.push("[]");
source.push(null);
});
test.cb("parse() emits errors on invalid JSON", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
source
.pipe(parse())
.resume()
.on("error", () => t.pass())
.on("end", t.end);
source.push("{}");
source.push({});
source.push([]);
source.push(null);
});

View File

@@ -1,7 +1,7 @@
import { Transform } from "stream";
import { performance } from "perf_hooks";
-import { sleep } from "../../helpers";
-import { TransformOptions } from "../baseDefinitions";
+import { sleep } from "../helpers";
+import { TransformOptions } from "./baseDefinitions";
/**
* Limits rate of data transferred into stream.
* @param targetRate Desired rate in events per second
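A minimal sketch of rate(); the per-second reading matches the arithmetic in the removed test below:

import { Readable } from "stream";
import { rate } from ".";

const source = new Readable({ objectMode: true });
source
    // Keep the observed throughput below 50 events per second
    .pipe(rate(50))
    .on("data", element => console.log(element));
["a", "b", "c"].forEach(chunk => source.push(chunk));
source.push(null);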

View File

@@ -1,67 +0,0 @@
import { Readable } from "stream";
import { performance } from "perf_hooks";
import test from "ava";
import { expect } from "chai";
import { rate } from "../baseFunctions";
test.cb("rate() sends data at desired rate", t => {
t.plan(9);
const fastRate = 150;
const medRate = 50;
const slowRate = 1;
const sourceFast = new Readable({ objectMode: true });
const sourceMed = new Readable({ objectMode: true });
const sourceSlow = new Readable({ objectMode: true });
const expectedElements = ["a", "b", "c"];
const start = performance.now();
let i = 0;
let j = 0;
let k = 0;
sourceFast
.pipe(rate(fastRate))
.on("data", (element: string[]) => {
const currentRate = (i / (performance.now() - start)) * 1000;
expect(element).to.deep.equal(expectedElements[i]);
expect(currentRate).lessThan(fastRate);
t.pass();
i++;
})
.on("error", t.end);
sourceMed
.pipe(rate(medRate))
.on("data", (element: string[]) => {
const currentRate = (j / (performance.now() - start)) * 1000;
expect(element).to.deep.equal(expectedElements[j]);
expect(currentRate).lessThan(medRate);
t.pass();
j++;
})
.on("error", t.end);
sourceSlow
.pipe(rate(slowRate, 1))
.on("data", (element: string[]) => {
const currentRate = (k / (performance.now() - start)) * 1000;
expect(element).to.deep.equal(expectedElements[k]);
expect(currentRate).lessThan(slowRate);
t.pass();
k++;
})
.on("error", t.end)
.on("end", t.end);
sourceFast.push("a");
sourceFast.push("b");
sourceFast.push("c");
sourceFast.push(null);
sourceMed.push("a");
sourceMed.push("b");
sourceMed.push("c");
sourceMed.push(null);
sourceSlow.push("a");
sourceSlow.push("b");
sourceSlow.push("c");
sourceSlow.push(null);
});

View File

@@ -1,5 +1,5 @@
import { Transform } from "stream";
-import { TransformOptions } from "../baseDefinitions";
+import { TransformOptions } from "./baseDefinitions";
/**
* Return a ReadWrite stream that reduces streamed chunks down to a single value and yields that
* value
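A minimal sketch of reduce(), condensed from the synchronous test removed below:

import { Readable } from "stream";
import { reduce } from ".";

const source = new Readable({ objectMode: true });
source
    // Accumulate the total length of all streamed strings
    .pipe(reduce((acc: number, element: string) => acc + element.length, 0))
    .on("data", total => console.log(total)); // 6
["ab", "cd", "ef"].forEach(chunk => source.push(chunk));
source.push(null);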

View File

@@ -1,98 +0,0 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { reduce } from ".";
test.cb("reduce() reduces elements synchronously", t => {
t.plan(1);
const source = new Readable({ objectMode: true });
const expectedValue = 6;
source
.pipe(reduce((acc: number, element: string) => acc + element.length, 0))
.on("data", (element: string) => {
expect(element).to.equal(expectedValue);
t.pass();
})
.on("error", t.end)
.on("end", t.end);
source.push("ab");
source.push("cd");
source.push("ef");
source.push(null);
});
test.cb("reduce() reduces elements asynchronously", t => {
t.plan(1);
const source = new Readable({ objectMode: true });
const expectedValue = 6;
source
.pipe(
reduce(async (acc: number, element: string) => {
await Promise.resolve();
return acc + element.length;
}, 0),
)
.on("data", (element: string) => {
expect(element).to.equal(expectedValue);
t.pass();
})
.on("error", t.end)
.on("end", t.end);
source.push("ab");
source.push("cd");
source.push("ef");
source.push(null);
});
test.cb("reduce() emits errors during synchronous reduce", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
source
.pipe(
reduce((acc: number, element: string) => {
if (element !== "ab") {
throw new Error("Failed reduce");
}
return acc + element.length;
}, 0),
)
.resume()
.on("error", err => {
expect(err.message).to.equal("Failed reduce");
t.pass();
})
.on("end", t.end);
source.push("ab");
source.push("cd");
source.push("ef");
source.push(null);
});
test.cb("reduce() emits errors during asynchronous reduce", t => {
t.plan(2);
const source = new Readable({ objectMode: true });
source
.pipe(
reduce(async (acc: number, element: string) => {
await Promise.resolve();
if (element !== "ab") {
throw new Error("Failed mapping");
}
return acc + element.length;
}, 0),
)
.resume()
.on("error", err => {
expect(err.message).to.equal("Failed mapping");
t.pass();
})
.on("end", t.end);
source.push("ab");
source.push("cd");
source.push("ef");
source.push(null);
});

View File

@@ -1,6 +1,6 @@
import { Transform } from "stream";
import { StringDecoder } from "string_decoder";
-import { WithEncoding } from "../baseDefinitions";
+import { WithEncoding } from "./baseDefinitions";
/**
* Return a ReadWrite stream that replaces occurrences of the given string or regular expression in
* the streamed chunks with the specified replacement string
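A minimal sketch of replace() with a regular expression, condensed from the removed test below:

import { Readable } from "stream";
import { replace } from ".";

const source = new Readable({ objectMode: true });
source
    .pipe(replace(/^def$/, "xyz"))
    .on("data", part => console.log(part)); // "abc", "xyz", "ghi"
source.push("abc");
source.push("def");
source.push("ghi");
source.push(null);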

View File

@@ -1,80 +0,0 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { replace } from ".";
test.cb(
"replace() replaces occurrences of the given string in the streamed elements with the specified " +
"replacement string",
t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const expectedElements = ["abc", "xyf", "ghi"];
let i = 0;
source
.pipe(replace("de", "xy"))
.on("data", part => {
expect(part).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("abc");
source.push("def");
source.push("ghi");
source.push(null);
},
);
test.cb(
"replace() replaces occurrences of the given regular expression in the streamed elements with " +
"the specified replacement string",
t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const expectedElements = ["abc", "xyz", "ghi"];
let i = 0;
source
.pipe(replace(/^def$/, "xyz"))
.on("data", part => {
expect(part).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("abc");
source.push("def");
source.push("ghi");
source.push(null);
},
);
test.cb(
"replace() replaces occurrences of the given multi-byte character even if it spans multiple chunks",
t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const expectedElements = ["ø", "O", "a"];
let i = 0;
source
.pipe(replace("ö", "O"))
.on("data", part => {
expect(part).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push(Buffer.from("ø").slice(0, 1)); // 2-byte character spanning two chunks
source.push(Buffer.from("ø").slice(1, 2));
source.push(Buffer.from("ö").slice(0, 1)); // 2-byte character spanning two chunks
source.push(Buffer.from("ö").slice(1, 2));
source.push("a");
source.push(null);
},
);

View File

@@ -1,6 +1,6 @@
import { Transform } from "stream";
import { StringDecoder } from "string_decoder";
-import { WithEncoding } from "../baseDefinitions";
+import { WithEncoding } from "./baseDefinitions";
/**
* Return a ReadWrite stream that splits streamed chunks using the given separator
* @param separator Separator to split by, defaulting to "\n"
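A minimal sketch of split(), condensed from the removed test below; the separator is buffered across chunk boundaries:

import { Readable } from "stream";
import { split } from ".";

const source = new Readable({ objectMode: true });
source
    .pipe(split("|"))
    .on("data", part => console.log(part)); // "ab", "c", "d"
source.push("ab|c");
source.push("|d");
source.push(null);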

View File

@@ -1,98 +0,0 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { split } from ".";
test.cb("split() splits chunks using the default separator (\\n)", t => {
t.plan(5);
const source = new Readable({ objectMode: true });
const expectedParts = ["ab", "c", "d", "ef", ""];
let i = 0;
source
.pipe(split())
.on("data", part => {
expect(part).to.equal(expectedParts[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("ab\n");
source.push("c");
source.push("\n");
source.push("d");
source.push("\nef\n");
source.push(null);
});
test.cb("split() splits chunks using the specified separator", t => {
t.plan(6);
const source = new Readable({ objectMode: true });
const expectedParts = ["ab", "c", "d", "e", "f", ""];
let i = 0;
source
.pipe(split("|"))
.on("data", (part: string) => {
expect(part).to.equal(expectedParts[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("ab|");
source.push("c|d");
source.push("|");
source.push("e");
source.push("|f|");
source.push(null);
});
test.cb(
"split() splits utf8 encoded buffers using the specified separator",
t => {
t.plan(3);
const expectedElements = ["a", "b", "c"];
let i = 0;
const through = split(",");
const buf = Buffer.from("a,b,c");
through
.on("data", element => {
expect(element).to.equal(expectedElements[i]);
i++;
t.pass();
})
.on("error", t.end)
.on("end", t.end);
for (let j = 0; j < buf.length; ++j) {
through.write(buf.slice(j, j + 1));
}
through.end();
},
);
test.cb(
"split() splits utf8 encoded buffers with multi-byte characters using the specified separator",
t => {
t.plan(3);
const expectedElements = ["一", "一", "一"];
let i = 0;
const through = split(",");
const buf = Buffer.from("一,一,一"); // These are multi-byte utf8 characters (code: 4E00)
through
.on("data", element => {
expect(element).to.equal(expectedElements[i]);
i++;
t.pass();
})
.on("error", t.end)
.on("end", t.end);
for (let j = 0; j < buf.length; ++j) {
through.write(buf.slice(j, j + 1));
}
through.end();
},
);

View File

@@ -1,5 +1,5 @@
import { Transform } from "stream";
-import { JsonValue, JsonParseOptions } from "../baseDefinitions";
+import { JsonValue, JsonParseOptions } from "./baseDefinitions";
/**
* Return a ReadWrite stream that stringifies the streamed chunks to JSON
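A minimal sketch of stringify(), condensed from the removed test below:

import { Readable } from "stream";
import { stringify } from ".";

const source = new Readable({ objectMode: true });
source
    .pipe(stringify())
    .on("data", part => console.log(part)); // '{"a":"a","b":"b"}'
source.push({ a: "a", b: "b" });
source.push(null);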

View File

@@ -1,61 +0,0 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { stringify } from "../baseFunctions";
test.cb("stringify() stringifies the streamed elements as JSON", t => {
t.plan(4);
const source = new Readable({ objectMode: true });
const expectedElements = [
'"abc"',
"0",
'{"a":"a","b":"b","c":"c"}',
'["a","b","c"]',
];
let i = 0;
source
.pipe(stringify())
.on("data", part => {
expect(part).to.deep.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("abc");
source.push(0);
source.push({ a: "a", b: "b", c: "c" });
source.push(["a", "b", "c"]);
source.push(null);
});
test.cb(
"stringify() stringifies the streamed elements as pretty-printed JSON",
t => {
t.plan(4);
const source = new Readable({ objectMode: true });
const expectedElements = [
'"abc"',
"0",
'{\n "a": "a",\n "b": "b",\n "c": "c"\n}',
'[\n "a",\n "b",\n "c"\n]',
];
let i = 0;
source
.pipe(stringify({ pretty: true }))
.on("data", part => {
expect(part).to.deep.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("abc");
source.push(0);
source.push({ a: "a", b: "b", c: "c" });
source.push(["a", "b", "c"]);
source.push(null);
},
);

View File

@@ -1,5 +1,5 @@
import { Transform } from "stream";
-import { TransformOptions } from "../baseDefinitions";
+import { TransformOptions } from "./baseDefinitions";
/**
* Unbatches and sends individual chunks of data
*/
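A minimal sketch of unbatch() paired with batch(), condensed from the removed test below:

import { Readable } from "stream";
import { batch, unbatch } from ".";

const source = new Readable({ objectMode: true });
source
    .pipe(batch(3)) // group into arrays of up to 3...
    .pipe(unbatch()) // ...then flatten back to individual chunks
    .on("data", element => console.log(element)); // "a", "b", "c"
["a", "b", "c"].forEach(chunk => source.push(chunk));
source.push(null);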

View File

@@ -1,26 +0,0 @@
import { Readable } from "stream";
import test from "ava";
import { expect } from "chai";
import { unbatch, batch } from "../baseFunctions";
test.cb("unbatch() unbatches", t => {
t.plan(3);
const source = new Readable({ objectMode: true });
const expectedElements = ["a", "b", "c"];
let i = 0;
source
.pipe(batch(3))
.pipe(unbatch())
.on("data", (element: string) => {
expect(element).to.equal(expectedElements[i]);
t.pass();
i++;
})
.on("error", t.end)
.on("end", t.end);
source.push("a");
source.push("b");
source.push("c");
source.push(null);
});