Refactoring

Jerry Kurian 2019-08-16 09:02:54 -04:00
parent 505fefeeb5
commit faac6134af
48 changed files with 84 additions and 72 deletions

View File

@@ -22,7 +22,7 @@
     "type": "git"
   },
   "scripts": {
-    "test": "NODE_PATH=src node node_modules/.bin/ava 'src/**/**/*.spec.ts' -e",
+    "test": "NODE_PATH=src node node_modules/.bin/ava 'tests/*.spec.ts' -e",
     "test:debug": "NODE_PATH=src node inspect node_modules/ava/profile.ts",
     "test:all": "NODE_PATH=src node node_modules/.bin/ava",
     "lint": "tslint -p tsconfig.json",
@@ -45,7 +45,7 @@
   },
   "ava": {
     "files": [
-      "src/**/*.spec.ts"
+      "tests/*.spec.ts"
     ],
     "sources": [
       "src/**/*.ts"

View File

@@ -1,7 +1,10 @@
 import { Transform } from "stream";
-import { AccumulatorByIteratee, FlushStrategy } from "./definitions";
-import { TransformOptions } from "../baseDefinitions";
-import { batch } from "../../index";
+import {
+    AccumulatorByIteratee,
+    FlushStrategy,
+    TransformOptions,
+} from "./baseDefinitions";
+import { batch } from ".";
 
 function _accumulator<T>(
     accumulateBy: (data: T, buffer: T[], stream: Transform) => void,

View File

@@ -1,6 +0,0 @@
-export enum FlushStrategy {
-    rolling = "rolling",
-    sliding = "sliding",
-}
-
-export type AccumulatorByIteratee<T> = (event: T, bufferChunk: T) => boolean;

View File

@@ -21,3 +21,9 @@ export type JsonValue = JsonPrimitive | JsonPrimitive[];
 export interface JsonParseOptions {
     pretty: boolean;
 }
+export enum FlushStrategy {
+    rolling = "rolling",
+    sliding = "sliding",
+}
+
+export type AccumulatorByIteratee<T> = (event: T, bufferChunk: T) => boolean;
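
With this hunk, the accumulator-specific types move into the shared baseDefinitions module, so every function file can import its types from one place. A minimal sketch of how the consolidated types are consumed (the iteratee below is hypothetical, not part of the commit):

import { AccumulatorByIteratee, FlushStrategy } from "./baseDefinitions";

// Hypothetical iteratee: signal a flush whenever the incoming event no
// longer matches the chunk it is compared against.
const keyChanged: AccumulatorByIteratee<string> = (event, bufferChunk) =>
    event !== bufferChunk;

// Both flush strategies now come from the same module as the iteratee type.
const strategy: FlushStrategy = FlushStrategy.rolling;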

View File

@@ -1,5 +1,5 @@
 import { Transform } from "stream";
-import { TransformOptions } from "../baseDefinitions";
+import { TransformOptions } from "./baseDefinitions";
 /**
  * Stores chunks of data internally in array and batches when batchSize is reached.
  *

View File

@@ -1,5 +1,5 @@
 import { ChildProcess } from "child_process";
-import { duplex } from "../baseFunctions";
+import { duplex } from "./baseFunctions";
 /**
  * Return a Duplex stream from a child process' stdin and stdout
  * @param childProcess Child process from which to create duplex stream

View File

@@ -1,5 +1,5 @@
 import { Transform } from "stream";
-import { ThroughOptions } from "../baseDefinitions";
+import { ThroughOptions } from "./baseDefinitions";
 /**
  * Return a ReadWrite stream that collects streamed chunks into an array or buffer
  * @param options

View File

@@ -1,5 +1,5 @@
 import { Transform } from "stream";
-import { ThroughOptions } from "../baseDefinitions";
+import { ThroughOptions } from "./baseDefinitions";
 /**
  * Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold
  * @param predicate Predicate with which to filter stream chunks

View File

@@ -1,5 +1,5 @@
 import { Transform } from "stream";
-import { TransformOptions } from "../baseDefinitions";
+import { TransformOptions } from "./baseDefinitions";
 /**
  * Return a ReadWrite stream that flat maps streamed chunks
  * @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such)

View File

@@ -7,12 +7,9 @@ import {
     TransformOptions,
     WithEncoding,
     JsonParseOptions,
-} from "./baseDefinitions";
-import {
     FlushStrategy,
     AccumulatorByIteratee,
-} from "./accumulator/definitions";
+} from "./baseDefinitions";
 
 /**
  * Convert an array into a Readable stream of its elements

View File

@@ -1,6 +1,6 @@
 import { Transform } from "stream";
 import { StringDecoder } from "string_decoder";
-import { WithEncoding } from "../baseDefinitions";
+import { WithEncoding } from "./baseDefinitions";
 /**
  * Return a ReadWrite stream that joins streamed chunks using the given separator
  * @param separator Separator to join with

View File

@@ -1,5 +1,5 @@
 import { Transform } from "stream";
-import { TransformOptions } from "../baseDefinitions";
+import { TransformOptions } from "./baseDefinitions";
 /**
  * Return a ReadWrite stream that maps streamed chunks
  * @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such)

View File

@@ -1,6 +1,6 @@
 import { Transform } from "stream";
-import { sleep } from "../../helpers";
-import { TransformOptions } from "../baseDefinitions";
+import { sleep } from "../helpers";
+import { TransformOptions } from "./baseDefinitions";
 /**
  * Limits number of parallel processes in flight.
  * @param parallel Max number of parallel processes.

View File

@@ -1,6 +1,6 @@
 import { Transform } from "stream";
 import { StringDecoder } from "string_decoder";
-import { SerializationFormats } from "../baseDefinitions";
+import { SerializationFormats } from "./baseDefinitions";
 /**
  * Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk
  * must be a fully defined JSON string.

View File

@@ -1,7 +1,7 @@
 import { Transform } from "stream";
 import { performance } from "perf_hooks";
-import { sleep } from "../../helpers";
-import { TransformOptions } from "../baseDefinitions";
+import { sleep } from "../helpers";
+import { TransformOptions } from "./baseDefinitions";
 /**
  * Limits rate of data transferred into stream.
  * @param targetRate Desired rate in ms

View File

@@ -1,5 +1,5 @@
 import { Transform } from "stream";
-import { TransformOptions } from "../baseDefinitions";
+import { TransformOptions } from "./baseDefinitions";
 /**
  * Return a ReadWrite stream that reduces streamed chunks down to a single value and yields that
  * value

View File

@@ -1,6 +1,6 @@
 import { Transform } from "stream";
 import { StringDecoder } from "string_decoder";
-import { WithEncoding } from "../baseDefinitions";
+import { WithEncoding } from "./baseDefinitions";
 /**
  * Return a ReadWrite stream that replaces occurrences of the given string or regular expression in
  * the streamed chunks with the specified replacement string

View File

@@ -1,6 +1,6 @@
 import { Transform } from "stream";
 import { StringDecoder } from "string_decoder";
-import { WithEncoding } from "../baseDefinitions";
+import { WithEncoding } from "./baseDefinitions";
 /**
  * Return a ReadWrite stream that splits streamed chunks using the given separator
  * @param separator Separator to split by, defaulting to "\n"

View File

@@ -1,5 +1,5 @@
 import { Transform } from "stream";
-import { JsonValue, JsonParseOptions } from "../baseDefinitions";
+import { JsonValue, JsonParseOptions } from "./baseDefinitions";
 /**
  * Return a ReadWrite stream that stringifies the streamed chunks to JSON

View File

@@ -1,5 +1,5 @@
 import { Transform } from "stream";
-import { TransformOptions } from "../baseDefinitions";
+import { TransformOptions } from "./baseDefinitions";
 /**
  * Unbatches and sends individual chunks of data
  */

View File

@@ -20,4 +20,5 @@ export {
     rate,
     parallelMap,
     accumulator,
+    accumulatorBy,
 } from "./functions";
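
This addition makes accumulatorBy part of the library's public entry point. A minimal sketch of the resulting consumer-side imports (in-repo form, mirroring the updated specs):

// Both accumulator variants are now reachable from the single entry point;
// FlushStrategy still lives in the shared definitions module, as the
// updated specs show.
import { accumulator, accumulatorBy } from "../src";
import { FlushStrategy } from "../src/functions/baseDefinitions";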

View File

@@ -1,8 +1,8 @@
 import test from "ava";
 import { expect } from "chai";
 import { Readable } from "stream";
-import { accumulator, accumulatorBy } from ".";
-import { FlushStrategy } from "./definitions";
+import { accumulator, accumulatorBy } from "../src";
+import { FlushStrategy } from "../src/functions/baseDefinitions";
 
 test.cb("accumulator() rolling", t => {
     t.plan(3);
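
The spec files now live in tests/ and import the public API from ../src rather than sibling "." modules. A sketch of a spec under the new layout, assuming map()'s mapper shape from its doc comment (the test body itself is hypothetical):

import test from "ava";
import { expect } from "chai";
import { Readable } from "stream";
import { map } from "../src";

test.cb("map() example spec under the new layout", t => {
    t.plan(1);
    const source = new Readable({ objectMode: true, read: () => {} });
    source
        .pipe(map((s: string) => s.toUpperCase()))
        .on("data", (chunk: string) => {
            expect(chunk).to.equal("A");
            t.pass();
        })
        .on("end", t.end);
    source.push("a");
    source.push(null);
});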

View File

@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { batch } from ".";
+import { batch } from "../src";
 
 test.cb("batch() batches chunks together", t => {
     t.plan(3);

View File

@@ -2,7 +2,7 @@ import * as cp from "child_process";
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { child } from ".";
+import { child } from "../src";
 
 test.cb(
     "child() allows easily writing to child process stdin and reading from its stdout",

View File

@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { collect } from ".";
+import { collect } from "../src";
 
 test.cb(
     "collect() collects streamed elements into an array (object, flowing mode)",

View File

@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { concat, collect } from "../baseFunctions";
+import { concat, collect } from "../src";
 
 test.cb(
     "concat() concatenates multiple readable streams (object, flowing mode)",

View File

@@ -2,7 +2,7 @@ import * as cp from "child_process";
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { duplex } from "../baseFunctions";
+import { duplex } from "../src";
 
 test.cb(
     "duplex() combines a writable and readable stream into a ReadWrite stream",

View File

@@ -1,7 +1,7 @@
 import test from "ava";
 import { expect } from "chai";
 import { Readable } from "stream";
-import { filter } from ".";
+import { filter } from "../src";
 
 test.cb("filter() filters elements synchronously", t => {
     t.plan(2);

View File

@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { flatMap } from ".";
+import { flatMap } from "../src";
 
 test.cb("flatMap() maps elements synchronously", t => {
     t.plan(6);

View File

@@ -1,6 +1,6 @@
 import test from "ava";
 import { expect } from "chai";
-import { fromArray } from ".";
+import { fromArray } from "../src";
 
 test.cb("fromArray() streams array elements in flowing mode", t => {
     t.plan(3);

View File

@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { join } from ".";
+import { join } from "../src";
 
 test.cb("join() joins chunks using the specified separator", t => {
     t.plan(9);

View File

@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { last } from "../baseFunctions";
+import { last } from "../src";
 
 test("last() resolves to the last chunk streamed by the given readable stream", async t => {
     const source = new Readable({ objectMode: true });

View File

@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { map } from ".";
+import { map } from "../src";
 
 test.cb("map() maps elements synchronously", t => {
     t.plan(3);

View File

@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { merge } from "../baseFunctions";
+import { merge } from "../src";
 
 test.cb(
     "merge() merges multiple readable streams in chunk arrival order",

View File

@@ -2,8 +2,8 @@ import { Readable } from "stream";
 import { performance } from "perf_hooks";
 import test from "ava";
 import { expect } from "chai";
-import { parallelMap } from "../baseFunctions";
-import { sleep } from "../../helpers";
+import { parallelMap } from "../src";
+import { sleep } from "../src/helpers";
 
 test.cb("parallelMap() parallel mapping", t => {
     t.plan(6);

View File

@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { parse } from "../baseFunctions";
+import { parse } from "../src";
 
 test.cb("parse() parses the streamed elements as JSON", t => {
     t.plan(3);

View File

@@ -2,7 +2,7 @@ import { Readable } from "stream";
 import { performance } from "perf_hooks";
 import test from "ava";
 import { expect } from "chai";
-import { rate } from "../baseFunctions";
+import { rate } from "../src";
 
 test.cb("rate() sends data at desired rate", t => {
     t.plan(9);
@@ -19,7 +19,7 @@ test.cb("rate() sends data at desired rate", t => {
     let k = 0;
 
     sourceFast
-        .pipe(rate(fastRate))
+        .pipe(rate(fastRate, 1))
         .on("data", (element: string[]) => {
             const currentRate = (i / (performance.now() - start)) * 1000;
             expect(element).to.deep.equal(expectedElements[i]);
@@ -30,7 +30,7 @@ test.cb("rate() sends data at desired rate", t => {
         .on("error", t.end);
 
     sourceMed
-        .pipe(rate(medRate))
+        .pipe(rate(medRate, 1))
         .on("data", (element: string[]) => {
             const currentRate = (j / (performance.now() - start)) * 1000;
             expect(element).to.deep.equal(expectedElements[j]);
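
Note that both pipelines now pass an explicit second argument to rate(). The parameter's declaration is not part of this diff, so its semantics are an assumption here; the call shape is simply:

import { Readable } from "stream";
import { rate } from "../src";

// Second argument added by this commit; its meaning (assumed to be a
// measurement period) is not shown in the diff.
const source = Readable.from(["a", "b", "c"]);
source.pipe(rate(10, 1)).on("data", (chunk: string) => console.log(chunk));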

View File

@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { reduce } from ".";
+import { reduce } from "../src";
 
 test.cb("reduce() reduces elements synchronously", t => {
     t.plan(1);

View File

@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { replace } from ".";
+import { replace } from "../src";
 
 test.cb(
     "replace() replaces occurrences of the given string in the streamed elements with the specified " +

View File

@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { split } from ".";
+import { split } from "../src";
 
 test.cb("split() splits chunks using the default separator (\\n)", t => {
     t.plan(5);

View File

@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { stringify } from "../baseFunctions";
+import { stringify } from "../src";
 
 test.cb("stringify() stringifies the streamed elements as JSON", t => {
     t.plan(4);

View File

@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { unbatch, batch } from "../baseFunctions";
+import { unbatch, batch } from "../src";
 
 test.cb("unbatch() unbatches", t => {
     t.plan(3);

View File

@@ -9,10 +9,21 @@
     "suppressImplicitAnyIndexErrors": true,
     "outDir": "./dist",
     "module": "commonjs",
+    "baseUrl": ".",
+    "paths": {
+      "src/*": [
+        "src/*"
+      ]
+    },
     "target": "es5",
-    "lib": ["es2016", "es2019"],
+    "lib": [
+      "es2016",
+      "es2019"
+    ],
     "sourceMap": true,
     "declaration": true
   },
-  "include": ["src/**/*.ts"]
+  "include": [
+    "src/**/*.ts"
+  ]
 }
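
The new baseUrl and paths entries let the compiler resolve root-based specifiers against the project root, complementing the NODE_PATH=src that the package.json test script sets for runtime resolution. A sketch of an import this mapping would permit (the helpers module appears elsewhere in the diff; this exact import is an assumption):

// Resolved via the new "paths" mapping rather than a relative walk
// such as "../src/helpers".
import { sleep } from "src/helpers";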