Refactoring

parent 505fefeeb5
commit faac6134af
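
Summary of the changes below: the spec files move out of src/ into a top-level tests/ directory (the test script and AVA config now glob tests/*.spec.ts); the per-function subdirectories of src/functions/ are flattened into sibling modules, with the accumulator definitions (FlushStrategy, AccumulatorByIteratee) merged into functions/baseDefinitions.ts; accumulatorBy is re-exported from the package index; the rate() spec passes an explicit second argument; and tsconfig.json gains baseUrl/paths module resolution.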
@@ -22,7 +22,7 @@
         "type": "git"
     },
     "scripts": {
-        "test": "NODE_PATH=src node node_modules/.bin/ava 'src/**/**/*.spec.ts' -e",
+        "test": "NODE_PATH=src node node_modules/.bin/ava 'tests/*.spec.ts' -e",
         "test:debug": "NODE_PATH=src node inspect node_modules/ava/profile.ts",
         "test:all": "NODE_PATH=src node node_modules/.bin/ava",
         "lint": "tslint -p tsconfig.json",
@@ -45,7 +45,7 @@
     },
     "ava": {
         "files": [
-            "src/**/*.spec.ts"
+            "tests/*.spec.ts"
         ],
         "sources": [
             "src/**/*.ts"
@@ -1,7 +1,10 @@
 import { Transform } from "stream";
-import { AccumulatorByIteratee, FlushStrategy } from "./definitions";
-import { TransformOptions } from "../baseDefinitions";
-import { batch } from "../../index";
+import {
+    AccumulatorByIteratee,
+    FlushStrategy,
+    TransformOptions,
+} from "./baseDefinitions";
+import { batch } from ".";

 function _accumulator<T>(
     accumulateBy: (data: T, buffer: T[], stream: Transform) => void,
@@ -1,6 +0,0 @@
-export enum FlushStrategy {
-    rolling = "rolling",
-    sliding = "sliding",
-}
-
-export type AccumulatorByIteratee<T> = (event: T, bufferChunk: T) => boolean;
@@ -21,3 +21,9 @@ export type JsonValue = JsonPrimitive | JsonPrimitive[];
 export interface JsonParseOptions {
     pretty: boolean;
 }
+export enum FlushStrategy {
+    rolling = "rolling",
+    sliding = "sliding",
+}
+
+export type AccumulatorByIteratee<T> = (event: T, bufferChunk: T) => boolean;
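
Note: the AccumulatorByIteratee<T> shape moved above is a binary predicate over an incoming event and a buffered chunk. A minimal illustrative sketch of such an iteratee (not part of this commit; the event type and the 1000 ms window are assumptions):

    type AccumulatorByIteratee<T> = (event: T, bufferChunk: T) => boolean;

    interface TimestampedEvent {
        ts: number;
    }

    // Assumed semantics: return true while `event` should stay in the same
    // buffer as `bufferChunk`; a false result triggers a flush.
    const byTimeWindow: AccumulatorByIteratee<TimestampedEvent> = (event, bufferChunk) =>
        event.ts - bufferChunk.ts < 1000;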
@@ -1,5 +1,5 @@
 import { Transform } from "stream";
-import { TransformOptions } from "../baseDefinitions";
+import { TransformOptions } from "./baseDefinitions";
 /**
  * Stores chunks of data internally in array and batches when batchSize is reached.
  *
@@ -1,5 +1,5 @@
 import { ChildProcess } from "child_process";
-import { duplex } from "../baseFunctions";
+import { duplex } from "./baseFunctions";
 /**
  * Return a Duplex stream from a child process' stdin and stdout
  * @param childProcess Child process from which to create duplex stream
@@ -1,5 +1,5 @@
 import { Transform } from "stream";
-import { ThroughOptions } from "../baseDefinitions";
+import { ThroughOptions } from "./baseDefinitions";
 /**
  * Return a ReadWrite stream that collects streamed chunks into an array or buffer
  * @param options
@@ -1,5 +1,5 @@
 import { Transform } from "stream";
-import { ThroughOptions } from "../baseDefinitions";
+import { ThroughOptions } from "./baseDefinitions";
 /**
  * Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold
  * @param predicate Predicate with which to filter scream chunks
@@ -1,5 +1,5 @@
 import { Transform } from "stream";
-import { TransformOptions } from "../baseDefinitions";
+import { TransformOptions } from "./baseDefinitions";
 /**
  * Return a ReadWrite stream that flat maps streamed chunks
  * @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such)
@@ -7,12 +7,9 @@ import {
     TransformOptions,
     WithEncoding,
     JsonParseOptions,
-} from "./baseDefinitions";
-
-import {
     FlushStrategy,
     AccumulatorByIteratee,
-} from "./accumulator/definitions";
+} from "./baseDefinitions";

 /**
  * Convert an array into a Readable stream of its elements
@@ -1,6 +1,6 @@
 import { Transform } from "stream";
 import { StringDecoder } from "string_decoder";
-import { WithEncoding } from "../baseDefinitions";
+import { WithEncoding } from "./baseDefinitions";
 /**
  * Return a ReadWrite stream that joins streamed chunks using the given separator
  * @param separator Separator to join with
@@ -1,5 +1,5 @@
 import { Transform } from "stream";
-import { TransformOptions } from "../baseDefinitions";
+import { TransformOptions } from "./baseDefinitions";
 /**
  * Return a ReadWrite stream that maps streamed chunks
  * @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such)
@@ -1,6 +1,6 @@
 import { Transform } from "stream";
-import { sleep } from "../../helpers";
-import { TransformOptions } from "../baseDefinitions";
+import { sleep } from "../helpers";
+import { TransformOptions } from "./baseDefinitions";
 /**
  * Limits number of parallel processes in flight.
  * @param parallel Max number of parallel processes.
@@ -1,6 +1,6 @@
 import { Transform } from "stream";
 import { StringDecoder } from "string_decoder";
-import { SerializationFormats } from "../baseDefinitions";
+import { SerializationFormats } from "./baseDefinitions";
 /**
  * Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk
  * must be a fully defined JSON string.
@@ -1,7 +1,7 @@
 import { Transform } from "stream";
 import { performance } from "perf_hooks";
-import { sleep } from "../../helpers";
-import { TransformOptions } from "../baseDefinitions";
+import { sleep } from "../helpers";
+import { TransformOptions } from "./baseDefinitions";
 /**
  * Limits date of data transferred into stream.
  * @param targetRate Desired rate in ms
@@ -1,5 +1,5 @@
 import { Transform } from "stream";
-import { TransformOptions } from "../baseDefinitions";
+import { TransformOptions } from "./baseDefinitions";
 /**
  * Return a ReadWrite stream that reduces streamed chunks down to a single value and yield that
  * value
@@ -1,6 +1,6 @@
 import { Transform } from "stream";
 import { StringDecoder } from "string_decoder";
-import { WithEncoding } from "../baseDefinitions";
+import { WithEncoding } from "./baseDefinitions";
 /**
  * Return a ReadWrite stream that replaces occurrences of the given string or regular expression in
  * the streamed chunks with the specified replacement string
@@ -1,6 +1,6 @@
 import { Transform } from "stream";
 import { StringDecoder } from "string_decoder";
-import { WithEncoding } from "../baseDefinitions";
+import { WithEncoding } from "./baseDefinitions";
 /**
  * Return a ReadWrite stream that splits streamed chunks using the given separator
  * @param separator Separator to split by, defaulting to "\n"
@@ -1,5 +1,5 @@
 import { Transform } from "stream";
-import { JsonValue, JsonParseOptions } from "../baseDefinitions";
+import { JsonValue, JsonParseOptions } from "./baseDefinitions";

 /**
  * Return a ReadWrite stream that stringifies the streamed chunks to JSON
@@ -1,5 +1,5 @@
 import { Transform } from "stream";
-import { TransformOptions } from "../baseDefinitions";
+import { TransformOptions } from "./baseDefinitions";
 /**
  * Unbatches and sends individual chunks of data
  */
@@ -20,4 +20,5 @@ export {
     rate,
     parallelMap,
     accumulator,
+    accumulatorBy,
 } from "./functions";
@@ -1,8 +1,8 @@
 import test from "ava";
 import { expect } from "chai";
 import { Readable } from "stream";
-import { accumulator, accumulatorBy } from ".";
-import { FlushStrategy } from "./definitions";
+import { accumulator, accumulatorBy } from "../src";
+import { FlushStrategy } from "../src/functions/baseDefinitions";

 test.cb("accumulator() rolling", t => {
     t.plan(3);
@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { batch } from ".";
+import { batch } from "../src";

 test.cb("batch() batches chunks together", t => {
     t.plan(3);
@@ -2,7 +2,7 @@ import * as cp from "child_process";
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { child } from ".";
+import { child } from "../src";

 test.cb(
     "child() allows easily writing to child process stdin and reading from its stdout",
@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { collect } from ".";
+import { collect } from "../src";

 test.cb(
     "collect() collects streamed elements into an array (object, flowing mode)",
@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { concat, collect } from "../baseFunctions";
+import { concat, collect } from "../src";

 test.cb(
     "concat() concatenates multiple readable streams (object, flowing mode)",
@@ -2,7 +2,7 @@ import * as cp from "child_process";
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { duplex } from "../baseFunctions";
+import { duplex } from "../src";

 test.cb(
     "duplex() combines a writable and readable stream into a ReadWrite stream",
@@ -1,7 +1,7 @@
 import test from "ava";
 import { expect } from "chai";
 import { Readable } from "stream";
-import { filter } from ".";
+import { filter } from "../src";

 test.cb("filter() filters elements synchronously", t => {
     t.plan(2);
@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { flatMap } from ".";
+import { flatMap } from "../src";

 test.cb("flatMap() maps elements synchronously", t => {
     t.plan(6);
@@ -1,6 +1,6 @@
 import test from "ava";
 import { expect } from "chai";
-import { fromArray } from ".";
+import { fromArray } from "../src";

 test.cb("fromArray() streams array elements in flowing mode", t => {
     t.plan(3);
@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { join } from ".";
+import { join } from "../src";

 test.cb("join() joins chunks using the specified separator", t => {
     t.plan(9);
@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { last } from "../baseFunctions";
+import { last } from "../src";

 test("last() resolves to the last chunk streamed by the given readable stream", async t => {
     const source = new Readable({ objectMode: true });
@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { map } from ".";
+import { map } from "../src";

 test.cb("map() maps elements synchronously", t => {
     t.plan(3);
@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { merge } from "../baseFunctions";
+import { merge } from "../src";

 test.cb(
     "merge() merges multiple readable streams in chunk arrival order",
@@ -2,8 +2,8 @@ import { Readable } from "stream";
 import { performance } from "perf_hooks";
 import test from "ava";
 import { expect } from "chai";
-import { parallelMap } from "../baseFunctions";
-import { sleep } from "../../helpers";
+import { parallelMap } from "../src";
+import { sleep } from "../src/helpers";

 test.cb("parallelMap() parallel mapping", t => {
     t.plan(6);
@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { parse } from "../baseFunctions";
+import { parse } from "../src";

 test.cb("parse() parses the streamed elements as JSON", t => {
     t.plan(3);
@@ -2,7 +2,7 @@ import { Readable } from "stream";
 import { performance } from "perf_hooks";
 import test from "ava";
 import { expect } from "chai";
-import { rate } from "../baseFunctions";
+import { rate } from "../src";

 test.cb("rate() sends data at desired rate", t => {
     t.plan(9);
@@ -19,7 +19,7 @@ test.cb("rate() sends data at desired rate", t => {
     let k = 0;

     sourceFast
-        .pipe(rate(fastRate))
+        .pipe(rate(fastRate, 1))
         .on("data", (element: string[]) => {
             const currentRate = (i / (performance.now() - start)) * 1000;
             expect(element).to.deep.equal(expectedElements[i]);
@@ -30,7 +30,7 @@ test.cb("rate() sends data at desired rate", t => {
         .on("error", t.end);

     sourceMed
-        .pipe(rate(medRate))
+        .pipe(rate(medRate, 1))
         .on("data", (element: string[]) => {
             const currentRate = (j / (performance.now() - start)) * 1000;
             expect(element).to.deep.equal(expectedElements[j]);
@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { reduce } from ".";
+import { reduce } from "../src";

 test.cb("reduce() reduces elements synchronously", t => {
     t.plan(1);
@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { replace } from ".";
+import { replace } from "../src";

 test.cb(
     "replace() replaces occurrences of the given string in the streamed elements with the specified " +
@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { split } from ".";
+import { split } from "../src";

 test.cb("split() splits chunks using the default separator (\\n)", t => {
     t.plan(5);
@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { stringify } from "../baseFunctions";
+import { stringify } from "../src";

 test.cb("stringify() stringifies the streamed elements as JSON", t => {
     t.plan(4);
@@ -1,7 +1,7 @@
 import { Readable } from "stream";
 import test from "ava";
 import { expect } from "chai";
-import { unbatch, batch } from "../baseFunctions";
+import { unbatch, batch } from "../src";

 test.cb("unbatch() unbatches", t => {
     t.plan(3);
@@ -1,18 +1,29 @@
 {
     "compilerOptions": {
         "noImplicitAny": true,
         "strictNullChecks": true,
         "noImplicitReturns": true,
         "noUnusedLocals": false,
         "noImplicitThis": true,
         "forceConsistentCasingInFileNames": true,
         "suppressImplicitAnyIndexErrors": true,
         "outDir": "./dist",
         "module": "commonjs",
+        "baseUrl": ".",
+        "paths": {
+            "src/*": [
+                "src/*"
+            ]
+        },
         "target": "es5",
-        "lib": ["es2016", "es2019"],
+        "lib": [
+            "es2016",
+            "es2019"
+        ],
         "sourceMap": true,
         "declaration": true
     },
-    "include": ["src/**/*.ts"]
+    "include": [
+        "src/**/*.ts"
+    ]
 }
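
Note on the new compilerOptions: "baseUrl": "." with the "src/*" mapping lets the compiler resolve src/-prefixed import specifiers against the project root instead of relative to the importing file; tsc's paths affect type resolution only, so the runtime still needs its own mechanism (e.g. the NODE_PATH=src prefix already set in the test script). A minimal sketch, with an illustrative file name:

    // tests/example.spec.ts (illustrative)
    // Without the mapping, this import would need a relative path such as
    // "../src/functions/baseDefinitions".
    import { FlushStrategy } from "src/functions/baseDefinitions";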