Improve API reference in README

Sami Turcotte 2018-12-02 22:54:29 -05:00
parent 95b4235daa
commit 26bc652833
2 changed files with 139 additions and 129 deletions

README.md

@@ -1,151 +1,143 @@
# Mhysa

**Stream utils for Node.js**

## Installation

```sh
yarn add mhysa
```

## fromArray(array)

Convert an array into a `Readable` stream of its elements

| Param | Type | Description |
| --- | --- | --- |
| `array` | `T[]` | Array of elements to stream |
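
For example, a minimal sketch (string elements and default stream options assumed):

```js
const Mhysa = require("mhysa");

// Stream the elements of an array and print each chunk
Mhysa.fromArray(["a", "b", "c"])
    .on("data", chunk => console.log(chunk.toString())); // a, b, c
```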
## map(mapper, options)

Return a `ReadWrite` stream that maps streamed chunks

| Param | Type | Description |
| --- | --- | --- |
| `mapper` | `(chunk: T, encoding: string) => R` | Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such) |
| `options` | `object` | |
| `options.readableObjectMode` | `boolean` | Whether this stream should behave as a readable stream of objects |
| `options.writableObjectMode` | `boolean` | Whether this stream should behave as a writable stream of objects |
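
A sketch of a simple mapper; chunks are handled as strings here, and the promise-returning form described above should work the same way:

```js
const Mhysa = require("mhysa");

// Uppercase every chunk flowing through the stream
Mhysa.fromArray(["a", "b", "c"])
    .pipe(Mhysa.map(s => s.toString().toUpperCase()))
    .on("data", chunk => console.log(chunk.toString())); // A, B, C
```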
## flatMap(mapper, options)

Return a `ReadWrite` stream that flat maps streamed chunks

| Param | Type | Description |
| --- | --- | --- |
| `mapper` | `(chunk: T, encoding: string) => R[]` | Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such) |
| `options` | `object` | |
| `options.readableObjectMode` | `boolean` | Whether this stream should behave as a readable stream of objects |
| `options.writableObjectMode` | `boolean` | Whether this stream should behave as a writable stream of objects |
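
A sketch in the spirit of the walkthrough formerly in this README, expanding each chunk into two:

```js
const Mhysa = require("mhysa");

// Emit each chunk followed by its uppercased counterpart
Mhysa.fromArray(["a", "b"])
    .pipe(Mhysa.flatMap(s => [s.toString(), s.toString().toUpperCase()]))
    .on("data", chunk => console.log(chunk.toString())); // a, A, b, B
```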
## filter(predicate, options)

Return a `ReadWrite` stream that filters out streamed chunks for which the predicate does not hold

| Param | Type | Description |
| --- | --- | --- |
| `predicate` | `(chunk: T, encoding: string) => boolean` | Predicate with which to filter streamed chunks |
| `options` | `object` | |
| `options.objectMode` | `boolean` | Whether this stream should behave as a stream of objects |
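
For example (a sketch, string chunks assumed):

```js
const Mhysa = require("mhysa");

// Drop chunks that fail the predicate
Mhysa.fromArray(["ham", "spam", "eggs"])
    .pipe(Mhysa.filter(s => s.toString() !== "spam"))
    .on("data", chunk => console.log(chunk.toString())); // ham, eggs
```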
## reduce(iteratee, initialValue, options)

Return a `ReadWrite` stream that reduces streamed chunks down to a single value and yields that
value

| Param | Type | Description |
| --- | --- | --- |
| `iteratee` | `(previousValue: R, chunk: T, encoding: string) => R` | Reducer function to apply on each streamed chunk |
| `initialValue` | `T` | Initial value |
| `options` | `object` | |
| `options.readableObjectMode` | `boolean` | Whether this stream should behave as a readable stream of objects |
| `options.writableObjectMode` | `boolean` | Whether this stream should behave as a writable stream of objects |
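
A sketch, assuming the reduced value is emitted as a single chunk once the input ends (hence the object-mode options):

```js
const Mhysa = require("mhysa");

// Sum the lengths of all streamed chunks
Mhysa.fromArray(["a", "bb", "ccc"])
    .pipe(
        Mhysa.reduce((total, chunk) => total + chunk.length, 0, {
            readableObjectMode: true,
            writableObjectMode: true,
        }),
    )
    .on("data", total => console.log(total)); // 6
```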
## split(separator)

Return a `ReadWrite` stream that splits streamed chunks using the given separator

| Param | Type | Description |
| --- | --- | --- |
| `separator` | `string` | Separator to split by, defaulting to `"\n"` |
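
A sketch splitting one chunk into three:

```js
const Mhysa = require("mhysa");

Mhysa.fromArray(["a\nb\nc"])
    .pipe(Mhysa.split("\n"))
    .on("data", chunk => console.log(chunk.toString())); // a, b, c
```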
## join(separator)

Return a `ReadWrite` stream that joins streamed chunks using the given separator

| Param | Type | Description |
| --- | --- | --- |
| `separator` | `string` | Separator to join with |
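
A sketch, adapted from the example formerly in this README:

```js
const Mhysa = require("mhysa");

Mhysa.fromArray(["d", "e"])
    .pipe(Mhysa.join("-"))
    .on("data", chunk => console.log(chunk.toString())); // d-e
```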
## replace(searchValue, replaceValue)

Return a `ReadWrite` stream that replaces occurrences of the given string or regular expression in
the streamed chunks with the specified replacement string

| Param | Type | Description |
| --- | --- | --- |
| `searchValue` | `string \| RegExp` | Search string to use |
| `replaceValue` | `string` | Replacement string to use |
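
A sketch replacing a substring in each chunk:

```js
const Mhysa = require("mhysa");

Mhysa.fromArray(["hello world"])
    .pipe(Mhysa.replace("world", "there"))
    .on("data", chunk => console.log(chunk.toString())); // hello there
```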
## parse()

Return a `ReadWrite` stream that parses the streamed chunks as JSON

## stringify()

Return a `ReadWrite` stream that stringifies the streamed chunks to JSON
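
A round-trip sketch, assuming `parse()` emits plain objects and `stringify()` accepts them:

```js
const Mhysa = require("mhysa");

Mhysa.fromArray(['{ "ok": true }'])
    .pipe(Mhysa.parse())
    .pipe(Mhysa.stringify())
    .on("data", chunk => console.log(chunk.toString())); // JSON for the parsed object
```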
## collect(options)

Return a `ReadWrite` stream that collects streamed chunks into an array or buffer

| Param | Type | Description |
| --- | --- | --- |
| `options` | `object` | |
| `options.objectMode` | `boolean` | Whether this stream should behave as a stream of objects |
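
A sketch, adapted from the example formerly in this README; the collected array arrives as a single `data` event:

```js
const Mhysa = require("mhysa");

Mhysa.fromArray(["a", "b", "c"])
    .pipe(Mhysa.collect({ objectMode: true }))
    .on("data", collected => console.log(collected)); // [ 'a', 'b', 'c' ]
```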
## concat(streams)

Return a `Readable` stream of readable streams concatenated together

| Param | Type | Description |
| --- | --- | --- |
| `streams` | `...Readable[]` | Readable streams to concatenate |
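
A sketch concatenating two short streams:

```js
const Mhysa = require("mhysa");

Mhysa.concat(
    Mhysa.fromArray(["a", "b"]),
    Mhysa.fromArray(["c"]),
).on("data", chunk => console.log(chunk.toString())); // a, b, c
```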
## merge(streams)

Return a `Readable` stream of readable streams merged together in chunk arrival order

| Param | Type | Description |
| --- | --- | --- |
| `streams` | `...Readable[]` | Readable streams to merge |
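
A sketch; the exact interleaving depends on chunk arrival order:

```js
const Mhysa = require("mhysa");

Mhysa.merge(
    Mhysa.fromArray(["a", "b"]),
    Mhysa.fromArray(["1", "2"]),
).on("data", chunk => console.log(chunk.toString())); // a, 1, b, 2 (or similar)
```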
## duplex(writable, readable)

Return a `Duplex` stream from a writable stream that is assumed to somehow, when written to,
cause the given readable stream to yield chunks

| Param | Type | Description |
| --- | --- | --- |
| `writable` | `Writable` | Writable stream assumed to cause the readable stream to yield chunks when written to |
| `readable` | `Readable` | Readable stream assumed to yield chunks when the writable stream is written to |
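
A sketch using a child process' stdin and stdout as the writable/readable pair (assumes a `cat` binary is available):

```js
const { spawn } = require("child_process");
const Mhysa = require("mhysa");

// Whatever is written to cat's stdin comes back out of its stdout
const cat = spawn("cat");
Mhysa.fromArray(["a", "b", "c"])
    .pipe(Mhysa.duplex(cat.stdin, cat.stdout))
    .on("data", chunk => console.log(chunk.toString())); // a, b, c (echoed back)
```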
## child(childProcess)

Return a `Duplex` stream from a child process' stdin and stdout

| Param | Type | Description |
| --- | --- | --- |
| `childProcess` | `ChildProcess` | Child process from which to create duplex stream |
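
The same idea with less wiring (again assuming a `cat` binary):

```js
const { spawn } = require("child_process");
const Mhysa = require("mhysa");

Mhysa.fromArray(["hello\n"])
    .pipe(Mhysa.child(spawn("cat")))
    .on("data", chunk => console.log(chunk.toString())); // hello
```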
## last(readable)

Return a `Promise` resolving to the last streamed chunk of the given readable stream, after it has
ended

| Param | Type | Description |
| --- | --- | --- |
| `readable` | `Readable` | Readable stream to wait on |
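
A sketch awaiting the final chunk:

```js
const Mhysa = require("mhysa");

async function main() {
    const lastChunk = await Mhysa.last(Mhysa.fromArray(["a", "b", "c"]));
    console.log(lastChunk.toString()); // c
}
main();
```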


@@ -10,7 +10,7 @@ export interface TransformOptions {
}

/**
 * Convert an array into a Readable stream of its elements
 * @param array Array of elements to stream
 */
export function fromArray(array: any[]): NodeJS.ReadableStream {

@@ -101,6 +101,12 @@ export function flatMap<T, R>(
    });
}

/**
 * Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold
 * @param predicate Predicate with which to filter streamed chunks
 * @param options
 * @param options.objectMode Whether this stream should behave as a stream of objects
 */
export function filter<T>(
    predicate:
        | ((chunk: T, encoding: string) => boolean)

@@ -135,6 +141,15 @@ export function filter<T>(
    });
}

/**
 * Return a ReadWrite stream that reduces streamed chunks down to a single value and yields that
 * value
 * @param iteratee Reducer function to apply on each streamed chunk
 * @param initialValue Initial value
 * @param options
 * @param options.readableObjectMode Whether this stream should behave as a readable stream of objects
 * @param options.writableObjectMode Whether this stream should behave as a writable stream of objects
 */
export function reduce<T, R>(
    iteratee:
        | ((previousValue: R, chunk: T, encoding: string) => R)

@@ -255,11 +270,13 @@ export function parse(): NodeJS.ReadWriteStream {
        },
    });
}

type JsonPrimitive = string | number | object;
type JsonValue = JsonPrimitive | JsonPrimitive[];

interface JsonParseOptions {
    pretty: boolean;
}

/**
 * Return a ReadWrite stream that stringifies the streamed chunks to JSON
 */

@@ -306,7 +323,7 @@ export function collect(
}

/**
 * Return a Readable stream of readable streams concatenated together
 * @param streams Readable streams to concatenate
 */
export function concat(

@@ -348,7 +365,7 @@ export function concat(
}

/**
 * Return a Readable stream of readable streams merged together in chunk arrival order
 * @param streams Readable streams to merge
 */
export function merge(

@@ -427,8 +444,9 @@ export function child(childProcess: ChildProcess) {
}

/**
 * Return a Promise resolving to the last streamed chunk of the given readable stream, after it has
 * ended
 * @param readable Readable stream to wait on
 */
export function last<T>(readable: Readable): Promise<T | null> {
    let lastChunk: T | null = null;