= (source: AsyncIterable) => Promise;
type PipelineDestination, P> = S extends PipelineTransformSource
? NodeJS.WritableStream | PipelineDestinationIterableFunction | PipelineDestinationPromiseFunction
: never;
type PipelineCallback> = S extends PipelineDestinationPromiseFunction
? (err: NodeJS.ErrnoException | null, value: P) => void
: (err: NodeJS.ErrnoException | null) => void;
type PipelinePromise> = S extends PipelineDestinationPromiseFunction ? Promise : Promise;
// Options object accepted as the trailing argument of the promise-based `pipeline` APIs.
interface PipelineOptions {
    // When this signal is aborted, `destroy` is called on the underlying pipeline
    // (see the `pipeline` JSDoc below: the pipeline rejects with an `AbortError`).
    signal: AbortSignal;
}
/**
* A module method to pipe between streams and generators forwarding errors and
* properly cleaning up and provide a callback when the pipeline is complete.
*
* ```js
* const { pipeline } = require('stream');
* const fs = require('fs');
* const zlib = require('zlib');
*
* // Use the pipeline API to easily pipe a series of streams
* // together and get notified when the pipeline is fully done.
*
* // A pipeline to gzip a potentially huge tar file efficiently:
*
* pipeline(
* fs.createReadStream('archive.tar'),
* zlib.createGzip(),
* fs.createWriteStream('archive.tar.gz'),
* (err) => {
* if (err) {
* console.error('Pipeline failed.', err);
* } else {
* console.log('Pipeline succeeded.');
* }
* }
* );
* ```
*
* The `pipeline` API provides a promise version, which can also
 * receive an options argument as the last parameter with a
 * `signal` `AbortSignal` property. When the signal is aborted,
 * `destroy` will be called on the underlying pipeline, with an
 * `AbortError`.
*
* ```js
* const { pipeline } = require('stream/promises');
*
* async function run() {
* await pipeline(
* fs.createReadStream('archive.tar'),
* zlib.createGzip(),
* fs.createWriteStream('archive.tar.gz')
* );
* console.log('Pipeline succeeded.');
* }
*
* run().catch(console.error);
* ```
*
* To use an `AbortSignal`, pass it inside an options object,
* as the last argument:
*
* ```js
* const { pipeline } = require('stream/promises');
*
* async function run() {
* const ac = new AbortController();
* const signal = ac.signal;
*
* setTimeout(() => ac.abort(), 1);
* await pipeline(
* fs.createReadStream('archive.tar'),
* zlib.createGzip(),
* fs.createWriteStream('archive.tar.gz'),
* { signal },
* );
* }
*
* run().catch(console.error); // AbortError
* ```
*
* The `pipeline` API also supports async generators:
*
* ```js
* const { pipeline } = require('stream/promises');
* const fs = require('fs');
*
* async function run() {
* await pipeline(
* fs.createReadStream('lowercase.txt'),
* async function* (source, signal) {
* source.setEncoding('utf8'); // Work with strings rather than `Buffer`s.
* for await (const chunk of source) {
* yield await processChunk(chunk, { signal });
* }
* },
* fs.createWriteStream('uppercase.txt')
* );
* console.log('Pipeline succeeded.');
* }
*
* run().catch(console.error);
* ```
*
* Remember to handle the `signal` argument passed into the async generator.
* Especially in the case where the async generator is the source for the
* pipeline (i.e. first argument) or the pipeline will never complete.
*
* ```js
* const { pipeline } = require('stream/promises');
* const fs = require('fs');
*
* async function run() {
* await pipeline(
* async function * (signal) {
* await someLongRunningfn({ signal });
* yield 'asd';
* },
* fs.createWriteStream('uppercase.txt')
* );
* console.log('Pipeline succeeded.');
* }
*
* run().catch(console.error);
* ```
*
* `stream.pipeline()` will call `stream.destroy(err)` on all streams except:
*
* * `Readable` streams which have emitted `'end'` or `'close'`.
* * `Writable` streams which have emitted `'finish'` or `'close'`.
*
* `stream.pipeline()` leaves dangling event listeners on the streams
* after the `callback` has been invoked. In the case of reuse of streams after
* failure, this can cause event listener leaks and swallowed errors.
* @since v10.0.0
* @param callback Called when the pipeline is fully done.
*/
function pipeline, B extends PipelineDestination>(
source: A,
destination: B,
callback?: PipelineCallback
): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
function pipeline, T1 extends PipelineTransform, B extends PipelineDestination>(
source: A,
transform1: T1,
destination: B,
callback?: PipelineCallback
): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
function pipeline, T1 extends PipelineTransform, T2 extends PipelineTransform, B extends PipelineDestination>(
source: A,
transform1: T1,
transform2: T2,
destination: B,
callback?: PipelineCallback
): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
// Overload: source -> 3 transforms -> destination.
function pipeline<
    A extends PipelineSource<any>,
    T1 extends PipelineTransform<A, any>,
    T2 extends PipelineTransform<T1, any>,
    T3 extends PipelineTransform<T2, any>,
    B extends PipelineDestination<T3, any>
>(source: A, transform1: T1, transform2: T2, transform3: T3, destination: B, callback?: PipelineCallback<B>): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
// Overload: source -> 4 transforms -> destination.
function pipeline<
    A extends PipelineSource<any>,
    T1 extends PipelineTransform<A, any>,
    T2 extends PipelineTransform<T1, any>,
    T3 extends PipelineTransform<T2, any>,
    T4 extends PipelineTransform<T3, any>,
    B extends PipelineDestination<T4, any>
>(source: A, transform1: T1, transform2: T2, transform3: T3, transform4: T4, destination: B, callback?: PipelineCallback<B>): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
// Overload: untyped array form — an array of streams piped in order.
// The stripped `ReadonlyArray` element type is restored.
function pipeline(
    streams: ReadonlyArray<NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream>,
    callback?: (err: NodeJS.ErrnoException | null) => void
): NodeJS.WritableStream;
function pipeline(
stream1: NodeJS.ReadableStream,
stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream,
...streams: Array void)>
): NodeJS.WritableStream;
namespace pipeline {
function __promisify__, B extends PipelineDestination>(source: A, destination: B, options?: PipelineOptions): PipelinePromise;
function __promisify__, T1 extends PipelineTransform, B extends PipelineDestination>(
source: A,
transform1: T1,
destination: B,
options?: PipelineOptions
): PipelinePromise;
function __promisify__, T1 extends PipelineTransform, T2 extends PipelineTransform, B extends PipelineDestination>(
source: A,
transform1: T1,
transform2: T2,
destination: B,
options?: PipelineOptions
): PipelinePromise;
function __promisify__<
A extends PipelineSource,
T1 extends PipelineTransform,
T2 extends PipelineTransform,
T3 extends PipelineTransform,
B extends PipelineDestination
>(source: A, transform1: T1, transform2: T2, transform3: T3, destination: B, options?: PipelineOptions): PipelinePromise;
function __promisify__<
A extends PipelineSource,
T1 extends PipelineTransform,
T2 extends PipelineTransform,
T3 extends PipelineTransform,
T4 extends PipelineTransform,
B extends PipelineDestination
>(source: A, transform1: T1, transform2: T2, transform3: T3, transform4: T4, destination: B, options?: PipelineOptions): PipelinePromise;
function __promisify__(streams: ReadonlyArray, options?: PipelineOptions): Promise;
function __promisify__(
stream1: NodeJS.ReadableStream,
stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream,
...streams: Array
): Promise;
}
// Handle with ref/unref semantics exposed by the stream module.
// NOTE(review): only the method shapes are visible here — the concrete semantics
// (what resource is closed/referenced) come from the Node.js runtime, not this chunk.
interface Pipe {
    close(): void;
    hasRef(): boolean;
    ref(): void;
    unref(): void;
}
// Re-exports typed from imports declared elsewhere in this file — presumably the
// 'stream/promises' and 'stream/consumers' submodules; verify against the file header.
const promises: typeof streamPromises;
const consumers: typeof streamConsumers;
}
export = internal;
}
// The `node:`-prefixed specifier resolves to the same module as bare 'stream'.
declare module 'node:stream' {
    import stream = require('stream');
    export = stream;
}