From fef0c4e8eabe2c2ceddbe356aecdb7d24b1aef02 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Sun, 1 Mar 2026 09:14:08 -0800 Subject: [PATCH 01/42] stream: prototype for new stream implementation --- .../fs/bench-filehandle-pull-vs-webstream.js | 196 +++ doc/api/fs.md | 106 ++ doc/api/index.md | 1 + doc/api/stream_new.md | 1215 +++++++++++++++++ lib/internal/fs/promises.js | 256 ++++ lib/internal/streams/new/broadcast.js | 587 ++++++++ lib/internal/streams/new/consumers.js | 505 +++++++ lib/internal/streams/new/duplex.js | 79 ++ lib/internal/streams/new/from.js | 575 ++++++++ lib/internal/streams/new/pull.js | 710 ++++++++++ lib/internal/streams/new/push.js | 519 +++++++ lib/internal/streams/new/share.js | 636 +++++++++ lib/internal/streams/new/transform.js | 430 ++++++ lib/internal/streams/new/types.js | 59 + lib/internal/streams/new/utils.js | 106 ++ lib/stream/new.js | 208 +++ .../test-fs-promises-file-handle-pull.js | 254 ++++ .../test-fs-promises-file-handle-writer.js | 473 +++++++ test/parallel/test-stream-new-broadcast.js | 276 ++++ test/parallel/test-stream-new-consumers.js | 319 +++++ test/parallel/test-stream-new-duplex.js | 152 +++ test/parallel/test-stream-new-from.js | 223 +++ test/parallel/test-stream-new-namespace.js | 209 +++ test/parallel/test-stream-new-pull.js | 213 +++ test/parallel/test-stream-new-push.js | 220 +++ test/parallel/test-stream-new-share.js | 240 ++++ test/parallel/test-stream-new-transform.js | 395 ++++++ 27 files changed, 9162 insertions(+) create mode 100644 benchmark/fs/bench-filehandle-pull-vs-webstream.js create mode 100644 doc/api/stream_new.md create mode 100644 lib/internal/streams/new/broadcast.js create mode 100644 lib/internal/streams/new/consumers.js create mode 100644 lib/internal/streams/new/duplex.js create mode 100644 lib/internal/streams/new/from.js create mode 100644 lib/internal/streams/new/pull.js create mode 100644 lib/internal/streams/new/push.js create mode 100644 lib/internal/streams/new/share.js create 
mode 100644 lib/internal/streams/new/transform.js create mode 100644 lib/internal/streams/new/types.js create mode 100644 lib/internal/streams/new/utils.js create mode 100644 lib/stream/new.js create mode 100644 test/parallel/test-fs-promises-file-handle-pull.js create mode 100644 test/parallel/test-fs-promises-file-handle-writer.js create mode 100644 test/parallel/test-stream-new-broadcast.js create mode 100644 test/parallel/test-stream-new-consumers.js create mode 100644 test/parallel/test-stream-new-duplex.js create mode 100644 test/parallel/test-stream-new-from.js create mode 100644 test/parallel/test-stream-new-namespace.js create mode 100644 test/parallel/test-stream-new-pull.js create mode 100644 test/parallel/test-stream-new-push.js create mode 100644 test/parallel/test-stream-new-share.js create mode 100644 test/parallel/test-stream-new-transform.js diff --git a/benchmark/fs/bench-filehandle-pull-vs-webstream.js b/benchmark/fs/bench-filehandle-pull-vs-webstream.js new file mode 100644 index 00000000000000..5c81fe53b8bc50 --- /dev/null +++ b/benchmark/fs/bench-filehandle-pull-vs-webstream.js @@ -0,0 +1,196 @@ +// Compare FileHandle.createReadStream() vs readableWebStream() vs pull() +// reading a large file through two transforms: uppercase then gzip compress. 
+'use strict'; + +const common = require('../common.js'); +const fs = require('fs'); +const zlib = require('zlib'); +const { Transform, Writable, pipeline } = require('stream'); + +const tmpdir = require('../../test/common/tmpdir'); +tmpdir.refresh(); +const filename = tmpdir.resolve(`.removeme-benchmark-garbage-${process.pid}`); + +const bench = common.createBenchmark(main, { + api: ['classic', 'webstream', 'pull'], + filesize: [1024 * 1024, 16 * 1024 * 1024, 64 * 1024 * 1024], + n: [5], +}); + +function main({ api, filesize, n }) { + // Create the fixture file with repeating lowercase ASCII + const chunk = Buffer.alloc(Math.min(filesize, 64 * 1024), 'abcdefghij'); + const fd = fs.openSync(filename, 'w'); + let remaining = filesize; + while (remaining > 0) { + const toWrite = Math.min(remaining, chunk.length); + fs.writeSync(fd, chunk, 0, toWrite); + remaining -= toWrite; + } + fs.closeSync(fd); + + if (api === 'classic') { + benchClassic(n, filesize).then(() => cleanup()); + } else if (api === 'webstream') { + benchWebStream(n, filesize).then(() => cleanup()); + } else { + benchPull(n, filesize).then(() => cleanup()); + } +} + +function cleanup() { + try { fs.unlinkSync(filename); } catch { /* ignore */ } +} + +// --------------------------------------------------------------------------- +// Classic streams path: createReadStream -> Transform (upper) -> createGzip +// --------------------------------------------------------------------------- +async function benchClassic(n, filesize) { + // Warm up + await runClassic(); + + bench.start(); + let totalBytes = 0; + for (let i = 0; i < n; i++) { + totalBytes += await runClassic(); + } + bench.end(totalBytes / (1024 * 1024)); +} + +function runClassic() { + return new Promise((resolve, reject) => { + const rs = fs.createReadStream(filename); + + // Transform 1: uppercase + const upper = new Transform({ + transform(chunk, encoding, callback) { + const buf = Buffer.allocUnsafe(chunk.length); + for (let i = 0; i < 
chunk.length; i++) { + const b = chunk[i]; + buf[i] = (b >= 0x61 && b <= 0x7a) ? b - 0x20 : b; + } + callback(null, buf); + }, + }); + + // Transform 2: gzip + const gz = zlib.createGzip(); + + // Sink: count compressed bytes + let totalBytes = 0; + const sink = new Writable({ + write(chunk, encoding, callback) { + totalBytes += chunk.length; + callback(); + }, + }); + + pipeline(rs, upper, gz, sink, (err) => { + if (err) reject(err); + else resolve(totalBytes); + }); + }); +} + +// --------------------------------------------------------------------------- +// WebStream path: readableWebStream -> TransformStream (upper) -> CompressionStream +// --------------------------------------------------------------------------- +async function benchWebStream(n, filesize) { + // Warm up + await runWebStream(); + + bench.start(); + let totalBytes = 0; + for (let i = 0; i < n; i++) { + totalBytes += await runWebStream(); + } + bench.end(totalBytes / (1024 * 1024)); +} + +async function runWebStream() { + const fh = await fs.promises.open(filename, 'r'); + try { + const rs = fh.readableWebStream(); + + // Transform 1: uppercase + const upper = new TransformStream({ + transform(chunk, controller) { + const buf = new Uint8Array(chunk.length); + for (let i = 0; i < chunk.length; i++) { + const b = chunk[i]; + // a-z (0x61-0x7a) -> A-Z (0x41-0x5a) + buf[i] = (b >= 0x61 && b <= 0x7a) ? 
b - 0x20 : b; + } + controller.enqueue(buf); + }, + }); + + // Transform 2: gzip via CompressionStream + const compress = new CompressionStream('gzip'); + + const output = rs.pipeThrough(upper).pipeThrough(compress); + const reader = output.getReader(); + + let totalBytes = 0; + while (true) { + const { done, value } = await reader.read(); + if (done) break; + totalBytes += value.byteLength; + } + return totalBytes; + } finally { + await fh.close(); + } +} + +// --------------------------------------------------------------------------- +// New streams path: pull() with uppercase transform + gzip transform +// --------------------------------------------------------------------------- +async function benchPull(n, filesize) { + const { pull, compressGzip } = require('stream/new'); + + // Warm up + await runPull(pull, compressGzip); + + bench.start(); + let totalBytes = 0; + for (let i = 0; i < n; i++) { + totalBytes += await runPull(pull, compressGzip); + } + bench.end(totalBytes / (1024 * 1024)); +} + +async function runPull(pull, compressGzip) { + const fh = await fs.promises.open(filename, 'r'); + try { + // Stateless transform: uppercase each chunk in the batch + const upper = (chunks) => { + if (chunks === null) return null; + const out = new Array(chunks.length); + for (let j = 0; j < chunks.length; j++) { + const src = chunks[j]; + const buf = new Uint8Array(src.length); + for (let i = 0; i < src.length; i++) { + const b = src[i]; + buf[i] = (b >= 0x61 && b <= 0x7a) ? b - 0x20 : b; + } + out[j] = buf; + } + return out; + }; + + const readable = fh.pull(upper, compressGzip()); + + // Count bytes symmetrically with the classic path (no final + // concatenation into a single buffer). 
+ let totalBytes = 0; + for await (const chunks of readable) { + for (let i = 0; i < chunks.length; i++) { + totalBytes += chunks[i].byteLength; + } + } + return totalBytes; + } finally { + await fh.close(); + } +} diff --git a/doc/api/fs.md b/doc/api/fs.md index 8f122123512848..09a2c40f246280 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -377,6 +377,61 @@ added: v10.0.0 * Type: {number} The numeric file descriptor managed by the {FileHandle} object. +#### `filehandle.pull([...transforms][, options])` + + + +> Stability: 1 - Experimental + +* `...transforms` {Function|Object} Optional transforms to apply via + [`stream/new pull()`][]. +* `options` {Object} + * `signal` {AbortSignal} + * `autoClose` {boolean} Close the file handle when the stream ends. + **Default:** `false`. +* Returns: {AsyncIterable\} + +Return the file contents as an async iterable using the +[`node:stream/new`][] pull model. Reads are performed in 64 KB chunks. +If transforms are provided, they are applied via [`stream/new pull()`][]. + +The file handle is locked while the iterable is being consumed and unlocked +when iteration completes. 
+ +```mjs +import { open } from 'node:fs/promises'; +import { text, compressGzip } from 'node:stream/new'; + +const fh = await open('input.txt', 'r'); + +// Read as text +console.log(await text(fh.pull({ autoClose: true }))); + +// Read with compression +const fh2 = await open('input.txt', 'r'); +const compressed = fh2.pull(compressGzip(), { autoClose: true }); +``` + +```cjs +const { open } = require('node:fs/promises'); +const { text, compressGzip } = require('node:stream/new'); + +async function run() { + const fh = await open('input.txt', 'r'); + + // Read as text + console.log(await text(fh.pull({ autoClose: true }))); + + // Read with compression + const fh2 = await open('input.txt', 'r'); + const compressed = fh2.pull(compressGzip(), { autoClose: true }); +} + +run().catch(console.error); +``` + #### `filehandle.read(buffer, offset, length, position)` + +> Stability: 1 - Experimental + +* `options` {Object} + * `autoClose` {boolean} Close the file handle when the writer ends. + **Default:** `false`. + * `start` {number} Byte offset to start writing at. **Default:** current + position (append). +* Returns: {Object} + * `write(chunk)` {Function} Returns {Promise\}. + * `writev(chunks)` {Function} Returns {Promise\}. Uses scatter/gather + I/O via a single `writev()` syscall. + * `end()` {Function} Returns {Promise\} total bytes written. + * `abort(reason)` {Function} Returns {Promise\}. + +Return a [`node:stream/new`][] writer backed by this file handle. + +The writer supports `Symbol.asyncDispose`, so it can be used with +`await using`. 
+ +```mjs +import { open } from 'node:fs/promises'; +import { from, pipeTo, compressGzip } from 'node:stream/new'; + +const fh = await open('output.gz', 'w'); +const w = fh.writer({ autoClose: true }); +await pipeTo(from('Hello!'), compressGzip(), w); +await w.end(); +``` + +```cjs +const { open } = require('node:fs/promises'); +const { from, pipeTo, compressGzip } = require('node:stream/new'); + +async function run() { + const fh = await open('output.gz', 'w'); + const w = fh.writer({ autoClose: true }); + await pipeTo(from('Hello!'), compressGzip(), w); + await w.end(); +} + +run().catch(console.error); +``` + #### `filehandle[Symbol.asyncDispose]()` + +> Stability: 1 - Experimental + + + +The `node:stream/new` module provides a new streaming API built on iterables +rather than the event-driven `Readable`/`Writable`/`Transform` class hierarchy. + +Streams are represented as `AsyncIterable` (async) or +`Iterable` (sync). There are no base classes to extend -- any +object implementing the iterable protocol can participate. Transforms are plain +functions or objects with a `transform` method. + +Data flows in **batches** (`Uint8Array[]` per iteration) to amortize the cost +of async operations. + +```mjs +import { from, pull, text, compressGzip, decompressGzip } from 'node:stream/new'; + +// Compress and decompress a string +const compressed = pull(from('Hello, world!'), compressGzip()); +const result = await text(pull(compressed, decompressGzip())); +console.log(result); // 'Hello, world!' +``` + +```cjs +const { from, pull, text, compressGzip, decompressGzip } = require('node:stream/new'); + +async function run() { + // Compress and decompress a string + const compressed = pull(from('Hello, world!'), compressGzip()); + const result = await text(pull(compressed, decompressGzip())); + console.log(result); // 'Hello, world!' 
+} + +run().catch(console.error); +``` + +```mjs +import { open } from 'node:fs/promises'; +import { text, compressGzip, decompressGzip, pipeTo } from 'node:stream/new'; + +// Read a file, compress, write to another file +const src = await open('input.txt', 'r'); +const dst = await open('output.gz', 'w'); +await pipeTo(src.pull(), compressGzip(), dst.writer({ autoClose: true })); +await src.close(); + +// Read it back +const gz = await open('output.gz', 'r'); +console.log(await text(gz.pull(decompressGzip(), { autoClose: true }))); +``` + +```cjs +const { open } = require('node:fs/promises'); +const { text, compressGzip, decompressGzip, pipeTo } = require('node:stream/new'); + +async function run() { + // Read a file, compress, write to another file + const src = await open('input.txt', 'r'); + const dst = await open('output.gz', 'w'); + await pipeTo(src.pull(), compressGzip(), dst.writer({ autoClose: true })); + await src.close(); + + // Read it back + const gz = await open('output.gz', 'r'); + console.log(await text(gz.pull(decompressGzip(), { autoClose: true }))); +} + +run().catch(console.error); +``` + +## Concepts + +### Byte streams + +All data in the new streams API is represented as `Uint8Array` bytes. Strings +are automatically UTF-8 encoded when passed to `from()`, `push()`, or +`pipeTo()`. This removes ambiguity around encodings and enables zero-copy +transfers between streams and native code. + +### Batching + +Each iteration yields a **batch** -- an array of `Uint8Array` chunks +(`Uint8Array[]`). Batching amortizes the cost of `await` and Promise creation +across multiple chunks. 
A consumer that processes one chunk at a time can +simply iterate the inner array: + +```mjs +for await (const batch of source) { + for (const chunk of batch) { + handle(chunk); + } +} +``` + +```cjs +async function run() { + for await (const batch of source) { + for (const chunk of batch) { + handle(chunk); + } + } +} +``` + +### Transforms + +Transforms come in two forms: + +* **Stateless** -- a function `(chunks) => result` called once per batch. + Receives `Uint8Array[]` (or `null` as the flush signal). Returns + `Uint8Array[]`, `null`, or an iterable of chunks. + +* **Stateful** -- an object `{ transform(source) }` where `transform` is a + generator (sync or async) that receives the entire upstream iterable and + yields output. This form is used for compression, encryption, and any + transform that needs to buffer across batches. + +The flush signal (`null`) is sent after the source ends, giving transforms +a chance to emit trailing data (e.g., compression footers). + +```js +// Stateless: uppercase transform +const upper = (chunks) => { + if (chunks === null) return null; // flush + return chunks.map((c) => new TextEncoder().encode( + new TextDecoder().decode(c).toUpperCase(), + )); +}; + +// Stateful: line splitter +const lines = { + transform: async function*(source) { + let partial = ''; + for await (const chunks of source) { + if (chunks === null) { + if (partial) yield [new TextEncoder().encode(partial)]; + continue; + } + for (const chunk of chunks) { + const str = partial + new TextDecoder().decode(chunk); + const parts = str.split('\n'); + partial = parts.pop(); + for (const line of parts) { + yield [new TextEncoder().encode(`${line}\n`)]; + } + } + } + }, +}; +``` + +### Pull vs. push + +The API supports two models: + +* **Pull** -- data flows on demand. `pull()` and `pullSync()` create lazy + pipelines that only read from the source when the consumer iterates. + +* **Push** -- data is written explicitly. 
`push()` creates a writer/readable + pair with backpressure. The writer pushes data in; the readable is consumed + as an async iterable. + +### Writers + +A writer is any object with a `write(chunk)` method. Writers optionally +support `writev(chunks)` for batch writes (mapped to scatter/gather I/O where +available), `end()` to signal completion, and `abort(reason)` to signal +failure. + +## `require('node:stream/new')` + +All functions are available both as named exports and as properties of the +`Stream` namespace object: + +```mjs +// Named exports +import { from, pull, bytes, Stream } from 'node:stream/new'; + +// Namespace access +Stream.from('hello'); +``` + +```cjs +// Named exports +const { from, pull, bytes, Stream } = require('node:stream/new'); + +// Namespace access +Stream.from('hello'); +``` + +## Sources + +### `from(input)` + + + +* `input` {string|ArrayBuffer|ArrayBufferView|Iterable|AsyncIterable} +* Returns: {AsyncIterable\} + +Create an async byte stream from the given input. Strings are UTF-8 encoded. +`ArrayBuffer` and `ArrayBufferView` values are wrapped as `Uint8Array`. Arrays +and iterables are recursively flattened and normalized. + +Objects implementing `Symbol.for('Stream.toAsyncStreamable')` or +`Symbol.for('Stream.toStreamable')` are converted via those protocols. + +```mjs +import { Buffer } from 'node:buffer'; +import { from, text } from 'node:stream/new'; + +console.log(await text(from('hello'))); // 'hello' +console.log(await text(from(Buffer.from('hello')))); // 'hello' +``` + +```cjs +const { from, text } = require('node:stream/new'); + +async function run() { + console.log(await text(from('hello'))); // 'hello' + console.log(await text(from(Buffer.from('hello')))); // 'hello' +} + +run().catch(console.error); +``` + +### `fromSync(input)` + + + +* `input` {string|ArrayBuffer|ArrayBufferView|Iterable} +* Returns: {Iterable\} + +Synchronous version of [`from()`][]. Returns a sync iterable. 
Cannot accept +async iterables or promises. + +```mjs +import { fromSync, textSync } from 'node:stream/new'; + +console.log(textSync(fromSync('hello'))); // 'hello' +``` + +```cjs +const { fromSync, textSync } = require('node:stream/new'); + +console.log(textSync(fromSync('hello'))); // 'hello' +``` + +## Pipelines + +### `pipeTo(source[, ...transforms], writer[, options])` + + + +* `source` {AsyncIterable|Iterable} The data source. +* `...transforms` {Function|Object} Zero or more transforms to apply. +* `writer` {Object} Destination with `write(chunk)` method. +* `options` {Object} + * `signal` {AbortSignal} Abort the pipeline. + * `preventClose` {boolean} If `true`, do not call `writer.end()` when + the source ends. **Default:** `false`. + * `preventAbort` {boolean} If `true`, do not call `writer.abort()` on + error. **Default:** `false`. +* Returns: {Promise\} Total bytes written. + +Pipe a source through transforms into a writer. If the writer has a +`writev(chunks)` method, entire batches are passed in a single call (enabling +scatter/gather I/O). + +```mjs +import { from, pipeTo, compressGzip } from 'node:stream/new'; +import { open } from 'node:fs/promises'; + +const fh = await open('output.gz', 'w'); +const totalBytes = await pipeTo( + from('Hello, world!'), + compressGzip(), + fh.writer({ autoClose: true }), +); +``` + +```cjs +const { from, pipeTo, compressGzip } = require('node:stream/new'); +const { open } = require('node:fs/promises'); + +async function run() { + const fh = await open('output.gz', 'w'); + const totalBytes = await pipeTo( + from('Hello, world!'), + compressGzip(), + fh.writer({ autoClose: true }), + ); +} + +run().catch(console.error); +``` + +### `pipeToSync(source[, ...transforms], writer[, options])` + + + +* `source` {Iterable} The sync data source. +* `...transforms` {Function|Object} Zero or more sync transforms. +* `writer` {Object} Destination with `write(chunk)` method. 
+* `options` {Object} + * `preventClose` {boolean} **Default:** `false`. + * `preventAbort` {boolean} **Default:** `false`. +* Returns: {number} Total bytes written. + +Synchronous version of [`pipeTo()`][]. + +### `pull(source[, ...transforms][, options])` + + + +* `source` {AsyncIterable|Iterable} The data source. +* `...transforms` {Function|Object} Zero or more transforms to apply. +* `options` {Object} + * `signal` {AbortSignal} Abort the pipeline. +* Returns: {AsyncIterable\} + +Create a lazy async pipeline. Data is not read from `source` until the +returned iterable is consumed. Transforms are applied in order. + +```mjs +import { from, pull, text } from 'node:stream/new'; + +const upper = (chunks) => { + if (chunks === null) return null; + return chunks.map((c) => + new TextEncoder().encode(new TextDecoder().decode(c).toUpperCase()), + ); +}; + +const result = pull(from('hello'), upper); +console.log(await text(result)); // 'HELLO' +``` + +```cjs +const { from, pull, text } = require('node:stream/new'); + +const upper = (chunks) => { + if (chunks === null) return null; + return chunks.map((c) => + new TextEncoder().encode(new TextDecoder().decode(c).toUpperCase()), + ); +}; + +async function run() { + const result = pull(from('hello'), upper); + console.log(await text(result)); // 'HELLO' +} + +run().catch(console.error); +``` + +Using an `AbortSignal`: + +```mjs +import { pull } from 'node:stream/new'; + +const ac = new AbortController(); +const result = pull(source, transform, { signal: ac.signal }); +ac.abort(); // Pipeline throws AbortError on next iteration +``` + +```cjs +const { pull } = require('node:stream/new'); + +const ac = new AbortController(); +const result = pull(source, transform, { signal: ac.signal }); +ac.abort(); // Pipeline throws AbortError on next iteration +``` + +### `pullSync(source[, ...transforms])` + + + +* `source` {Iterable} The sync data source. +* `...transforms` {Function|Object} Zero or more sync transforms. 
+* Returns: {Iterable\} + +Synchronous version of [`pull()`][]. All transforms must be synchronous. + +## Push streams + +### `push([...transforms][, options])` + + + +* `...transforms` {Function|Object} Optional transforms applied to the + readable side. +* `options` {Object} + * `highWaterMark` {number} Maximum number of buffered slots before + backpressure is applied. **Default:** `1`. + * `backpressure` {string} Backpressure policy: `'strict'`, `'block'`, + `'drop-oldest'`, or `'drop-newest'`. **Default:** `'strict'`. + * `signal` {AbortSignal} Abort the stream. +* Returns: {Object} + * `writer` {PushWriter} The writer side. + * `readable` {AsyncIterable\} The readable side. + +Create a push stream with backpressure. The writer pushes data in; the +readable side is consumed as an async iterable. + +```mjs +import { push, text } from 'node:stream/new'; + +const { writer, readable } = push(); +writer.write('hello'); +writer.write(' world'); +writer.end(); + +console.log(await text(readable)); // 'hello world' +``` + +```cjs +const { push, text } = require('node:stream/new'); + +async function run() { + const { writer, readable } = push(); + writer.write('hello'); + writer.write(' world'); + writer.end(); + + console.log(await text(readable)); // 'hello world' +} + +run().catch(console.error); +``` + +#### Writer + +The writer returned by `push()` has the following methods: + +##### `writer.abort(reason)` + +* `reason` {Error} +* Returns: {Promise\} + +Abort the stream with an error. + +##### `writer.desiredSize` + +* {number|null} + +The number of buffer slots available before the high water mark is reached. +Returns `null` if the writer is closed or the consumer has disconnected. + +##### `writer.end()` + +* Returns: {Promise\} Total bytes written. + +Signal that no more data will be written. + +##### `writer.write(chunk)` + +* `chunk` {Uint8Array|string} +* Returns: {Promise\} + +Write a chunk. The promise resolves when buffer space is available. 
+ +##### `writer.writeSync(chunk)` + +* `chunk` {Uint8Array|string} +* Returns: {boolean} `true` if the write was accepted, `false` if the + buffer is full. + +Synchronous write. Does not block; returns `false` if backpressure is active. + +##### `writer.writev(chunks)` + +* `chunks` {Uint8Array\[]|string\[]} +* Returns: {Promise\} + +Write multiple chunks as a single batch. + +##### `writer.writevSync(chunks)` + +* `chunks` {Uint8Array\[]|string\[]} +* Returns: {boolean} + +Synchronous batch write. + +## Consumers + +### `array(source[, options])` + + + +* `source` {AsyncIterable\|Iterable\} +* `options` {Object} + * `signal` {AbortSignal} + * `limit` {number} +* Returns: {Promise\} + +Collect all chunks as an array of `Uint8Array` values (without concatenating). + +### `arrayBuffer(source[, options])` + + + +* `source` {AsyncIterable\|Iterable\} +* `options` {Object} + * `signal` {AbortSignal} + * `limit` {number} +* Returns: {Promise\} + +Collect all bytes into an `ArrayBuffer`. + +### `arrayBufferSync(source[, options])` + + + +* `source` {Iterable\} +* `options` {Object} + * `limit` {number} +* Returns: {ArrayBuffer} + +Synchronous version of [`arrayBuffer()`][]. + +### `arraySync(source[, options])` + + + +* `source` {Iterable\} +* `options` {Object} + * `limit` {number} +* Returns: {Uint8Array\[]} + +Synchronous version of [`array()`][]. + +### `bytes(source[, options])` + + + +* `source` {AsyncIterable\|Iterable\} +* `options` {Object} + * `signal` {AbortSignal} + * `limit` {number} Maximum bytes to collect. Throws if exceeded. +* Returns: {Promise\} + +Collect all bytes from a stream into a single `Uint8Array`. 
+ +```mjs +import { from, bytes } from 'node:stream/new'; + +const data = await bytes(from('hello')); +console.log(data); // Uint8Array(5) [ 104, 101, 108, 108, 111 ] +``` + +```cjs +const { from, bytes } = require('node:stream/new'); + +async function run() { + const data = await bytes(from('hello')); + console.log(data); // Uint8Array(5) [ 104, 101, 108, 108, 111 ] +} + +run().catch(console.error); +``` + +### `bytesSync(source[, options])` + + + +* `source` {Iterable\} +* `options` {Object} + * `limit` {number} +* Returns: {Uint8Array} + +Synchronous version of [`bytes()`][]. + +### `text(source[, options])` + + + +* `source` {AsyncIterable\|Iterable\} +* `options` {Object} + * `encoding` {string} Text encoding. **Default:** `'utf-8'`. + * `signal` {AbortSignal} + * `limit` {number} +* Returns: {Promise\} + +Collect all bytes and decode as text. + +```mjs +import { from, text } from 'node:stream/new'; + +console.log(await text(from('hello'))); // 'hello' +``` + +```cjs +const { from, text } = require('node:stream/new'); + +async function run() { + console.log(await text(from('hello'))); // 'hello' +} + +run().catch(console.error); +``` + +### `textSync(source[, options])` + + + +* `source` {Iterable\} +* `options` {Object} + * `encoding` {string} **Default:** `'utf-8'`. + * `limit` {number} +* Returns: {string} + +Synchronous version of [`text()`][]. + +## Utilities + +### `merge(...sources[, options])` + + + +* `...sources` {AsyncIterable\} Two or more async iterables. +* `options` {Object} + * `signal` {AbortSignal} +* Returns: {AsyncIterable\} + +Merge multiple async iterables by yielding batches in temporal order +(whichever source produces data first). All sources are consumed +concurrently. 
+ +```mjs +import { from, merge, text } from 'node:stream/new'; + +const merged = merge(from('hello '), from('world')); +console.log(await text(merged)); // Order depends on timing +``` + +```cjs +const { from, merge, text } = require('node:stream/new'); + +async function run() { + const merged = merge(from('hello '), from('world')); + console.log(await text(merged)); // Order depends on timing +} + +run().catch(console.error); +``` + +### `ondrain(drainable)` + + + +* `drainable` {Object} An object implementing the drainable protocol. +* Returns: {Promise\|null} + +Wait for a drainable writer's backpressure to clear. Returns a promise that +resolves to `true` when the writer can accept more data, or `null` if the +object does not implement the drainable protocol. + +```mjs +import { push, ondrain } from 'node:stream/new'; + +const { writer, readable } = push({ highWaterMark: 2 }); +writer.writeSync('a'); +writer.writeSync('b'); + +// Buffer is full -- wait for drain +const canWrite = await ondrain(writer); +``` + +```cjs +const { push, ondrain } = require('node:stream/new'); + +async function run() { + const { writer, readable } = push({ highWaterMark: 2 }); + writer.writeSync('a'); + writer.writeSync('b'); + + // Buffer is full -- wait for drain + const canWrite = await ondrain(writer); +} + +run().catch(console.error); +``` + +### `tap(callback)` + + + +* `callback` {Function} `(chunks) => void` Called with each batch. +* Returns: {Function} A stateless transform. + +Create a pass-through transform that observes batches without modifying them. +Useful for logging, metrics, or debugging. 
+ +```mjs +import { from, pull, text, tap } from 'node:stream/new'; + +const result = pull( + from('hello'), + tap((chunks) => console.log('Batch size:', chunks.length)), +); +console.log(await text(result)); +``` + +```cjs +const { from, pull, text, tap } = require('node:stream/new'); + +async function run() { + const result = pull( + from('hello'), + tap((chunks) => console.log('Batch size:', chunks.length)), + ); + console.log(await text(result)); +} + +run().catch(console.error); +``` + +### `tapSync(callback)` + + + +* `callback` {Function} +* Returns: {Function} + +Synchronous version of [`tap()`][]. + +## Multi-consumer + +### `broadcast([options])` + + + +* `options` {Object} + * `highWaterMark` {number} Buffer size in slots. **Default:** `16`. + * `backpressure` {string} `'strict'` or `'block'`. **Default:** `'strict'`. + * `signal` {AbortSignal} +* Returns: {Object} + * `writer` {BroadcastWriter} + * `broadcast` {Broadcast} + +Create a push-model multi-consumer broadcast channel. A single writer pushes +data to multiple consumers. Each consumer has an independent cursor into a +shared buffer. + +```mjs +import { broadcast, text } from 'node:stream/new'; + +const { writer, broadcast: bc } = broadcast(); + +const c1 = bc.push(); // Consumer 1 +const c2 = bc.push(); // Consumer 2 + +writer.write('hello'); +writer.end(); + +console.log(await text(c1)); // 'hello' +console.log(await text(c2)); // 'hello' +``` + +```cjs +const { broadcast, text } = require('node:stream/new'); + +async function run() { + const { writer, broadcast: bc } = broadcast(); + + const c1 = bc.push(); // Consumer 1 + const c2 = bc.push(); // Consumer 2 + + writer.write('hello'); + writer.end(); + + console.log(await text(c1)); // 'hello' + console.log(await text(c2)); // 'hello' +} + +run().catch(console.error); +``` + +#### `broadcast.cancel([reason])` + +* `reason` {Error} + +Cancel the broadcast. All consumers receive an error. 
+ +#### `broadcast.push([...transforms][, options])` + +* `...transforms` {Function|Object} +* `options` {Object} + * `signal` {AbortSignal} +* Returns: {AsyncIterable\} + +Create a new consumer. Each consumer receives all data written to the +broadcast from the point of subscription onward. Optional transforms are +applied to this consumer's view of the data. + +#### `broadcast[Symbol.dispose]()` + +Alias for `broadcast.cancel()`. + +### `Broadcast.from(input[, options])` + + + +* `input` {AsyncIterable|Iterable|Broadcastable} +* `options` {Object} Same as `broadcast()`. +* Returns: {Object} `{ writer, broadcast }` + +Create a broadcast from an existing source. The source is consumed +automatically and pushed to all subscribers. + +### `share(source[, options])` + + + +* `source` {AsyncIterable} The source to share. +* `options` {Object} + * `highWaterMark` {number} Buffer size. **Default:** `16`. + * `backpressure` {string} `'strict'`, `'block'`, or `'drop-oldest'`. + **Default:** `'strict'`. +* Returns: {Share} + +Create a pull-model multi-consumer shared stream. Unlike `broadcast()`, the +source is only read when a consumer pulls. Multiple consumers share a single +buffer. + +```mjs +import { from, share, text } from 'node:stream/new'; + +const shared = share(from('hello')); + +const c1 = shared.pull(); +const c2 = shared.pull(); + +console.log(await text(c1)); // 'hello' +console.log(await text(c2)); // 'hello' +``` + +```cjs +const { from, share, text } = require('node:stream/new'); + +async function run() { + const shared = share(from('hello')); + + const c1 = shared.pull(); + const c2 = shared.pull(); + + console.log(await text(c1)); // 'hello' + console.log(await text(c2)); // 'hello' +} + +run().catch(console.error); +``` + +#### `share.cancel([reason])` + +* `reason` {Error} + +Cancel the share. All consumers receive an error. 
+ +#### `share.pull([...transforms][, options])` + +* `...transforms` {Function|Object} +* `options` {Object} + * `signal` {AbortSignal} +* Returns: {AsyncIterable\} + +Create a new consumer of the shared source. + +#### `share[Symbol.dispose]()` + +Alias for `share.cancel()`. + +### `Share.from(input[, options])` + + + +* `input` {AsyncIterable|Shareable} +* `options` {Object} Same as `share()`. +* Returns: {Share} + +Create a share from an existing source. + +### `shareSync(source[, options])` + + + +* `source` {Iterable} The sync source to share. +* `options` {Object} + * `highWaterMark` {number} **Default:** `16`. + * `backpressure` {string} **Default:** `'strict'`. +* Returns: {SyncShare} + +Synchronous version of [`share()`][]. + +### `SyncShare.fromSync(input[, options])` + + + +* `input` {Iterable|SyncShareable} +* `options` {Object} +* Returns: {SyncShare} + +## Compression and decompression + +These transforms use the built-in zlib, Brotli, and Zstd compression +available in Node.js. Compression work is performed asynchronously, +overlapping with upstream I/O for maximum throughput. + +All compression transforms are stateful (they return `{ transform }` objects) +and can be passed to `pull()`, `pipeTo()`, or `push()`. + +### `compressBrotli([options])` + + + +* `options` {Object} + * `chunkSize` {number} **Default:** `16384`. + * `params` {Object} Key-value object where keys and values are + `zlib.constants` entries. The most important compressor parameters are: + * `BROTLI_PARAM_MODE` -- `BROTLI_MODE_GENERIC` (default), + `BROTLI_MODE_TEXT`, or `BROTLI_MODE_FONT`. + * `BROTLI_PARAM_QUALITY` -- ranges from `BROTLI_MIN_QUALITY` to + `BROTLI_MAX_QUALITY`. **Default:** `BROTLI_DEFAULT_QUALITY`. + * `BROTLI_PARAM_SIZE_HINT` -- expected input size. **Default:** `0` + (unknown). + * `BROTLI_PARAM_LGWIN` -- window size (log2). Ranges from + `BROTLI_MIN_WINDOW_BITS` to `BROTLI_MAX_WINDOW_BITS`. + * `BROTLI_PARAM_LGBLOCK` -- input block size (log2). 
+ See the [Brotli compressor options][] in the zlib documentation for the + full list. + * `dictionary` {Buffer|TypedArray|DataView} +* Returns: {Object} A stateful transform. + +Create a Brotli compression transform. Output is compatible with +`zlib.brotliDecompress()` and `decompressBrotli()`. + +### `compressDeflate([options])` + + + +* `options` {Object} + * `chunkSize` {number} Output buffer size. **Default:** `16384`. + * `level` {number} Compression level (`0`-`9`). **Default:** `Z_DEFAULT_COMPRESSION`. + * `windowBits` {number} **Default:** `Z_DEFAULT_WINDOWBITS`. + * `memLevel` {number} **Default:** `Z_DEFAULT_MEMLEVEL`. + * `strategy` {number} **Default:** `Z_DEFAULT_STRATEGY`. + * `dictionary` {Buffer|TypedArray|DataView} +* Returns: {Object} A stateful transform. + +Create a deflate compression transform. Output is compatible with +`zlib.inflate()` and `decompressDeflate()`. + +### `compressGzip([options])` + + + +* `options` {Object} + * `chunkSize` {number} Output buffer size. **Default:** `16384`. + * `level` {number} Compression level (`0`-`9`). **Default:** `Z_DEFAULT_COMPRESSION`. + * `windowBits` {number} **Default:** `Z_DEFAULT_WINDOWBITS`. + * `memLevel` {number} **Default:** `Z_DEFAULT_MEMLEVEL`. + * `strategy` {number} **Default:** `Z_DEFAULT_STRATEGY`. + * `dictionary` {Buffer|TypedArray|DataView} +* Returns: {Object} A stateful transform. + +Create a gzip compression transform. Output is compatible with `zlib.gunzip()` +and `decompressGzip()`. 
+ +```mjs +import { from, pull, bytes, text, compressGzip, decompressGzip } from 'node:stream/new'; + +const compressed = await bytes(pull(from('hello'), compressGzip())); +const original = await text(pull(from(compressed), decompressGzip())); +console.log(original); // 'hello' +``` + +```cjs +const { from, pull, bytes, text, compressGzip, decompressGzip } = require('node:stream/new'); + +async function run() { + const compressed = await bytes(pull(from('hello'), compressGzip())); + const original = await text(pull(from(compressed), decompressGzip())); + console.log(original); // 'hello' +} + +run().catch(console.error); +``` + +### `compressZstd([options])` + + + +* `options` {Object} + * `chunkSize` {number} **Default:** `16384`. + * `params` {Object} Key-value object where keys and values are + `zlib.constants` entries. The most important compressor parameters are: + * `ZSTD_c_compressionLevel` -- **Default:** `ZSTD_CLEVEL_DEFAULT` (3). + * `ZSTD_c_checksumFlag` -- generate a checksum. **Default:** `0`. + * `ZSTD_c_strategy` -- compression strategy. Values include + `ZSTD_fast`, `ZSTD_dfast`, `ZSTD_greedy`, `ZSTD_lazy`, + `ZSTD_lazy2`, `ZSTD_btlazy2`, `ZSTD_btopt`, `ZSTD_btultra`, + `ZSTD_btultra2`. + See the [Zstd compressor options][] in the zlib documentation for the + full list. + * `pledgedSrcSize` {number} Expected uncompressed size (optional hint). + * `dictionary` {Buffer|TypedArray|DataView} +* Returns: {Object} A stateful transform. + +Create a Zstandard compression transform. Output is compatible with +`zlib.zstdDecompress()` and `decompressZstd()`. + +### `decompressBrotli([options])` + + + +* `options` {Object} + * `chunkSize` {number} **Default:** `16384`. + * `params` {Object} Key-value object where keys and values are + `zlib.constants` entries. Available decompressor parameters: + * `BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION` -- boolean + flag affecting internal memory allocation. 
+ * `BROTLI_DECODER_PARAM_LARGE_WINDOW` -- boolean flag enabling "Large + Window Brotli" mode (not compatible with [RFC 7932][]). + See the [Brotli decompressor options][] in the zlib documentation for + details. + * `dictionary` {Buffer|TypedArray|DataView} +* Returns: {Object} A stateful transform. + +Create a Brotli decompression transform. + +### `decompressDeflate([options])` + + + +* `options` {Object} + * `chunkSize` {number} Output buffer size. **Default:** `16384`. + * `level` {number} Compression level (`0`-`9`). **Default:** `Z_DEFAULT_COMPRESSION`. + * `windowBits` {number} **Default:** `Z_DEFAULT_WINDOWBITS`. + * `memLevel` {number} **Default:** `Z_DEFAULT_MEMLEVEL`. + * `strategy` {number} **Default:** `Z_DEFAULT_STRATEGY`. + * `dictionary` {Buffer|TypedArray|DataView} +* Returns: {Object} A stateful transform. + +Create a deflate decompression transform. + +### `decompressGzip([options])` + + + +* `options` {Object} + * `chunkSize` {number} Output buffer size. **Default:** `16384`. + * `level` {number} Compression level (`0`-`9`). **Default:** `Z_DEFAULT_COMPRESSION`. + * `windowBits` {number} **Default:** `Z_DEFAULT_WINDOWBITS`. + * `memLevel` {number} **Default:** `Z_DEFAULT_MEMLEVEL`. + * `strategy` {number} **Default:** `Z_DEFAULT_STRATEGY`. + * `dictionary` {Buffer|TypedArray|DataView} +* Returns: {Object} A stateful transform. + +Create a gzip decompression transform. + +### `decompressZstd([options])` + + + +* `options` {Object} + * `chunkSize` {number} **Default:** `16384`. + * `params` {Object} Key-value object where keys and values are + `zlib.constants` entries. Available decompressor parameters: + * `ZSTD_d_windowLogMax` -- maximum window size (log2) the decompressor + will allocate. Limits memory usage against malicious input. + See the [Zstd decompressor options][] in the zlib documentation for + details. + * `dictionary` {Buffer|TypedArray|DataView} +* Returns: {Object} A stateful transform. 
+ +Create a Zstandard decompression transform. + +## Protocol symbols + +These well-known symbols allow third-party objects to participate in the +streaming protocol without importing from `node:stream/new` directly. + +### `Stream.broadcastProtocol` + +* Value: `Symbol.for('Stream.broadcastProtocol')` + +Implement to make an object usable with `Broadcast.from()`. + +### `Stream.drainableProtocol` + +* Value: `Symbol.for('Stream.drainableProtocol')` + +Implement to make a writer compatible with `ondrain()`. The method should +return a promise that resolves when backpressure clears, or `null` if no +backpressure. + +### `Stream.shareProtocol` + +* Value: `Symbol.for('Stream.shareProtocol')` + +Implement to make an object usable with `Share.from()`. + +### `Stream.shareSyncProtocol` + +* Value: `Symbol.for('Stream.shareSyncProtocol')` + +Implement to make an object usable with `SyncShare.fromSync()`. + +### `Stream.toAsyncStreamable` + +* Value: `Symbol.for('Stream.toAsyncStreamable')` + +Async version of `toStreamable`. The method may return a promise. + +### `Stream.toStreamable` + +* Value: `Symbol.for('Stream.toStreamable')` + +Implement this symbol as a method that returns a sync-streamable value +(string, `Uint8Array`, `Iterable`, etc.). Used by `from()` and `fromSync()`. + +```js +const obj = { + [Symbol.for('Stream.toStreamable')]() { + return 'hello from custom object'; + }, +}; +// from(obj) and fromSync(obj) will UTF-8 encode the returned string. 
+``` + +[Brotli compressor options]: zlib.md#compressor-options +[Brotli decompressor options]: zlib.md#decompressor-options +[RFC 7932]: https://www.rfc-editor.org/rfc/rfc7932 +[Zstd compressor options]: zlib.md#compressor-options-1 +[Zstd decompressor options]: zlib.md#decompressor-options-1 +[`array()`]: #arraysource-options +[`arrayBuffer()`]: #arraybuffersource-options +[`bytes()`]: #bytessource-options +[`from()`]: #frominput +[`pipeTo()`]: #pipetosource-transforms-writer-options +[`pull()`]: #pullsource-transforms-options +[`share()`]: #sharesource-options +[`tap()`]: #tapcallback +[`text()`]: #textsource-options diff --git a/lib/internal/fs/promises.js b/lib/internal/fs/promises.js index 2f95c4b79e17fd..d258e85e631dc7 100644 --- a/lib/internal/fs/promises.js +++ b/lib/internal/fs/promises.js @@ -16,6 +16,7 @@ const { SafePromisePrototypeFinally, Symbol, SymbolAsyncDispose, + SymbolAsyncIterator, Uint8Array, } = primordials; @@ -40,6 +41,7 @@ const { ERR_INVALID_ARG_VALUE, ERR_INVALID_STATE, ERR_METHOD_NOT_IMPLEMENTED, + ERR_OPERATION_FAILED, }, } = require('internal/errors'); const { isArrayBufferView } = require('internal/util/types'); @@ -139,6 +141,17 @@ const lazyReadableStream = getLazy(() => require('internal/webstreams/readablestream').ReadableStream, ); +// Lazy loaded to avoid circular dependency with new streams. +let newStreamsPull; +let newStreamsParsePullArgs; +function lazyNewStreams() { + if (newStreamsPull === undefined) { + newStreamsPull = require('internal/streams/new/pull').pull; + newStreamsParsePullArgs = + require('internal/streams/new/utils').parsePullArgs; + } +} + // By the time the C++ land creates an error for a promise rejection (likely from a // libuv callback), there is already no JS frames on the stack. 
So we need to // wait until V8 resumes execution back to JS land before we have enough information @@ -341,6 +354,249 @@ class FileHandle extends EventEmitter { return readable; } + /** + * Return the file contents as an AsyncIterable using the + * new streams pull model. Optional transforms and options (including + * AbortSignal) may be provided as trailing arguments, mirroring the + * Stream.pull() signature. + * @param {...(Function|object)} args - Optional transforms and/or options + * @returns {AsyncIterable} + */ + pull(...args) { + if (this[kFd] === -1) + throw new ERR_INVALID_STATE('The FileHandle is closed'); + if (this[kClosePromise]) + throw new ERR_INVALID_STATE('The FileHandle is closing'); + if (this[kLocked]) + throw new ERR_INVALID_STATE('The FileHandle is locked'); + this[kLocked] = true; + + lazyNewStreams(); + const { transforms, options } = newStreamsParsePullArgs(args); + + const handle = this; + const fd = this[kFd]; + const autoClose = options?.autoClose ?? false; + const signal = options?.signal; + + const source = { + async *[SymbolAsyncIterator]() { + handle[kRef](); + const readSize = 65536; + try { + if (signal) { + // Signal-aware path + while (true) { + if (signal.aborted) { + throw signal.reason ?? + lazyDOMException('The operation was aborted', + 'AbortError'); + } + // Allocate a fresh buffer each iteration. At 64 KiB this + // bypasses the slab pool, so there is no reuse benefit. + // Yielding the buffer directly avoids the per-chunk copy + // that was needed when a single buffer was reused. + const buf = Buffer.allocUnsafe(readSize); + let bytesRead; + try { + bytesRead = + (await binding.read(fd, buf, 0, + readSize, -1, kUsePromises)) || 0; + } catch (err) { + ErrorCaptureStackTrace(err, handleErrorFromBinding); + throw err; + } + if (bytesRead === 0) break; + yield [bytesRead < readSize ? 
buf.subarray(0, bytesRead) : buf]; + } + } else { + // Fast path - no signal check per iteration + while (true) { + const buf = Buffer.allocUnsafe(readSize); + let bytesRead; + try { + bytesRead = + (await binding.read(fd, buf, 0, + readSize, -1, kUsePromises)) || 0; + } catch (err) { + ErrorCaptureStackTrace(err, handleErrorFromBinding); + throw err; + } + if (bytesRead === 0) break; + yield [bytesRead < readSize ? buf.subarray(0, bytesRead) : buf]; + } + } + } finally { + handle[kLocked] = false; + handle[kUnref](); + if (autoClose) { + await handle.close(); + } + } + }, + }; + + // If transforms provided, wrap with pull pipeline + if (transforms.length > 0) { + const pullArgs = [...transforms]; + if (options) { + ArrayPrototypePush(pullArgs, options); + } + return newStreamsPull(source, ...pullArgs); + } + return source; + } + + /** + * Return a new-streams Writer backed by this file handle. + * The writer uses direct binding.writeBuffer / binding.writeBuffers + * calls, bypassing the FileHandle.write() validation chain. + * + * Supports writev() for batch writes (single syscall per batch). + * Handles EAGAIN with retry (up to 5 attempts), matching WriteStream. + * @param {{ + * autoClose?: boolean; + * start?: number; + * }} [options] + * @returns {{ write, writev, end, abort }} + */ + writer(options) { + if (this[kFd] === -1) + throw new ERR_INVALID_STATE('The FileHandle is closed'); + if (this[kClosePromise]) + throw new ERR_INVALID_STATE('The FileHandle is closing'); + if (this[kLocked]) + throw new ERR_INVALID_STATE('The FileHandle is locked'); + this[kLocked] = true; + + const handle = this; + const fd = this[kFd]; + const autoClose = options?.autoClose ?? false; + let pos = options?.start ?? -1; + let totalBytesWritten = 0; + let closed = false; + + if (pos !== -1) { + validateInteger(pos, 'options.start', 0); + } + + handle[kRef](); + + // Write a single buffer with EAGAIN retry (up to 5 retries). 
+ async function writeAll(buf, offset, length, position) { + let retries = 0; + while (length > 0) { + const bytesWritten = (await PromisePrototypeThen( + binding.writeBuffer(fd, buf, offset, length, position, + kUsePromises), + undefined, + handleErrorFromBinding, + )) || 0; + + if (bytesWritten === 0) { + if (++retries > 5) { + throw new ERR_OPERATION_FAILED('write failed after retries'); + } + } else { + retries = 0; + } + + totalBytesWritten += bytesWritten; + offset += bytesWritten; + length -= bytesWritten; + if (position >= 0) position += bytesWritten; + } + } + + // Writev with EAGAIN retry. On partial write, concatenates remaining + // buffers and falls back to writeAll (same approach as WriteStream). + async function writevAll(buffers, position) { + let totalSize = 0; + for (let i = 0; i < buffers.length; i++) { + totalSize += buffers[i].byteLength; + } + + let retries = 0; + while (totalSize > 0) { + const bytesWritten = (await PromisePrototypeThen( + binding.writeBuffers(fd, buffers, position, kUsePromises), + undefined, + handleErrorFromBinding, + )) || 0; + + if (bytesWritten === 0) { + if (++retries > 5) { + throw new ERR_OPERATION_FAILED('writev failed after retries'); + } + } else { + retries = 0; + } + + totalBytesWritten += bytesWritten; + totalSize -= bytesWritten; + if (position >= 0) position += bytesWritten; + + if (totalSize > 0) { + // Partial write - concatenate remaining and use writeAll. 
+ const remaining = Buffer.concat(buffers); + const wrote = bytesWritten; + await writeAll(remaining, wrote, remaining.length - wrote, + position); + return; + } + } + } + + async function cleanup() { + if (closed) return; + closed = true; + handle[kLocked] = false; + handle[kUnref](); + if (autoClose) { + await handle.close(); + } + } + + return { + write(chunk) { + if (closed) { + return PromiseReject( + new ERR_INVALID_STATE('The writer is closed')); + } + const position = pos; + if (pos >= 0) pos += chunk.byteLength; + return writeAll(chunk, 0, chunk.byteLength, position); + }, + + writev(chunks) { + if (closed) { + return PromiseReject( + new ERR_INVALID_STATE('The writer is closed')); + } + const position = pos; + if (pos >= 0) { + for (let i = 0; i < chunks.length; i++) { + pos += chunks[i].byteLength; + } + } + return writevAll(chunks, position); + }, + + async end() { + await cleanup(); + return totalBytesWritten; + }, + + async abort(reason) { + await cleanup(); + }, + + async [SymbolAsyncDispose]() { + await cleanup(); + }, + }; + } + /** * @typedef {import('./streams').ReadStream * } ReadStream diff --git a/lib/internal/streams/new/broadcast.js b/lib/internal/streams/new/broadcast.js new file mode 100644 index 00000000000000..3c4a02bae6b6c0 --- /dev/null +++ b/lib/internal/streams/new/broadcast.js @@ -0,0 +1,587 @@ +'use strict'; + +// New Streams API - Broadcast +// +// Push-model multi-consumer streaming. A single writer can push data to +// multiple consumers. Each consumer has an independent cursor into a +// shared buffer. 
+ +const { + ArrayIsArray, + ArrayPrototypeMap, + ArrayPrototypePush, + ArrayPrototypeShift, + ArrayPrototypeSlice, + ArrayPrototypeSplice, + Error, + MathMax, + Promise, + PromiseResolve, + SafeSet, + String, + SymbolAsyncIterator, + SymbolDispose, +} = primordials; + +const { TextEncoder } = require('internal/encoding'); + +const { + codes: { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_STATE, + }, +} = require('internal/errors'); + +const { + broadcastProtocol, + drainableProtocol, +} = require('internal/streams/new/types'); + +const { + isAsyncIterable, + isSyncIterable, +} = require('internal/streams/new/from'); + +const { + pull: pullWithTransforms, +} = require('internal/streams/new/pull'); + +const encoder = new TextEncoder(); + +// ============================================================================= +// Argument Parsing +// ============================================================================= + +function isPushStreamOptions(value) { + return ( + value !== null && + typeof value === 'object' && + !('transform' in value) && + !('write' in value) + ); +} + +function parsePushArgs(args) { + if (args.length === 0) { + return { transforms: [], options: undefined }; + } + const last = args[args.length - 1]; + if (isPushStreamOptions(last)) { + return { + transforms: ArrayPrototypeSlice(args, 0, -1), + options: last, + }; + } + return { transforms: args, options: undefined }; +} + +// ============================================================================= +// Broadcast Implementation +// ============================================================================= + +class BroadcastImpl { + constructor(options) { + this._buffer = []; + this._bufferStart = 0; + this._consumers = new SafeSet(); + this._ended = false; + this._error = null; + this._cancelled = false; + this._options = options; + this._onBufferDrained = null; + } + + get consumerCount() { + return this._consumers.size; + } + + get bufferSize() { + return this._buffer.length; + } + + 
push(...args) { + const { transforms, options } = parsePushArgs(args); + const rawConsumer = this._createRawConsumer(); + + if (transforms.length > 0) { + if (options?.signal) { + return pullWithTransforms( + rawConsumer, ...transforms, { signal: options.signal }); + } + return pullWithTransforms(rawConsumer, ...transforms); + } + return rawConsumer; + } + + _createRawConsumer() { + const state = { + cursor: this._bufferStart + this._buffer.length, + resolve: null, + reject: null, + detached: false, + }; + + this._consumers.add(state); + const self = this; + + return { + [SymbolAsyncIterator]() { + return { + async next() { + if (state.detached) { + return { __proto__: null, done: true, value: undefined }; + } + + const bufferIndex = state.cursor - self._bufferStart; + if (bufferIndex < self._buffer.length) { + const chunk = self._buffer[bufferIndex]; + state.cursor++; + self._tryTrimBuffer(); + return { __proto__: null, done: false, value: chunk }; + } + + if (self._error) { + state.detached = true; + self._consumers.delete(state); + throw self._error; + } + + if (self._ended || self._cancelled) { + state.detached = true; + self._consumers.delete(state); + return { __proto__: null, done: true, value: undefined }; + } + + return new Promise((resolve, reject) => { + state.resolve = resolve; + state.reject = reject; + }); + }, + + async return() { + state.detached = true; + state.resolve = null; + state.reject = null; + self._consumers.delete(state); + self._tryTrimBuffer(); + return { __proto__: null, done: true, value: undefined }; + }, + + async throw() { + state.detached = true; + state.resolve = null; + state.reject = null; + self._consumers.delete(state); + self._tryTrimBuffer(); + return { __proto__: null, done: true, value: undefined }; + }, + }; + }, + }; + } + + cancel(reason) { + if (this._cancelled) return; + this._cancelled = true; + + if (reason) { + this._error = reason; + } + + for (const consumer of this._consumers) { + if (consumer.resolve) { + if 
(reason) { + consumer.reject?.(reason); + } else { + consumer.resolve({ done: true, value: undefined }); + } + consumer.resolve = null; + consumer.reject = null; + } + consumer.detached = true; + } + this._consumers.clear(); + } + + [SymbolDispose]() { + this.cancel(); + } + + // Internal methods called by Writer + + _write(chunk) { + if (this._ended || this._cancelled) return false; + + if (this._buffer.length >= this._options.highWaterMark) { + switch (this._options.backpressure) { + case 'strict': + case 'block': + return false; + case 'drop-oldest': + ArrayPrototypeShift(this._buffer); + this._bufferStart++; + for (const consumer of this._consumers) { + if (consumer.cursor < this._bufferStart) { + consumer.cursor = this._bufferStart; + } + } + break; + case 'drop-newest': + return true; + } + } + + ArrayPrototypePush(this._buffer, chunk); + this._notifyConsumers(); + return true; + } + + _end() { + if (this._ended) return; + this._ended = true; + + for (const consumer of this._consumers) { + if (consumer.resolve) { + const bufferIndex = consumer.cursor - this._bufferStart; + if (bufferIndex < this._buffer.length) { + const chunk = this._buffer[bufferIndex]; + consumer.cursor++; + consumer.resolve({ done: false, value: chunk }); + } else { + consumer.resolve({ done: true, value: undefined }); + } + consumer.resolve = null; + consumer.reject = null; + } + } + } + + _abort(reason) { + if (this._ended || this._error) return; + this._error = reason; + this._ended = true; + + for (const consumer of this._consumers) { + if (consumer.reject) { + consumer.reject(reason); + consumer.resolve = null; + consumer.reject = null; + } + } + } + + _getDesiredSize() { + if (this._ended || this._cancelled) return null; + return MathMax(0, this._options.highWaterMark - this._buffer.length); + } + + _canWrite() { + if (this._ended || this._cancelled) return false; + if ((this._options.backpressure === 'strict' || + this._options.backpressure === 'block') && + this._buffer.length >= 
this._options.highWaterMark) { + return false; + } + return true; + } + + _getMinCursor() { + let min = Infinity; + for (const consumer of this._consumers) { + if (consumer.cursor < min) { + min = consumer.cursor; + } + } + return min === Infinity ? + this._bufferStart + this._buffer.length : min; + } + + _tryTrimBuffer() { + const minCursor = this._getMinCursor(); + const trimCount = minCursor - this._bufferStart; + if (trimCount > 0) { + ArrayPrototypeSplice(this._buffer, 0, trimCount); + this._bufferStart = minCursor; + + if (this._onBufferDrained && + this._buffer.length < this._options.highWaterMark) { + this._onBufferDrained(); + } + } + } + + _notifyConsumers() { + for (const consumer of this._consumers) { + if (consumer.resolve) { + const bufferIndex = consumer.cursor - this._bufferStart; + if (bufferIndex < this._buffer.length) { + const chunk = this._buffer[bufferIndex]; + consumer.cursor++; + const resolve = consumer.resolve; + consumer.resolve = null; + consumer.reject = null; + resolve({ done: false, value: chunk }); + this._tryTrimBuffer(); + } + } + } + } +} + +// ============================================================================= +// BroadcastWriter +// ============================================================================= + +class BroadcastWriter { + constructor(broadcastImpl) { + this._broadcast = broadcastImpl; + this._totalBytes = 0; + this._closed = false; + this._aborted = false; + this._pendingWrites = []; + this._pendingDrains = []; + + this._broadcast._onBufferDrained = () => { + this._resolvePendingWrites(); + this._resolvePendingDrains(true); + }; + } + + [drainableProtocol]() { + const desired = this.desiredSize; + if (desired === null) return null; + if (desired > 0) return PromiseResolve(true); + return new Promise((resolve, reject) => { + ArrayPrototypePush(this._pendingDrains, { resolve, reject }); + }); + } + + get desiredSize() { + if (this._closed || this._aborted) return null; + return 
this._broadcast._getDesiredSize(); + } + + async write(chunk) { + return this.writev([chunk]); + } + + async writev(chunks) { + if (this._closed || this._aborted) { + throw new ERR_INVALID_STATE('Writer is closed'); + } + + const converted = ArrayPrototypeMap(chunks, (c) => + (typeof c === 'string' ? encoder.encode(c) : c)); + + if (this._broadcast._write(converted)) { + for (let i = 0; i < converted.length; i++) { + this._totalBytes += converted[i].byteLength; + } + return; + } + + const policy = this._broadcast._options?.backpressure ?? 'strict'; + const highWaterMark = this._broadcast._options?.highWaterMark ?? 16; + + if (policy === 'strict') { + if (this._pendingWrites.length >= highWaterMark) { + throw new ERR_INVALID_STATE( + 'Backpressure violation: too many pending writes. ' + + 'Await each write() call to respect backpressure.'); + } + return new Promise((resolve, reject) => { + ArrayPrototypePush(this._pendingWrites, + { chunk: converted, resolve, reject }); + }); + } + + // 'block' policy + return new Promise((resolve, reject) => { + ArrayPrototypePush(this._pendingWrites, + { chunk: converted, resolve, reject }); + }); + } + + writeSync(chunk) { + if (this._closed || this._aborted) return false; + if (!this._broadcast._canWrite()) return false; + const converted = + typeof chunk === 'string' ? encoder.encode(chunk) : chunk; + if (this._broadcast._write([converted])) { + this._totalBytes += converted.byteLength; + return true; + } + return false; + } + + writevSync(chunks) { + if (this._closed || this._aborted) return false; + if (!this._broadcast._canWrite()) return false; + const converted = ArrayPrototypeMap(chunks, (c) => + (typeof c === 'string' ? 
encoder.encode(c) : c)); + if (this._broadcast._write(converted)) { + for (let i = 0; i < converted.length; i++) { + this._totalBytes += converted[i].byteLength; + } + return true; + } + return false; + } + + async end() { + if (this._closed) return this._totalBytes; + this._closed = true; + this._broadcast._end(); + this._resolvePendingDrains(false); + return this._totalBytes; + } + + endSync() { + if (this._closed) return this._totalBytes; + this._closed = true; + this._broadcast._end(); + this._resolvePendingDrains(false); + return this._totalBytes; + } + + async abort(reason) { + if (this._aborted) return; + this._aborted = true; + this._closed = true; + const error = reason ?? new ERR_INVALID_STATE('Aborted'); + this._rejectPendingWrites(error); + this._rejectPendingDrains(error); + this._broadcast._abort(error); + } + + abortSync(reason) { + if (this._aborted) return true; + this._aborted = true; + this._closed = true; + const error = reason ?? new ERR_INVALID_STATE('Aborted'); + this._rejectPendingWrites(error); + this._rejectPendingDrains(error); + this._broadcast._abort(error); + return true; + } + + _resolvePendingWrites() { + while (this._pendingWrites.length > 0 && this._broadcast._canWrite()) { + const pending = ArrayPrototypeShift(this._pendingWrites); + if (this._broadcast._write(pending.chunk)) { + for (let i = 0; i < pending.chunk.length; i++) { + this._totalBytes += pending.chunk[i].byteLength; + } + pending.resolve(); + } else { + this._pendingWrites.unshift(pending); + break; + } + } + } + + _rejectPendingWrites(error) { + const writes = this._pendingWrites; + this._pendingWrites = []; + for (let i = 0; i < writes.length; i++) { + writes[i].reject(error); + } + } + + _resolvePendingDrains(canWrite) { + const drains = this._pendingDrains; + this._pendingDrains = []; + for (let i = 0; i < drains.length; i++) { + drains[i].resolve(canWrite); + } + } + + _rejectPendingDrains(error) { + const drains = this._pendingDrains; + this._pendingDrains = []; 
+ for (let i = 0; i < drains.length; i++) { + drains[i].reject(error); + } + } +} + +// ============================================================================= +// Public API +// ============================================================================= + +/** + * Create a broadcast channel for push-model multi-consumer streaming. + * @param {{ highWaterMark?: number, backpressure?: string, signal?: AbortSignal }} [options] + * @returns {{ writer: Writer, broadcast: Broadcast }} + */ +function broadcast(options) { + const opts = { + highWaterMark: options?.highWaterMark ?? 16, + backpressure: options?.backpressure ?? 'strict', + signal: options?.signal, + }; + + const broadcastImpl = new BroadcastImpl(opts); + const writer = new BroadcastWriter(broadcastImpl); + + if (opts.signal) { + if (opts.signal.aborted) { + broadcastImpl.cancel(); + } else { + opts.signal.addEventListener('abort', () => { + broadcastImpl.cancel(); + }, { once: true }); + } + } + + return { writer, broadcast: broadcastImpl }; +} + +function isBroadcastable(value) { + return ( + value !== null && + typeof value === 'object' && + broadcastProtocol in value && + typeof value[broadcastProtocol] === 'function' + ); +} + +const Broadcast = { + from(input, options) { + if (isBroadcastable(input)) { + const bc = input[broadcastProtocol](options); + return { writer: {}, broadcast: bc }; + } + + const result = broadcast(options); + + (async () => { + try { + if (isAsyncIterable(input)) { + for await (const chunks of input) { + if (ArrayIsArray(chunks)) { + await result.writer.writev(chunks); + } + } + } else if (isSyncIterable(input)) { + for (const chunks of input) { + if (ArrayIsArray(chunks)) { + await result.writer.writev(chunks); + } + } + } + await result.writer.end(); + } catch (error) { + await result.writer.abort( + error instanceof Error ? 
error : new ERR_INVALID_ARG_TYPE('error', 'Error', String(error))); + } + })(); + + return result; + }, +}; + +module.exports = { + broadcast, + Broadcast, +}; diff --git a/lib/internal/streams/new/consumers.js b/lib/internal/streams/new/consumers.js new file mode 100644 index 00000000000000..b4ee67c88c779f --- /dev/null +++ b/lib/internal/streams/new/consumers.js @@ -0,0 +1,505 @@ +'use strict'; + +// New Streams API - Consumers & Utilities +// +// bytes(), text(), arrayBuffer() - collect entire stream +// tap(), tapSync() - observe without modifying +// merge() - temporal combining of sources +// ondrain() - backpressure drain utility + +const { + ArrayPrototypeFilter, + ArrayPrototypeMap, + ArrayPrototypePush, + ArrayPrototypeSlice, + SafePromiseAllReturnVoid, + SafePromiseRace, + SymbolAsyncIterator, +} = primordials; + +const { + codes: { + ERR_INVALID_ARG_TYPE, + ERR_OUT_OF_RANGE, + }, +} = require('internal/errors'); +const { TextDecoder } = require('internal/encoding'); +const { lazyDOMException } = require('internal/util'); + +const { + isAsyncIterable, + isSyncIterable, +} = require('internal/streams/new/from'); + +const { + concatBytes, +} = require('internal/streams/new/utils'); + +const { + drainableProtocol, +} = require('internal/streams/new/types'); + +// ============================================================================= +// Type Guards +// ============================================================================= + +function isMergeOptions(value) { + return ( + value !== null && + typeof value === 'object' && + !isAsyncIterable(value) && + !isSyncIterable(value) + ); +} + +// ============================================================================= +// Sync Consumers +// ============================================================================= + +/** + * Collect all bytes from a sync source. 
+ * @param {Iterable} source + * @param {{ limit?: number }} [options] + * @returns {Uint8Array} + */ +function bytesSync(source, options) { + const limit = options?.limit; + const chunks = []; + let totalBytes = 0; + + for (const batch of source) { + for (let i = 0; i < batch.length; i++) { + const chunk = batch[i]; + if (limit !== undefined) { + totalBytes += chunk.byteLength; + if (totalBytes > limit) { + throw new ERR_OUT_OF_RANGE('totalBytes', `<= ${limit}`, totalBytes); + } + } + ArrayPrototypePush(chunks, chunk); + } + } + + return concatBytes(chunks); +} + +/** + * Collect and decode text from a sync source. + * @param {Iterable} source + * @param {{ encoding?: string, limit?: number }} [options] + * @returns {string} + */ +function textSync(source, options) { + const data = bytesSync(source, options); + const decoder = new TextDecoder(options?.encoding ?? 'utf-8', { + fatal: true, + ignoreBOM: true, + }); + return decoder.decode(data); +} + +/** + * Collect bytes as ArrayBuffer from a sync source. + * @param {Iterable} source + * @param {{ limit?: number }} [options] + * @returns {ArrayBuffer} + */ +function arrayBufferSync(source, options) { + const data = bytesSync(source, options); + if (data.byteOffset === 0 && data.byteLength === data.buffer.byteLength) { + return data.buffer; + } + return data.buffer.slice(data.byteOffset, + data.byteOffset + data.byteLength); +} + +/** + * Collect all chunks as an array from a sync source. 
+ * @param {Iterable} source + * @param {{ limit?: number }} [options] + * @returns {Uint8Array[]} + */ +function arraySync(source, options) { + const limit = options?.limit; + const chunks = []; + let totalBytes = 0; + + for (const batch of source) { + for (let i = 0; i < batch.length; i++) { + const chunk = batch[i]; + if (limit !== undefined) { + totalBytes += chunk.byteLength; + if (totalBytes > limit) { + throw new ERR_OUT_OF_RANGE('totalBytes', `<= ${limit}`, totalBytes); + } + } + ArrayPrototypePush(chunks, chunk); + } + } + + return chunks; +} + +// ============================================================================= +// Async Consumers +// ============================================================================= + +/** + * Collect all bytes from an async or sync source. + * @param {AsyncIterable|Iterable} source + * @param {{ signal?: AbortSignal, limit?: number }} [options] + * @returns {Promise} + */ +async function bytes(source, options) { + const signal = options?.signal; + const limit = options?.limit; + + if (signal?.aborted) { + throw signal.reason ?? lazyDOMException('Aborted', 'AbortError'); + } + + const chunks = []; + + // Fast path: no signal and no limit + if (!signal && limit === undefined) { + if (isAsyncIterable(source)) { + for await (const batch of source) { + for (let i = 0; i < batch.length; i++) { + ArrayPrototypePush(chunks, batch[i]); + } + } + } else if (isSyncIterable(source)) { + for (const batch of source) { + for (let i = 0; i < batch.length; i++) { + ArrayPrototypePush(chunks, batch[i]); + } + } + } else { + throw new ERR_INVALID_ARG_TYPE('source', ['AsyncIterable', 'Iterable'], source); + } + return concatBytes(chunks); + } + + // Slow path: with signal or limit checks + let totalBytes = 0; + + if (isAsyncIterable(source)) { + for await (const batch of source) { + if (signal?.aborted) { + throw signal.reason ?? 
lazyDOMException('Aborted', 'AbortError'); + } + for (let i = 0; i < batch.length; i++) { + const chunk = batch[i]; + if (limit !== undefined) { + totalBytes += chunk.byteLength; + if (totalBytes > limit) { + throw new ERR_OUT_OF_RANGE('totalBytes', `<= ${limit}`, totalBytes); + } + } + ArrayPrototypePush(chunks, chunk); + } + } + } else if (isSyncIterable(source)) { + for (const batch of source) { + if (signal?.aborted) { + throw signal.reason ?? lazyDOMException('Aborted', 'AbortError'); + } + for (let i = 0; i < batch.length; i++) { + const chunk = batch[i]; + if (limit !== undefined) { + totalBytes += chunk.byteLength; + if (totalBytes > limit) { + throw new ERR_OUT_OF_RANGE('totalBytes', `<= ${limit}`, totalBytes); + } + } + ArrayPrototypePush(chunks, chunk); + } + } + } else { + throw new ERR_INVALID_ARG_TYPE('source', ['AsyncIterable', 'Iterable'], source); + } + + return concatBytes(chunks); +} + +/** + * Collect and decode text from an async or sync source. + * @param {AsyncIterable|Iterable} source + * @param {{ encoding?: string, signal?: AbortSignal, limit?: number }} [options] + * @returns {Promise} + */ +async function text(source, options) { + const data = await bytes(source, options); + const decoder = new TextDecoder(options?.encoding ?? 'utf-8', { + fatal: true, + ignoreBOM: true, + }); + return decoder.decode(data); +} + +/** + * Collect bytes as ArrayBuffer from an async or sync source. + * @param {AsyncIterable|Iterable} source + * @param {{ signal?: AbortSignal, limit?: number }} [options] + * @returns {Promise} + */ +async function arrayBuffer(source, options) { + const data = await bytes(source, options); + if (data.byteOffset === 0 && data.byteLength === data.buffer.byteLength) { + return data.buffer; + } + return data.buffer.slice(data.byteOffset, + data.byteOffset + data.byteLength); +} + +/** + * Collect all chunks as an array from an async or sync source. 
+ * @param {AsyncIterable|Iterable} source + * @param {{ signal?: AbortSignal, limit?: number }} [options] + * @returns {Promise} + */ +async function array(source, options) { + const signal = options?.signal; + const limit = options?.limit; + + if (signal?.aborted) { + throw signal.reason ?? lazyDOMException('Aborted', 'AbortError'); + } + + const chunks = []; + + // Fast path: no signal and no limit + if (!signal && limit === undefined) { + if (isAsyncIterable(source)) { + for await (const batch of source) { + for (let i = 0; i < batch.length; i++) { + ArrayPrototypePush(chunks, batch[i]); + } + } + } else if (isSyncIterable(source)) { + for (const batch of source) { + for (let i = 0; i < batch.length; i++) { + ArrayPrototypePush(chunks, batch[i]); + } + } + } else { + throw new ERR_INVALID_ARG_TYPE('source', ['AsyncIterable', 'Iterable'], source); + } + return chunks; + } + + // Slow path + let totalBytes = 0; + + if (isAsyncIterable(source)) { + for await (const batch of source) { + if (signal?.aborted) { + throw signal.reason ?? lazyDOMException('Aborted', 'AbortError'); + } + for (let i = 0; i < batch.length; i++) { + const chunk = batch[i]; + if (limit !== undefined) { + totalBytes += chunk.byteLength; + if (totalBytes > limit) { + throw new ERR_OUT_OF_RANGE('totalBytes', `<= ${limit}`, totalBytes); + } + } + ArrayPrototypePush(chunks, chunk); + } + } + } else if (isSyncIterable(source)) { + for (const batch of source) { + if (signal?.aborted) { + throw signal.reason ?? 
lazyDOMException('Aborted', 'AbortError'); + } + for (let i = 0; i < batch.length; i++) { + const chunk = batch[i]; + if (limit !== undefined) { + totalBytes += chunk.byteLength; + if (totalBytes > limit) { + throw new ERR_OUT_OF_RANGE('totalBytes', `<= ${limit}`, totalBytes); + } + } + ArrayPrototypePush(chunks, chunk); + } + } + } else { + throw new ERR_INVALID_ARG_TYPE('source', ['AsyncIterable', 'Iterable'], source); + } + + return chunks; +} + +// ============================================================================= +// Tap Utilities +// ============================================================================= + +/** + * Create a pass-through transform that observes chunks without modifying them. + * @param {Function} callback + * @returns {Function} + */ +function tap(callback) { + return async (chunks) => { + await callback(chunks); + return chunks; + }; +} + +/** + * Create a sync pass-through transform that observes chunks. + * @param {Function} callback + * @returns {Function} + */ +function tapSync(callback) { + return (chunks) => { + callback(chunks); + return chunks; + }; +} + +// ============================================================================= +// Drain Utility +// ============================================================================= + +/** + * Wait for a drainable object's backpressure to clear. 
+ * @param {object} drainable + * @returns {Promise|null} + */ +function ondrain(drainable) { + if ( + drainable === null || + drainable === undefined || + typeof drainable !== 'object' + ) { + return null; + } + + if ( + !(drainableProtocol in drainable) || + typeof drainable[drainableProtocol] !== 'function' + ) { + return null; + } + + try { + return drainable[drainableProtocol](); + } catch { + return null; + } +} + +// ============================================================================= +// Merge Utility +// ============================================================================= + +/** + * Merge multiple async iterables by yielding values in temporal order. + * @param {...(AsyncIterable|object)} args + * @returns {AsyncIterable} + */ +function merge(...args) { + let sources; + let options; + + if (args.length > 0 && isMergeOptions(args[args.length - 1])) { + options = args[args.length - 1]; + sources = ArrayPrototypeSlice(args, 0, -1); + } else { + sources = args; + } + + return { + async *[SymbolAsyncIterator]() { + const signal = options?.signal; + + if (signal?.aborted) { + throw signal.reason ?? lazyDOMException('Aborted', 'AbortError'); + } + + if (sources.length === 0) return; + + if (sources.length === 1) { + for await (const batch of sources[0]) { + if (signal?.aborted) { + throw signal.reason ?? lazyDOMException('Aborted', 'AbortError'); + } + yield batch; + } + return; + } + + // Multiple sources - race them + const states = ArrayPrototypeMap(sources, (source) => ({ + iterator: source[SymbolAsyncIterator](), + done: false, + pending: null, + })); + + const startIterator = (state, index) => { + if (!state.done && !state.pending) { + state.pending = state.iterator.next().then( + (result) => ({ index, result })); + } + }; + + // Start all + for (let i = 0; i < states.length; i++) { + startIterator(states[i], i); + } + + try { + while (true) { + if (signal?.aborted) { + throw signal.reason ?? 
lazyDOMException('Aborted', 'AbortError'); + } + + const pending = ArrayPrototypeFilter( + ArrayPrototypeMap(states, + (state) => state.pending), + (p) => p !== null); + + if (pending.length === 0) break; + + const { index, result } = await SafePromiseRace(pending); + + states[index].pending = null; + + if (result.done) { + states[index].done = true; + } else { + yield result.value; + startIterator(states[index], index); + } + } + } finally { + // Clean up: return all iterators + await SafePromiseAllReturnVoid(states, async (state) => { + if (!state.done && state.iterator.return) { + try { + await state.iterator.return(); + } catch { + // Ignore return errors + } + } + }); + } + }, + }; +} + +module.exports = { + bytes, + bytesSync, + text, + textSync, + arrayBuffer, + arrayBufferSync, + array, + arraySync, + tap, + tapSync, + merge, + ondrain, +}; diff --git a/lib/internal/streams/new/duplex.js b/lib/internal/streams/new/duplex.js new file mode 100644 index 00000000000000..272bf0a816dca2 --- /dev/null +++ b/lib/internal/streams/new/duplex.js @@ -0,0 +1,79 @@ +'use strict'; + +// New Streams API - Duplex Channel +// +// Creates a pair of connected channels where data written to one +// channel's writer appears in the other channel's readable. + +const { + SymbolAsyncDispose, +} = primordials; + +const { + push, +} = require('internal/streams/new/push'); + +/** + * Create a pair of connected duplex channels for bidirectional communication. + * @param {{ highWaterMark?: number, backpressure?: string, signal?: AbortSignal, + * a?: object, b?: object }} [options] + * @returns {[DuplexChannel, DuplexChannel]} + */ +function duplex(options) { + const { highWaterMark, backpressure, signal, a, b } = options ?? {}; + + // Channel A writes to B's readable (A->B direction) + const { writer: aWriter, readable: bReadable } = push({ + highWaterMark: a?.highWaterMark ?? highWaterMark, + backpressure: a?.backpressure ?? 
backpressure, + signal, + }); + + // Channel B writes to A's readable (B->A direction) + const { writer: bWriter, readable: aReadable } = push({ + highWaterMark: b?.highWaterMark ?? highWaterMark, + backpressure: b?.backpressure ?? backpressure, + signal, + }); + + let aWriterRef = aWriter; + let bWriterRef = bWriter; + + const channelA = { + get writer() { return aWriter; }, + readable: aReadable, + async close() { + if (aWriterRef === null) return; + const writer = aWriterRef; + aWriterRef = null; + if (!writer.endSync()) { + await writer.end(); + } + }, + [SymbolAsyncDispose]() { + return this.close(); + }, + }; + + const channelB = { + get writer() { return bWriter; }, + readable: bReadable, + async close() { + if (bWriterRef === null) return; + const writer = bWriterRef; + bWriterRef = null; + if (!writer.endSync()) { + await writer.end(); + } + }, + [SymbolAsyncDispose]() { + return this.close(); + }, + }; + + return [channelA, channelB]; +} + +module.exports = { + duplex, +}; diff --git a/lib/internal/streams/new/from.js b/lib/internal/streams/new/from.js new file mode 100644 index 00000000000000..552ac800e33d0e --- /dev/null +++ b/lib/internal/streams/new/from.js @@ -0,0 +1,575 @@ +'use strict'; + +// New Streams API - from() and fromSync() +// +// Creates normalized byte stream iterables from various input types. +// Handles recursive flattening of nested iterables and protocol conversions. + +const { + ArrayBuffer, + ArrayBufferIsView, + ArrayIsArray, + ArrayPrototypeEvery, + ArrayPrototypePush, + ArrayPrototypeSlice, + ObjectPrototypeToString, + Promise, + SymbolAsyncIterator, + SymbolIterator, + SymbolToPrimitive, + Uint8Array, +} = primordials; + +const { + codes: { + ERR_INVALID_ARG_TYPE, + }, +} = require('internal/errors'); +const { TextEncoder } = require('internal/encoding'); + +const { + toStreamable, + toAsyncStreamable, +} = require('internal/streams/new/types'); + +// Shared TextEncoder instance for string conversion. 
+const encoder = new TextEncoder(); + +// ============================================================================= +// Type Guards and Detection +// ============================================================================= + +/** + * Check if value is a primitive chunk (string, ArrayBuffer, or ArrayBufferView). + * @returns {boolean} + */ +function isPrimitiveChunk(value) { + if (typeof value === 'string') return true; + if (value instanceof ArrayBuffer) return true; + if (ArrayBufferIsView(value)) return true; + return false; +} + +/** + * Check if value implements ToStreamable protocol. + * @returns {boolean} + */ +function isToStreamable(value) { + return ( + value !== null && + typeof value === 'object' && + toStreamable in value && + typeof value[toStreamable] === 'function' + ); +} + +/** + * Check if value implements ToAsyncStreamable protocol. + * @returns {boolean} + */ +function isToAsyncStreamable(value) { + return ( + value !== null && + typeof value === 'object' && + toAsyncStreamable in value && + typeof value[toAsyncStreamable] === 'function' + ); +} + +/** + * Check if value is a sync iterable (has Symbol.iterator). + * @returns {boolean} + */ +function isSyncIterable(value) { + return ( + value !== null && + typeof value === 'object' && + SymbolIterator in value && + typeof value[SymbolIterator] === 'function' + ); +} + +/** + * Check if value is an async iterable (has Symbol.asyncIterator). + * @returns {boolean} + */ +function isAsyncIterable(value) { + return ( + value !== null && + typeof value === 'object' && + SymbolAsyncIterator in value && + typeof value[SymbolAsyncIterator] === 'function' + ); +} + +/** + * Check if object has a custom toString() (not Object.prototype.toString). + * @returns {boolean} + */ +function hasCustomToString(obj) { + const toString = obj.toString; + return typeof toString === 'function' && + toString !== ObjectPrototypeToString; +} + +/** + * Check if object has Symbol.toPrimitive. 
+ * @returns {boolean} + */ +function hasToPrimitive(obj) { + return ( + SymbolToPrimitive in obj && + typeof obj[SymbolToPrimitive] === 'function' + ); +} + +// ============================================================================= +// Primitive Conversion +// ============================================================================= + +/** + * Convert a primitive chunk to Uint8Array. + * - string: UTF-8 encoded + * - ArrayBuffer: wrapped as Uint8Array view (no copy) + * - ArrayBufferView: converted to Uint8Array view of same memory + * @param {string|ArrayBuffer|ArrayBufferView} chunk + * @returns {Uint8Array} + */ +function primitiveToUint8Array(chunk) { + if (typeof chunk === 'string') { + return encoder.encode(chunk); + } + if (chunk instanceof ArrayBuffer) { + return new Uint8Array(chunk); + } + if (chunk instanceof Uint8Array) { + return chunk; + } + // Other ArrayBufferView types (Int8Array, DataView, etc.) + return new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength); +} + +/** + * Try to coerce an object to string using custom methods. + * Returns null if object has no custom string coercion. + * @returns {string|null} + */ +function tryStringCoercion(obj) { + // Check for Symbol.toPrimitive first + if (hasToPrimitive(obj)) { + const toPrimitive = obj[SymbolToPrimitive]; + const result = toPrimitive.call(obj, 'string'); + if (typeof result === 'string') { + return result; + } + // toPrimitive returned non-string, fall through to toString + } + + // Check for custom toString + if (hasCustomToString(obj)) { + const result = obj.toString(); + return result; + } + + return null; +} + +// ============================================================================= +// Sync Normalization (for fromSync and sync contexts) +// ============================================================================= + +/** + * Normalize a sync streamable yield value to Uint8Array chunks. 
+ * Recursively flattens arrays, iterables, and protocol conversions. + * @yields {Uint8Array} + */ +function* normalizeSyncValue(value) { + // Handle primitives + if (isPrimitiveChunk(value)) { + yield primitiveToUint8Array(value); + return; + } + + // Handle ToStreamable protocol + if (isToStreamable(value)) { + const result = value[toStreamable](); + yield* normalizeSyncValue(result); + return; + } + + // Handle arrays (which are also iterable, but check first for efficiency) + if (ArrayIsArray(value)) { + for (let i = 0; i < value.length; i++) { + yield* normalizeSyncValue(value[i]); + } + return; + } + + // Handle other sync iterables + if (isSyncIterable(value)) { + for (const item of value) { + yield* normalizeSyncValue(item); + } + return; + } + + // Try string coercion for objects with custom toString/toPrimitive + if (typeof value === 'object' && value !== null) { + const str = tryStringCoercion(value); + if (str !== null) { + yield encoder.encode(str); + return; + } + } + + // Reject: no valid conversion + throw new ERR_INVALID_ARG_TYPE( + 'value', + ['string', 'ArrayBuffer', 'ArrayBufferView', 'Iterable'], + value, + ); +} + +/** + * Check if value is already a Uint8Array[] batch (fast path). + * @returns {boolean} + */ +function isUint8ArrayBatch(value) { + if (!ArrayIsArray(value)) return false; + if (value.length === 0) return true; + // Check first element - if it's a Uint8Array, assume the rest are too + return value[0] instanceof Uint8Array; +} + +/** + * Normalize a sync streamable source, yielding batches of Uint8Array. 
+ * @param {Iterable} source + * @yields {Uint8Array[]} + */ +function* normalizeSyncSource(source) { + for (const value of source) { + // Fast path 1: value is already a Uint8Array[] batch + if (isUint8ArrayBatch(value)) { + if (value.length > 0) { + yield value; + } + continue; + } + // Fast path 2: value is a single Uint8Array (very common) + if (value instanceof Uint8Array) { + yield [value]; + continue; + } + // Slow path: normalize the value + const batch = []; + for (const chunk of normalizeSyncValue(value)) { + ArrayPrototypePush(batch, chunk); + } + if (batch.length > 0) { + yield batch; + } + } +} + +// ============================================================================= +// Async Normalization (for from and async contexts) +// ============================================================================= + +/** + * Normalize an async streamable yield value to Uint8Array chunks. + * Recursively flattens arrays, iterables, async iterables, promises, + * and protocol conversions. 
+ * @yields {Uint8Array} + */ +async function* normalizeAsyncValue(value) { + // Handle promises first + if (value instanceof Promise) { + const resolved = await value; + yield* normalizeAsyncValue(resolved); + return; + } + + // Handle primitives + if (isPrimitiveChunk(value)) { + yield primitiveToUint8Array(value); + return; + } + + // Handle ToAsyncStreamable protocol (check before ToStreamable) + if (isToAsyncStreamable(value)) { + const result = value[toAsyncStreamable](); + if (result instanceof Promise) { + yield* normalizeAsyncValue(await result); + } else { + yield* normalizeAsyncValue(result); + } + return; + } + + // Handle ToStreamable protocol + if (isToStreamable(value)) { + const result = value[toStreamable](); + yield* normalizeAsyncValue(result); + return; + } + + // Handle arrays (which are also iterable, but check first for efficiency) + if (ArrayIsArray(value)) { + for (let i = 0; i < value.length; i++) { + yield* normalizeAsyncValue(value[i]); + } + return; + } + + // Handle async iterables (check before sync iterables since some objects + // have both) + if (isAsyncIterable(value)) { + for await (const item of value) { + yield* normalizeAsyncValue(item); + } + return; + } + + // Handle sync iterables + if (isSyncIterable(value)) { + for (const item of value) { + yield* normalizeAsyncValue(item); + } + return; + } + + // Try string coercion for objects with custom toString/toPrimitive + if (typeof value === 'object' && value !== null) { + const str = tryStringCoercion(value); + if (str !== null) { + yield encoder.encode(str); + return; + } + } + + // Reject: no valid conversion + throw new ERR_INVALID_ARG_TYPE( + 'value', + ['string', 'ArrayBuffer', 'ArrayBufferView', 'Iterable', 'AsyncIterable'], + value, + ); +} + +/** + * Normalize an async streamable source, yielding batches of Uint8Array. 
+ * @param {AsyncIterable|Iterable} source + * @yields {Uint8Array[]} + */ +async function* normalizeAsyncSource(source) { + // Prefer async iteration if available + if (isAsyncIterable(source)) { + for await (const value of source) { + // Fast path 1: value is already a Uint8Array[] batch + if (isUint8ArrayBatch(value)) { + if (value.length > 0) { + yield value; + } + continue; + } + // Fast path 2: value is a single Uint8Array (very common) + if (value instanceof Uint8Array) { + yield [value]; + continue; + } + // Slow path: normalize the value + const batch = []; + for await (const chunk of normalizeAsyncValue(value)) { + ArrayPrototypePush(batch, chunk); + } + if (batch.length > 0) { + yield batch; + } + } + return; + } + + // Fall back to sync iteration - batch all sync values together + if (isSyncIterable(source)) { + const batch = []; + + for (const value of source) { + // Fast path 1: value is already a Uint8Array[] batch + if (isUint8ArrayBatch(value)) { + // Flush any accumulated batch first + if (batch.length > 0) { + yield ArrayPrototypeSlice(batch); + batch.length = 0; + } + if (value.length > 0) { + yield value; + } + continue; + } + // Fast path 2: value is a single Uint8Array (very common) + if (value instanceof Uint8Array) { + ArrayPrototypePush(batch, value); + continue; + } + // Slow path: normalize the value - must flush and yield individually + if (batch.length > 0) { + yield ArrayPrototypeSlice(batch); + batch.length = 0; + } + const asyncBatch = []; + for await (const chunk of normalizeAsyncValue(value)) { + ArrayPrototypePush(asyncBatch, chunk); + } + if (asyncBatch.length > 0) { + yield asyncBatch; + } + } + + // Yield any remaining batched values + if (batch.length > 0) { + yield batch; + } + return; + } + + throw new ERR_INVALID_ARG_TYPE( + 'source', + ['Iterable', 'AsyncIterable'], + source, + ); +} + +// ============================================================================= +// Public API: from() and fromSync() +// 
============================================================================= + +/** + * Create a SyncByteStreamReadable from a ByteInput or SyncStreamable. + * @param {string|ArrayBuffer|ArrayBufferView|Iterable} input + * @returns {Iterable} + */ +function fromSync(input) { + // Check for primitives first (ByteInput) + if (isPrimitiveChunk(input)) { + const chunk = primitiveToUint8Array(input); + return { + *[SymbolIterator]() { + yield [chunk]; + }, + }; + } + + // Fast path: Uint8Array[] - yield as a single batch + if (ArrayIsArray(input)) { + if (input.length === 0) { + return { + *[SymbolIterator]() { + // Empty - yield nothing + }, + }; + } + // Check if it's an array of Uint8Array (common case) + if (input[0] instanceof Uint8Array) { + const allUint8 = ArrayPrototypeEvery(input, + (item) => item instanceof Uint8Array); + if (allUint8) { + const batch = input; + return { + *[SymbolIterator]() { + yield batch; + }, + }; + } + } + } + + // Must be a SyncStreamable + if (!isSyncIterable(input)) { + throw new ERR_INVALID_ARG_TYPE( + 'input', + ['string', 'ArrayBuffer', 'ArrayBufferView', 'Iterable'], + input, + ); + } + + return { + *[SymbolIterator]() { + yield* normalizeSyncSource(input); + }, + }; +} + +/** + * Create a ByteStreamReadable from a ByteInput or Streamable. 
+ * @param {string|ArrayBuffer|ArrayBufferView|Iterable|AsyncIterable} input + * @returns {AsyncIterable} + */ +function from(input) { + // Check for primitives first (ByteInput) + if (isPrimitiveChunk(input)) { + const chunk = primitiveToUint8Array(input); + return { + async *[SymbolAsyncIterator]() { + yield [chunk]; + }, + }; + } + + // Fast path: Uint8Array[] - yield as a single batch + if (ArrayIsArray(input)) { + if (input.length === 0) { + return { + async *[SymbolAsyncIterator]() { + // Empty - yield nothing + }, + }; + } + if (input[0] instanceof Uint8Array) { + const allUint8 = ArrayPrototypeEvery(input, + (item) => item instanceof Uint8Array); + if (allUint8) { + const batch = input; + return { + async *[SymbolAsyncIterator]() { + yield batch; + }, + }; + } + } + } + + // Must be a Streamable (sync or async iterable) + if (!isSyncIterable(input) && !isAsyncIterable(input)) { + throw new ERR_INVALID_ARG_TYPE( + 'input', + ['string', 'ArrayBuffer', 'ArrayBufferView', 'Iterable', 'AsyncIterable'], + input, + ); + } + + return { + async *[SymbolAsyncIterator]() { + yield* normalizeAsyncSource(input); + }, + }; +} + +// ============================================================================= +// Exports +// ============================================================================= + +module.exports = { + from, + fromSync, + // Internal helpers used by pull, pipeTo, etc. 
+ normalizeSyncValue, + normalizeSyncSource, + normalizeAsyncValue, + normalizeAsyncSource, + isPrimitiveChunk, + isToStreamable, + isToAsyncStreamable, + isSyncIterable, + isAsyncIterable, + isUint8ArrayBatch, + primitiveToUint8Array, +}; diff --git a/lib/internal/streams/new/pull.js b/lib/internal/streams/new/pull.js new file mode 100644 index 00000000000000..8f80ee19585784 --- /dev/null +++ b/lib/internal/streams/new/pull.js @@ -0,0 +1,710 @@ +'use strict'; + +// New Streams API - Pull Pipeline +// +// pull(), pullSync(), pipeTo(), pipeToSync() +// Pull-through pipelines with transforms. Data flows on-demand from source +// through transforms to consumer. + +const { + ArrayIsArray, + ArrayPrototypePush, + ArrayPrototypeSlice, + Error, + Promise, + SafePromiseAllReturnVoid, + String, + SymbolAsyncIterator, + SymbolIterator, + Uint8Array, +} = primordials; + +const { + codes: { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_OPERATION_FAILED, + }, +} = require('internal/errors'); +const { TextEncoder } = require('internal/encoding'); +const { lazyDOMException } = require('internal/util'); + +const { + normalizeAsyncSource, + normalizeSyncSource, + isSyncIterable, + isAsyncIterable, + isUint8ArrayBatch, +} = require('internal/streams/new/from'); + +const { + isPullOptions, + parsePullArgs, +} = require('internal/streams/new/utils'); + +// Shared TextEncoder instance for string conversion. +const encoder = new TextEncoder(); + +// ============================================================================= +// Type Guards and Helpers +// ============================================================================= + +/** + * Check if a value is a TransformObject (has transform property). + * @returns {boolean} + */ +function isTransformObject(value) { + return ( + value !== null && + typeof value === 'object' && + 'transform' in value && + typeof value.transform === 'function' + ); +} + +/** + * Check if a value is a Writer (has write method). 
+ * @returns {boolean} + */ +function isWriter(value) { + return ( + value !== null && + typeof value === 'object' && + 'write' in value && + typeof value.write === 'function' + ); +} + +/** + * Parse variadic arguments for pipeTo/pipeToSync. + * Returns { transforms, writer, options } + * @returns {object} + */ +function parsePipeToArgs(args) { + if (args.length === 0) { + throw new ERR_INVALID_ARG_VALUE('args', args, 'pipeTo requires a writer argument'); + } + + let options; + let writerIndex = args.length - 1; + + // Check if last arg is options + const last = args[args.length - 1]; + if (isPullOptions(last) && !isWriter(last)) { + options = last; + writerIndex = args.length - 2; + } + + if (writerIndex < 0) { + throw new ERR_INVALID_ARG_VALUE('args', args, 'pipeTo requires a writer argument'); + } + + const writer = args[writerIndex]; + if (!isWriter(writer)) { + throw new ERR_INVALID_ARG_TYPE('writer', 'object with a write method', writer); + } + + return { + transforms: ArrayPrototypeSlice(args, 0, writerIndex), + writer, + options, + }; +} + +// ============================================================================= +// Transform Output Flattening +// ============================================================================= + +/** + * Flatten transform yield to Uint8Array chunks (sync). + * @yields {Uint8Array} + */ +function* flattenTransformYieldSync(value) { + if (value instanceof Uint8Array) { + yield value; + return; + } + if (typeof value === 'string') { + yield encoder.encode(value); + return; + } + // Must be Iterable + if (isSyncIterable(value)) { + for (const item of value) { + yield* flattenTransformYieldSync(item); + } + return; + } + throw new ERR_INVALID_ARG_TYPE('value', ['Uint8Array', 'string', 'Iterable'], value); +} + +/** + * Flatten transform yield to Uint8Array chunks (async). 
+ * @yields {Uint8Array} + */ +async function* flattenTransformYieldAsync(value) { + if (value instanceof Uint8Array) { + yield value; + return; + } + if (typeof value === 'string') { + yield encoder.encode(value); + return; + } + // Check for async iterable first + if (isAsyncIterable(value)) { + for await (const item of value) { + yield* flattenTransformYieldAsync(item); + } + return; + } + // Must be sync Iterable + if (isSyncIterable(value)) { + for (const item of value) { + yield* flattenTransformYieldAsync(item); + } + return; + } + throw new ERR_INVALID_ARG_TYPE('value', ['Uint8Array', 'string', 'Iterable', 'AsyncIterable'], value); +} + +/** + * Process transform result (sync). + * @yields {Uint8Array[]} + */ +function* processTransformResultSync(result) { + if (result === null) { + return; + } + if (ArrayIsArray(result) && result.length > 0 && + result[0] instanceof Uint8Array) { + // Fast path: Uint8Array[] + if (result.length > 0) { + yield result; + } + return; + } + // Iterable or Generator + if (isSyncIterable(result)) { + const batch = []; + for (const item of result) { + for (const chunk of flattenTransformYieldSync(item)) { + ArrayPrototypePush(batch, chunk); + } + } + if (batch.length > 0) { + yield batch; + } + return; + } + throw new ERR_INVALID_ARG_TYPE('result', ['Array', 'Iterable'], result); +} + +/** + * Process transform result (async). 
+ * @yields {Uint8Array[]} + */ +async function* processTransformResultAsync(result) { + // Handle Promise + if (result instanceof Promise) { + const resolved = await result; + yield* processTransformResultAsync(resolved); + return; + } + if (result === null) { + return; + } + if (ArrayIsArray(result) && + (result.length === 0 || result[0] instanceof Uint8Array)) { + // Fast path: Uint8Array[] + if (result.length > 0) { + yield result; + } + return; + } + // Check for async iterable/generator first + if (isAsyncIterable(result)) { + const batch = []; + for await (const item of result) { + // Fast path: item is already Uint8Array + if (item instanceof Uint8Array) { + ArrayPrototypePush(batch, item); + continue; + } + // Slow path: flatten the item + for await (const chunk of flattenTransformYieldAsync(item)) { + ArrayPrototypePush(batch, chunk); + } + } + if (batch.length > 0) { + yield batch; + } + return; + } + // Sync Iterable or Generator + if (isSyncIterable(result)) { + const batch = []; + for (const item of result) { + // Fast path: item is already Uint8Array + if (item instanceof Uint8Array) { + ArrayPrototypePush(batch, item); + continue; + } + // Slow path: flatten the item + for await (const chunk of flattenTransformYieldAsync(item)) { + ArrayPrototypePush(batch, chunk); + } + } + if (batch.length > 0) { + yield batch; + } + return; + } + throw new ERR_INVALID_ARG_TYPE('result', ['Array', 'Iterable', 'AsyncIterable'], result); +} + +// ============================================================================= +// Sync Pipeline Implementation +// ============================================================================= + +/** + * Apply a single stateless sync transform to a source. 
+ * @yields {Uint8Array[]} + */ +function* applyStatelessSyncTransform(source, transform) { + for (const chunks of source) { + const result = transform(chunks); + yield* processTransformResultSync(result); + } +} + +/** + * Apply a single stateful sync transform to a source. + * @yields {Uint8Array[]} + */ +function* applyStatefulSyncTransform(source, transform) { + const output = transform(source); + const batch = []; + for (const item of output) { + for (const chunk of flattenTransformYieldSync(item)) { + ArrayPrototypePush(batch, chunk); + } + } + if (batch.length > 0) { + yield batch; + } +} + +/** + * Wrap sync source to add null flush signal at end. + * @yields {Uint8Array[]} + */ +function* withFlushSignalSync(source) { + for (const batch of source) { + yield batch; + } + yield null; // Flush signal +} + +/** + * Create a sync pipeline from source through transforms. + * @yields {Uint8Array[]} + */ +function* createSyncPipeline(source, transforms) { + // Normalize source + let current = withFlushSignalSync(normalizeSyncSource(source)); + + // Apply transforms - Object = stateful, function = stateless + for (let i = 0; i < transforms.length; i++) { + const transform = transforms[i]; + if (isTransformObject(transform)) { + current = applyStatefulSyncTransform(current, transform.transform); + } else { + current = applyStatelessSyncTransform(current, transform); + } + } + + // Yield results (filter out null from final output) + for (const batch of current) { + if (batch !== null) { + yield batch; + } + } +} + +// ============================================================================= +// Async Pipeline Implementation +// ============================================================================= + +/** + * Apply a single stateless async transform to a source. 
 * @yields {Uint8Array[]}
 */
// Applies a bare-function ("stateless") transform to each upstream batch.
// Note: the trailing null flush signal inserted by withFlushSignalAsync is
// passed straight into the transform, so transforms must tolerate null input.
async function* applyStatelessAsyncTransform(source, transform) {
  for await (const chunks of source) {
    const result = transform(chunks);
    // Fast path: result is already Uint8Array[] (common case)
    if (result === null) continue;
    if (isUint8ArrayBatch(result)) {
      if (result.length > 0) {
        yield result;
      }
      continue;
    }
    // Handle Promise of Uint8Array[]
    if (result instanceof Promise) {
      const resolved = await result;
      if (resolved === null) continue;
      if (isUint8ArrayBatch(resolved)) {
        if (resolved.length > 0) {
          yield resolved;
        }
        continue;
      }
      // Fall through to slow path
      yield* processTransformResultAsync(resolved);
      continue;
    }
    // Fast path: sync generator/iterable - collect all yielded items
    if (isSyncIterable(result) && !isAsyncIterable(result)) {
      const batch = [];
      for (const item of result) {
        if (isUint8ArrayBatch(item)) {
          for (let i = 0; i < item.length; i++) {
            ArrayPrototypePush(batch, item[i]);
          }
        } else if (item instanceof Uint8Array) {
          ArrayPrototypePush(batch, item);
        } else if (item !== null && item !== undefined) {
          // Arbitrary nested value: flatten recursively.
          for await (const chunk of flattenTransformYieldAsync(item)) {
            ArrayPrototypePush(batch, chunk);
          }
        }
      }
      if (batch.length > 0) {
        yield batch;
      }
      continue;
    }
    // Slow path for other types
    yield* processTransformResultAsync(result);
  }
}

/**
 * Apply a single stateful async transform to a source.
 * The transform owns the iteration of the whole source (including the null
 * flush signal); its yields are normalized into Uint8Array[] batches.
 * @yields {Uint8Array[]}
 */
async function* applyStatefulAsyncTransform(source, transform) {
  const output = transform(source);
  for await (const item of output) {
    // Fast path: item is already a Uint8Array[] batch (e.g. compression
    // transforms)
    if (isUint8ArrayBatch(item)) {
      if (item.length > 0) {
        yield item;
      }
      continue;
    }
    // Fast path: single Uint8Array
    if (item instanceof Uint8Array) {
      yield [item];
      continue;
    }
    // Slow path: flatten arbitrary transform yield
    const batch = [];
    for await (const chunk of flattenTransformYieldAsync(item)) {
      ArrayPrototypePush(batch, chunk);
    }
    if (batch.length > 0) {
      yield batch;
    }
  }
}

/**
 * Wrap async source to add null flush signal at end.
 * @yields {Uint8Array[]}
 */
async function* withFlushSignalAsync(source) {
  for await (const batch of source) {
    yield batch;
  }
  yield null; // Flush signal
}

/**
 * Convert sync iterable to async iterable.
 * @yields {Uint8Array[]}
 */
async function* syncToAsync(source) {
  for (const item of source) {
    yield item;
  }
}

/**
 * Create an async pipeline from source through transforms.
 * Object-shaped transforms are stateful; bare functions are stateless.
 * On error, abort() is invoked on every stateful transform seen so far.
 * @yields {Uint8Array[]}
 */
async function* createAsyncPipeline(source, transforms, signal) {
  // Check for abort
  if (signal?.aborted) {
    throw signal.reason ?? lazyDOMException('Aborted', 'AbortError');
  }

  // Normalize source to async
  let normalized;
  if (isAsyncIterable(source)) {
    normalized = normalizeAsyncSource(source);
  } else if (isSyncIterable(source)) {
    normalized = syncToAsync(normalizeSyncSource(source));
  } else {
    throw new ERR_INVALID_ARG_TYPE('source', ['Iterable', 'AsyncIterable'], source);
  }

  // Fast path: no transforms, just yield normalized source directly
  // (no flush signal is inserted in this case).
  if (transforms.length === 0) {
    for await (const batch of normalized) {
      if (signal?.aborted) {
        throw signal.reason ??
          lazyDOMException('Aborted', 'AbortError');
      }
      yield batch;
    }
    return;
  }

  // Add flush signal
  let current = withFlushSignalAsync(normalized);

  // Track stateful transforms for abort handling
  const statefulTransforms = [];

  try {
    // Apply transforms - Object = stateful, function = stateless
    for (let i = 0; i < transforms.length; i++) {
      const transform = transforms[i];
      if (isTransformObject(transform)) {
        ArrayPrototypePush(statefulTransforms, transform);
        current = applyStatefulAsyncTransform(current, transform.transform);
      } else {
        current = applyStatelessAsyncTransform(current, transform);
      }
    }

    // Yield results (filter out null from final output)
    for await (const batch of current) {
      // Check for abort on each iteration
      if (signal?.aborted) {
        throw signal.reason ?? lazyDOMException('Aborted', 'AbortError');
      }
      if (batch !== null) {
        yield batch;
      }
    }
  } catch (error) {
    // Abort all stateful transforms
    for (let i = 0; i < statefulTransforms.length; i++) {
      const transformObj = statefulTransforms[i];
      if (transformObj.abort) {
        try {
          await transformObj.abort(
            error instanceof Error ? error : new ERR_OPERATION_FAILED(String(error)));
        } catch {
          // Ignore abort errors
        }
      }
    }
    throw error;
  }
}

// =============================================================================
// Public API: pull() and pullSync()
// =============================================================================

/**
 * Create a sync pull-through pipeline with transforms.
 * Lazy: nothing is consumed until the returned iterable is iterated.
 * @param {Iterable} source - The sync streamable source
 * @param {...Function} transforms - Variadic transforms
 * @returns {Iterable}
 */
function pullSync(source, ...transforms) {
  return {
    *[SymbolIterator]() {
      yield* createSyncPipeline(source, transforms);
    },
  };
}

/**
 * Create an async pull-through pipeline with transforms.
+ * @param {Iterable|AsyncIterable} source - The streamable source + * @param {...(Function|object)} args - Transforms, with optional PullOptions + * as last argument + * @returns {AsyncIterable} + */ +function pull(source, ...args) { + const { transforms, options } = parsePullArgs(args); + + return { + async *[SymbolAsyncIterator]() { + yield* createAsyncPipeline(source, transforms, options?.signal); + }, + }; +} + +// ============================================================================= +// Public API: pipeTo() and pipeToSync() +// ============================================================================= + +/** + * Write a sync source through transforms to a sync writer. + * @param {Iterable} source + * @param {...(Function|object)} args - Transforms, writer, and optional options + * @returns {number} Total bytes written + */ +function pipeToSync(source, ...args) { + const { transforms, writer, options } = parsePipeToArgs(args); + + // Handle transform-writer + const finalTransforms = ArrayPrototypeSlice(transforms); + if (isTransformObject(writer)) { + ArrayPrototypePush(finalTransforms, writer); + } + + // Create pipeline + const pipeline = finalTransforms.length > 0 ? + createSyncPipeline( + { [SymbolIterator]: () => source[SymbolIterator]() }, + finalTransforms) : + source; + + let totalBytes = 0; + + try { + for (const batch of pipeline) { + for (let i = 0; i < batch.length; i++) { + const chunk = batch[i]; + writer.write(chunk); + totalBytes += chunk.byteLength; + } + } + + if (!options?.preventClose) { + writer.end(); + } + } catch (error) { + if (!options?.preventAbort) { + writer.abort(error instanceof Error ? error : new ERR_OPERATION_FAILED(String(error))); + } + throw error; + } + + return totalBytes; +} + +/** + * Write an async source through transforms to a writer. 
 * @param {AsyncIterable|Iterable} source
 * @param {...(Function|object)} args - Transforms, writer, and optional options
 * @returns {Promise} Total bytes written
 */
async function pipeTo(source, ...args) {
  const { transforms, writer, options } = parsePipeToArgs(args);

  // Handle transform-writer: it also participates as the final transform.
  const finalTransforms = ArrayPrototypeSlice(transforms);
  if (isTransformObject(writer)) {
    ArrayPrototypePush(finalTransforms, writer);
  }

  const signal = options?.signal;

  // Check for abort
  if (signal?.aborted) {
    throw signal.reason ?? lazyDOMException('Aborted', 'AbortError');
  }

  let totalBytes = 0;
  const hasWritev = typeof writer.writev === 'function';

  // Helper to write a batch efficiently: prefer a single writev() for
  // multi-chunk batches, otherwise issue the write() calls and await any
  // returned promises together.
  const writeBatch = async (batch) => {
    if (hasWritev && batch.length > 1) {
      await writer.writev(batch);
      for (let i = 0; i < batch.length; i++) {
        totalBytes += batch[i].byteLength;
      }
    } else {
      const promises = [];
      for (let i = 0; i < batch.length; i++) {
        const chunk = batch[i];
        const result = writer.write(chunk);
        if (result !== undefined) {
          ArrayPrototypePush(promises, result);
        }
        totalBytes += chunk.byteLength;
      }
      if (promises.length > 0) {
        await SafePromiseAllReturnVoid(promises);
      }
    }
  };

  try {
    // Fast path: no transforms - iterate directly
    if (finalTransforms.length === 0) {
      if (isAsyncIterable(source)) {
        for await (const batch of source) {
          if (signal?.aborted) {
            throw signal.reason ??
              lazyDOMException('Aborted', 'AbortError');
          }
          await writeBatch(batch);
        }
      } else {
        for (const batch of source) {
          if (signal?.aborted) {
            throw signal.reason ??
              lazyDOMException('Aborted', 'AbortError');
          }
          await writeBatch(batch);
        }
      }
    } else {
      // Slow path: has transforms - need pipeline
      const streamableSource = isAsyncIterable(source) ?
        { [SymbolAsyncIterator]: () => source[SymbolAsyncIterator]() } :
        { [SymbolIterator]: () => source[SymbolIterator]() };

      const pipeline = createAsyncPipeline(
        streamableSource, finalTransforms, signal);

      for await (const batch of pipeline) {
        if (signal?.aborted) {
          throw signal.reason ?? lazyDOMException('Aborted', 'AbortError');
        }
        await writeBatch(batch);
      }
    }

    if (!options?.preventClose) {
      await writer.end();
    }
  } catch (error) {
    // NOTE(review): if writer.abort() itself rejects, that rejection
    // replaces the original error - confirm this is intended.
    if (!options?.preventAbort) {
      await writer.abort(
        error instanceof Error ? error : new ERR_OPERATION_FAILED(String(error)));
    }
    throw error;
  }

  return totalBytes;
}

module.exports = {
  pull,
  pullSync,
  pipeTo,
  pipeToSync,
};
diff --git a/lib/internal/streams/new/push.js b/lib/internal/streams/new/push.js
new file mode 100644
index 00000000000000..0260cf10ba2600
--- /dev/null
+++ b/lib/internal/streams/new/push.js
@@ -0,0 +1,519 @@
'use strict';

// New Streams API - Push Stream Implementation
//
// Creates a bonded pair of writer and async iterable for push-based streaming
// with built-in backpressure.

const {
  ArrayPrototypePush,
  ArrayPrototypeShift,
  ArrayPrototypeSlice,
  Error,
  MathMax,
  Promise,
  PromiseResolve,
  SymbolAsyncIterator,
} = primordials;

const {
  codes: {
    ERR_INVALID_STATE,
  },
} = require('internal/errors');
const { lazyDOMException } = require('internal/util');

const {
  drainableProtocol,
} = require('internal/streams/new/types');

const {
  toUint8Array,
} = require('internal/streams/new/utils');

const {
  pull: pullWithTransforms,
} = require('internal/streams/new/pull');

// =============================================================================
// PushQueue - Internal Queue with Chunk-Based Backpressure
// =============================================================================

// Shared state between a PushWriter and the readable side created by
// createReadable(). Tracks buffered slots, pending reads/writes/drains, and
// the writer/consumer lifecycle states.
class PushQueue {
  constructor(options = {}) {
    /** Buffered chunks (each slot is from one write/writev call) */
    this._slots = [];
    /** Pending writes waiting for buffer space */
    this._pendingWrites = [];
    /** Pending reads waiting for data */
    this._pendingReads = [];
    /** Pending drains waiting for backpressure to clear */
    this._pendingDrains = [];
    /** Writer state: 'open' | 'closed' | 'errored' */
    this._writerState = 'open';
    /** Consumer state: 'active' | 'returned' | 'thrown' */
    this._consumerState = 'active';
    /** Error that closed the stream */
    this._error = null;
    /** Total bytes written */
    this._bytesWritten = 0;

    /** Configuration */
    this._highWaterMark = options.highWaterMark ?? 1;
    this._backpressure = options.backpressure ?? 'strict';
    this._signal = options.signal;
    this._abortHandler = undefined;

    if (this._signal) {
      if (this._signal.aborted) {
        // Signal already aborted: fail immediately.
        this.abort(this._signal.reason instanceof Error ?
          this._signal.reason :
          lazyDOMException('Aborted', 'AbortError'));
      } else {
        this._abortHandler = () => {
          this.abort(this._signal.reason instanceof Error ?
            this._signal.reason :
            lazyDOMException('Aborted', 'AbortError'));
        };
        this._signal.addEventListener('abort', this._abortHandler,
                                      { once: true });
      }
    }
  }

  // ===========================================================================
  // Writer Methods
  // ===========================================================================

  /**
   * Get slots available before hitting highWaterMark.
   * Returns null if writer is closed/errored or consumer has terminated.
   * @returns {number | null}
   */
  get desiredSize() {
    if (this._writerState !== 'open' || this._consumerState !== 'active') {
      return null;
    }
    return MathMax(0, this._highWaterMark - this._slots.length);
  }

  /**
   * Check if a sync write would be accepted.
   * Drop policies ('drop-oldest'/'drop-newest') always accept writes.
   * @returns {boolean}
   */
  canWriteSync() {
    if (this._writerState !== 'open') return false;
    if (this._consumerState !== 'active') return false;
    if ((this._backpressure === 'strict' ||
         this._backpressure === 'block') &&
        this._slots.length >= this._highWaterMark) {
      return false;
    }
    return true;
  }

  /**
   * Write chunks synchronously if possible.
   * Returns true if write completed, false if buffer is full.
   * @returns {boolean}
   */
  writeSync(chunks) {
    if (this._writerState !== 'open') return false;
    if (this._consumerState !== 'active') return false;

    if (this._slots.length >= this._highWaterMark) {
      switch (this._backpressure) {
        case 'strict':
        case 'block':
          return false;
        case 'drop-oldest':
          // Make room by discarding the oldest buffered slot.
          if (this._slots.length > 0) {
            ArrayPrototypeShift(this._slots);
          }
          break;
        case 'drop-newest':
          // Discard this write, but return true
          // NOTE(review): the discarded bytes are still counted in
          // _bytesWritten - confirm that is the intended accounting.
          for (let i = 0; i < chunks.length; i++) {
            this._bytesWritten += chunks[i].byteLength;
          }
          return true;
      }
    }

    ArrayPrototypePush(this._slots, chunks);
    for (let i = 0; i < chunks.length; i++) {
      this._bytesWritten += chunks[i].byteLength;
    }

    this._resolvePendingReads();
    return true;
  }

  /**
   * Write chunks asynchronously.
   * Resolves when the chunks are accepted into the buffer; under 'strict'
   * backpressure too many un-awaited writes throw instead of queueing.
   */
  async writeAsync(chunks) {
    if (this._writerState !== 'open') {
      throw new ERR_INVALID_STATE('Writer is closed');
    }
    if (this._consumerState !== 'active') {
      throw this._consumerState === 'thrown' && this._error ?
        this._error :
        new ERR_INVALID_STATE('Stream closed by consumer');
    }

    // Try sync first
    if (this.writeSync(chunks)) {
      return;
    }

    // Buffer is full
    switch (this._backpressure) {
      case 'strict':
        if (this._pendingWrites.length >= this._highWaterMark) {
          throw new ERR_INVALID_STATE(
            'Backpressure violation: too many pending writes. ' +
            'Await each write() call to respect backpressure.');
        }
        return new Promise((resolve, reject) => {
          ArrayPrototypePush(this._pendingWrites,
                             { chunks, resolve, reject });
        });
      case 'block':
        return new Promise((resolve, reject) => {
          ArrayPrototypePush(this._pendingWrites,
                             { chunks, resolve, reject });
        });
      default:
        // drop-* policies always succeed in writeSync above.
        throw new ERR_INVALID_STATE(
          'Unexpected: writeSync should have handled non-strict policy');
    }
  }

  /**
   * Signal end of stream. Returns total bytes written.
   * Idempotent; pending writes are rejected, pending reads complete.
   * @returns {number}
   */
  end() {
    if (this._writerState !== 'open') {
      return this._bytesWritten;
    }

    this._writerState = 'closed';
    this._cleanup();
    this._resolvePendingReads();
    this._rejectPendingWrites(new ERR_INVALID_STATE('Writer closed'));
    this._resolvePendingDrains(false);
    return this._bytesWritten;
  }

  /**
   * Signal error/abort.
   * NOTE(review): abort() after end() still transitions 'closed' ->
   * 'errored' - confirm this is intended.
   */
  abort(reason) {
    if (this._writerState === 'errored') return;

    this._writerState = 'errored';
    this._error = reason ?? new ERR_INVALID_STATE('Aborted');
    this._cleanup();
    this._rejectPendingReads(this._error);
    this._rejectPendingWrites(this._error);
    this._rejectPendingDrains(this._error);
  }

  get totalBytesWritten() {
    return this._bytesWritten;
  }

  /**
   * Wait for backpressure to clear (desiredSize > 0).
   */
  waitForDrain() {
    return new Promise((resolve, reject) => {
      ArrayPrototypePush(this._pendingDrains, { resolve, reject });
    });
  }

  // ===========================================================================
  // Consumer Methods
  // ===========================================================================

  // Resolve the next read: drains the whole buffer into one flat batch.
  async read() {
    // If there's data in the buffer, return it immediately
    if (this._slots.length > 0) {
      const result = this._drain();
      this._resolvePendingWrites();
      return { __proto__: null, value: result, done: false };
    }

    if (this._writerState === 'closed') {
      return { __proto__: null, value: undefined, done: true };
    }

    if (this._writerState === 'errored' && this._error) {
      throw this._error;
    }

    // No data yet: park until a write, end(), or abort() settles us.
    return new Promise((resolve, reject) => {
      ArrayPrototypePush(this._pendingReads, { resolve, reject });
    });
  }

  // Consumer broke out of iteration (iterator return()).
  consumerReturn() {
    if (this._consumerState !== 'active') return;
    this._consumerState = 'returned';
    this._cleanup();
    this._rejectPendingWrites(new ERR_INVALID_STATE('Stream closed by consumer'));
  }

  // Consumer threw into the iterator (iterator throw()).
  consumerThrow(error) {
    if (this._consumerState !== 'active') return;
    this._consumerState = 'thrown';
    this._error = error;
    this._cleanup();
    this._rejectPendingWrites(error);
  }

  // ===========================================================================
  // Private Methods
  // ===========================================================================

  // Flatten all buffered slots into a single chunk array and clear the buffer.
  _drain() {
    const result = [];
    for (let i = 0; i < this._slots.length; i++) {
      const slot = this._slots[i];
      for (let j = 0; j < slot.length; j++) {
        ArrayPrototypePush(result, slot[j]);
      }
    }
    this._slots = [];
    return result;
  }

  // Settle parked reads in FIFO order while data/closure/error is available.
  _resolvePendingReads() {
    while (this._pendingReads.length > 0) {
      if (this._slots.length > 0) {
        const pending = ArrayPrototypeShift(this._pendingReads);
        const result = this._drain();
        this._resolvePendingWrites();
        // NOTE(review): unlike read(), this result object has a normal
        // prototype - consider { __proto__: null, ... } for consistency.
        pending.resolve({ value: result, done: false });
      } else if (this._writerState === 'closed') {
        const pending = ArrayPrototypeShift(this._pendingReads);
        pending.resolve({ value: undefined, done: true });
      } else if (this._writerState === 'errored' && this._error) {
        const pending = ArrayPrototypeShift(this._pendingReads);
        pending.reject(this._error);
      } else {
        break;
      }
    }
  }

  // Move queued writes into freed buffer slots and resolve their promises;
  // then wake drain waiters if there is still room.
  _resolvePendingWrites() {
    while (this._pendingWrites.length > 0 &&
           this._slots.length < this._highWaterMark) {
      const pending = ArrayPrototypeShift(this._pendingWrites);
      ArrayPrototypePush(this._slots, pending.chunks);
      for (let i = 0; i < pending.chunks.length; i++) {
        this._bytesWritten += pending.chunks[i].byteLength;
      }
      pending.resolve();
    }

    if (this._slots.length < this._highWaterMark) {
      this._resolvePendingDrains(true);
    }
  }

  _resolvePendingDrains(canWrite) {
    const drains = this._pendingDrains;
    this._pendingDrains = [];
    for (let i = 0; i < drains.length; i++) {
      drains[i].resolve(canWrite);
    }
  }

  _rejectPendingDrains(error) {
    const drains = this._pendingDrains;
    this._pendingDrains = [];
    for (let i = 0; i < drains.length; i++) {
      drains[i].reject(error);
    }
  }

  _rejectPendingReads(error) {
    const reads = this._pendingReads;
    this._pendingReads = [];
    for (let i = 0; i < reads.length; i++) {
      reads[i].reject(error);
    }
  }

  _rejectPendingWrites(error) {
    const writes = this._pendingWrites;
    this._pendingWrites = [];
    for (let i = 0; i < writes.length; i++) {
      writes[i].reject(error);
    }
  }

  // Detach the abort listener (if any) once the stream settles.
  _cleanup() {
    if (this._signal && this._abortHandler) {
      this._signal.removeEventListener('abort', this._abortHandler);
      this._abortHandler = undefined;
    }
  }
}

// =============================================================================
// PushWriter Implementation
// =============================================================================

// Thin public facade over PushQueue exposing the writer protocol.
class PushWriter {
  constructor(queue) {
    this._queue = queue;
  }

  // Drainable protocol: null when terminated, resolved true when writable,
  // otherwise a promise that settles when backpressure clears.
  [drainableProtocol]() {
    const desired = this.desiredSize;
    if (desired === null) return null;
    if (desired > 0) return PromiseResolve(true);
    return this._queue.waitForDrain();
  }

  get desiredSize() {
    return this._queue.desiredSize;
  }

  async write(chunk) {
    const bytes = toUint8Array(chunk);
    await this._queue.writeAsync([bytes]);
  }

  async writev(chunks) {
    const bytes = [];
    for (let i = 0; i < chunks.length; i++) {
      ArrayPrototypePush(bytes, toUint8Array(chunks[i]));
    }
    await this._queue.writeAsync(bytes);
  }

  writeSync(chunk) {
    if (!this._queue.canWriteSync()) return false;
    const bytes = toUint8Array(chunk);
    return this._queue.writeSync([bytes]);
  }

  writevSync(chunks) {
    if (!this._queue.canWriteSync()) return false;
    const bytes = [];
    for (let i = 0; i < chunks.length; i++) {
      ArrayPrototypePush(bytes, toUint8Array(chunks[i]));
    }
    return this._queue.writeSync(bytes);
  }

  async end() {
    return this._queue.end();
  }

  endSync() {
    return this._queue.end();
  }

  async abort(reason) {
    this._queue.abort(reason);
  }

  abortSync(reason) {
    this._queue.abort(reason);
    return true;
  }
}

// =============================================================================
// Readable Implementation
// =============================================================================

// Wraps a PushQueue as a bare async-iterable; return()/throw() detach the
// consumer and notify the writer side.
function createReadable(queue) {
  return {
    [SymbolAsyncIterator]() {
      return {
        async next() {
          return queue.read();
        },
        async return() {
          queue.consumerReturn();
          return { __proto__: null, value: undefined, done: true };
        },
        async throw(error) {
          queue.consumerThrow(error);
          return { __proto__: null, value: undefined, done: true };
        },
      };
    },
  };
}

// =============================================================================
// Stream.push() Factory
// =============================================================================

// An options bag is any non-null object that is not a stateful transform
// (i.e. lacks a 'transform' property).
function isOptions(arg) {
  return (
    typeof arg === 'object' &&
    arg !== null &&
    !('transform' in arg)
  );
}

// Splits the variadic push() arguments into transforms plus a trailing
// options object.
function parseArgs(args) {
  if (args.length === 0) {
    return { transforms: [], options: {} };
  }

  const last = args[args.length - 1];
  if (isOptions(last)) {
    return {
      transforms: ArrayPrototypeSlice(args, 0, -1),
      options: last,
    };
  }

  return {
    transforms: args,
    options: {},
  };
}

/**
 * Create a push stream with optional transforms.
 * @param {...(Function|object)} args - Transforms, then options (optional)
 * @returns {{ writer: Writer, readable: AsyncIterable }}
 */
function push(...args) {
  const { transforms, options } = parseArgs(args);

  const queue = new PushQueue(options);
  const writer = new PushWriter(queue);
  const rawReadable = createReadable(queue);

  // Apply transforms lazily if provided
  let readable;
  if (transforms.length > 0) {
    if (options.signal) {
      readable = pullWithTransforms(
        rawReadable, ...transforms, { signal: options.signal });
    } else {
      readable = pullWithTransforms(rawReadable, ...transforms);
    }
  } else {
    readable = rawReadable;
  }

  return { writer, readable };
}

module.exports = {
  push,
};
diff --git a/lib/internal/streams/new/share.js b/lib/internal/streams/new/share.js
new file mode 100644
index 00000000000000..f7f4353393a36e
--- /dev/null
+++ b/lib/internal/streams/new/share.js
@@ -0,0 +1,636 @@
'use strict';

// New Streams API - Share
//
// Pull-model multi-consumer streaming. Shares a single source among
// multiple consumers with explicit buffering.

const {
  ArrayPrototypePush,
  ArrayPrototypeShift,
  ArrayPrototypeSplice,
  Error,
  Promise,
  PromiseResolve,
  SafeSet,
  String,
  SymbolAsyncIterator,
  SymbolDispose,
  SymbolIterator,
} = primordials;

const {
  shareProtocol,
  shareSyncProtocol,
} = require('internal/streams/new/types');

const {
  isAsyncIterable,
  isSyncIterable,
} = require('internal/streams/new/from');

const {
  pull: pullWithTransforms,
  pullSync: pullSyncWithTransforms,
} = require('internal/streams/new/pull');

const {
  parsePullArgs,
} = require('internal/streams/new/utils');

const {
  codes: {
    ERR_INVALID_ARG_TYPE,
    ERR_OPERATION_FAILED,
    ERR_OUT_OF_RANGE,
  },
} = require('internal/errors');

// =============================================================================
// Async Share Implementation
// =============================================================================

// Fans a single (possibly sync) source out to multiple pull() consumers.
// Chunks are buffered from the slowest consumer's cursor forward; the buffer
// is trimmed as the slowest consumer advances.
class ShareImpl {
  constructor(source, options) {
    this._source = source;
    this._options = options;
    // Shared chunk window; _bufferStart is the absolute index of _buffer[0].
    this._buffer = [];
    this._bufferStart = 0;
    this._consumers = new SafeSet();
    this._sourceIterator = null;
    this._sourceExhausted = false;
    this._sourceError = null;
    this._cancelled = false;
    // Single-flight guard: only one source pull is in flight at a time.
    this._pulling = false;
    this._pullWaiters = [];
  }

  get consumerCount() {
    return this._consumers.size;
  }

  get bufferSize() {
    return this._buffer.length;
  }

  // Create a new consumer, optionally wrapped in a transform pipeline.
  pull(...args) {
    const { transforms, options } = parsePullArgs(args);
    const rawConsumer = this._createRawConsumer();

    if (transforms.length > 0) {
      if (options) {
        return pullWithTransforms(rawConsumer, ...transforms, options);
      }
      return pullWithTransforms(rawConsumer, ...transforms);
    }
    return rawConsumer;
  }

  // Builds the per-consumer async iterator. Each consumer tracks its own
  // absolute cursor into the shared buffer.
  _createRawConsumer() {
    const state = {
      cursor: this._bufferStart,
      // NOTE(review): resolve/reject appear to never be assigned anywhere;
      // the cancel() branch that reads them looks unreachable - confirm.
      resolve: null,
      reject: null,
      detached: false,
    };

    this._consumers.add(state);
    const self = this;

    return {
      [SymbolAsyncIterator]() {
        return {
          async next() {
            if (self._sourceError) {
              state.detached = true;
              self._consumers.delete(state);
              throw self._sourceError;
            }

            if (state.detached) {
              return { __proto__: null, done: true, value: undefined };
            }

            if (self._cancelled) {
              state.detached = true;
              self._consumers.delete(state);
              return { __proto__: null, done: true, value: undefined };
            }

            // Check if data is available in buffer
            const bufferIndex = state.cursor - self._bufferStart;
            if (bufferIndex < self._buffer.length) {
              const chunk = self._buffer[bufferIndex];
              state.cursor++;
              self._tryTrimBuffer();
              return { __proto__: null, done: false, value: chunk };
            }

            if (self._sourceExhausted) {
              state.detached = true;
              self._consumers.delete(state);
              return { __proto__: null, done: true, value: undefined };
            }

            // Need to pull from source - check buffer limit
            const canPull = await self._waitForBufferSpace(state);
            if (!canPull) {
              state.detached = true;
              self._consumers.delete(state);
              if (self._sourceError) throw self._sourceError;
              return { __proto__: null, done: true, value: undefined };
            }

            await self._pullFromSource();

            if (self._sourceError) {
              state.detached = true;
              self._consumers.delete(state);
              throw self._sourceError;
            }

            // Re-check the buffer; another consumer's pull may also have
            // satisfied us.
            const newBufferIndex = state.cursor - self._bufferStart;
            if (newBufferIndex < self._buffer.length) {
              const chunk = self._buffer[newBufferIndex];
              state.cursor++;
              self._tryTrimBuffer();
              return { __proto__: null, done: false, value: chunk };
            }

            if (self._sourceExhausted) {
              state.detached = true;
              self._consumers.delete(state);
              return { __proto__: null, done: true, value: undefined };
            }

            return { __proto__: null, done: true, value: undefined };
          },

          async return() {
            state.detached = true;
            state.resolve = null;
            state.reject = null;
            self._consumers.delete(state);
            self._tryTrimBuffer();
            return { __proto__: null, done: true, value: undefined };
          },

          async throw() {
            state.detached = true;
            state.resolve = null;
            state.reject = null;
            self._consumers.delete(state);
            self._tryTrimBuffer();
            return { __proto__: null, done: true, value: undefined };
          },
        };
      },
    };
  }

  // Cancel the share: stop the source, detach all consumers, wake waiters.
  cancel(reason) {
    if (this._cancelled) return;
    this._cancelled = true;

    if (reason) {
      this._sourceError = reason;
    }

    if (this._sourceIterator?.return) {
      this._sourceIterator.return().catch(() => {});
    }

    for (const consumer of this._consumers) {
      if (consumer.resolve) {
        if (reason) {
          consumer.reject?.(reason);
        } else {
          consumer.resolve({ done: true, value: undefined });
        }
        consumer.resolve = null;
        consumer.reject = null;
      }
      consumer.detached = true;
    }
    this._consumers.clear();

    for (let i = 0; i < this._pullWaiters.length; i++) {
      this._pullWaiters[i]();
    }
    this._pullWaiters = [];
  }

  [SymbolDispose]() {
    this.cancel();
  }

  // Internal methods

  // Enforces the highWaterMark policy before pulling more from the source.
  // Returns false only when the share was cancelled while waiting.
  async _waitForBufferSpace(_state) {
    while (this._buffer.length >= this._options.highWaterMark) {
      if (this._cancelled || this._sourceError || this._sourceExhausted) {
        return !this._cancelled;
      }

      switch (this._options.backpressure) {
        case 'strict':
          throw new ERR_OUT_OF_RANGE(
            'buffer size', `<= ${this._options.highWaterMark}`,
            this._buffer.length);
        case 'block':
          // Park until a trim or pull completion wakes us, then re-check.
          await new Promise((resolve) => {
            ArrayPrototypePush(this._pullWaiters, resolve);
          });
          break;
        case 'drop-oldest':
          // Evict the oldest chunk and fast-forward lagging consumers.
          ArrayPrototypeShift(this._buffer);
          this._bufferStart++;
          for (const consumer of this._consumers) {
            if (consumer.cursor < this._bufferStart) {
              consumer.cursor = this._bufferStart;
            }
          }
          return true;
        case 'drop-newest':
          return true;
      }
    }
    return true;
  }

  // Pull exactly one chunk from the source. If a pull is already in flight,
  // the returned promise resolves when that pull settles (the caller then
  // re-checks the buffer). Source errors are latched into _sourceError.
  _pullFromSource() {
    if (this._sourceExhausted || this._cancelled) {
      return PromiseResolve();
    }

    if (this._pulling) {
      return new Promise((resolve) => {
        ArrayPrototypePush(this._pullWaiters, resolve);
      });
    }

    this._pulling = true;

    return (async () => {
      try {
        if (!this._sourceIterator) {
          // Lazily acquire the iterator; sync sources are wrapped so the
          // rest of the code only deals with the async protocol.
          if (isAsyncIterable(this._source)) {
            this._sourceIterator =
              this._source[SymbolAsyncIterator]();
          } else if (isSyncIterable(this._source)) {
            const syncIterator =
              this._source[SymbolIterator]();
            this._sourceIterator = {
              async next() {
                return syncIterator.next();
              },
              async return() {
                return syncIterator.return?.() ??
                  { done: true, value: undefined };
              },
            };
          } else {
            throw new ERR_INVALID_ARG_TYPE(
              'source', ['AsyncIterable', 'Iterable'], this._source);
          }
        }

        const result = await this._sourceIterator.next();

        if (result.done) {
          this._sourceExhausted = true;
        } else {
          ArrayPrototypePush(this._buffer, result.value);
        }
      } catch (error) {
        this._sourceError =
          error instanceof Error ? error : new ERR_OPERATION_FAILED(String(error));
        this._sourceExhausted = true;
      } finally {
        this._pulling = false;
        for (let i = 0; i < this._pullWaiters.length; i++) {
          this._pullWaiters[i]();
        }
        this._pullWaiters = [];
      }
    })();
  }

  // Absolute cursor of the slowest attached consumer; when there are no
  // consumers, the end of the buffer (so everything can be trimmed).
  _getMinCursor() {
    let min = Infinity;
    for (const consumer of this._consumers) {
      if (consumer.cursor < min) {
        min = consumer.cursor;
      }
    }
    return min === Infinity ?
      this._bufferStart + this._buffer.length : min;
  }

  // Drop chunks every consumer has already read and wake blocked pullers.
  _tryTrimBuffer() {
    const minCursor = this._getMinCursor();
    const trimCount = minCursor - this._bufferStart;
    if (trimCount > 0) {
      ArrayPrototypeSplice(this._buffer, 0, trimCount);
      this._bufferStart = minCursor;
      for (let i = 0; i < this._pullWaiters.length; i++) {
        this._pullWaiters[i]();
      }
      this._pullWaiters = [];
    }
  }
}

// =============================================================================
// Sync Share Implementation
// =============================================================================

class SyncShareImpl {
  constructor(source, options) {
    this._source = source;
    this._options = options;
    this._buffer = [];
    this._bufferStart = 0;
    this._consumers = new SafeSet();
    this._sourceIterator = null;
    this._sourceExhausted = false;
    this._sourceError = null;
    this._cancelled = false;
  }

  get consumerCount() {
    return this._consumers.size;
  }

  get bufferSize() {
    return this._buffer.length;
  }

  pull(...transforms) {
    const rawConsumer = this._createRawConsumer();

    if (transforms.length > 0) {
      return pullSyncWithTransforms(rawConsumer, ...transforms);
    }
    return rawConsumer;
  }

  _createRawConsumer() {
    const state = {
      cursor: this._bufferStart,
      detached: false,
    };

    this._consumers.add(state);
    const self = this;

    return {
      [SymbolIterator]() {
        return {
          next() {
            if (state.detached) {
              return { done: true, value: undefined };
            }
            if (self._sourceError) {
              state.detached = true;
              self._consumers.delete(state);
              throw self._sourceError;
            }
            if (self._cancelled) {
              state.detached = true;
              self._consumers.delete(state);
              return { done: true, value: undefined };
            }

            const bufferIndex = state.cursor - self._bufferStart;
            if (bufferIndex < self._buffer.length) {
              const chunk = self._buffer[bufferIndex];
              state.cursor++;
              self._tryTrimBuffer();
              return { done: false, value: chunk };
            }

            if
(self._sourceExhausted) { + state.detached = true; + self._consumers.delete(state); + return { done: true, value: undefined }; + } + + // Check buffer limit + if (self._buffer.length >= self._options.highWaterMark) { + switch (self._options.backpressure) { + case 'strict': + throw new ERR_OUT_OF_RANGE( + 'buffer size', `<= ${self._options.highWaterMark}`, + self._buffer.length); + case 'block': + throw new ERR_OUT_OF_RANGE( + 'buffer size', `<= ${self._options.highWaterMark}`, + self._buffer.length); + case 'drop-oldest': + ArrayPrototypeShift(self._buffer); + self._bufferStart++; + for (const consumer of self._consumers) { + if (consumer.cursor < self._bufferStart) { + consumer.cursor = self._bufferStart; + } + } + break; + case 'drop-newest': + state.detached = true; + self._consumers.delete(state); + return { done: true, value: undefined }; + } + } + + self._pullFromSource(); + + if (self._sourceError) { + state.detached = true; + self._consumers.delete(state); + throw self._sourceError; + } + + const newBufferIndex = state.cursor - self._bufferStart; + if (newBufferIndex < self._buffer.length) { + const chunk = self._buffer[newBufferIndex]; + state.cursor++; + self._tryTrimBuffer(); + return { done: false, value: chunk }; + } + + if (self._sourceExhausted) { + state.detached = true; + self._consumers.delete(state); + return { done: true, value: undefined }; + } + + return { done: true, value: undefined }; + }, + + return() { + state.detached = true; + self._consumers.delete(state); + self._tryTrimBuffer(); + return { done: true, value: undefined }; + }, + + throw() { + state.detached = true; + self._consumers.delete(state); + self._tryTrimBuffer(); + return { done: true, value: undefined }; + }, + }; + }, + }; + } + + cancel(reason) { + if (this._cancelled) return; + this._cancelled = true; + + if (reason) { + this._sourceError = reason; + } + + if (this._sourceIterator?.return) { + this._sourceIterator.return(); + } + + for (const consumer of this._consumers) 
{ + consumer.detached = true; + } + this._consumers.clear(); + } + + [SymbolDispose]() { + this.cancel(); + } + + _pullFromSource() { + if (this._sourceExhausted || this._cancelled) return; + + try { + this._sourceIterator ||= this._source[SymbolIterator](); + + const result = this._sourceIterator.next(); + + if (result.done) { + this._sourceExhausted = true; + } else { + ArrayPrototypePush(this._buffer, result.value); + } + } catch (error) { + this._sourceError = + error instanceof Error ? error : new ERR_OPERATION_FAILED(String(error)); + this._sourceExhausted = true; + } + } + + _getMinCursor() { + let min = Infinity; + for (const consumer of this._consumers) { + if (consumer.cursor < min) { + min = consumer.cursor; + } + } + return min === Infinity ? + this._bufferStart + this._buffer.length : min; + } + + _tryTrimBuffer() { + const minCursor = this._getMinCursor(); + const trimCount = minCursor - this._bufferStart; + if (trimCount > 0) { + ArrayPrototypeSplice(this._buffer, 0, trimCount); + this._bufferStart = minCursor; + } + } +} + +// ============================================================================= +// Public API +// ============================================================================= + +function share(source, options) { + const opts = { + highWaterMark: options?.highWaterMark ?? 16, + backpressure: options?.backpressure ?? 'strict', + signal: options?.signal, + }; + + const shareImpl = new ShareImpl(source, opts); + + if (opts.signal) { + if (opts.signal.aborted) { + shareImpl.cancel(); + } else { + opts.signal.addEventListener('abort', () => { + shareImpl.cancel(); + }, { once: true }); + } + } + + return shareImpl; +} + +function shareSync(source, options) { + const opts = { + highWaterMark: options?.highWaterMark ?? 16, + backpressure: options?.backpressure ?? 
'strict', + }; + + return new SyncShareImpl(source, opts); +} + +function isShareable(value) { + return ( + value !== null && + typeof value === 'object' && + shareProtocol in value && + typeof value[shareProtocol] === 'function' + ); +} + +function isSyncShareable(value) { + return ( + value !== null && + typeof value === 'object' && + shareSyncProtocol in value && + typeof value[shareSyncProtocol] === 'function' + ); +} + +const Share = { + from(input, options) { + if (isShareable(input)) { + return input[shareProtocol](options); + } + if (isAsyncIterable(input) || isSyncIterable(input)) { + return share(input, options); + } + throw new ERR_INVALID_ARG_TYPE( + 'input', ['Shareable', 'AsyncIterable', 'Iterable'], input); + }, +}; + +const SyncShare = { + fromSync(input, options) { + if (isSyncShareable(input)) { + return input[shareSyncProtocol](options); + } + if (isSyncIterable(input)) { + return shareSync(input, options); + } + throw new ERR_INVALID_ARG_TYPE( + 'input', ['SyncShareable', 'Iterable'], input); + }, +}; + +module.exports = { + share, + shareSync, + Share, + SyncShare, +}; diff --git a/lib/internal/streams/new/transform.js b/lib/internal/streams/new/transform.js new file mode 100644 index 00000000000000..c9bf3ad845866b --- /dev/null +++ b/lib/internal/streams/new/transform.js @@ -0,0 +1,430 @@ +'use strict'; + +// New Streams API - Compression / Decompression Transforms +// +// Creates bare native zlib handles via internalBinding('zlib'), bypassing +// the stream.Transform / ZlibBase / EventEmitter machinery entirely. +// Compression runs on the libuv threadpool via handle.write() (async) so +// I/O and upstream transforms can overlap with compression work. +// Each factory returns a transform descriptor that can be passed to pull(). 
+ +const { + ArrayPrototypePush, + ArrayPrototypeSplice, + MathMax, + NumberIsNaN, + ObjectEntries, + ObjectKeys, + Promise, + SymbolAsyncIterator, + Uint32Array, +} = primordials; + +const { Buffer } = require('buffer'); +const { + genericNodeError, +} = require('internal/errors'); +const binding = internalBinding('zlib'); +const constants = internalBinding('constants').zlib; + +const { + // Zlib modes + DEFLATE, INFLATE, GZIP, GUNZIP, + BROTLI_ENCODE, BROTLI_DECODE, + ZSTD_COMPRESS, ZSTD_DECOMPRESS, + // Zlib flush + Z_NO_FLUSH, Z_FINISH, + // Zlib defaults + Z_DEFAULT_WINDOWBITS, Z_DEFAULT_COMPRESSION, + Z_DEFAULT_MEMLEVEL, Z_DEFAULT_STRATEGY, Z_DEFAULT_CHUNK, + // Brotli flush + BROTLI_OPERATION_PROCESS, BROTLI_OPERATION_FINISH, + // Zstd flush + ZSTD_e_continue, ZSTD_e_end, +} = constants; + +// --------------------------------------------------------------------------- +// Batch high water mark - yield output in chunks of approximately this size. +// --------------------------------------------------------------------------- +const BATCH_HWM = 64 * 1024; + +// Pre-allocated empty buffer for flush/finalize calls. +const kEmpty = Buffer.alloc(0); + +// --------------------------------------------------------------------------- +// Brotli / Zstd parameter arrays (computed once, reused per init call). +// Mirrors the pattern in lib/zlib.js. +// --------------------------------------------------------------------------- +const kMaxBrotliParam = MathMax( + ...ObjectEntries(constants) + .map(({ 0: key, 1: value }) => + (key.startsWith('BROTLI_PARAM_') ? value : 0)), +); +const brotliInitParamsArray = new Uint32Array(kMaxBrotliParam + 1); + +const kMaxZstdCParam = MathMax(...ObjectKeys(constants).map( + (key) => (key.startsWith('ZSTD_c_') ? constants[key] : 0)), +); +const zstdInitCParamsArray = new Uint32Array(kMaxZstdCParam + 1); + +const kMaxZstdDParam = MathMax(...ObjectKeys(constants).map( + (key) => (key.startsWith('ZSTD_d_') ? 
constants[key] : 0)), +); +const zstdInitDParamsArray = new Uint32Array(kMaxZstdDParam + 1); + +// --------------------------------------------------------------------------- +// Handle creation - bare native handles, no Transform/EventEmitter overhead. +// +// Each factory accepts a processCallback (called from the threadpool +// completion path in C++) and an onError handler. +// --------------------------------------------------------------------------- + +/** + * Create a bare Zlib handle (gzip, gunzip, deflate, inflate). + * @returns {{ handle: object, writeState: Uint32Array, chunkSize: number }} + */ +function createZlibHandle(mode, options, processCallback, onError) { + const handle = new binding.Zlib(mode); + const writeState = new Uint32Array(2); + const chunkSize = options?.chunkSize ?? Z_DEFAULT_CHUNK; + + handle.onerror = onError; + handle.init( + options?.windowBits ?? Z_DEFAULT_WINDOWBITS, + options?.level ?? Z_DEFAULT_COMPRESSION, + options?.memLevel ?? Z_DEFAULT_MEMLEVEL, + options?.strategy ?? Z_DEFAULT_STRATEGY, + writeState, + processCallback, + options?.dictionary, + ); + + return { handle, writeState, chunkSize }; +} + +/** + * Create a bare Brotli handle. + * @returns {{ handle: object, writeState: Uint32Array, chunkSize: number }} + */ +function createBrotliHandle(mode, options, processCallback, onError) { + const handle = mode === BROTLI_ENCODE ? + new binding.BrotliEncoder(mode) : new binding.BrotliDecoder(mode); + const writeState = new Uint32Array(2); + const chunkSize = options?.chunkSize ?? 
Z_DEFAULT_CHUNK; + + brotliInitParamsArray.fill(-1); + if (options?.params) { + const params = options.params; + const keys = ObjectKeys(params); + for (let i = 0; i < keys.length; i++) { + const key = +keys[i]; + if (!NumberIsNaN(key) && key >= 0 && key <= kMaxBrotliParam) { + brotliInitParamsArray[key] = params[keys[i]]; + } + } + } + + handle.onerror = onError; + handle.init( + brotliInitParamsArray, + writeState, + processCallback, + options?.dictionary, + ); + + return { handle, writeState, chunkSize }; +} + +/** + * Create a bare Zstd handle. + * @returns {{ handle: object, writeState: Uint32Array, chunkSize: number }} + */ +function createZstdHandle(mode, options, processCallback, onError) { + const isCompress = mode === ZSTD_COMPRESS; + const handle = isCompress ? + new binding.ZstdCompress() : new binding.ZstdDecompress(); + const writeState = new Uint32Array(2); + const chunkSize = options?.chunkSize ?? Z_DEFAULT_CHUNK; + + const initArray = isCompress ? zstdInitCParamsArray : zstdInitDParamsArray; + const maxParam = isCompress ? kMaxZstdCParam : kMaxZstdDParam; + + initArray.fill(-1); + if (options?.params) { + const params = options.params; + const keys = ObjectKeys(params); + for (let i = 0; i < keys.length; i++) { + const key = +keys[i]; + if (!NumberIsNaN(key) && key >= 0 && key <= maxParam) { + initArray[key] = params[keys[i]]; + } + } + } + + handle.onerror = onError; + handle.init( + initArray, + options?.pledgedSrcSize, + writeState, + processCallback, + options?.dictionary, + ); + + return { handle, writeState, chunkSize }; +} + +// --------------------------------------------------------------------------- +// Core: makeZlibTransform +// +// Uses async handle.write() so compression runs on the libuv threadpool. +// The generator manually iterates the source with pre-reading: the next +// upstream read+transform is started before awaiting the current compression, +// so I/O and upstream work overlap with threadpool compression. 
+// --------------------------------------------------------------------------- +function makeZlibTransform(createHandleFn, processFlag, finishFlag) { + return { + transform: async function*(source) { + // ---- Per-invocation state shared with the write callback ---- + let outBuf; + let outOffset = 0; + let chunkSize; + const pending = []; + let pendingBytes = 0; + + // Current write operation state (read by the callback for looping). + let resolveWrite, rejectWrite; + let writeInput, writeFlush; + let writeInOff, writeAvailIn, writeAvailOutBefore; + + // processCallback: called by C++ AfterThreadPoolWork when compression + // on the threadpool completes. Collects output, loops if the engine + // has more output to produce (availOut === 0), then resolves the + // promise when all output for this input chunk is collected. + function onWriteComplete() { + const availOut = writeState[0]; + const availInAfter = writeState[1]; + const have = writeAvailOutBefore - availOut; + + if (have > 0) { + ArrayPrototypePush(pending, + outBuf.slice(outOffset, outOffset + have)); + pendingBytes += have; + outOffset += have; + } + + // Reallocate output buffer if exhausted. + if (availOut === 0 || outOffset >= chunkSize) { + outBuf = Buffer.allocUnsafe(chunkSize); + outOffset = 0; + } + + if (availOut === 0) { + // Engine has more output - loop on the threadpool. + const consumed = writeAvailIn - availInAfter; + writeInOff += consumed; + writeAvailIn = availInAfter; + writeAvailOutBefore = chunkSize - outOffset; + + handle.write(writeFlush, + writeInput, writeInOff, writeAvailIn, + outBuf, outOffset, writeAvailOutBefore); + return; // Will call onWriteComplete again. + } + + // All input consumed and output collected. + handle.buffer = null; + const resolve = resolveWrite; + resolveWrite = undefined; + rejectWrite = undefined; + resolve(); + } + + // onError: called by C++ when the engine encounters an error. + // Fires instead of onWriteComplete - reject the promise. 
+ function onError(message, errno, code) { + const error = genericNodeError(message, { errno, code }); + error.errno = errno; + error.code = code; + const reject = rejectWrite; + resolveWrite = undefined; + rejectWrite = undefined; + if (reject) reject(error); + } + + // ---- Create the handle with our callbacks ---- + const result = createHandleFn(onWriteComplete, onError); + const handle = result.handle; + const writeState = result.writeState; + chunkSize = result.chunkSize; + outBuf = Buffer.allocUnsafe(chunkSize); + + // Dispatch input to the threadpool and return a promise. + function processInputAsync(input, flushFlag) { + return new Promise((resolve, reject) => { + resolveWrite = resolve; + rejectWrite = reject; + writeInput = input; + writeFlush = flushFlag; + writeInOff = 0; + writeAvailIn = input.byteLength; + writeAvailOutBefore = chunkSize - outOffset; + + // Keep input alive while the threadpool references it. + handle.buffer = input; + + handle.write(flushFlag, + input, 0, input.byteLength, + outBuf, outOffset, writeAvailOutBefore); + }); + } + + function drainBatch() { + if (pendingBytes <= BATCH_HWM) { + const batch = ArrayPrototypeSplice(pending, 0, pending.length); + pendingBytes = 0; + return batch; + } + const batch = []; + let batchBytes = 0; + while (pending.length > 0 && batchBytes < BATCH_HWM) { + const buf = pending.shift(); + ArrayPrototypePush(batch, buf); + batchBytes += buf.byteLength; + pendingBytes -= buf.byteLength; + } + return batch; + } + + let finalized = false; + + try { + // Manually iterate the source so we can pre-read: calling + // iter.next() starts the upstream read + transform on libuv + // before we await the current compression on the threadpool. + const iter = source[SymbolAsyncIterator](); + let nextResult = iter.next(); + + while (true) { + const { value: chunks, done } = await nextResult; + if (done) break; + + if (chunks === null) { + // Flush signal - finalize the engine. 
+ if (!finalized) { + finalized = true; + await processInputAsync(kEmpty, finishFlag); + while (pending.length > 0) { + yield drainBatch(); + } + } + nextResult = iter.next(); + continue; + } + + // Pre-read: start upstream I/O + transform for the NEXT batch + // while we compress the current batch on the threadpool. + nextResult = iter.next(); + + for (let i = 0; i < chunks.length; i++) { + await processInputAsync(chunks[i], processFlag); + } + + if (pendingBytes >= BATCH_HWM) { + while (pending.length > 0 && pendingBytes >= BATCH_HWM) { + yield drainBatch(); + } + } + if (pending.length > 0) { + yield drainBatch(); + } + } + + // Source ended - finalize if not already done by a null signal. + if (!finalized) { + finalized = true; + await processInputAsync(kEmpty, finishFlag); + while (pending.length > 0) { + yield drainBatch(); + } + } + } finally { + handle.close(); + } + }, + }; +} + +// --------------------------------------------------------------------------- +// Compression factories +// --------------------------------------------------------------------------- + +function compressGzip(options) { + return makeZlibTransform( + (cb, onErr) => createZlibHandle(GZIP, options, cb, onErr), + Z_NO_FLUSH, Z_FINISH, + ); +} + +function compressDeflate(options) { + return makeZlibTransform( + (cb, onErr) => createZlibHandle(DEFLATE, options, cb, onErr), + Z_NO_FLUSH, Z_FINISH, + ); +} + +function compressBrotli(options) { + return makeZlibTransform( + (cb, onErr) => createBrotliHandle(BROTLI_ENCODE, options, cb, onErr), + BROTLI_OPERATION_PROCESS, BROTLI_OPERATION_FINISH, + ); +} + +function compressZstd(options) { + return makeZlibTransform( + (cb, onErr) => createZstdHandle(ZSTD_COMPRESS, options, cb, onErr), + ZSTD_e_continue, ZSTD_e_end, + ); +} + +// --------------------------------------------------------------------------- +// Decompression factories +// --------------------------------------------------------------------------- + +function 
decompressGzip(options) { + return makeZlibTransform( + (cb, onErr) => createZlibHandle(GUNZIP, options, cb, onErr), + Z_NO_FLUSH, Z_FINISH, + ); +} + +function decompressDeflate(options) { + return makeZlibTransform( + (cb, onErr) => createZlibHandle(INFLATE, options, cb, onErr), + Z_NO_FLUSH, Z_FINISH, + ); +} + +function decompressBrotli(options) { + return makeZlibTransform( + (cb, onErr) => createBrotliHandle(BROTLI_DECODE, options, cb, onErr), + BROTLI_OPERATION_PROCESS, BROTLI_OPERATION_FINISH, + ); +} + +function decompressZstd(options) { + return makeZlibTransform( + (cb, onErr) => createZstdHandle(ZSTD_DECOMPRESS, options, cb, onErr), + ZSTD_e_continue, ZSTD_e_end, + ); +} + +module.exports = { + compressGzip, + compressDeflate, + compressBrotli, + compressZstd, + decompressGzip, + decompressDeflate, + decompressBrotli, + decompressZstd, +}; diff --git a/lib/internal/streams/new/types.js b/lib/internal/streams/new/types.js new file mode 100644 index 00000000000000..ce4b385f9dfbb0 --- /dev/null +++ b/lib/internal/streams/new/types.js @@ -0,0 +1,59 @@ +'use strict'; + +// New Streams API - Protocol Symbols +// +// These symbols allow objects to participate in streaming. +// Using Symbol.for() allows third-party code to implement protocols +// without importing these symbols directly. + +const { + SymbolFor, +} = primordials; + +/** + * Symbol for sync value-to-streamable conversion protocol. + * Objects implementing this can be written to streams or yielded + * from generators. Works in both sync and async contexts. + * + * Third-party: [Symbol.for('Stream.toStreamable')]() { ... } + */ +const toStreamable = SymbolFor('Stream.toStreamable'); + +/** + * Symbol for async value-to-streamable conversion protocol. + * Objects implementing this can be written to async streams. + * Works in async contexts only. + * + * Third-party: [Symbol.for('Stream.toAsyncStreamable')]() { ... 
} + */ +const toAsyncStreamable = SymbolFor('Stream.toAsyncStreamable'); + +/** + * Symbol for Broadcastable protocol - object can provide a Broadcast. + */ +const broadcastProtocol = SymbolFor('Stream.broadcastProtocol'); + +/** + * Symbol for Shareable protocol - object can provide a Share. + */ +const shareProtocol = SymbolFor('Stream.shareProtocol'); + +/** + * Symbol for SyncShareable protocol - object can provide a SyncShare. + */ +const shareSyncProtocol = SymbolFor('Stream.shareSyncProtocol'); + +/** + * Symbol for Drainable protocol - object can signal when backpressure + * clears. Used to bridge event-driven sources that need drain notification. + */ +const drainableProtocol = SymbolFor('Stream.drainableProtocol'); + +module.exports = { + toStreamable, + toAsyncStreamable, + broadcastProtocol, + shareProtocol, + shareSyncProtocol, + drainableProtocol, +}; diff --git a/lib/internal/streams/new/utils.js b/lib/internal/streams/new/utils.js new file mode 100644 index 00000000000000..57ca93b85cd09c --- /dev/null +++ b/lib/internal/streams/new/utils.js @@ -0,0 +1,106 @@ +'use strict'; + +// New Streams API - Utility Functions + +const { + ArrayPrototypeSlice, + TypedArrayPrototypeSet, + Uint8Array, +} = primordials; + +const { TextEncoder } = require('internal/encoding'); + +// Shared TextEncoder instance for string conversion. +const encoder = new TextEncoder(); + +/** + * Convert a chunk (string or Uint8Array) to Uint8Array. + * Strings are UTF-8 encoded. + * @param {Uint8Array|string} chunk + * @returns {Uint8Array} + */ +function toUint8Array(chunk) { + if (typeof chunk === 'string') { + return encoder.encode(chunk); + } + return chunk; +} + +/** + * Calculate total byte length of an array of chunks. 
+ * @param {Uint8Array[]} chunks + * @returns {number} + */ +function totalByteLength(chunks) { + let total = 0; + for (let i = 0; i < chunks.length; i++) { + total += chunks[i].byteLength; + } + return total; +} + +/** + * Concatenate multiple Uint8Arrays into a single Uint8Array. + * @param {Uint8Array[]} chunks + * @returns {Uint8Array} + */ +function concatBytes(chunks) { + if (chunks.length === 0) { + return new Uint8Array(0); + } + if (chunks.length === 1) { + return chunks[0]; + } + + const total = totalByteLength(chunks); + const result = new Uint8Array(total); + let offset = 0; + for (let i = 0; i < chunks.length; i++) { + TypedArrayPrototypeSet(result, chunks[i], offset); + offset += chunks[i].byteLength; + } + return result; +} + +/** + * Check if a value is PullOptions (object without transform or write property). + * @param {unknown} value + * @returns {boolean} + */ +function isPullOptions(value) { + return ( + value !== null && + typeof value === 'object' && + !('transform' in value) && + !('write' in value) + ); +} + +/** + * Parse variadic arguments for pull/pullSync. + * Returns { transforms, options } + * @param {Array} args + * @returns {{ transforms: Array, options: object|undefined }} + */ +function parsePullArgs(args) { + if (args.length === 0) { + return { transforms: [], options: undefined }; + } + + const last = args[args.length - 1]; + if (isPullOptions(last)) { + return { + transforms: ArrayPrototypeSlice(args, 0, -1), + options: last, + }; + } + + return { transforms: args, options: undefined }; +} + +module.exports = { + toUint8Array, + concatBytes, + isPullOptions, + parsePullArgs, +}; diff --git a/lib/stream/new.js b/lib/stream/new.js new file mode 100644 index 00000000000000..27596be8ba0604 --- /dev/null +++ b/lib/stream/new.js @@ -0,0 +1,208 @@ +'use strict'; + +// Public entry point for the new streams API. 
+// Usage: require('stream/new') or require('node:stream/new') + +const { + ObjectFreeze, +} = primordials; + +// Protocol symbols +const { + toStreamable, + toAsyncStreamable, + broadcastProtocol, + shareProtocol, + shareSyncProtocol, + drainableProtocol, +} = require('internal/streams/new/types'); + +// Factories +const { push } = require('internal/streams/new/push'); +const { duplex } = require('internal/streams/new/duplex'); +const { from, fromSync } = require('internal/streams/new/from'); + +// Pipelines +const { + pull, + pullSync, + pipeTo, + pipeToSync, +} = require('internal/streams/new/pull'); + +// Consumers +const { + bytes, + bytesSync, + text, + textSync, + arrayBuffer, + arrayBufferSync, + array, + arraySync, + tap, + tapSync, + merge, + ondrain, +} = require('internal/streams/new/consumers'); + +// Transforms +const { + compressGzip, + compressDeflate, + compressBrotli, + compressZstd, + decompressGzip, + decompressDeflate, + decompressBrotli, + decompressZstd, +} = require('internal/streams/new/transform'); + +// Multi-consumer +const { broadcast, Broadcast } = require('internal/streams/new/broadcast'); +const { + share, + shareSync, + Share, + SyncShare, +} = require('internal/streams/new/share'); + +/** + * Stream namespace - unified access to all stream functions. 
+ * @example + * const { Stream } = require('stream/new'); + * + * const { writer, readable } = Stream.push(); + * await writer.write("hello"); + * await writer.end(); + * + * const output = Stream.pull(readable, transform1, transform2); + * const data = await Stream.bytes(output); + */ +const Stream = ObjectFreeze({ + // Factories + push, + duplex, + from, + fromSync, + + // Pipelines + pull, + pullSync, + + // Pipe to destination + pipeTo, + pipeToSync, + + // Consumers (async) + bytes, + text, + arrayBuffer, + array, + + // Consumers (sync) + bytesSync, + textSync, + arrayBufferSync, + arraySync, + + // Combining + merge, + + // Multi-consumer (push model) + broadcast, + + // Multi-consumer (pull model) + share, + shareSync, + + // Utilities + tap, + tapSync, + + // Drain utility for event source integration + ondrain, + + // Compression / decompression transforms + compressGzip, + compressDeflate, + compressBrotli, + compressZstd, + decompressGzip, + decompressDeflate, + decompressBrotli, + decompressZstd, + + // Protocol symbols + toStreamable, + toAsyncStreamable, + broadcastProtocol, + shareProtocol, + shareSyncProtocol, + drainableProtocol, +}); + +module.exports = { + // The Stream namespace + Stream, + + // Also export everything individually for destructured imports + + // Protocol symbols + toStreamable, + toAsyncStreamable, + broadcastProtocol, + shareProtocol, + shareSyncProtocol, + drainableProtocol, + + // Factories + push, + duplex, + from, + fromSync, + + // Pipelines + pull, + pullSync, + pipeTo, + pipeToSync, + + // Consumers (async) + bytes, + text, + arrayBuffer, + array, + + // Consumers (sync) + bytesSync, + textSync, + arrayBufferSync, + arraySync, + + // Combining + merge, + + // Multi-consumer + broadcast, + Broadcast, + share, + shareSync, + Share, + SyncShare, + + // Utilities + tap, + tapSync, + ondrain, + + // Compression / decompression transforms + compressGzip, + compressDeflate, + compressBrotli, + compressZstd, + decompressGzip, 
+ decompressDeflate, + decompressBrotli, + decompressZstd, +}; diff --git a/test/parallel/test-fs-promises-file-handle-pull.js b/test/parallel/test-fs-promises-file-handle-pull.js new file mode 100644 index 00000000000000..6b206ee3fae2ab --- /dev/null +++ b/test/parallel/test-fs-promises-file-handle-pull.js @@ -0,0 +1,254 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const fs = require('fs'); +const { open } = fs.promises; +const path = require('path'); +const tmpdir = require('../common/tmpdir'); +const { text, bytes } = require('stream/new'); + +tmpdir.refresh(); + +const tmpDir = tmpdir.path; + +// ============================================================================= +// Basic pull() +// ============================================================================= + +async function testBasicPull() { + const filePath = path.join(tmpDir, 'pull-basic.txt'); + fs.writeFileSync(filePath, 'hello from file'); + + const fh = await open(filePath, 'r'); + try { + const readable = fh.pull(); + const data = await text(readable); + assert.strictEqual(data, 'hello from file'); + } finally { + await fh.close(); + } +} + +async function testPullBinary() { + const filePath = path.join(tmpDir, 'pull-binary.bin'); + const buf = Buffer.alloc(256); + for (let i = 0; i < 256; i++) buf[i] = i; + fs.writeFileSync(filePath, buf); + + const fh = await open(filePath, 'r'); + try { + const readable = fh.pull(); + const data = await bytes(readable); + assert.strictEqual(data.byteLength, 256); + for (let i = 0; i < 256; i++) { + assert.strictEqual(data[i], i); + } + } finally { + await fh.close(); + } +} + +async function testPullEmptyFile() { + const filePath = path.join(tmpDir, 'pull-empty.txt'); + fs.writeFileSync(filePath, ''); + + const fh = await open(filePath, 'r'); + try { + const readable = fh.pull(); + const data = await bytes(readable); + assert.strictEqual(data.byteLength, 0); + } finally { + await fh.close(); + } +} + +// 
============================================================================= +// Large file (multi-chunk) +// ============================================================================= + +async function testPullLargeFile() { + const filePath = path.join(tmpDir, 'pull-large.bin'); + // Write 64KB — enough for multiple 16KB read chunks + const size = 64 * 1024; + const buf = Buffer.alloc(size, 0x42); + fs.writeFileSync(filePath, buf); + + const fh = await open(filePath, 'r'); + try { + const readable = fh.pull(); + const data = await bytes(readable); + assert.strictEqual(data.byteLength, size); + // Verify content + for (let i = 0; i < data.byteLength; i++) { + assert.strictEqual(data[i], 0x42); + } + } finally { + await fh.close(); + } +} + +// ============================================================================= +// With transforms +// ============================================================================= + +async function testPullWithTransform() { + const filePath = path.join(tmpDir, 'pull-transform.txt'); + fs.writeFileSync(filePath, 'hello'); + + const fh = await open(filePath, 'r'); + try { + const upper = (chunks) => { + if (chunks === null) return null; + return chunks.map((c) => { + const str = new TextDecoder().decode(c); + return new TextEncoder().encode(str.toUpperCase()); + }); + }; + + const readable = fh.pull(upper); + const data = await text(readable); + assert.strictEqual(data, 'HELLO'); + } finally { + await fh.close(); + } +} + +// ============================================================================= +// autoClose option +// ============================================================================= + +async function testPullAutoClose() { + const filePath = path.join(tmpDir, 'pull-autoclose.txt'); + fs.writeFileSync(filePath, 'auto close data'); + + const fh = await open(filePath, 'r'); + const readable = fh.pull({ autoClose: true }); + const data = await text(readable); + assert.strictEqual(data, 'auto close data'); + 
+ // After consuming with autoClose, the file handle should be closed + // Trying to read again should throw + await assert.rejects( + async () => { + await fh.stat(); + }, + (err) => err.code === 'ERR_INVALID_STATE' || err.code === 'EBADF', + ); +} + +// ============================================================================= +// Locking +// ============================================================================= + +async function testPullLocking() { + const filePath = path.join(tmpDir, 'pull-lock.txt'); + fs.writeFileSync(filePath, 'lock data'); + + const fh = await open(filePath, 'r'); + try { + // First pull locks the handle + const readable = fh.pull(); + + // Second pull while locked should throw + assert.throws( + () => fh.pull(), + { code: 'ERR_INVALID_STATE' }, + ); + + // Consume the first stream to unlock + await text(readable); + + // Now it should be usable again + const readable2 = fh.pull(); + const data = await text(readable2); + assert.strictEqual(data, ''); // Already read to end + } finally { + await fh.close(); + } +} + +// ============================================================================= +// Closed handle +// ============================================================================= + +async function testPullClosedHandle() { + const filePath = path.join(tmpDir, 'pull-closed.txt'); + fs.writeFileSync(filePath, 'data'); + + const fh = await open(filePath, 'r'); + await fh.close(); + + assert.throws( + () => fh.pull(), + { code: 'ERR_INVALID_STATE' }, + ); +} + +// ============================================================================= +// AbortSignal +// ============================================================================= + +async function testPullAbortSignal() { + const filePath = path.join(tmpDir, 'pull-abort.txt'); + // The signal is aborted before the stream is created, so reads must reject + fs.writeFileSync(filePath, 'a'.repeat(1024)); + + const ac = new AbortController(); + const fh = await open(filePath, 'r'); + try { +
ac.abort(); + const readable = fh.pull({ signal: ac.signal }); + + await assert.rejects( + async () => { + // eslint-disable-next-line no-unused-vars + for await (const _ of readable) { + assert.fail('Should not reach here'); + } + }, + (err) => err.name === 'AbortError', + ); + } finally { + await fh.close(); + } +} + +// ============================================================================= +// Iterate batches directly +// ============================================================================= + +async function testPullIterateBatches() { + const filePath = path.join(tmpDir, 'pull-batches.txt'); + fs.writeFileSync(filePath, 'batch data'); + + const fh = await open(filePath, 'r'); + try { + const readable = fh.pull(); + const batches = []; + for await (const batch of readable) { + batches.push(batch); + // Each batch should be an array of Uint8Array + assert.ok(Array.isArray(batch)); + for (const chunk of batch) { + assert.ok(chunk instanceof Uint8Array); + } + } + assert.ok(batches.length > 0); + } finally { + await fh.close(); + } +} + +Promise.all([ + testBasicPull(), + testPullBinary(), + testPullEmptyFile(), + testPullLargeFile(), + testPullWithTransform(), + testPullAutoClose(), + testPullLocking(), + testPullClosedHandle(), + testPullAbortSignal(), + testPullIterateBatches(), +]).then(common.mustCall()); diff --git a/test/parallel/test-fs-promises-file-handle-writer.js b/test/parallel/test-fs-promises-file-handle-writer.js new file mode 100644 index 00000000000000..5a0e2914a3e52c --- /dev/null +++ b/test/parallel/test-fs-promises-file-handle-writer.js @@ -0,0 +1,473 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const fs = require('fs'); +const { open } = fs.promises; +const path = require('path'); +const tmpdir = require('../common/tmpdir'); +const { + pipeTo, text, + compressGzip, decompressGzip, +} = require('stream/new'); + +tmpdir.refresh(); + +const tmpDir = tmpdir.path; + +// 
============================================================================= +// Basic write() +// ============================================================================= + +async function testBasicWrite() { + const filePath = path.join(tmpDir, 'writer-basic.txt'); + const fh = await open(filePath, 'w'); + const w = fh.writer(); + await w.write(Buffer.from('Hello ')); + await w.write(Buffer.from('World!')); + const totalBytes = await w.end(); + await fh.close(); + + assert.strictEqual(totalBytes, 12); + assert.strictEqual(fs.readFileSync(filePath, 'utf8'), 'Hello World!'); +} + +// ============================================================================= +// Basic writev() +// ============================================================================= + +async function testBasicWritev() { + const filePath = path.join(tmpDir, 'writer-writev.txt'); + const fh = await open(filePath, 'w'); + const w = fh.writer(); + await w.writev([ + Buffer.from('aaa'), + Buffer.from('bbb'), + Buffer.from('ccc'), + ]); + const totalBytes = await w.end(); + await fh.close(); + + assert.strictEqual(totalBytes, 9); + assert.strictEqual(fs.readFileSync(filePath, 'utf8'), 'aaabbbccc'); +} + +// ============================================================================= +// Mixed write() and writev() +// ============================================================================= + +async function testMixedWriteAndWritev() { + const filePath = path.join(tmpDir, 'writer-mixed.txt'); + const fh = await open(filePath, 'w'); + const w = fh.writer(); + await w.write(Buffer.from('head-')); + await w.writev([Buffer.from('mid1-'), Buffer.from('mid2-')]); + await w.write(Buffer.from('tail')); + const totalBytes = await w.end(); + await fh.close(); + + assert.strictEqual(totalBytes, 19); + assert.strictEqual(fs.readFileSync(filePath, 'utf8'), 'head-mid1-mid2-tail'); +} + +// ============================================================================= +// end() returns 
totalBytesWritten +// ============================================================================= + +async function testEndReturnsTotalBytes() { + const filePath = path.join(tmpDir, 'writer-totalbytes.txt'); + const fh = await open(filePath, 'w'); + const w = fh.writer(); + + // Write some data in various sizes + const sizes = [100, 200, 300, 400, 500]; + let expected = 0; + for (const size of sizes) { + await w.write(Buffer.alloc(size, 0x41)); + expected += size; + } + const totalBytes = await w.end(); + await fh.close(); + + assert.strictEqual(totalBytes, expected); + assert.strictEqual(totalBytes, 1500); + assert.strictEqual(fs.statSync(filePath).size, 1500); +} + +// ============================================================================= +// autoClose: true — handle closed after end() +// ============================================================================= + +async function testAutoCloseOnEnd() { + const filePath = path.join(tmpDir, 'writer-autoclose-end.txt'); + const fh = await open(filePath, 'w'); + const w = fh.writer({ autoClose: true }); + await w.write(Buffer.from('auto close test')); + await w.end(); + + // Handle should be closed + await assert.rejects(fh.stat(), { code: 'EBADF' }); + assert.strictEqual(fs.readFileSync(filePath, 'utf8'), 'auto close test'); +} + +// ============================================================================= +// autoClose: true — handle closed after abort() +// ============================================================================= + +async function testAutoCloseOnAbort() { + const filePath = path.join(tmpDir, 'writer-autoclose-abort.txt'); + const fh = await open(filePath, 'w'); + const w = fh.writer({ autoClose: true }); + await w.write(Buffer.from('partial')); + await w.abort(new Error('test abort')); + + // Handle should be closed + await assert.rejects(fh.stat(), { code: 'EBADF' }); + // Partial data should still be on disk (abort doesn't truncate) + 
assert.strictEqual(fs.readFileSync(filePath, 'utf8'), 'partial'); +} + +// ============================================================================= +// start option — write at specified offset +// ============================================================================= + +async function testStartOption() { + const filePath = path.join(tmpDir, 'writer-start.txt'); + // Pre-fill with 10 A's + fs.writeFileSync(filePath, 'AAAAAAAAAA'); + + const fh = await open(filePath, 'r+'); + const w = fh.writer({ start: 3 }); + await w.write(Buffer.from('BBB')); + await w.end(); + await fh.close(); + + assert.strictEqual(fs.readFileSync(filePath, 'utf8'), 'AAABBBAAAA'); +} + +// ============================================================================= +// start option — sequential writes advance position +// ============================================================================= + +async function testStartSequentialPosition() { + const filePath = path.join(tmpDir, 'writer-start-seq.txt'); + fs.writeFileSync(filePath, 'XXXXXXXXXX'); + + const fh = await open(filePath, 'r+'); + const w = fh.writer({ start: 2 }); + await w.write(Buffer.from('AA')); + await w.write(Buffer.from('BB')); + await w.writev([Buffer.from('C'), Buffer.from('D')]); + await w.end(); + await fh.close(); + + assert.strictEqual(fs.readFileSync(filePath, 'utf8'), 'XXAABBCDXX'); +} + +// ============================================================================= +// Locked state — can't create second writer while active +// ============================================================================= + +async function testLockedState() { + const filePath = path.join(tmpDir, 'writer-locked.txt'); + const fh = await open(filePath, 'w'); + const w = fh.writer(); + + assert.throws(() => fh.writer(), { + name: 'Error', + message: /locked/, + }); + + // Also can't pull while writer is active + assert.throws(() => fh.pull(), { + name: 'Error', + message: /locked/, + }); + + await w.end(); + await 
fh.close(); +} + +// ============================================================================= +// Unlock after end — handle reusable +// ============================================================================= + +async function testUnlockAfterEnd() { + const filePath = path.join(tmpDir, 'writer-unlock.txt'); + const fh = await open(filePath, 'w'); + + const w1 = fh.writer(); + await w1.write(Buffer.from('first')); + await w1.end(); + + // Should work — handle is unlocked + const w2 = fh.writer(); + await w2.write(Buffer.from(' second')); + await w2.end(); + await fh.close(); + + assert.strictEqual(fs.readFileSync(filePath, 'utf8'), 'first second'); +} + +// ============================================================================= +// Unlock after abort — handle reusable +// ============================================================================= + +async function testUnlockAfterAbort() { + const filePath = path.join(tmpDir, 'writer-unlock-abort.txt'); + const fh = await open(filePath, 'w'); + + const w1 = fh.writer(); + await w1.write(Buffer.from('aborted')); + await w1.abort(new Error('test')); + + // Should work — handle is unlocked + const w2 = fh.writer(); + await w2.write(Buffer.from('recovered')); + await w2.end(); + await fh.close(); + + // 'recovered' is appended after 'aborted' at current file offset + const content = fs.readFileSync(filePath, 'utf8'); + assert.ok(content.startsWith('aborted')); + assert.ok(content.includes('recovered')); +} + +// ============================================================================= +// Write after end/abort rejects +// ============================================================================= + +async function testWriteAfterEndRejects() { + const filePath = path.join(tmpDir, 'writer-closed.txt'); + const fh = await open(filePath, 'w'); + const w = fh.writer(); + await w.write(Buffer.from('data')); + await w.end(); + + await assert.rejects(w.write(Buffer.from('more')), { + name: 'Error', + 
message: /closed/, + }); + await assert.rejects(w.writev([Buffer.from('more')]), { + name: 'Error', + message: /closed/, + }); + + await fh.close(); +} + +// ============================================================================= +// Closed handle — writer() throws +// ============================================================================= + +async function testClosedHandle() { + const filePath = path.join(tmpDir, 'writer-closed-handle.txt'); + const fh = await open(filePath, 'w'); + await fh.close(); + + assert.throws(() => fh.writer(), { + name: 'Error', + message: /closed/, + }); +} + +// ============================================================================= +// pipeTo() integration — pipe source through writer +// ============================================================================= + +async function testPipeToIntegration() { + const srcPath = path.join(tmpDir, 'writer-pipeto-src.txt'); + const dstPath = path.join(tmpDir, 'writer-pipeto-dst.txt'); + const data = 'The quick brown fox jumps over the lazy dog.\n'.repeat(500); + fs.writeFileSync(srcPath, data); + + const rfh = await open(srcPath, 'r'); + const wfh = await open(dstPath, 'w'); + const w = wfh.writer(); + + const totalBytes = await pipeTo(rfh.pull(), w); + + await rfh.close(); + await wfh.close(); + + assert.strictEqual(totalBytes, Buffer.byteLength(data)); + assert.strictEqual(fs.readFileSync(dstPath, 'utf8'), data); +} + +// ============================================================================= +// pipeTo() with transforms — uppercase through writer +// ============================================================================= + +async function testPipeToWithTransform() { + const srcPath = path.join(tmpDir, 'writer-transform-src.txt'); + const dstPath = path.join(tmpDir, 'writer-transform-dst.txt'); + const data = 'hello world from transforms test\n'.repeat(200); + fs.writeFileSync(srcPath, data); + + function uppercase(chunks) { + if (chunks === null) return 
null; + const out = new Array(chunks.length); + for (let i = 0; i < chunks.length; i++) { + const src = chunks[i]; + const buf = Buffer.allocUnsafe(src.length); + for (let j = 0; j < src.length; j++) { + const b = src[j]; + buf[j] = (b >= 0x61 && b <= 0x7a) ? b - 0x20 : b; + } + out[i] = buf; + } + return out; + } + + const rfh = await open(srcPath, 'r'); + const wfh = await open(dstPath, 'w'); + const w = wfh.writer(); + + await pipeTo(rfh.pull(), uppercase, w); + + await rfh.close(); + await wfh.close(); + + assert.strictEqual(fs.readFileSync(dstPath, 'utf8'), data.toUpperCase()); +} + +// ============================================================================= +// Round-trip: pull → compress → writer, pull → decompress → verify +// ============================================================================= + +async function testCompressRoundTrip() { + const srcPath = path.join(tmpDir, 'writer-rt-src.txt'); + const gzPath = path.join(tmpDir, 'writer-rt.gz'); + const original = 'Round trip compression test data. 
'.repeat(2000); + fs.writeFileSync(srcPath, original); + + // Compress: pull → gzip → writer + { + const rfh = await open(srcPath, 'r'); + const wfh = await open(gzPath, 'w'); + const w = wfh.writer({ autoClose: true }); + await pipeTo(rfh.pull(), compressGzip(), w); + await rfh.close(); + } + + // Verify compressed file is smaller + const compressedSize = fs.statSync(gzPath).size; + assert.ok(compressedSize < Buffer.byteLength(original), + `Compressed ${compressedSize} should be < original ${Buffer.byteLength(original)}`); + + // Decompress: pull → gunzip → text → verify + { + const rfh = await open(gzPath, 'r'); + const result = await text(rfh.pull(decompressGzip())); + await rfh.close(); + assert.strictEqual(result, original); + } +} + +// ============================================================================= +// Large file write — write 1MB in 64KB chunks +// ============================================================================= + +async function testLargeFileWrite() { + const filePath = path.join(tmpDir, 'writer-large.bin'); + const fh = await open(filePath, 'w'); + const w = fh.writer(); + + const chunkSize = 65536; + const totalSize = 1024 * 1024; // 1MB + const chunk = Buffer.alloc(chunkSize, 0x42); + let written = 0; + + while (written < totalSize) { + await w.write(chunk); + written += chunkSize; + } + + const totalBytes = await w.end(); + await fh.close(); + + assert.strictEqual(totalBytes, totalSize); + assert.strictEqual(fs.statSync(filePath).size, totalSize); + + // Verify content + const data = fs.readFileSync(filePath); + for (let i = 0; i < data.length; i++) { + if (data[i] !== 0x42) { + assert.fail(`Byte at offset ${i} is ${data[i]}, expected 0x42`); + } + } +} + +// ============================================================================= +// Symbol.asyncDispose — await using +// ============================================================================= + +async function testAsyncDispose() { + const filePath = 
path.join(tmpDir, 'writer-async-dispose.txt'); + { + await using fh = await open(filePath, 'w'); + await using w = fh.writer({ autoClose: true }); + await w.write(Buffer.from('async dispose')); + } + // Both writer and file handle should be cleaned up + assert.strictEqual(fs.readFileSync(filePath, 'utf8'), 'async dispose'); + + // Verify the handle is actually closed by trying to open a new one + // (if the old one were still open with a write lock on some OSes, + // this could fail — but it should succeed). + const fh2 = await open(filePath, 'r'); + await fh2.close(); +} + +// ============================================================================= +// Symbol.asyncDispose — cleanup on error (await using unwinds) +// ============================================================================= + +async function testAsyncDisposeOnError() { + const filePath = path.join(tmpDir, 'writer-dispose-error.txt'); + const fh = await open(filePath, 'w'); + + try { + await using w = fh.writer(); + await w.write(Buffer.from('before error')); + throw new Error('intentional'); + } catch (e) { + assert.strictEqual(e.message, 'intentional'); + } + + // If asyncDispose ran, the handle should be unlocked and reusable + const w2 = fh.writer(); + await w2.write(Buffer.from('after error')); + await w2.end(); + await fh.close(); + + const content = fs.readFileSync(filePath, 'utf8'); + assert.ok(content.includes('after error'), + `Expected 'after error' in ${JSON.stringify(content)}`); +} + +// ============================================================================= +// Run all tests +// ============================================================================= + +Promise.all([ + testBasicWrite(), + testBasicWritev(), + testMixedWriteAndWritev(), + testEndReturnsTotalBytes(), + testAutoCloseOnEnd(), + testAutoCloseOnAbort(), + testStartOption(), + testStartSequentialPosition(), + testLockedState(), + testUnlockAfterEnd(), + testUnlockAfterAbort(), + testWriteAfterEndRejects(), 
+ testClosedHandle(), + testPipeToIntegration(), + testPipeToWithTransform(), + testCompressRoundTrip(), + testLargeFileWrite(), + testAsyncDispose(), + testAsyncDisposeOnError(), +]).then(common.mustCall()); diff --git a/test/parallel/test-stream-new-broadcast.js b/test/parallel/test-stream-new-broadcast.js new file mode 100644 index 00000000000000..6ea0b3858bb64d --- /dev/null +++ b/test/parallel/test-stream-new-broadcast.js @@ -0,0 +1,276 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { broadcast, Broadcast, from, text } = require('stream/new'); + +// ============================================================================= +// Basic broadcast +// ============================================================================= + +async function testBasicBroadcast() { + const { writer, broadcast: bc } = broadcast(); + + // Create two consumers + const consumer1 = bc.push(); + const consumer2 = bc.push(); + + assert.strictEqual(bc.consumerCount, 2); + + await writer.write('hello'); + await writer.end(); + + const [data1, data2] = await Promise.all([ + text(consumer1), + text(consumer2), + ]); + + assert.strictEqual(data1, 'hello'); + assert.strictEqual(data2, 'hello'); +} + +async function testMultipleWrites() { + const { writer, broadcast: bc } = broadcast({ highWaterMark: 10 }); + + const consumer = bc.push(); + + await writer.write('a'); + await writer.write('b'); + await writer.write('c'); + await writer.end(); + + const data = await text(consumer); + assert.strictEqual(data, 'abc'); +} + +async function testConsumerCount() { + const { broadcast: bc } = broadcast(); + + assert.strictEqual(bc.consumerCount, 0); + + const c1 = bc.push(); + assert.strictEqual(bc.consumerCount, 1); + + bc.push(); + assert.strictEqual(bc.consumerCount, 2); + + // Consume c1 to completion (it returns immediately since no data has been + // pushed and we haven't ended yet — but we'll cancel to detach) + bc.cancel(); + + // After 
cancel, consumers are detached + const batches = []; + for await (const batch of c1) { + batches.push(batch); + } + assert.strictEqual(batches.length, 0); +} + +// ============================================================================= +// Writer methods +// ============================================================================= + +async function testWriteSync() { + const { writer, broadcast: bc } = broadcast({ highWaterMark: 2 }); + const consumer = bc.push(); + + assert.strictEqual(writer.writeSync('a'), true); + assert.strictEqual(writer.writeSync('b'), true); + // Buffer full (highWaterMark=2, strict policy) + assert.strictEqual(writer.writeSync('c'), false); + + writer.endSync(); + + const data = await text(consumer); + assert.strictEqual(data, 'ab'); +} + +async function testWritevSync() { + const { writer, broadcast: bc } = broadcast({ highWaterMark: 10 }); + const consumer = bc.push(); + + assert.strictEqual(writer.writevSync(['hello', ' ', 'world']), true); + writer.endSync(); + + const data = await text(consumer); + assert.strictEqual(data, 'hello world'); +} + +async function testWriterEnd() { + const { writer, broadcast: bc } = broadcast(); + const consumer = bc.push(); + + await writer.write('data'); + const totalBytes = await writer.end(); + assert.ok(totalBytes > 0); + + const data = await text(consumer); + assert.strictEqual(data, 'data'); +} + +async function testWriterAbort() { + const { writer, broadcast: bc } = broadcast(); + const consumer = bc.push(); + + await writer.abort(new Error('test error')); + + await assert.rejects( + async () => { + // eslint-disable-next-line no-unused-vars + for await (const _ of consumer) { + assert.fail('Should not reach here'); + } + }, + { message: 'test error' }, + ); +} + +// ============================================================================= +// Backpressure policies +// ============================================================================= + +async function testDropOldest() { + 
const { writer, broadcast: bc } = broadcast({ + highWaterMark: 2, + backpressure: 'drop-oldest', + }); + const consumer = bc.push(); + + writer.writeSync('first'); + writer.writeSync('second'); + // This should drop 'first' + writer.writeSync('third'); + writer.endSync(); + + const data = await text(consumer); + assert.strictEqual(data, 'secondthird'); +} + +async function testDropNewest() { + const { writer, broadcast: bc } = broadcast({ + highWaterMark: 1, + backpressure: 'drop-newest', + }); + const consumer = bc.push(); + + writer.writeSync('kept'); + // This should be silently dropped + writer.writeSync('dropped'); + writer.endSync(); + + const data = await text(consumer); + assert.strictEqual(data, 'kept'); +} + +// ============================================================================= +// Cancel +// ============================================================================= + +async function testCancelWithoutReason() { + const { broadcast: bc } = broadcast(); + const consumer = bc.push(); + + bc.cancel(); + + const batches = []; + for await (const batch of consumer) { + batches.push(batch); + } + assert.strictEqual(batches.length, 0); +} + +async function testCancelWithReason() { + const { broadcast: bc } = broadcast(); + + // Start a consumer that is waiting for data (promise pending) + const consumer = bc.push(); + const resultPromise = text(consumer).catch((err) => err); + + // Give the consumer time to enter the waiting state + await new Promise((resolve) => setImmediate(resolve)); + + bc.cancel(new Error('cancelled')); + + const result = await resultPromise; + assert.ok(result instanceof Error); + assert.strictEqual(result.message, 'cancelled'); +} + +// ============================================================================= +// Broadcast.from +// ============================================================================= + +async function testBroadcastFromAsyncIterable() { + const source = from('broadcast-from'); + const { broadcast: 
bc } = Broadcast.from(source); + const consumer = bc.push(); + + const data = await text(consumer); + assert.strictEqual(data, 'broadcast-from'); +} + +async function testBroadcastFromMultipleConsumers() { + const source = from('shared-data'); + const { broadcast: bc } = Broadcast.from(source); + + const c1 = bc.push(); + const c2 = bc.push(); + + const [data1, data2] = await Promise.all([ + text(c1), + text(c2), + ]); + + assert.strictEqual(data1, 'shared-data'); + assert.strictEqual(data2, 'shared-data'); +} + +// ============================================================================= +// AbortSignal +// ============================================================================= + +async function testAbortSignal() { + const ac = new AbortController(); + const { broadcast: bc } = broadcast({ signal: ac.signal }); + const consumer = bc.push(); + + ac.abort(); + + const batches = []; + for await (const batch of consumer) { + batches.push(batch); + } + assert.strictEqual(batches.length, 0); +} + +async function testAlreadyAbortedSignal() { + const ac = new AbortController(); + ac.abort(); + + const { broadcast: bc } = broadcast({ signal: ac.signal }); + const consumer = bc.push(); + + const batches = []; + for await (const batch of consumer) { + batches.push(batch); + } + assert.strictEqual(batches.length, 0); +} + +Promise.all([ + testBasicBroadcast(), + testMultipleWrites(), + testConsumerCount(), + testWriteSync(), + testWritevSync(), + testWriterEnd(), + testWriterAbort(), + testDropOldest(), + testDropNewest(), + testCancelWithoutReason(), + testCancelWithReason(), + testBroadcastFromAsyncIterable(), + testBroadcastFromMultipleConsumers(), + testAbortSignal(), + testAlreadyAbortedSignal(), +]).then(common.mustCall()); diff --git a/test/parallel/test-stream-new-consumers.js b/test/parallel/test-stream-new-consumers.js new file mode 100644 index 00000000000000..68561ecf18d834 --- /dev/null +++ b/test/parallel/test-stream-new-consumers.js @@ -0,0 +1,319 @@ 
+'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { + from, + fromSync, + push, + bytes, + bytesSync, + text, + textSync, + arrayBuffer, + arrayBufferSync, + array, + arraySync, + tap, + tapSync, + merge, +} = require('stream/new'); + +// ============================================================================= +// bytesSync / bytes +// ============================================================================= + +async function testBytesSyncBasic() { + const source = fromSync('hello'); + const data = bytesSync(source); + assert.deepStrictEqual(data, new TextEncoder().encode('hello')); +} + +async function testBytesSyncLimit() { + const source = fromSync('hello world'); + assert.throws( + () => bytesSync(source, { limit: 3 }), + { name: 'RangeError' }, + ); +} + +async function testBytesAsync() { + const source = from('hello-async'); + const data = await bytes(source); + assert.deepStrictEqual(data, new TextEncoder().encode('hello-async')); +} + +async function testBytesAsyncLimit() { + const source = from('hello world'); + await assert.rejects( + () => bytes(source, { limit: 3 }), + { name: 'RangeError' }, + ); +} + +async function testBytesAsyncAbort() { + const ac = new AbortController(); + ac.abort(); + const source = from('data'); + await assert.rejects( + () => bytes(source, { signal: ac.signal }), + (err) => err.name === 'AbortError', + ); +} + +async function testBytesEmpty() { + const source = from([]); + const data = await bytes(source); + assert.strictEqual(data.byteLength, 0); +} + +// ============================================================================= +// textSync / text +// ============================================================================= + +async function testTextSyncBasic() { + const source = fromSync('hello text'); + const data = textSync(source); + assert.strictEqual(data, 'hello text'); +} + +async function testTextAsync() { + const source = from('hello async text'); + 
const data = await text(source); + assert.strictEqual(data, 'hello async text'); +} + +async function testTextEncoding() { + // Default encoding is utf-8 + const source = from('café'); + const data = await text(source); + assert.strictEqual(data, 'café'); +} + +// ============================================================================= +// arrayBufferSync / arrayBuffer +// ============================================================================= + +async function testArrayBufferSyncBasic() { + const source = fromSync(new Uint8Array([1, 2, 3])); + const ab = arrayBufferSync(source); + assert.ok(ab instanceof ArrayBuffer); + assert.strictEqual(ab.byteLength, 3); + const view = new Uint8Array(ab); + assert.deepStrictEqual(view, new Uint8Array([1, 2, 3])); +} + +async function testArrayBufferAsync() { + const source = from(new Uint8Array([10, 20, 30])); + const ab = await arrayBuffer(source); + assert.ok(ab instanceof ArrayBuffer); + assert.strictEqual(ab.byteLength, 3); + const view = new Uint8Array(ab); + assert.deepStrictEqual(view, new Uint8Array([10, 20, 30])); +} + +// ============================================================================= +// arraySync / array +// ============================================================================= + +async function testArraySyncBasic() { + function* gen() { + yield new Uint8Array([1]); + yield new Uint8Array([2]); + yield new Uint8Array([3]); + } + const source = fromSync(gen()); + const chunks = arraySync(source); + assert.strictEqual(chunks.length, 3); + assert.deepStrictEqual(chunks[0], new Uint8Array([1])); + assert.deepStrictEqual(chunks[1], new Uint8Array([2])); + assert.deepStrictEqual(chunks[2], new Uint8Array([3])); +} + +async function testArraySyncLimit() { + function* gen() { + yield new Uint8Array(100); + yield new Uint8Array(100); + } + const source = fromSync(gen()); + assert.throws( + () => arraySync(source, { limit: 50 }), + { name: 'RangeError' }, + ); +} + +async function 
testArrayAsync() { + async function* gen() { + yield [new Uint8Array([1])]; + yield [new Uint8Array([2])]; + } + const chunks = await array(gen()); + assert.strictEqual(chunks.length, 2); + assert.deepStrictEqual(chunks[0], new Uint8Array([1])); + assert.deepStrictEqual(chunks[1], new Uint8Array([2])); +} + +async function testArrayAsyncLimit() { + async function* gen() { + yield [new Uint8Array(100)]; + yield [new Uint8Array(100)]; + } + await assert.rejects( + () => array(gen(), { limit: 50 }), + { name: 'RangeError' }, + ); +} + +// ============================================================================= +// tap / tapSync +// ============================================================================= + +async function testTapSync() { + const observed = []; + const observer = tapSync((chunks) => { + if (chunks !== null) { + observed.push(chunks.length); + } + }); + + // tapSync returns a function transform + assert.strictEqual(typeof observer, 'function'); + + // Test that it passes data through unchanged + const input = [new Uint8Array([1]), new Uint8Array([2])]; + const result = observer(input); + assert.deepStrictEqual(result, input); + assert.deepStrictEqual(observed, [2]); + + // null (flush) passes through + const flushResult = observer(null); + assert.strictEqual(flushResult, null); +} + +async function testTapAsync() { + const observed = []; + const observer = tap(async (chunks) => { + if (chunks !== null) { + observed.push(chunks.length); + } + }); + + assert.strictEqual(typeof observer, 'function'); + + const input = [new Uint8Array([1])]; + const result = await observer(input); + assert.deepStrictEqual(result, input); + assert.deepStrictEqual(observed, [1]); +} + +async function testTapInPipeline() { + const { writer, readable } = push(); + const seen = []; + + const observer = tap(async (chunks) => { + if (chunks !== null) { + for (const chunk of chunks) { + seen.push(new TextDecoder().decode(chunk)); + } + } + }); + + writer.write('hello'); + 
writer.end(); + + // Use pull with tap as a transform + const { pull } = require('stream/new'); + const result = pull(readable, observer); + const data = await text(result); + + assert.strictEqual(data, 'hello'); + assert.strictEqual(seen.length, 1); + assert.strictEqual(seen[0], 'hello'); +} + +// ============================================================================= +// merge +// ============================================================================= + +async function testMergeTwoSources() { + const { writer: w1, readable: r1 } = push(); + const { writer: w2, readable: r2 } = push(); + + w1.write('from-a'); + w1.end(); + w2.write('from-b'); + w2.end(); + + const merged = merge(r1, r2); + const chunks = []; + for await (const batch of merged) { + for (const chunk of batch) { + chunks.push(new TextDecoder().decode(chunk)); + } + } + + // Both sources should be present (order is temporal, not guaranteed) + assert.strictEqual(chunks.length, 2); + assert.ok(chunks.includes('from-a')); + assert.ok(chunks.includes('from-b')); +} + +async function testMergeSingleSource() { + const source = from('only-one'); + const merged = merge(source); + + const data = await text(merged); + assert.strictEqual(data, 'only-one'); +} + +async function testMergeEmpty() { + const merged = merge(); + const batches = []; + for await (const batch of merged) { + batches.push(batch); + } + assert.strictEqual(batches.length, 0); +} + +async function testMergeWithAbortSignal() { + const ac = new AbortController(); + ac.abort(); + + const source = from('data'); + const merged = merge(source, { signal: ac.signal }); + + await assert.rejects( + async () => { + // eslint-disable-next-line no-unused-vars + for await (const _ of merged) { + assert.fail('Should not reach here'); + } + }, + (err) => err.name === 'AbortError', + ); +} + +Promise.all([ + testBytesSyncBasic(), + testBytesSyncLimit(), + testBytesAsync(), + testBytesAsyncLimit(), + testBytesAsyncAbort(), + testBytesEmpty(), + 
testTextSyncBasic(), + testTextAsync(), + testTextEncoding(), + testArrayBufferSyncBasic(), + testArrayBufferAsync(), + testArraySyncBasic(), + testArraySyncLimit(), + testArrayAsync(), + testArrayAsyncLimit(), + testTapSync(), + testTapAsync(), + testTapInPipeline(), + testMergeTwoSources(), + testMergeSingleSource(), + testMergeEmpty(), + testMergeWithAbortSignal(), +]).then(common.mustCall()); diff --git a/test/parallel/test-stream-new-duplex.js b/test/parallel/test-stream-new-duplex.js new file mode 100644 index 00000000000000..7692cb53e360d2 --- /dev/null +++ b/test/parallel/test-stream-new-duplex.js @@ -0,0 +1,152 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { duplex, text, bytes } = require('stream/new'); + +// ============================================================================= +// Basic duplex +// ============================================================================= + +async function testBasicDuplex() { + const [channelA, channelB] = duplex(); + + // A writes, B reads + await channelA.writer.write('hello from A'); + await channelA.close(); + + const dataAtB = await text(channelB.readable); + assert.strictEqual(dataAtB, 'hello from A'); +} + +async function testBidirectional() { + const [channelA, channelB] = duplex(); + + // A writes to B, B writes to A concurrently + const writeA = (async () => { + await channelA.writer.write('A to B'); + await channelA.close(); + })(); + + const writeB = (async () => { + await channelB.writer.write('B to A'); + await channelB.close(); + })(); + + const readAtB = text(channelB.readable); + const readAtA = text(channelA.readable); + + await Promise.all([writeA, writeB]); + + const [dataAtA, dataAtB] = await Promise.all([readAtA, readAtB]); + + assert.strictEqual(dataAtB, 'A to B'); + assert.strictEqual(dataAtA, 'B to A'); +} + +async function testMultipleWrites() { + const [channelA, channelB] = duplex({ highWaterMark: 10 }); + + await 
channelA.writer.write('one'); + await channelA.writer.write('two'); + await channelA.writer.write('three'); + await channelA.close(); + + const data = await text(channelB.readable); + assert.strictEqual(data, 'onetwothree'); +} + +async function testChannelClose() { + const [channelA, channelB] = duplex(); + + await channelA.close(); + + // Should be able to close twice without error + await channelA.close(); + + // B's readable should end (A -> B direction is closed) + const batches = []; + for await (const batch of channelB.readable) { + batches.push(batch); + } + assert.strictEqual(batches.length, 0); +} + +async function testWithOptions() { + const [channelA, channelB] = duplex({ + highWaterMark: 2, + backpressure: 'strict', + }); + + await channelA.writer.write('msg'); + await channelA.close(); + + const data = await text(channelB.readable); + assert.strictEqual(data, 'msg'); +} + +async function testPerChannelOptions() { + const [channelA, channelB] = duplex({ + a: { highWaterMark: 1 }, + b: { highWaterMark: 4 }, + }); + + // Channel A -> B direction uses A's options + // Channel B -> A direction uses B's options + await channelA.writer.write('from-a'); + await channelA.close(); + + await channelB.writer.write('from-b'); + await channelB.close(); + + const [dataAtA, dataAtB] = await Promise.all([ + text(channelA.readable), + text(channelB.readable), + ]); + + assert.strictEqual(dataAtB, 'from-a'); + assert.strictEqual(dataAtA, 'from-b'); +} + +async function testAbortSignal() { + const ac = new AbortController(); + const [channelA] = duplex({ signal: ac.signal }); + + ac.abort(); + + // Both directions should error + await assert.rejects( + async () => { + // eslint-disable-next-line no-unused-vars + for await (const _ of channelA.readable) { + assert.fail('Should not reach here'); + } + }, + (err) => err.name === 'AbortError', + ); +} + +async function testEmptyDuplex() { + const [channelA, channelB] = duplex(); + + // Close without writing + await 
channelA.close(); + await channelB.close(); + + const dataAtA = await bytes(channelA.readable); + const dataAtB = await bytes(channelB.readable); + + assert.strictEqual(dataAtA.byteLength, 0); + assert.strictEqual(dataAtB.byteLength, 0); +} + +Promise.all([ + testBasicDuplex(), + testBidirectional(), + testMultipleWrites(), + testChannelClose(), + testWithOptions(), + testPerChannelOptions(), + testAbortSignal(), + testEmptyDuplex(), +]).then(common.mustCall()); diff --git a/test/parallel/test-stream-new-from.js b/test/parallel/test-stream-new-from.js new file mode 100644 index 00000000000000..e4925128cd26a2 --- /dev/null +++ b/test/parallel/test-stream-new-from.js @@ -0,0 +1,223 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { from, fromSync, Stream } = require('stream/new'); + +// ============================================================================= +// fromSync() tests +// ============================================================================= + +async function testFromSyncString() { + // String input should be UTF-8 encoded + const readable = fromSync('hello'); + const batches = []; + for (const batch of readable) { + batches.push(batch); + } + assert.strictEqual(batches.length, 1); + assert.strictEqual(batches[0].length, 1); + assert.deepStrictEqual(batches[0][0], + new TextEncoder().encode('hello')); +} + +async function testFromSyncUint8Array() { + const input = new Uint8Array([1, 2, 3]); + const readable = fromSync(input); + const batches = []; + for (const batch of readable) { + batches.push(batch); + } + assert.strictEqual(batches.length, 1); + assert.strictEqual(batches[0].length, 1); + assert.deepStrictEqual(batches[0][0], input); +} + +async function testFromSyncArrayBuffer() { + const ab = new ArrayBuffer(4); + new Uint8Array(ab).set([10, 20, 30, 40]); + const readable = fromSync(ab); + const batches = []; + for (const batch of readable) { + batches.push(batch); + } + 
assert.strictEqual(batches.length, 1); + assert.deepStrictEqual(batches[0][0], new Uint8Array([10, 20, 30, 40])); +} + +async function testFromSyncUint8ArrayArray() { + // Array of Uint8Array should yield as a single batch + const chunks = [new Uint8Array([1]), new Uint8Array([2])]; + const readable = fromSync(chunks); + const batches = []; + for (const batch of readable) { + batches.push(batch); + } + assert.strictEqual(batches.length, 1); + assert.strictEqual(batches[0].length, 2); +} + +async function testFromSyncGenerator() { + function* gen() { + yield new Uint8Array([1, 2]); + yield new Uint8Array([3, 4]); + } + const readable = fromSync(gen()); + const batches = []; + for (const batch of readable) { + batches.push(batch); + } + assert.strictEqual(batches.length, 2); + assert.deepStrictEqual(batches[0][0], new Uint8Array([1, 2])); + assert.deepStrictEqual(batches[1][0], new Uint8Array([3, 4])); +} + +async function testFromSyncNestedIterables() { + // Nested arrays and strings should be flattened + function* gen() { + yield ['hello', ' ', 'world']; + } + const readable = fromSync(gen()); + const batches = []; + for (const batch of readable) { + batches.push(batch); + } + assert.strictEqual(batches.length, 1); + assert.strictEqual(batches[0].length, 3); +} + +async function testFromSyncToStreamableProtocol() { + const sym = Symbol.for('Stream.toStreamable'); + const obj = { + [sym]() { + return 'protocol-data'; + }, + }; + function* gen() { + yield obj; + } + const readable = fromSync(gen()); + const batches = []; + for (const batch of readable) { + batches.push(batch); + } + assert.strictEqual(batches.length, 1); + assert.deepStrictEqual(batches[0][0], + new TextEncoder().encode('protocol-data')); +} + +async function testFromSyncRejectsNonStreamable() { + assert.throws( + () => fromSync(12345), + { name: 'TypeError' }, + ); +} + +// ============================================================================= +// from() tests (async) +// 
============================================================================= + +async function testFromString() { + const readable = from('hello-async'); + const batches = []; + for await (const batch of readable) { + batches.push(batch); + } + assert.strictEqual(batches.length, 1); + assert.deepStrictEqual(batches[0][0], + new TextEncoder().encode('hello-async')); +} + +async function testFromAsyncGenerator() { + async function* gen() { + yield new Uint8Array([10, 20]); + yield new Uint8Array([30, 40]); + } + const readable = from(gen()); + const batches = []; + for await (const batch of readable) { + batches.push(batch); + } + assert.strictEqual(batches.length, 2); + assert.deepStrictEqual(batches[0][0], new Uint8Array([10, 20])); + assert.deepStrictEqual(batches[1][0], new Uint8Array([30, 40])); +} + +async function testFromSyncIterableAsAsync() { + // Sync iterable passed to from() should work + function* gen() { + yield new Uint8Array([1]); + yield new Uint8Array([2]); + } + const readable = from(gen()); + const batches = []; + for await (const batch of readable) { + batches.push(batch); + } + // Sync iterables get batched together + assert.ok(batches.length >= 1); +} + +async function testFromToAsyncStreamableProtocol() { + const sym = Symbol.for('Stream.toAsyncStreamable'); + const obj = { + [sym]() { + return 'async-protocol-data'; + }, + }; + async function* gen() { + yield obj; + } + const readable = from(gen()); + const batches = []; + for await (const batch of readable) { + batches.push(batch); + } + assert.strictEqual(batches.length, 1); + assert.deepStrictEqual(batches[0][0], + new TextEncoder().encode('async-protocol-data')); +} + +async function testFromRejectsNonStreamable() { + assert.throws( + () => from(12345), + { name: 'TypeError' }, + ); +} + +async function testFromEmptyArray() { + const readable = from([]); + const batches = []; + for await (const batch of readable) { + batches.push(batch); + } + assert.strictEqual(batches.length, 0); +} + 
+// Also accessible via Stream namespace +async function testStreamNamespace() { + const readable = Stream.from('via-namespace'); + const batches = []; + for await (const batch of readable) { + batches.push(batch); + } + assert.strictEqual(batches.length, 1); +} + +Promise.all([ + testFromSyncString(), + testFromSyncUint8Array(), + testFromSyncArrayBuffer(), + testFromSyncUint8ArrayArray(), + testFromSyncGenerator(), + testFromSyncNestedIterables(), + testFromSyncToStreamableProtocol(), + testFromSyncRejectsNonStreamable(), + testFromString(), + testFromAsyncGenerator(), + testFromSyncIterableAsAsync(), + testFromToAsyncStreamableProtocol(), + testFromRejectsNonStreamable(), + testFromEmptyArray(), + testStreamNamespace(), +]).then(common.mustCall()); diff --git a/test/parallel/test-stream-new-namespace.js b/test/parallel/test-stream-new-namespace.js new file mode 100644 index 00000000000000..cacdead5a19ea0 --- /dev/null +++ b/test/parallel/test-stream-new-namespace.js @@ -0,0 +1,209 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const streamNew = require('stream/new'); + +// ============================================================================= +// Stream namespace object +// ============================================================================= + +async function testStreamNamespaceExists() { + assert.ok(streamNew.Stream); + assert.strictEqual(typeof streamNew.Stream, 'object'); +} + +async function testStreamNamespaceFrozen() { + assert.ok(Object.isFrozen(streamNew.Stream)); +} + +async function testStreamNamespaceFactories() { + const { Stream } = streamNew; + + assert.strictEqual(typeof Stream.push, 'function'); + assert.strictEqual(typeof Stream.duplex, 'function'); + assert.strictEqual(typeof Stream.from, 'function'); + assert.strictEqual(typeof Stream.fromSync, 'function'); +} + +async function testStreamNamespacePipelines() { + const { Stream } = streamNew; + + assert.strictEqual(typeof 
Stream.pull, 'function'); + assert.strictEqual(typeof Stream.pullSync, 'function'); + assert.strictEqual(typeof Stream.pipeTo, 'function'); + assert.strictEqual(typeof Stream.pipeToSync, 'function'); +} + +async function testStreamNamespaceAsyncConsumers() { + const { Stream } = streamNew; + + assert.strictEqual(typeof Stream.bytes, 'function'); + assert.strictEqual(typeof Stream.text, 'function'); + assert.strictEqual(typeof Stream.arrayBuffer, 'function'); + assert.strictEqual(typeof Stream.array, 'function'); +} + +async function testStreamNamespaceSyncConsumers() { + const { Stream } = streamNew; + + assert.strictEqual(typeof Stream.bytesSync, 'function'); + assert.strictEqual(typeof Stream.textSync, 'function'); + assert.strictEqual(typeof Stream.arrayBufferSync, 'function'); + assert.strictEqual(typeof Stream.arraySync, 'function'); +} + +async function testStreamNamespaceCombining() { + const { Stream } = streamNew; + + assert.strictEqual(typeof Stream.merge, 'function'); + assert.strictEqual(typeof Stream.broadcast, 'function'); + assert.strictEqual(typeof Stream.share, 'function'); + assert.strictEqual(typeof Stream.shareSync, 'function'); +} + +async function testStreamNamespaceUtilities() { + const { Stream } = streamNew; + + assert.strictEqual(typeof Stream.tap, 'function'); + assert.strictEqual(typeof Stream.tapSync, 'function'); + assert.strictEqual(typeof Stream.ondrain, 'function'); +} + +async function testStreamNamespaceProtocols() { + const { Stream } = streamNew; + + assert.strictEqual(typeof Stream.toStreamable, 'symbol'); + assert.strictEqual(typeof Stream.toAsyncStreamable, 'symbol'); + assert.strictEqual(typeof Stream.broadcastProtocol, 'symbol'); + assert.strictEqual(typeof Stream.shareProtocol, 'symbol'); + assert.strictEqual(typeof Stream.shareSyncProtocol, 'symbol'); + assert.strictEqual(typeof Stream.drainableProtocol, 'symbol'); +} + +// ============================================================================= +// Individual 
exports (destructured imports) +// ============================================================================= + +async function testIndividualExports() { + // Factories + assert.strictEqual(typeof streamNew.push, 'function'); + assert.strictEqual(typeof streamNew.duplex, 'function'); + assert.strictEqual(typeof streamNew.from, 'function'); + assert.strictEqual(typeof streamNew.fromSync, 'function'); + + // Pipelines + assert.strictEqual(typeof streamNew.pull, 'function'); + assert.strictEqual(typeof streamNew.pullSync, 'function'); + assert.strictEqual(typeof streamNew.pipeTo, 'function'); + assert.strictEqual(typeof streamNew.pipeToSync, 'function'); + + // Consumers + assert.strictEqual(typeof streamNew.bytes, 'function'); + assert.strictEqual(typeof streamNew.bytesSync, 'function'); + assert.strictEqual(typeof streamNew.text, 'function'); + assert.strictEqual(typeof streamNew.textSync, 'function'); + assert.strictEqual(typeof streamNew.arrayBuffer, 'function'); + assert.strictEqual(typeof streamNew.arrayBufferSync, 'function'); + assert.strictEqual(typeof streamNew.array, 'function'); + assert.strictEqual(typeof streamNew.arraySync, 'function'); + + // Combining + assert.strictEqual(typeof streamNew.merge, 'function'); + assert.strictEqual(typeof streamNew.broadcast, 'function'); + assert.strictEqual(typeof streamNew.share, 'function'); + assert.strictEqual(typeof streamNew.shareSync, 'function'); + + // Utilities + assert.strictEqual(typeof streamNew.tap, 'function'); + assert.strictEqual(typeof streamNew.tapSync, 'function'); + assert.strictEqual(typeof streamNew.ondrain, 'function'); + + // Protocol symbols + assert.strictEqual(typeof streamNew.toStreamable, 'symbol'); + assert.strictEqual(typeof streamNew.toAsyncStreamable, 'symbol'); + assert.strictEqual(typeof streamNew.broadcastProtocol, 'symbol'); + assert.strictEqual(typeof streamNew.shareProtocol, 'symbol'); + assert.strictEqual(typeof streamNew.shareSyncProtocol, 'symbol'); + 
assert.strictEqual(typeof streamNew.drainableProtocol, 'symbol'); +} + +async function testMultiConsumerExports() { + // Broadcast and Share constructors/factories + assert.ok(streamNew.Broadcast); + assert.strictEqual(typeof streamNew.Broadcast.from, 'function'); + assert.ok(streamNew.Share); + assert.strictEqual(typeof streamNew.Share.from, 'function'); + assert.ok(streamNew.SyncShare); + assert.strictEqual(typeof streamNew.SyncShare.fromSync, 'function'); +} + +// ============================================================================= +// Cross-check: namespace matches individual exports +// ============================================================================= + +async function testNamespaceMatchesExports() { + const { Stream } = streamNew; + + // Every function on Stream should also be available as a direct export + assert.strictEqual(Stream.push, streamNew.push); + assert.strictEqual(Stream.duplex, streamNew.duplex); + assert.strictEqual(Stream.from, streamNew.from); + assert.strictEqual(Stream.fromSync, streamNew.fromSync); + assert.strictEqual(Stream.pull, streamNew.pull); + assert.strictEqual(Stream.pullSync, streamNew.pullSync); + assert.strictEqual(Stream.pipeTo, streamNew.pipeTo); + assert.strictEqual(Stream.pipeToSync, streamNew.pipeToSync); + assert.strictEqual(Stream.bytes, streamNew.bytes); + assert.strictEqual(Stream.text, streamNew.text); + assert.strictEqual(Stream.arrayBuffer, streamNew.arrayBuffer); + assert.strictEqual(Stream.array, streamNew.array); + assert.strictEqual(Stream.bytesSync, streamNew.bytesSync); + assert.strictEqual(Stream.textSync, streamNew.textSync); + assert.strictEqual(Stream.arrayBufferSync, streamNew.arrayBufferSync); + assert.strictEqual(Stream.arraySync, streamNew.arraySync); + assert.strictEqual(Stream.merge, streamNew.merge); + assert.strictEqual(Stream.broadcast, streamNew.broadcast); + assert.strictEqual(Stream.share, streamNew.share); + assert.strictEqual(Stream.shareSync, streamNew.shareSync); + 
assert.strictEqual(Stream.tap, streamNew.tap); + assert.strictEqual(Stream.tapSync, streamNew.tapSync); + assert.strictEqual(Stream.ondrain, streamNew.ondrain); + + // Protocol symbols + assert.strictEqual(Stream.toStreamable, streamNew.toStreamable); + assert.strictEqual(Stream.toAsyncStreamable, streamNew.toAsyncStreamable); + assert.strictEqual(Stream.broadcastProtocol, streamNew.broadcastProtocol); + assert.strictEqual(Stream.shareProtocol, streamNew.shareProtocol); + assert.strictEqual(Stream.shareSyncProtocol, streamNew.shareSyncProtocol); + assert.strictEqual(Stream.drainableProtocol, streamNew.drainableProtocol); +} + +// ============================================================================= +// Require paths +// ============================================================================= + +async function testRequirePaths() { + // Both require('stream/new') and require('node:stream/new') should work + const fromPlain = require('stream/new'); + const fromNode = require('node:stream/new'); + + assert.strictEqual(fromPlain.Stream, fromNode.Stream); + assert.strictEqual(fromPlain.push, fromNode.push); +} + +Promise.all([ + testStreamNamespaceExists(), + testStreamNamespaceFrozen(), + testStreamNamespaceFactories(), + testStreamNamespacePipelines(), + testStreamNamespaceAsyncConsumers(), + testStreamNamespaceSyncConsumers(), + testStreamNamespaceCombining(), + testStreamNamespaceUtilities(), + testStreamNamespaceProtocols(), + testIndividualExports(), + testMultiConsumerExports(), + testNamespaceMatchesExports(), + testRequirePaths(), +]).then(common.mustCall()); diff --git a/test/parallel/test-stream-new-pull.js b/test/parallel/test-stream-new-pull.js new file mode 100644 index 00000000000000..34db9b96c74e5c --- /dev/null +++ b/test/parallel/test-stream-new-pull.js @@ -0,0 +1,213 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { pull, pullSync, pipeTo, pipeToSync, from, fromSync, bytesSync, + text } = 
require('stream/new'); + +// ============================================================================= +// pullSync() tests +// ============================================================================= + +async function testPullSyncIdentity() { + // No transforms - just pass through + const source = fromSync('hello'); + const result = pullSync(source); + const data = bytesSync(result); + assert.deepStrictEqual(data, new TextEncoder().encode('hello')); +} + +async function testPullSyncStatelessTransform() { + const source = fromSync('abc'); + const upper = (chunks) => { + if (chunks === null) return null; + return chunks.map((c) => { + const str = new TextDecoder().decode(c); + return new TextEncoder().encode(str.toUpperCase()); + }); + }; + const result = pullSync(source, upper); + const data = bytesSync(result); + assert.deepStrictEqual(data, new TextEncoder().encode('ABC')); +} + +async function testPullSyncStatefulTransform() { + const source = fromSync('data'); + const stateful = { + transform: function*(source) { + for (const chunks of source) { + if (chunks === null) { + // Flush: emit trailer + yield new TextEncoder().encode('-END'); + continue; + } + for (const chunk of chunks) { + yield chunk; + } + } + }, + }; + const result = pullSync(source, stateful); + const data = new TextDecoder().decode(bytesSync(result)); + assert.strictEqual(data, 'data-END'); +} + +async function testPullSyncChainedTransforms() { + const source = fromSync('hello'); + const addExcl = (chunks) => { + if (chunks === null) return null; + return [...chunks, new TextEncoder().encode('!')]; + }; + const addQ = (chunks) => { + if (chunks === null) return null; + return [...chunks, new TextEncoder().encode('?')]; + }; + const result = pullSync(source, addExcl, addQ); + const data = new TextDecoder().decode(bytesSync(result)); + assert.strictEqual(data, 'hello!?'); +} + +// ============================================================================= +// pull() tests (async) +// 
============================================================================= + +async function testPullIdentity() { + const source = from('hello-async'); + const result = pull(source); + const data = await text(result); + assert.strictEqual(data, 'hello-async'); +} + +async function testPullStatelessTransform() { + const source = from('abc'); + const upper = (chunks) => { + if (chunks === null) return null; + return chunks.map((c) => { + const str = new TextDecoder().decode(c); + return new TextEncoder().encode(str.toUpperCase()); + }); + }; + const result = pull(source, upper); + const data = await text(result); + assert.strictEqual(data, 'ABC'); +} + +async function testPullStatefulTransform() { + const source = from('data'); + const stateful = { + transform: async function*(source) { + for await (const chunks of source) { + if (chunks === null) { + yield new TextEncoder().encode('-ASYNC-END'); + continue; + } + for (const chunk of chunks) { + yield chunk; + } + } + }, + }; + const result = pull(source, stateful); + const data = await text(result); + assert.strictEqual(data, 'data-ASYNC-END'); +} + +async function testPullWithAbortSignal() { + const ac = new AbortController(); + ac.abort(); + + async function* gen() { + yield [new Uint8Array([1])]; + } + + const result = pull(gen(), { signal: ac.signal }); + await assert.rejects( + async () => { + // eslint-disable-next-line no-unused-vars + for await (const _ of result) { + assert.fail('Should not reach here'); + } + }, + (err) => err.name === 'AbortError', + ); +} + +async function testPullChainedTransforms() { + const source = from('hello'); + const transforms = [ + (chunks) => { + if (chunks === null) return null; + return [...chunks, new TextEncoder().encode('!')]; + }, + (chunks) => { + if (chunks === null) return null; + return [...chunks, new TextEncoder().encode('?')]; + }, + ]; + const result = pull(source, ...transforms); + const data = await text(result); + assert.strictEqual(data, 'hello!?'); +} + 
+// ============================================================================= +// pipeTo() / pipeToSync() tests +// ============================================================================= + +async function testPipeToSync() { + const source = fromSync('pipe-data'); + const written = []; + const writer = { + write(chunk) { written.push(chunk); }, + end() { return written.length; }, + abort() {}, + }; + + const totalBytes = pipeToSync(source, writer); + assert.ok(totalBytes > 0); + assert.ok(written.length > 0); + const result = new TextDecoder().decode( + new Uint8Array(written.reduce((acc, c) => [...acc, ...c], []))); + assert.strictEqual(result, 'pipe-data'); +} + +async function testPipeTo() { + const source = from('async-pipe-data'); + const written = []; + const writer = { + async write(chunk) { written.push(chunk); }, + async end() { return written.length; }, + async abort() {}, + }; + + const totalBytes = await pipeTo(source, writer); + assert.ok(totalBytes > 0); + assert.ok(written.length > 0); +} + +async function testPipeToPreventClose() { + const source = from('data'); + let endCalled = false; + const writer = { + async write() {}, + async end() { endCalled = true; }, + async abort() {}, + }; + + await pipeTo(source, writer, { preventClose: true }); + assert.strictEqual(endCalled, false); +} + +Promise.all([ + testPullSyncIdentity(), + testPullSyncStatelessTransform(), + testPullSyncStatefulTransform(), + testPullSyncChainedTransforms(), + testPullIdentity(), + testPullStatelessTransform(), + testPullStatefulTransform(), + testPullWithAbortSignal(), + testPullChainedTransforms(), + testPipeToSync(), + testPipeTo(), + testPipeToPreventClose(), +]).then(common.mustCall()); diff --git a/test/parallel/test-stream-new-push.js b/test/parallel/test-stream-new-push.js new file mode 100644 index 00000000000000..2c0a2550ed510d --- /dev/null +++ b/test/parallel/test-stream-new-push.js @@ -0,0 +1,220 @@ +'use strict'; + +const common = require('../common'); 
+const assert = require('assert'); +const { push, text, ondrain } = require('stream/new'); + +async function testBasicWriteRead() { + const { writer, readable } = push(); + + writer.write('hello'); + writer.end(); + + const data = await text(readable); + assert.strictEqual(data, 'hello'); +} + +async function testMultipleWrites() { + const { writer, readable } = push({ highWaterMark: 10 }); + + writer.write('a'); + writer.write('b'); + writer.write('c'); + writer.end(); + + const data = await text(readable); + assert.strictEqual(data, 'abc'); +} + +async function testDesiredSize() { + const { writer } = push({ highWaterMark: 3 }); + + assert.strictEqual(writer.desiredSize, 3); + writer.writeSync('a'); + assert.strictEqual(writer.desiredSize, 2); + writer.writeSync('b'); + assert.strictEqual(writer.desiredSize, 1); + writer.writeSync('c'); + assert.strictEqual(writer.desiredSize, 0); + + writer.end(); + assert.strictEqual(writer.desiredSize, null); +} + +async function testStrictBackpressure() { + const { writer, readable } = push({ + highWaterMark: 1, + backpressure: 'strict', + }); + + // First write should succeed synchronously + assert.strictEqual(writer.writeSync('a'), true); + // Second write should fail synchronously (buffer full) + assert.strictEqual(writer.writeSync('b'), false); + + // Consume to free space, then end + const resultPromise = text(readable); + writer.end(); + const data = await resultPromise; + assert.strictEqual(data, 'a'); +} + +async function testDropOldest() { + const { writer, readable } = push({ + highWaterMark: 2, + backpressure: 'drop-oldest', + }); + + writer.writeSync('first'); + writer.writeSync('second'); + // This should drop 'first' + writer.writeSync('third'); + writer.end(); + + const batches = []; + for await (const batch of readable) { + batches.push(batch); + } + // Should have 'second' and 'third' + const allBytes = []; + for (const batch of batches) { + for (const chunk of batch) { + allBytes.push(...chunk); + } + } + 
const result = new TextDecoder().decode(new Uint8Array(allBytes)); + assert.strictEqual(result, 'secondthird'); +} + +async function testDropNewest() { + const { writer, readable } = push({ + highWaterMark: 1, + backpressure: 'drop-newest', + }); + + writer.writeSync('kept'); + // This is silently dropped + writer.writeSync('dropped'); + writer.end(); + + const data = await text(readable); + assert.strictEqual(data, 'kept'); +} + +async function testWriterEnd() { + const { writer, readable } = push(); + + const totalBytes = writer.endSync(); + assert.strictEqual(totalBytes, 0); + + const batches = []; + for await (const batch of readable) { + batches.push(batch); + } + assert.strictEqual(batches.length, 0); +} + +async function testWriterAbort() { + const { writer, readable } = push(); + + writer.abort(new Error('test abort')); + + await assert.rejects( + async () => { + // eslint-disable-next-line no-unused-vars + for await (const _ of readable) { + assert.fail('Should not reach here'); + } + }, + { message: 'test abort' }, + ); +} + +async function testConsumerBreak() { + const { writer, readable } = push({ highWaterMark: 10 }); + + writer.writeSync('a'); + writer.writeSync('b'); + writer.writeSync('c'); + + // Break after first batch + // eslint-disable-next-line no-unused-vars + for await (const _ of readable) { + break; + } + + // Writer should now see null desiredSize + assert.strictEqual(writer.desiredSize, null); +} + +async function testAbortSignal() { + const ac = new AbortController(); + const { readable } = push({ signal: ac.signal }); + + ac.abort(); + + await assert.rejects( + async () => { + // eslint-disable-next-line no-unused-vars + for await (const _ of readable) { + assert.fail('Should not reach here'); + } + }, + (err) => err.name === 'AbortError', + ); +} + +async function testOndrain() { + const { writer } = push({ highWaterMark: 1 }); + + // With space available, ondrain resolves immediately + const drainResult = ondrain(writer); + 
assert.ok(drainResult instanceof Promise); + const result = await drainResult; + assert.strictEqual(result, true); + + // After close, ondrain returns null + writer.end(); + assert.strictEqual(ondrain(writer), null); +} + +async function testOndainNonDrainable() { + // Non-drainable objects return null + assert.strictEqual(ondrain(null), null); + assert.strictEqual(ondrain({}), null); + assert.strictEqual(ondrain('string'), null); +} + +async function testPushWithTransforms() { + const upper = (chunks) => { + if (chunks === null) return null; + return chunks.map((c) => { + const str = new TextDecoder().decode(c); + return new TextEncoder().encode(str.toUpperCase()); + }); + }; + + const { writer, readable } = push(upper); + + writer.write('hello'); + writer.end(); + + const data = await text(readable); + assert.strictEqual(data, 'HELLO'); +} + +Promise.all([ + testBasicWriteRead(), + testMultipleWrites(), + testDesiredSize(), + testStrictBackpressure(), + testDropOldest(), + testDropNewest(), + testWriterEnd(), + testWriterAbort(), + testConsumerBreak(), + testAbortSignal(), + testOndrain(), + testOndainNonDrainable(), + testPushWithTransforms(), +]).then(common.mustCall()); diff --git a/test/parallel/test-stream-new-share.js b/test/parallel/test-stream-new-share.js new file mode 100644 index 00000000000000..a97ae62deaf3ad --- /dev/null +++ b/test/parallel/test-stream-new-share.js @@ -0,0 +1,240 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { + share, + shareSync, + Share, + SyncShare, + from, + fromSync, + text, + textSync, + +} = require('stream/new'); + +// ============================================================================= +// Async share() +// ============================================================================= + +async function testBasicShare() { + const source = from('hello shared'); + const shared = share(source); + + const consumer = shared.pull(); + const data = await text(consumer); 
+ assert.strictEqual(data, 'hello shared'); +} + +async function testShareMultipleConsumers() { + async function* gen() { + yield [new TextEncoder().encode('chunk1')]; + yield [new TextEncoder().encode('chunk2')]; + yield [new TextEncoder().encode('chunk3')]; + } + + const shared = share(gen(), { highWaterMark: 16 }); + + const c1 = shared.pull(); + const c2 = shared.pull(); + + assert.strictEqual(shared.consumerCount, 2); + + const [data1, data2] = await Promise.all([ + text(c1), + text(c2), + ]); + + assert.strictEqual(data1, 'chunk1chunk2chunk3'); + assert.strictEqual(data2, 'chunk1chunk2chunk3'); +} + +async function testShareConsumerCount() { + const source = from('data'); + const shared = share(source); + + assert.strictEqual(shared.consumerCount, 0); + + const c1 = shared.pull(); + assert.strictEqual(shared.consumerCount, 1); + + const c2 = shared.pull(); + assert.strictEqual(shared.consumerCount, 2); + + // Cancel detaches all consumers + shared.cancel(); + + // Both should complete immediately + const [data1, data2] = await Promise.all([ + text(c1), + text(c2), + ]); + assert.strictEqual(data1, ''); + assert.strictEqual(data2, ''); +} + +async function testShareCancel() { + const source = from('data'); + const shared = share(source); + const consumer = shared.pull(); + + shared.cancel(); + + const batches = []; + for await (const batch of consumer) { + batches.push(batch); + } + assert.strictEqual(batches.length, 0); +} + +async function testShareCancelWithReason() { + const source = from('data'); + const shared = share(source); + const consumer = shared.pull(); + + shared.cancel(new Error('share cancelled')); + + await assert.rejects( + async () => { + // eslint-disable-next-line no-unused-vars + for await (const _ of consumer) { + assert.fail('Should not reach here'); + } + }, + { message: 'share cancelled' }, + ); +} + +async function testShareAbortSignal() { + const ac = new AbortController(); + const source = from('data'); + const shared = 
share(source, { signal: ac.signal }); + const consumer = shared.pull(); + + ac.abort(); + + const batches = []; + for await (const batch of consumer) { + batches.push(batch); + } + assert.strictEqual(batches.length, 0); +} + +async function testShareAlreadyAborted() { + const ac = new AbortController(); + ac.abort(); + + const source = from('data'); + const shared = share(source, { signal: ac.signal }); + const consumer = shared.pull(); + + const batches = []; + for await (const batch of consumer) { + batches.push(batch); + } + assert.strictEqual(batches.length, 0); +} + +// ============================================================================= +// Share.from +// ============================================================================= + +async function testShareFrom() { + const source = from('share-from'); + const shared = Share.from(source); + const consumer = shared.pull(); + + const data = await text(consumer); + assert.strictEqual(data, 'share-from'); +} + +async function testShareFromRejectsNonStreamable() { + assert.throws( + () => Share.from(12345), + { name: 'TypeError' }, + ); +} + +// ============================================================================= +// Sync share +// ============================================================================= + +async function testShareSyncBasic() { + const source = fromSync('sync shared'); + const shared = shareSync(source); + + const consumer = shared.pull(); + const data = textSync(consumer); + assert.strictEqual(data, 'sync shared'); +} + +async function testShareSyncMultipleConsumers() { + function* gen() { + yield [new TextEncoder().encode('a')]; + yield [new TextEncoder().encode('b')]; + yield [new TextEncoder().encode('c')]; + } + + const shared = shareSync(gen(), { highWaterMark: 16 }); + + const c1 = shared.pull(); + const c2 = shared.pull(); + + const data1 = textSync(c1); + const data2 = textSync(c2); + + assert.strictEqual(data1, 'abc'); + assert.strictEqual(data2, 'abc'); +} + 
+async function testShareSyncCancel() { + const source = fromSync('data'); + const shared = shareSync(source); + const consumer = shared.pull(); + + shared.cancel(); + + const batches = []; + for (const batch of consumer) { + batches.push(batch); + } + assert.strictEqual(batches.length, 0); +} + +// ============================================================================= +// SyncShare.fromSync +// ============================================================================= + +async function testSyncShareFromSync() { + const source = fromSync('sync-share-from'); + const shared = SyncShare.fromSync(source); + const consumer = shared.pull(); + + const data = textSync(consumer); + assert.strictEqual(data, 'sync-share-from'); +} + +async function testSyncShareFromRejectsNonStreamable() { + assert.throws( + () => SyncShare.fromSync(12345), + { name: 'TypeError' }, + ); +} + +Promise.all([ + testBasicShare(), + testShareMultipleConsumers(), + testShareConsumerCount(), + testShareCancel(), + testShareCancelWithReason(), + testShareAbortSignal(), + testShareAlreadyAborted(), + testShareFrom(), + testShareFromRejectsNonStreamable(), + testShareSyncBasic(), + testShareSyncMultipleConsumers(), + testShareSyncCancel(), + testSyncShareFromSync(), + testSyncShareFromRejectsNonStreamable(), +]).then(common.mustCall()); diff --git a/test/parallel/test-stream-new-transform.js b/test/parallel/test-stream-new-transform.js new file mode 100644 index 00000000000000..fb027dcff5497d --- /dev/null +++ b/test/parallel/test-stream-new-transform.js @@ -0,0 +1,395 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { + from, + pull, + bytes, + text, + compressGzip, + compressDeflate, + compressBrotli, + compressZstd, + decompressGzip, + decompressDeflate, + decompressBrotli, + decompressZstd, +} = require('stream/new'); + +// ============================================================================= +// Helper: compress then decompress, 
verify round-trip equality +// ============================================================================= + +async function roundTrip(input, compress, decompress) { + const source = from(input); + const compressed = pull(source, compress); + const decompressed = pull(compressed, decompress); + return text(decompressed); +} + +async function roundTripBytes(inputBuf, compress, decompress) { + const source = from(inputBuf); + const compressed = pull(source, compress); + const decompressed = pull(compressed, decompress); + return bytes(decompressed); +} + +// ============================================================================= +// Gzip round-trip tests +// ============================================================================= + +async function testGzipRoundTrip() { + const input = 'Hello, gzip compression!'; + const result = await roundTrip(input, compressGzip(), decompressGzip()); + assert.strictEqual(result, input); +} + +async function testGzipLargeData() { + // 100KB of repeated text - exercises multi-chunk path + const input = 'gzip large data test. '.repeat(5000); + const result = await roundTrip(input, compressGzip(), decompressGzip()); + assert.strictEqual(result, input); +} + +async function testGzipActuallyCompresses() { + const input = 'Repeated data compresses well. 
'.repeat(1000); + const inputBuf = Buffer.from(input); + const source = from(inputBuf); + const compressed = await bytes(pull(source, compressGzip())); + assert.ok(compressed.byteLength < inputBuf.byteLength, + `Compressed ${compressed.byteLength} should be < original ${inputBuf.byteLength}`); +} + +// ============================================================================= +// Deflate round-trip tests +// ============================================================================= + +async function testDeflateRoundTrip() { + const input = 'Hello, deflate compression!'; + const result = await roundTrip(input, compressDeflate(), decompressDeflate()); + assert.strictEqual(result, input); +} + +async function testDeflateLargeData() { + const input = 'deflate large data test. '.repeat(5000); + const result = await roundTrip(input, compressDeflate(), decompressDeflate()); + assert.strictEqual(result, input); +} + +async function testDeflateActuallyCompresses() { + const input = 'Repeated data compresses well. '.repeat(1000); + const inputBuf = Buffer.from(input); + const source = from(inputBuf); + const compressed = await bytes(pull(source, compressDeflate())); + assert.ok(compressed.byteLength < inputBuf.byteLength, + `Compressed ${compressed.byteLength} should be < original ${inputBuf.byteLength}`); +} + +// ============================================================================= +// Brotli round-trip tests +// ============================================================================= + +async function testBrotliRoundTrip() { + const input = 'Hello, brotli compression!'; + const result = await roundTrip(input, compressBrotli(), decompressBrotli()); + assert.strictEqual(result, input); +} + +async function testBrotliLargeData() { + const input = 'brotli large data test. 
'.repeat(5000); + const result = await roundTrip(input, compressBrotli(), decompressBrotli()); + assert.strictEqual(result, input); +} + +async function testBrotliActuallyCompresses() { + const input = 'Repeated data compresses well. '.repeat(1000); + const inputBuf = Buffer.from(input); + const source = from(inputBuf); + const compressed = await bytes(pull(source, compressBrotli())); + assert.ok(compressed.byteLength < inputBuf.byteLength, + `Compressed ${compressed.byteLength} should be < original ${inputBuf.byteLength}`); +} + +// ============================================================================= +// Zstd round-trip tests +// ============================================================================= + +async function testZstdRoundTrip() { + const input = 'Hello, zstd compression!'; + const result = await roundTrip(input, compressZstd(), decompressZstd()); + assert.strictEqual(result, input); +} + +async function testZstdLargeData() { + const input = 'zstd large data test. '.repeat(5000); + const result = await roundTrip(input, compressZstd(), decompressZstd()); + assert.strictEqual(result, input); +} + +async function testZstdActuallyCompresses() { + const input = 'Repeated data compresses well. 
'.repeat(1000); + const inputBuf = Buffer.from(input); + const source = from(inputBuf); + const compressed = await bytes(pull(source, compressZstd())); + assert.ok(compressed.byteLength < inputBuf.byteLength, + `Compressed ${compressed.byteLength} should be < original ${inputBuf.byteLength}`); +} + +// ============================================================================= +// Binary data round-trip - verify no corruption on non-text data +// ============================================================================= + +async function testBinaryRoundTripGzip() { + const input = Buffer.alloc(1024); + for (let i = 0; i < input.length; i++) input[i] = i & 0xFF; + const result = await roundTripBytes(input, compressGzip(), decompressGzip()); + assert.strictEqual(result.byteLength, input.byteLength); + assert.deepStrictEqual(Buffer.from(result), input); +} + +async function testBinaryRoundTripDeflate() { + const input = Buffer.alloc(1024); + for (let i = 0; i < input.length; i++) input[i] = i & 0xFF; + const result = await roundTripBytes(input, compressDeflate(), + decompressDeflate()); + assert.strictEqual(result.byteLength, input.byteLength); + assert.deepStrictEqual(Buffer.from(result), input); +} + +async function testBinaryRoundTripBrotli() { + const input = Buffer.alloc(1024); + for (let i = 0; i < input.length; i++) input[i] = i & 0xFF; + const result = await roundTripBytes(input, compressBrotli(), + decompressBrotli()); + assert.strictEqual(result.byteLength, input.byteLength); + assert.deepStrictEqual(Buffer.from(result), input); +} + +async function testBinaryRoundTripZstd() { + const input = Buffer.alloc(1024); + for (let i = 0; i < input.length; i++) input[i] = i & 0xFF; + const result = await roundTripBytes(input, compressZstd(), + decompressZstd()); + assert.strictEqual(result.byteLength, input.byteLength); + assert.deepStrictEqual(Buffer.from(result), input); +} + +// ============================================================================= +// 
Empty input +// ============================================================================= + +async function testEmptyInputGzip() { + const result = await roundTrip('', compressGzip(), decompressGzip()); + assert.strictEqual(result, ''); +} + +async function testEmptyInputDeflate() { + const result = await roundTrip('', compressDeflate(), decompressDeflate()); + assert.strictEqual(result, ''); +} + +async function testEmptyInputBrotli() { + const result = await roundTrip('', compressBrotli(), decompressBrotli()); + assert.strictEqual(result, ''); +} + +async function testEmptyInputZstd() { + const result = await roundTrip('', compressZstd(), decompressZstd()); + assert.strictEqual(result, ''); +} + +// ============================================================================= +// Chained transforms - compress with one, then another, decompress in reverse +// ============================================================================= + +async function testChainedGzipDeflate() { + const input = 'Double compression test data. 
'.repeat(100); + const source = from(input); + // Compress: gzip then deflate + const compressed = pull(pull(source, compressGzip()), compressDeflate()); + // Decompress: deflate then gzip (reverse order) + const decompressed = pull(pull(compressed, decompressDeflate()), + decompressGzip()); + const result = await text(decompressed); + assert.strictEqual(result, input); +} + +// ============================================================================= +// Transform protocol: verify each factory returns a proper transform object +// ============================================================================= + +async function testTransformProtocol() { + const factories = [ + compressGzip, compressDeflate, compressBrotli, compressZstd, + decompressGzip, decompressDeflate, decompressBrotli, decompressZstd, + ]; + + for (const factory of factories) { + const t = factory(); + assert.strictEqual(typeof t.transform, 'function', + `${factory.name}() should have a transform function`); + } +} + +// ============================================================================= +// Cross-compatibility: verify gzip/deflate output is compatible with zlib +// ============================================================================= + +async function testGzipCompatWithZlib() { + const zlib = require('zlib'); + const { promisify } = require('util'); + const gunzip = promisify(zlib.gunzip); + + const input = 'Cross-compat test with node:zlib. '.repeat(100); + const source = from(input); + const compressed = await bytes(pull(source, compressGzip())); + + // Decompress with standard zlib + const decompressed = await gunzip(compressed); + assert.strictEqual(decompressed.toString(), input); +} + +async function testDeflateCompatWithZlib() { + const zlib = require('zlib'); + const { promisify } = require('util'); + const inflate = promisify(zlib.inflate); + + const input = 'Cross-compat deflate test. 
'.repeat(100); + const source = from(input); + const compressed = await bytes(pull(source, compressDeflate())); + + // Decompress with standard zlib + const decompressed = await inflate(compressed); + assert.strictEqual(decompressed.toString(), input); +} + +async function testBrotliCompatWithZlib() { + const zlib = require('zlib'); + const { promisify } = require('util'); + const brotliDecompress = promisify(zlib.brotliDecompress); + + const input = 'Cross-compat brotli test. '.repeat(100); + const source = from(input); + const compressed = await bytes(pull(source, compressBrotli())); + + const decompressed = await brotliDecompress(compressed); + assert.strictEqual(decompressed.toString(), input); +} + +async function testZstdCompatWithZlib() { + const zlib = require('zlib'); + const { promisify } = require('util'); + const zstdDecompress = promisify(zlib.zstdDecompress); + + const input = 'Cross-compat zstd test. '.repeat(100); + const source = from(input); + const compressed = await bytes(pull(source, compressZstd())); + + const decompressed = await zstdDecompress(compressed); + assert.strictEqual(decompressed.toString(), input); +} + +// ============================================================================= +// Reverse compat: compress with zlib, decompress with new streams +// ============================================================================= + +async function testZlibGzipToNewStreams() { + const zlib = require('zlib'); + const { promisify } = require('util'); + const gzip = promisify(zlib.gzip); + + const input = 'Reverse compat gzip test. '.repeat(100); + const compressed = await gzip(input); + const result = await text(pull(from(compressed), decompressGzip())); + assert.strictEqual(result, input); +} + +async function testZlibDeflateToNewStreams() { + const zlib = require('zlib'); + const { promisify } = require('util'); + const deflate = promisify(zlib.deflate); + + const input = 'Reverse compat deflate test. 
'.repeat(100); + const compressed = await deflate(input); + const result = await text(pull(from(compressed), decompressDeflate())); + assert.strictEqual(result, input); +} + +async function testZlibBrotliToNewStreams() { + const zlib = require('zlib'); + const { promisify } = require('util'); + const brotliCompress = promisify(zlib.brotliCompress); + + const input = 'Reverse compat brotli test. '.repeat(100); + const compressed = await brotliCompress(input); + const result = await text(pull(from(compressed), decompressBrotli())); + assert.strictEqual(result, input); +} + +async function testZlibZstdToNewStreams() { + const zlib = require('zlib'); + const { promisify } = require('util'); + const zstdCompress = promisify(zlib.zstdCompress); + + const input = 'Reverse compat zstd test. '.repeat(100); + const compressed = await zstdCompress(input); + const result = await text(pull(from(compressed), decompressZstd())); + assert.strictEqual(result, input); +} + +// ============================================================================= +// Run all tests +// ============================================================================= + +(async () => { + // Gzip + await testGzipRoundTrip(); + await testGzipLargeData(); + await testGzipActuallyCompresses(); + + // Deflate + await testDeflateRoundTrip(); + await testDeflateLargeData(); + await testDeflateActuallyCompresses(); + + // Brotli + await testBrotliRoundTrip(); + await testBrotliLargeData(); + await testBrotliActuallyCompresses(); + + // Zstd + await testZstdRoundTrip(); + await testZstdLargeData(); + await testZstdActuallyCompresses(); + + // Binary data + await testBinaryRoundTripGzip(); + await testBinaryRoundTripDeflate(); + await testBinaryRoundTripBrotli(); + await testBinaryRoundTripZstd(); + + // Empty input + await testEmptyInputGzip(); + await testEmptyInputDeflate(); + await testEmptyInputBrotli(); + await testEmptyInputZstd(); + + // Chained + await testChainedGzipDeflate(); + + // Protocol + 
await testTransformProtocol(); + + // Cross-compat: new streams compress → zlib decompress + await testGzipCompatWithZlib(); + await testDeflateCompatWithZlib(); + await testBrotliCompatWithZlib(); + await testZstdCompatWithZlib(); + + // Reverse compat: zlib compress → new streams decompress + await testZlibGzipToNewStreams(); + await testZlibDeflateToNewStreams(); + await testZlibBrotliToNewStreams(); + await testZlibZstdToNewStreams(); +})().then(common.mustCall()); From 3da923b3201e80f36661994babaef95e0eb344d5 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Mon, 2 Mar 2026 18:46:28 -0800 Subject: [PATCH 02/42] stream: updates to stream/new impl Refactors the cancelation per updates in the design doc --- doc/api/fs.md | 25 +++- doc/api/stream_new.md | 53 +++++-- lib/internal/fs/promises.js | 32 +++- lib/internal/streams/new/broadcast.js | 120 ++++++++++++--- lib/internal/streams/new/consumers.js | 4 +- lib/internal/streams/new/pull.js | 102 ++++++++----- lib/internal/streams/new/push.js | 94 +++++++++--- lib/internal/streams/new/share.js | 5 +- lib/internal/streams/new/transform.js | 43 +++++- .../test-fs-promises-file-handle-pull.js | 2 +- .../test-fs-promises-file-handle-writer.js | 129 ++++++++++++---- test/parallel/test-stream-new-broadcast.js | 81 +++++++++- test/parallel/test-stream-new-pull.js | 6 +- test/parallel/test-stream-new-push.js | 139 +++++++++++++++++- 14 files changed, 680 insertions(+), 155 deletions(-) diff --git a/doc/api/fs.md b/doc/api/fs.md index 09a2c40f246280..7705de1dab43d5 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -928,11 +928,26 @@ added: REPLACEME * `start` {number} Byte offset to start writing at. **Default:** current position (append). * Returns: {Object} - * `write(chunk)` {Function} Returns {Promise\}. - * `writev(chunks)` {Function} Returns {Promise\}. Uses scatter/gather - I/O via a single `writev()` syscall. - * `end()` {Function} Returns {Promise\} total bytes written. 
- * `abort(reason)` {Function} Returns {Promise\}. + * `write(chunk[, options])` {Function} Returns {Promise\}. + * `chunk` {Buffer|TypedArray|DataView} + * `options` {Object} + * `signal` {AbortSignal} If the signal is already aborted, the write + rejects with `AbortError` without performing I/O. + * `writev(chunks[, options])` {Function} Returns {Promise\}. Uses + scatter/gather I/O via a single `writev()` syscall. + * `chunks` {Buffer\[]|TypedArray\[]|DataView\[]} + * `options` {Object} + * `signal` {AbortSignal} If the signal is already aborted, the write + rejects with `AbortError` without performing I/O. + * `end([options])` {Function} Returns {Promise\} total bytes written. + * `options` {Object} + * `signal` {AbortSignal} If the signal is already aborted, `end()` + rejects with `AbortError` and the writer remains open. + * `fail(reason)` {Function} Returns {Promise\}. Puts the writer + into a terminal error state. + * `failSync(reason)` {Function} Returns {boolean}. Synchronous best-effort + cleanup. Marks the writer as closed so subsequent writes fail immediately. + Cannot honor `autoClose` (requires async I/O). Return a [`node:stream/new`][] writer backed by this file handle. diff --git a/doc/api/stream_new.md b/doc/api/stream_new.md index 0217aa372ad67b..afd51db4811152 100644 --- a/doc/api/stream_new.md +++ b/doc/api/stream_new.md @@ -111,14 +111,22 @@ async function run() { Transforms come in two forms: -* **Stateless** -- a function `(chunks) => result` called once per batch. - Receives `Uint8Array[]` (or `null` as the flush signal). Returns - `Uint8Array[]`, `null`, or an iterable of chunks. +* **Stateless** -- a function `(chunks, options) => result` called once per + batch. Receives `Uint8Array[]` (or `null` as the flush signal) and an + `options` object. Returns `Uint8Array[]`, `null`, or an iterable of chunks. 
-* **Stateful** -- an object `{ transform(source) }` where `transform` is a - generator (sync or async) that receives the entire upstream iterable and - yields output. This form is used for compression, encryption, and any - transform that needs to buffer across batches. +* **Stateful** -- an object `{ transform(source, options) }` where `transform` + is a generator (sync or async) that receives the entire upstream iterable + and an `options` object, and yields output. This form is used for + compression, encryption, and any transform that needs to buffer across + batches. + +Both forms receive an `options` parameter with the following property: + +* `options.signal` {AbortSignal} An AbortSignal that fires when the pipeline + is cancelled, encounters an error, or the consumer stops reading. Transforms + can check `signal.aborted` or listen for the `'abort'` event to perform + early cleanup. The flush signal (`null`) is sent after the source ends, giving transforms a chance to emit trailing data (e.g., compression footers). @@ -169,7 +177,7 @@ The API supports two models: A writer is any object with a `write(chunk)` method. Writers optionally support `writev(chunks)` for batch writes (mapped to scatter/gather I/O where -available), `end()` to signal completion, and `abort(reason)` to signal +available), `end()` to signal completion, and `fail(reason)` to signal failure. ## `require('node:stream/new')` @@ -269,7 +277,7 @@ added: REPLACEME * `signal` {AbortSignal} Abort the pipeline. * `preventClose` {boolean} If `true`, do not call `writer.end()` when the source ends. **Default:** `false`. - * `preventAbort` {boolean} If `true`, do not call `writer.abort()` on + * `preventFail` {boolean} If `true`, do not call `writer.fail()` on error. **Default:** `false`. * Returns: {Promise\} Total bytes written. @@ -316,7 +324,7 @@ added: REPLACEME * `writer` {Object} Destination with `write(chunk)` method. * `options` {Object} * `preventClose` {boolean} **Default:** `false`. 
- * `preventAbort` {boolean} **Default:** `false`. + * `preventFail` {boolean} **Default:** `false`. * Returns: {number} Total bytes written. Synchronous version of [`pipeTo()`][]. @@ -451,12 +459,18 @@ run().catch(console.error); The writer returned by `push()` has the following methods: -##### `writer.abort(reason)` +##### `writer.fail(reason)` * `reason` {Error} * Returns: {Promise\} -Abort the stream with an error. +Fail the stream with an error. + +##### `writer.failSync(reason)` + +* `reason` {Error} + +Synchronously fail the stream with an error. Does not return a promise. ##### `writer.desiredSize` @@ -465,15 +479,21 @@ Abort the stream with an error. The number of buffer slots available before the high water mark is reached. Returns `null` if the writer is closed or the consumer has disconnected. -##### `writer.end()` +##### `writer.end([options])` +* `options` {Object} + * `signal` {AbortSignal} Cancel just this operation. The signal cancels only + the pending `end()` call; it does not fail the writer itself. * Returns: {Promise\} Total bytes written. Signal that no more data will be written. -##### `writer.write(chunk)` +##### `writer.write(chunk[, options])` * `chunk` {Uint8Array|string} +* `options` {Object} + * `signal` {AbortSignal} Cancel just this write operation. The signal cancels + only the pending `write()` call; it does not fail the writer itself. * Returns: {Promise\} Write a chunk. The promise resolves when buffer space is available. @@ -486,9 +506,12 @@ Write a chunk. The promise resolves when buffer space is available. Synchronous write. Does not block; returns `false` if backpressure is active. -##### `writer.writev(chunks)` +##### `writer.writev(chunks[, options])` * `chunks` {Uint8Array\[]|string\[]} +* `options` {Object} + * `signal` {AbortSignal} Cancel just this write operation. The signal cancels + only the pending `writev()` call; it does not fail the writer itself. * Returns: {Promise\} Write multiple chunks as a single batch. 
diff --git a/lib/internal/fs/promises.js b/lib/internal/fs/promises.js index d258e85e631dc7..b78c121de737b9 100644 --- a/lib/internal/fs/promises.js +++ b/lib/internal/fs/promises.js @@ -458,7 +458,7 @@ class FileHandle extends EventEmitter { * autoClose?: boolean; * start?: number; * }} [options] - * @returns {{ write, writev, end, abort }} + * @returns {{ write, writev, end, fail, failSync }} */ writer(options) { if (this[kFd] === -1) @@ -558,21 +558,29 @@ class FileHandle extends EventEmitter { } return { - write(chunk) { + write(chunk, options) { if (closed) { return PromiseReject( new ERR_INVALID_STATE('The writer is closed')); } + if (options?.signal?.aborted) { + return PromiseReject( + new AbortError(undefined, { cause: options.signal.reason })); + } const position = pos; if (pos >= 0) pos += chunk.byteLength; return writeAll(chunk, 0, chunk.byteLength, position); }, - writev(chunks) { + writev(chunks, options) { if (closed) { return PromiseReject( new ERR_INVALID_STATE('The writer is closed')); } + if (options?.signal?.aborted) { + return PromiseReject( + new AbortError(undefined, { cause: options.signal.reason })); + } const position = pos; if (pos >= 0) { for (let i = 0; i < chunks.length; i++) { @@ -582,15 +590,29 @@ class FileHandle extends EventEmitter { return writevAll(chunks, position); }, - async end() { + async end(options) { + if (options?.signal?.aborted) { + throw new AbortError(undefined, { cause: options.signal.reason }); + } await cleanup(); return totalBytesWritten; }, - async abort(reason) { + async fail(reason) { await cleanup(); }, + failSync(reason) { + // Synchronous cleanup is best-effort for file handles. + // Mark as closed so subsequent writes fail immediately. + if (closed) return true; + closed = true; + handle[kLocked] = false; + handle[kUnref](); + // autoClose cannot be handled synchronously - skip it. 
+ return true; + }, + async [SymbolAsyncDispose]() { await cleanup(); }, diff --git a/lib/internal/streams/new/broadcast.js b/lib/internal/streams/new/broadcast.js index 3c4a02bae6b6c0..ce1a1a00a5e141 100644 --- a/lib/internal/streams/new/broadcast.js +++ b/lib/internal/streams/new/broadcast.js @@ -8,6 +8,7 @@ const { ArrayIsArray, + ArrayPrototypeIndexOf, ArrayPrototypeMap, ArrayPrototypePush, ArrayPrototypeShift, @@ -19,12 +20,15 @@ const { PromiseResolve, SafeSet, String, + Symbol, SymbolAsyncIterator, SymbolDispose, } = primordials; const { TextEncoder } = require('internal/encoding'); +const { lazyDOMException } = require('internal/util'); + const { codes: { ERR_INVALID_ARG_TYPE, @@ -48,6 +52,11 @@ const { const encoder = new TextEncoder(); +// Non-exported symbol for internal cancel notification from BroadcastImpl +// to BroadcastWriter. Because this symbol is not exported, external code +// cannot call it. +const kCancelWriter = Symbol('cancelWriter'); + // ============================================================================= // Argument Parsing // ============================================================================= @@ -89,6 +98,11 @@ class BroadcastImpl { this._cancelled = false; this._options = options; this._onBufferDrained = null; + this._writer = null; + } + + setWriter(writer) { + this._writer = writer; } get consumerCount() { @@ -129,6 +143,8 @@ class BroadcastImpl { return { async next() { if (state.detached) { + // If detached due to an error, throw the error + if (self._error) throw self._error; return { __proto__: null, done: true, value: undefined }; } @@ -183,11 +199,15 @@ class BroadcastImpl { cancel(reason) { if (this._cancelled) return; this._cancelled = true; + this._ended = true; // Prevents _abort() from redundantly iterating consumers if (reason) { this._error = reason; } + // Reject pending writes on the writer so the pump doesn't hang + this._writer?.[kCancelWriter](); + for (const consumer of this._consumers) { if 
(consumer.resolve) { if (reason) { @@ -261,13 +281,16 @@ class BroadcastImpl { this._error = reason; this._ended = true; + // Notify all waiting consumers and detach them for (const consumer of this._consumers) { if (consumer.reject) { consumer.reject(reason); consumer.resolve = null; consumer.reject = null; } + consumer.detached = true; } + this._consumers.clear(); } _getDesiredSize() { @@ -361,11 +384,18 @@ class BroadcastWriter { return this._broadcast._getDesiredSize(); } - async write(chunk) { - return this.writev([chunk]); + async write(chunk, options) { + return this.writev([chunk], options); } - async writev(chunks) { + async writev(chunks, options) { + const signal = options?.signal; + + // Check for pre-aborted signal + if (signal?.aborted) { + throw signal.reason ?? lazyDOMException('Aborted', 'AbortError'); + } + if (this._closed || this._aborted) { throw new ERR_INVALID_STATE('Writer is closed'); } @@ -389,17 +419,11 @@ class BroadcastWriter { 'Backpressure violation: too many pending writes. ' + 'Await each write() call to respect backpressure.'); } - return new Promise((resolve, reject) => { - ArrayPrototypePush(this._pendingWrites, - { chunk: converted, resolve, reject }); - }); + return this._createPendingWrite(converted, signal); } // 'block' policy - return new Promise((resolve, reject) => { - ArrayPrototypePush(this._pendingWrites, - { chunk: converted, resolve, reject }); - }); + return this._createPendingWrite(converted, signal); } writeSync(chunk) { @@ -428,7 +452,8 @@ class BroadcastWriter { return false; } - async end() { + // end() is synchronous internally - signal accepted for interface compliance. + async end(options) { if (this._closed) return this._totalBytes; this._closed = true; this._broadcast._end(); @@ -444,27 +469,70 @@ class BroadcastWriter { return this._totalBytes; } - async abort(reason) { + async fail(reason) { if (this._aborted) return; this._aborted = true; this._closed = true; - const error = reason ?? 
new ERR_INVALID_STATE('Aborted'); + const error = reason ?? new ERR_INVALID_STATE('Failed'); this._rejectPendingWrites(error); this._rejectPendingDrains(error); this._broadcast._abort(error); } - abortSync(reason) { + failSync(reason) { if (this._aborted) return true; this._aborted = true; this._closed = true; - const error = reason ?? new ERR_INVALID_STATE('Aborted'); + const error = reason ?? new ERR_INVALID_STATE('Failed'); this._rejectPendingWrites(error); this._rejectPendingDrains(error); this._broadcast._abort(error); return true; } + [kCancelWriter]() { + if (this._closed) return; + this._closed = true; + this._rejectPendingWrites( + lazyDOMException('Broadcast cancelled', 'AbortError')); + this._resolvePendingDrains(false); + } + + /** + * Create a pending write promise, optionally racing against a signal. + * If the signal fires, the entry is removed from pendingWrites and the + * promise rejects. Signal listeners are cleaned up on normal resolution. + */ + _createPendingWrite(chunk, signal) { + return new Promise((resolve, reject) => { + const entry = { chunk, resolve, reject }; + ArrayPrototypePush(this._pendingWrites, entry); + + if (!signal) return; + + const onAbort = () => { + // Remove from queue so it doesn't occupy a slot + const idx = ArrayPrototypeIndexOf(this._pendingWrites, entry); + if (idx !== -1) ArrayPrototypeSplice(this._pendingWrites, idx, 1); + reject(signal.reason ?? 
lazyDOMException('Aborted', 'AbortError')); + }; + + // Wrap resolve/reject to clean up signal listener + const origResolve = entry.resolve; + const origReject = entry.reject; + entry.resolve = function() { + signal.removeEventListener('abort', onAbort); + origResolve(); + }; + entry.reject = function(reason) { + signal.removeEventListener('abort', onAbort); + origReject(reason); + }; + + signal.addEventListener('abort', onAbort, { once: true }); + }); + } + _resolvePendingWrites() { while (this._pendingWrites.length > 0 && this._broadcast._canWrite()) { const pending = ArrayPrototypeShift(this._pendingWrites); @@ -523,6 +591,7 @@ function broadcast(options) { const broadcastImpl = new BroadcastImpl(opts); const writer = new BroadcastWriter(broadcastImpl); + broadcastImpl.setWriter(writer); if (opts.signal) { if (opts.signal.aborted) { @@ -554,25 +623,36 @@ const Broadcast = { } const result = broadcast(options); + const signal = options?.signal; (async () => { try { if (isAsyncIterable(input)) { for await (const chunks of input) { + if (signal?.aborted) { + throw signal.reason ?? + lazyDOMException('Aborted', 'AbortError'); + } if (ArrayIsArray(chunks)) { - await result.writer.writev(chunks); + await result.writer.writev( + chunks, signal ? { signal } : undefined); } } } else if (isSyncIterable(input)) { for (const chunks of input) { + if (signal?.aborted) { + throw signal.reason ?? + lazyDOMException('Aborted', 'AbortError'); + } if (ArrayIsArray(chunks)) { - await result.writer.writev(chunks); + await result.writer.writev( + chunks, signal ? { signal } : undefined); } } } - await result.writer.end(); + await result.writer.end(signal ? { signal } : undefined); } catch (error) { - await result.writer.abort( + await result.writer.fail( error instanceof Error ? 
error : new ERR_INVALID_ARG_TYPE('error', 'Error', String(error))); } })(); diff --git a/lib/internal/streams/new/consumers.js b/lib/internal/streams/new/consumers.js index b4ee67c88c779f..4bf6330905cd6e 100644 --- a/lib/internal/streams/new/consumers.js +++ b/lib/internal/streams/new/consumers.js @@ -340,8 +340,8 @@ async function array(source, options) { * @returns {Function} */ function tap(callback) { - return async (chunks) => { - await callback(chunks); + return async (chunks, options) => { + await callback(chunks, options); return chunks; }; } diff --git a/lib/internal/streams/new/pull.js b/lib/internal/streams/new/pull.js index 8f80ee19585784..a447284f2e7d96 100644 --- a/lib/internal/streams/new/pull.js +++ b/lib/internal/streams/new/pull.js @@ -28,6 +28,7 @@ const { } = require('internal/errors'); const { TextEncoder } = require('internal/encoding'); const { lazyDOMException } = require('internal/util'); +const { AbortController } = require('internal/abort_controller'); const { normalizeAsyncSource, @@ -340,9 +341,9 @@ function* createSyncPipeline(source, transforms) { * Apply a single stateless async transform to a source. * @yields {Uint8Array[]} */ -async function* applyStatelessAsyncTransform(source, transform) { +async function* applyStatelessAsyncTransform(source, transform, options) { for await (const chunks of source) { - const result = transform(chunks); + const result = transform(chunks, options); // Fast path: result is already Uint8Array[] (common case) if (result === null) continue; if (isUint8ArrayBatch(result)) { @@ -395,8 +396,8 @@ async function* applyStatelessAsyncTransform(source, transform) { * Apply a single stateful async transform to a source. 
* @yields {Uint8Array[]} */ -async function* applyStatefulAsyncTransform(source, transform) { - const output = transform(source); +async function* applyStatefulAsyncTransform(source, transform, options) { + const output = transform(source, options); for await (const item of output) { // Fast path: item is already a Uint8Array[] batch (e.g. compression transforms) if (isUint8ArrayBatch(item)) { @@ -473,48 +474,75 @@ async function* createAsyncPipeline(source, transforms, signal) { return; } + // Create internal controller for transform cancellation. + // Note: if signal was already aborted, we threw above - no need to check here. + const controller = new AbortController(); + let abortHandler; + if (signal) { + abortHandler = () => { + try { + controller.abort(signal.reason ?? + lazyDOMException('Aborted', 'AbortError')); + } catch { + // Transform signal listeners may throw - suppress + } + }; + signal.addEventListener('abort', abortHandler, { once: true }); + } + // Add flush signal let current = withFlushSignalAsync(normalized); - // Track stateful transforms for abort handling - const statefulTransforms = []; - - try { - // Apply transforms - Object = stateful, function = stateless - for (let i = 0; i < transforms.length; i++) { - const transform = transforms[i]; - if (isTransformObject(transform)) { - ArrayPrototypePush(statefulTransforms, transform); - current = applyStatefulAsyncTransform(current, transform.transform); - } else { - current = applyStatelessAsyncTransform(current, transform); - } + // Apply transforms - each gets the controller's signal + // Object = stateful, function = stateless + for (let i = 0; i < transforms.length; i++) { + const transform = transforms[i]; + const options = { signal: controller.signal }; + if (isTransformObject(transform)) { + current = applyStatefulAsyncTransform(current, transform.transform, + options); + } else { + current = applyStatelessAsyncTransform(current, transform, options); } + } - // Yield results (filter out 
null from final output) + // Yield results (filter out null from final output) + let completed = false; + try { for await (const batch of current) { // Check for abort on each iteration - if (signal?.aborted) { - throw signal.reason ?? lazyDOMException('Aborted', 'AbortError'); + if (controller.signal.aborted) { + throw controller.signal.reason ?? + lazyDOMException('Aborted', 'AbortError'); } if (batch !== null) { yield batch; } } + completed = true; } catch (error) { - // Abort all stateful transforms - for (let i = 0; i < statefulTransforms.length; i++) { - const transformObj = statefulTransforms[i]; - if (transformObj.abort) { - try { - await transformObj.abort( - error instanceof Error ? error : new ERR_OPERATION_FAILED(String(error))); - } catch { - // Ignore abort errors - } + if (!controller.signal.aborted) { + try { + controller.abort( + error instanceof Error ? error : + new ERR_OPERATION_FAILED(String(error))); + } catch { + // Transform signal listeners may throw - suppress } } throw error; + } finally { + if (!completed && !controller.signal.aborted) { + try { + controller.abort(lazyDOMException('Aborted', 'AbortError')); + } catch { + // Transform signal listeners may throw - suppress + } + } + // Clean up user signal listener to prevent holding controller alive + if (signal && abortHandler) { + signal.removeEventListener('abort', abortHandler); + } } } @@ -594,8 +622,8 @@ function pipeToSync(source, ...args) { writer.end(); } } catch (error) { - if (!options?.preventAbort) { - writer.abort(error instanceof Error ? error : new ERR_OPERATION_FAILED(String(error))); + if (!options?.preventFail) { + writer.fail(error instanceof Error ? error : new ERR_OPERATION_FAILED(String(error))); } throw error; } @@ -631,7 +659,7 @@ async function pipeTo(source, ...args) { // Helper to write a batch efficiently const writeBatch = async (batch) => { if (hasWritev && batch.length > 1) { - await writer.writev(batch); + await writer.writev(batch, signal ? 
{ signal } : undefined); for (let i = 0; i < batch.length; i++) { totalBytes += batch[i].byteLength; } @@ -639,7 +667,7 @@ async function pipeTo(source, ...args) { const promises = []; for (let i = 0; i < batch.length; i++) { const chunk = batch[i]; - const result = writer.write(chunk); + const result = writer.write(chunk, signal ? { signal } : undefined); if (result !== undefined) { ArrayPrototypePush(promises, result); } @@ -689,11 +717,11 @@ async function pipeTo(source, ...args) { } if (!options?.preventClose) { - await writer.end(); + await writer.end(signal ? { signal } : undefined); } } catch (error) { - if (!options?.preventAbort) { - await writer.abort( + if (!options?.preventFail) { + await writer.fail( error instanceof Error ? error : new ERR_OPERATION_FAILED(String(error))); } throw error; diff --git a/lib/internal/streams/new/push.js b/lib/internal/streams/new/push.js index 0260cf10ba2600..1c2b900bf0a530 100644 --- a/lib/internal/streams/new/push.js +++ b/lib/internal/streams/new/push.js @@ -6,9 +6,11 @@ // with built-in backpressure. const { + ArrayPrototypeIndexOf, ArrayPrototypePush, ArrayPrototypeShift, ArrayPrototypeSlice, + ArrayPrototypeSplice, Error, MathMax, Promise, @@ -66,12 +68,12 @@ class PushQueue { if (this._signal) { if (this._signal.aborted) { - this.abort(this._signal.reason instanceof Error ? + this.fail(this._signal.reason instanceof Error ? this._signal.reason : lazyDOMException('Aborted', 'AbortError')); } else { this._abortHandler = () => { - this.abort(this._signal.reason instanceof Error ? + this.fail(this._signal.reason instanceof Error ? this._signal.reason : lazyDOMException('Aborted', 'AbortError')); }; @@ -151,8 +153,19 @@ class PushQueue { /** * Write chunks asynchronously. + * If signal is provided, a write blocked on backpressure will reject + * immediately when the signal fires. The cancelled write is removed from + * pendingWrites so it does not occupy a slot. 
The queue itself is NOT put + * into an error state - this is per-operation cancellation, not terminal + * failure. + * @returns {Promise} */ - async writeAsync(chunks) { + async writeAsync(chunks, signal) { + // Check for pre-aborted signal + if (signal?.aborted) { + throw signal.reason ?? lazyDOMException('Aborted', 'AbortError'); + } + if (this._writerState !== 'open') { throw new ERR_INVALID_STATE('Writer is closed'); } @@ -175,21 +188,50 @@ class PushQueue { 'Backpressure violation: too many pending writes. ' + 'Await each write() call to respect backpressure.'); } - return new Promise((resolve, reject) => { - ArrayPrototypePush(this._pendingWrites, - { chunks, resolve, reject }); - }); + return this._createPendingWrite(chunks, signal); case 'block': - return new Promise((resolve, reject) => { - ArrayPrototypePush(this._pendingWrites, - { chunks, resolve, reject }); - }); + return this._createPendingWrite(chunks, signal); default: throw new ERR_INVALID_STATE( 'Unexpected: writeSync should have handled non-strict policy'); } } + /** + * Create a pending write promise, optionally racing against a signal. + * If the signal fires, the entry is removed from pendingWrites and the + * promise rejects. Signal listeners are cleaned up on normal resolution. + */ + _createPendingWrite(chunks, signal) { + return new Promise((resolve, reject) => { + const entry = { chunks, resolve, reject }; + ArrayPrototypePush(this._pendingWrites, entry); + + if (!signal) return; + + const onAbort = () => { + // Remove from queue so it doesn't occupy a slot + const idx = ArrayPrototypeIndexOf(this._pendingWrites, entry); + if (idx !== -1) ArrayPrototypeSplice(this._pendingWrites, idx, 1); + reject(signal.reason ?? 
lazyDOMException('Aborted', 'AbortError')); + }; + + // Wrap resolve/reject to clean up signal listener + const origResolve = entry.resolve; + const origReject = entry.reject; + entry.resolve = function() { + signal.removeEventListener('abort', onAbort); + origResolve(); + }; + entry.reject = function(reason) { + signal.removeEventListener('abort', onAbort); + origReject(reason); + }; + + signal.addEventListener('abort', onAbort, { once: true }); + }); + } + /** * Signal end of stream. Returns total bytes written. * @returns {number} @@ -208,13 +250,13 @@ class PushQueue { } /** - * Signal error/abort. + * Put queue into terminal error state. */ - abort(reason) { + fail(reason) { if (this._writerState === 'errored') return; this._writerState = 'errored'; - this._error = reason ?? new ERR_INVALID_STATE('Aborted'); + this._error = reason ?? new ERR_INVALID_STATE('Failed'); this._cleanup(); this._rejectPendingReads(this._error); this._rejectPendingWrites(this._error); @@ -264,6 +306,8 @@ class PushQueue { this._consumerState = 'returned'; this._cleanup(); this._rejectPendingWrites(new ERR_INVALID_STATE('Stream closed by consumer')); + // Resolve pending drains with false - no more data will be consumed + this._resolvePendingDrains(false); } consumerThrow(error) { @@ -272,6 +316,8 @@ class PushQueue { this._error = error; this._cleanup(); this._rejectPendingWrites(error); + // Reject pending drains - the consumer errored + this._rejectPendingDrains(error); } // =========================================================================== @@ -385,17 +431,17 @@ class PushWriter { return this._queue.desiredSize; } - async write(chunk) { + async write(chunk, options) { const bytes = toUint8Array(chunk); - await this._queue.writeAsync([bytes]); + await this._queue.writeAsync([bytes], options?.signal); } - async writev(chunks) { + async writev(chunks, options) { const bytes = []; for (let i = 0; i < chunks.length; i++) { ArrayPrototypePush(bytes, toUint8Array(chunks[i])); } - 
await this._queue.writeAsync(bytes); + await this._queue.writeAsync(bytes, options?.signal); } writeSync(chunk) { @@ -413,7 +459,9 @@ class PushWriter { return this._queue.writeSync(bytes); } - async end() { + async end(options) { + // end() on PushQueue is synchronous (sets state, resolves pending reads). + // Signal accepted for interface compliance but there is nothing to cancel. return this._queue.end(); } @@ -421,12 +469,12 @@ class PushWriter { return this._queue.end(); } - async abort(reason) { - this._queue.abort(reason); + async fail(reason) { + this._queue.fail(reason); } - abortSync(reason) { - this._queue.abort(reason); + failSync(reason) { + this._queue.fail(reason); return true; } } diff --git a/lib/internal/streams/new/share.js b/lib/internal/streams/new/share.js index f7f4353393a36e..987073dd5cbfb9 100644 --- a/lib/internal/streams/new/share.js +++ b/lib/internal/streams/new/share.js @@ -205,7 +205,7 @@ class ShareImpl { if (reason) { consumer.reject?.(reason); } else { - consumer.resolve({ done: true, value: undefined }); + consumer.resolve({ __proto__: null, done: true, value: undefined }); } consumer.resolve = null; consumer.reject = null; @@ -425,7 +425,8 @@ class SyncShareImpl { self._buffer.length); case 'block': throw new ERR_OUT_OF_RANGE( - 'buffer size', `<= ${self._options.highWaterMark}`, + 'buffer size', `<= ${self._options.highWaterMark} ` + + '(blocking not available in sync context)', self._buffer.length); case 'drop-oldest': ArrayPrototypeShift(self._buffer); diff --git a/lib/internal/streams/new/transform.js b/lib/internal/streams/new/transform.js index c9bf3ad845866b..e073e343998ef2 100644 --- a/lib/internal/streams/new/transform.js +++ b/lib/internal/streams/new/transform.js @@ -24,6 +24,7 @@ const { Buffer } = require('buffer'); const { genericNodeError, } = require('internal/errors'); +const { lazyDOMException } = require('internal/util'); const binding = internalBinding('zlib'); const constants = 
internalBinding('constants').zlib; @@ -183,7 +184,15 @@ function createZstdHandle(mode, options, processCallback, onError) { // --------------------------------------------------------------------------- function makeZlibTransform(createHandleFn, processFlag, finishFlag) { return { - transform: async function*(source) { + transform: async function*(source, options) { + const { signal } = options; + + // Fail fast if already aborted - don't allocate a native handle. + if (signal.aborted) { + throw signal.reason ?? + lazyDOMException('The operation was aborted', 'AbortError'); + } + // ---- Per-invocation state shared with the write callback ---- let outBuf; let outOffset = 0; @@ -219,7 +228,9 @@ function makeZlibTransform(createHandleFn, processFlag, finishFlag) { } if (availOut === 0) { - // Engine has more output - loop on the threadpool. + // Engine has more output - but if aborted, don't loop. + if (!resolveWrite) return; + const consumed = writeAvailIn - availInAfter; writeInOff += consumed; writeAvailIn = availInAfter; @@ -236,7 +247,7 @@ function makeZlibTransform(createHandleFn, processFlag, finishFlag) { const resolve = resolveWrite; resolveWrite = undefined; rejectWrite = undefined; - resolve(); + if (resolve) resolve(); } // onError: called by C++ when the engine encounters an error. @@ -258,6 +269,19 @@ function makeZlibTransform(createHandleFn, processFlag, finishFlag) { chunkSize = result.chunkSize; outBuf = Buffer.allocUnsafe(chunkSize); + // Abort handler: reject any in-flight threadpool operation so the + // generator doesn't block waiting for compression to finish. + const onAbort = () => { + const reject = rejectWrite; + resolveWrite = undefined; + rejectWrite = undefined; + if (reject) { + reject(signal.reason ?? + lazyDOMException('The operation was aborted', 'AbortError')); + } + }; + signal.addEventListener('abort', onAbort, { once: true }); + // Dispatch input to the threadpool and return a promise. 
function processInputAsync(input, flushFlag) { return new Promise((resolve, reject) => { @@ -297,17 +321,22 @@ function makeZlibTransform(createHandleFn, processFlag, finishFlag) { let finalized = false; + const iter = source[SymbolAsyncIterator](); try { // Manually iterate the source so we can pre-read: calling // iter.next() starts the upstream read + transform on libuv // before we await the current compression on the threadpool. - const iter = source[SymbolAsyncIterator](); let nextResult = iter.next(); while (true) { const { value: chunks, done } = await nextResult; if (done) break; + if (signal.aborted) { + throw signal.reason ?? + lazyDOMException('The operation was aborted', 'AbortError'); + } + if (chunks === null) { // Flush signal - finalize the engine. if (!finalized) { @@ -340,7 +369,7 @@ function makeZlibTransform(createHandleFn, processFlag, finishFlag) { } // Source ended - finalize if not already done by a null signal. - if (!finalized) { + if (!finalized && !signal.aborted) { finalized = true; await processInputAsync(kEmpty, finishFlag); while (pending.length > 0) { @@ -348,7 +377,11 @@ function makeZlibTransform(createHandleFn, processFlag, finishFlag) { } } } finally { + signal.removeEventListener('abort', onAbort); handle.close(); + // Close the upstream iterator so its finally blocks run promptly + // rather than waiting for GC. + try { await iter.return?.(); } catch { /* Intentional no-op. 
*/ } } }, }; diff --git a/test/parallel/test-fs-promises-file-handle-pull.js b/test/parallel/test-fs-promises-file-handle-pull.js index 6b206ee3fae2ab..60a2160cbae4b9 100644 --- a/test/parallel/test-fs-promises-file-handle-pull.js +++ b/test/parallel/test-fs-promises-file-handle-pull.js @@ -69,7 +69,7 @@ async function testPullEmptyFile() { async function testPullLargeFile() { const filePath = path.join(tmpDir, 'pull-large.bin'); - // Write 64KB — enough for multiple 16KB read chunks + // Write 64KB - enough for multiple 16KB read chunks const size = 64 * 1024; const buf = Buffer.alloc(size, 0x42); fs.writeFileSync(filePath, buf); diff --git a/test/parallel/test-fs-promises-file-handle-writer.js b/test/parallel/test-fs-promises-file-handle-writer.js index 5a0e2914a3e52c..84168e54045a8d 100644 --- a/test/parallel/test-fs-promises-file-handle-writer.js +++ b/test/parallel/test-fs-promises-file-handle-writer.js @@ -95,7 +95,7 @@ async function testEndReturnsTotalBytes() { } // ============================================================================= -// autoClose: true — handle closed after end() +// autoClose: true - handle closed after end() // ============================================================================= async function testAutoCloseOnEnd() { @@ -111,24 +111,24 @@ async function testAutoCloseOnEnd() { } // ============================================================================= -// autoClose: true — handle closed after abort() +// autoClose: true - handle closed after fail() // ============================================================================= -async function testAutoCloseOnAbort() { - const filePath = path.join(tmpDir, 'writer-autoclose-abort.txt'); +async function testAutoCloseOnFail() { + const filePath = path.join(tmpDir, 'writer-autoclose-fail.txt'); const fh = await open(filePath, 'w'); const w = fh.writer({ autoClose: true }); await w.write(Buffer.from('partial')); - await w.abort(new Error('test abort')); + await 
w.fail(new Error('test fail')); // Handle should be closed await assert.rejects(fh.stat(), { code: 'EBADF' }); - // Partial data should still be on disk (abort doesn't truncate) + // Partial data should still be on disk (fail doesn't truncate) assert.strictEqual(fs.readFileSync(filePath, 'utf8'), 'partial'); } // ============================================================================= -// start option — write at specified offset +// start option - write at specified offset // ============================================================================= async function testStartOption() { @@ -146,7 +146,7 @@ async function testStartOption() { } // ============================================================================= -// start option — sequential writes advance position +// start option - sequential writes advance position // ============================================================================= async function testStartSequentialPosition() { @@ -165,7 +165,7 @@ async function testStartSequentialPosition() { } // ============================================================================= -// Locked state — can't create second writer while active +// Locked state - can't create second writer while active // ============================================================================= async function testLockedState() { @@ -189,7 +189,7 @@ async function testLockedState() { } // ============================================================================= -// Unlock after end — handle reusable +// Unlock after end - handle reusable // ============================================================================= async function testUnlockAfterEnd() { @@ -200,7 +200,7 @@ async function testUnlockAfterEnd() { await w1.write(Buffer.from('first')); await w1.end(); - // Should work — handle is unlocked + // Should work - handle is unlocked const w2 = fh.writer(); await w2.write(Buffer.from(' second')); await w2.end(); @@ -210,31 +210,31 @@ async function 
testUnlockAfterEnd() { } // ============================================================================= -// Unlock after abort — handle reusable +// Unlock after fail - handle reusable // ============================================================================= -async function testUnlockAfterAbort() { - const filePath = path.join(tmpDir, 'writer-unlock-abort.txt'); +async function testUnlockAfterFail() { + const filePath = path.join(tmpDir, 'writer-unlock-fail.txt'); const fh = await open(filePath, 'w'); const w1 = fh.writer(); - await w1.write(Buffer.from('aborted')); - await w1.abort(new Error('test')); + await w1.write(Buffer.from('failed')); + await w1.fail(new Error('test')); - // Should work — handle is unlocked + // Should work - handle is unlocked const w2 = fh.writer(); await w2.write(Buffer.from('recovered')); await w2.end(); await fh.close(); - // 'recovered' is appended after 'aborted' at current file offset + // 'recovered' is appended after 'failed' at current file offset const content = fs.readFileSync(filePath, 'utf8'); - assert.ok(content.startsWith('aborted')); + assert.ok(content.startsWith('failed')); assert.ok(content.includes('recovered')); } // ============================================================================= -// Write after end/abort rejects +// Write after end/fail rejects // ============================================================================= async function testWriteAfterEndRejects() { @@ -257,7 +257,7 @@ async function testWriteAfterEndRejects() { } // ============================================================================= -// Closed handle — writer() throws +// Closed handle - writer() throws // ============================================================================= async function testClosedHandle() { @@ -272,7 +272,7 @@ async function testClosedHandle() { } // ============================================================================= -// pipeTo() integration — pipe source through writer +// 
pipeTo() integration - pipe source through writer // ============================================================================= async function testPipeToIntegration() { @@ -295,7 +295,7 @@ async function testPipeToIntegration() { } // ============================================================================= -// pipeTo() with transforms — uppercase through writer +// pipeTo() with transforms - uppercase through writer // ============================================================================= async function testPipeToWithTransform() { @@ -365,7 +365,7 @@ async function testCompressRoundTrip() { } // ============================================================================= -// Large file write — write 1MB in 64KB chunks +// Large file write - write 1MB in 64KB chunks // ============================================================================= async function testLargeFileWrite() { @@ -399,7 +399,7 @@ async function testLargeFileWrite() { } // ============================================================================= -// Symbol.asyncDispose — await using +// Symbol.asyncDispose - await using // ============================================================================= async function testAsyncDispose() { @@ -414,13 +414,13 @@ async function testAsyncDispose() { // Verify the handle is actually closed by trying to open a new one // (if the old one were still open with a write lock on some OSes, - // this could fail — but it should succeed). + // this could fail - but it should succeed). 
const fh2 = await open(filePath, 'r'); await fh2.close(); } // ============================================================================= -// Symbol.asyncDispose — cleanup on error (await using unwinds) +// Symbol.asyncDispose - cleanup on error (await using unwinds) // ============================================================================= async function testAsyncDisposeOnError() { @@ -446,6 +446,74 @@ async function testAsyncDisposeOnError() { `Expected 'after error' in ${JSON.stringify(content)}`); } +// ============================================================================= +// Pre-aborted signal rejects write/writev/end +// ============================================================================= + +async function testWriteWithAbortedSignalRejects() { + const filePath = path.join(tmpDir, 'writer-signal-write.txt'); + const fh = await open(filePath, 'w'); + const w = fh.writer(); + + const ac = new AbortController(); + ac.abort(); + + await assert.rejects( + w.write(Buffer.from('data'), { signal: ac.signal }), + { name: 'AbortError' }, + ); + + // Writer should still be usable after a signal rejection + await w.write(Buffer.from('ok')); + await w.end(); + await fh.close(); + + assert.strictEqual(fs.readFileSync(filePath, 'utf8'), 'ok'); +} + +async function testWritevWithAbortedSignalRejects() { + const filePath = path.join(tmpDir, 'writer-signal-writev.txt'); + const fh = await open(filePath, 'w'); + const w = fh.writer(); + + const ac = new AbortController(); + ac.abort(); + + await assert.rejects( + w.writev([Buffer.from('a'), Buffer.from('b')], { signal: ac.signal }), + { name: 'AbortError' }, + ); + + await w.writev([Buffer.from('ok')]); + await w.end(); + await fh.close(); + + assert.strictEqual(fs.readFileSync(filePath, 'utf8'), 'ok'); +} + +async function testEndWithAbortedSignalRejects() { + const filePath = path.join(tmpDir, 'writer-signal-end.txt'); + const fh = await open(filePath, 'w'); + const w = fh.writer(); + + await 
w.write(Buffer.from('data')); + + const ac = new AbortController(); + ac.abort(); + + await assert.rejects( + w.end({ signal: ac.signal }), + { name: 'AbortError' }, + ); + + // end() was rejected so writer is still open - end it cleanly + const totalBytes = await w.end(); + await fh.close(); + + assert.strictEqual(totalBytes, 4); + assert.strictEqual(fs.readFileSync(filePath, 'utf8'), 'data'); +} + // ============================================================================= // Run all tests // ============================================================================= @@ -456,12 +524,12 @@ Promise.all([ testMixedWriteAndWritev(), testEndReturnsTotalBytes(), testAutoCloseOnEnd(), - testAutoCloseOnAbort(), + testAutoCloseOnFail(), testStartOption(), testStartSequentialPosition(), testLockedState(), testUnlockAfterEnd(), - testUnlockAfterAbort(), + testUnlockAfterFail(), testWriteAfterEndRejects(), testClosedHandle(), testPipeToIntegration(), @@ -470,4 +538,7 @@ Promise.all([ testLargeFileWrite(), testAsyncDispose(), testAsyncDisposeOnError(), + testWriteWithAbortedSignalRejects(), + testWritevWithAbortedSignalRejects(), + testEndWithAbortedSignalRejects(), ]).then(common.mustCall()); diff --git a/test/parallel/test-stream-new-broadcast.js b/test/parallel/test-stream-new-broadcast.js index 6ea0b3858bb64d..a7445f856c674d 100644 --- a/test/parallel/test-stream-new-broadcast.js +++ b/test/parallel/test-stream-new-broadcast.js @@ -55,7 +55,7 @@ async function testConsumerCount() { assert.strictEqual(bc.consumerCount, 2); // Consume c1 to completion (it returns immediately since no data has been - // pushed and we haven't ended yet — but we'll cancel to detach) + // pushed and we haven't ended yet - but we'll cancel to detach) bc.cancel(); // After cancel, consumers are detached @@ -108,11 +108,11 @@ async function testWriterEnd() { assert.strictEqual(data, 'data'); } -async function testWriterAbort() { +async function testWriterFail() { const { writer, broadcast: 
bc } = broadcast(); const consumer = bc.push(); - await writer.abort(new Error('test error')); + await writer.fail(new Error('test error')); await assert.rejects( async () => { @@ -257,6 +257,77 @@ async function testAlreadyAbortedSignal() { assert.strictEqual(batches.length, 0); } +// ============================================================================= +// Broadcast.from() hang fix - cancel while write blocked on backpressure +// ============================================================================= + +async function testBroadcastFromCancelWhileBlocked() { + // Create a slow async source that blocks between yields + let sourceFinished = false; + async function* slowSource() { + yield [new TextEncoder().encode('chunk1')]; + // Simulate a long delay - the cancel should unblock this + await new Promise((resolve) => setTimeout(resolve, 10000)); + yield [new TextEncoder().encode('chunk2')]; + sourceFinished = true; + } + + const { broadcast: bc } = Broadcast.from(slowSource()); + const consumer = bc.push(); + + // Read the first chunk + const iter = consumer[Symbol.asyncIterator](); + const first = await iter.next(); + assert.strictEqual(first.done, false); + + // Cancel while the source is blocked waiting to yield the next chunk + bc.cancel(); + + // The iteration should complete (not hang) + const next = await iter.next(); + assert.strictEqual(next.done, true); + + // Source should NOT have finished (we cancelled before chunk2) + assert.strictEqual(sourceFinished, false); +} + +// ============================================================================= +// Writer fail detaches consumers +// ============================================================================= + +async function testFailDetachesConsumers() { + const { writer, broadcast: bc } = broadcast(); + const consumer1 = bc.push(); + const consumer2 = bc.push(); + + assert.strictEqual(bc.consumerCount, 2); + + // Write some data, then fail the writer + await writer.write('data'); + 
await writer.fail(new Error('writer failed')); + + // Both consumers should see the error + await assert.rejects( + async () => { + // eslint-disable-next-line no-unused-vars + for await (const _ of consumer1) { + assert.fail('Should not reach here'); + } + }, + { message: 'writer failed' }, + ); + + await assert.rejects( + async () => { + // eslint-disable-next-line no-unused-vars + for await (const _ of consumer2) { + assert.fail('Should not reach here'); + } + }, + { message: 'writer failed' }, + ); +} + Promise.all([ testBasicBroadcast(), testMultipleWrites(), @@ -264,7 +335,7 @@ Promise.all([ testWriteSync(), testWritevSync(), testWriterEnd(), - testWriterAbort(), + testWriterFail(), testDropOldest(), testDropNewest(), testCancelWithoutReason(), @@ -273,4 +344,6 @@ Promise.all([ testBroadcastFromMultipleConsumers(), testAbortSignal(), testAlreadyAbortedSignal(), + testBroadcastFromCancelWhileBlocked(), + testFailDetachesConsumers(), ]).then(common.mustCall()); diff --git a/test/parallel/test-stream-new-pull.js b/test/parallel/test-stream-new-pull.js index 34db9b96c74e5c..e6ddaf46d99fb2 100644 --- a/test/parallel/test-stream-new-pull.js +++ b/test/parallel/test-stream-new-pull.js @@ -159,7 +159,7 @@ async function testPipeToSync() { const writer = { write(chunk) { written.push(chunk); }, end() { return written.length; }, - abort() {}, + fail() {}, }; const totalBytes = pipeToSync(source, writer); @@ -176,7 +176,7 @@ async function testPipeTo() { const writer = { async write(chunk) { written.push(chunk); }, async end() { return written.length; }, - async abort() {}, + async fail() {}, }; const totalBytes = await pipeTo(source, writer); @@ -190,7 +190,7 @@ async function testPipeToPreventClose() { const writer = { async write() {}, async end() { endCalled = true; }, - async abort() {}, + async fail() {}, }; await pipeTo(source, writer, { preventClose: true }); diff --git a/test/parallel/test-stream-new-push.js b/test/parallel/test-stream-new-push.js index 
2c0a2550ed510d..3475bfd6b6ab7c 100644 --- a/test/parallel/test-stream-new-push.js +++ b/test/parallel/test-stream-new-push.js @@ -114,10 +114,10 @@ async function testWriterEnd() { assert.strictEqual(batches.length, 0); } -async function testWriterAbort() { +async function testWriterFail() { const { writer, readable } = push(); - writer.abort(new Error('test abort')); + writer.fail(new Error('test fail')); await assert.rejects( async () => { @@ -126,7 +126,7 @@ async function testWriterAbort() { assert.fail('Should not reach here'); } }, - { message: 'test abort' }, + { message: 'test fail' }, ); } @@ -203,6 +203,132 @@ async function testPushWithTransforms() { assert.strictEqual(data, 'HELLO'); } +// ============================================================================= +// Per-operation signal tests +// ============================================================================= + +async function testWriteWithSignalRejects() { + const { writer, readable } = push({ highWaterMark: 1 }); + + // Fill the buffer so write will block + writer.writeSync('a'); + + const ac = new AbortController(); + const writePromise = writer.write('b', { signal: ac.signal }); + + // Signal fires while write is pending + ac.abort(); + + await assert.rejects(writePromise, (err) => err.name === 'AbortError'); + + // Clean up + writer.end(); + // eslint-disable-next-line no-unused-vars + for await (const _ of readable) { break; } +} + +async function testWriteWithPreAbortedSignal() { + const { writer, readable } = push({ highWaterMark: 1 }); + + const ac = new AbortController(); + ac.abort(); + + // Pre-aborted signal should reject immediately + await assert.rejects( + writer.write('data', { signal: ac.signal }), + (err) => err.name === 'AbortError', + ); + + // Writer should still be usable for other writes + writer.write('ok'); + writer.end(); + const data = await text(readable); + assert.strictEqual(data, 'ok'); +} + +async function testCancelledWriteRemovedFromQueue() { + const 
{ writer, readable } = push({ highWaterMark: 1 }); + + // Fill the buffer + writer.writeSync('first'); + + const ac = new AbortController(); + // This write should be queued since buffer is full + const cancelledWrite = writer.write('cancelled', { signal: ac.signal }); + + // Cancel it + ac.abort(); + await cancelledWrite.catch(() => {}); + + // Drain 'first' to make room for the replacement write + const iter = readable[Symbol.asyncIterator](); + await iter.next(); + + // The cancelled write should NOT occupy a pending slot. + // A new write should succeed now that the buffer has room. + await writer.write('second'); + writer.end(); + + const result = await iter.next(); + // 'second' should be the next (and only remaining) chunk + const decoder = new TextDecoder(); + let data = ''; + for (const chunk of result.value) { + data += decoder.decode(chunk, { stream: true }); + } + assert.strictEqual(data, 'second'); + await iter.return(); +} + +// ============================================================================= +// Ondrain cleanup on consumer termination +// ============================================================================= + +async function testOndrainResolvesFalseOnConsumerBreak() { + const { writer, readable } = push({ highWaterMark: 1 }); + + // Fill the buffer so desiredSize = 0 + writer.writeSync('a'); + + // Also queue a pending write so that reading one chunk + // doesn't clear backpressure (the pending write refills the slot) + const pendingWrite = writer.write('b'); + + // Start a drain wait - still at capacity + const drainPromise = ondrain(writer); + + // Consumer returns without draining enough to clear backpressure + const iter = readable[Symbol.asyncIterator](); + await iter.return(); + + // Ondrain should resolve false since the consumer terminated + const result = await drainPromise; + assert.strictEqual(result, false); + await pendingWrite.catch(() => {}); // Ignore write rejection +} + +async function 
testOndrainRejectsOnConsumerThrow() { + const { writer, readable } = push({ highWaterMark: 1 }); + + // Fill the buffer so desiredSize = 0 + writer.writeSync('a'); + + // Also queue a pending write so that reading one chunk + // doesn't clear backpressure (the pending write refills the slot) + const pendingWrite = writer.write('b'); + + // Start a drain wait - still at capacity + const drainPromise = ondrain(writer); + + // Consumer throws via iterator.throw() before draining enough + // to clear backpressure. The drain should reject. + const iter = readable[Symbol.asyncIterator](); + await iter.throw(new Error('consumer error')); + + await assert.rejects(drainPromise, /consumer error/); + await pendingWrite.catch(() => {}); // Ignore write rejection +} + Promise.all([ testBasicWriteRead(), testMultipleWrites(), @@ -211,10 +337,15 @@ Promise.all([ testDropOldest(), testDropNewest(), testWriterEnd(), - testWriterAbort(), + testWriterFail(), testConsumerBreak(), testAbortSignal(), testOndrain(), testOndainNonDrainable(), testPushWithTransforms(), + testWriteWithSignalRejects(), + testWriteWithPreAbortedSignal(), + testCancelledWriteRemovedFromQueue(), + testOndrainResolvesFalseOnConsumerBreak(), + testOndrainRejectsOnConsumerThrow(), ]).then(common.mustCall()); From d8a77438d400e575289199a9ac1e953b28311254 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Mon, 2 Mar 2026 20:33:27 -0800 Subject: [PATCH 03/42] stream: clarify backpressure details in stream_new --- doc/api/stream_new.md | 311 ++++++++++++++++++++++++-- lib/internal/streams/new/broadcast.js | 2 +- lib/internal/streams/new/push.js | 2 +- lib/internal/streams/new/share.js | 5 +- 4 files changed, 294 insertions(+), 26 deletions(-) diff --git a/doc/api/stream_new.md b/doc/api/stream_new.md index afd51db4811152..8cdc8381b0bf25 100644 --- a/doc/api/stream_new.md +++ b/doc/api/stream_new.md @@ -173,6 +173,227 @@ The API supports two models: pair with backpressure. 
The writer pushes data in; the readable is consumed as an async iterable. +### Backpressure + +Pull streams have natural backpressure -- the consumer drives the pace, so +the source is never read faster than the consumer can process. Push streams +need explicit backpressure because the producer and consumer run +independently. The `highWaterMark` and `backpressure` options on `push()`, +`broadcast()`, and `share()` control how this works. + +#### The two-buffer model + +Push streams use a two-part buffering system. Think of it like a bucket +(slots) being filled through a hose (pending writes), with a float valve +that closes when the bucket is full: + +```text + highWaterMark (e.g., 3) + | + Producer v + | +---------+ + v | | + [ write() ] ----+ +--->| slots |---> Consumer pulls + [ write() ] | | | (bucket)| for await (...) + [ write() ] v | +---------+ + +--------+ ^ + | pending| | + | writes | float valve + | (hose) | (backpressure) + +--------+ + ^ + | + 'strict' mode limits this too! +``` + +* **Slots (the bucket)** -- data ready for the consumer, capped at + `highWaterMark`. When the consumer pulls, it drains all slots at once + into a single batch. + +* **Pending writes (the hose)** -- writes waiting for slot space. After + the consumer drains, pending writes are promoted into the now-empty + slots and their promises resolve. + +How each policy uses these buffers: + +| Policy | Slots limit | Pending writes limit | +|--------|-------------|---------------------| +| `'strict'` | `highWaterMark` | `highWaterMark` | +| `'block'` | `highWaterMark` | Unbounded | +| `'drop-oldest'` | `highWaterMark` | N/A (never waits) | +| `'drop-newest'` | `highWaterMark` | N/A (never waits) | + +#### Strict (default) + +Strict mode catches "fire-and-forget" patterns where the producer calls +`write()` without awaiting, which would cause unbounded memory growth. +It limits both the slots buffer and the pending writes queue to +`highWaterMark`. 
+ +If you properly await each write, you can only ever have one pending +write at a time (yours), so you never hit the pending writes limit. +Unawaited writes accumulate in the pending queue and throw once it +overflows: + +```mjs +import { push, text } from 'node:stream/new'; + +const { writer, readable } = push({ highWaterMark: 16 }); + +// Consumer must run concurrently -- without it, the first write +// that fills the buffer blocks the producer forever. +const consuming = text(readable); + +// GOOD: awaited writes. The producer waits for the consumer to +// make room when the buffer is full. +for (const item of dataset) { + await writer.write(item); +} +await writer.end(); +console.log(await consuming); +``` + +```cjs +const { push, text } = require('node:stream/new'); + +async function run() { + const { writer, readable } = push({ highWaterMark: 16 }); + + // Consumer must run concurrently -- without it, the first write + // that fills the buffer blocks the producer forever. + const consuming = text(readable); + + // GOOD: awaited writes. The producer waits for the consumer to + // make room when the buffer is full. + for (const item of dataset) { + await writer.write(item); + } + await writer.end(); + console.log(await consuming); +} + +run().catch(console.error); +``` + +Forgetting to `await` will eventually throw: + +```js +// BAD: fire-and-forget. Strict mode throws once both buffers fill. +for (const item of dataset) { + writer.write(item); // Not awaited -- queues without bound +} +// --> throws "Backpressure violation: too many pending writes" +``` + +This is the default policy because it catches the exact class of bug +that push streams exist to prevent. + +#### Block + +Block mode caps slots at `highWaterMark` but places no limit on the +pending writes queue. Awaited writes block until the consumer makes room, +just like strict mode. 
The difference is that unawaited writes silently +queue forever instead of throwing -- a potential memory leak if the +producer forgets to `await`. + +This is the mode that existing Node.js classic streams and Web Streams +default to. Use it when you control the producer and know it awaits +properly, or when migrating code from those APIs. + +```mjs +import { push, text } from 'node:stream/new'; + +const { writer, readable } = push({ + highWaterMark: 16, + backpressure: 'block', +}); + +const consuming = text(readable); + +// Safe -- awaited writes block until the consumer reads. +for (const item of dataset) { + await writer.write(item); +} +await writer.end(); +console.log(await consuming); +``` + +```cjs +const { push, text } = require('node:stream/new'); + +async function run() { + const { writer, readable } = push({ + highWaterMark: 16, + backpressure: 'block', + }); + + const consuming = text(readable); + + // Safe -- awaited writes block until the consumer reads. + for (const item of dataset) { + await writer.write(item); + } + await writer.end(); + console.log(await consuming); +} + +run().catch(console.error); +``` + +#### Drop-oldest + +Writes never wait. When the slots buffer is full, the oldest buffered +chunk is evicted to make room for the incoming write. The consumer +always sees the most recent data. Useful for live feeds, telemetry, or +any scenario where stale data is less valuable than current data. + +```mjs +import { push } from 'node:stream/new'; + +// Keep only the 5 most recent readings +const { writer, readable } = push({ + highWaterMark: 5, + backpressure: 'drop-oldest', +}); +``` + +```cjs +const { push } = require('node:stream/new'); + +// Keep only the 5 most recent readings +const { writer, readable } = push({ + highWaterMark: 5, + backpressure: 'drop-oldest', +}); +``` + +#### Drop-newest + +Writes never wait. When the slots buffer is full, the incoming write is +silently discarded. 
The consumer processes what is already buffered +without being overwhelmed by new data. Useful for rate-limiting or +shedding load under pressure. + +```mjs +import { push } from 'node:stream/new'; + +// Accept up to 10 buffered items; discard anything beyond that +const { writer, readable } = push({ + highWaterMark: 10, + backpressure: 'drop-newest', +}); +``` + +```cjs +const { push } = require('node:stream/new'); + +// Accept up to 10 buffered items; discard anything beyond that +const { writer, readable } = push({ + highWaterMark: 10, + backpressure: 'drop-newest', +}); +``` + ### Writers A writer is any object with a `write(chunk)` method. Writers optionally @@ -418,7 +639,8 @@ added: REPLACEME readable side. * `options` {Object} * `highWaterMark` {number} Maximum number of buffered slots before - backpressure is applied. **Default:** `1`. + backpressure is applied. Must be >= 1; values below 1 are clamped to 1. + **Default:** `1`. * `backpressure` {string} Backpressure policy: `'strict'`, `'block'`, `'drop-oldest'`, or `'drop-newest'`. **Default:** `'strict'`. * `signal` {AbortSignal} Abort the stream. @@ -433,11 +655,17 @@ readable side is consumed as an async iterable. import { push, text } from 'node:stream/new'; const { writer, readable } = push(); -writer.write('hello'); -writer.write(' world'); -writer.end(); + +// Producer and consumer must run concurrently. With strict backpressure +// (the default), awaited writes block until the consumer reads. +const producing = (async () => { + await writer.write('hello'); + await writer.write(' world'); + await writer.end(); +})(); console.log(await text(readable)); // 'hello world' +await producing; ``` ```cjs @@ -445,11 +673,17 @@ const { push, text } = require('node:stream/new'); async function run() { const { writer, readable } = push(); - writer.write('hello'); - writer.write(' world'); - writer.end(); + + // Producer and consumer must run concurrently. 
With strict backpressure + // (the default), awaited writes block until the consumer reads. + const producing = (async () => { + await writer.write('hello'); + await writer.write(' world'); + await writer.end(); + })(); console.log(await text(readable)); // 'hello world' + await producing; } run().catch(console.error); @@ -718,26 +952,42 @@ resolves to `true` when the writer can accept more data, or `null` if the object does not implement the drainable protocol. ```mjs -import { push, ondrain } from 'node:stream/new'; +import { push, ondrain, text } from 'node:stream/new'; const { writer, readable } = push({ highWaterMark: 2 }); writer.writeSync('a'); writer.writeSync('b'); +// Start consuming so the buffer can actually drain +const consuming = text(readable); + // Buffer is full -- wait for drain const canWrite = await ondrain(writer); +if (canWrite) { + await writer.write('c'); +} +await writer.end(); +await consuming; ``` ```cjs -const { push, ondrain } = require('node:stream/new'); +const { push, ondrain, text } = require('node:stream/new'); async function run() { const { writer, readable } = push({ highWaterMark: 2 }); writer.writeSync('a'); writer.writeSync('b'); + // Start consuming so the buffer can actually drain + const consuming = text(readable); + // Buffer is full -- wait for drain const canWrite = await ondrain(writer); + if (canWrite) { + await writer.write('c'); + } + await writer.end(); + await consuming; } run().catch(console.error); @@ -799,7 +1049,8 @@ added: REPLACEME --> * `options` {Object} - * `highWaterMark` {number} Buffer size in slots. **Default:** `16`. + * `highWaterMark` {number} Buffer size in slots. Must be >= 1; values + below 1 are clamped to 1. **Default:** `16`. * `backpressure` {string} `'strict'` or `'block'`. **Default:** `'strict'`. 
* `signal` {AbortSignal} * Returns: {Object} @@ -815,14 +1066,21 @@ import { broadcast, text } from 'node:stream/new'; const { writer, broadcast: bc } = broadcast(); +// Create consumers before writing const c1 = bc.push(); // Consumer 1 const c2 = bc.push(); // Consumer 2 -writer.write('hello'); -writer.end(); - -console.log(await text(c1)); // 'hello' -console.log(await text(c2)); // 'hello' +// Producer and consumers must run concurrently. Awaited writes +// block when the buffer fills until consumers read. +const producing = (async () => { + await writer.write('hello'); + await writer.end(); +})(); + +const [r1, r2] = await Promise.all([text(c1), text(c2)]); +console.log(r1); // 'hello' +console.log(r2); // 'hello' +await producing; ``` ```cjs @@ -831,14 +1089,21 @@ const { broadcast, text } = require('node:stream/new'); async function run() { const { writer, broadcast: bc } = broadcast(); + // Create consumers before writing const c1 = bc.push(); // Consumer 1 const c2 = bc.push(); // Consumer 2 - writer.write('hello'); - writer.end(); - - console.log(await text(c1)); // 'hello' - console.log(await text(c2)); // 'hello' + // Producer and consumers must run concurrently. Awaited writes + // block when the buffer fills until consumers read. + const producing = (async () => { + await writer.write('hello'); + await writer.end(); + })(); + + const [r1, r2] = await Promise.all([text(c1), text(c2)]); + console.log(r1); // 'hello' + console.log(r2); // 'hello' + await producing; } run().catch(console.error); @@ -886,7 +1151,8 @@ added: REPLACEME * `source` {AsyncIterable} The source to share. * `options` {Object} - * `highWaterMark` {number} Buffer size. **Default:** `16`. + * `highWaterMark` {number} Buffer size. Must be >= 1; values below 1 + are clamped to 1. **Default:** `16`. * `backpressure` {string} `'strict'`, `'block'`, or `'drop-oldest'`. **Default:** `'strict'`. 
* Returns: {Share} @@ -962,7 +1228,8 @@ added: REPLACEME * `source` {Iterable} The sync source to share. * `options` {Object} - * `highWaterMark` {number} **Default:** `16`. + * `highWaterMark` {number} Must be >= 1; values below 1 are clamped + to 1. **Default:** `16`. * `backpressure` {string} **Default:** `'strict'`. * Returns: {SyncShare} diff --git a/lib/internal/streams/new/broadcast.js b/lib/internal/streams/new/broadcast.js index ce1a1a00a5e141..97bd98a9cbd15e 100644 --- a/lib/internal/streams/new/broadcast.js +++ b/lib/internal/streams/new/broadcast.js @@ -584,7 +584,7 @@ class BroadcastWriter { */ function broadcast(options) { const opts = { - highWaterMark: options?.highWaterMark ?? 16, + highWaterMark: MathMax(1, options?.highWaterMark ?? 16), backpressure: options?.backpressure ?? 'strict', signal: options?.signal, }; diff --git a/lib/internal/streams/new/push.js b/lib/internal/streams/new/push.js index 1c2b900bf0a530..d97717928a5d4a 100644 --- a/lib/internal/streams/new/push.js +++ b/lib/internal/streams/new/push.js @@ -61,7 +61,7 @@ class PushQueue { this._bytesWritten = 0; /** Configuration */ - this._highWaterMark = options.highWaterMark ?? 1; + this._highWaterMark = MathMax(1, options.highWaterMark ?? 1); this._backpressure = options.backpressure ?? 'strict'; this._signal = options.signal; this._abortHandler = undefined; diff --git a/lib/internal/streams/new/share.js b/lib/internal/streams/new/share.js index 987073dd5cbfb9..e368ea7ed58a7f 100644 --- a/lib/internal/streams/new/share.js +++ b/lib/internal/streams/new/share.js @@ -10,6 +10,7 @@ const { ArrayPrototypeShift, ArrayPrototypeSplice, Error, + MathMax, Promise, PromiseResolve, SafeSet, @@ -556,7 +557,7 @@ class SyncShareImpl { function share(source, options) { const opts = { - highWaterMark: options?.highWaterMark ?? 16, + highWaterMark: MathMax(1, options?.highWaterMark ?? 16), backpressure: options?.backpressure ?? 
'strict', signal: options?.signal, }; @@ -578,7 +579,7 @@ function share(source, options) { function shareSync(source, options) { const opts = { - highWaterMark: options?.highWaterMark ?? 16, + highWaterMark: MathMax(1, options?.highWaterMark ?? 16), backpressure: options?.backpressure ?? 'strict', }; From 9803d112156b46fbc1f54e88c900cbb823c904c3 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 3 Mar 2026 06:04:38 -0800 Subject: [PATCH 04/42] stream: fixup sync pull batching in stream/new --- lib/internal/streams/new/pull.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/internal/streams/new/pull.js b/lib/internal/streams/new/pull.js index a447284f2e7d96..057780c52b5f1a 100644 --- a/lib/internal/streams/new/pull.js +++ b/lib/internal/streams/new/pull.js @@ -285,14 +285,14 @@ function* applyStatelessSyncTransform(source, transform) { */ function* applyStatefulSyncTransform(source, transform) { const output = transform(source); - const batch = []; for (const item of output) { + const batch = []; for (const chunk of flattenTransformYieldSync(item)) { ArrayPrototypePush(batch, chunk); } - } - if (batch.length > 0) { - yield batch; + if (batch.length > 0) { + yield batch; + } } } From e428158e66ab26f050acd1bea436275ac3bafd4b Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 3 Mar 2026 06:06:25 -0800 Subject: [PATCH 05/42] stream: fixup stream_new.md linting --- doc/api/stream_new.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/doc/api/stream_new.md b/doc/api/stream_new.md index 8cdc8381b0bf25..b2671886f7fed7 100644 --- a/doc/api/stream_new.md +++ b/doc/api/stream_new.md @@ -216,12 +216,12 @@ that closes when the bucket is full: How each policy uses these buffers: -| Policy | Slots limit | Pending writes limit | -|--------|-------------|---------------------| -| `'strict'` | `highWaterMark` | `highWaterMark` | -| `'block'` | `highWaterMark` | Unbounded | -| `'drop-oldest'` | `highWaterMark` | 
N/A (never waits) | -| `'drop-newest'` | `highWaterMark` | N/A (never waits) | +| Policy | Slots limit | Pending writes limit | +| --------------- | --------------- | -------------------- | +| `'strict'` | `highWaterMark` | `highWaterMark` | +| `'block'` | `highWaterMark` | Unbounded | +| `'drop-oldest'` | `highWaterMark` | N/A (never waits) | +| `'drop-newest'` | `highWaterMark` | N/A (never waits) | #### Strict (default) From 104074af0aabb46e993c8e02ef3ace3e405fdaf3 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 3 Mar 2026 06:23:56 -0800 Subject: [PATCH 06/42] stream: fixup some perf bugs in stream/new --- lib/internal/streams/new/broadcast.js | 16 ++++++++++++---- lib/internal/streams/new/push.js | 23 +++++++++++++++++------ lib/internal/streams/new/utils.js | 14 ++++++++++++++ 3 files changed, 43 insertions(+), 10 deletions(-) diff --git a/lib/internal/streams/new/broadcast.js b/lib/internal/streams/new/broadcast.js index 97bd98a9cbd15e..d391c04325e207 100644 --- a/lib/internal/streams/new/broadcast.js +++ b/lib/internal/streams/new/broadcast.js @@ -50,6 +50,10 @@ const { pull: pullWithTransforms, } = require('internal/streams/new/pull'); +const { + allUint8Array, +} = require('internal/streams/new/utils'); + const encoder = new TextEncoder(); // Non-exported symbol for internal cancel notification from BroadcastImpl @@ -400,8 +404,10 @@ class BroadcastWriter { throw new ERR_INVALID_STATE('Writer is closed'); } - const converted = ArrayPrototypeMap(chunks, (c) => - (typeof c === 'string' ? encoder.encode(c) : c)); + const converted = allUint8Array(chunks) + ? ArrayPrototypeSlice(chunks) + : ArrayPrototypeMap(chunks, (c) => + (typeof c === 'string' ? 
encoder.encode(c) : c)); if (this._broadcast._write(converted)) { for (let i = 0; i < converted.length; i++) { @@ -441,8 +447,10 @@ class BroadcastWriter { writevSync(chunks) { if (this._closed || this._aborted) return false; if (!this._broadcast._canWrite()) return false; - const converted = ArrayPrototypeMap(chunks, (c) => - (typeof c === 'string' ? encoder.encode(c) : c)); + const converted = allUint8Array(chunks) + ? ArrayPrototypeSlice(chunks) + : ArrayPrototypeMap(chunks, (c) => + (typeof c === 'string' ? encoder.encode(c) : c)); if (this._broadcast._write(converted)) { for (let i = 0; i < converted.length; i++) { this._totalBytes += converted[i].byteLength; diff --git a/lib/internal/streams/new/push.js b/lib/internal/streams/new/push.js index d97717928a5d4a..37eaf229c68b90 100644 --- a/lib/internal/streams/new/push.js +++ b/lib/internal/streams/new/push.js @@ -31,6 +31,7 @@ const { const { toUint8Array, + allUint8Array, } = require('internal/streams/new/utils'); const { @@ -437,9 +438,14 @@ class PushWriter { } async writev(chunks, options) { - const bytes = []; - for (let i = 0; i < chunks.length; i++) { - ArrayPrototypePush(bytes, toUint8Array(chunks[i])); + let bytes; + if (allUint8Array(chunks)) { + bytes = ArrayPrototypeSlice(chunks); + } else { + bytes = []; + for (let i = 0; i < chunks.length; i++) { + ArrayPrototypePush(bytes, toUint8Array(chunks[i])); + } } await this._queue.writeAsync(bytes, options?.signal); } @@ -452,9 +458,14 @@ class PushWriter { writevSync(chunks) { if (!this._queue.canWriteSync()) return false; - const bytes = []; - for (let i = 0; i < chunks.length; i++) { - ArrayPrototypePush(bytes, toUint8Array(chunks[i])); + let bytes; + if (allUint8Array(chunks)) { + bytes = ArrayPrototypeSlice(chunks); + } else { + bytes = []; + for (let i = 0; i < chunks.length; i++) { + ArrayPrototypePush(bytes, toUint8Array(chunks[i])); + } } return this._queue.writeSync(bytes); } diff --git a/lib/internal/streams/new/utils.js 
b/lib/internal/streams/new/utils.js index 57ca93b85cd09c..ceacc7c904b463 100644 --- a/lib/internal/streams/new/utils.js +++ b/lib/internal/streams/new/utils.js @@ -26,6 +26,19 @@ function toUint8Array(chunk) { return chunk; } +/** + * Check if all chunks in an array are already Uint8Array (no strings). + * Short-circuits on the first string found. + * @param {Array} chunks + * @returns {boolean} + */ +function allUint8Array(chunks) { + for (let i = 0; i < chunks.length; i++) { + if (typeof chunks[i] === 'string') return false; + } + return true; +} + /** * Calculate total byte length of an array of chunks. * @param {Uint8Array[]} chunks @@ -100,6 +113,7 @@ function parsePullArgs(args) { module.exports = { toUint8Array, + allUint8Array, concatBytes, isPullOptions, parsePullArgs, From d393c19b093c9c7c0c4f568c927d9830fb7c082b Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 3 Mar 2026 06:44:22 -0800 Subject: [PATCH 07/42] stream: apply more perf improvements to stream/new --- lib/internal/streams/new/broadcast.js | 56 ++++++++++++++++++++------- lib/internal/streams/new/push.js | 36 +++++++++++++---- 2 files changed, 72 insertions(+), 20 deletions(-) diff --git a/lib/internal/streams/new/broadcast.js b/lib/internal/streams/new/broadcast.js index d391c04325e207..157f5e44fe7b07 100644 --- a/lib/internal/streams/new/broadcast.js +++ b/lib/internal/streams/new/broadcast.js @@ -56,6 +56,9 @@ const { const encoder = new TextEncoder(); +// Cached resolved promise to avoid allocating a new one on every sync fast-path. +const kResolvedPromise = PromiseResolve(); + // Non-exported symbol for internal cancel notification from BroadcastImpl // to BroadcastWriter. Because this symbol is not exported, external code // cannot call it. 
@@ -388,11 +391,37 @@ class BroadcastWriter { return this._broadcast._getDesiredSize(); } - async write(chunk, options) { + write(chunk, options) { + // Fast path: no signal, writer open, buffer has space + if (!options?.signal && !this._closed && !this._aborted && + this._broadcast._canWrite()) { + const converted = + typeof chunk === 'string' ? encoder.encode(chunk) : chunk; + this._broadcast._write([converted]); + this._totalBytes += converted.byteLength; + return kResolvedPromise; + } return this.writev([chunk], options); } - async writev(chunks, options) { + writev(chunks, options) { + // Fast path: no signal, writer open, buffer has space + if (!options?.signal && !this._closed && !this._aborted && + this._broadcast._canWrite()) { + const converted = allUint8Array(chunks) ? + ArrayPrototypeSlice(chunks) : + ArrayPrototypeMap(chunks, (c) => + (typeof c === 'string' ? encoder.encode(c) : c)); + this._broadcast._write(converted); + for (let i = 0; i < converted.length; i++) { + this._totalBytes += converted[i].byteLength; + } + return kResolvedPromise; + } + return this._writevSlow(chunks, options); + } + + async _writevSlow(chunks, options) { const signal = options?.signal; // Check for pre-aborted signal @@ -404,9 +433,9 @@ class BroadcastWriter { throw new ERR_INVALID_STATE('Writer is closed'); } - const converted = allUint8Array(chunks) - ? ArrayPrototypeSlice(chunks) - : ArrayPrototypeMap(chunks, (c) => + const converted = allUint8Array(chunks) ? + ArrayPrototypeSlice(chunks) : + ArrayPrototypeMap(chunks, (c) => (typeof c === 'string' ? encoder.encode(c) : c)); if (this._broadcast._write(converted)) { @@ -447,9 +476,9 @@ class BroadcastWriter { writevSync(chunks) { if (this._closed || this._aborted) return false; if (!this._broadcast._canWrite()) return false; - const converted = allUint8Array(chunks) - ? ArrayPrototypeSlice(chunks) - : ArrayPrototypeMap(chunks, (c) => + const converted = allUint8Array(chunks) ? 
+ ArrayPrototypeSlice(chunks) : + ArrayPrototypeMap(chunks, (c) => (typeof c === 'string' ? encoder.encode(c) : c)); if (this._broadcast._write(converted)) { for (let i = 0; i < converted.length; i++) { @@ -461,12 +490,12 @@ class BroadcastWriter { } // end() is synchronous internally - signal accepted for interface compliance. - async end(options) { - if (this._closed) return this._totalBytes; + end(options) { + if (this._closed) return PromiseResolve(this._totalBytes); this._closed = true; this._broadcast._end(); this._resolvePendingDrains(false); - return this._totalBytes; + return PromiseResolve(this._totalBytes); } endSync() { @@ -477,14 +506,15 @@ class BroadcastWriter { return this._totalBytes; } - async fail(reason) { - if (this._aborted) return; + fail(reason) { + if (this._aborted) return kResolvedPromise; this._aborted = true; this._closed = true; const error = reason ?? new ERR_INVALID_STATE('Failed'); this._rejectPendingWrites(error); this._rejectPendingDrains(error); this._broadcast._abort(error); + return kResolvedPromise; } failSync(reason) { diff --git a/lib/internal/streams/new/push.js b/lib/internal/streams/new/push.js index 37eaf229c68b90..83fdda21cb7a27 100644 --- a/lib/internal/streams/new/push.js +++ b/lib/internal/streams/new/push.js @@ -38,6 +38,9 @@ const { pull: pullWithTransforms, } = require('internal/streams/new/pull'); +// Cached resolved promise to avoid allocating a new one on every sync fast-path. 
+const kResolvedPromise = PromiseResolve(); + // ============================================================================= // PushQueue - Internal Queue with Chunk-Based Backpressure // ============================================================================= @@ -432,12 +435,30 @@ class PushWriter { return this._queue.desiredSize; } - async write(chunk, options) { + write(chunk, options) { + if (!options?.signal && this._queue.canWriteSync()) { + const bytes = toUint8Array(chunk); + this._queue.writeSync([bytes]); + return kResolvedPromise; + } const bytes = toUint8Array(chunk); - await this._queue.writeAsync([bytes], options?.signal); + return this._queue.writeAsync([bytes], options?.signal); } - async writev(chunks, options) { + writev(chunks, options) { + if (!options?.signal && this._queue.canWriteSync()) { + let bytes; + if (allUint8Array(chunks)) { + bytes = ArrayPrototypeSlice(chunks); + } else { + bytes = []; + for (let i = 0; i < chunks.length; i++) { + ArrayPrototypePush(bytes, toUint8Array(chunks[i])); + } + } + this._queue.writeSync(bytes); + return kResolvedPromise; + } let bytes; if (allUint8Array(chunks)) { bytes = ArrayPrototypeSlice(chunks); @@ -447,7 +468,7 @@ class PushWriter { ArrayPrototypePush(bytes, toUint8Array(chunks[i])); } } - await this._queue.writeAsync(bytes, options?.signal); + return this._queue.writeAsync(bytes, options?.signal); } writeSync(chunk) { @@ -470,18 +491,19 @@ class PushWriter { return this._queue.writeSync(bytes); } - async end(options) { + end(options) { // end() on PushQueue is synchronous (sets state, resolves pending reads). // Signal accepted for interface compliance but there is nothing to cancel. 
- return this._queue.end(); + return PromiseResolve(this._queue.end()); } endSync() { return this._queue.end(); } - async fail(reason) { + fail(reason) { this._queue.fail(reason); + return kResolvedPromise; } failSync(reason) { From 13cafd6aa16525a7b8b3e05938ab6bfa35872eac Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 3 Mar 2026 07:14:09 -0800 Subject: [PATCH 08/42] stream: apply more perf improvements to stream/new --- lib/internal/streams/new/broadcast.js | 37 +++---- lib/internal/streams/new/push.js | 51 ++++----- lib/internal/streams/new/ringbuffer.js | 147 +++++++++++++++++++++++++ lib/internal/streams/new/share.js | 30 ++--- 4 files changed, 205 insertions(+), 60 deletions(-) create mode 100644 lib/internal/streams/new/ringbuffer.js diff --git a/lib/internal/streams/new/broadcast.js b/lib/internal/streams/new/broadcast.js index 157f5e44fe7b07..338d43626cdfd3 100644 --- a/lib/internal/streams/new/broadcast.js +++ b/lib/internal/streams/new/broadcast.js @@ -8,12 +8,9 @@ const { ArrayIsArray, - ArrayPrototypeIndexOf, ArrayPrototypeMap, ArrayPrototypePush, - ArrayPrototypeShift, ArrayPrototypeSlice, - ArrayPrototypeSplice, Error, MathMax, Promise, @@ -54,6 +51,10 @@ const { allUint8Array, } = require('internal/streams/new/utils'); +const { + RingBuffer, +} = require('internal/streams/new/ringbuffer'); + const encoder = new TextEncoder(); // Cached resolved promise to avoid allocating a new one on every sync fast-path. 
@@ -97,7 +98,7 @@ function parsePushArgs(args) { class BroadcastImpl { constructor(options) { - this._buffer = []; + this._buffer = new RingBuffer(); this._bufferStart = 0; this._consumers = new SafeSet(); this._ended = false; @@ -157,7 +158,7 @@ class BroadcastImpl { const bufferIndex = state.cursor - self._bufferStart; if (bufferIndex < self._buffer.length) { - const chunk = self._buffer[bufferIndex]; + const chunk = self._buffer.get(bufferIndex); state.cursor++; self._tryTrimBuffer(); return { __proto__: null, done: false, value: chunk }; @@ -245,7 +246,7 @@ class BroadcastImpl { case 'block': return false; case 'drop-oldest': - ArrayPrototypeShift(this._buffer); + this._buffer.shift(); this._bufferStart++; for (const consumer of this._consumers) { if (consumer.cursor < this._bufferStart) { @@ -258,7 +259,7 @@ class BroadcastImpl { } } - ArrayPrototypePush(this._buffer, chunk); + this._buffer.push(chunk); this._notifyConsumers(); return true; } @@ -271,7 +272,7 @@ class BroadcastImpl { if (consumer.resolve) { const bufferIndex = consumer.cursor - this._bufferStart; if (bufferIndex < this._buffer.length) { - const chunk = this._buffer[bufferIndex]; + const chunk = this._buffer.get(bufferIndex); consumer.cursor++; consumer.resolve({ done: false, value: chunk }); } else { @@ -330,7 +331,7 @@ class BroadcastImpl { const minCursor = this._getMinCursor(); const trimCount = minCursor - this._bufferStart; if (trimCount > 0) { - ArrayPrototypeSplice(this._buffer, 0, trimCount); + this._buffer.trimFront(trimCount); this._bufferStart = minCursor; if (this._onBufferDrained && @@ -345,7 +346,7 @@ class BroadcastImpl { if (consumer.resolve) { const bufferIndex = consumer.cursor - this._bufferStart; if (bufferIndex < this._buffer.length) { - const chunk = this._buffer[bufferIndex]; + const chunk = this._buffer.get(bufferIndex); consumer.cursor++; const resolve = consumer.resolve; consumer.resolve = null; @@ -368,7 +369,7 @@ class BroadcastWriter { this._totalBytes = 0; 
this._closed = false; this._aborted = false; - this._pendingWrites = []; + this._pendingWrites = new RingBuffer(); this._pendingDrains = []; this._broadcast._onBufferDrained = () => { @@ -544,14 +545,14 @@ class BroadcastWriter { _createPendingWrite(chunk, signal) { return new Promise((resolve, reject) => { const entry = { chunk, resolve, reject }; - ArrayPrototypePush(this._pendingWrites, entry); + this._pendingWrites.push(entry); if (!signal) return; const onAbort = () => { // Remove from queue so it doesn't occupy a slot - const idx = ArrayPrototypeIndexOf(this._pendingWrites, entry); - if (idx !== -1) ArrayPrototypeSplice(this._pendingWrites, idx, 1); + const idx = this._pendingWrites.indexOf(entry); + if (idx !== -1) this._pendingWrites.removeAt(idx); reject(signal.reason ?? lazyDOMException('Aborted', 'AbortError')); }; @@ -573,7 +574,7 @@ class BroadcastWriter { _resolvePendingWrites() { while (this._pendingWrites.length > 0 && this._broadcast._canWrite()) { - const pending = ArrayPrototypeShift(this._pendingWrites); + const pending = this._pendingWrites.shift(); if (this._broadcast._write(pending.chunk)) { for (let i = 0; i < pending.chunk.length; i++) { this._totalBytes += pending.chunk[i].byteLength; @@ -587,10 +588,8 @@ class BroadcastWriter { } _rejectPendingWrites(error) { - const writes = this._pendingWrites; - this._pendingWrites = []; - for (let i = 0; i < writes.length; i++) { - writes[i].reject(error); + while (this._pendingWrites.length > 0) { + this._pendingWrites.shift().reject(error); } } diff --git a/lib/internal/streams/new/push.js b/lib/internal/streams/new/push.js index 83fdda21cb7a27..a0821f30bc3c3a 100644 --- a/lib/internal/streams/new/push.js +++ b/lib/internal/streams/new/push.js @@ -6,11 +6,8 @@ // with built-in backpressure. 
const { - ArrayPrototypeIndexOf, ArrayPrototypePush, - ArrayPrototypeShift, ArrayPrototypeSlice, - ArrayPrototypeSplice, Error, MathMax, Promise, @@ -38,6 +35,10 @@ const { pull: pullWithTransforms, } = require('internal/streams/new/pull'); +const { + RingBuffer, +} = require('internal/streams/new/ringbuffer'); + // Cached resolved promise to avoid allocating a new one on every sync fast-path. const kResolvedPromise = PromiseResolve(); @@ -48,11 +49,11 @@ const kResolvedPromise = PromiseResolve(); class PushQueue { constructor(options = {}) { /** Buffered chunks (each slot is from one write/writev call) */ - this._slots = []; + this._slots = new RingBuffer(); /** Pending writes waiting for buffer space */ - this._pendingWrites = []; + this._pendingWrites = new RingBuffer(); /** Pending reads waiting for data */ - this._pendingReads = []; + this._pendingReads = new RingBuffer(); /** Pending drains waiting for backpressure to clear */ this._pendingDrains = []; /** Writer state: 'open' | 'closed' | 'errored' */ @@ -134,7 +135,7 @@ class PushQueue { return false; case 'drop-oldest': if (this._slots.length > 0) { - ArrayPrototypeShift(this._slots); + this._slots.shift(); } break; case 'drop-newest': @@ -146,7 +147,7 @@ class PushQueue { } } - ArrayPrototypePush(this._slots, chunks); + this._slots.push(chunks); for (let i = 0; i < chunks.length; i++) { this._bytesWritten += chunks[i].byteLength; } @@ -209,14 +210,14 @@ class PushQueue { _createPendingWrite(chunks, signal) { return new Promise((resolve, reject) => { const entry = { chunks, resolve, reject }; - ArrayPrototypePush(this._pendingWrites, entry); + this._pendingWrites.push(entry); if (!signal) return; const onAbort = () => { // Remove from queue so it doesn't occupy a slot - const idx = ArrayPrototypeIndexOf(this._pendingWrites, entry); - if (idx !== -1) ArrayPrototypeSplice(this._pendingWrites, idx, 1); + const idx = this._pendingWrites.indexOf(entry); + if (idx !== -1) this._pendingWrites.removeAt(idx); 
reject(signal.reason ?? lazyDOMException('Aborted', 'AbortError')); }; @@ -301,7 +302,7 @@ class PushQueue { } return new Promise((resolve, reject) => { - ArrayPrototypePush(this._pendingReads, { resolve, reject }); + this._pendingReads.push({ resolve, reject }); }); } @@ -331,27 +332,27 @@ class PushQueue { _drain() { const result = []; for (let i = 0; i < this._slots.length; i++) { - const slot = this._slots[i]; + const slot = this._slots.get(i); for (let j = 0; j < slot.length; j++) { ArrayPrototypePush(result, slot[j]); } } - this._slots = []; + this._slots.clear(); return result; } _resolvePendingReads() { while (this._pendingReads.length > 0) { if (this._slots.length > 0) { - const pending = ArrayPrototypeShift(this._pendingReads); + const pending = this._pendingReads.shift(); const result = this._drain(); this._resolvePendingWrites(); pending.resolve({ value: result, done: false }); } else if (this._writerState === 'closed') { - const pending = ArrayPrototypeShift(this._pendingReads); + const pending = this._pendingReads.shift(); pending.resolve({ value: undefined, done: true }); } else if (this._writerState === 'errored' && this._error) { - const pending = ArrayPrototypeShift(this._pendingReads); + const pending = this._pendingReads.shift(); pending.reject(this._error); } else { break; @@ -362,8 +363,8 @@ class PushQueue { _resolvePendingWrites() { while (this._pendingWrites.length > 0 && this._slots.length < this._highWaterMark) { - const pending = ArrayPrototypeShift(this._pendingWrites); - ArrayPrototypePush(this._slots, pending.chunks); + const pending = this._pendingWrites.shift(); + this._slots.push(pending.chunks); for (let i = 0; i < pending.chunks.length; i++) { this._bytesWritten += pending.chunks[i].byteLength; } @@ -392,18 +393,14 @@ class PushQueue { } _rejectPendingReads(error) { - const reads = this._pendingReads; - this._pendingReads = []; - for (let i = 0; i < reads.length; i++) { - reads[i].reject(error); + while (this._pendingReads.length 
> 0) { + this._pendingReads.shift().reject(error); } } _rejectPendingWrites(error) { - const writes = this._pendingWrites; - this._pendingWrites = []; - for (let i = 0; i < writes.length; i++) { - writes[i].reject(error); + while (this._pendingWrites.length > 0) { + this._pendingWrites.shift().reject(error); } } diff --git a/lib/internal/streams/new/ringbuffer.js b/lib/internal/streams/new/ringbuffer.js new file mode 100644 index 00000000000000..c3ad401a3bdb08 --- /dev/null +++ b/lib/internal/streams/new/ringbuffer.js @@ -0,0 +1,147 @@ +'use strict'; + +// RingBuffer - O(1) FIFO queue with indexed access. +// +// Replaces plain JS arrays that are used as queues with shift()/push(). +// Array.shift() is O(n) because it copies all remaining elements; +// RingBuffer.shift() is O(1) -- it just advances a head pointer. +// +// Also provides O(1) trimFront(count) to replace Array.splice(0, count). + +const { + Array, +} = primordials; + +class RingBuffer { + #backing; + #head = 0; + #size = 0; + #capacity; + + constructor(initialCapacity = 16) { + this.#capacity = initialCapacity; + this.#backing = new Array(initialCapacity); + } + + get length() { + return this.#size; + } + + /** + * Append an item to the tail. O(1) amortized. + */ + push(item) { + if (this.#size === this.#capacity) { + this.#grow(); + } + this.#backing[(this.#head + this.#size) % this.#capacity] = item; + this.#size++; + } + + /** + * Prepend an item to the head. O(1) amortized. + */ + unshift(item) { + if (this.#size === this.#capacity) { + this.#grow(); + } + this.#head = (this.#head - 1 + this.#capacity) % this.#capacity; + this.#backing[this.#head] = item; + this.#size++; + } + + /** + * Remove and return the item at the head. O(1). 
+ * @returns {any} + */ + shift() { + if (this.#size === 0) return undefined; + const item = this.#backing[this.#head]; + this.#backing[this.#head] = undefined; // Help GC + this.#head = (this.#head + 1) % this.#capacity; + this.#size--; + return item; + } + + /** + * Read item at a logical index (0 = head). O(1). + * @returns {any} + */ + get(index) { + return this.#backing[(this.#head + index) % this.#capacity]; + } + + /** + * Remove `count` items from the head without returning them. + * O(count) for GC cleanup. + */ + trimFront(count) { + if (count <= 0) return; + if (count >= this.#size) { + this.clear(); + return; + } + for (let i = 0; i < count; i++) { + this.#backing[(this.#head + i) % this.#capacity] = undefined; + } + this.#head = (this.#head + count) % this.#capacity; + this.#size -= count; + } + + /** + * Find the logical index of `item` (reference equality). O(n). + * Returns -1 if not found. + * @returns {number} + */ + indexOf(item) { + for (let i = 0; i < this.#size; i++) { + if (this.#backing[(this.#head + i) % this.#capacity] === item) { + return i; + } + } + return -1; + } + + /** + * Remove the item at logical `index`, shifting later elements. O(n) worst case. + * Used only on rare abort-signal cancellation path. + */ + removeAt(index) { + if (index < 0 || index >= this.#size) return; + for (let i = index; i < this.#size - 1; i++) { + const from = (this.#head + i + 1) % this.#capacity; + const to = (this.#head + i) % this.#capacity; + this.#backing[to] = this.#backing[from]; + } + const last = (this.#head + this.#size - 1) % this.#capacity; + this.#backing[last] = undefined; + this.#size--; + } + + /** + * Remove all items. O(n) for GC cleanup. + */ + clear() { + for (let i = 0; i < this.#size; i++) { + this.#backing[(this.#head + i) % this.#capacity] = undefined; + } + this.#head = 0; + this.#size = 0; + } + + /** + * Double the backing capacity, linearizing the circular layout. 
+ */ + #grow() { + const newCapacity = this.#capacity * 2; + const newBacking = new Array(newCapacity); + for (let i = 0; i < this.#size; i++) { + newBacking[i] = this.#backing[(this.#head + i) % this.#capacity]; + } + this.#backing = newBacking; + this.#head = 0; + this.#capacity = newCapacity; + } +} + +module.exports = { RingBuffer }; diff --git a/lib/internal/streams/new/share.js b/lib/internal/streams/new/share.js index e368ea7ed58a7f..1b4698f0f0686c 100644 --- a/lib/internal/streams/new/share.js +++ b/lib/internal/streams/new/share.js @@ -7,8 +7,6 @@ const { ArrayPrototypePush, - ArrayPrototypeShift, - ArrayPrototypeSplice, Error, MathMax, Promise, @@ -39,6 +37,10 @@ const { parsePullArgs, } = require('internal/streams/new/utils'); +const { + RingBuffer, +} = require('internal/streams/new/ringbuffer'); + const { codes: { ERR_INVALID_ARG_TYPE, @@ -55,7 +57,7 @@ class ShareImpl { constructor(source, options) { this._source = source; this._options = options; - this._buffer = []; + this._buffer = new RingBuffer(); this._bufferStart = 0; this._consumers = new SafeSet(); this._sourceIterator = null; @@ -121,7 +123,7 @@ class ShareImpl { // Check if data is available in buffer const bufferIndex = state.cursor - self._bufferStart; if (bufferIndex < self._buffer.length) { - const chunk = self._buffer[bufferIndex]; + const chunk = self._buffer.get(bufferIndex); state.cursor++; self._tryTrimBuffer(); return { __proto__: null, done: false, value: chunk }; @@ -152,7 +154,7 @@ class ShareImpl { const newBufferIndex = state.cursor - self._bufferStart; if (newBufferIndex < self._buffer.length) { - const chunk = self._buffer[newBufferIndex]; + const chunk = self._buffer.get(newBufferIndex); state.cursor++; self._tryTrimBuffer(); return { __proto__: null, done: false, value: chunk }; @@ -244,7 +246,7 @@ class ShareImpl { }); break; case 'drop-oldest': - ArrayPrototypeShift(this._buffer); + this._buffer.shift(); this._bufferStart++; for (const consumer of this._consumers) { if 
(consumer.cursor < this._bufferStart) { @@ -301,7 +303,7 @@ class ShareImpl { if (result.done) { this._sourceExhausted = true; } else { - ArrayPrototypePush(this._buffer, result.value); + this._buffer.push(result.value); } } catch (error) { this._sourceError = @@ -332,7 +334,7 @@ class ShareImpl { const minCursor = this._getMinCursor(); const trimCount = minCursor - this._bufferStart; if (trimCount > 0) { - ArrayPrototypeSplice(this._buffer, 0, trimCount); + this._buffer.trimFront(trimCount); this._bufferStart = minCursor; for (let i = 0; i < this._pullWaiters.length; i++) { this._pullWaiters[i](); @@ -350,7 +352,7 @@ class SyncShareImpl { constructor(source, options) { this._source = source; this._options = options; - this._buffer = []; + this._buffer = new RingBuffer(); this._bufferStart = 0; this._consumers = new SafeSet(); this._sourceIterator = null; @@ -405,7 +407,7 @@ class SyncShareImpl { const bufferIndex = state.cursor - self._bufferStart; if (bufferIndex < self._buffer.length) { - const chunk = self._buffer[bufferIndex]; + const chunk = self._buffer.get(bufferIndex); state.cursor++; self._tryTrimBuffer(); return { done: false, value: chunk }; @@ -430,7 +432,7 @@ class SyncShareImpl { '(blocking not available in sync context)', self._buffer.length); case 'drop-oldest': - ArrayPrototypeShift(self._buffer); + self._buffer.shift(); self._bufferStart++; for (const consumer of self._consumers) { if (consumer.cursor < self._bufferStart) { @@ -455,7 +457,7 @@ class SyncShareImpl { const newBufferIndex = state.cursor - self._bufferStart; if (newBufferIndex < self._buffer.length) { - const chunk = self._buffer[newBufferIndex]; + const chunk = self._buffer.get(newBufferIndex); state.cursor++; self._tryTrimBuffer(); return { done: false, value: chunk }; @@ -521,7 +523,7 @@ class SyncShareImpl { if (result.done) { this._sourceExhausted = true; } else { - ArrayPrototypePush(this._buffer, result.value); + this._buffer.push(result.value); } } catch (error) { 
this._sourceError = @@ -545,7 +547,7 @@ class SyncShareImpl { const minCursor = this._getMinCursor(); const trimCount = minCursor - this._bufferStart; if (trimCount > 0) { - ArrayPrototypeSplice(this._buffer, 0, trimCount); + this._buffer.trimFront(trimCount); this._bufferStart = minCursor; } } From 97ee8a347679befa274119c3f65f934bb2056db8 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 3 Mar 2026 07:57:31 -0800 Subject: [PATCH 09/42] stream: apply another minor perf improvement in stream/new --- lib/internal/streams/new/broadcast.js | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/internal/streams/new/broadcast.js b/lib/internal/streams/new/broadcast.js index 338d43626cdfd3..26cf5df5eb03c6 100644 --- a/lib/internal/streams/new/broadcast.js +++ b/lib/internal/streams/new/broadcast.js @@ -352,7 +352,6 @@ class BroadcastImpl { consumer.resolve = null; consumer.reject = null; resolve({ done: false, value: chunk }); - this._tryTrimBuffer(); } } } From 36bbc94d47deca93d755e3ef9fef29d6c18ddf7f Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 3 Mar 2026 09:06:47 -0800 Subject: [PATCH 10/42] stream: use proper # private fields in stream/new --- lib/internal/streams/new/broadcast.js | 367 ++++++++++++++------------ lib/internal/streams/new/push.js | 310 +++++++++++----------- lib/internal/streams/new/share.js | 358 +++++++++++++------------ 3 files changed, 535 insertions(+), 500 deletions(-) diff --git a/lib/internal/streams/new/broadcast.js b/lib/internal/streams/new/broadcast.js index 26cf5df5eb03c6..1c671571fbe676 100644 --- a/lib/internal/streams/new/broadcast.js +++ b/lib/internal/streams/new/broadcast.js @@ -60,10 +60,16 @@ const encoder = new TextEncoder(); // Cached resolved promise to avoid allocating a new one on every sync fast-path. const kResolvedPromise = PromiseResolve(); -// Non-exported symbol for internal cancel notification from BroadcastImpl -// to BroadcastWriter. Because this symbol is not exported, external code -// cannot call it. 
+// Non-exported symbols for internal cross-class communication between +// BroadcastImpl and BroadcastWriter. Because these symbols are not exported, +// external code cannot access the internal methods/fields. const kCancelWriter = Symbol('cancelWriter'); +const kWrite = Symbol('write'); +const kEnd = Symbol('end'); +const kAbort = Symbol('abort'); +const kGetDesiredSize = Symbol('getDesiredSize'); +const kCanWrite = Symbol('canWrite'); +const kOnBufferDrained = Symbol('onBufferDrained'); // ============================================================================= // Argument Parsing @@ -97,33 +103,43 @@ function parsePushArgs(args) { // ============================================================================= class BroadcastImpl { + #buffer = new RingBuffer(); + #bufferStart = 0; + #consumers = new SafeSet(); + #ended = false; + #error = null; + #cancelled = false; + #options; + #writer = null; + constructor(options) { - this._buffer = new RingBuffer(); - this._bufferStart = 0; - this._consumers = new SafeSet(); - this._ended = false; - this._error = null; - this._cancelled = false; - this._options = options; - this._onBufferDrained = null; - this._writer = null; + this.#options = options; + this[kOnBufferDrained] = null; } setWriter(writer) { - this._writer = writer; + this.#writer = writer; + } + + get backpressurePolicy() { + return this.#options.backpressure; + } + + get highWaterMark() { + return this.#options.highWaterMark; } get consumerCount() { - return this._consumers.size; + return this.#consumers.size; } get bufferSize() { - return this._buffer.length; + return this.#buffer.length; } push(...args) { const { transforms, options } = parsePushArgs(args); - const rawConsumer = this._createRawConsumer(); + const rawConsumer = this.#createRawConsumer(); if (transforms.length > 0) { if (options?.signal) { @@ -135,15 +151,15 @@ class BroadcastImpl { return rawConsumer; } - _createRawConsumer() { + #createRawConsumer() { const state = { - cursor: 
this._bufferStart + this._buffer.length, + cursor: this.#bufferStart + this.#buffer.length, resolve: null, reject: null, detached: false, }; - this._consumers.add(state); + this.#consumers.add(state); const self = this; return { @@ -152,27 +168,27 @@ class BroadcastImpl { async next() { if (state.detached) { // If detached due to an error, throw the error - if (self._error) throw self._error; + if (self.#error) throw self.#error; return { __proto__: null, done: true, value: undefined }; } - const bufferIndex = state.cursor - self._bufferStart; - if (bufferIndex < self._buffer.length) { - const chunk = self._buffer.get(bufferIndex); + const bufferIndex = state.cursor - self.#bufferStart; + if (bufferIndex < self.#buffer.length) { + const chunk = self.#buffer.get(bufferIndex); state.cursor++; - self._tryTrimBuffer(); + self.#tryTrimBuffer(); return { __proto__: null, done: false, value: chunk }; } - if (self._error) { + if (self.#error) { state.detached = true; - self._consumers.delete(state); - throw self._error; + self.#consumers.delete(state); + throw self.#error; } - if (self._ended || self._cancelled) { + if (self.#ended || self.#cancelled) { state.detached = true; - self._consumers.delete(state); + self.#consumers.delete(state); return { __proto__: null, done: true, value: undefined }; } @@ -186,8 +202,8 @@ class BroadcastImpl { state.detached = true; state.resolve = null; state.reject = null; - self._consumers.delete(state); - self._tryTrimBuffer(); + self.#consumers.delete(state); + self.#tryTrimBuffer(); return { __proto__: null, done: true, value: undefined }; }, @@ -195,8 +211,8 @@ class BroadcastImpl { state.detached = true; state.resolve = null; state.reject = null; - self._consumers.delete(state); - self._tryTrimBuffer(); + self.#consumers.delete(state); + self.#tryTrimBuffer(); return { __proto__: null, done: true, value: undefined }; }, }; @@ -205,18 +221,18 @@ class BroadcastImpl { } cancel(reason) { - if (this._cancelled) return; - this._cancelled = 
true; - this._ended = true; // Prevents _abort() from redundantly iterating consumers + if (this.#cancelled) return; + this.#cancelled = true; + this.#ended = true; // Prevents [kAbort]() from redundantly iterating consumers if (reason) { - this._error = reason; + this.#error = reason; } // Reject pending writes on the writer so the pump doesn't hang - this._writer?.[kCancelWriter](); + this.#writer?.[kCancelWriter](); - for (const consumer of this._consumers) { + for (const consumer of this.#consumers) { if (consumer.resolve) { if (reason) { consumer.reject?.(reason); @@ -228,29 +244,29 @@ class BroadcastImpl { } consumer.detached = true; } - this._consumers.clear(); + this.#consumers.clear(); } [SymbolDispose]() { this.cancel(); } - // Internal methods called by Writer + // Methods accessed by BroadcastWriter via symbol keys - _write(chunk) { - if (this._ended || this._cancelled) return false; + [kWrite](chunk) { + if (this.#ended || this.#cancelled) return false; - if (this._buffer.length >= this._options.highWaterMark) { - switch (this._options.backpressure) { + if (this.#buffer.length >= this.#options.highWaterMark) { + switch (this.#options.backpressure) { case 'strict': case 'block': return false; case 'drop-oldest': - this._buffer.shift(); - this._bufferStart++; - for (const consumer of this._consumers) { - if (consumer.cursor < this._bufferStart) { - consumer.cursor = this._bufferStart; + this.#buffer.shift(); + this.#bufferStart++; + for (const consumer of this.#consumers) { + if (consumer.cursor < this.#bufferStart) { + consumer.cursor = this.#bufferStart; } } break; @@ -259,20 +275,20 @@ class BroadcastImpl { } } - this._buffer.push(chunk); - this._notifyConsumers(); + this.#buffer.push(chunk); + this.#notifyConsumers(); return true; } - _end() { - if (this._ended) return; - this._ended = true; + [kEnd]() { + if (this.#ended) return; + this.#ended = true; - for (const consumer of this._consumers) { + for (const consumer of this.#consumers) { if 
(consumer.resolve) { - const bufferIndex = consumer.cursor - this._bufferStart; - if (bufferIndex < this._buffer.length) { - const chunk = this._buffer.get(bufferIndex); + const bufferIndex = consumer.cursor - this.#bufferStart; + if (bufferIndex < this.#buffer.length) { + const chunk = this.#buffer.get(bufferIndex); consumer.cursor++; consumer.resolve({ done: false, value: chunk }); } else { @@ -284,13 +300,13 @@ class BroadcastImpl { } } - _abort(reason) { - if (this._ended || this._error) return; - this._error = reason; - this._ended = true; + [kAbort](reason) { + if (this.#ended || this.#error) return; + this.#error = reason; + this.#ended = true; // Notify all waiting consumers and detach them - for (const consumer of this._consumers) { + for (const consumer of this.#consumers) { if (consumer.reject) { consumer.reject(reason); consumer.resolve = null; @@ -298,55 +314,57 @@ class BroadcastImpl { } consumer.detached = true; } - this._consumers.clear(); + this.#consumers.clear(); } - _getDesiredSize() { - if (this._ended || this._cancelled) return null; - return MathMax(0, this._options.highWaterMark - this._buffer.length); + [kGetDesiredSize]() { + if (this.#ended || this.#cancelled) return null; + return MathMax(0, this.#options.highWaterMark - this.#buffer.length); } - _canWrite() { - if (this._ended || this._cancelled) return false; - if ((this._options.backpressure === 'strict' || - this._options.backpressure === 'block') && - this._buffer.length >= this._options.highWaterMark) { + [kCanWrite]() { + if (this.#ended || this.#cancelled) return false; + if ((this.#options.backpressure === 'strict' || + this.#options.backpressure === 'block') && + this.#buffer.length >= this.#options.highWaterMark) { return false; } return true; } - _getMinCursor() { + // Private methods + + #getMinCursor() { let min = Infinity; - for (const consumer of this._consumers) { + for (const consumer of this.#consumers) { if (consumer.cursor < min) { min = consumer.cursor; } } return 
min === Infinity ? - this._bufferStart + this._buffer.length : min; + this.#bufferStart + this.#buffer.length : min; } - _tryTrimBuffer() { - const minCursor = this._getMinCursor(); - const trimCount = minCursor - this._bufferStart; + #tryTrimBuffer() { + const minCursor = this.#getMinCursor(); + const trimCount = minCursor - this.#bufferStart; if (trimCount > 0) { - this._buffer.trimFront(trimCount); - this._bufferStart = minCursor; + this.#buffer.trimFront(trimCount); + this.#bufferStart = minCursor; - if (this._onBufferDrained && - this._buffer.length < this._options.highWaterMark) { - this._onBufferDrained(); + if (this[kOnBufferDrained] && + this.#buffer.length < this.#options.highWaterMark) { + this[kOnBufferDrained](); } } } - _notifyConsumers() { - for (const consumer of this._consumers) { + #notifyConsumers() { + for (const consumer of this.#consumers) { if (consumer.resolve) { - const bufferIndex = consumer.cursor - this._bufferStart; - if (bufferIndex < this._buffer.length) { - const chunk = this._buffer.get(bufferIndex); + const bufferIndex = consumer.cursor - this.#bufferStart; + if (bufferIndex < this.#buffer.length) { + const chunk = this.#buffer.get(bufferIndex); consumer.cursor++; const resolve = consumer.resolve; consumer.resolve = null; @@ -363,17 +381,19 @@ class BroadcastImpl { // ============================================================================= class BroadcastWriter { + #broadcast; + #totalBytes = 0; + #closed = false; + #aborted = false; + #pendingWrites = new RingBuffer(); + #pendingDrains = []; + constructor(broadcastImpl) { - this._broadcast = broadcastImpl; - this._totalBytes = 0; - this._closed = false; - this._aborted = false; - this._pendingWrites = new RingBuffer(); - this._pendingDrains = []; - - this._broadcast._onBufferDrained = () => { - this._resolvePendingWrites(); - this._resolvePendingDrains(true); + this.#broadcast = broadcastImpl; + + this.#broadcast[kOnBufferDrained] = () => { + this.#resolvePendingWrites(); + 
this.#resolvePendingDrains(true); }; } @@ -382,23 +402,23 @@ class BroadcastWriter { if (desired === null) return null; if (desired > 0) return PromiseResolve(true); return new Promise((resolve, reject) => { - ArrayPrototypePush(this._pendingDrains, { resolve, reject }); + ArrayPrototypePush(this.#pendingDrains, { resolve, reject }); }); } get desiredSize() { - if (this._closed || this._aborted) return null; - return this._broadcast._getDesiredSize(); + if (this.#closed || this.#aborted) return null; + return this.#broadcast[kGetDesiredSize](); } write(chunk, options) { // Fast path: no signal, writer open, buffer has space - if (!options?.signal && !this._closed && !this._aborted && - this._broadcast._canWrite()) { + if (!options?.signal && !this.#closed && !this.#aborted && + this.#broadcast[kCanWrite]()) { const converted = typeof chunk === 'string' ? encoder.encode(chunk) : chunk; - this._broadcast._write([converted]); - this._totalBytes += converted.byteLength; + this.#broadcast[kWrite]([converted]); + this.#totalBytes += converted.byteLength; return kResolvedPromise; } return this.writev([chunk], options); @@ -406,22 +426,22 @@ class BroadcastWriter { writev(chunks, options) { // Fast path: no signal, writer open, buffer has space - if (!options?.signal && !this._closed && !this._aborted && - this._broadcast._canWrite()) { + if (!options?.signal && !this.#closed && !this.#aborted && + this.#broadcast[kCanWrite]()) { const converted = allUint8Array(chunks) ? ArrayPrototypeSlice(chunks) : ArrayPrototypeMap(chunks, (c) => (typeof c === 'string' ? 
encoder.encode(c) : c)); - this._broadcast._write(converted); + this.#broadcast[kWrite](converted); for (let i = 0; i < converted.length; i++) { - this._totalBytes += converted[i].byteLength; + this.#totalBytes += converted[i].byteLength; } return kResolvedPromise; } - return this._writevSlow(chunks, options); + return this.#writevSlow(chunks, options); } - async _writevSlow(chunks, options) { + async #writevSlow(chunks, options) { const signal = options?.signal; // Check for pre-aborted signal @@ -429,7 +449,7 @@ class BroadcastWriter { throw signal.reason ?? lazyDOMException('Aborted', 'AbortError'); } - if (this._closed || this._aborted) { + if (this.#closed || this.#aborted) { throw new ERR_INVALID_STATE('Writer is closed'); } @@ -438,51 +458,51 @@ class BroadcastWriter { ArrayPrototypeMap(chunks, (c) => (typeof c === 'string' ? encoder.encode(c) : c)); - if (this._broadcast._write(converted)) { + if (this.#broadcast[kWrite](converted)) { for (let i = 0; i < converted.length; i++) { - this._totalBytes += converted[i].byteLength; + this.#totalBytes += converted[i].byteLength; } return; } - const policy = this._broadcast._options?.backpressure ?? 'strict'; - const highWaterMark = this._broadcast._options?.highWaterMark ?? 16; + const policy = this.#broadcast.backpressurePolicy; + const hwm = this.#broadcast.highWaterMark; if (policy === 'strict') { - if (this._pendingWrites.length >= highWaterMark) { + if (this.#pendingWrites.length >= hwm) { throw new ERR_INVALID_STATE( 'Backpressure violation: too many pending writes. 
' + 'Await each write() call to respect backpressure.'); } - return this._createPendingWrite(converted, signal); + return this.#createPendingWrite(converted, signal); } // 'block' policy - return this._createPendingWrite(converted, signal); + return this.#createPendingWrite(converted, signal); } writeSync(chunk) { - if (this._closed || this._aborted) return false; - if (!this._broadcast._canWrite()) return false; + if (this.#closed || this.#aborted) return false; + if (!this.#broadcast[kCanWrite]()) return false; const converted = typeof chunk === 'string' ? encoder.encode(chunk) : chunk; - if (this._broadcast._write([converted])) { - this._totalBytes += converted.byteLength; + if (this.#broadcast[kWrite]([converted])) { + this.#totalBytes += converted.byteLength; return true; } return false; } writevSync(chunks) { - if (this._closed || this._aborted) return false; - if (!this._broadcast._canWrite()) return false; + if (this.#closed || this.#aborted) return false; + if (!this.#broadcast[kCanWrite]()) return false; const converted = allUint8Array(chunks) ? ArrayPrototypeSlice(chunks) : ArrayPrototypeMap(chunks, (c) => (typeof c === 'string' ? encoder.encode(c) : c)); - if (this._broadcast._write(converted)) { + if (this.#broadcast[kWrite](converted)) { for (let i = 0; i < converted.length; i++) { - this._totalBytes += converted[i].byteLength; + this.#totalBytes += converted[i].byteLength; } return true; } @@ -491,49 +511,49 @@ class BroadcastWriter { // end() is synchronous internally - signal accepted for interface compliance. 
end(options) { - if (this._closed) return PromiseResolve(this._totalBytes); - this._closed = true; - this._broadcast._end(); - this._resolvePendingDrains(false); - return PromiseResolve(this._totalBytes); + if (this.#closed) return PromiseResolve(this.#totalBytes); + this.#closed = true; + this.#broadcast[kEnd](); + this.#resolvePendingDrains(false); + return PromiseResolve(this.#totalBytes); } endSync() { - if (this._closed) return this._totalBytes; - this._closed = true; - this._broadcast._end(); - this._resolvePendingDrains(false); - return this._totalBytes; + if (this.#closed) return this.#totalBytes; + this.#closed = true; + this.#broadcast[kEnd](); + this.#resolvePendingDrains(false); + return this.#totalBytes; } fail(reason) { - if (this._aborted) return kResolvedPromise; - this._aborted = true; - this._closed = true; + if (this.#aborted) return kResolvedPromise; + this.#aborted = true; + this.#closed = true; const error = reason ?? new ERR_INVALID_STATE('Failed'); - this._rejectPendingWrites(error); - this._rejectPendingDrains(error); - this._broadcast._abort(error); + this.#rejectPendingWrites(error); + this.#rejectPendingDrains(error); + this.#broadcast[kAbort](error); return kResolvedPromise; } failSync(reason) { - if (this._aborted) return true; - this._aborted = true; - this._closed = true; + if (this.#aborted) return true; + this.#aborted = true; + this.#closed = true; const error = reason ?? 
new ERR_INVALID_STATE('Failed'); - this._rejectPendingWrites(error); - this._rejectPendingDrains(error); - this._broadcast._abort(error); + this.#rejectPendingWrites(error); + this.#rejectPendingDrains(error); + this.#broadcast[kAbort](error); return true; } [kCancelWriter]() { - if (this._closed) return; - this._closed = true; - this._rejectPendingWrites( + if (this.#closed) return; + this.#closed = true; + this.#rejectPendingWrites( lazyDOMException('Broadcast cancelled', 'AbortError')); - this._resolvePendingDrains(false); + this.#resolvePendingDrains(false); } /** @@ -541,17 +561,17 @@ class BroadcastWriter { * If the signal fires, the entry is removed from pendingWrites and the * promise rejects. Signal listeners are cleaned up on normal resolution. */ - _createPendingWrite(chunk, signal) { + #createPendingWrite(chunk, signal) { return new Promise((resolve, reject) => { const entry = { chunk, resolve, reject }; - this._pendingWrites.push(entry); + this.#pendingWrites.push(entry); if (!signal) return; const onAbort = () => { // Remove from queue so it doesn't occupy a slot - const idx = this._pendingWrites.indexOf(entry); - if (idx !== -1) this._pendingWrites.removeAt(idx); + const idx = this.#pendingWrites.indexOf(entry); + if (idx !== -1) this.#pendingWrites.removeAt(idx); reject(signal.reason ?? 
lazyDOMException('Aborted', 'AbortError')); }; @@ -571,38 +591,39 @@ class BroadcastWriter { }); } - _resolvePendingWrites() { - while (this._pendingWrites.length > 0 && this._broadcast._canWrite()) { - const pending = this._pendingWrites.shift(); - if (this._broadcast._write(pending.chunk)) { + #resolvePendingWrites() { + while (this.#pendingWrites.length > 0 && + this.#broadcast[kCanWrite]()) { + const pending = this.#pendingWrites.shift(); + if (this.#broadcast[kWrite](pending.chunk)) { for (let i = 0; i < pending.chunk.length; i++) { - this._totalBytes += pending.chunk[i].byteLength; + this.#totalBytes += pending.chunk[i].byteLength; } pending.resolve(); } else { - this._pendingWrites.unshift(pending); + this.#pendingWrites.unshift(pending); break; } } } - _rejectPendingWrites(error) { - while (this._pendingWrites.length > 0) { - this._pendingWrites.shift().reject(error); + #rejectPendingWrites(error) { + while (this.#pendingWrites.length > 0) { + this.#pendingWrites.shift().reject(error); } } - _resolvePendingDrains(canWrite) { - const drains = this._pendingDrains; - this._pendingDrains = []; + #resolvePendingDrains(canWrite) { + const drains = this.#pendingDrains; + this.#pendingDrains = []; for (let i = 0; i < drains.length; i++) { drains[i].resolve(canWrite); } } - _rejectPendingDrains(error) { - const drains = this._pendingDrains; - this._pendingDrains = []; + #rejectPendingDrains(error) { + const drains = this.#pendingDrains; + this.#pendingDrains = []; for (let i = 0; i < drains.length; i++) { drains[i].reject(error); } diff --git a/lib/internal/streams/new/push.js b/lib/internal/streams/new/push.js index a0821f30bc3c3a..791a9acbbfe3c2 100644 --- a/lib/internal/streams/new/push.js +++ b/lib/internal/streams/new/push.js @@ -47,42 +47,47 @@ const kResolvedPromise = PromiseResolve(); // ============================================================================= class PushQueue { + /** Buffered chunks (each slot is from one write/writev call) */ + #slots = 
new RingBuffer(); + /** Pending writes waiting for buffer space */ + #pendingWrites = new RingBuffer(); + /** Pending reads waiting for data */ + #pendingReads = new RingBuffer(); + /** Pending drains waiting for backpressure to clear */ + #pendingDrains = []; + /** Writer state: 'open' | 'closed' | 'errored' */ + #writerState = 'open'; + /** Consumer state: 'active' | 'returned' | 'thrown' */ + #consumerState = 'active'; + /** Error that closed the stream */ + #error = null; + /** Total bytes written */ + #bytesWritten = 0; + + /** Configuration */ + #highWaterMark; + #backpressure; + #signal; + #abortHandler; + constructor(options = {}) { - /** Buffered chunks (each slot is from one write/writev call) */ - this._slots = new RingBuffer(); - /** Pending writes waiting for buffer space */ - this._pendingWrites = new RingBuffer(); - /** Pending reads waiting for data */ - this._pendingReads = new RingBuffer(); - /** Pending drains waiting for backpressure to clear */ - this._pendingDrains = []; - /** Writer state: 'open' | 'closed' | 'errored' */ - this._writerState = 'open'; - /** Consumer state: 'active' | 'returned' | 'thrown' */ - this._consumerState = 'active'; - /** Error that closed the stream */ - this._error = null; - /** Total bytes written */ - this._bytesWritten = 0; - - /** Configuration */ - this._highWaterMark = MathMax(1, options.highWaterMark ?? 1); - this._backpressure = options.backpressure ?? 'strict'; - this._signal = options.signal; - this._abortHandler = undefined; - - if (this._signal) { - if (this._signal.aborted) { - this.fail(this._signal.reason instanceof Error ? - this._signal.reason : + this.#highWaterMark = MathMax(1, options.highWaterMark ?? 1); + this.#backpressure = options.backpressure ?? 'strict'; + this.#signal = options.signal; + this.#abortHandler = undefined; + + if (this.#signal) { + if (this.#signal.aborted) { + this.fail(this.#signal.reason instanceof Error ? 
+ this.#signal.reason : lazyDOMException('Aborted', 'AbortError')); } else { - this._abortHandler = () => { - this.fail(this._signal.reason instanceof Error ? - this._signal.reason : + this.#abortHandler = () => { + this.fail(this.#signal.reason instanceof Error ? + this.#signal.reason : lazyDOMException('Aborted', 'AbortError')); }; - this._signal.addEventListener('abort', this._abortHandler, + this.#signal.addEventListener('abort', this.#abortHandler, { once: true }); } } @@ -98,10 +103,10 @@ class PushQueue { * @returns {number | null} */ get desiredSize() { - if (this._writerState !== 'open' || this._consumerState !== 'active') { + if (this.#writerState !== 'open' || this.#consumerState !== 'active') { return null; } - return MathMax(0, this._highWaterMark - this._slots.length); + return MathMax(0, this.#highWaterMark - this.#slots.length); } /** @@ -109,11 +114,11 @@ class PushQueue { * @returns {boolean} */ canWriteSync() { - if (this._writerState !== 'open') return false; - if (this._consumerState !== 'active') return false; - if ((this._backpressure === 'strict' || - this._backpressure === 'block') && - this._slots.length >= this._highWaterMark) { + if (this.#writerState !== 'open') return false; + if (this.#consumerState !== 'active') return false; + if ((this.#backpressure === 'strict' || + this.#backpressure === 'block') && + this.#slots.length >= this.#highWaterMark) { return false; } return true; @@ -125,34 +130,34 @@ class PushQueue { * @returns {boolean} */ writeSync(chunks) { - if (this._writerState !== 'open') return false; - if (this._consumerState !== 'active') return false; + if (this.#writerState !== 'open') return false; + if (this.#consumerState !== 'active') return false; - if (this._slots.length >= this._highWaterMark) { - switch (this._backpressure) { + if (this.#slots.length >= this.#highWaterMark) { + switch (this.#backpressure) { case 'strict': case 'block': return false; case 'drop-oldest': - if (this._slots.length > 0) { - 
this._slots.shift(); + if (this.#slots.length > 0) { + this.#slots.shift(); } break; case 'drop-newest': // Discard this write, but return true for (let i = 0; i < chunks.length; i++) { - this._bytesWritten += chunks[i].byteLength; + this.#bytesWritten += chunks[i].byteLength; } return true; } } - this._slots.push(chunks); + this.#slots.push(chunks); for (let i = 0; i < chunks.length; i++) { - this._bytesWritten += chunks[i].byteLength; + this.#bytesWritten += chunks[i].byteLength; } - this._resolvePendingReads(); + this.#resolvePendingReads(); return true; } @@ -171,12 +176,12 @@ class PushQueue { throw signal.reason ?? lazyDOMException('Aborted', 'AbortError'); } - if (this._writerState !== 'open') { + if (this.#writerState !== 'open') { throw new ERR_INVALID_STATE('Writer is closed'); } - if (this._consumerState !== 'active') { - throw this._consumerState === 'thrown' && this._error ? - this._error : + if (this.#consumerState !== 'active') { + throw this.#consumerState === 'thrown' && this.#error ? + this.#error : new ERR_INVALID_STATE('Stream closed by consumer'); } @@ -186,16 +191,16 @@ class PushQueue { } // Buffer is full - switch (this._backpressure) { + switch (this.#backpressure) { case 'strict': - if (this._pendingWrites.length >= this._highWaterMark) { + if (this.#pendingWrites.length >= this.#highWaterMark) { throw new ERR_INVALID_STATE( 'Backpressure violation: too many pending writes. ' + 'Await each write() call to respect backpressure.'); } - return this._createPendingWrite(chunks, signal); + return this.#createPendingWrite(chunks, signal); case 'block': - return this._createPendingWrite(chunks, signal); + return this.#createPendingWrite(chunks, signal); default: throw new ERR_INVALID_STATE( 'Unexpected: writeSync should have handled non-strict policy'); @@ -207,17 +212,17 @@ class PushQueue { * If the signal fires, the entry is removed from pendingWrites and the * promise rejects. Signal listeners are cleaned up on normal resolution. 
*/ - _createPendingWrite(chunks, signal) { + #createPendingWrite(chunks, signal) { return new Promise((resolve, reject) => { const entry = { chunks, resolve, reject }; - this._pendingWrites.push(entry); + this.#pendingWrites.push(entry); if (!signal) return; const onAbort = () => { // Remove from queue so it doesn't occupy a slot - const idx = this._pendingWrites.indexOf(entry); - if (idx !== -1) this._pendingWrites.removeAt(idx); + const idx = this.#pendingWrites.indexOf(entry); + if (idx !== -1) this.#pendingWrites.removeAt(idx); reject(signal.reason ?? lazyDOMException('Aborted', 'AbortError')); }; @@ -242,34 +247,34 @@ class PushQueue { * @returns {number} */ end() { - if (this._writerState !== 'open') { - return this._bytesWritten; + if (this.#writerState !== 'open') { + return this.#bytesWritten; } - this._writerState = 'closed'; - this._cleanup(); - this._resolvePendingReads(); - this._rejectPendingWrites(new ERR_INVALID_STATE('Writer closed')); - this._resolvePendingDrains(false); - return this._bytesWritten; + this.#writerState = 'closed'; + this.#cleanup(); + this.#resolvePendingReads(); + this.#rejectPendingWrites(new ERR_INVALID_STATE('Writer closed')); + this.#resolvePendingDrains(false); + return this.#bytesWritten; } /** * Put queue into terminal error state. */ fail(reason) { - if (this._writerState === 'errored') return; + if (this.#writerState === 'errored') return; - this._writerState = 'errored'; - this._error = reason ?? new ERR_INVALID_STATE('Failed'); - this._cleanup(); - this._rejectPendingReads(this._error); - this._rejectPendingWrites(this._error); - this._rejectPendingDrains(this._error); + this.#writerState = 'errored'; + this.#error = reason ?? 
new ERR_INVALID_STATE('Failed'); + this.#cleanup(); + this.#rejectPendingReads(this.#error); + this.#rejectPendingWrites(this.#error); + this.#rejectPendingDrains(this.#error); } get totalBytesWritten() { - return this._bytesWritten; + return this.#bytesWritten; } /** @@ -277,7 +282,7 @@ class PushQueue { */ waitForDrain() { return new Promise((resolve, reject) => { - ArrayPrototypePush(this._pendingDrains, { resolve, reject }); + ArrayPrototypePush(this.#pendingDrains, { resolve, reject }); }); } @@ -287,127 +292,128 @@ class PushQueue { async read() { // If there's data in the buffer, return it immediately - if (this._slots.length > 0) { - const result = this._drain(); - this._resolvePendingWrites(); + if (this.#slots.length > 0) { + const result = this.#drain(); + this.#resolvePendingWrites(); return { __proto__: null, value: result, done: false }; } - if (this._writerState === 'closed') { + if (this.#writerState === 'closed') { return { __proto__: null, value: undefined, done: true }; } - if (this._writerState === 'errored' && this._error) { - throw this._error; + if (this.#writerState === 'errored' && this.#error) { + throw this.#error; } return new Promise((resolve, reject) => { - this._pendingReads.push({ resolve, reject }); + this.#pendingReads.push({ resolve, reject }); }); } consumerReturn() { - if (this._consumerState !== 'active') return; - this._consumerState = 'returned'; - this._cleanup(); - this._rejectPendingWrites(new ERR_INVALID_STATE('Stream closed by consumer')); + if (this.#consumerState !== 'active') return; + this.#consumerState = 'returned'; + this.#cleanup(); + this.#rejectPendingWrites( + new ERR_INVALID_STATE('Stream closed by consumer')); // Resolve pending drains with false - no more data will be consumed - this._resolvePendingDrains(false); + this.#resolvePendingDrains(false); } consumerThrow(error) { - if (this._consumerState !== 'active') return; - this._consumerState = 'thrown'; - this._error = error; - this._cleanup(); - 
this._rejectPendingWrites(error); + if (this.#consumerState !== 'active') return; + this.#consumerState = 'thrown'; + this.#error = error; + this.#cleanup(); + this.#rejectPendingWrites(error); // Reject pending drains - the consumer errored - this._rejectPendingDrains(error); + this.#rejectPendingDrains(error); } // =========================================================================== // Private Methods // =========================================================================== - _drain() { + #drain() { const result = []; - for (let i = 0; i < this._slots.length; i++) { - const slot = this._slots.get(i); + for (let i = 0; i < this.#slots.length; i++) { + const slot = this.#slots.get(i); for (let j = 0; j < slot.length; j++) { ArrayPrototypePush(result, slot[j]); } } - this._slots.clear(); + this.#slots.clear(); return result; } - _resolvePendingReads() { - while (this._pendingReads.length > 0) { - if (this._slots.length > 0) { - const pending = this._pendingReads.shift(); - const result = this._drain(); - this._resolvePendingWrites(); + #resolvePendingReads() { + while (this.#pendingReads.length > 0) { + if (this.#slots.length > 0) { + const pending = this.#pendingReads.shift(); + const result = this.#drain(); + this.#resolvePendingWrites(); pending.resolve({ value: result, done: false }); - } else if (this._writerState === 'closed') { - const pending = this._pendingReads.shift(); + } else if (this.#writerState === 'closed') { + const pending = this.#pendingReads.shift(); pending.resolve({ value: undefined, done: true }); - } else if (this._writerState === 'errored' && this._error) { - const pending = this._pendingReads.shift(); - pending.reject(this._error); + } else if (this.#writerState === 'errored' && this.#error) { + const pending = this.#pendingReads.shift(); + pending.reject(this.#error); } else { break; } } } - _resolvePendingWrites() { - while (this._pendingWrites.length > 0 && - this._slots.length < this._highWaterMark) { - const pending = 
this._pendingWrites.shift(); - this._slots.push(pending.chunks); + #resolvePendingWrites() { + while (this.#pendingWrites.length > 0 && + this.#slots.length < this.#highWaterMark) { + const pending = this.#pendingWrites.shift(); + this.#slots.push(pending.chunks); for (let i = 0; i < pending.chunks.length; i++) { - this._bytesWritten += pending.chunks[i].byteLength; + this.#bytesWritten += pending.chunks[i].byteLength; } pending.resolve(); } - if (this._slots.length < this._highWaterMark) { - this._resolvePendingDrains(true); + if (this.#slots.length < this.#highWaterMark) { + this.#resolvePendingDrains(true); } } - _resolvePendingDrains(canWrite) { - const drains = this._pendingDrains; - this._pendingDrains = []; + #resolvePendingDrains(canWrite) { + const drains = this.#pendingDrains; + this.#pendingDrains = []; for (let i = 0; i < drains.length; i++) { drains[i].resolve(canWrite); } } - _rejectPendingDrains(error) { - const drains = this._pendingDrains; - this._pendingDrains = []; + #rejectPendingDrains(error) { + const drains = this.#pendingDrains; + this.#pendingDrains = []; for (let i = 0; i < drains.length; i++) { drains[i].reject(error); } } - _rejectPendingReads(error) { - while (this._pendingReads.length > 0) { - this._pendingReads.shift().reject(error); + #rejectPendingReads(error) { + while (this.#pendingReads.length > 0) { + this.#pendingReads.shift().reject(error); } } - _rejectPendingWrites(error) { - while (this._pendingWrites.length > 0) { - this._pendingWrites.shift().reject(error); + #rejectPendingWrites(error) { + while (this.#pendingWrites.length > 0) { + this.#pendingWrites.shift().reject(error); } } - _cleanup() { - if (this._signal && this._abortHandler) { - this._signal.removeEventListener('abort', this._abortHandler); - this._abortHandler = undefined; + #cleanup() { + if (this.#signal && this.#abortHandler) { + this.#signal.removeEventListener('abort', this.#abortHandler); + this.#abortHandler = undefined; } } } @@ -417,33 +423,35 @@ class 
PushQueue { // ============================================================================= class PushWriter { + #queue; + constructor(queue) { - this._queue = queue; + this.#queue = queue; } [drainableProtocol]() { const desired = this.desiredSize; if (desired === null) return null; if (desired > 0) return PromiseResolve(true); - return this._queue.waitForDrain(); + return this.#queue.waitForDrain(); } get desiredSize() { - return this._queue.desiredSize; + return this.#queue.desiredSize; } write(chunk, options) { - if (!options?.signal && this._queue.canWriteSync()) { + if (!options?.signal && this.#queue.canWriteSync()) { const bytes = toUint8Array(chunk); - this._queue.writeSync([bytes]); + this.#queue.writeSync([bytes]); return kResolvedPromise; } const bytes = toUint8Array(chunk); - return this._queue.writeAsync([bytes], options?.signal); + return this.#queue.writeAsync([bytes], options?.signal); } writev(chunks, options) { - if (!options?.signal && this._queue.canWriteSync()) { + if (!options?.signal && this.#queue.canWriteSync()) { let bytes; if (allUint8Array(chunks)) { bytes = ArrayPrototypeSlice(chunks); @@ -453,7 +461,7 @@ class PushWriter { ArrayPrototypePush(bytes, toUint8Array(chunks[i])); } } - this._queue.writeSync(bytes); + this.#queue.writeSync(bytes); return kResolvedPromise; } let bytes; @@ -465,17 +473,17 @@ class PushWriter { ArrayPrototypePush(bytes, toUint8Array(chunks[i])); } } - return this._queue.writeAsync(bytes, options?.signal); + return this.#queue.writeAsync(bytes, options?.signal); } writeSync(chunk) { - if (!this._queue.canWriteSync()) return false; + if (!this.#queue.canWriteSync()) return false; const bytes = toUint8Array(chunk); - return this._queue.writeSync([bytes]); + return this.#queue.writeSync([bytes]); } writevSync(chunks) { - if (!this._queue.canWriteSync()) return false; + if (!this.#queue.canWriteSync()) return false; let bytes; if (allUint8Array(chunks)) { bytes = ArrayPrototypeSlice(chunks); @@ -485,26 +493,26 @@ 
class PushWriter { ArrayPrototypePush(bytes, toUint8Array(chunks[i])); } } - return this._queue.writeSync(bytes); + return this.#queue.writeSync(bytes); } end(options) { // end() on PushQueue is synchronous (sets state, resolves pending reads). // Signal accepted for interface compliance but there is nothing to cancel. - return PromiseResolve(this._queue.end()); + return PromiseResolve(this.#queue.end()); } endSync() { - return this._queue.end(); + return this.#queue.end(); } fail(reason) { - this._queue.fail(reason); + this.#queue.fail(reason); return kResolvedPromise; } failSync(reason) { - this._queue.fail(reason); + this.#queue.fail(reason); return true; } } diff --git a/lib/internal/streams/new/share.js b/lib/internal/streams/new/share.js index 1b4698f0f0686c..46ab88709dcb1c 100644 --- a/lib/internal/streams/new/share.js +++ b/lib/internal/streams/new/share.js @@ -54,31 +54,34 @@ const { // ============================================================================= class ShareImpl { + #source; + #options; + #buffer = new RingBuffer(); + #bufferStart = 0; + #consumers = new SafeSet(); + #sourceIterator = null; + #sourceExhausted = false; + #sourceError = null; + #cancelled = false; + #pulling = false; + #pullWaiters = []; + constructor(source, options) { - this._source = source; - this._options = options; - this._buffer = new RingBuffer(); - this._bufferStart = 0; - this._consumers = new SafeSet(); - this._sourceIterator = null; - this._sourceExhausted = false; - this._sourceError = null; - this._cancelled = false; - this._pulling = false; - this._pullWaiters = []; + this.#source = source; + this.#options = options; } get consumerCount() { - return this._consumers.size; + return this.#consumers.size; } get bufferSize() { - return this._buffer.length; + return this.#buffer.length; } pull(...args) { const { transforms, options } = parsePullArgs(args); - const rawConsumer = this._createRawConsumer(); + const rawConsumer = this.#createRawConsumer(); if 
(transforms.length > 0) { if (options) { @@ -89,80 +92,80 @@ class ShareImpl { return rawConsumer; } - _createRawConsumer() { + #createRawConsumer() { const state = { - cursor: this._bufferStart, + cursor: this.#bufferStart, resolve: null, reject: null, detached: false, }; - this._consumers.add(state); + this.#consumers.add(state); const self = this; return { [SymbolAsyncIterator]() { return { async next() { - if (self._sourceError) { + if (self.#sourceError) { state.detached = true; - self._consumers.delete(state); - throw self._sourceError; + self.#consumers.delete(state); + throw self.#sourceError; } if (state.detached) { return { __proto__: null, done: true, value: undefined }; } - if (self._cancelled) { + if (self.#cancelled) { state.detached = true; - self._consumers.delete(state); + self.#consumers.delete(state); return { __proto__: null, done: true, value: undefined }; } // Check if data is available in buffer - const bufferIndex = state.cursor - self._bufferStart; - if (bufferIndex < self._buffer.length) { - const chunk = self._buffer.get(bufferIndex); + const bufferIndex = state.cursor - self.#bufferStart; + if (bufferIndex < self.#buffer.length) { + const chunk = self.#buffer.get(bufferIndex); state.cursor++; - self._tryTrimBuffer(); + self.#tryTrimBuffer(); return { __proto__: null, done: false, value: chunk }; } - if (self._sourceExhausted) { + if (self.#sourceExhausted) { state.detached = true; - self._consumers.delete(state); + self.#consumers.delete(state); return { __proto__: null, done: true, value: undefined }; } // Need to pull from source - check buffer limit - const canPull = await self._waitForBufferSpace(state); + const canPull = await self.#waitForBufferSpace(state); if (!canPull) { state.detached = true; - self._consumers.delete(state); - if (self._sourceError) throw self._sourceError; + self.#consumers.delete(state); + if (self.#sourceError) throw self.#sourceError; return { __proto__: null, done: true, value: undefined }; } - await 
self._pullFromSource(); + await self.#pullFromSource(); - if (self._sourceError) { + if (self.#sourceError) { state.detached = true; - self._consumers.delete(state); - throw self._sourceError; + self.#consumers.delete(state); + throw self.#sourceError; } - const newBufferIndex = state.cursor - self._bufferStart; - if (newBufferIndex < self._buffer.length) { - const chunk = self._buffer.get(newBufferIndex); + const newBufferIndex = state.cursor - self.#bufferStart; + if (newBufferIndex < self.#buffer.length) { + const chunk = self.#buffer.get(newBufferIndex); state.cursor++; - self._tryTrimBuffer(); + self.#tryTrimBuffer(); return { __proto__: null, done: false, value: chunk }; } - if (self._sourceExhausted) { + if (self.#sourceExhausted) { state.detached = true; - self._consumers.delete(state); + self.#consumers.delete(state); return { __proto__: null, done: true, value: undefined }; } @@ -173,8 +176,8 @@ class ShareImpl { state.detached = true; state.resolve = null; state.reject = null; - self._consumers.delete(state); - self._tryTrimBuffer(); + self.#consumers.delete(state); + self.#tryTrimBuffer(); return { __proto__: null, done: true, value: undefined }; }, @@ -182,8 +185,8 @@ class ShareImpl { state.detached = true; state.resolve = null; state.reject = null; - self._consumers.delete(state); - self._tryTrimBuffer(); + self.#consumers.delete(state); + self.#tryTrimBuffer(); return { __proto__: null, done: true, value: undefined }; }, }; @@ -192,18 +195,18 @@ class ShareImpl { } cancel(reason) { - if (this._cancelled) return; - this._cancelled = true; + if (this.#cancelled) return; + this.#cancelled = true; if (reason) { - this._sourceError = reason; + this.#sourceError = reason; } - if (this._sourceIterator?.return) { - this._sourceIterator.return().catch(() => {}); + if (this.#sourceIterator?.return) { + this.#sourceIterator.return().catch(() => {}); } - for (const consumer of this._consumers) { + for (const consumer of this.#consumers) { if (consumer.resolve) 
{ if (reason) { consumer.reject?.(reason); @@ -215,12 +218,12 @@ class ShareImpl { } consumer.detached = true; } - this._consumers.clear(); + this.#consumers.clear(); - for (let i = 0; i < this._pullWaiters.length; i++) { - this._pullWaiters[i](); + for (let i = 0; i < this.#pullWaiters.length; i++) { + this.#pullWaiters[i](); } - this._pullWaiters = []; + this.#pullWaiters = []; } [SymbolDispose]() { @@ -229,28 +232,28 @@ class ShareImpl { // Internal methods - async _waitForBufferSpace(_state) { - while (this._buffer.length >= this._options.highWaterMark) { - if (this._cancelled || this._sourceError || this._sourceExhausted) { - return !this._cancelled; + async #waitForBufferSpace(_state) { + while (this.#buffer.length >= this.#options.highWaterMark) { + if (this.#cancelled || this.#sourceError || this.#sourceExhausted) { + return !this.#cancelled; } - switch (this._options.backpressure) { + switch (this.#options.backpressure) { case 'strict': throw new ERR_OUT_OF_RANGE( - 'buffer size', `<= ${this._options.highWaterMark}`, - this._buffer.length); + 'buffer size', `<= ${this.#options.highWaterMark}`, + this.#buffer.length); case 'block': await new Promise((resolve) => { - ArrayPrototypePush(this._pullWaiters, resolve); + ArrayPrototypePush(this.#pullWaiters, resolve); }); break; case 'drop-oldest': - this._buffer.shift(); - this._bufferStart++; - for (const consumer of this._consumers) { - if (consumer.cursor < this._bufferStart) { - consumer.cursor = this._bufferStart; + this.#buffer.shift(); + this.#bufferStart++; + for (const consumer of this.#consumers) { + if (consumer.cursor < this.#bufferStart) { + consumer.cursor = this.#bufferStart; } } return true; @@ -261,29 +264,29 @@ class ShareImpl { return true; } - _pullFromSource() { - if (this._sourceExhausted || this._cancelled) { + #pullFromSource() { + if (this.#sourceExhausted || this.#cancelled) { return PromiseResolve(); } - if (this._pulling) { + if (this.#pulling) { return new Promise((resolve) => { - 
ArrayPrototypePush(this._pullWaiters, resolve); + ArrayPrototypePush(this.#pullWaiters, resolve); }); } - this._pulling = true; + this.#pulling = true; return (async () => { try { - if (!this._sourceIterator) { - if (isAsyncIterable(this._source)) { - this._sourceIterator = - this._source[SymbolAsyncIterator](); - } else if (isSyncIterable(this._source)) { + if (!this.#sourceIterator) { + if (isAsyncIterable(this.#source)) { + this.#sourceIterator = + this.#source[SymbolAsyncIterator](); + } else if (isSyncIterable(this.#source)) { const syncIterator = - this._source[SymbolIterator](); - this._sourceIterator = { + this.#source[SymbolIterator](); + this.#sourceIterator = { async next() { return syncIterator.next(); }, @@ -294,52 +297,52 @@ class ShareImpl { }; } else { throw new ERR_INVALID_ARG_TYPE( - 'source', ['AsyncIterable', 'Iterable'], this._source); + 'source', ['AsyncIterable', 'Iterable'], this.#source); } } - const result = await this._sourceIterator.next(); + const result = await this.#sourceIterator.next(); if (result.done) { - this._sourceExhausted = true; + this.#sourceExhausted = true; } else { - this._buffer.push(result.value); + this.#buffer.push(result.value); } } catch (error) { - this._sourceError = + this.#sourceError = error instanceof Error ? error : new ERR_OPERATION_FAILED(String(error)); - this._sourceExhausted = true; + this.#sourceExhausted = true; } finally { - this._pulling = false; - for (let i = 0; i < this._pullWaiters.length; i++) { - this._pullWaiters[i](); + this.#pulling = false; + for (let i = 0; i < this.#pullWaiters.length; i++) { + this.#pullWaiters[i](); } - this._pullWaiters = []; + this.#pullWaiters = []; } })(); } - _getMinCursor() { + #getMinCursor() { let min = Infinity; - for (const consumer of this._consumers) { + for (const consumer of this.#consumers) { if (consumer.cursor < min) { min = consumer.cursor; } } return min === Infinity ? 
- this._bufferStart + this._buffer.length : min; + this.#bufferStart + this.#buffer.length : min; } - _tryTrimBuffer() { - const minCursor = this._getMinCursor(); - const trimCount = minCursor - this._bufferStart; + #tryTrimBuffer() { + const minCursor = this.#getMinCursor(); + const trimCount = minCursor - this.#bufferStart; if (trimCount > 0) { - this._buffer.trimFront(trimCount); - this._bufferStart = minCursor; - for (let i = 0; i < this._pullWaiters.length; i++) { - this._pullWaiters[i](); + this.#buffer.trimFront(trimCount); + this.#bufferStart = minCursor; + for (let i = 0; i < this.#pullWaiters.length; i++) { + this.#pullWaiters[i](); } - this._pullWaiters = []; + this.#pullWaiters = []; } } } @@ -349,28 +352,31 @@ class ShareImpl { // ============================================================================= class SyncShareImpl { + #source; + #options; + #buffer = new RingBuffer(); + #bufferStart = 0; + #consumers = new SafeSet(); + #sourceIterator = null; + #sourceExhausted = false; + #sourceError = null; + #cancelled = false; + constructor(source, options) { - this._source = source; - this._options = options; - this._buffer = new RingBuffer(); - this._bufferStart = 0; - this._consumers = new SafeSet(); - this._sourceIterator = null; - this._sourceExhausted = false; - this._sourceError = null; - this._cancelled = false; + this.#source = source; + this.#options = options; } get consumerCount() { - return this._consumers.size; + return this.#consumers.size; } get bufferSize() { - return this._buffer.length; + return this.#buffer.length; } pull(...transforms) { - const rawConsumer = this._createRawConsumer(); + const rawConsumer = this.#createRawConsumer(); if (transforms.length > 0) { return pullSyncWithTransforms(rawConsumer, ...transforms); @@ -378,13 +384,13 @@ class SyncShareImpl { return rawConsumer; } - _createRawConsumer() { + #createRawConsumer() { const state = { - cursor: this._bufferStart, + cursor: this.#bufferStart, detached: false, }; - 
this._consumers.add(state); + this.#consumers.add(state); const self = this; return { @@ -394,78 +400,78 @@ class SyncShareImpl { if (state.detached) { return { done: true, value: undefined }; } - if (self._sourceError) { + if (self.#sourceError) { state.detached = true; - self._consumers.delete(state); - throw self._sourceError; + self.#consumers.delete(state); + throw self.#sourceError; } - if (self._cancelled) { + if (self.#cancelled) { state.detached = true; - self._consumers.delete(state); + self.#consumers.delete(state); return { done: true, value: undefined }; } - const bufferIndex = state.cursor - self._bufferStart; - if (bufferIndex < self._buffer.length) { - const chunk = self._buffer.get(bufferIndex); + const bufferIndex = state.cursor - self.#bufferStart; + if (bufferIndex < self.#buffer.length) { + const chunk = self.#buffer.get(bufferIndex); state.cursor++; - self._tryTrimBuffer(); + self.#tryTrimBuffer(); return { done: false, value: chunk }; } - if (self._sourceExhausted) { + if (self.#sourceExhausted) { state.detached = true; - self._consumers.delete(state); + self.#consumers.delete(state); return { done: true, value: undefined }; } // Check buffer limit - if (self._buffer.length >= self._options.highWaterMark) { - switch (self._options.backpressure) { + if (self.#buffer.length >= self.#options.highWaterMark) { + switch (self.#options.backpressure) { case 'strict': throw new ERR_OUT_OF_RANGE( - 'buffer size', `<= ${self._options.highWaterMark}`, - self._buffer.length); + 'buffer size', `<= ${self.#options.highWaterMark}`, + self.#buffer.length); case 'block': throw new ERR_OUT_OF_RANGE( - 'buffer size', `<= ${self._options.highWaterMark} ` + + 'buffer size', `<= ${self.#options.highWaterMark} ` + '(blocking not available in sync context)', - self._buffer.length); + self.#buffer.length); case 'drop-oldest': - self._buffer.shift(); - self._bufferStart++; - for (const consumer of self._consumers) { - if (consumer.cursor < self._bufferStart) { - 
consumer.cursor = self._bufferStart; + self.#buffer.shift(); + self.#bufferStart++; + for (const consumer of self.#consumers) { + if (consumer.cursor < self.#bufferStart) { + consumer.cursor = self.#bufferStart; } } break; case 'drop-newest': state.detached = true; - self._consumers.delete(state); + self.#consumers.delete(state); return { done: true, value: undefined }; } } - self._pullFromSource(); + self.#pullFromSource(); - if (self._sourceError) { + if (self.#sourceError) { state.detached = true; - self._consumers.delete(state); - throw self._sourceError; + self.#consumers.delete(state); + throw self.#sourceError; } - const newBufferIndex = state.cursor - self._bufferStart; - if (newBufferIndex < self._buffer.length) { - const chunk = self._buffer.get(newBufferIndex); + const newBufferIndex = state.cursor - self.#bufferStart; + if (newBufferIndex < self.#buffer.length) { + const chunk = self.#buffer.get(newBufferIndex); state.cursor++; - self._tryTrimBuffer(); + self.#tryTrimBuffer(); return { done: false, value: chunk }; } - if (self._sourceExhausted) { + if (self.#sourceExhausted) { state.detached = true; - self._consumers.delete(state); + self.#consumers.delete(state); return { done: true, value: undefined }; } @@ -474,15 +480,15 @@ class SyncShareImpl { return() { state.detached = true; - self._consumers.delete(state); - self._tryTrimBuffer(); + self.#consumers.delete(state); + self.#tryTrimBuffer(); return { done: true, value: undefined }; }, throw() { state.detached = true; - self._consumers.delete(state); - self._tryTrimBuffer(); + self.#consumers.delete(state); + self.#tryTrimBuffer(); return { done: true, value: undefined }; }, }; @@ -491,64 +497,64 @@ class SyncShareImpl { } cancel(reason) { - if (this._cancelled) return; - this._cancelled = true; + if (this.#cancelled) return; + this.#cancelled = true; if (reason) { - this._sourceError = reason; + this.#sourceError = reason; } - if (this._sourceIterator?.return) { - this._sourceIterator.return(); + 
if (this.#sourceIterator?.return) { + this.#sourceIterator.return(); } - for (const consumer of this._consumers) { + for (const consumer of this.#consumers) { consumer.detached = true; } - this._consumers.clear(); + this.#consumers.clear(); } [SymbolDispose]() { this.cancel(); } - _pullFromSource() { - if (this._sourceExhausted || this._cancelled) return; + #pullFromSource() { + if (this.#sourceExhausted || this.#cancelled) return; try { - this._sourceIterator ||= this._source[SymbolIterator](); + this.#sourceIterator ||= this.#source[SymbolIterator](); - const result = this._sourceIterator.next(); + const result = this.#sourceIterator.next(); if (result.done) { - this._sourceExhausted = true; + this.#sourceExhausted = true; } else { - this._buffer.push(result.value); + this.#buffer.push(result.value); } } catch (error) { - this._sourceError = + this.#sourceError = error instanceof Error ? error : new ERR_OPERATION_FAILED(String(error)); - this._sourceExhausted = true; + this.#sourceExhausted = true; } } - _getMinCursor() { + #getMinCursor() { let min = Infinity; - for (const consumer of this._consumers) { + for (const consumer of this.#consumers) { if (consumer.cursor < min) { min = consumer.cursor; } } return min === Infinity ? 
- this._bufferStart + this._buffer.length : min; + this.#bufferStart + this.#buffer.length : min; } - _tryTrimBuffer() { - const minCursor = this._getMinCursor(); - const trimCount = minCursor - this._bufferStart; + #tryTrimBuffer() { + const minCursor = this.#getMinCursor(); + const trimCount = minCursor - this.#bufferStart; if (trimCount > 0) { - this._buffer.trimFront(trimCount); - this._bufferStart = minCursor; + this.#buffer.trimFront(trimCount); + this.#bufferStart = minCursor; } } } From 8669467b20f999a55b23dfcdbfb577798d21afbd Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 3 Mar 2026 10:58:24 -0800 Subject: [PATCH 11/42] stream: fixup from memory issue / batch yielding --- lib/internal/streams/new/from.js | 33 ++++++++++++++++++++++++++++---- 1 file changed, 29 insertions(+), 4 deletions(-) diff --git a/lib/internal/streams/new/from.js b/lib/internal/streams/new/from.js index 552ac800e33d0e..5837ab2b188d3e 100644 --- a/lib/internal/streams/new/from.js +++ b/lib/internal/streams/new/from.js @@ -35,6 +35,11 @@ const { // Shared TextEncoder instance for string conversion. const encoder = new TextEncoder(); +// Maximum number of chunks to yield per batch from from(Uint8Array[]). +// Bounds peak memory when arrays flow through transforms, which must +// allocate output for the entire batch at once. +const FROM_BATCH_SIZE = 128; + // ============================================================================= // Type Guards and Detection // ============================================================================= @@ -458,7 +463,11 @@ function fromSync(input) { }; } - // Fast path: Uint8Array[] - yield as a single batch + // Fast path: Uint8Array[] - yield in bounded sub-batches. + // Yielding the entire array as one batch forces downstream transforms + // to process all data at once, causing peak memory proportional to total + // data volume. Sub-batching keeps peak memory bounded while preserving + // the throughput benefit of batched processing. 
if (ArrayIsArray(input)) { if (input.length === 0) { return { @@ -475,7 +484,13 @@ function fromSync(input) { const batch = input; return { *[SymbolIterator]() { - yield batch; + if (batch.length <= FROM_BATCH_SIZE) { + yield batch; + } else { + for (let i = 0; i < batch.length; i += FROM_BATCH_SIZE) { + yield ArrayPrototypeSlice(batch, i, i + FROM_BATCH_SIZE); + } + } }, }; } @@ -514,7 +529,11 @@ function from(input) { }; } - // Fast path: Uint8Array[] - yield as a single batch + // Fast path: Uint8Array[] - yield in bounded sub-batches. + // Yielding the entire array as one batch forces downstream transforms + // to process all data at once, causing peak memory proportional to total + // data volume. Sub-batching keeps peak memory bounded while preserving + // the throughput benefit of batched processing. if (ArrayIsArray(input)) { if (input.length === 0) { return { @@ -530,7 +549,13 @@ function from(input) { const batch = input; return { async *[SymbolAsyncIterator]() { - yield batch; + if (batch.length <= FROM_BATCH_SIZE) { + yield batch; + } else { + for (let i = 0; i < batch.length; i += FROM_BATCH_SIZE) { + yield ArrayPrototypeSlice(batch, i, i + FROM_BATCH_SIZE); + } + } }, }; } From 63c887d66e3b375d6d03bdbfd9e5bd9bd95de011 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 3 Mar 2026 11:27:49 -0800 Subject: [PATCH 12/42] stream: update stream/new default highWaterMark to 4 benchmarking shows that a default HWM of 4 with Stream.push performs better in terms of memory and throughput than a default HWM of 1 --- doc/api/stream_new.md | 2 +- lib/internal/streams/new/push.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/api/stream_new.md b/doc/api/stream_new.md index b2671886f7fed7..1d0ec1aa32731f 100644 --- a/doc/api/stream_new.md +++ b/doc/api/stream_new.md @@ -640,7 +640,7 @@ added: REPLACEME * `options` {Object} * `highWaterMark` {number} Maximum number of buffered slots before backpressure is applied. 
Must be >= 1; values below 1 are clamped to 1. - **Default:** `1`. + **Default:** `4`. * `backpressure` {string} Backpressure policy: `'strict'`, `'block'`, `'drop-oldest'`, or `'drop-newest'`. **Default:** `'strict'`. * `signal` {AbortSignal} Abort the stream. diff --git a/lib/internal/streams/new/push.js b/lib/internal/streams/new/push.js index 791a9acbbfe3c2..683d692c7d22fa 100644 --- a/lib/internal/streams/new/push.js +++ b/lib/internal/streams/new/push.js @@ -71,7 +71,7 @@ class PushQueue { #abortHandler; constructor(options = {}) { - this.#highWaterMark = MathMax(1, options.highWaterMark ?? 1); + this.#highWaterMark = MathMax(1, options.highWaterMark ?? 4); this.#backpressure = options.backpressure ?? 'strict'; this.#signal = options.signal; this.#abortHandler = undefined; From 42da7add30ef5496d2a7a26159ebce01f665bf09 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 17 Mar 2026 18:40:19 -0700 Subject: [PATCH 13/42] stream: rename stream/new to stream/iter --- .../fs/bench-filehandle-pull-vs-webstream.js | 2 +- doc/api/fs.md | 20 ++-- doc/api/index.md | 2 +- doc/api/{stream_new.md => stream_iter.md} | 92 +++++++++---------- lib/internal/fs/promises.js | 4 +- .../streams/{new => iter}/broadcast.js | 10 +- .../streams/{new => iter}/consumers.js | 6 +- lib/internal/streams/{new => iter}/duplex.js | 2 +- lib/internal/streams/{new => iter}/from.js | 2 +- lib/internal/streams/{new => iter}/pull.js | 4 +- lib/internal/streams/{new => iter}/push.js | 8 +- .../streams/{new => iter}/ringbuffer.js | 0 lib/internal/streams/{new => iter}/share.js | 10 +- .../streams/{new => iter}/transform.js | 0 lib/internal/streams/{new => iter}/types.js | 0 lib/internal/streams/{new => iter}/utils.js | 0 lib/stream/{new.js => iter.js} | 22 ++--- .../test-fs-promises-file-handle-pull.js | 2 +- .../test-fs-promises-file-handle-writer.js | 2 +- ...dcast.js => test-stream-iter-broadcast.js} | 2 +- ...umers.js => test-stream-iter-consumers.js} | 4 +- ...w-duplex.js => 
test-stream-iter-duplex.js} | 2 +- ...m-new-from.js => test-stream-iter-from.js} | 2 +- ...space.js => test-stream-iter-namespace.js} | 8 +- ...m-new-pull.js => test-stream-iter-pull.js} | 2 +- ...m-new-push.js => test-stream-iter-push.js} | 2 +- ...new-share.js => test-stream-iter-share.js} | 2 +- ...sform.js => test-stream-iter-transform.js} | 2 +- 28 files changed, 107 insertions(+), 107 deletions(-) rename doc/api/{stream_new.md => stream_iter.md} (94%) rename lib/internal/streams/{new => iter}/broadcast.js (98%) rename lib/internal/streams/{new => iter}/consumers.js (99%) rename lib/internal/streams/{new => iter}/duplex.js (97%) rename lib/internal/streams/{new => iter}/from.js (99%) rename lib/internal/streams/{new => iter}/pull.js (99%) rename lib/internal/streams/{new => iter}/push.js (98%) rename lib/internal/streams/{new => iter}/ringbuffer.js (100%) rename lib/internal/streams/{new => iter}/share.js (98%) rename lib/internal/streams/{new => iter}/transform.js (100%) rename lib/internal/streams/{new => iter}/types.js (100%) rename lib/internal/streams/{new => iter}/utils.js (100%) rename lib/stream/{new.js => iter.js} (82%) rename test/parallel/{test-stream-new-broadcast.js => test-stream-iter-broadcast.js} (99%) rename test/parallel/{test-stream-new-consumers.js => test-stream-iter-consumers.js} (99%) rename test/parallel/{test-stream-new-duplex.js => test-stream-iter-duplex.js} (98%) rename test/parallel/{test-stream-new-from.js => test-stream-iter-from.js} (99%) rename test/parallel/{test-stream-new-namespace.js => test-stream-iter-namespace.js} (97%) rename test/parallel/{test-stream-new-pull.js => test-stream-iter-pull.js} (99%) rename test/parallel/{test-stream-new-push.js => test-stream-iter-push.js} (99%) rename test/parallel/{test-stream-new-share.js => test-stream-iter-share.js} (99%) rename test/parallel/{test-stream-new-transform.js => test-stream-iter-transform.js} (99%) diff --git a/benchmark/fs/bench-filehandle-pull-vs-webstream.js 
b/benchmark/fs/bench-filehandle-pull-vs-webstream.js index 5c81fe53b8bc50..298a8952bcdaf9 100644 --- a/benchmark/fs/bench-filehandle-pull-vs-webstream.js +++ b/benchmark/fs/bench-filehandle-pull-vs-webstream.js @@ -147,7 +147,7 @@ async function runWebStream() { // New streams path: pull() with uppercase transform + gzip transform // --------------------------------------------------------------------------- async function benchPull(n, filesize) { - const { pull, compressGzip } = require('stream/new'); + const { pull, compressGzip } = require('stream/iter'); // Warm up await runPull(pull, compressGzip); diff --git a/doc/api/fs.md b/doc/api/fs.md index 7705de1dab43d5..60ca3ce2941f61 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -386,7 +386,7 @@ added: REPLACEME > Stability: 1 - Experimental * `...transforms` {Function|Object} Optional transforms to apply via - [`stream/new pull()`][]. + [`stream/iter pull()`][]. * `options` {Object} * `signal` {AbortSignal} * `autoClose` {boolean} Close the file handle when the stream ends. @@ -394,15 +394,15 @@ added: REPLACEME * Returns: {AsyncIterable\} Return the file contents as an async iterable using the -[`node:stream/new`][] pull model. Reads are performed in 64 KB chunks. -If transforms are provided, they are applied via [`stream/new pull()`][]. +[`node:stream/iter`][] pull model. Reads are performed in 64 KB chunks. +If transforms are provided, they are applied via [`stream/iter pull()`][]. The file handle is locked while the iterable is being consumed and unlocked when iteration completes. 
```mjs import { open } from 'node:fs/promises'; -import { text, compressGzip } from 'node:stream/new'; +import { text, compressGzip } from 'node:stream/iter'; const fh = await open('input.txt', 'r'); @@ -416,7 +416,7 @@ const compressed = fh2.pull(compressGzip(), { autoClose: true }); ```cjs const { open } = require('node:fs/promises'); -const { text, compressGzip } = require('node:stream/new'); +const { text, compressGzip } = require('node:stream/iter'); async function run() { const fh = await open('input.txt', 'r'); @@ -949,14 +949,14 @@ added: REPLACEME cleanup. Marks the writer as closed so subsequent writes fail immediately. Cannot honor `autoClose` (requires async I/O). -Return a [`node:stream/new`][] writer backed by this file handle. +Return a [`node:stream/iter`][] writer backed by this file handle. The writer supports `Symbol.asyncDispose`, so it can be used with `await using`. ```mjs import { open } from 'node:fs/promises'; -import { from, pipeTo, compressGzip } from 'node:stream/new'; +import { from, pipeTo, compressGzip } from 'node:stream/iter'; const fh = await open('output.gz', 'w'); const w = fh.writer({ autoClose: true }); @@ -966,7 +966,7 @@ await w.end(); ```cjs const { open } = require('node:fs/promises'); -const { from, pipeTo, compressGzip } = require('node:stream/new'); +const { from, pipeTo, compressGzip } = require('node:stream/iter'); async function run() { const fh = await open('output.gz', 'w'); @@ -8906,8 +8906,8 @@ the file contents. 
[`inotify(7)`]: https://man7.org/linux/man-pages/man7/inotify.7.html [`kqueue(2)`]: https://www.freebsd.org/cgi/man.cgi?query=kqueue&sektion=2 [`minimatch`]: https://github.com/isaacs/minimatch -[`node:stream/new`]: stream_new.md -[`stream/new pull()`]: stream_new.md#pullsource-transforms-options +[`node:stream/iter`]: stream_iter.md +[`stream/iter pull()`]: stream_iter.md#pullsource-transforms-options [`util.promisify()`]: util.md#utilpromisifyoriginal [bigints]: https://tc39.github.io/proposal-bigint [caveats]: #caveats diff --git a/doc/api/index.md b/doc/api/index.md index c6e4a1dd2ef568..762c3e6d0ee55a 100644 --- a/doc/api/index.md +++ b/doc/api/index.md @@ -40,7 +40,7 @@ * [Modules: Packages](packages.md) * [Modules: TypeScript](typescript.md) * [Net](net.md) -* [New Streams API](stream_new.md) +* [Iterable Streams API](stream_iter.md) * [OS](os.md) * [Path](path.md) * [Performance hooks](perf_hooks.md) diff --git a/doc/api/stream_new.md b/doc/api/stream_iter.md similarity index 94% rename from doc/api/stream_new.md rename to doc/api/stream_iter.md index 1d0ec1aa32731f..7438c1e97f7a5b 100644 --- a/doc/api/stream_new.md +++ b/doc/api/stream_iter.md @@ -4,9 +4,9 @@ > Stability: 1 - Experimental - + -The `node:stream/new` module provides a new streaming API built on iterables +The `node:stream/iter` module provides a new streaming API built on iterables rather than the event-driven `Readable`/`Writable`/`Transform` class hierarchy. Streams are represented as `AsyncIterable` (async) or @@ -18,7 +18,7 @@ Data flows in **batches** (`Uint8Array[]` per iteration) to amortize the cost of async operations. ```mjs -import { from, pull, text, compressGzip, decompressGzip } from 'node:stream/new'; +import { from, pull, text, compressGzip, decompressGzip } from 'node:stream/iter'; // Compress and decompress a string const compressed = pull(from('Hello, world!'), compressGzip()); @@ -27,7 +27,7 @@ console.log(result); // 'Hello, world!' 
``` ```cjs -const { from, pull, text, compressGzip, decompressGzip } = require('node:stream/new'); +const { from, pull, text, compressGzip, decompressGzip } = require('node:stream/iter'); async function run() { // Compress and decompress a string @@ -41,7 +41,7 @@ run().catch(console.error); ```mjs import { open } from 'node:fs/promises'; -import { text, compressGzip, decompressGzip, pipeTo } from 'node:stream/new'; +import { text, compressGzip, decompressGzip, pipeTo } from 'node:stream/iter'; // Read a file, compress, write to another file const src = await open('input.txt', 'r'); @@ -56,7 +56,7 @@ console.log(await text(gz.pull(decompressGzip(), { autoClose: true }))); ```cjs const { open } = require('node:fs/promises'); -const { text, compressGzip, decompressGzip, pipeTo } = require('node:stream/new'); +const { text, compressGzip, decompressGzip, pipeTo } = require('node:stream/iter'); async function run() { // Read a file, compress, write to another file @@ -236,7 +236,7 @@ Unawaited writes accumulate in the pending queue and throw once it overflows: ```mjs -import { push, text } from 'node:stream/new'; +import { push, text } from 'node:stream/iter'; const { writer, readable } = push({ highWaterMark: 16 }); @@ -254,7 +254,7 @@ console.log(await consuming); ``` ```cjs -const { push, text } = require('node:stream/new'); +const { push, text } = require('node:stream/iter'); async function run() { const { writer, readable } = push({ highWaterMark: 16 }); @@ -301,7 +301,7 @@ default to. Use it when you control the producer and know it awaits properly, or when migrating code from those APIs. 
```mjs -import { push, text } from 'node:stream/new'; +import { push, text } from 'node:stream/iter'; const { writer, readable } = push({ highWaterMark: 16, @@ -319,7 +319,7 @@ console.log(await consuming); ``` ```cjs -const { push, text } = require('node:stream/new'); +const { push, text } = require('node:stream/iter'); async function run() { const { writer, readable } = push({ @@ -348,7 +348,7 @@ always sees the most recent data. Useful for live feeds, telemetry, or any scenario where stale data is less valuable than current data. ```mjs -import { push } from 'node:stream/new'; +import { push } from 'node:stream/iter'; // Keep only the 5 most recent readings const { writer, readable } = push({ @@ -358,7 +358,7 @@ const { writer, readable } = push({ ``` ```cjs -const { push } = require('node:stream/new'); +const { push } = require('node:stream/iter'); // Keep only the 5 most recent readings const { writer, readable } = push({ @@ -375,7 +375,7 @@ without being overwhelmed by new data. Useful for rate-limiting or shedding load under pressure. ```mjs -import { push } from 'node:stream/new'; +import { push } from 'node:stream/iter'; // Accept up to 10 buffered items; discard anything beyond that const { writer, readable } = push({ @@ -385,7 +385,7 @@ const { writer, readable } = push({ ``` ```cjs -const { push } = require('node:stream/new'); +const { push } = require('node:stream/iter'); // Accept up to 10 buffered items; discard anything beyond that const { writer, readable } = push({ @@ -401,14 +401,14 @@ support `writev(chunks)` for batch writes (mapped to scatter/gather I/O where available), `end()` to signal completion, and `fail(reason)` to signal failure. 
-## `require('node:stream/new')` +## `require('node:stream/iter')` All functions are available both as named exports and as properties of the `Stream` namespace object: ```mjs // Named exports -import { from, pull, bytes, Stream } from 'node:stream/new'; +import { from, pull, bytes, Stream } from 'node:stream/iter'; // Namespace access Stream.from('hello'); @@ -416,7 +416,7 @@ Stream.from('hello'); ```cjs // Named exports -const { from, pull, bytes, Stream } = require('node:stream/new'); +const { from, pull, bytes, Stream } = require('node:stream/iter'); // Namespace access Stream.from('hello'); @@ -442,14 +442,14 @@ Objects implementing `Symbol.for('Stream.toAsyncStreamable')` or ```mjs import { Buffer } from 'node:buffer'; -import { from, text } from 'node:stream/new'; +import { from, text } from 'node:stream/iter'; console.log(await text(from('hello'))); // 'hello' console.log(await text(from(Buffer.from('hello')))); // 'hello' ``` ```cjs -const { from, text } = require('node:stream/new'); +const { from, text } = require('node:stream/iter'); async function run() { console.log(await text(from('hello'))); // 'hello' @@ -472,13 +472,13 @@ Synchronous version of [`from()`][]. Returns a sync iterable. Cannot accept async iterables or promises. ```mjs -import { fromSync, textSync } from 'node:stream/new'; +import { fromSync, textSync } from 'node:stream/iter'; console.log(textSync(fromSync('hello'))); // 'hello' ``` ```cjs -const { fromSync, textSync } = require('node:stream/new'); +const { fromSync, textSync } = require('node:stream/iter'); console.log(textSync(fromSync('hello'))); // 'hello' ``` @@ -507,7 +507,7 @@ Pipe a source through transforms into a writer. If the writer has a scatter/gather I/O). 
```mjs -import { from, pipeTo, compressGzip } from 'node:stream/new'; +import { from, pipeTo, compressGzip } from 'node:stream/iter'; import { open } from 'node:fs/promises'; const fh = await open('output.gz', 'w'); @@ -519,7 +519,7 @@ const totalBytes = await pipeTo( ``` ```cjs -const { from, pipeTo, compressGzip } = require('node:stream/new'); +const { from, pipeTo, compressGzip } = require('node:stream/iter'); const { open } = require('node:fs/promises'); async function run() { @@ -566,7 +566,7 @@ Create a lazy async pipeline. Data is not read from `source` until the returned iterable is consumed. Transforms are applied in order. ```mjs -import { from, pull, text } from 'node:stream/new'; +import { from, pull, text } from 'node:stream/iter'; const upper = (chunks) => { if (chunks === null) return null; @@ -580,7 +580,7 @@ console.log(await text(result)); // 'HELLO' ``` ```cjs -const { from, pull, text } = require('node:stream/new'); +const { from, pull, text } = require('node:stream/iter'); const upper = (chunks) => { if (chunks === null) return null; @@ -600,7 +600,7 @@ run().catch(console.error); Using an `AbortSignal`: ```mjs -import { pull } from 'node:stream/new'; +import { pull } from 'node:stream/iter'; const ac = new AbortController(); const result = pull(source, transform, { signal: ac.signal }); @@ -608,7 +608,7 @@ ac.abort(); // Pipeline throws AbortError on next iteration ``` ```cjs -const { pull } = require('node:stream/new'); +const { pull } = require('node:stream/iter'); const ac = new AbortController(); const result = pull(source, transform, { signal: ac.signal }); @@ -652,7 +652,7 @@ Create a push stream with backpressure. The writer pushes data in; the readable side is consumed as an async iterable. 
```mjs -import { push, text } from 'node:stream/new'; +import { push, text } from 'node:stream/iter'; const { writer, readable } = push(); @@ -669,7 +669,7 @@ await producing; ``` ```cjs -const { push, text } = require('node:stream/new'); +const { push, text } = require('node:stream/iter'); async function run() { const { writer, readable } = push(); @@ -828,14 +828,14 @@ added: REPLACEME Collect all bytes from a stream into a single `Uint8Array`. ```mjs -import { from, bytes } from 'node:stream/new'; +import { from, bytes } from 'node:stream/iter'; const data = await bytes(from('hello')); console.log(data); // Uint8Array(5) [ 104, 101, 108, 108, 111 ] ``` ```cjs -const { from, bytes } = require('node:stream/new'); +const { from, bytes } = require('node:stream/iter'); async function run() { const data = await bytes(from('hello')); @@ -874,13 +874,13 @@ added: REPLACEME Collect all bytes and decode as text. ```mjs -import { from, text } from 'node:stream/new'; +import { from, text } from 'node:stream/iter'; console.log(await text(from('hello'))); // 'hello' ``` ```cjs -const { from, text } = require('node:stream/new'); +const { from, text } = require('node:stream/iter'); async function run() { console.log(await text(from('hello'))); // 'hello' @@ -921,14 +921,14 @@ Merge multiple async iterables by yielding batches in temporal order concurrently. ```mjs -import { from, merge, text } from 'node:stream/new'; +import { from, merge, text } from 'node:stream/iter'; const merged = merge(from('hello '), from('world')); console.log(await text(merged)); // Order depends on timing ``` ```cjs -const { from, merge, text } = require('node:stream/new'); +const { from, merge, text } = require('node:stream/iter'); async function run() { const merged = merge(from('hello '), from('world')); @@ -952,7 +952,7 @@ resolves to `true` when the writer can accept more data, or `null` if the object does not implement the drainable protocol. 
```mjs -import { push, ondrain, text } from 'node:stream/new'; +import { push, ondrain, text } from 'node:stream/iter'; const { writer, readable } = push({ highWaterMark: 2 }); writer.writeSync('a'); @@ -971,7 +971,7 @@ await consuming; ``` ```cjs -const { push, ondrain, text } = require('node:stream/new'); +const { push, ondrain, text } = require('node:stream/iter'); async function run() { const { writer, readable } = push({ highWaterMark: 2 }); @@ -1006,7 +1006,7 @@ Create a pass-through transform that observes batches without modifying them. Useful for logging, metrics, or debugging. ```mjs -import { from, pull, text, tap } from 'node:stream/new'; +import { from, pull, text, tap } from 'node:stream/iter'; const result = pull( from('hello'), @@ -1016,7 +1016,7 @@ console.log(await text(result)); ``` ```cjs -const { from, pull, text, tap } = require('node:stream/new'); +const { from, pull, text, tap } = require('node:stream/iter'); async function run() { const result = pull( @@ -1062,7 +1062,7 @@ data to multiple consumers. Each consumer has an independent cursor into a shared buffer. ```mjs -import { broadcast, text } from 'node:stream/new'; +import { broadcast, text } from 'node:stream/iter'; const { writer, broadcast: bc } = broadcast(); @@ -1084,7 +1084,7 @@ await producing; ``` ```cjs -const { broadcast, text } = require('node:stream/new'); +const { broadcast, text } = require('node:stream/iter'); async function run() { const { writer, broadcast: bc } = broadcast(); @@ -1162,7 +1162,7 @@ source is only read when a consumer pulls. Multiple consumers share a single buffer. 
```mjs -import { from, share, text } from 'node:stream/new'; +import { from, share, text } from 'node:stream/iter'; const shared = share(from('hello')); @@ -1174,7 +1174,7 @@ console.log(await text(c2)); // 'hello' ``` ```cjs -const { from, share, text } = require('node:stream/new'); +const { from, share, text } = require('node:stream/iter'); async function run() { const shared = share(from('hello')); @@ -1318,7 +1318,7 @@ Create a gzip compression transform. Output is compatible with `zlib.gunzip()` and `decompressGzip()`. ```mjs -import { from, pull, bytes, text, compressGzip, decompressGzip } from 'node:stream/new'; +import { from, pull, bytes, text, compressGzip, decompressGzip } from 'node:stream/iter'; const compressed = await bytes(pull(from('hello'), compressGzip())); const original = await text(pull(from(compressed), decompressGzip())); @@ -1326,7 +1326,7 @@ console.log(original); // 'hello' ``` ```cjs -const { from, pull, bytes, text, compressGzip, decompressGzip } = require('node:stream/new'); +const { from, pull, bytes, text, compressGzip, decompressGzip } = require('node:stream/iter'); async function run() { const compressed = await bytes(pull(from('hello'), compressGzip())); @@ -1439,7 +1439,7 @@ Create a Zstandard decompression transform. ## Protocol symbols These well-known symbols allow third-party objects to participate in the -streaming protocol without importing from `node:stream/new` directly. +streaming protocol without importing from `node:stream/iter` directly. 
### `Stream.broadcastProtocol` diff --git a/lib/internal/fs/promises.js b/lib/internal/fs/promises.js index b78c121de737b9..dba30f3bfee5c2 100644 --- a/lib/internal/fs/promises.js +++ b/lib/internal/fs/promises.js @@ -146,9 +146,9 @@ let newStreamsPull; let newStreamsParsePullArgs; function lazyNewStreams() { if (newStreamsPull === undefined) { - newStreamsPull = require('internal/streams/new/pull').pull; + newStreamsPull = require('internal/streams/iter/pull').pull; newStreamsParsePullArgs = - require('internal/streams/new/utils').parsePullArgs; + require('internal/streams/iter/utils').parsePullArgs; } } diff --git a/lib/internal/streams/new/broadcast.js b/lib/internal/streams/iter/broadcast.js similarity index 98% rename from lib/internal/streams/new/broadcast.js rename to lib/internal/streams/iter/broadcast.js index 1c671571fbe676..60bc543f42ead0 100644 --- a/lib/internal/streams/new/broadcast.js +++ b/lib/internal/streams/iter/broadcast.js @@ -36,24 +36,24 @@ const { const { broadcastProtocol, drainableProtocol, -} = require('internal/streams/new/types'); +} = require('internal/streams/iter/types'); const { isAsyncIterable, isSyncIterable, -} = require('internal/streams/new/from'); +} = require('internal/streams/iter/from'); const { pull: pullWithTransforms, -} = require('internal/streams/new/pull'); +} = require('internal/streams/iter/pull'); const { allUint8Array, -} = require('internal/streams/new/utils'); +} = require('internal/streams/iter/utils'); const { RingBuffer, -} = require('internal/streams/new/ringbuffer'); +} = require('internal/streams/iter/ringbuffer'); const encoder = new TextEncoder(); diff --git a/lib/internal/streams/new/consumers.js b/lib/internal/streams/iter/consumers.js similarity index 99% rename from lib/internal/streams/new/consumers.js rename to lib/internal/streams/iter/consumers.js index 4bf6330905cd6e..46ae87a2b39d5c 100644 --- a/lib/internal/streams/new/consumers.js +++ b/lib/internal/streams/iter/consumers.js @@ -29,15 +29,15 
@@ const { lazyDOMException } = require('internal/util'); const { isAsyncIterable, isSyncIterable, -} = require('internal/streams/new/from'); +} = require('internal/streams/iter/from'); const { concatBytes, -} = require('internal/streams/new/utils'); +} = require('internal/streams/iter/utils'); const { drainableProtocol, -} = require('internal/streams/new/types'); +} = require('internal/streams/iter/types'); // ============================================================================= // Type Guards diff --git a/lib/internal/streams/new/duplex.js b/lib/internal/streams/iter/duplex.js similarity index 97% rename from lib/internal/streams/new/duplex.js rename to lib/internal/streams/iter/duplex.js index 272bf0a816dca2..bce898443d6552 100644 --- a/lib/internal/streams/new/duplex.js +++ b/lib/internal/streams/iter/duplex.js @@ -11,7 +11,7 @@ const { const { push, -} = require('internal/streams/new/push'); +} = require('internal/streams/iter/push'); /** * Create a pair of connected duplex channels for bidirectional communication. diff --git a/lib/internal/streams/new/from.js b/lib/internal/streams/iter/from.js similarity index 99% rename from lib/internal/streams/new/from.js rename to lib/internal/streams/iter/from.js index 5837ab2b188d3e..a0c40c9f356275 100644 --- a/lib/internal/streams/new/from.js +++ b/lib/internal/streams/iter/from.js @@ -30,7 +30,7 @@ const { TextEncoder } = require('internal/encoding'); const { toStreamable, toAsyncStreamable, -} = require('internal/streams/new/types'); +} = require('internal/streams/iter/types'); // Shared TextEncoder instance for string conversion. 
const encoder = new TextEncoder(); diff --git a/lib/internal/streams/new/pull.js b/lib/internal/streams/iter/pull.js similarity index 99% rename from lib/internal/streams/new/pull.js rename to lib/internal/streams/iter/pull.js index 057780c52b5f1a..3b0b2f6ad6faaf 100644 --- a/lib/internal/streams/new/pull.js +++ b/lib/internal/streams/iter/pull.js @@ -36,12 +36,12 @@ const { isSyncIterable, isAsyncIterable, isUint8ArrayBatch, -} = require('internal/streams/new/from'); +} = require('internal/streams/iter/from'); const { isPullOptions, parsePullArgs, -} = require('internal/streams/new/utils'); +} = require('internal/streams/iter/utils'); // Shared TextEncoder instance for string conversion. const encoder = new TextEncoder(); diff --git a/lib/internal/streams/new/push.js b/lib/internal/streams/iter/push.js similarity index 98% rename from lib/internal/streams/new/push.js rename to lib/internal/streams/iter/push.js index 683d692c7d22fa..8a40d7004691b1 100644 --- a/lib/internal/streams/new/push.js +++ b/lib/internal/streams/iter/push.js @@ -24,20 +24,20 @@ const { lazyDOMException } = require('internal/util'); const { drainableProtocol, -} = require('internal/streams/new/types'); +} = require('internal/streams/iter/types'); const { toUint8Array, allUint8Array, -} = require('internal/streams/new/utils'); +} = require('internal/streams/iter/utils'); const { pull: pullWithTransforms, -} = require('internal/streams/new/pull'); +} = require('internal/streams/iter/pull'); const { RingBuffer, -} = require('internal/streams/new/ringbuffer'); +} = require('internal/streams/iter/ringbuffer'); // Cached resolved promise to avoid allocating a new one on every sync fast-path. 
const kResolvedPromise = PromiseResolve(); diff --git a/lib/internal/streams/new/ringbuffer.js b/lib/internal/streams/iter/ringbuffer.js similarity index 100% rename from lib/internal/streams/new/ringbuffer.js rename to lib/internal/streams/iter/ringbuffer.js diff --git a/lib/internal/streams/new/share.js b/lib/internal/streams/iter/share.js similarity index 98% rename from lib/internal/streams/new/share.js rename to lib/internal/streams/iter/share.js index 46ab88709dcb1c..1ef73bf2f45a6f 100644 --- a/lib/internal/streams/new/share.js +++ b/lib/internal/streams/iter/share.js @@ -21,25 +21,25 @@ const { const { shareProtocol, shareSyncProtocol, -} = require('internal/streams/new/types'); +} = require('internal/streams/iter/types'); const { isAsyncIterable, isSyncIterable, -} = require('internal/streams/new/from'); +} = require('internal/streams/iter/from'); const { pull: pullWithTransforms, pullSync: pullSyncWithTransforms, -} = require('internal/streams/new/pull'); +} = require('internal/streams/iter/pull'); const { parsePullArgs, -} = require('internal/streams/new/utils'); +} = require('internal/streams/iter/utils'); const { RingBuffer, -} = require('internal/streams/new/ringbuffer'); +} = require('internal/streams/iter/ringbuffer'); const { codes: { diff --git a/lib/internal/streams/new/transform.js b/lib/internal/streams/iter/transform.js similarity index 100% rename from lib/internal/streams/new/transform.js rename to lib/internal/streams/iter/transform.js diff --git a/lib/internal/streams/new/types.js b/lib/internal/streams/iter/types.js similarity index 100% rename from lib/internal/streams/new/types.js rename to lib/internal/streams/iter/types.js diff --git a/lib/internal/streams/new/utils.js b/lib/internal/streams/iter/utils.js similarity index 100% rename from lib/internal/streams/new/utils.js rename to lib/internal/streams/iter/utils.js diff --git a/lib/stream/new.js b/lib/stream/iter.js similarity index 82% rename from lib/stream/new.js rename to 
lib/stream/iter.js index 27596be8ba0604..c8e6c45b3eeed8 100644 --- a/lib/stream/new.js +++ b/lib/stream/iter.js @@ -1,7 +1,7 @@ 'use strict'; // Public entry point for the new streams API. -// Usage: require('stream/new') or require('node:stream/new') +// Usage: require('stream/iter') or require('node:stream/iter') const { ObjectFreeze, @@ -15,12 +15,12 @@ const { shareProtocol, shareSyncProtocol, drainableProtocol, -} = require('internal/streams/new/types'); +} = require('internal/streams/iter/types'); // Factories -const { push } = require('internal/streams/new/push'); -const { duplex } = require('internal/streams/new/duplex'); -const { from, fromSync } = require('internal/streams/new/from'); +const { push } = require('internal/streams/iter/push'); +const { duplex } = require('internal/streams/iter/duplex'); +const { from, fromSync } = require('internal/streams/iter/from'); // Pipelines const { @@ -28,7 +28,7 @@ const { pullSync, pipeTo, pipeToSync, -} = require('internal/streams/new/pull'); +} = require('internal/streams/iter/pull'); // Consumers const { @@ -44,7 +44,7 @@ const { tapSync, merge, ondrain, -} = require('internal/streams/new/consumers'); +} = require('internal/streams/iter/consumers'); // Transforms const { @@ -56,21 +56,21 @@ const { decompressDeflate, decompressBrotli, decompressZstd, -} = require('internal/streams/new/transform'); +} = require('internal/streams/iter/transform'); // Multi-consumer -const { broadcast, Broadcast } = require('internal/streams/new/broadcast'); +const { broadcast, Broadcast } = require('internal/streams/iter/broadcast'); const { share, shareSync, Share, SyncShare, -} = require('internal/streams/new/share'); +} = require('internal/streams/iter/share'); /** * Stream namespace - unified access to all stream functions. 
* @example - * const { Stream } = require('stream/new'); + * const { Stream } = require('stream/iter'); * * const { writer, readable } = Stream.push(); * await writer.write("hello"); diff --git a/test/parallel/test-fs-promises-file-handle-pull.js b/test/parallel/test-fs-promises-file-handle-pull.js index 60a2160cbae4b9..6c971664aafbaa 100644 --- a/test/parallel/test-fs-promises-file-handle-pull.js +++ b/test/parallel/test-fs-promises-file-handle-pull.js @@ -6,7 +6,7 @@ const fs = require('fs'); const { open } = fs.promises; const path = require('path'); const tmpdir = require('../common/tmpdir'); -const { text, bytes } = require('stream/new'); +const { text, bytes } = require('stream/iter'); tmpdir.refresh(); diff --git a/test/parallel/test-fs-promises-file-handle-writer.js b/test/parallel/test-fs-promises-file-handle-writer.js index 84168e54045a8d..920cb3976a00d0 100644 --- a/test/parallel/test-fs-promises-file-handle-writer.js +++ b/test/parallel/test-fs-promises-file-handle-writer.js @@ -9,7 +9,7 @@ const tmpdir = require('../common/tmpdir'); const { pipeTo, text, compressGzip, decompressGzip, -} = require('stream/new'); +} = require('stream/iter'); tmpdir.refresh(); diff --git a/test/parallel/test-stream-new-broadcast.js b/test/parallel/test-stream-iter-broadcast.js similarity index 99% rename from test/parallel/test-stream-new-broadcast.js rename to test/parallel/test-stream-iter-broadcast.js index a7445f856c674d..b0aea6e3b5ec48 100644 --- a/test/parallel/test-stream-new-broadcast.js +++ b/test/parallel/test-stream-iter-broadcast.js @@ -2,7 +2,7 @@ const common = require('../common'); const assert = require('assert'); -const { broadcast, Broadcast, from, text } = require('stream/new'); +const { broadcast, Broadcast, from, text } = require('stream/iter'); // ============================================================================= // Basic broadcast diff --git a/test/parallel/test-stream-new-consumers.js b/test/parallel/test-stream-iter-consumers.js 
similarity index 99% rename from test/parallel/test-stream-new-consumers.js rename to test/parallel/test-stream-iter-consumers.js index 68561ecf18d834..3866f9bd00af2f 100644 --- a/test/parallel/test-stream-new-consumers.js +++ b/test/parallel/test-stream-iter-consumers.js @@ -17,7 +17,7 @@ const { tap, tapSync, merge, -} = require('stream/new'); +} = require('stream/iter'); // ============================================================================= // bytesSync / bytes @@ -222,7 +222,7 @@ async function testTapInPipeline() { writer.end(); // Use pull with tap as a transform - const { pull } = require('stream/new'); + const { pull } = require('stream/iter'); const result = pull(readable, observer); const data = await text(result); diff --git a/test/parallel/test-stream-new-duplex.js b/test/parallel/test-stream-iter-duplex.js similarity index 98% rename from test/parallel/test-stream-new-duplex.js rename to test/parallel/test-stream-iter-duplex.js index 7692cb53e360d2..74d60acec74dee 100644 --- a/test/parallel/test-stream-new-duplex.js +++ b/test/parallel/test-stream-iter-duplex.js @@ -2,7 +2,7 @@ const common = require('../common'); const assert = require('assert'); -const { duplex, text, bytes } = require('stream/new'); +const { duplex, text, bytes } = require('stream/iter'); // ============================================================================= // Basic duplex diff --git a/test/parallel/test-stream-new-from.js b/test/parallel/test-stream-iter-from.js similarity index 99% rename from test/parallel/test-stream-new-from.js rename to test/parallel/test-stream-iter-from.js index e4925128cd26a2..4c23f412a7bd8d 100644 --- a/test/parallel/test-stream-new-from.js +++ b/test/parallel/test-stream-iter-from.js @@ -2,7 +2,7 @@ const common = require('../common'); const assert = require('assert'); -const { from, fromSync, Stream } = require('stream/new'); +const { from, fromSync, Stream } = require('stream/iter'); // 
============================================================================= // fromSync() tests diff --git a/test/parallel/test-stream-new-namespace.js b/test/parallel/test-stream-iter-namespace.js similarity index 97% rename from test/parallel/test-stream-new-namespace.js rename to test/parallel/test-stream-iter-namespace.js index cacdead5a19ea0..73330a8cbef44a 100644 --- a/test/parallel/test-stream-new-namespace.js +++ b/test/parallel/test-stream-iter-namespace.js @@ -2,7 +2,7 @@ const common = require('../common'); const assert = require('assert'); -const streamNew = require('stream/new'); +const streamNew = require('stream/iter'); // ============================================================================= // Stream namespace object @@ -184,9 +184,9 @@ async function testNamespaceMatchesExports() { // ============================================================================= async function testRequirePaths() { - // Both require('stream/new') and require('node:stream/new') should work - const fromPlain = require('stream/new'); - const fromNode = require('node:stream/new'); + // Both require('stream/iter') and require('node:stream/iter') should work + const fromPlain = require('stream/iter'); + const fromNode = require('node:stream/iter'); assert.strictEqual(fromPlain.Stream, fromNode.Stream); assert.strictEqual(fromPlain.push, fromNode.push); diff --git a/test/parallel/test-stream-new-pull.js b/test/parallel/test-stream-iter-pull.js similarity index 99% rename from test/parallel/test-stream-new-pull.js rename to test/parallel/test-stream-iter-pull.js index e6ddaf46d99fb2..a5206fe6a955e7 100644 --- a/test/parallel/test-stream-new-pull.js +++ b/test/parallel/test-stream-iter-pull.js @@ -3,7 +3,7 @@ const common = require('../common'); const assert = require('assert'); const { pull, pullSync, pipeTo, pipeToSync, from, fromSync, bytesSync, - text } = require('stream/new'); + text } = require('stream/iter'); // 
============================================================================= // pullSync() tests diff --git a/test/parallel/test-stream-new-push.js b/test/parallel/test-stream-iter-push.js similarity index 99% rename from test/parallel/test-stream-new-push.js rename to test/parallel/test-stream-iter-push.js index 3475bfd6b6ab7c..8b9b0ec97d5378 100644 --- a/test/parallel/test-stream-new-push.js +++ b/test/parallel/test-stream-iter-push.js @@ -2,7 +2,7 @@ const common = require('../common'); const assert = require('assert'); -const { push, text, ondrain } = require('stream/new'); +const { push, text, ondrain } = require('stream/iter'); async function testBasicWriteRead() { const { writer, readable } = push(); diff --git a/test/parallel/test-stream-new-share.js b/test/parallel/test-stream-iter-share.js similarity index 99% rename from test/parallel/test-stream-new-share.js rename to test/parallel/test-stream-iter-share.js index a97ae62deaf3ad..fcfb5cec1a6401 100644 --- a/test/parallel/test-stream-new-share.js +++ b/test/parallel/test-stream-iter-share.js @@ -12,7 +12,7 @@ const { text, textSync, -} = require('stream/new'); +} = require('stream/iter'); // ============================================================================= // Async share() diff --git a/test/parallel/test-stream-new-transform.js b/test/parallel/test-stream-iter-transform.js similarity index 99% rename from test/parallel/test-stream-new-transform.js rename to test/parallel/test-stream-iter-transform.js index fb027dcff5497d..788240b71c519c 100644 --- a/test/parallel/test-stream-new-transform.js +++ b/test/parallel/test-stream-iter-transform.js @@ -15,7 +15,7 @@ const { decompressDeflate, decompressBrotli, decompressZstd, -} = require('stream/new'); +} = require('stream/iter'); // ============================================================================= // Helper: compress then decompress, verify round-trip equality From 6af362e3a99de9b5e7ca805bd64ef04e678f00aa Mon Sep 17 00:00:00 2001 From: 
James M Snell Date: Tue, 17 Mar 2026 19:33:45 -0700 Subject: [PATCH 14/42] stream: gate stream/iter behind --experimental-stream-iter Add the `--experimental-stream-iter`` CLI flag to gate access to the `node:stream/iter` module. When the flag is not specified, attempting to import the module will throw `ERR_UNKNOWN_BUILTIN_MODULE`. The module is added to the `experimentalModuleList` in `realm.js` and to the `cannot_be_required` set in `node_builtins.cc`. A setup function in `pre_execution.js` enables access at runtime when the flag is present. An experimental warning is emitted on first load. The module is accessible both with and without the node: prefix when the flag is enabled. --- .../fs/bench-filehandle-pull-vs-webstream.js | 1 + doc/api/cli.md | 12 ++++++++ doc/api/stream_iter.md | 10 +++++-- doc/node.1 | 5 ++++ lib/internal/bootstrap/realm.js | 2 +- lib/internal/process/pre_execution.js | 10 +++++++ lib/stream/iter.js | 6 +++- src/node_builtins.cc | 1 + src/node_options.cc | 4 +++ src/node_options.h | 1 + .../test-fs-promises-file-handle-pull.js | 1 + .../test-fs-promises-file-handle-writer.js | 1 + test/parallel/test-stream-iter-broadcast.js | 1 + test/parallel/test-stream-iter-consumers.js | 1 + test/parallel/test-stream-iter-disabled.js | 30 +++++++++++++++++++ test/parallel/test-stream-iter-duplex.js | 1 + test/parallel/test-stream-iter-from.js | 1 + test/parallel/test-stream-iter-namespace.js | 1 + test/parallel/test-stream-iter-pull.js | 1 + test/parallel/test-stream-iter-push.js | 1 + test/parallel/test-stream-iter-share.js | 1 + test/parallel/test-stream-iter-transform.js | 1 + 22 files changed, 88 insertions(+), 5 deletions(-) create mode 100644 test/parallel/test-stream-iter-disabled.js diff --git a/benchmark/fs/bench-filehandle-pull-vs-webstream.js b/benchmark/fs/bench-filehandle-pull-vs-webstream.js index 298a8952bcdaf9..5d1bd56e441cbf 100644 --- a/benchmark/fs/bench-filehandle-pull-vs-webstream.js +++ 
b/benchmark/fs/bench-filehandle-pull-vs-webstream.js @@ -1,3 +1,4 @@ +// Flags: --experimental-stream-iter // Compare FileHandle.createReadStream() vs readableWebStream() vs pull() // reading a large file through two transforms: uppercase then gzip compress. 'use strict'; diff --git a/doc/api/cli.md b/doc/api/cli.md index b1a0d674ca4ded..34fa321a228dd5 100644 --- a/doc/api/cli.md +++ b/doc/api/cli.md @@ -1277,6 +1277,16 @@ added: Enable experimental support for storage inspection +### `--experimental-stream-iter` + + + +> Stability: 1 - Experimental + +Enable the experimental [`node:stream/iter`][] module. + ### `--experimental-test-coverage` @@ -6,8 +6,12 @@ -The `node:stream/iter` module provides a new streaming API built on iterables -rather than the event-driven `Readable`/`Writable`/`Transform` class hierarchy. +The `node:stream/iter` module provides a streaming API built on iterables +rather than the event-driven `Readable`/`Writable`/`Transform` class hierarchy, +or the Web Streams `ReadableStream`/`WritableStream`/`TransformStream` interfaces. + +This module is available only when the `--experimental-stream-iter` CLI flag +is enabled. Streams are represented as `AsyncIterable` (async) or `Iterable` (sync). There are no base classes to extend -- any diff --git a/doc/node.1 b/doc/node.1 index e88c005731b40f..d3c6654ecaf391 100644 --- a/doc/node.1 +++ b/doc/node.1 @@ -720,6 +720,11 @@ top-level awaits, and print their location to help users find them. .It Fl -experimental-quic Enable experimental support for the QUIC protocol. . +.It Fl -experimental-stream-iter +Enable the experimental +.Sy node:stream/iter +module. +. .It Fl -experimental-sea-config Use this flag to generate a blob that can be injected into the Node.js binary to produce a single executable application. 
See the documentation diff --git a/lib/internal/bootstrap/realm.js b/lib/internal/bootstrap/realm.js index f49f0814bbc687..cbf2ff1778919c 100644 --- a/lib/internal/bootstrap/realm.js +++ b/lib/internal/bootstrap/realm.js @@ -131,7 +131,7 @@ const schemelessBlockList = new SafeSet([ 'test/reporters', ]); // Modules that will only be enabled at run time. -const experimentalModuleList = new SafeSet(['sqlite', 'quic']); +const experimentalModuleList = new SafeSet(['sqlite', 'quic', 'stream/iter']); // Set up process.binding() and process._linkedBinding(). { diff --git a/lib/internal/process/pre_execution.js b/lib/internal/process/pre_execution.js index b68b4e26d4a7c9..87e793dda45e77 100644 --- a/lib/internal/process/pre_execution.js +++ b/lib/internal/process/pre_execution.js @@ -115,6 +115,7 @@ function prepareExecution(options) { setupNavigator(); setupWarningHandler(); setupSQLite(); + setupStreamIter(); setupQuic(); setupWebStorage(); setupWebsocket(); @@ -392,6 +393,15 @@ function initializeConfigFileSupport() { } } +function setupStreamIter() { + if (!getOptionValue('--experimental-stream-iter')) { + return; + } + + const { BuiltinModule } = require('internal/bootstrap/realm'); + BuiltinModule.allowRequireByUsers('stream/iter'); +} + function setupQuic() { if (!getOptionValue('--experimental-quic')) { return; diff --git a/lib/stream/iter.js b/lib/stream/iter.js index c8e6c45b3eeed8..8d25a7eb3a50ab 100644 --- a/lib/stream/iter.js +++ b/lib/stream/iter.js @@ -1,12 +1,16 @@ 'use strict'; -// Public entry point for the new streams API. +// Public entry point for the iterable streams API. 
// Usage: require('stream/iter') or require('node:stream/iter') +// Requires: --experimental-stream-iter const { ObjectFreeze, } = primordials; +const { emitExperimentalWarning } = require('internal/util'); +emitExperimentalWarning('stream/iter'); + // Protocol symbols const { toStreamable, diff --git a/src/node_builtins.cc b/src/node_builtins.cc index 318ff5158e9c28..255405ce317e93 100644 --- a/src/node_builtins.cc +++ b/src/node_builtins.cc @@ -141,6 +141,7 @@ BuiltinLoader::BuiltinCategories BuiltinLoader::GetBuiltinCategories() const { #endif // !OPENSSL_NO_QUIC "quic", // Experimental. "sqlite", // Experimental. + "stream/iter", // Experimental. "sys", // Deprecated. "wasi", // Experimental. #if !HAVE_SQLITE diff --git a/src/node_options.cc b/src/node_options.cc index d48641ae3ffe07..55dfeab420844c 100644 --- a/src/node_options.cc +++ b/src/node_options.cc @@ -599,6 +599,10 @@ EnvironmentOptionsParser::EnvironmentOptionsParser() { &EnvironmentOptions::experimental_sqlite, kAllowedInEnvvar, true); + AddOption("--experimental-stream-iter", + "experimental iterable streams API (node:stream/iter)", + &EnvironmentOptions::experimental_stream_iter, + kAllowedInEnvvar); AddOption("--experimental-quic", #ifndef OPENSSL_NO_QUIC "experimental QUIC support", diff --git a/src/node_options.h b/src/node_options.h index 2f0adb5ae491ec..3cbb636c026cb2 100644 --- a/src/node_options.h +++ b/src/node_options.h @@ -127,6 +127,7 @@ class EnvironmentOptions : public Options { bool experimental_fetch = true; bool experimental_websocket = true; bool experimental_sqlite = true; + bool experimental_stream_iter = false; bool webstorage = HAVE_SQLITE; #ifndef OPENSSL_NO_QUIC bool experimental_quic = false; diff --git a/test/parallel/test-fs-promises-file-handle-pull.js b/test/parallel/test-fs-promises-file-handle-pull.js index 6c971664aafbaa..5dd78e53dbfc6e 100644 --- a/test/parallel/test-fs-promises-file-handle-pull.js +++ b/test/parallel/test-fs-promises-file-handle-pull.js @@ -1,3 
+1,4 @@ +// Flags: --experimental-stream-iter 'use strict'; const common = require('../common'); diff --git a/test/parallel/test-fs-promises-file-handle-writer.js b/test/parallel/test-fs-promises-file-handle-writer.js index 920cb3976a00d0..7bb920ca823a41 100644 --- a/test/parallel/test-fs-promises-file-handle-writer.js +++ b/test/parallel/test-fs-promises-file-handle-writer.js @@ -1,3 +1,4 @@ +// Flags: --experimental-stream-iter 'use strict'; const common = require('../common'); diff --git a/test/parallel/test-stream-iter-broadcast.js b/test/parallel/test-stream-iter-broadcast.js index b0aea6e3b5ec48..9ce7c22b317e85 100644 --- a/test/parallel/test-stream-iter-broadcast.js +++ b/test/parallel/test-stream-iter-broadcast.js @@ -1,3 +1,4 @@ +// Flags: --experimental-stream-iter 'use strict'; const common = require('../common'); diff --git a/test/parallel/test-stream-iter-consumers.js b/test/parallel/test-stream-iter-consumers.js index 3866f9bd00af2f..385539fd31985a 100644 --- a/test/parallel/test-stream-iter-consumers.js +++ b/test/parallel/test-stream-iter-consumers.js @@ -1,3 +1,4 @@ +// Flags: --experimental-stream-iter 'use strict'; const common = require('../common'); diff --git a/test/parallel/test-stream-iter-disabled.js b/test/parallel/test-stream-iter-disabled.js new file mode 100644 index 00000000000000..acc6bf86f6ed94 --- /dev/null +++ b/test/parallel/test-stream-iter-disabled.js @@ -0,0 +1,30 @@ +'use strict'; +const { spawnPromisified } = require('../common'); +const assert = require('assert'); +const { describe, it } = require('node:test'); + +describe('stream/iter gating', () => { + it('fails to require node:stream/iter without flag', async () => { + const { stderr, code } = await spawnPromisified(process.execPath, [ + '-e', 'require("node:stream/iter")', + ]); + assert.match(stderr, /No such built-in module: node:stream\/iter/); + assert.notStrictEqual(code, 0); + }); + + it('fails to require stream/iter without flag', async () => { + const { stderr, 
code } = await spawnPromisified(process.execPath, [ + '-e', 'require("stream/iter")', + ]); + assert.match(stderr, /Cannot find module/); + assert.notStrictEqual(code, 0); + }); + + it('succeeds with --experimental-stream-iter', async () => { + const { code } = await spawnPromisified(process.execPath, [ + '--experimental-stream-iter', + '-e', 'require("node:stream/iter")', + ]); + assert.strictEqual(code, 0); + }); +}); diff --git a/test/parallel/test-stream-iter-duplex.js b/test/parallel/test-stream-iter-duplex.js index 74d60acec74dee..ffd3033a30ce20 100644 --- a/test/parallel/test-stream-iter-duplex.js +++ b/test/parallel/test-stream-iter-duplex.js @@ -1,3 +1,4 @@ +// Flags: --experimental-stream-iter 'use strict'; const common = require('../common'); diff --git a/test/parallel/test-stream-iter-from.js b/test/parallel/test-stream-iter-from.js index 4c23f412a7bd8d..5349076bafebf1 100644 --- a/test/parallel/test-stream-iter-from.js +++ b/test/parallel/test-stream-iter-from.js @@ -1,3 +1,4 @@ +// Flags: --experimental-stream-iter 'use strict'; const common = require('../common'); diff --git a/test/parallel/test-stream-iter-namespace.js b/test/parallel/test-stream-iter-namespace.js index 73330a8cbef44a..ce197e85846dfe 100644 --- a/test/parallel/test-stream-iter-namespace.js +++ b/test/parallel/test-stream-iter-namespace.js @@ -1,3 +1,4 @@ +// Flags: --experimental-stream-iter 'use strict'; const common = require('../common'); diff --git a/test/parallel/test-stream-iter-pull.js b/test/parallel/test-stream-iter-pull.js index a5206fe6a955e7..b327493f1d9383 100644 --- a/test/parallel/test-stream-iter-pull.js +++ b/test/parallel/test-stream-iter-pull.js @@ -1,3 +1,4 @@ +// Flags: --experimental-stream-iter 'use strict'; const common = require('../common'); diff --git a/test/parallel/test-stream-iter-push.js b/test/parallel/test-stream-iter-push.js index 8b9b0ec97d5378..15a996ebf09879 100644 --- a/test/parallel/test-stream-iter-push.js +++ 
b/test/parallel/test-stream-iter-push.js @@ -1,3 +1,4 @@ +// Flags: --experimental-stream-iter 'use strict'; const common = require('../common'); diff --git a/test/parallel/test-stream-iter-share.js b/test/parallel/test-stream-iter-share.js index fcfb5cec1a6401..7aefdd2111a86a 100644 --- a/test/parallel/test-stream-iter-share.js +++ b/test/parallel/test-stream-iter-share.js @@ -1,3 +1,4 @@ +// Flags: --experimental-stream-iter 'use strict'; const common = require('../common'); diff --git a/test/parallel/test-stream-iter-transform.js b/test/parallel/test-stream-iter-transform.js index 788240b71c519c..d8f001194dfd3b 100644 --- a/test/parallel/test-stream-iter-transform.js +++ b/test/parallel/test-stream-iter-transform.js @@ -1,3 +1,4 @@ +// Flags: --experimental-stream-iter 'use strict'; const common = require('../common'); From 5bfafbce594dd112fe011b16335fa7c1aa10c5d1 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 17 Mar 2026 20:56:05 -0700 Subject: [PATCH 15/42] stream: replace instanceof with cross-realm-safe type checks Replace all instanceof checks in the `stream/iter` implementation with cross-realm-safe alternatives from `internal/util/types` and `internal/util`. Objects created in different VM contexts have different prototypes, causing `instanceof` to return `false`. - `instanceof Uint8Array` -> `isUint8Array` (`internal/util/types`) - `instanceof ArrayBuffer` -> `isArrayBuffer` (`internal/util/types`) - `instanceof Promise` -> `isPromise` (`internal/util/types`) - `error instanceof Error` -> `isError` (`internal/util`) Note: `Error.isError` cannot be used via primordials because V8 flag-gated features (`InitializeGlobal_js_error_iserror` in `bootstrapper.cc`) are installed after `primordials.js` captures `Error`'s static methods during snapshot build. 
Adds cross-realm tests verifying correct handling of `Uint8Array`, `ArrayBuffer`, `Uint8Array[]`, `Promise`, and typed array views created via `vm.runInNewContext` --- lib/internal/streams/iter/broadcast.js | 5 +- lib/internal/streams/iter/from.js | 35 ++--- lib/internal/streams/iter/pull.js | 35 ++--- lib/internal/streams/iter/push.js | 7 +- lib/internal/streams/iter/share.js | 7 +- test/parallel/test-stream-iter-cross-realm.js | 142 ++++++++++++++++++ 6 files changed, 187 insertions(+), 44 deletions(-) create mode 100644 test/parallel/test-stream-iter-cross-realm.js diff --git a/lib/internal/streams/iter/broadcast.js b/lib/internal/streams/iter/broadcast.js index 60bc543f42ead0..81cb547799d390 100644 --- a/lib/internal/streams/iter/broadcast.js +++ b/lib/internal/streams/iter/broadcast.js @@ -11,7 +11,6 @@ const { ArrayPrototypeMap, ArrayPrototypePush, ArrayPrototypeSlice, - Error, MathMax, Promise, PromiseResolve, @@ -24,7 +23,7 @@ const { const { TextEncoder } = require('internal/encoding'); -const { lazyDOMException } = require('internal/util'); +const { isError, lazyDOMException } = require('internal/util'); const { codes: { @@ -710,7 +709,7 @@ const Broadcast = { await result.writer.end(signal ? { signal } : undefined); } catch (error) { await result.writer.fail( - error instanceof Error ? error : new ERR_INVALID_ARG_TYPE('error', 'Error', String(error))); + isError(error) ? error : new ERR_INVALID_ARG_TYPE('error', 'Error', String(error))); } })(); diff --git a/lib/internal/streams/iter/from.js b/lib/internal/streams/iter/from.js index a0c40c9f356275..9dc782598039ea 100644 --- a/lib/internal/streams/iter/from.js +++ b/lib/internal/streams/iter/from.js @@ -6,14 +6,12 @@ // Handles recursive flattening of nested iterables and protocol conversions. 
const { - ArrayBuffer, ArrayBufferIsView, ArrayIsArray, ArrayPrototypeEvery, ArrayPrototypePush, ArrayPrototypeSlice, ObjectPrototypeToString, - Promise, SymbolAsyncIterator, SymbolIterator, SymbolToPrimitive, @@ -26,6 +24,11 @@ const { }, } = require('internal/errors'); const { TextEncoder } = require('internal/encoding'); +const { + isArrayBuffer, + isPromise, + isUint8Array, +} = require('internal/util/types'); const { toStreamable, @@ -50,7 +53,7 @@ const FROM_BATCH_SIZE = 128; */ function isPrimitiveChunk(value) { if (typeof value === 'string') return true; - if (value instanceof ArrayBuffer) return true; + if (isArrayBuffer(value)) return true; if (ArrayBufferIsView(value)) return true; return false; } @@ -144,10 +147,10 @@ function primitiveToUint8Array(chunk) { if (typeof chunk === 'string') { return encoder.encode(chunk); } - if (chunk instanceof ArrayBuffer) { + if (isArrayBuffer(chunk)) { return new Uint8Array(chunk); } - if (chunk instanceof Uint8Array) { + if (isUint8Array(chunk)) { return chunk; } // Other ArrayBufferView types (Int8Array, DataView, etc.) 
@@ -243,7 +246,7 @@ function isUint8ArrayBatch(value) { if (!ArrayIsArray(value)) return false; if (value.length === 0) return true; // Check first element - if it's a Uint8Array, assume the rest are too - return value[0] instanceof Uint8Array; + return isUint8Array(value[0]); } /** @@ -261,7 +264,7 @@ function* normalizeSyncSource(source) { continue; } // Fast path 2: value is a single Uint8Array (very common) - if (value instanceof Uint8Array) { + if (isUint8Array(value)) { yield [value]; continue; } @@ -288,7 +291,7 @@ function* normalizeSyncSource(source) { */ async function* normalizeAsyncValue(value) { // Handle promises first - if (value instanceof Promise) { + if (isPromise(value)) { const resolved = await value; yield* normalizeAsyncValue(resolved); return; @@ -303,7 +306,7 @@ async function* normalizeAsyncValue(value) { // Handle ToAsyncStreamable protocol (check before ToStreamable) if (isToAsyncStreamable(value)) { const result = value[toAsyncStreamable](); - if (result instanceof Promise) { + if (isPromise(result)) { yield* normalizeAsyncValue(await result); } else { yield* normalizeAsyncValue(result); @@ -377,7 +380,7 @@ async function* normalizeAsyncSource(source) { continue; } // Fast path 2: value is a single Uint8Array (very common) - if (value instanceof Uint8Array) { + if (isUint8Array(value)) { yield [value]; continue; } @@ -411,7 +414,7 @@ async function* normalizeAsyncSource(source) { continue; } // Fast path 2: value is a single Uint8Array (very common) - if (value instanceof Uint8Array) { + if (isUint8Array(value)) { ArrayPrototypePush(batch, value); continue; } @@ -477,9 +480,8 @@ function fromSync(input) { }; } // Check if it's an array of Uint8Array (common case) - if (input[0] instanceof Uint8Array) { - const allUint8 = ArrayPrototypeEvery(input, - (item) => item instanceof Uint8Array); + if (isUint8Array(input[0])) { + const allUint8 = ArrayPrototypeEvery(input, isUint8Array); if (allUint8) { const batch = input; return { @@ -542,9 
+544,8 @@ function from(input) { }, }; } - if (input[0] instanceof Uint8Array) { - const allUint8 = ArrayPrototypeEvery(input, - (item) => item instanceof Uint8Array); + if (isUint8Array(input[0])) { + const allUint8 = ArrayPrototypeEvery(input, isUint8Array); if (allUint8) { const batch = input; return { diff --git a/lib/internal/streams/iter/pull.js b/lib/internal/streams/iter/pull.js index 3b0b2f6ad6faaf..0f53519608cfbb 100644 --- a/lib/internal/streams/iter/pull.js +++ b/lib/internal/streams/iter/pull.js @@ -10,13 +10,10 @@ const { ArrayIsArray, ArrayPrototypePush, ArrayPrototypeSlice, - Error, - Promise, SafePromiseAllReturnVoid, String, SymbolAsyncIterator, SymbolIterator, - Uint8Array, } = primordials; const { @@ -27,7 +24,11 @@ const { }, } = require('internal/errors'); const { TextEncoder } = require('internal/encoding'); -const { lazyDOMException } = require('internal/util'); +const { isError, lazyDOMException } = require('internal/util'); +const { + isPromise, + isUint8Array, +} = require('internal/util/types'); const { AbortController } = require('internal/abort_controller'); const { @@ -121,7 +122,7 @@ function parsePipeToArgs(args) { * @yields {Uint8Array} */ function* flattenTransformYieldSync(value) { - if (value instanceof Uint8Array) { + if (isUint8Array(value)) { yield value; return; } @@ -144,7 +145,7 @@ function* flattenTransformYieldSync(value) { * @yields {Uint8Array} */ async function* flattenTransformYieldAsync(value) { - if (value instanceof Uint8Array) { + if (isUint8Array(value)) { yield value; return; } @@ -178,7 +179,7 @@ function* processTransformResultSync(result) { return; } if (ArrayIsArray(result) && result.length > 0 && - result[0] instanceof Uint8Array) { + isUint8Array(result[0])) { // Fast path: Uint8Array[] if (result.length > 0) { yield result; @@ -207,7 +208,7 @@ function* processTransformResultSync(result) { */ async function* processTransformResultAsync(result) { // Handle Promise - if (result instanceof Promise) { + if 
(isPromise(result)) { const resolved = await result; yield* processTransformResultAsync(resolved); return; @@ -216,7 +217,7 @@ async function* processTransformResultAsync(result) { return; } if (ArrayIsArray(result) && - (result.length === 0 || result[0] instanceof Uint8Array)) { + (result.length === 0 || isUint8Array(result[0]))) { // Fast path: Uint8Array[] if (result.length > 0) { yield result; @@ -228,7 +229,7 @@ async function* processTransformResultAsync(result) { const batch = []; for await (const item of result) { // Fast path: item is already Uint8Array - if (item instanceof Uint8Array) { + if (isUint8Array(item)) { ArrayPrototypePush(batch, item); continue; } @@ -247,7 +248,7 @@ async function* processTransformResultAsync(result) { const batch = []; for (const item of result) { // Fast path: item is already Uint8Array - if (item instanceof Uint8Array) { + if (isUint8Array(item)) { ArrayPrototypePush(batch, item); continue; } @@ -353,7 +354,7 @@ async function* applyStatelessAsyncTransform(source, transform, options) { continue; } // Handle Promise of Uint8Array[] - if (result instanceof Promise) { + if (isPromise(result)) { const resolved = await result; if (resolved === null) continue; if (isUint8ArrayBatch(resolved)) { @@ -374,7 +375,7 @@ async function* applyStatelessAsyncTransform(source, transform, options) { for (let i = 0; i < item.length; i++) { ArrayPrototypePush(batch, item[i]); } - } else if (item instanceof Uint8Array) { + } else if (isUint8Array(item)) { ArrayPrototypePush(batch, item); } else if (item !== null && item !== undefined) { for await (const chunk of flattenTransformYieldAsync(item)) { @@ -407,7 +408,7 @@ async function* applyStatefulAsyncTransform(source, transform, options) { continue; } // Fast path: single Uint8Array - if (item instanceof Uint8Array) { + if (isUint8Array(item)) { yield [item]; continue; } @@ -524,7 +525,7 @@ async function* createAsyncPipeline(source, transforms, signal) { if (!controller.signal.aborted) { try 
{ controller.abort( - error instanceof Error ? error : + isError(error) ? error : new ERR_OPERATION_FAILED(String(error))); } catch { // Transform signal listeners may throw - suppress @@ -623,7 +624,7 @@ function pipeToSync(source, ...args) { } } catch (error) { if (!options?.preventFail) { - writer.fail(error instanceof Error ? error : new ERR_OPERATION_FAILED(String(error))); + writer.fail(isError(error) ? error : new ERR_OPERATION_FAILED(String(error))); } throw error; } @@ -722,7 +723,7 @@ async function pipeTo(source, ...args) { } catch (error) { if (!options?.preventFail) { await writer.fail( - error instanceof Error ? error : new ERR_OPERATION_FAILED(String(error))); + isError(error) ? error : new ERR_OPERATION_FAILED(String(error))); } throw error; } diff --git a/lib/internal/streams/iter/push.js b/lib/internal/streams/iter/push.js index 8a40d7004691b1..7665f6e10c1a8d 100644 --- a/lib/internal/streams/iter/push.js +++ b/lib/internal/streams/iter/push.js @@ -8,7 +8,6 @@ const { ArrayPrototypePush, ArrayPrototypeSlice, - Error, MathMax, Promise, PromiseResolve, @@ -20,7 +19,7 @@ const { ERR_INVALID_STATE, }, } = require('internal/errors'); -const { lazyDOMException } = require('internal/util'); +const { isError, lazyDOMException } = require('internal/util'); const { drainableProtocol, @@ -78,12 +77,12 @@ class PushQueue { if (this.#signal) { if (this.#signal.aborted) { - this.fail(this.#signal.reason instanceof Error ? + this.fail(isError(this.#signal.reason) ? this.#signal.reason : lazyDOMException('Aborted', 'AbortError')); } else { this.#abortHandler = () => { - this.fail(this.#signal.reason instanceof Error ? + this.fail(isError(this.#signal.reason) ? 
this.#signal.reason : lazyDOMException('Aborted', 'AbortError')); }; diff --git a/lib/internal/streams/iter/share.js b/lib/internal/streams/iter/share.js index 1ef73bf2f45a6f..e0d7b914b114b8 100644 --- a/lib/internal/streams/iter/share.js +++ b/lib/internal/streams/iter/share.js @@ -7,7 +7,6 @@ const { ArrayPrototypePush, - Error, MathMax, Promise, PromiseResolve, @@ -18,6 +17,8 @@ const { SymbolIterator, } = primordials; +const { isError } = require('internal/util'); + const { shareProtocol, shareSyncProtocol, @@ -310,7 +311,7 @@ class ShareImpl { } } catch (error) { this.#sourceError = - error instanceof Error ? error : new ERR_OPERATION_FAILED(String(error)); + isError(error) ? error : new ERR_OPERATION_FAILED(String(error)); this.#sourceExhausted = true; } finally { this.#pulling = false; @@ -533,7 +534,7 @@ class SyncShareImpl { } } catch (error) { this.#sourceError = - error instanceof Error ? error : new ERR_OPERATION_FAILED(String(error)); + isError(error) ? error : new ERR_OPERATION_FAILED(String(error)); this.#sourceExhausted = true; } } diff --git a/test/parallel/test-stream-iter-cross-realm.js b/test/parallel/test-stream-iter-cross-realm.js new file mode 100644 index 00000000000000..0409b14b1c6f32 --- /dev/null +++ b/test/parallel/test-stream-iter-cross-realm.js @@ -0,0 +1,142 @@ +// Flags: --experimental-stream-iter +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const vm = require('vm'); +const { from, fromSync, pull, text, bytesSync } = require('stream/iter'); + +// Cross-realm objects are created in a different VM context. +// They have different prototypes, so `instanceof` checks fail. +// These tests verify that stream/iter correctly handles cross-realm types. + +// Helper: compare Uint8Array content regardless of realm. 
+function assertBytes(actual, expected) { + assert.strictEqual(actual.length, expected.length, + `length mismatch: ${actual.length} !== ${expected.length}`); + for (let i = 0; i < expected.length; i++) { + assert.strictEqual(actual[i], expected[i], `byte mismatch at index ${i}`); + } +} + +// ============================================================================= +// from() / fromSync() with cross-realm Uint8Array +// ============================================================================= + +async function testFromSyncCrossRealmUint8Array() { + const crossRealm = vm.runInNewContext('new Uint8Array([1, 2, 3])'); + const readable = fromSync(crossRealm); + const data = bytesSync(readable); + assertBytes(data, new Uint8Array([1, 2, 3])); +} + +async function testFromCrossRealmUint8Array() { + const crossRealm = vm.runInNewContext('new Uint8Array([4, 5, 6])'); + const readable = from(crossRealm); + const result = await text(readable); + assert.strictEqual(result, '\x04\x05\x06'); +} + +// ============================================================================= +// from() / fromSync() with cross-realm ArrayBuffer +// ============================================================================= + +async function testFromSyncCrossRealmArrayBuffer() { + const crossRealm = vm.runInNewContext( + 'new Uint8Array([7, 8, 9]).buffer', + ); + const readable = fromSync(crossRealm); + const data = bytesSync(readable); + assertBytes(data, new Uint8Array([7, 8, 9])); +} + +async function testFromCrossRealmArrayBuffer() { + const crossRealm = vm.runInNewContext( + 'new Uint8Array([10, 11, 12]).buffer', + ); + const readable = from(crossRealm); + const result = await text(readable); + assert.strictEqual(result, '\x0a\x0b\x0c'); +} + +// ============================================================================= +// from() / fromSync() with cross-realm Uint8Array[] +// ============================================================================= + +async function 
testFromSyncCrossRealmUint8ArrayArray() { + const crossRealm = vm.runInNewContext( + '[new Uint8Array([1, 2]), new Uint8Array([3, 4])]', + ); + const readable = fromSync(crossRealm); + const data = bytesSync(readable); + assertBytes(data, new Uint8Array([1, 2, 3, 4])); +} + +async function testFromCrossRealmUint8ArrayArray() { + const crossRealm = vm.runInNewContext( + '[new Uint8Array([5, 6]), new Uint8Array([7, 8])]', + ); + const readable = from(crossRealm); + const result = await text(readable); + assert.strictEqual(result, '\x05\x06\x07\x08'); +} + +// ============================================================================= +// pull() with cross-realm Uint8Array from transforms +// ============================================================================= + +async function testPullCrossRealmTransformOutput() { + // Transform that returns cross-realm Uint8Array[] batches + const source = from('hello'); + const crossRealmTransform = (chunks) => { + if (chunks === null) return null; + // Re-encode each chunk as cross-realm Uint8Array + return vm.runInNewContext( + `[new Uint8Array([${[...chunks[0]]}])]`, + ); + }; + const result = pull(source, crossRealmTransform); + const output = await text(result); + assert.strictEqual(output, 'hello'); +} + +// ============================================================================= +// from() with cross-realm Promise +// ============================================================================= + +async function testFromCrossRealmPromise() { + const crossRealmPromise = vm.runInNewContext( + 'Promise.resolve("promised-data")', + ); + async function* gen() { + yield crossRealmPromise; + } + const readable = from(gen()); + const result = await text(readable); + assert.strictEqual(result, 'promised-data'); +} + +// ============================================================================= +// from() with cross-realm typed arrays (non-Uint8Array views) +// 
============================================================================= + +async function testFromSyncCrossRealmInt32Array() { + const crossRealm = vm.runInNewContext('new Int32Array([1])'); + const readable = fromSync(crossRealm); + const data = bytesSync(readable); + // Int32Array([1]) = 4 bytes: 01 00 00 00 (little-endian) + assert.strictEqual(data.length, 4); + assert.strictEqual(data[0], 1); +} + +Promise.all([ + testFromSyncCrossRealmUint8Array(), + testFromCrossRealmUint8Array(), + testFromSyncCrossRealmArrayBuffer(), + testFromCrossRealmArrayBuffer(), + testFromSyncCrossRealmUint8ArrayArray(), + testFromCrossRealmUint8ArrayArray(), + testPullCrossRealmTransformOutput(), + testFromCrossRealmPromise(), + testFromSyncCrossRealmInt32Array(), +]).then(common.mustCall()); From 18844d761c572535676aff2f73922d69056a6ccb Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 17 Mar 2026 21:43:42 -0700 Subject: [PATCH 16/42] stream: use primordials for prototype method access in stream/iter MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Replace direct prototype property access and method calls with primordial-safe equivalents throughout the stream/iter implementation. 
- `.byteLength`/`.byteOffset`/`.buffer` on typed arrays replaced with `TypedArrayPrototypeGetByteLength`/`GetByteOffset`/`GetBuffer` - `.buffer.byteLength` replaced with `ArrayBufferPrototypeGetByteLength` - `.buffer.slice()` replaced with `ArrayBufferPrototypeSlice` - `.call()` replaced with `FunctionPrototypeCall` - `.toString()` replaced with `FunctionPrototypeCall` - Protocol method calls replaced with `FunctionPrototypeCall` - `.map()` replaced with `ArrayPrototypeMap` - `.startsWith()` replaced with `StringPrototypeStartsWith` - `.slice()` on `Buffer` replaced with `TypedArrayPrototypeSlice` - `.shift()` on plain array replaced with `ArrayPrototypeShift` - `.fill()` on typed arrays replaced with `TypedArrayPrototypeFill` - `.then()` replaced with `PromisePrototypeThen` - `.catch()` replaced with `PromisePrototypeThen(…, undefined, handler)` - Added `__proto__`: null to all iterator result objects resolved through promises (`push.js`, `broadcast.js`, `share.js`, `consumers.js`) --- lib/internal/streams/iter/broadcast.js | 21 ++++++------ lib/internal/streams/iter/consumers.js | 47 +++++++++++++++++--------- lib/internal/streams/iter/from.js | 20 +++++++---- lib/internal/streams/iter/pull.js | 7 ++-- lib/internal/streams/iter/push.js | 11 +++--- lib/internal/streams/iter/share.js | 25 +++++++------- lib/internal/streams/iter/transform.js | 44 ++++++++++++++++-------- lib/internal/streams/iter/utils.js | 5 +-- 8 files changed, 111 insertions(+), 69 deletions(-) diff --git a/lib/internal/streams/iter/broadcast.js b/lib/internal/streams/iter/broadcast.js index 81cb547799d390..b75c32b65daf98 100644 --- a/lib/internal/streams/iter/broadcast.js +++ b/lib/internal/streams/iter/broadcast.js @@ -19,6 +19,7 @@ const { Symbol, SymbolAsyncIterator, SymbolDispose, + TypedArrayPrototypeGetByteLength, } = primordials; const { TextEncoder } = require('internal/encoding'); @@ -236,7 +237,7 @@ class BroadcastImpl { if (reason) { consumer.reject?.(reason); } else { - 
consumer.resolve({ done: true, value: undefined }); + consumer.resolve({ __proto__: null, done: true, value: undefined }); } consumer.resolve = null; consumer.reject = null; @@ -289,9 +290,9 @@ class BroadcastImpl { if (bufferIndex < this.#buffer.length) { const chunk = this.#buffer.get(bufferIndex); consumer.cursor++; - consumer.resolve({ done: false, value: chunk }); + consumer.resolve({ __proto__: null, done: false, value: chunk }); } else { - consumer.resolve({ done: true, value: undefined }); + consumer.resolve({ __proto__: null, done: true, value: undefined }); } consumer.resolve = null; consumer.reject = null; @@ -368,7 +369,7 @@ class BroadcastImpl { const resolve = consumer.resolve; consumer.resolve = null; consumer.reject = null; - resolve({ done: false, value: chunk }); + resolve({ __proto__: null, done: false, value: chunk }); } } } @@ -417,7 +418,7 @@ class BroadcastWriter { const converted = typeof chunk === 'string' ? encoder.encode(chunk) : chunk; this.#broadcast[kWrite]([converted]); - this.#totalBytes += converted.byteLength; + this.#totalBytes += TypedArrayPrototypeGetByteLength(converted); return kResolvedPromise; } return this.writev([chunk], options); @@ -433,7 +434,7 @@ class BroadcastWriter { (typeof c === 'string' ? encoder.encode(c) : c)); this.#broadcast[kWrite](converted); for (let i = 0; i < converted.length; i++) { - this.#totalBytes += converted[i].byteLength; + this.#totalBytes += TypedArrayPrototypeGetByteLength(converted[i]); } return kResolvedPromise; } @@ -459,7 +460,7 @@ class BroadcastWriter { if (this.#broadcast[kWrite](converted)) { for (let i = 0; i < converted.length; i++) { - this.#totalBytes += converted[i].byteLength; + this.#totalBytes += TypedArrayPrototypeGetByteLength(converted[i]); } return; } @@ -486,7 +487,7 @@ class BroadcastWriter { const converted = typeof chunk === 'string' ? 
encoder.encode(chunk) : chunk; if (this.#broadcast[kWrite]([converted])) { - this.#totalBytes += converted.byteLength; + this.#totalBytes += TypedArrayPrototypeGetByteLength(converted); return true; } return false; @@ -501,7 +502,7 @@ class BroadcastWriter { (typeof c === 'string' ? encoder.encode(c) : c)); if (this.#broadcast[kWrite](converted)) { for (let i = 0; i < converted.length; i++) { - this.#totalBytes += converted[i].byteLength; + this.#totalBytes += TypedArrayPrototypeGetByteLength(converted[i]); } return true; } @@ -596,7 +597,7 @@ class BroadcastWriter { const pending = this.#pendingWrites.shift(); if (this.#broadcast[kWrite](pending.chunk)) { for (let i = 0; i < pending.chunk.length; i++) { - this.#totalBytes += pending.chunk[i].byteLength; + this.#totalBytes += TypedArrayPrototypeGetByteLength(pending.chunk[i]); } pending.resolve(); } else { diff --git a/lib/internal/streams/iter/consumers.js b/lib/internal/streams/iter/consumers.js index 46ae87a2b39d5c..25c2db0584c475 100644 --- a/lib/internal/streams/iter/consumers.js +++ b/lib/internal/streams/iter/consumers.js @@ -8,13 +8,19 @@ // ondrain() - backpressure drain utility const { + ArrayBufferPrototypeGetByteLength, + ArrayBufferPrototypeSlice, ArrayPrototypeFilter, ArrayPrototypeMap, ArrayPrototypePush, ArrayPrototypeSlice, + PromisePrototypeThen, SafePromiseAllReturnVoid, SafePromiseRace, SymbolAsyncIterator, + TypedArrayPrototypeGetBuffer, + TypedArrayPrototypeGetByteLength, + TypedArrayPrototypeGetByteOffset, } = primordials; const { @@ -71,7 +77,7 @@ function bytesSync(source, options) { for (let i = 0; i < batch.length; i++) { const chunk = batch[i]; if (limit !== undefined) { - totalBytes += chunk.byteLength; + totalBytes += TypedArrayPrototypeGetByteLength(chunk); if (totalBytes > limit) { throw new ERR_OUT_OF_RANGE('totalBytes', `<= ${limit}`, totalBytes); } @@ -106,11 +112,15 @@ function textSync(source, options) { */ function arrayBufferSync(source, options) { const data = 
bytesSync(source, options); - if (data.byteOffset === 0 && data.byteLength === data.buffer.byteLength) { - return data.buffer; + const byteOffset = TypedArrayPrototypeGetByteOffset(data); + const byteLength = TypedArrayPrototypeGetByteLength(data); + const buffer = TypedArrayPrototypeGetBuffer(data); + if (byteOffset === 0 && + byteLength === ArrayBufferPrototypeGetByteLength(buffer)) { + return buffer; } - return data.buffer.slice(data.byteOffset, - data.byteOffset + data.byteLength); + return ArrayBufferPrototypeSlice(buffer, byteOffset, + byteOffset + byteLength); } /** @@ -128,7 +138,7 @@ function arraySync(source, options) { for (let i = 0; i < batch.length; i++) { const chunk = batch[i]; if (limit !== undefined) { - totalBytes += chunk.byteLength; + totalBytes += TypedArrayPrototypeGetByteLength(chunk); if (totalBytes > limit) { throw new ERR_OUT_OF_RANGE('totalBytes', `<= ${limit}`, totalBytes); } @@ -191,7 +201,7 @@ async function bytes(source, options) { for (let i = 0; i < batch.length; i++) { const chunk = batch[i]; if (limit !== undefined) { - totalBytes += chunk.byteLength; + totalBytes += TypedArrayPrototypeGetByteLength(chunk); if (totalBytes > limit) { throw new ERR_OUT_OF_RANGE('totalBytes', `<= ${limit}`, totalBytes); } @@ -207,7 +217,7 @@ async function bytes(source, options) { for (let i = 0; i < batch.length; i++) { const chunk = batch[i]; if (limit !== undefined) { - totalBytes += chunk.byteLength; + totalBytes += TypedArrayPrototypeGetByteLength(chunk); if (totalBytes > limit) { throw new ERR_OUT_OF_RANGE('totalBytes', `<= ${limit}`, totalBytes); } @@ -245,11 +255,15 @@ async function text(source, options) { */ async function arrayBuffer(source, options) { const data = await bytes(source, options); - if (data.byteOffset === 0 && data.byteLength === data.buffer.byteLength) { - return data.buffer; + const byteOffset = TypedArrayPrototypeGetByteOffset(data); + const byteLength = TypedArrayPrototypeGetByteLength(data); + const buffer = 
TypedArrayPrototypeGetBuffer(data); + if (byteOffset === 0 && + byteLength === ArrayBufferPrototypeGetByteLength(buffer)) { + return buffer; } - return data.buffer.slice(data.byteOffset, - data.byteOffset + data.byteLength); + return ArrayBufferPrototypeSlice(buffer, byteOffset, + byteOffset + byteLength); } /** @@ -299,7 +313,7 @@ async function array(source, options) { for (let i = 0; i < batch.length; i++) { const chunk = batch[i]; if (limit !== undefined) { - totalBytes += chunk.byteLength; + totalBytes += TypedArrayPrototypeGetByteLength(chunk); if (totalBytes > limit) { throw new ERR_OUT_OF_RANGE('totalBytes', `<= ${limit}`, totalBytes); } @@ -315,7 +329,7 @@ async function array(source, options) { for (let i = 0; i < batch.length; i++) { const chunk = batch[i]; if (limit !== undefined) { - totalBytes += chunk.byteLength; + totalBytes += TypedArrayPrototypeGetByteLength(chunk); if (totalBytes > limit) { throw new ERR_OUT_OF_RANGE('totalBytes', `<= ${limit}`, totalBytes); } @@ -439,8 +453,9 @@ function merge(...args) { const startIterator = (state, index) => { if (!state.done && !state.pending) { - state.pending = state.iterator.next().then( - (result) => ({ index, result })); + state.pending = PromisePrototypeThen( + state.iterator.next(), + (result) => ({ __proto__: null, index, result })); } }; diff --git a/lib/internal/streams/iter/from.js b/lib/internal/streams/iter/from.js index 9dc782598039ea..fc4fc1b40958f6 100644 --- a/lib/internal/streams/iter/from.js +++ b/lib/internal/streams/iter/from.js @@ -11,10 +11,14 @@ const { ArrayPrototypeEvery, ArrayPrototypePush, ArrayPrototypeSlice, + FunctionPrototypeCall, ObjectPrototypeToString, SymbolAsyncIterator, SymbolIterator, SymbolToPrimitive, + TypedArrayPrototypeGetBuffer, + TypedArrayPrototypeGetByteLength, + TypedArrayPrototypeGetByteOffset, Uint8Array, } = primordials; @@ -154,7 +158,11 @@ function primitiveToUint8Array(chunk) { return chunk; } // Other ArrayBufferView types (Int8Array, DataView, etc.) 
- return new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength); + return new Uint8Array( + TypedArrayPrototypeGetBuffer(chunk), + TypedArrayPrototypeGetByteOffset(chunk), + TypedArrayPrototypeGetByteLength(chunk), + ); } /** @@ -166,7 +174,7 @@ function tryStringCoercion(obj) { // Check for Symbol.toPrimitive first if (hasToPrimitive(obj)) { const toPrimitive = obj[SymbolToPrimitive]; - const result = toPrimitive.call(obj, 'string'); + const result = FunctionPrototypeCall(toPrimitive, obj, 'string'); if (typeof result === 'string') { return result; } @@ -175,7 +183,7 @@ function tryStringCoercion(obj) { // Check for custom toString if (hasCustomToString(obj)) { - const result = obj.toString(); + const result = FunctionPrototypeCall(obj.toString, obj); return result; } @@ -200,7 +208,7 @@ function* normalizeSyncValue(value) { // Handle ToStreamable protocol if (isToStreamable(value)) { - const result = value[toStreamable](); + const result = FunctionPrototypeCall(value[toStreamable], value); yield* normalizeSyncValue(result); return; } @@ -305,7 +313,7 @@ async function* normalizeAsyncValue(value) { // Handle ToAsyncStreamable protocol (check before ToStreamable) if (isToAsyncStreamable(value)) { - const result = value[toAsyncStreamable](); + const result = FunctionPrototypeCall(value[toAsyncStreamable], value); if (isPromise(result)) { yield* normalizeAsyncValue(await result); } else { @@ -316,7 +324,7 @@ async function* normalizeAsyncValue(value) { // Handle ToStreamable protocol if (isToStreamable(value)) { - const result = value[toStreamable](); + const result = FunctionPrototypeCall(value[toStreamable], value); yield* normalizeAsyncValue(result); return; } diff --git a/lib/internal/streams/iter/pull.js b/lib/internal/streams/iter/pull.js index 0f53519608cfbb..f03c828c8f4e5a 100644 --- a/lib/internal/streams/iter/pull.js +++ b/lib/internal/streams/iter/pull.js @@ -14,6 +14,7 @@ const { String, SymbolAsyncIterator, SymbolIterator, + 
TypedArrayPrototypeGetByteLength, } = primordials; const { @@ -615,7 +616,7 @@ function pipeToSync(source, ...args) { for (let i = 0; i < batch.length; i++) { const chunk = batch[i]; writer.write(chunk); - totalBytes += chunk.byteLength; + totalBytes += TypedArrayPrototypeGetByteLength(chunk); } } @@ -662,7 +663,7 @@ async function pipeTo(source, ...args) { if (hasWritev && batch.length > 1) { await writer.writev(batch, signal ? { signal } : undefined); for (let i = 0; i < batch.length; i++) { - totalBytes += batch[i].byteLength; + totalBytes += TypedArrayPrototypeGetByteLength(batch[i]); } } else { const promises = []; @@ -672,7 +673,7 @@ async function pipeTo(source, ...args) { if (result !== undefined) { ArrayPrototypePush(promises, result); } - totalBytes += chunk.byteLength; + totalBytes += TypedArrayPrototypeGetByteLength(chunk); } if (promises.length > 0) { await SafePromiseAllReturnVoid(promises); diff --git a/lib/internal/streams/iter/push.js b/lib/internal/streams/iter/push.js index 7665f6e10c1a8d..c932becea23305 100644 --- a/lib/internal/streams/iter/push.js +++ b/lib/internal/streams/iter/push.js @@ -12,6 +12,7 @@ const { Promise, PromiseResolve, SymbolAsyncIterator, + TypedArrayPrototypeGetByteLength, } = primordials; const { @@ -145,7 +146,7 @@ class PushQueue { case 'drop-newest': // Discard this write, but return true for (let i = 0; i < chunks.length; i++) { - this.#bytesWritten += chunks[i].byteLength; + this.#bytesWritten += TypedArrayPrototypeGetByteLength(chunks[i]); } return true; } @@ -153,7 +154,7 @@ class PushQueue { this.#slots.push(chunks); for (let i = 0; i < chunks.length; i++) { - this.#bytesWritten += chunks[i].byteLength; + this.#bytesWritten += TypedArrayPrototypeGetByteLength(chunks[i]); } this.#resolvePendingReads(); @@ -352,10 +353,10 @@ class PushQueue { const pending = this.#pendingReads.shift(); const result = this.#drain(); this.#resolvePendingWrites(); - pending.resolve({ value: result, done: false }); + pending.resolve({ 
__proto__: null, value: result, done: false }); } else if (this.#writerState === 'closed') { const pending = this.#pendingReads.shift(); - pending.resolve({ value: undefined, done: true }); + pending.resolve({ __proto__: null, value: undefined, done: true }); } else if (this.#writerState === 'errored' && this.#error) { const pending = this.#pendingReads.shift(); pending.reject(this.#error); @@ -371,7 +372,7 @@ class PushQueue { const pending = this.#pendingWrites.shift(); this.#slots.push(pending.chunks); for (let i = 0; i < pending.chunks.length; i++) { - this.#bytesWritten += pending.chunks[i].byteLength; + this.#bytesWritten += TypedArrayPrototypeGetByteLength(pending.chunks[i]); } pending.resolve(); } diff --git a/lib/internal/streams/iter/share.js b/lib/internal/streams/iter/share.js index e0d7b914b114b8..34ed87f58512c6 100644 --- a/lib/internal/streams/iter/share.js +++ b/lib/internal/streams/iter/share.js @@ -9,6 +9,7 @@ const { ArrayPrototypePush, MathMax, Promise, + PromisePrototypeThen, PromiseResolve, SafeSet, String, @@ -204,7 +205,7 @@ class ShareImpl { } if (this.#sourceIterator?.return) { - this.#sourceIterator.return().catch(() => {}); + PromisePrototypeThen(this.#sourceIterator.return(), undefined, () => {}); } for (const consumer of this.#consumers) { @@ -293,7 +294,7 @@ class ShareImpl { }, async return() { return syncIterator.return?.() ?? 
- { done: true, value: undefined }; + { __proto__: null, done: true, value: undefined }; }, }; } else { @@ -399,7 +400,7 @@ class SyncShareImpl { return { next() { if (state.detached) { - return { done: true, value: undefined }; + return { __proto__: null, done: true, value: undefined }; } if (self.#sourceError) { state.detached = true; @@ -409,7 +410,7 @@ class SyncShareImpl { if (self.#cancelled) { state.detached = true; self.#consumers.delete(state); - return { done: true, value: undefined }; + return { __proto__: null, done: true, value: undefined }; } const bufferIndex = state.cursor - self.#bufferStart; @@ -417,13 +418,13 @@ class SyncShareImpl { const chunk = self.#buffer.get(bufferIndex); state.cursor++; self.#tryTrimBuffer(); - return { done: false, value: chunk }; + return { __proto__: null, done: false, value: chunk }; } if (self.#sourceExhausted) { state.detached = true; self.#consumers.delete(state); - return { done: true, value: undefined }; + return { __proto__: null, done: true, value: undefined }; } // Check buffer limit @@ -450,7 +451,7 @@ class SyncShareImpl { case 'drop-newest': state.detached = true; self.#consumers.delete(state); - return { done: true, value: undefined }; + return { __proto__: null, done: true, value: undefined }; } } @@ -467,30 +468,30 @@ class SyncShareImpl { const chunk = self.#buffer.get(newBufferIndex); state.cursor++; self.#tryTrimBuffer(); - return { done: false, value: chunk }; + return { __proto__: null, done: false, value: chunk }; } if (self.#sourceExhausted) { state.detached = true; self.#consumers.delete(state); - return { done: true, value: undefined }; + return { __proto__: null, done: true, value: undefined }; } - return { done: true, value: undefined }; + return { __proto__: null, done: true, value: undefined }; }, return() { state.detached = true; self.#consumers.delete(state); self.#tryTrimBuffer(); - return { done: true, value: undefined }; + return { __proto__: null, done: true, value: undefined }; }, 
throw() { state.detached = true; self.#consumers.delete(state); self.#tryTrimBuffer(); - return { done: true, value: undefined }; + return { __proto__: null, done: true, value: undefined }; }, }; }, diff --git a/lib/internal/streams/iter/transform.js b/lib/internal/streams/iter/transform.js index e073e343998ef2..8c2a4f018efc95 100644 --- a/lib/internal/streams/iter/transform.js +++ b/lib/internal/streams/iter/transform.js @@ -9,14 +9,20 @@ // Each factory returns a transform descriptor that can be passed to pull(). const { + ArrayPrototypeMap, ArrayPrototypePush, + ArrayPrototypeShift, ArrayPrototypeSplice, MathMax, NumberIsNaN, ObjectEntries, ObjectKeys, Promise, + StringPrototypeStartsWith, SymbolAsyncIterator, + TypedArrayPrototypeFill, + TypedArrayPrototypeGetByteLength, + TypedArrayPrototypeSlice, Uint32Array, } = primordials; @@ -57,19 +63,27 @@ const kEmpty = Buffer.alloc(0); // Mirrors the pattern in lib/zlib.js. // --------------------------------------------------------------------------- const kMaxBrotliParam = MathMax( - ...ObjectEntries(constants) - .map(({ 0: key, 1: value }) => - (key.startsWith('BROTLI_PARAM_') ? value : 0)), + ...ArrayPrototypeMap( + ObjectEntries(constants), + ({ 0: key, 1: value }) => + (StringPrototypeStartsWith(key, 'BROTLI_PARAM_') ? value : 0), + ), ); const brotliInitParamsArray = new Uint32Array(kMaxBrotliParam + 1); -const kMaxZstdCParam = MathMax(...ObjectKeys(constants).map( - (key) => (key.startsWith('ZSTD_c_') ? constants[key] : 0)), +const kMaxZstdCParam = MathMax( + ...ArrayPrototypeMap( + ObjectKeys(constants), + (key) => (StringPrototypeStartsWith(key, 'ZSTD_c_') ? constants[key] : 0), + ), ); const zstdInitCParamsArray = new Uint32Array(kMaxZstdCParam + 1); -const kMaxZstdDParam = MathMax(...ObjectKeys(constants).map( - (key) => (key.startsWith('ZSTD_d_') ? constants[key] : 0)), +const kMaxZstdDParam = MathMax( + ...ArrayPrototypeMap( + ObjectKeys(constants), + (key) => (StringPrototypeStartsWith(key, 'ZSTD_d_') ? 
constants[key] : 0), + ), ); const zstdInitDParamsArray = new Uint32Array(kMaxZstdDParam + 1); @@ -113,7 +127,7 @@ function createBrotliHandle(mode, options, processCallback, onError) { const writeState = new Uint32Array(2); const chunkSize = options?.chunkSize ?? Z_DEFAULT_CHUNK; - brotliInitParamsArray.fill(-1); + TypedArrayPrototypeFill(brotliInitParamsArray, -1); if (options?.params) { const params = options.params; const keys = ObjectKeys(params); @@ -150,7 +164,7 @@ function createZstdHandle(mode, options, processCallback, onError) { const initArray = isCompress ? zstdInitCParamsArray : zstdInitDParamsArray; const maxParam = isCompress ? kMaxZstdCParam : kMaxZstdDParam; - initArray.fill(-1); + TypedArrayPrototypeFill(initArray, -1); if (options?.params) { const params = options.params; const keys = ObjectKeys(params); @@ -216,7 +230,7 @@ function makeZlibTransform(createHandleFn, processFlag, finishFlag) { if (have > 0) { ArrayPrototypePush(pending, - outBuf.slice(outOffset, outOffset + have)); + TypedArrayPrototypeSlice(outBuf, outOffset, outOffset + have)); pendingBytes += have; outOffset += have; } @@ -290,14 +304,14 @@ function makeZlibTransform(createHandleFn, processFlag, finishFlag) { writeInput = input; writeFlush = flushFlag; writeInOff = 0; - writeAvailIn = input.byteLength; + writeAvailIn = TypedArrayPrototypeGetByteLength(input); writeAvailOutBefore = chunkSize - outOffset; // Keep input alive while the threadpool references it. 
handle.buffer = input; handle.write(flushFlag, - input, 0, input.byteLength, + input, 0, TypedArrayPrototypeGetByteLength(input), outBuf, outOffset, writeAvailOutBefore); }); } @@ -311,10 +325,10 @@ function makeZlibTransform(createHandleFn, processFlag, finishFlag) { const batch = []; let batchBytes = 0; while (pending.length > 0 && batchBytes < BATCH_HWM) { - const buf = pending.shift(); + const buf = ArrayPrototypeShift(pending); ArrayPrototypePush(batch, buf); - batchBytes += buf.byteLength; - pendingBytes -= buf.byteLength; + batchBytes += TypedArrayPrototypeGetByteLength(buf); + pendingBytes -= TypedArrayPrototypeGetByteLength(buf); } return batch; } diff --git a/lib/internal/streams/iter/utils.js b/lib/internal/streams/iter/utils.js index ceacc7c904b463..81f9670419b69c 100644 --- a/lib/internal/streams/iter/utils.js +++ b/lib/internal/streams/iter/utils.js @@ -4,6 +4,7 @@ const { ArrayPrototypeSlice, + TypedArrayPrototypeGetByteLength, TypedArrayPrototypeSet, Uint8Array, } = primordials; @@ -47,7 +48,7 @@ function allUint8Array(chunks) { function totalByteLength(chunks) { let total = 0; for (let i = 0; i < chunks.length; i++) { - total += chunks[i].byteLength; + total += TypedArrayPrototypeGetByteLength(chunks[i]); } return total; } @@ -70,7 +71,7 @@ function concatBytes(chunks) { let offset = 0; for (let i = 0; i < chunks.length; i++) { TypedArrayPrototypeSet(result, chunks[i], offset); - offset += chunks[i].byteLength; + offset += TypedArrayPrototypeGetByteLength(chunks[i]); } return result; } From 8811c6bf119c5ac1ccfbff57400b086bc085e017 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 17 Mar 2026 21:53:15 -0700 Subject: [PATCH 17/42] stream: fix Broadcast.from() silently dropping non-array chunks `Broadcast.from()` only handled sources that yield `Uint8Array[]` batches (arrays), silently discarding any non-array values like single `Uint8Array` chunks or strings. Route non-array chunks through `writer.write()` instead of skipping them. 
--- lib/internal/streams/iter/broadcast.js | 6 +++++ test/parallel/test-stream-iter-broadcast.js | 26 +++++++++++++++++++++ 2 files changed, 32 insertions(+) diff --git a/lib/internal/streams/iter/broadcast.js b/lib/internal/streams/iter/broadcast.js index b75c32b65daf98..75e42895bd2b3b 100644 --- a/lib/internal/streams/iter/broadcast.js +++ b/lib/internal/streams/iter/broadcast.js @@ -693,6 +693,9 @@ const Broadcast = { if (ArrayIsArray(chunks)) { await result.writer.writev( chunks, signal ? { signal } : undefined); + } else { + await result.writer.write( + chunks, signal ? { signal } : undefined); } } } else if (isSyncIterable(input)) { @@ -704,6 +707,9 @@ const Broadcast = { if (ArrayIsArray(chunks)) { await result.writer.writev( chunks, signal ? { signal } : undefined); + } else { + await result.writer.write( + chunks, signal ? { signal } : undefined); } } } diff --git a/test/parallel/test-stream-iter-broadcast.js b/test/parallel/test-stream-iter-broadcast.js index 9ce7c22b317e85..acea956b9e6b88 100644 --- a/test/parallel/test-stream-iter-broadcast.js +++ b/test/parallel/test-stream-iter-broadcast.js @@ -210,6 +210,30 @@ async function testBroadcastFromAsyncIterable() { assert.strictEqual(data, 'broadcast-from'); } +async function testBroadcastFromNonArrayChunks() { + // Source that yields single Uint8Array chunks (not arrays) + async function* singleChunkSource() { + yield new TextEncoder().encode('hello'); + yield new TextEncoder().encode(' world'); + } + const { broadcast: bc } = Broadcast.from(singleChunkSource()); + const consumer = bc.push(); + const data = await text(consumer); + assert.strictEqual(data, 'hello world'); +} + +async function testBroadcastFromStringChunks() { + // Source that yields bare strings (not arrays) + async function* stringSource() { + yield 'foo'; + yield 'bar'; + } + const { broadcast: bc } = Broadcast.from(stringSource()); + const consumer = bc.push(); + const data = await text(consumer); + assert.strictEqual(data, 'foobar'); 
+} + async function testBroadcastFromMultipleConsumers() { const source = from('shared-data'); const { broadcast: bc } = Broadcast.from(source); @@ -342,6 +366,8 @@ Promise.all([ testCancelWithoutReason(), testCancelWithReason(), testBroadcastFromAsyncIterable(), + testBroadcastFromNonArrayChunks(), + testBroadcastFromStringChunks(), testBroadcastFromMultipleConsumers(), testAbortSignal(), testAlreadyAbortedSignal(), From a6a37e56130aafa2f440c1480ee45ed00cb061d3 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 17 Mar 2026 22:05:08 -0700 Subject: [PATCH 18/42] stream: fix share consumer premature termination on concurrent pull When multiple share consumers call `next()` concurrently, they all await the same `#pullFromSource()` call. When it completes, only one item is added to the buffer, so consumers whose cursor doesn't find data would incorrectly return done:true even though the source still has data. Replace the linear fall-through in the consumer `next()` method with a retry loop. Consumers that wake without data at their cursor now re-pull from source. The existing `#pulling` guard ensures only one actual source pull is in flight at a time; other consumers enqueue on `#pullWaiters` at the cost of a single promise allocation. 
--- lib/internal/streams/iter/share.js | 87 ++++++++++--------------- test/parallel/test-stream-iter-share.js | 27 ++++++++ 2 files changed, 63 insertions(+), 51 deletions(-) diff --git a/lib/internal/streams/iter/share.js b/lib/internal/streams/iter/share.js index 34ed87f58512c6..2a7c538623e75a 100644 --- a/lib/internal/streams/iter/share.js +++ b/lib/internal/streams/iter/share.js @@ -115,63 +115,48 @@ class ShareImpl { throw self.#sourceError; } - if (state.detached) { - return { __proto__: null, done: true, value: undefined }; - } - - if (self.#cancelled) { - state.detached = true; - self.#consumers.delete(state); - return { __proto__: null, done: true, value: undefined }; - } - - // Check if data is available in buffer - const bufferIndex = state.cursor - self.#bufferStart; - if (bufferIndex < self.#buffer.length) { - const chunk = self.#buffer.get(bufferIndex); - state.cursor++; - self.#tryTrimBuffer(); - return { __proto__: null, done: false, value: chunk }; - } - - if (self.#sourceExhausted) { - state.detached = true; - self.#consumers.delete(state); - return { __proto__: null, done: true, value: undefined }; - } + // Loop until we get data, source is exhausted, or + // consumer is detached. Multiple consumers may be woken + // after a single pull - those that find no data at their + // cursor must re-pull rather than terminating prematurely. 
+ for (;;) { + if (state.detached) { + return { __proto__: null, done: true, value: undefined }; + } - // Need to pull from source - check buffer limit - const canPull = await self.#waitForBufferSpace(state); - if (!canPull) { - state.detached = true; - self.#consumers.delete(state); - if (self.#sourceError) throw self.#sourceError; - return { __proto__: null, done: true, value: undefined }; - } + if (self.#cancelled) { + state.detached = true; + self.#consumers.delete(state); + return { __proto__: null, done: true, value: undefined }; + } - await self.#pullFromSource(); + // Check if data is available in buffer + const bufferIndex = state.cursor - self.#bufferStart; + if (bufferIndex < self.#buffer.length) { + const chunk = self.#buffer.get(bufferIndex); + state.cursor++; + self.#tryTrimBuffer(); + return { __proto__: null, done: false, value: chunk }; + } - if (self.#sourceError) { - state.detached = true; - self.#consumers.delete(state); - throw self.#sourceError; - } + if (self.#sourceExhausted) { + state.detached = true; + self.#consumers.delete(state); + if (self.#sourceError) throw self.#sourceError; + return { __proto__: null, done: true, value: undefined }; + } - const newBufferIndex = state.cursor - self.#bufferStart; - if (newBufferIndex < self.#buffer.length) { - const chunk = self.#buffer.get(newBufferIndex); - state.cursor++; - self.#tryTrimBuffer(); - return { __proto__: null, done: false, value: chunk }; - } + // Need to pull from source - check buffer limit + const canPull = await self.#waitForBufferSpace(state); + if (!canPull) { + state.detached = true; + self.#consumers.delete(state); + if (self.#sourceError) throw self.#sourceError; + return { __proto__: null, done: true, value: undefined }; + } - if (self.#sourceExhausted) { - state.detached = true; - self.#consumers.delete(state); - return { __proto__: null, done: true, value: undefined }; + await self.#pullFromSource(); } - - return { __proto__: null, done: true, value: undefined }; }, async 
return() { diff --git a/test/parallel/test-stream-iter-share.js b/test/parallel/test-stream-iter-share.js index 7aefdd2111a86a..76b61f0a8ec74d 100644 --- a/test/parallel/test-stream-iter-share.js +++ b/test/parallel/test-stream-iter-share.js @@ -238,4 +238,31 @@ Promise.all([ testShareSyncCancel(), testSyncShareFromSync(), testSyncShareFromRejectsNonStreamable(), + testShareMultipleConsumersConcurrentPull(), ]).then(common.mustCall()); + +async function testShareMultipleConsumersConcurrentPull() { + // Regression test: multiple consumers pulling concurrently should each + // receive all items even when only one item is pulled from source at a time. + // Previously, consumers woken after a pull that found no data at their + // cursor would return done:true prematurely (thundering herd bug). + async function* slowSource() { + for (let i = 0; i < 5; i++) { + await new Promise((r) => setTimeout(r, 1)); + yield [new TextEncoder().encode(`item-${i}`)]; + } + } + const shared = share(slowSource()); + const c1 = shared.pull(); + const c2 = shared.pull(); + const c3 = shared.pull(); + + const [t1, t2, t3] = await Promise.all([ + text(c1), text(c2), text(c3), + ]); + + const expected = 'item-0item-1item-2item-3item-4'; + assert.strictEqual(t1, expected); + assert.strictEqual(t2, expected); + assert.strictEqual(t3, expected); +} From 0c9827aebd309215585c26e606b88b4f5082fcbd Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 17 Mar 2026 22:36:07 -0700 Subject: [PATCH 19/42] stream: fix Writer sync method return values for try-fallback pattern All `*Sync` methods on the Writer interface are designed as try-fallback pairs with their async counterparts: attempt the fast synchronous path first, and fall back to the async version only when the synchronous call indicates it could not complete. `endSync()` was returning the byte count unconditionally, making the fallback check in duplex `close()` incorrect (`!0 === true`). 
`failSync()` always returned true even when already errored. - `endSync()` now returns -1 when writer is not open, byte count (`>= 0`) on success - `failSync()` now returns false when already errored - `PushQueue.fail()` returns boolean to support `failSync()` - duplex `close()` uses `endSync() < 0` for the fallback check - Document the try-fallback pattern and sync return values --- doc/api/stream_iter.md | 32 ++++++++++++++++++++++++-- lib/internal/streams/iter/duplex.js | 4 ++-- lib/internal/streams/iter/push.js | 9 ++++---- test/parallel/test-stream-iter-push.js | 3 +++ 4 files changed, 40 insertions(+), 8 deletions(-) diff --git a/doc/api/stream_iter.md b/doc/api/stream_iter.md index 4760b1e583c209..739afb10a57192 100644 --- a/doc/api/stream_iter.md +++ b/doc/api/stream_iter.md @@ -695,7 +695,19 @@ run().catch(console.error); #### Writer -The writer returned by `push()` has the following methods: +The writer returned by `push()` has the following methods. + +Each async method has a synchronous `*Sync` counterpart designed for a +try-fallback pattern: attempt the fast synchronous path first, and fall back +to the async version only when the synchronous call indicates it could not +complete: + +```mjs +if (!writer.writeSync(chunk)) await writer.write(chunk); +if (!writer.writevSync(chunks)) await writer.writev(chunks); +if (writer.endSync() < 0) await writer.end(); +if (!writer.failSync(err)) await writer.fail(err); +``` ##### `writer.fail(reason)` @@ -707,8 +719,10 @@ Fail the stream with an error. ##### `writer.failSync(reason)` * `reason` {Error} +* Returns: {boolean} `true` if the writer was failed, `false` if already + errored. -Synchronously fail the stream with an error. Does not return a promise. +Synchronous variant of `writer.fail()`. ##### `writer.desiredSize` @@ -726,6 +740,20 @@ Returns `null` if the writer is closed or the consumer has disconnected. Signal that no more data will be written. 
+##### `writer.endSync()` + +* Returns: {number} Total bytes written, or `-1` if the writer is not open. + +Synchronous variant of `writer.end()`. Returns `-1` if the writer is already +closed or errored. Can be used as a try-fallback pattern: + +```cjs +const result = writer.endSync(); +if (result < 0) { + writer.end(); +} +``` + ##### `writer.write(chunk[, options])` * `chunk` {Uint8Array|string} diff --git a/lib/internal/streams/iter/duplex.js b/lib/internal/streams/iter/duplex.js index bce898443d6552..2f50855b329032 100644 --- a/lib/internal/streams/iter/duplex.js +++ b/lib/internal/streams/iter/duplex.js @@ -46,7 +46,7 @@ function duplex(options) { if (aWriterRef === null) return; const writer = aWriterRef; aWriterRef = null; - if (!writer.endSync()) { + if (writer.endSync() < 0) { await writer.end(); } }, @@ -62,7 +62,7 @@ function duplex(options) { if (bWriterRef === null) return; const writer = bWriterRef; bWriterRef = null; - if (!writer.endSync()) { + if (writer.endSync() < 0) { await writer.end(); } }, diff --git a/lib/internal/streams/iter/push.js b/lib/internal/streams/iter/push.js index c932becea23305..9a6d9cdd414c72 100644 --- a/lib/internal/streams/iter/push.js +++ b/lib/internal/streams/iter/push.js @@ -248,7 +248,7 @@ class PushQueue { */ end() { if (this.#writerState !== 'open') { - return this.#bytesWritten; + return -1; } this.#writerState = 'closed'; @@ -261,9 +261,10 @@ class PushQueue { /** * Put queue into terminal error state. + * @returns {boolean} true if the writer was failed, false if already errored. */ fail(reason) { - if (this.#writerState === 'errored') return; + if (this.#writerState === 'errored') return false; this.#writerState = 'errored'; this.#error = reason ?? 
new ERR_INVALID_STATE('Failed'); @@ -271,6 +272,7 @@ class PushQueue { this.#rejectPendingReads(this.#error); this.#rejectPendingWrites(this.#error); this.#rejectPendingDrains(this.#error); + return true; } get totalBytesWritten() { @@ -512,8 +514,7 @@ class PushWriter { } failSync(reason) { - this.#queue.fail(reason); - return true; + return this.#queue.fail(reason); } } diff --git a/test/parallel/test-stream-iter-push.js b/test/parallel/test-stream-iter-push.js index 15a996ebf09879..fefa745df7a79a 100644 --- a/test/parallel/test-stream-iter-push.js +++ b/test/parallel/test-stream-iter-push.js @@ -108,6 +108,9 @@ async function testWriterEnd() { const totalBytes = writer.endSync(); assert.strictEqual(totalBytes, 0); + // Calling endSync again returns -1 (already closed) + assert.strictEqual(writer.endSync(), -1); + const batches = []; for await (const batch of readable) { batches.push(batch); From 32eaae7f5b0f72ffd19bcb8e3c2cc4896bc2bb68 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 17 Mar 2026 22:52:14 -0700 Subject: [PATCH 20/42] stream: use Writer try-fallback pattern in pipeTo and Broadcast.from The `Writer` interface sync methods are designed as try-fallback pairs: attempt the fast sync path first, fall back to async only when rejected or unavailable. This was not being followed in `pipeTo`, `pipeToSync`, or `Broadcast.from()`. `pipeTo` now attempts `writeSync`/`writevSync` before `write`/`writev`, `endSync` before `end`, and `failSync` before `fail`, with graceful handling when *Sync methods are not present on the writer. `pipeToSync` similarly prefers *Sync methods with fallback to the non-sync variants for writers that lack them. `Broadcast.from()` applies the same pattern when writing source data to the internal broadcast writer. Additionally, writes in p`ipeTo are now sequential rather than parallel (previously used `Promise.all`), which respects writer backpressure signals. 
--- lib/internal/streams/iter/broadcast.js | 32 +++++++++------- lib/internal/streams/iter/pull.js | 53 ++++++++++++++++++-------- 2 files changed, 56 insertions(+), 29 deletions(-) diff --git a/lib/internal/streams/iter/broadcast.js b/lib/internal/streams/iter/broadcast.js index 75e42895bd2b3b..c950403ad140c7 100644 --- a/lib/internal/streams/iter/broadcast.js +++ b/lib/internal/streams/iter/broadcast.js @@ -683,6 +683,7 @@ const Broadcast = { const signal = options?.signal; (async () => { + const w = result.writer; try { if (isAsyncIterable(input)) { for await (const chunks of input) { @@ -691,11 +692,11 @@ const Broadcast = { lazyDOMException('Aborted', 'AbortError'); } if (ArrayIsArray(chunks)) { - await result.writer.writev( - chunks, signal ? { signal } : undefined); - } else { - await result.writer.write( - chunks, signal ? { signal } : undefined); + if (!w.writevSync(chunks)) { + await w.writev(chunks, signal ? { signal } : undefined); + } + } else if (!w.writeSync(chunks)) { + await w.write(chunks, signal ? { signal } : undefined); } } } else if (isSyncIterable(input)) { @@ -705,18 +706,23 @@ const Broadcast = { lazyDOMException('Aborted', 'AbortError'); } if (ArrayIsArray(chunks)) { - await result.writer.writev( - chunks, signal ? { signal } : undefined); - } else { - await result.writer.write( - chunks, signal ? { signal } : undefined); + if (!w.writevSync(chunks)) { + await w.writev(chunks, signal ? { signal } : undefined); + } + } else if (!w.writeSync(chunks)) { + await w.write(chunks, signal ? { signal } : undefined); } } } - await result.writer.end(signal ? { signal } : undefined); + if (w.endSync() < 0) { + await w.end(signal ? { signal } : undefined); + } } catch (error) { - await result.writer.fail( - isError(error) ? error : new ERR_INVALID_ARG_TYPE('error', 'Error', String(error))); + const err = isError(error) ? 
error : + new ERR_INVALID_ARG_TYPE('error', 'Error', String(error)); + if (!w.failSync(err)) { + await w.fail(err); + } } })(); diff --git a/lib/internal/streams/iter/pull.js b/lib/internal/streams/iter/pull.js index f03c828c8f4e5a..d128d43b40173d 100644 --- a/lib/internal/streams/iter/pull.js +++ b/lib/internal/streams/iter/pull.js @@ -10,7 +10,6 @@ const { ArrayIsArray, ArrayPrototypePush, ArrayPrototypeSlice, - SafePromiseAllReturnVoid, String, SymbolAsyncIterator, SymbolIterator, @@ -610,22 +609,33 @@ function pipeToSync(source, ...args) { source; let totalBytes = 0; + const hasWriteSync = typeof writer.writeSync === 'function'; + const hasEndSync = typeof writer.endSync === 'function'; + const hasFailSync = typeof writer.failSync === 'function'; try { for (const batch of pipeline) { for (let i = 0; i < batch.length; i++) { const chunk = batch[i]; - writer.write(chunk); + if (!hasWriteSync || !writer.writeSync(chunk)) { + writer.write(chunk); + } totalBytes += TypedArrayPrototypeGetByteLength(chunk); } } if (!options?.preventClose) { - writer.end(); + if (!hasEndSync || writer.endSync() < 0) { + writer.end(); + } } } catch (error) { if (!options?.preventFail) { - writer.fail(isError(error) ? error : new ERR_OPERATION_FAILED(String(error))); + const err = isError(error) ? error : + new ERR_OPERATION_FAILED(String(error)); + if (!hasFailSync || !writer.failSync(err)) { + writer.fail(err); + } } throw error; } @@ -657,27 +667,31 @@ async function pipeTo(source, ...args) { let totalBytes = 0; const hasWritev = typeof writer.writev === 'function'; + const hasWritevSync = typeof writer.writevSync === 'function'; + const hasWriteSync = typeof writer.writeSync === 'function'; - // Helper to write a batch efficiently + // Helper to write a batch efficiently using try-fallback pattern: + // attempt the sync path first, fall back to async if rejected. const writeBatch = async (batch) => { if (hasWritev && batch.length > 1) { - await writer.writev(batch, signal ? 
{ signal } : undefined); + if (!hasWritevSync || + !writer.writevSync(batch)) { + await writer.writev(batch, signal ? { signal } : undefined); + } for (let i = 0; i < batch.length; i++) { totalBytes += TypedArrayPrototypeGetByteLength(batch[i]); } } else { - const promises = []; for (let i = 0; i < batch.length; i++) { const chunk = batch[i]; - const result = writer.write(chunk, signal ? { signal } : undefined); - if (result !== undefined) { - ArrayPrototypePush(promises, result); + if (!hasWriteSync || !writer.writeSync(chunk)) { + const result = writer.write(chunk, signal ? { signal } : undefined); + if (result !== undefined) { + await result; + } } totalBytes += TypedArrayPrototypeGetByteLength(chunk); } - if (promises.length > 0) { - await SafePromiseAllReturnVoid(promises); - } } }; @@ -719,12 +733,19 @@ async function pipeTo(source, ...args) { } if (!options?.preventClose) { - await writer.end(signal ? { signal } : undefined); + if (typeof writer.endSync !== 'function' || + writer.endSync() < 0) { + await writer.end(signal ? { signal } : undefined); + } } } catch (error) { if (!options?.preventFail) { - await writer.fail( - isError(error) ? error : new ERR_OPERATION_FAILED(String(error))); + const err = isError(error) ? error : + new ERR_OPERATION_FAILED(String(error)); + if (typeof writer.failSync !== 'function' || + !writer.failSync(err)) { + await writer.fail(err); + } } throw error; } From 99eb71262c203dda0fca4ca7527f36e4f003956b Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 17 Mar 2026 23:02:34 -0700 Subject: [PATCH 21/42] stream: tolerate non-array batches in stream/iter consumers The stream protocol specifies that sources yield `Uint8Array[]` batches, but a source that yields a raw Uint8Array would silently produce garbage: batch.length returns the byte count and `batch[i]` yields individual byte values as numbers. 
Add an ``ensureBatch()`` helper that wraps non-array values in a single-element array, applied at all batch iteration entry points in `bytes`, `bytesSync`, `array`, `arraySync` and their fast/slow paths. --- lib/internal/streams/iter/consumers.js | 38 +++++++++++----- test/parallel/test-stream-iter-consumers.js | 50 +++++++++++++++++++++ 2 files changed, 78 insertions(+), 10 deletions(-) diff --git a/lib/internal/streams/iter/consumers.js b/lib/internal/streams/iter/consumers.js index 25c2db0584c475..774f30ce7baf3d 100644 --- a/lib/internal/streams/iter/consumers.js +++ b/lib/internal/streams/iter/consumers.js @@ -10,6 +10,7 @@ const { ArrayBufferPrototypeGetByteLength, ArrayBufferPrototypeSlice, + ArrayIsArray, ArrayPrototypeFilter, ArrayPrototypeMap, ArrayPrototypePush, @@ -58,6 +59,13 @@ function isMergeOptions(value) { ); } +// Normalize a yielded value to a Uint8Array[] batch. Sources should yield +// Uint8Array[] but a raw Uint8Array or string is tolerated by wrapping it. +function ensureBatch(batch) { + if (ArrayIsArray(batch)) return batch; + return [batch]; +} + // ============================================================================= // Sync Consumers // ============================================================================= @@ -73,7 +81,8 @@ function bytesSync(source, options) { const chunks = []; let totalBytes = 0; - for (const batch of source) { + for (const raw of source) { + const batch = ensureBatch(raw); for (let i = 0; i < batch.length; i++) { const chunk = batch[i]; if (limit !== undefined) { @@ -134,7 +143,8 @@ function arraySync(source, options) { const chunks = []; let totalBytes = 0; - for (const batch of source) { + for (const raw of source) { + const batch = ensureBatch(raw); for (let i = 0; i < batch.length; i++) { const chunk = batch[i]; if (limit !== undefined) { @@ -173,13 +183,15 @@ async function bytes(source, options) { // Fast path: no signal and no limit if (!signal && limit === undefined) { if 
(isAsyncIterable(source)) { - for await (const batch of source) { + for await (const raw of source) { + const batch = ensureBatch(raw); for (let i = 0; i < batch.length; i++) { ArrayPrototypePush(chunks, batch[i]); } } } else if (isSyncIterable(source)) { - for (const batch of source) { + for (const raw of source) { + const batch = ensureBatch(raw); for (let i = 0; i < batch.length; i++) { ArrayPrototypePush(chunks, batch[i]); } @@ -194,7 +206,8 @@ async function bytes(source, options) { let totalBytes = 0; if (isAsyncIterable(source)) { - for await (const batch of source) { + for await (const raw of source) { + const batch = ensureBatch(raw); if (signal?.aborted) { throw signal.reason ?? lazyDOMException('Aborted', 'AbortError'); } @@ -210,7 +223,8 @@ async function bytes(source, options) { } } } else if (isSyncIterable(source)) { - for (const batch of source) { + for (const raw of source) { + const batch = ensureBatch(raw); if (signal?.aborted) { throw signal.reason ?? lazyDOMException('Aborted', 'AbortError'); } @@ -285,13 +299,15 @@ async function array(source, options) { // Fast path: no signal and no limit if (!signal && limit === undefined) { if (isAsyncIterable(source)) { - for await (const batch of source) { + for await (const raw of source) { + const batch = ensureBatch(raw); for (let i = 0; i < batch.length; i++) { ArrayPrototypePush(chunks, batch[i]); } } } else if (isSyncIterable(source)) { - for (const batch of source) { + for (const raw of source) { + const batch = ensureBatch(raw); for (let i = 0; i < batch.length; i++) { ArrayPrototypePush(chunks, batch[i]); } @@ -306,7 +322,8 @@ async function array(source, options) { let totalBytes = 0; if (isAsyncIterable(source)) { - for await (const batch of source) { + for await (const raw of source) { + const batch = ensureBatch(raw); if (signal?.aborted) { throw signal.reason ?? 
lazyDOMException('Aborted', 'AbortError'); } @@ -322,7 +339,8 @@ async function array(source, options) { } } } else if (isSyncIterable(source)) { - for (const batch of source) { + for (const raw of source) { + const batch = ensureBatch(raw); if (signal?.aborted) { throw signal.reason ?? lazyDOMException('Aborted', 'AbortError'); } diff --git a/test/parallel/test-stream-iter-consumers.js b/test/parallel/test-stream-iter-consumers.js index 385539fd31985a..441c96d79aeffa 100644 --- a/test/parallel/test-stream-iter-consumers.js +++ b/test/parallel/test-stream-iter-consumers.js @@ -317,4 +317,54 @@ Promise.all([ testMergeSingleSource(), testMergeEmpty(), testMergeWithAbortSignal(), + testConsumersNonArrayBatch(), + testConsumersNonArrayBatchSync(), ]).then(common.mustCall()); + +// Regression test: consumers should tolerate sources that yield raw +// Uint8Array or string values instead of Uint8Array[] batches. +async function testConsumersNonArrayBatch() { + const encoder = new TextEncoder(); + + // Source yields raw Uint8Array, not wrapped in an array + async function* rawSource() { + yield encoder.encode('hello'); + yield encoder.encode(' world'); + } + const result = await text(rawSource()); + assert.strictEqual(result, 'hello world'); + + // bytes() with raw chunks + async function* rawSource2() { + yield encoder.encode('ab'); + } + const data = await bytes(rawSource2()); + assert.strictEqual(data.length, 2); + assert.strictEqual(data[0], 97); // 'a' + assert.strictEqual(data[1], 98); // 'b' + + // array() with raw chunks + async function* rawSource3() { + yield encoder.encode('x'); + yield encoder.encode('y'); + } + const arr = await array(rawSource3()); + assert.strictEqual(arr.length, 2); +} + +async function testConsumersNonArrayBatchSync() { + const encoder = new TextEncoder(); + + function* rawSyncSource() { + yield encoder.encode('sync'); + yield encoder.encode('data'); + } + const result = textSync(rawSyncSource()); + assert.strictEqual(result, 'syncdata'); 
+ + const data = bytesSync(rawSyncSource()); + assert.strictEqual(data.length, 8); + + const arr = arraySync(rawSyncSource()); + assert.strictEqual(arr.length, 2); +} From c6c39d4cfeb48f38dfa35397c2fe6da4d2f7b247 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 17 Mar 2026 23:11:09 -0700 Subject: [PATCH 22/42] stream: treat Writer end/fail methods as optional in pipeTo The `Writer` interface only requires `write()`. The `end()`, `fail()`, and all `*Sync` variant methods are optional. `pipeTo` and `pipeToSync` were calling these without existence checks, which would throw on minimal writer implementations. Use optional chaining for all optional writer methods throughout `pipeTo` and `pipeToSync`. Added tests verifying `pipeTo` works with a minimal writer that only implements `write()`. --- lib/internal/streams/iter/pull.js | 30 ++++++++++---------------- test/parallel/test-stream-iter-pull.js | 30 ++++++++++++++++++++++++++ 2 files changed, 41 insertions(+), 19 deletions(-) diff --git a/lib/internal/streams/iter/pull.js b/lib/internal/streams/iter/pull.js index d128d43b40173d..1824ea4fe9e7f4 100644 --- a/lib/internal/streams/iter/pull.js +++ b/lib/internal/streams/iter/pull.js @@ -609,15 +609,12 @@ function pipeToSync(source, ...args) { source; let totalBytes = 0; - const hasWriteSync = typeof writer.writeSync === 'function'; - const hasEndSync = typeof writer.endSync === 'function'; - const hasFailSync = typeof writer.failSync === 'function'; try { for (const batch of pipeline) { for (let i = 0; i < batch.length; i++) { const chunk = batch[i]; - if (!hasWriteSync || !writer.writeSync(chunk)) { + if (!writer.writeSync?.(chunk)) { writer.write(chunk); } totalBytes += TypedArrayPrototypeGetByteLength(chunk); @@ -625,16 +622,16 @@ function pipeToSync(source, ...args) { } if (!options?.preventClose) { - if (!hasEndSync || writer.endSync() < 0) { - writer.end(); + if (!(writer.endSync?.() >= 0)) { + writer.end?.(); } } } catch (error) { if (!options?.preventFail) 
{ const err = isError(error) ? error : new ERR_OPERATION_FAILED(String(error)); - if (!hasFailSync || !writer.failSync(err)) { - writer.fail(err); + if (!writer.failSync?.(err)) { + writer.fail?.(err); } } throw error; @@ -667,15 +664,12 @@ async function pipeTo(source, ...args) { let totalBytes = 0; const hasWritev = typeof writer.writev === 'function'; - const hasWritevSync = typeof writer.writevSync === 'function'; - const hasWriteSync = typeof writer.writeSync === 'function'; // Helper to write a batch efficiently using try-fallback pattern: // attempt the sync path first, fall back to async if rejected. const writeBatch = async (batch) => { if (hasWritev && batch.length > 1) { - if (!hasWritevSync || - !writer.writevSync(batch)) { + if (!writer.writevSync?.(batch)) { await writer.writev(batch, signal ? { signal } : undefined); } for (let i = 0; i < batch.length; i++) { @@ -684,7 +678,7 @@ async function pipeTo(source, ...args) { } else { for (let i = 0; i < batch.length; i++) { const chunk = batch[i]; - if (!hasWriteSync || !writer.writeSync(chunk)) { + if (!writer.writeSync?.(chunk)) { const result = writer.write(chunk, signal ? { signal } : undefined); if (result !== undefined) { await result; @@ -733,18 +727,16 @@ async function pipeTo(source, ...args) { } if (!options?.preventClose) { - if (typeof writer.endSync !== 'function' || - writer.endSync() < 0) { - await writer.end(signal ? { signal } : undefined); + if (!(writer.endSync?.() >= 0)) { + await writer.end?.(signal ? { signal } : undefined); } } } catch (error) { if (!options?.preventFail) { const err = isError(error) ? 
error : new ERR_OPERATION_FAILED(String(error)); - if (typeof writer.failSync !== 'function' || - !writer.failSync(err)) { - await writer.fail(err); + if (!writer.failSync?.(err)) { + await writer.fail?.(err); } } throw error; diff --git a/test/parallel/test-stream-iter-pull.js b/test/parallel/test-stream-iter-pull.js index b327493f1d9383..a2a9aeffa91dd7 100644 --- a/test/parallel/test-stream-iter-pull.js +++ b/test/parallel/test-stream-iter-pull.js @@ -211,4 +211,34 @@ Promise.all([ testPipeToSync(), testPipeTo(), testPipeToPreventClose(), + testPipeToMinimalWriter(), + testPipeToSyncMinimalWriter(), ]).then(common.mustCall()); + +// Regression test: pipeTo should work with a minimal writer that only +// implements write(). end(), fail(), and all *Sync methods are optional. +async function testPipeToMinimalWriter() { + const chunks = []; + const minimalWriter = { + write(chunk) { + chunks.push(chunk); + }, + }; + + const source = from('minimal'); + await pipeTo(source, minimalWriter); + assert.strictEqual(chunks.length > 0, true); +} + +async function testPipeToSyncMinimalWriter() { + const chunks = []; + const minimalWriter = { + write(chunk) { + chunks.push(chunk); + }, + }; + + const source = fromSync('minimal-sync'); + pipeToSync(source, minimalWriter); + assert.strictEqual(chunks.length > 0, true); +} From 6b93fa8e7d10a251e18d5f7fbc47d023d0f7443c Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 17 Mar 2026 23:19:55 -0700 Subject: [PATCH 23/42] stream: add bounds checking to RingBuffer.get() Return `undefined` for out-of-bounds indices instead of wrapping into stale backing array slots via modular arithmetic. 
--- lib/internal/streams/iter/ringbuffer.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/internal/streams/iter/ringbuffer.js b/lib/internal/streams/iter/ringbuffer.js index c3ad401a3bdb08..0d70f53650ef3d 100644 --- a/lib/internal/streams/iter/ringbuffer.js +++ b/lib/internal/streams/iter/ringbuffer.js @@ -65,9 +65,11 @@ class RingBuffer { /** * Read item at a logical index (0 = head). O(1). + * Returns undefined if index is out of bounds. * @returns {any} */ get(index) { + if (index < 0 || index >= this.#size) return undefined; return this.#backing[(this.#head + index) % this.#capacity]; } From 4e303867489c2f1eb33e30211b4416d4ce10ff51 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 17 Mar 2026 23:27:07 -0700 Subject: [PATCH 24/42] stream: deduplicate consumer sync/async iteration logic Extract shared helpers from the consumer functions: - `collectSync()` for sync chunk collection with optional limit - `collectAsync()` for async/sync chunk collection with signal + limit - `toArrayBuffer()` for `Uint8Array`-to-`ArrayBuffer` conversion `bytesSync`, `bytes`, `arraySync`, `array`, `arrayBufferSync`, and arrayBu`ffer are now thin wrappers over these helpers, eliminating ~170 lines of nearly identical iteration code --- lib/internal/streams/iter/consumers.js | 255 +++++++++---------------- 1 file changed, 93 insertions(+), 162 deletions(-) diff --git a/lib/internal/streams/iter/consumers.js b/lib/internal/streams/iter/consumers.js index 774f30ce7baf3d..69c3485944709b 100644 --- a/lib/internal/streams/iter/consumers.js +++ b/lib/internal/streams/iter/consumers.js @@ -67,79 +67,16 @@ function ensureBatch(batch) { } // ============================================================================= -// Sync Consumers +// Shared chunk collection helpers // ============================================================================= /** - * Collect all bytes from a sync source. 
- * @param {Iterable} source - * @param {{ limit?: number }} [options] - * @returns {Uint8Array} - */ -function bytesSync(source, options) { - const limit = options?.limit; - const chunks = []; - let totalBytes = 0; - - for (const raw of source) { - const batch = ensureBatch(raw); - for (let i = 0; i < batch.length; i++) { - const chunk = batch[i]; - if (limit !== undefined) { - totalBytes += TypedArrayPrototypeGetByteLength(chunk); - if (totalBytes > limit) { - throw new ERR_OUT_OF_RANGE('totalBytes', `<= ${limit}`, totalBytes); - } - } - ArrayPrototypePush(chunks, chunk); - } - } - - return concatBytes(chunks); -} - -/** - * Collect and decode text from a sync source. - * @param {Iterable} source - * @param {{ encoding?: string, limit?: number }} [options] - * @returns {string} - */ -function textSync(source, options) { - const data = bytesSync(source, options); - const decoder = new TextDecoder(options?.encoding ?? 'utf-8', { - fatal: true, - ignoreBOM: true, - }); - return decoder.decode(data); -} - -/** - * Collect bytes as ArrayBuffer from a sync source. + * Collect chunks from a sync source into an array. * @param {Iterable} source - * @param {{ limit?: number }} [options] - * @returns {ArrayBuffer} - */ -function arrayBufferSync(source, options) { - const data = bytesSync(source, options); - const byteOffset = TypedArrayPrototypeGetByteOffset(data); - const byteLength = TypedArrayPrototypeGetByteLength(data); - const buffer = TypedArrayPrototypeGetBuffer(data); - if (byteOffset === 0 && - byteLength === ArrayBufferPrototypeGetByteLength(buffer)) { - return buffer; - } - return ArrayBufferPrototypeSlice(buffer, byteOffset, - byteOffset + byteLength); -} - -/** - * Collect all chunks as an array from a sync source. 
- * @param {Iterable} source - * @param {{ limit?: number }} [options] + * @param {number} [limit] * @returns {Uint8Array[]} */ -function arraySync(source, options) { - const limit = options?.limit; +function collectSync(source, limit) { const chunks = []; let totalBytes = 0; @@ -160,20 +97,14 @@ function arraySync(source, options) { return chunks; } -// ============================================================================= -// Async Consumers -// ============================================================================= - /** - * Collect all bytes from an async or sync source. + * Collect chunks from an async or sync source into an array. * @param {AsyncIterable|Iterable} source - * @param {{ signal?: AbortSignal, limit?: number }} [options] - * @returns {Promise} + * @param {AbortSignal} [signal] + * @param {number} [limit] + * @returns {Promise} */ -async function bytes(source, options) { - const signal = options?.signal; - const limit = options?.limit; - +async function collectAsync(source, signal, limit) { if (signal?.aborted) { throw signal.reason ?? lazyDOMException('Aborted', 'AbortError'); } @@ -199,7 +130,7 @@ async function bytes(source, options) { } else { throw new ERR_INVALID_ARG_TYPE('source', ['AsyncIterable', 'Iterable'], source); } - return concatBytes(chunks); + return chunks; } // Slow path: with signal or limit checks @@ -243,6 +174,87 @@ async function bytes(source, options) { throw new ERR_INVALID_ARG_TYPE('source', ['AsyncIterable', 'Iterable'], source); } + return chunks; +} + +/** + * Convert a Uint8Array to its backing ArrayBuffer, slicing if necessary. 
+ * @param {Uint8Array} data + * @returns {ArrayBuffer} + */ +function toArrayBuffer(data) { + const byteOffset = TypedArrayPrototypeGetByteOffset(data); + const byteLength = TypedArrayPrototypeGetByteLength(data); + const buffer = TypedArrayPrototypeGetBuffer(data); + if (byteOffset === 0 && + byteLength === ArrayBufferPrototypeGetByteLength(buffer)) { + return buffer; + } + return ArrayBufferPrototypeSlice(buffer, byteOffset, + byteOffset + byteLength); +} + +// ============================================================================= +// Sync Consumers +// ============================================================================= + +/** + * Collect all bytes from a sync source. + * @param {Iterable} source + * @param {{ limit?: number }} [options] + * @returns {Uint8Array} + */ +function bytesSync(source, options) { + return concatBytes(collectSync(source, options?.limit)); +} + +/** + * Collect and decode text from a sync source. + * @param {Iterable} source + * @param {{ encoding?: string, limit?: number }} [options] + * @returns {string} + */ +function textSync(source, options) { + const data = bytesSync(source, options); + const decoder = new TextDecoder(options?.encoding ?? 'utf-8', { + fatal: true, + ignoreBOM: true, + }); + return decoder.decode(data); +} + +/** + * Collect bytes as ArrayBuffer from a sync source. + * @param {Iterable} source + * @param {{ limit?: number }} [options] + * @returns {ArrayBuffer} + */ +function arrayBufferSync(source, options) { + return toArrayBuffer(bytesSync(source, options)); +} + +/** + * Collect all chunks as an array from a sync source. 
+ * @param {Iterable} source + * @param {{ limit?: number }} [options] + * @returns {Uint8Array[]} + */ +function arraySync(source, options) { + return collectSync(source, options?.limit); +} + +// ============================================================================= +// Async Consumers +// ============================================================================= + +/** + * Collect all bytes from an async or sync source. + * @param {AsyncIterable|Iterable} source + * @param {{ signal?: AbortSignal, limit?: number }} [options] + * @returns {Promise} + */ +async function bytes(source, options) { + const chunks = await collectAsync(source, options?.signal, options?.limit); return concatBytes(chunks); } @@ -268,16 +280,7 @@ async function text(source, options) { * @returns {Promise} */ async function arrayBuffer(source, options) { - const data = await bytes(source, options); - const byteOffset = TypedArrayPrototypeGetByteOffset(data); - const byteLength = TypedArrayPrototypeGetByteLength(data); - const buffer = TypedArrayPrototypeGetBuffer(data); - if (byteOffset === 0 && - byteLength === ArrayBufferPrototypeGetByteLength(buffer)) { - return buffer; - } - return ArrayBufferPrototypeSlice(buffer, byteOffset, - byteOffset + byteLength); + return toArrayBuffer(await bytes(source, options)); } /** @@ -287,79 +290,7 @@ async function arrayBuffer(source, options) { * @returns {Promise} */ async function array(source, options) { - const signal = options?.signal; - const limit = options?.limit; - - if (signal?.aborted) { - throw signal.reason ?? 
lazyDOMException('Aborted', 'AbortError'); - } - - const chunks = []; - - // Fast path: no signal and no limit - if (!signal && limit === undefined) { - if (isAsyncIterable(source)) { - for await (const raw of source) { - const batch = ensureBatch(raw); - for (let i = 0; i < batch.length; i++) { - ArrayPrototypePush(chunks, batch[i]); - } - } - } else if (isSyncIterable(source)) { - for (const raw of source) { - const batch = ensureBatch(raw); - for (let i = 0; i < batch.length; i++) { - ArrayPrototypePush(chunks, batch[i]); - } - } - } else { - throw new ERR_INVALID_ARG_TYPE('source', ['AsyncIterable', 'Iterable'], source); - } - return chunks; - } - - // Slow path - let totalBytes = 0; - - if (isAsyncIterable(source)) { - for await (const raw of source) { - const batch = ensureBatch(raw); - if (signal?.aborted) { - throw signal.reason ?? lazyDOMException('Aborted', 'AbortError'); - } - for (let i = 0; i < batch.length; i++) { - const chunk = batch[i]; - if (limit !== undefined) { - totalBytes += TypedArrayPrototypeGetByteLength(chunk); - if (totalBytes > limit) { - throw new ERR_OUT_OF_RANGE('totalBytes', `<= ${limit}`, totalBytes); - } - } - ArrayPrototypePush(chunks, chunk); - } - } - } else if (isSyncIterable(source)) { - for (const raw of source) { - const batch = ensureBatch(raw); - if (signal?.aborted) { - throw signal.reason ?? 
lazyDOMException('Aborted', 'AbortError'); - } - for (let i = 0; i < batch.length; i++) { - const chunk = batch[i]; - if (limit !== undefined) { - totalBytes += TypedArrayPrototypeGetByteLength(chunk); - if (totalBytes > limit) { - throw new ERR_OUT_OF_RANGE('totalBytes', `<= ${limit}`, totalBytes); - } - } - ArrayPrototypePush(chunks, chunk); - } - } - } else { - throw new ERR_INVALID_ARG_TYPE('source', ['AsyncIterable', 'Iterable'], source); - } - - return chunks; + return collectAsync(source, options?.signal, options?.limit); } // ============================================================================= From eec6f0a8074b7ec11b6c19922e673921696f5e02 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Tue, 17 Mar 2026 23:49:09 -0700 Subject: [PATCH 25/42] stream: validate all elements in isUint8ArrayBatch `isUint8ArrayBatch` only checked the first array element, so a mixed array like `[Uint8Array, "hello"]` would pass the fast-path check and flow through un-normalized. Use `ArrayPrototypeEvery` to validate all elements. Also replaced inline first-element-only checks in `processTransformResultSync` and `processTransformResultAsync` with the corrected `isUint8ArrayBatch` function. 
--- lib/internal/streams/iter/from.js | 3 +-- lib/internal/streams/iter/pull.js | 9 ++------- 2 files changed, 3 insertions(+), 9 deletions(-) diff --git a/lib/internal/streams/iter/from.js b/lib/internal/streams/iter/from.js index fc4fc1b40958f6..d8d9f33aebc5bc 100644 --- a/lib/internal/streams/iter/from.js +++ b/lib/internal/streams/iter/from.js @@ -253,8 +253,7 @@ function* normalizeSyncValue(value) { function isUint8ArrayBatch(value) { if (!ArrayIsArray(value)) return false; if (value.length === 0) return true; - // Check first element - if it's a Uint8Array, assume the rest are too - return isUint8Array(value[0]); + return ArrayPrototypeEvery(value, isUint8Array); } /** diff --git a/lib/internal/streams/iter/pull.js b/lib/internal/streams/iter/pull.js index 1824ea4fe9e7f4..e951683485d758 100644 --- a/lib/internal/streams/iter/pull.js +++ b/lib/internal/streams/iter/pull.js @@ -7,7 +7,6 @@ // through transforms to consumer. const { - ArrayIsArray, ArrayPrototypePush, ArrayPrototypeSlice, String, @@ -178,9 +177,7 @@ function* processTransformResultSync(result) { if (result === null) { return; } - if (ArrayIsArray(result) && result.length > 0 && - isUint8Array(result[0])) { - // Fast path: Uint8Array[] + if (isUint8ArrayBatch(result)) { if (result.length > 0) { yield result; } @@ -216,9 +213,7 @@ async function* processTransformResultAsync(result) { if (result === null) { return; } - if (ArrayIsArray(result) && - (result.length === 0 || isUint8Array(result[0]))) { - // Fast path: Uint8Array[] + if (isUint8ArrayBatch(result)) { if (result.length > 0) { yield result; } From 88964abf41f09f6f09acfd4241670283c053c0ef Mon Sep 17 00:00:00 2001 From: James M Snell Date: Wed, 18 Mar 2026 00:08:43 -0700 Subject: [PATCH 26/42] stream: handle sync iterables in merge() multi-source path The multi-source merge path called `source[Symbol.asyncIterator]()` directly, which throws for sync-only iterables. 
The single-source fast path handled this via for-await-of, but passing multiple sync sources would fail. Wrap sync iterators with `PromiseResolve` so they participate in the `SafePromiseRace` pattern. The wrapping overhead is negligible relative to the per-iteration promise race. --- lib/internal/streams/iter/consumers.js | 27 +++++++++++++++++---- test/parallel/test-stream-iter-consumers.js | 13 ++++++++++ 2 files changed, 35 insertions(+), 5 deletions(-) diff --git a/lib/internal/streams/iter/consumers.js b/lib/internal/streams/iter/consumers.js index 69c3485944709b..e5c73edc10f86b 100644 --- a/lib/internal/streams/iter/consumers.js +++ b/lib/internal/streams/iter/consumers.js @@ -16,9 +16,11 @@ const { ArrayPrototypePush, ArrayPrototypeSlice, PromisePrototypeThen, + PromiseResolve, SafePromiseAllReturnVoid, SafePromiseRace, SymbolAsyncIterator, + SymbolIterator, TypedArrayPrototypeGetBuffer, TypedArrayPrototypeGetByteLength, TypedArrayPrototypeGetByteOffset, @@ -394,11 +396,26 @@ function merge(...args) { } // Multiple sources - race them - const states = ArrayPrototypeMap(sources, (source) => ({ - iterator: source[SymbolAsyncIterator](), - done: false, - pending: null, - })); + const states = ArrayPrototypeMap(sources, (source) => { + let iterator; + if (source[SymbolAsyncIterator]) { + iterator = source[SymbolAsyncIterator](); + } else if (source[SymbolIterator]) { + // Wrap sync iterator to async + const syncIter = source[SymbolIterator](); + iterator = { + next() { return PromiseResolve(syncIter.next()); }, + return() { + return PromiseResolve(syncIter.return?.() ?? 
+ { __proto__: null, done: true, value: undefined }); + }, + }; + } else { + throw new ERR_INVALID_ARG_TYPE( + 'source', ['AsyncIterable', 'Iterable'], source); + } + return { iterator, done: false, pending: null }; + }); const startIterator = (state, index) => { if (!state.done && !state.pending) { diff --git a/test/parallel/test-stream-iter-consumers.js b/test/parallel/test-stream-iter-consumers.js index 441c96d79aeffa..204f23d38eac9b 100644 --- a/test/parallel/test-stream-iter-consumers.js +++ b/test/parallel/test-stream-iter-consumers.js @@ -317,10 +317,23 @@ Promise.all([ testMergeSingleSource(), testMergeEmpty(), testMergeWithAbortSignal(), + testMergeSyncSources(), testConsumersNonArrayBatch(), testConsumersNonArrayBatchSync(), ]).then(common.mustCall()); +// Regression test: merge() with sync iterable sources +async function testMergeSyncSources() { + const s1 = fromSync('abc'); + const s2 = fromSync('def'); + const result = await text(merge(s1, s2)); + // Both sources should be fully consumed; order may vary + assert.strictEqual(result.length, 6); + for (const ch of 'abcdef') { + assert.ok(result.includes(ch), `missing '${ch}' in '${result}'`); + } +} + // Regression test: consumers should tolerate sources that yield raw // Uint8Array or string values instead of Uint8Array[] batches. async function testConsumersNonArrayBatch() { From 9b21cbd1d12fdacefde950415e8b7ef5c322dcbe Mon Sep 17 00:00:00 2001 From: James M Snell Date: Wed, 18 Mar 2026 00:24:10 -0700 Subject: [PATCH 27/42] stream: consolidate TextEncoder to single instance in utils Four separate `TextEncoder` instances were created across `from.js`, `pull.js`, `broadcast.js`, and `utils.js`. Consolidate to the single instance in `utils.js` by having all files use the existing `toUint8Array()` helper for `string`-to-`Uint8Array` conversion. 
--- lib/internal/streams/iter/broadcast.js | 15 ++++++--------- lib/internal/streams/iter/from.js | 13 +++++++------ lib/internal/streams/iter/pull.js | 9 +++------ 3 files changed, 16 insertions(+), 21 deletions(-) diff --git a/lib/internal/streams/iter/broadcast.js b/lib/internal/streams/iter/broadcast.js index c950403ad140c7..43576c44f8d992 100644 --- a/lib/internal/streams/iter/broadcast.js +++ b/lib/internal/streams/iter/broadcast.js @@ -22,8 +22,6 @@ const { TypedArrayPrototypeGetByteLength, } = primordials; -const { TextEncoder } = require('internal/encoding'); - const { isError, lazyDOMException } = require('internal/util'); const { @@ -49,14 +47,13 @@ const { const { allUint8Array, + toUint8Array, } = require('internal/streams/iter/utils'); const { RingBuffer, } = require('internal/streams/iter/ringbuffer'); -const encoder = new TextEncoder(); - // Cached resolved promise to avoid allocating a new one on every sync fast-path. const kResolvedPromise = PromiseResolve(); @@ -416,7 +413,7 @@ class BroadcastWriter { if (!options?.signal && !this.#closed && !this.#aborted && this.#broadcast[kCanWrite]()) { const converted = - typeof chunk === 'string' ? encoder.encode(chunk) : chunk; + toUint8Array(chunk); this.#broadcast[kWrite]([converted]); this.#totalBytes += TypedArrayPrototypeGetByteLength(converted); return kResolvedPromise; @@ -431,7 +428,7 @@ class BroadcastWriter { const converted = allUint8Array(chunks) ? ArrayPrototypeSlice(chunks) : ArrayPrototypeMap(chunks, (c) => - (typeof c === 'string' ? encoder.encode(c) : c)); + toUint8Array(c)); this.#broadcast[kWrite](converted); for (let i = 0; i < converted.length; i++) { this.#totalBytes += TypedArrayPrototypeGetByteLength(converted[i]); @@ -456,7 +453,7 @@ class BroadcastWriter { const converted = allUint8Array(chunks) ? ArrayPrototypeSlice(chunks) : ArrayPrototypeMap(chunks, (c) => - (typeof c === 'string' ? 
encoder.encode(c) : c)); + toUint8Array(c)); if (this.#broadcast[kWrite](converted)) { for (let i = 0; i < converted.length; i++) { @@ -485,7 +482,7 @@ class BroadcastWriter { if (this.#closed || this.#aborted) return false; if (!this.#broadcast[kCanWrite]()) return false; const converted = - typeof chunk === 'string' ? encoder.encode(chunk) : chunk; + toUint8Array(chunk); if (this.#broadcast[kWrite]([converted])) { this.#totalBytes += TypedArrayPrototypeGetByteLength(converted); return true; @@ -499,7 +496,7 @@ class BroadcastWriter { const converted = allUint8Array(chunks) ? ArrayPrototypeSlice(chunks) : ArrayPrototypeMap(chunks, (c) => - (typeof c === 'string' ? encoder.encode(c) : c)); + toUint8Array(c)); if (this.#broadcast[kWrite](converted)) { for (let i = 0; i < converted.length; i++) { this.#totalBytes += TypedArrayPrototypeGetByteLength(converted[i]); diff --git a/lib/internal/streams/iter/from.js b/lib/internal/streams/iter/from.js index d8d9f33aebc5bc..302952fc4c57a1 100644 --- a/lib/internal/streams/iter/from.js +++ b/lib/internal/streams/iter/from.js @@ -27,7 +27,7 @@ const { ERR_INVALID_ARG_TYPE, }, } = require('internal/errors'); -const { TextEncoder } = require('internal/encoding'); + const { isArrayBuffer, isPromise, @@ -39,8 +39,9 @@ const { toAsyncStreamable, } = require('internal/streams/iter/types'); -// Shared TextEncoder instance for string conversion. -const encoder = new TextEncoder(); +const { + toUint8Array, +} = require('internal/streams/iter/utils'); // Maximum number of chunks to yield per batch from from(Uint8Array[]). 
// Bounds peak memory when arrays flow through transforms, which must @@ -149,7 +150,7 @@ function hasToPrimitive(obj) { */ function primitiveToUint8Array(chunk) { if (typeof chunk === 'string') { - return encoder.encode(chunk); + return toUint8Array(chunk); } if (isArrayBuffer(chunk)) { return new Uint8Array(chunk); @@ -233,7 +234,7 @@ function* normalizeSyncValue(value) { if (typeof value === 'object' && value !== null) { const str = tryStringCoercion(value); if (str !== null) { - yield encoder.encode(str); + yield toUint8Array(str); return; } } @@ -357,7 +358,7 @@ async function* normalizeAsyncValue(value) { if (typeof value === 'object' && value !== null) { const str = tryStringCoercion(value); if (str !== null) { - yield encoder.encode(str); + yield toUint8Array(str); return; } } diff --git a/lib/internal/streams/iter/pull.js b/lib/internal/streams/iter/pull.js index e951683485d758..c757991e124677 100644 --- a/lib/internal/streams/iter/pull.js +++ b/lib/internal/streams/iter/pull.js @@ -22,7 +22,6 @@ const { ERR_OPERATION_FAILED, }, } = require('internal/errors'); -const { TextEncoder } = require('internal/encoding'); const { isError, lazyDOMException } = require('internal/util'); const { isPromise, @@ -41,11 +40,9 @@ const { const { isPullOptions, parsePullArgs, + toUint8Array, } = require('internal/streams/iter/utils'); -// Shared TextEncoder instance for string conversion. 
-const encoder = new TextEncoder(); - // ============================================================================= // Type Guards and Helpers // ============================================================================= @@ -126,7 +123,7 @@ function* flattenTransformYieldSync(value) { return; } if (typeof value === 'string') { - yield encoder.encode(value); + yield toUint8Array(value); return; } // Must be Iterable @@ -149,7 +146,7 @@ async function* flattenTransformYieldAsync(value) { return; } if (typeof value === 'string') { - yield encoder.encode(value); + yield toUint8Array(value); return; } // Check for async iterable first From f2ada96caca3be880d74240d11c7e516421da766 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Wed, 18 Mar 2026 00:32:58 -0700 Subject: [PATCH 28/42] stream: prevent unhandled rejection in Broadcast.from() pump `Broadcast.from()` used a fire-and-forget async IIFE to pump data from source to writer. While the catch block routes all errors to `writer.fail()` which never rejects, the IIFE's promise was unguarded against future invariant changes. Extract the IIFE to a named pump function and attach a no-op rejection handler via `PromisePrototypeThen`. 
--- lib/internal/streams/iter/broadcast.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/internal/streams/iter/broadcast.js b/lib/internal/streams/iter/broadcast.js index 43576c44f8d992..cdf77b6c5a7868 100644 --- a/lib/internal/streams/iter/broadcast.js +++ b/lib/internal/streams/iter/broadcast.js @@ -13,6 +13,7 @@ const { ArrayPrototypeSlice, MathMax, Promise, + PromisePrototypeThen, PromiseResolve, SafeSet, String, @@ -679,7 +680,7 @@ const Broadcast = { const result = broadcast(options); const signal = options?.signal; - (async () => { + const pump = async () => { const w = result.writer; try { if (isAsyncIterable(input)) { @@ -721,7 +722,8 @@ const Broadcast = { await w.fail(err); } } - })(); + }; + PromisePrototypeThen(pump(), undefined, () => {}); return result; }, From 2b05573960c47ee0ec5c94ce5444fc0b136a39b0 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Wed, 18 Mar 2026 00:42:56 -0700 Subject: [PATCH 29/42] stream: validate backpressure option at construction time Invalid backpressure values like `'banana'` would fall through switch statements at write time with a confusing `ERR_INVALID_STATE` error about "`writeSync` should have handled non-strict policy". Add `validateBackpressure()` in utils.js and call it from the `PushQueue`, `BroadcastImpl`, `ShareImpl`, and `SyncShareImpl` constructors. Invalid values now throw `ERR_INVALID_ARG_VALUE` immediately at construction. 
--- lib/internal/streams/iter/broadcast.js | 2 ++ lib/internal/streams/iter/push.js | 2 ++ lib/internal/streams/iter/share.js | 2 ++ lib/internal/streams/iter/utils.js | 21 +++++++++++++++++++++ test/parallel/test-stream-iter-push.js | 15 +++++++++++++++ 5 files changed, 42 insertions(+) diff --git a/lib/internal/streams/iter/broadcast.js b/lib/internal/streams/iter/broadcast.js index cdf77b6c5a7868..645b9669464d5b 100644 --- a/lib/internal/streams/iter/broadcast.js +++ b/lib/internal/streams/iter/broadcast.js @@ -49,6 +49,7 @@ const { const { allUint8Array, toUint8Array, + validateBackpressure, } = require('internal/streams/iter/utils'); const { @@ -111,6 +112,7 @@ class BroadcastImpl { #writer = null; constructor(options) { + validateBackpressure(options.backpressure); this.#options = options; this[kOnBufferDrained] = null; } diff --git a/lib/internal/streams/iter/push.js b/lib/internal/streams/iter/push.js index 9a6d9cdd414c72..b14fb2f1a53593 100644 --- a/lib/internal/streams/iter/push.js +++ b/lib/internal/streams/iter/push.js @@ -29,6 +29,7 @@ const { const { toUint8Array, allUint8Array, + validateBackpressure, } = require('internal/streams/iter/utils'); const { @@ -73,6 +74,7 @@ class PushQueue { constructor(options = {}) { this.#highWaterMark = MathMax(1, options.highWaterMark ?? 4); this.#backpressure = options.backpressure ?? 
'strict'; + validateBackpressure(this.#backpressure); this.#signal = options.signal; this.#abortHandler = undefined; diff --git a/lib/internal/streams/iter/share.js b/lib/internal/streams/iter/share.js index 2a7c538623e75a..48d35d59aa61c9 100644 --- a/lib/internal/streams/iter/share.js +++ b/lib/internal/streams/iter/share.js @@ -37,6 +37,7 @@ const { const { parsePullArgs, + validateBackpressure, } = require('internal/streams/iter/utils'); const { @@ -350,6 +351,7 @@ class SyncShareImpl { #cancelled = false; constructor(source, options) { + validateBackpressure(options.backpressure); this.#source = source; this.#options = options; } diff --git a/lib/internal/streams/iter/utils.js b/lib/internal/streams/iter/utils.js index 81f9670419b69c..7494541da91768 100644 --- a/lib/internal/streams/iter/utils.js +++ b/lib/internal/streams/iter/utils.js @@ -10,6 +10,11 @@ const { } = primordials; const { TextEncoder } = require('internal/encoding'); +const { + codes: { + ERR_INVALID_ARG_VALUE, + }, +} = require('internal/errors'); // Shared TextEncoder instance for string conversion. const encoder = new TextEncoder(); @@ -112,10 +117,26 @@ function parsePullArgs(args) { return { transforms: args, options: undefined }; } +/** + * Validate backpressure option value. 
+ * @param {string} value + */ +function validateBackpressure(value) { + if (value !== 'strict' && + value !== 'block' && + value !== 'drop-oldest' && + value !== 'drop-newest') { + throw new ERR_INVALID_ARG_VALUE( + 'options.backpressure', value, + 'must be "strict", "block", "drop-oldest", or "drop-newest"'); + } +} + module.exports = { toUint8Array, allUint8Array, concatBytes, isPullOptions, parsePullArgs, + validateBackpressure, }; diff --git a/test/parallel/test-stream-iter-push.js b/test/parallel/test-stream-iter-push.js index fefa745df7a79a..ccbf064fcbd8a3 100644 --- a/test/parallel/test-stream-iter-push.js +++ b/test/parallel/test-stream-iter-push.js @@ -352,4 +352,19 @@ Promise.all([ testCancelledWriteRemovedFromQueue(), testOndrainResolvesFalseOnConsumerBreak(), testOndrainRejectsOnConsumerThrow(), + testInvalidBackpressure(), ]).then(common.mustCall()); + +async function testInvalidBackpressure() { + assert.throws(() => push({ backpressure: 'banana' }), { + code: 'ERR_INVALID_ARG_VALUE', + }); + assert.throws(() => push({ backpressure: '' }), { + code: 'ERR_INVALID_ARG_VALUE', + }); + + // Valid values should not throw + for (const bp of ['strict', 'block', 'drop-oldest', 'drop-newest']) { + push({ backpressure: bp }); + } +} From 604168c113e4e9388a01a8452b986087d450e325 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Wed, 18 Mar 2026 06:46:46 -0700 Subject: [PATCH 30/42] stream: make a number of cleanups in broadcast.js --- lib/internal/streams/iter/broadcast.js | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/lib/internal/streams/iter/broadcast.js b/lib/internal/streams/iter/broadcast.js index 645b9669464d5b..36b448ebf9f007 100644 --- a/lib/internal/streams/iter/broadcast.js +++ b/lib/internal/streams/iter/broadcast.js @@ -15,6 +15,7 @@ const { Promise, PromisePrototypeThen, PromiseResolve, + PromiseWithResolvers, SafeSet, String, Symbol, @@ -56,12 +57,7 @@ const { RingBuffer, } = 
require('internal/streams/iter/ringbuffer'); -// Cached resolved promise to avoid allocating a new one on every sync fast-path. const kResolvedPromise = PromiseResolve(); - -// Non-exported symbols for internal cross-class communication between -// BroadcastImpl and BroadcastWriter. Because these symbols are not exported, -// external code cannot access the internal methods/fields. const kCancelWriter = Symbol('cancelWriter'); const kWrite = Symbol('write'); const kEnd = Symbol('end'); @@ -163,8 +159,10 @@ class BroadcastImpl { const self = this; return { + __proto__: null, [SymbolAsyncIterator]() { return { + __proto__: null, async next() { if (state.detached) { // If detached due to an error, throw the error @@ -192,10 +190,10 @@ class BroadcastImpl { return { __proto__: null, done: true, value: undefined }; } - return new Promise((resolve, reject) => { - state.resolve = resolve; - state.reject = reject; - }); + const { promise, resolve, reject } = PromiseWithResolvers(); + state.resolve = resolve; + state.reject = reject; + return promise; }, async return() { From 23a822aae9d499c745722bc9fbe52763105ed044 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Wed, 18 Mar 2026 08:02:08 -0700 Subject: [PATCH 31/42] stream: update and improve the stream_iter doc --- doc/api/stream_iter.md | 806 ++++++++++++++++++++++++++++++++--------- 1 file changed, 632 insertions(+), 174 deletions(-) diff --git a/doc/api/stream_iter.md b/doc/api/stream_iter.md index 739afb10a57192..d4c16d996fd4fc 100644 --- a/doc/api/stream_iter.md +++ b/doc/api/stream_iter.md @@ -81,7 +81,7 @@ run().catch(console.error); ### Byte streams -All data in the new streams API is represented as `Uint8Array` bytes. Strings +All data in this API is represented as `Uint8Array` bytes. Strings are automatically UTF-8 encoded when passed to `from()`, `push()`, or `pipeTo()`. This removes ambiguity around encodings and enables zero-copy transfers between streams and native code. 
@@ -289,9 +289,6 @@ for (const item of dataset) { // --> throws "Backpressure violation: too many pending writes" ``` -This is the default policy because it catches the exact class of bug -that push streams exist to prevent. - #### Block Block mode caps slots at `highWaterMark` but places no limit on the @@ -398,14 +395,107 @@ const { writer, readable } = push({ }); ``` -### Writers +### Writer interface + +A writer is any object conforming to the Writer interface. Only `write()` is +required; all other methods are optional. + +Each async method has a synchronous `*Sync` counterpart designed for a +try-fallback pattern: attempt the fast synchronous path first, and fall back +to the async version only when the synchronous call indicates it could not +complete: + +```mjs +if (!writer.writeSync(chunk)) await writer.write(chunk); +if (!writer.writevSync(chunks)) await writer.writev(chunks); +if (writer.endSync() < 0) await writer.end(); +if (!writer.failSync(err)) await writer.fail(err); +``` + +### `writer.desiredSize` + +* {number|null} + +The number of buffer slots available before the high water mark is reached. +Returns `null` if the writer is closed or the consumer has disconnected. + +The value is always non-negative. + +### `writer.end([options])` + +* `options` {Object} + * `signal` {AbortSignal} Cancel just this operation. The signal cancels only + the pending `end()` call; it does not fail the writer itself. +* Returns: {Promise\} Total bytes written. + +Signal that no more data will be written. + +### `writer.endSync()` + +* Returns: {number} Total bytes written, or `-1` if the writer is not open. + +Synchronous variant of `writer.end()`. Returns `-1` if the writer is already +closed or errored. Can be used as a try-fallback pattern: + +```cjs +const result = writer.endSync(); +if (result < 0) { + writer.end(); +} +``` + +### `writer.fail(reason)` + +* `reason` {Error} +* Returns: {Promise\} + +Fail the stream with an error. 
+ +### `writer.failSync(reason)` + +* `reason` {Error} +* Returns: {boolean} `true` if the writer was failed, `false` if already + errored. + +Synchronous variant of `writer.fail()`. + +### `writer.write(chunk[, options])` + +* `chunk` {Uint8Array|string} +* `options` {Object} + * `signal` {AbortSignal} Cancel just this write operation. The signal cancels + only the pending `write()` call; it does not fail the writer itself. +* Returns: {Promise\} + +Write a chunk. The promise resolves when buffer space is available. + +### `writer.writeSync(chunk)` -A writer is any object with a `write(chunk)` method. Writers optionally -support `writev(chunks)` for batch writes (mapped to scatter/gather I/O where -available), `end()` to signal completion, and `fail(reason)` to signal -failure. +* `chunk` {Uint8Array|string} +* Returns: {boolean} `true` if the write was accepted, `false` if the + buffer is full. -## `require('node:stream/iter')` +Synchronous write. Does not block; returns `false` if backpressure is active. + +### `writer.writev(chunks[, options])` + +* `chunks` {Uint8Array\[]|string\[]} +* `options` {Object} + * `signal` {AbortSignal} Cancel just this write operation. The signal cancels + only the pending `writev()` call; it does not fail the writer itself. +* Returns: {Promise\} + +Write multiple chunks as a single batch. + +### `writer.writevSync(chunks)` + +* `chunks` {Uint8Array\[]|string\[]} +* Returns: {boolean} `true` if the write was accepted, `false` if the + buffer is full. + +Synchronous batch write. + +## The `stream/iter` module All functions are available both as named exports and as properties of the `Stream` namespace object: @@ -426,6 +516,8 @@ const { from, pull, bytes, Stream } = require('node:stream/iter'); Stream.from('hello'); ``` +Including the `node:` prefix on the module specifier is optional. 
+ ## Sources ### `from(input)` @@ -453,6 +545,7 @@ console.log(await text(from(Buffer.from('hello')))); // 'hello' ``` ```cjs +const { Buffer } = require('node:buffer'); const { from, text } = require('node:stream/iter'); async function run() { @@ -510,6 +603,12 @@ Pipe a source through transforms into a writer. If the writer has a `writev(chunks)` method, entire batches are passed in a single call (enabling scatter/gather I/O). +If the writer implements the optional `*Sync` methods (`writeSync`, `writevSync`, +`endSync`, `failSync`), `pipeTo()` will attempt to use the synchronous methods +first as a fast path, and fall back to the async versions only when the sync +methods indicate they cannot complete (e.g., backpressure or waiting for the +next tick). + ```mjs import { from, pipeTo, compressGzip } from 'node:stream/iter'; import { open } from 'node:fs/promises'; @@ -552,7 +651,11 @@ added: REPLACEME * `preventFail` {boolean} **Default:** `false`. * Returns: {number} Total bytes written. -Synchronous version of [`pipeTo()`][]. +Synchronous version of [`pipeTo()`][]. The `source`, all transforms, and the +`writer` must be synchronous. Cannot accept async iterables or promises. + +The `writer` must have the `*Sync` methods (`writeSync`, `writevSync`, +`endSync`, `failSync`) for this to work. ### `pull(source[, ...transforms][, options])` @@ -572,29 +675,35 @@ returned iterable is consumed. Transforms are applied in order. 
```mjs import { from, pull, text } from 'node:stream/iter'; -const upper = (chunks) => { +const asciiUpper = (chunks) => { if (chunks === null) return null; - return chunks.map((c) => - new TextEncoder().encode(new TextDecoder().decode(c).toUpperCase()), - ); + return chunks.map((c) => { + for (let i = 0; i < c.length; i++) { + c[i] -= (c[i] >= 97 && c[i] <= 122) * 32; + } + return c; + }); }; -const result = pull(from('hello'), upper); +const result = pull(from('hello'), asciiUpper); console.log(await text(result)); // 'HELLO' ``` ```cjs const { from, pull, text } = require('node:stream/iter'); -const upper = (chunks) => { +const asciiUpper = (chunks) => { if (chunks === null) return null; - return chunks.map((c) => - new TextEncoder().encode(new TextDecoder().decode(c).toUpperCase()), - ); + return chunks.map((c) => { + for (let i = 0; i < c.length; i++) { + c[i] -= (c[i] >= 97 && c[i] <= 122) * 32; + } + return c; + }); }; async function run() { - const result = pull(from('hello'), upper); + const result = pull(from('hello'), asciiUpper); console.log(await text(result)); // 'HELLO' } @@ -693,101 +802,85 @@ async function run() { run().catch(console.error); ``` -#### Writer - -The writer returned by `push()` has the following methods. - -Each async method has a synchronous `*Sync` counterpart designed for a -try-fallback pattern: attempt the fast synchronous path first, and fall back -to the async version only when the synchronous call indicates it could not -complete: - -```mjs -if (!writer.writeSync(chunk)) await writer.write(chunk); -if (!writer.writevSync(chunks)) await writer.writev(chunks); -if (writer.endSync() < 0) await writer.end(); -if (!writer.failSync(err)) await writer.fail(err); -``` - -##### `writer.fail(reason)` - -* `reason` {Error} -* Returns: {Promise\} - -Fail the stream with an error. - -##### `writer.failSync(reason)` +The writer returned by `push()` conforms to the [Writer interface][]. 
-* `reason` {Error} -* Returns: {boolean} `true` if the writer was failed, `false` if already - errored. - -Synchronous variant of `writer.fail()`. +## Duplex channels -##### `writer.desiredSize` +### `duplex([options])` -* {number|null} - -The number of buffer slots available before the high water mark is reached. -Returns `null` if the writer is closed or the consumer has disconnected. - -##### `writer.end([options])` + * `options` {Object} - * `signal` {AbortSignal} Cancel just this operation. The signal cancels only - the pending `end()` call; it does not fail the writer itself. -* Returns: {Promise\} Total bytes written. + * `highWaterMark` {number} Buffer size for both directions. + **Default:** `4`. + * `backpressure` {string} Policy for both directions. + **Default:** `'strict'`. + * `signal` {AbortSignal} Cancellation signal for both channels. + * `a` {Object} Options specific to the A-to-B direction. Overrides + shared options. + * `highWaterMark` {number} + * `backpressure` {string} + * `b` {Object} Options specific to the B-to-A direction. Overrides + shared options. + * `highWaterMark` {number} + * `backpressure` {string} +* Returns: {Array} A pair `[channelA, channelB]` of duplex channels. + +Create a pair of connected duplex channels for bidirectional communication, +similar to `socketpair()`. Data written to one channel's writer appears in +the other channel's readable. + +Each channel has: + +* `writer` — a [Writer interface][] object for sending data to the peer. +* `readable` — an `AsyncIterable` for reading data from + the peer. +* `close()` — close this end of the channel (idempotent). +* `[Symbol.asyncDispose]()` — async dispose support for `await using`. -Signal that no more data will be written. +```mjs +import { duplex, text } from 'node:stream/iter'; -##### `writer.endSync()` +const [client, server] = duplex(); -* Returns: {number} Total bytes written, or `-1` if the writer is not open. 
+// Server echoes back +const serving = (async () => { + for await (const chunks of server.readable) { + await server.writer.writev(chunks); + } +})(); -Synchronous variant of `writer.end()`. Returns `-1` if the writer is already -closed or errored. Can be used as a try-fallback pattern: +await client.writer.write('hello'); +await client.writer.end(); -```cjs -const result = writer.endSync(); -if (result < 0) { - writer.end(); -} +console.log(await text(server.readable)); // handled by echo +await serving; ``` -##### `writer.write(chunk[, options])` - -* `chunk` {Uint8Array|string} -* `options` {Object} - * `signal` {AbortSignal} Cancel just this write operation. The signal cancels - only the pending `write()` call; it does not fail the writer itself. -* Returns: {Promise\} - -Write a chunk. The promise resolves when buffer space is available. - -##### `writer.writeSync(chunk)` - -* `chunk` {Uint8Array|string} -* Returns: {boolean} `true` if the write was accepted, `false` if the - buffer is full. - -Synchronous write. Does not block; returns `false` if backpressure is active. - -##### `writer.writev(chunks[, options])` +```cjs +const { duplex, text } = require('node:stream/iter'); -* `chunks` {Uint8Array\[]|string\[]} -* `options` {Object} - * `signal` {AbortSignal} Cancel just this write operation. The signal cancels - only the pending `writev()` call; it does not fail the writer itself. -* Returns: {Promise\} +async function run() { + const [client, server] = duplex(); -Write multiple chunks as a single batch. + // Server echoes back + const serving = (async () => { + for await (const chunks of server.readable) { + await server.writer.writev(chunks); + } + })(); -##### `writer.writevSync(chunks)` + await client.writer.write('hello'); + await client.writer.end(); -* `chunks` {Uint8Array\[]|string\[]} -* Returns: {boolean} + console.log(await text(server.readable)); // handled by echo + await serving; +} -Synchronous batch write. 
+run().catch(console.error); +``` ## Consumers @@ -800,7 +893,8 @@ added: REPLACEME * `source` {AsyncIterable\|Iterable\} * `options` {Object} * `signal` {AbortSignal} - * `limit` {number} + * `limit` {number} Maximum number of bytes to consume. If the total bytes + collected exceeds limit, an `ERR_OUT_OF_RANGE` error is thrown * Returns: {Promise\} Collect all chunks as an array of `Uint8Array` values (without concatenating). @@ -814,7 +908,8 @@ added: REPLACEME * `source` {AsyncIterable\|Iterable\} * `options` {Object} * `signal` {AbortSignal} - * `limit` {number} + * `limit` {number} Maximum number of bytes to consume. If the total bytes + collected exceeds limit, an `ERR_OUT_OF_RANGE` error is thrown * Returns: {Promise\} Collect all bytes into an `ArrayBuffer`. @@ -827,7 +922,8 @@ added: REPLACEME * `source` {Iterable\} * `options` {Object} - * `limit` {number} + * `limit` {number} Maximum number of bytes to consume. If the total bytes + collected exceeds limit, an `ERR_OUT_OF_RANGE` error is thrown * Returns: {ArrayBuffer} Synchronous version of [`arrayBuffer()`][]. @@ -840,7 +936,8 @@ added: REPLACEME * `source` {Iterable\} * `options` {Object} - * `limit` {number} + * `limit` {number} Maximum number of bytes to consume. If the total bytes + collected exceeds limit, an `ERR_OUT_OF_RANGE` error is thrown * Returns: {Uint8Array\[]} Synchronous version of [`array()`][]. @@ -854,7 +951,8 @@ added: REPLACEME * `source` {AsyncIterable\|Iterable\} * `options` {Object} * `signal` {AbortSignal} - * `limit` {number} Maximum bytes to collect. Throws if exceeded. + * `limit` {number} Maximum number of bytes to consume. If the total bytes + collected exceeds limit, an `ERR_OUT_OF_RANGE` error is thrown * Returns: {Promise\} Collect all bytes from a stream into a single `Uint8Array`. @@ -885,7 +983,8 @@ added: REPLACEME * `source` {Iterable\} * `options` {Object} - * `limit` {number} + * `limit` {number} Maximum number of bytes to consume. 
If the total bytes + collected exceeds limit, an `ERR_OUT_OF_RANGE` error is thrown * Returns: {Uint8Array} Synchronous version of [`bytes()`][]. @@ -900,7 +999,8 @@ added: REPLACEME * `options` {Object} * `encoding` {string} Text encoding. **Default:** `'utf-8'`. * `signal` {AbortSignal} - * `limit` {number} + * `limit` {number} Maximum number of bytes to consume. If the total bytes + collected exceeds limit, an `ERR_OUT_OF_RANGE` error is thrown * Returns: {Promise\} Collect all bytes and decode as text. @@ -930,46 +1030,14 @@ added: REPLACEME * `source` {Iterable\} * `options` {Object} * `encoding` {string} **Default:** `'utf-8'`. - * `limit` {number} + * `limit` {number} Maximum number of bytes to consume. If the total bytes + collected exceeds limit, an `ERR_OUT_OF_RANGE` error is thrown * Returns: {string} Synchronous version of [`text()`][]. ## Utilities -### `merge(...sources[, options])` - - - -* `...sources` {AsyncIterable\} Two or more async iterables. -* `options` {Object} - * `signal` {AbortSignal} -* Returns: {AsyncIterable\} - -Merge multiple async iterables by yielding batches in temporal order -(whichever source produces data first). All sources are consumed -concurrently. - -```mjs -import { from, merge, text } from 'node:stream/iter'; - -const merged = merge(from('hello '), from('world')); -console.log(await text(merged)); // Order depends on timing -``` - -```cjs -const { from, merge, text } = require('node:stream/iter'); - -async function run() { - const merged = merge(from('hello '), from('world')); - console.log(await text(merged)); // Order depends on timing -} - -run().catch(console.error); -``` - ### `ondrain(drainable)` + +* `...sources` {AsyncIterable\|Iterable\} Two or more iterables. +* `options` {Object} + * `signal` {AbortSignal} +* Returns: {AsyncIterable\} + +Merge multiple async iterables by yielding batches in temporal order +(whichever source produces data first). All sources are consumed +concurrently. 
+ +```mjs +import { from, merge, text } from 'node:stream/iter'; + +const merged = merge(from('hello '), from('world')); +console.log(await text(merged)); // Order depends on timing +``` + +```cjs +const { from, merge, text } = require('node:stream/iter'); + +async function run() { + const merged = merge(from('hello '), from('world')); + console.log(await text(merged)); // Order depends on timing +} + +run().catch(console.error); +``` + ### `tap(callback)`