From d8b75360e88245574d545561e81aeac108c09af4 Mon Sep 17 00:00:00 2001 From: Dan MacTough Date: Mon, 23 Mar 2026 19:41:12 -0400 Subject: [PATCH 1/8] Add async iterator support via Symbol.asyncIterator FeedParser instances can now be consumed directly with for await...of. The implementation bridges the push-based stream event model to the pull-based async iteration protocol using an async generator, without changing the underlying readable-stream v2 implementation. Co-Authored-By: Claude Sonnet 4.6 --- .eslintrc.json | 6 +++++- index.d.ts | 1 + lib/feedparser/index.js | 41 +++++++++++++++++++++++++++++++++++++++++ test/examples.js | 17 +++++++++++++++++ 4 files changed, 64 insertions(+), 1 deletion(-) create mode 100644 test/examples.js diff --git a/.eslintrc.json b/.eslintrc.json index 43cc6da..8761f2d 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -1,6 +1,10 @@ { "env": { - "node": true + "node": true, + "es6": true + }, + "parserOptions": { + "ecmaVersion": 2018 }, "extends": "eslint:recommended", "rules": { diff --git a/index.d.ts b/index.d.ts index 6e581af..abad0ae 100644 --- a/index.d.ts +++ b/index.d.ts @@ -10,6 +10,7 @@ declare class FeedParser extends stream.Transform { options: FeedParser.Options; read(): FeedParser.Item | null; + [Symbol.asyncIterator](): AsyncGenerator; resumeSaxError(): void; on(event: 'meta', listener: (meta: FeedParser.Meta) => void): this; diff --git a/lib/feedparser/index.js b/lib/feedparser/index.js index 04c8195..02e025d 100644 --- a/lib/feedparser/index.js +++ b/lib/feedparser/index.js @@ -1239,4 +1239,45 @@ FeedParser.prototype._flush = function (done) { * @typedef {import('readable-stream').Transform & FeedParserState} FeedParserInstance */ +/** @this {FeedParserInstance} */ +FeedParser.prototype[Symbol.asyncIterator] = async function* () { + var resolve = null; + var error = null; + var ended = false; + + function onReadable() { + if (resolve) { resolve(); resolve = null; } + } + function onEnd() { + ended = true; + if 
(resolve) { resolve(); resolve = null; } + } + function onError(err) { + error = err; + if (resolve) { resolve(); resolve = null; } + } + + this.on('readable', onReadable); + this.on('end', onEnd); + this.on('error', onError); + + try { + while (true) { + var item; + while ((item = this.read()) !== null) { + yield item; + } + if (ended) break; + if (error) throw error; + await new Promise(function (r) { resolve = r; }); + resolve = null; + if (error) throw error; + } + } finally { + this.removeListener('readable', onReadable); + this.removeListener('end', onEnd); + this.removeListener('error', onError); + } +}; + exports = module.exports = FeedParser; diff --git a/test/examples.js b/test/examples.js new file mode 100644 index 0000000..b1f2496 --- /dev/null +++ b/test/examples.js @@ -0,0 +1,17 @@ +var FeedParser = require('../'); + +describe('examples', function () { + it('should work as an async iterator', async function () { + var feedparser = new FeedParser(); + var feed = __dirname + '/feeds/rss2sample.xml'; + var items = []; + + fs.createReadStream(feed).pipe(feedparser); + + for await (const item of feedparser) { + items.push(item); + } + + assert.equal(items.length, 4); + }); +}); From bf50c142f4933252df5e3ed67120d01b4e6937e5 Mon Sep 17 00:00:00 2001 From: Claude Date: Thu, 26 Mar 2026 20:18:33 +0000 Subject: [PATCH 2/8] Add async iterator usage example to README https://claude.ai/code/session_01Rx48d2xCMjtzmoynPwet77 --- README.md | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/README.md b/README.md index 40dbeb3..354f886 100644 --- a/README.md +++ b/README.md @@ -62,6 +62,30 @@ feedparser.on('readable', function () { ``` +You can also consume feeds using async iteration: + +```js + +var FeedParser = require('feedparser'); +var fetch = require('node-fetch'); + +async function main() { + var res = await fetch('http://somefeedurl.xml'); + if (res.status !== 200) throw new Error('Bad status code'); + + var feedparser = new 
FeedParser([options]); + feedparser.on('error', function (error) { throw error; }); + res.body.pipe(feedparser); + + for await (var item of feedparser) { + console.log(item.title); + } +} + +main(); + +``` + You can also check out this nice [working implementation](https://github.com/scripting/feedRead) that demonstrates one way to handle all the hard and annoying stuff. :smiley: ### options From d9db5b5e487a4d9492bc5a4cd614b18800bdfba0 Mon Sep 17 00:00:00 2001 From: Claude Date: Thu, 26 Mar 2026 20:22:42 +0000 Subject: [PATCH 3/8] Use try/catch in async iterator README example https://claude.ai/code/session_01Rx48d2xCMjtzmoynPwet77 --- README.md | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 354f886..08fef86 100644 --- a/README.md +++ b/README.md @@ -74,11 +74,14 @@ async function main() { if (res.status !== 200) throw new Error('Bad status code'); var feedparser = new FeedParser([options]); - feedparser.on('error', function (error) { throw error; }); res.body.pipe(feedparser); - for await (var item of feedparser) { - console.log(item.title); + try { + for await (var item of feedparser) { + console.log(item.title); + } + } catch (err) { + console.error(err); } } From 7c61d9df49e36c5cc7899c93690bf33512bb4551 Mon Sep 17 00:00:00 2001 From: Claude Date: Thu, 26 Mar 2026 20:23:58 +0000 Subject: [PATCH 4/8] Add test for error handling via try/catch with async iterator https://claude.ai/code/session_01Rx48d2xCMjtzmoynPwet77 --- test/examples.js | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/test/examples.js b/test/examples.js index b1f2496..d1d7771 100644 --- a/test/examples.js +++ b/test/examples.js @@ -14,4 +14,20 @@ describe('examples', function () { assert.equal(items.length, 4); }); + + it('should surface errors via try/catch when using async iterator', async function () { + var feedparser = new FeedParser(); + var feed = __dirname + '/feeds/notafeed.html'; + 
fs.createReadStream(feed).pipe(feedparser); + + var caught = null; + try { + for await (const item of feedparser) {} // eslint-disable-line no-unused-vars + } catch (err) { + caught = err; + } + + assert.ok(caught instanceof Error); + assert.equal(caught.message, 'Not a feed'); + }); }); From 9675d7412ec56b864fb62a3da97eb9159316e4f4 Mon Sep 17 00:00:00 2001 From: Dan MacTough Date: Thu, 26 Mar 2026 20:16:36 -0400 Subject: [PATCH 5/8] Add more tests of async iterator usage --- test/async-iterator.js | 68 ++++++++++++++++++++++++++++++++++++++++++ test/examples.js | 33 -------------------- 2 files changed, 68 insertions(+), 33 deletions(-) create mode 100644 test/async-iterator.js delete mode 100644 test/examples.js diff --git a/test/async-iterator.js b/test/async-iterator.js new file mode 100644 index 0000000..61aed3f --- /dev/null +++ b/test/async-iterator.js @@ -0,0 +1,68 @@ +var FeedParser = require('..'); + +describe('async iterator usage', function () { + it('should work as an async iterator', async function () { + var feedparser = new FeedParser(); + var feed = __dirname + '/feeds/rss2sample.xml'; + var items = []; + + fs.createReadStream(feed).pipe(feedparser); + + for await (const item of feedparser) { + items.push(item); + } + + assert.equal(items.length, 4); + }); + + it('should surface errors via try/catch', async function () { + var feedparser = new FeedParser(); + var feed = __dirname + '/feeds/notafeed.html'; + fs.createReadStream(feed).pipe(feedparser); + + var caught = null; + try { + for await (const item of feedparser) {} // eslint-disable-line no-empty, no-unused-vars + } catch (err) { + caught = err; + } + + assert.ok(caught instanceof Error); + assert.equal(caught.message, 'Not a feed'); + }); + + describe('resume_saxerror behavior', function () { + var feed = __dirname + '/feeds/saxerror.xml'; + + it('should continue iterating past SAX errors by default (resume_saxerror: true)', async function () { + var feedparser = new FeedParser({ strict: 
true }); + fs.createReadStream(feed).pipe(feedparser); + var items = []; + + for await (const item of feedparser) { + items.push(item.title); + } + + assert.equal(items.length, 3); + assert.deepEqual(items, ['Good Item', 'Bad Item', 'Item After Error']); + }); + + it('should throw on SAX errors when (resume_saxerror: false)', async function () { + var feedparser = new FeedParser({ strict: true, resume_saxerror: false }); + fs.createReadStream(feed).pipe(feedparser); + var items = []; + + var caught = null; + try { + for await (const item of feedparser) { + items.push(item.title); + } + } catch (err) { + caught = err; + } + + assert.ok(caught instanceof Error); + assert.equal(items.length, 0); + }); + }); +}); diff --git a/test/examples.js b/test/examples.js deleted file mode 100644 index d1d7771..0000000 --- a/test/examples.js +++ /dev/null @@ -1,33 +0,0 @@ -var FeedParser = require('../'); - -describe('examples', function () { - it('should work as an async iterator', async function () { - var feedparser = new FeedParser(); - var feed = __dirname + '/feeds/rss2sample.xml'; - var items = []; - - fs.createReadStream(feed).pipe(feedparser); - - for await (const item of feedparser) { - items.push(item); - } - - assert.equal(items.length, 4); - }); - - it('should surface errors via try/catch when using async iterator', async function () { - var feedparser = new FeedParser(); - var feed = __dirname + '/feeds/notafeed.html'; - fs.createReadStream(feed).pipe(feedparser); - - var caught = null; - try { - for await (const item of feedparser) {} // eslint-disable-line no-unused-vars - } catch (err) { - caught = err; - } - - assert.ok(caught instanceof Error); - assert.equal(caught.message, 'Not a feed'); - }); -}); From 6dfb13001ffe81fa7d8b31b7f32fed86a762c9f0 Mon Sep 17 00:00:00 2001 From: Dan MacTough Date: Thu, 26 Mar 2026 20:38:24 -0400 Subject: [PATCH 6/8] Fix async iterator: destroy stream on early loop exit When breaking out of a for-await loop, the finally block 
removed the error listener but left the source still piping in. Any subsequent parse error would have no listener and become an unhandled exception. Destroy the stream on early exit to prevent this. Co-Authored-By: Claude Sonnet 4.6 --- lib/feedparser/index.js | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lib/feedparser/index.js b/lib/feedparser/index.js index 02e025d..f69f33b 100644 --- a/lib/feedparser/index.js +++ b/lib/feedparser/index.js @@ -1277,6 +1277,9 @@ FeedParser.prototype[Symbol.asyncIterator] = async function* () { this.removeListener('readable', onReadable); this.removeListener('end', onEnd); this.removeListener('error', onError); + if (!ended && !this.destroyed) { + this.destroy(); + } } }; From f20e409988d13ac567b2c7451bb8a9769103ac9e Mon Sep 17 00:00:00 2001 From: Dan MacTough Date: Fri, 27 Mar 2026 09:44:58 -0400 Subject: [PATCH 7/8] Change examples to pipeline-based async iteration This avoids recommending a pattern that exposes a footgun - pipe() needs to have independent error-handling to avoid possibly triggering an unhandled error. --- README.md | 28 ++++++++++++++++------- test/async-iterator.js | 51 +++++++++++++++++++++++++++++++++++++----- 2 files changed, 66 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index 08fef86..02ea6dc 100644 --- a/README.md +++ b/README.md @@ -62,24 +62,36 @@ feedparser.on('readable', function () { ``` -You can also consume feeds using async iteration: +You can also consume feeds using async iteration. +When using async iteration, prefer `stream.pipeline(...)` (or a promisified +`stream.pipeline`) so stream errors are handled before data starts flowing. Async iterator usage with `pipeline` requires Node v12+. +If you use `pipe()` or otherwise start writing to `FeedParser` before iteration +begins, attach an `error` handler on `feedparser` yourself. 
+```js var FeedParser = require('feedparser'); var fetch = require('node-fetch'); +// stream/promises requires Node v15+ but the same behavior can be +// attained by promisifying require('stream').pipeline +var pipeline = require('stream/promises').pipeline; async function main() { - var res = await fetch('http://somefeedurl.xml'); + var res = await fetch('http://someurl.site/rss.xml'); if (res.status !== 200) throw new Error('Bad status code'); - var feedparser = new FeedParser([options]); - res.body.pipe(feedparser); + var feedparser = new FeedParser(options); try { - for await (var item of feedparser) { - console.log(item.title); - } + await pipeline( + res.body, + feedparser, + async function (feedparserIterable) { + for await (var item of feedparserIterable) { + console.log(item.title); + } + } + ) } catch (err) { console.error(err); } diff --git a/test/async-iterator.js b/test/async-iterator.js index 61aed3f..ada8aea 100644 --- a/test/async-iterator.js +++ b/test/async-iterator.js @@ -1,6 +1,12 @@ -var FeedParser = require('..'); +var PassThrough = require('stream').PassThrough; +// We're using this form so we can run tests on older Node versions that don't have stream.promises.pipeline +var pipeline = require('util').promisify(require('stream').pipeline); describe('async iterator usage', function () { + // These tests use .pipe() only to allow testing in older Node versions. + // In modern Node versions, you can use pipeline() with async iterators + // instead of .pipe(). If you use .pipe, you must add your own error handling + // to avoid uncaught exceptions on errors. 
it('should work as an async iterator', async function () { var feedparser = new FeedParser(); var feed = __dirname + '/feeds/rss2sample.xml'; @@ -8,7 +14,7 @@ describe('async iterator usage', function () { fs.createReadStream(feed).pipe(feedparser); - for await (const item of feedparser) { + for await (var item of feedparser) { items.push(item); } @@ -22,7 +28,7 @@ describe('async iterator usage', function () { var caught = null; try { - for await (const item of feedparser) {} // eslint-disable-line no-empty, no-unused-vars + for await (var item of feedparser) {} // eslint-disable-line no-empty, no-unused-vars } catch (err) { caught = err; } @@ -31,6 +37,41 @@ describe('async iterator usage', function () { assert.equal(caught.message, 'Not a feed'); }); + it('should catch errors after a delayed iteration start', async function () { + if (process.release.lts < 'Gallium') { + this.skip(); // Older Node versions don't allow async iterators with pipeline, so we can't test this behavior. + } + var feedparser = new FeedParser(); + var source = new PassThrough(); + var items = []; + var caught = null; + var uncaught = null; + function onUncaught(err) { + uncaught = err; + } + process.prependOnceListener('uncaughtException', onUncaught); + + source.end('not a feed'); + + await new Promise(setImmediate); + + try { + await pipeline(source, feedparser, async function (fpIterable) { + for await (var item of fpIterable) { + items.push(item.title); + } + }); + } catch (err) { + caught = err; + } finally { + process.removeListener('uncaughtException', onUncaught); + assert.equal(uncaught, null); + assert.ok(caught instanceof Error); + assert.equal(caught.message, 'Not a feed'); + assert.equal(items.length, 0); + } + }); + describe('resume_saxerror behavior', function () { var feed = __dirname + '/feeds/saxerror.xml'; @@ -39,7 +80,7 @@ describe('async iterator usage', function () { fs.createReadStream(feed).pipe(feedparser); var items = []; - for await (const item of feedparser) { 
+ for await (var item of feedparser) { items.push(item.title); } @@ -54,7 +95,7 @@ describe('async iterator usage', function () { var caught = null; try { - for await (const item of feedparser) { + for await (var item of feedparser) { items.push(item.title); } } catch (err) { From 854d490ebcd102d8e7e1961cd9345912caff56dd Mon Sep 17 00:00:00 2001 From: Dan MacTough Date: Fri, 27 Mar 2026 10:57:38 -0400 Subject: [PATCH 8/8] Remove redundant nulling --- lib/feedparser/index.js | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/feedparser/index.js b/lib/feedparser/index.js index f69f33b..40ef244 100644 --- a/lib/feedparser/index.js +++ b/lib/feedparser/index.js @@ -1270,7 +1270,6 @@ FeedParser.prototype[Symbol.asyncIterator] = async function* () { if (ended) break; if (error) throw error; await new Promise(function (r) { resolve = r; }); - resolve = null; if (error) throw error; } } finally {