diff --git a/changelog.d/901.bugfix b/changelog.d/901.bugfix
new file mode 100644
index 00000000..6bc32020
--- /dev/null
+++ b/changelog.d/901.bugfix
@@ -0,0 +1 @@
+Fix Atom feeds being repeated in rooms once after an upgrade.
diff --git a/src/Stores/RedisStorageProvider.ts b/src/Stores/RedisStorageProvider.ts
index e1dc7743..12c42a79 100644
--- a/src/Stores/RedisStorageProvider.ts
+++ b/src/Stores/RedisStorageProvider.ts
@@ -227,8 +227,10 @@ export class RedisStorageProvider extends RedisStorageContextualProvider impleme
     public async hasSeenFeedGuids(url: string, ...guids: string[]): Promise<string[]> {
         let multi = this.redis.multi();
+        const feedKey = `${FEED_GUIDS}${url}`;
+
         for (const guid of guids) {
-            multi = multi.lpos(`${FEED_GUIDS}${url}`, guid);
+            multi = multi.lpos(feedKey, guid);
         }
         const res = await multi.exec();
         if (res === null) {
diff --git a/src/feeds/parser.rs b/src/feeds/parser.rs
index 7d2add88..631caee8 100644
--- a/src/feeds/parser.rs
+++ b/src/feeds/parser.rs
@@ -118,7 +118,7 @@ fn parse_feed_to_js_result(feed: &Feed) -> JsRssChannel {
                     .map(|date| date.to_rfc2822()),
                 summary: item.summary().map(|v| v.value.clone()),
                 author: authors_to_string(item.authors()),
-                hash_id: hash_id(item.id.clone()).ok(),
+                hash_id: hash_id(item.id.clone()).ok().map(|f| format!("md5:{}", f)),
             })
             .collect(),
     }
diff --git a/tests/FeedReader.spec.ts b/tests/FeedReader.spec.ts
index 9b2de2fb..00f39745 100644
--- a/tests/FeedReader.spec.ts
+++ b/tests/FeedReader.spec.ts
@@ -62,18 +62,22 @@ async function constructFeedReader(feedResponse: () => {headers: Record
+    const events: MessageQueueMessage[] = [];
+    mq.on('pushed', (data) => { if (data.eventName === 'feed.entry') {events.push(data);} });
+
     const storage = new MemoryStorageProvider();
     // Ensure we don't initial sync by storing a guid.
     await storage.storeFeedGuids(feedUrl, '-test-guid-');
     const feedReader = new FeedReader(
         config, cm, mq, storage,
     );
-    return {config, cm, mq, feedReader, feedUrl, httpServer};
+    after(() => httpServer.close());
+    return {config, cm, events, feedReader, feedUrl, httpServer, storage};
 }
 
 describe("FeedReader", () => {
     it("should correctly handle empty titles", async () => {
-        const { mq, feedReader, httpServer } = await constructFeedReader(() => ({
+        const { events, feedReader, feedUrl } = await constructFeedReader(() => ({
             headers: {}, data: `
@@ -89,18 +93,15 @@ describe("FeedReader", () => {
             `
         }));
-        after(() => httpServer.close());
+        await feedReader.pollFeed(feedUrl);
+        feedReader.stop();
+        expect(events).to.have.lengthOf(1);
 
-        const event: any = await new Promise((resolve) => {
-            mq.on('pushed', (data) => { resolve(data); feedReader.stop() });
-        });
-
-        expect(event.eventName).to.equal('feed.entry');
-        expect(event.data.feed.title).to.equal(null);
-        expect(event.data.title).to.equal(null);
+        expect(events[0].data.feed.title).to.equal(null);
+        expect(events[0].data.title).to.equal(null);
     });
     it("should handle RSS 2.0 feeds", async () => {
-        const { mq, feedReader, httpServer } = await constructFeedReader(() => ({
+        const { events, feedReader, feedUrl } = await constructFeedReader(() => ({
             headers: {}, data: `
@@ -125,22 +126,19 @@ describe("FeedReader", () => {
             `
         }));
-        after(() => httpServer.close());
+        await feedReader.pollFeed(feedUrl);
+        feedReader.stop();
+        expect(events).to.have.lengthOf(1);
 
-        const event: MessageQueueMessage = await new Promise((resolve) => {
-            mq.on('pushed', (data) => { resolve(data); feedReader.stop() });
-        });
-
-        expect(event.eventName).to.equal('feed.entry');
-        expect(event.data.feed.title).to.equal('RSS Title');
-        expect(event.data.author).to.equal('John Doe');
-        expect(event.data.title).to.equal('Example entry');
-        expect(event.data.summary).to.equal('Here is some text containing an interesting description.');
-        expect(event.data.link).to.equal('http://www.example.com/blog/post/1');
-        expect(event.data.pubdate).to.equal('Sun, 6 Sep 2009 16:20:00 +0000');
+        expect(events[0].data.feed.title).to.equal('RSS Title');
+        expect(events[0].data.author).to.equal('John Doe');
+        expect(events[0].data.title).to.equal('Example entry');
+        expect(events[0].data.summary).to.equal('Here is some text containing an interesting description.');
+        expect(events[0].data.link).to.equal('http://www.example.com/blog/post/1');
+        expect(events[0].data.pubdate).to.equal('Sun, 6 Sep 2009 16:20:00 +0000');
     });
     it("should handle RSS feeds with a permalink url", async () => {
-        const { mq, feedReader, httpServer } = await constructFeedReader(() => ({
+        const { events, feedReader, feedUrl } = await constructFeedReader(() => ({
             headers: {}, data: `
@@ -164,22 +162,19 @@ describe("FeedReader", () => {
             `
         }));
-        after(() => httpServer.close());
+        await feedReader.pollFeed(feedUrl);
+        feedReader.stop();
+        expect(events).to.have.lengthOf(1);
 
-        const event: MessageQueueMessage = await new Promise((resolve) => {
-            mq.on('pushed', (data) => { resolve(data); feedReader.stop() });
-        });
-
-        expect(event.eventName).to.equal('feed.entry');
-        expect(event.data.feed.title).to.equal('RSS Title');
-        expect(event.data.author).to.equal('John Doe');
-        expect(event.data.title).to.equal('Example entry');
-        expect(event.data.summary).to.equal('Here is some text containing an interesting description.');
-        expect(event.data.link).to.equal('http://www.example.com/blog/post/1');
-        expect(event.data.pubdate).to.equal('Sun, 6 Sep 2009 16:20:00 +0000');
+        expect(events[0].data.feed.title).to.equal('RSS Title');
+        expect(events[0].data.author).to.equal('John Doe');
+        expect(events[0].data.title).to.equal('Example entry');
+        expect(events[0].data.summary).to.equal('Here is some text containing an interesting description.');
+        expect(events[0].data.link).to.equal('http://www.example.com/blog/post/1');
+        expect(events[0].data.pubdate).to.equal('Sun, 6 Sep 2009 16:20:00 +0000');
     });
     it("should handle Atom feeds", async () => {
-        const { mq, feedReader, httpServer } = await constructFeedReader(() => ({
+        const { events, feedReader, feedUrl } = await constructFeedReader(() => ({
             headers: {}, data: `
@@ -207,22 +202,19 @@ describe("FeedReader", () => {
             `
         }));
-        after(() => httpServer.close());
+        await feedReader.pollFeed(feedUrl);
+        feedReader.stop();
+        expect(events).to.have.lengthOf(1);
 
-        const event: MessageQueueMessage = await new Promise((resolve) => {
-            mq.on('pushed', (data) => { resolve(data); feedReader.stop() });
-        });
-
-        expect(event.eventName).to.equal('feed.entry');
-        expect(event.data.feed.title).to.equal('Example Feed');
-        expect(event.data.title).to.equal('Atom-Powered Robots Run Amok');
-        expect(event.data.author).to.equal('John Doe');
-        expect(event.data.summary).to.equal('Some text.');
-        expect(event.data.link).to.equal('http://example.org/2003/12/13/atom03');
-        expect(event.data.pubdate).to.equal('Sat, 13 Dec 2003 18:30:02 +0000');
+        expect(events[0].data.feed.title).to.equal('Example Feed');
+        expect(events[0].data.title).to.equal('Atom-Powered Robots Run Amok');
+        expect(events[0].data.author).to.equal('John Doe');
+        expect(events[0].data.summary).to.equal('Some text.');
+        expect(events[0].data.link).to.equal('http://example.org/2003/12/13/atom03');
+        expect(events[0].data.pubdate).to.equal('Sat, 13 Dec 2003 18:30:02 +0000');
     });
     it("should not duplicate feed entries", async () => {
-        const { mq, feedReader, httpServer, feedUrl } = await constructFeedReader(() => ({
+        const { events, feedReader, feedUrl } = await constructFeedReader(() => ({
             headers: {}, data: `
@@ -240,14 +232,64 @@ describe("FeedReader", () => {
             `
         }));
-        after(() => httpServer.close());
-
-        const events: MessageQueueMessage[] = [];
-        mq.on('pushed', (data) => { if (data.eventName === 'feed.entry') {events.push(data);} });
 
         await feedReader.pollFeed(feedUrl);
         await feedReader.pollFeed(feedUrl);
         await feedReader.pollFeed(feedUrl);
         feedReader.stop();
         expect(events).to.have.lengthOf(1);
     });
+    it("should always hash to the same value for Atom feeds", async () => {
+        const expectedHash = ['md5:d41d8cd98f00b204e9800998ecf8427e'];
+        const { feedReader, feedUrl, storage } = await constructFeedReader(() => ({
+            headers: {}, data: `
+
+
+
+            Atom-Powered Robots Run Amok
+
+            http://example.com/test/123
+
+
+            `
+        }));
+
+        await feedReader.pollFeed(feedUrl);
+        feedReader.stop();
+        const items = await storage.hasSeenFeedGuids(feedUrl, ...expectedHash);
+        expect(items).to.deep.equal(expectedHash);
+    });
+    it("should always hash to the same value for RSS feeds", async () => {
+        const expectedHash = [
+            'md5:98bafde155b931e656ad7c137cd7711e', // guid
+            'md5:72eec3c0d59ff91a80f0073ee4f8511a', // link
+            'md5:7c5dd7e5988ff388ab2a402ce7feb2f0', // title
+        ];
+        const { feedReader, feedUrl, storage } = await constructFeedReader(() => ({
+            headers: {}, data: `
+
+
+
+            RSS Title
+            This is an example of an RSS feed
+
+            Example entry
+            http://www.example.com/blog/post/1
+
+
+            Example entry
+            http://www.example.com/blog/post/2
+
+
+            Example entry 3
+
+
+
+            `
+        }));
+
+        await feedReader.pollFeed(feedUrl);
+        feedReader.stop();
+        const items = await storage.hasSeenFeedGuids(feedUrl, ...expectedHash);
+        expect(items).to.deep.equal(expectedHash);
+    });
 });
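
For reviewers, a minimal sketch of the dedup round trip that the new "should always hash to the same value" tests pin down: the Rust parser now emits `md5:`-prefixed hex digests, so a guid stored before one poll is found again on the next poll instead of the entry being treated as new. This is illustrative TypeScript, not code from this PR; it assumes hash_id in parser.rs is an MD5 hex digest of the entry id (as the md5: test vectors suggest), and the feedEntryGuid helper, demo URL, and import path are placeholders.

// Sketch only, not part of this PR.
import { createHash } from "node:crypto";
// Path as it would look from tests/; adjust to your layout.
import { MemoryStorageProvider } from "../src/Stores/MemoryStorageProvider";

// Hypothetical helper mirroring the Rust side's `format!("md5:{}", f)`,
// assuming hash_id is an MD5 hex digest of the entry id.
function feedEntryGuid(entryId: string): string {
    return `md5:${createHash("md5").update(entryId).digest("hex")}`;
}

async function demo(): Promise<void> {
    const storage = new MemoryStorageProvider();
    const feedUrl = "http://example.com/feed.xml"; // placeholder
    const guid = feedEntryGuid("http://example.com/test/123");

    // First poll: the guid has not been stored yet, so the entry would be emitted.
    console.log(await storage.hasSeenFeedGuids(feedUrl, guid)); // expected: []

    await storage.storeFeedGuids(feedUrl, guid);

    // Later polls (including after an upgrade, as long as the stored format
    // stays `md5:<hex>`): the guid is found, so the entry is skipped.
    console.log(await storage.hasSeenFeedGuids(feedUrl, guid)); // expected: [guid]
}

void demo();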