Remove wiki-feed-parser, add logic to get feeds from supabase #3363

Merged 1 commit on Apr 13, 2022
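
This change drops the wiki-feed-parser utility: instead of scraping the CDOT wiki feed list, the parser now reads the feeds table from Supabase through a new src/utils/supabase.js helper. A minimal sketch of how the rest of the parser is expected to consume it (the loadFeeds wrapper below is illustrative; getAllFeeds and the { author, url } shape come from this PR):

// Sketch only: consume the new Supabase helper introduced in this PR.
const { getAllFeeds } = require('./utils/supabase');

const loadFeeds = async () => {
  // Each feed is a plain { author, url } object, ready to be queued.
  const feeds = await getAllFeeds();
  feeds.forEach(({ author, url }) => console.log(`${author}: ${url}`));
};
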
5 changes: 5 additions & 0 deletions docker/development.yml
@@ -90,6 +90,11 @@ services:
context: ../src/api/parser
cache_from:
- docker.cdot.systems/parser:buildcache
environment:
# In development and testing, the parser service needs to contact the Supabase
# service directly over the Docker network rather than through the http://localhost/v1/supabase domain.
# Using staging database
- SUPABASE_URL=https://dev.api.telescope.cdot.systems/v1/supabase
depends_on:
- elasticsearch
- traefik
2 changes: 2 additions & 0 deletions docker/docker-compose.yml
@@ -196,6 +196,8 @@ services:
- ELASTIC_MAX_RESULTS_PER_PAGE
- ELASTIC_URL=http://elasticsearch
- ELASTIC_PORT=9200
- SERVICE_ROLE_KEY
- SUPABASE_URL
# Satellite authentication/authorization support
- JWT_ISSUER
- JWT_AUDIENCE
2 changes: 2 additions & 0 deletions pnpm-lock.yaml

Some generated files are not rendered by default.

9 changes: 9 additions & 0 deletions src/api/parser/env.local
@@ -32,3 +32,12 @@ FEED_QUEUE_DELAY_MS=60000
FEED_QUEUE_PARALLEL_WORKERS=1

PARSER_PORT = 10000

################################################################################
# Supabase Services
################################################################################

# Supabase Secrets
# Using staging database
#SUPABASE_URL=http://localhost/v1/supabase
SUPABASE_URL=https://dev.supabase.telescope.cdot.systems/
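
env.local only covers local development; in the compose files above the same SUPABASE_URL and SERVICE_ROLE_KEY values are passed into the parser container. A quick smoke test for checking that the configured values actually reach Supabase, assuming supabase-js v1 and the feeds table used by this PR (this snippet is hypothetical, not part of the change):

// Hypothetical check: select a single row from `feeds` to confirm that
// SUPABASE_URL and SERVICE_ROLE_KEY are configured correctly.
const { createClient } = require('@supabase/supabase-js');

const { SUPABASE_URL, SERVICE_ROLE_KEY } = process.env;
const supabase = createClient(SUPABASE_URL, SERVICE_ROLE_KEY);

supabase
  .from('feeds')
  .select('url')
  .limit(1)
  .then(({ data, error }) => {
    if (error) throw new Error(`Supabase check failed: ${error.message}`);
    console.log('Supabase reachable, sample row:', data);
  });
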
1 change: 1 addition & 0 deletions src/api/parser/package.json
@@ -21,6 +21,7 @@
"@bull-board/api": "3.10.3",
"@bull-board/express": "3.10.3",
"@senecacdot/satellite": "^1.27.0",
"@supabase/supabase-js": "1.29.4",
"bull": "3.29.3",
"clean-whitespace": "0.1.2",
"highlight.js": "11.4.0",
6 changes: 3 additions & 3 deletions src/api/parser/src/parser.js
@@ -1,7 +1,7 @@
const { logger } = require('@senecacdot/satellite');
const { feedQueue } = require('./feed/queue');
const getWikiFeeds = require('./utils/wiki-feed-parser');
const Feed = require('./data/feed');
const { getAllFeeds } = require('./utils/supabase');

/**
* Adds the feed to the database if necessary, or gets a more complete
@@ -58,8 +58,8 @@ const processFeeds = (feeds) => {
*/
const processAllFeeds = async () => {
try {
// Get an Array of Feed objects from the wiki feed list
const feeds = await getWikiFeeds();
// Get an Array of Feed objects from the Supabase feeds table
const feeds = await getAllFeeds();
// Process these feeds into the database and feed queue
await processFeeds(feeds);
} catch (err) {
30 changes: 30 additions & 0 deletions src/api/parser/src/utils/supabase.js
@@ -0,0 +1,30 @@
const { logger } = require('@senecacdot/satellite');
const { createClient } = require('@supabase/supabase-js');

const { SUPABASE_URL, SERVICE_ROLE_KEY } = process.env;

// Fail fast, before creating a client, if the Supabase configuration is missing.
if (!SUPABASE_URL || !SERVICE_ROLE_KEY) {
  logger.error('SUPABASE_URL or SERVICE_ROLE_KEY is missing');
  process.exit(1);
}

const supabase = createClient(SUPABASE_URL, SERVICE_ROLE_KEY);

module.exports = {
  async getAllFeeds() {
    const { data, error } = await supabase
      .from('feeds')
      .select('wiki_author_name, url, telescope_profiles (display_name)');

    if (error) {
      logger.error({ error });
      throw new Error(`can't fetch feeds from supabase: ${error.message}`);
    }

    return data.map((feed) => ({
      // Prefer the user's display name if present, fall back to the wiki author name.
      // supabase-js returns the joined telescope_profiles columns as a nested object.
      author: feed.telescope_profiles?.display_name || feed.wiki_author_name,
      url: feed.url,
    }));
  },
};
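
For reference, supabase-js v1 returns the columns selected from the joined telescope_profiles table nested under the table name, which is why the mapping above reaches into feed.telescope_profiles. Roughly, with illustrative values:

// Illustrative only: one row as returned by the select above, and the
// { author, url } object getAllFeeds() maps it to.
const row = {
  wiki_author_name: 'jdoe',
  url: 'https://example.com/feed.xml',
  telescope_profiles: { display_name: 'Jane Doe' },
};

const feed = {
  author: row.telescope_profiles?.display_name || row.wiki_author_name,
  url: row.url,
};
// => { author: 'Jane Doe', url: 'https://example.com/feed.xml' }
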
102 changes: 0 additions & 102 deletions src/api/parser/src/utils/wiki-feed-parser.js

This file was deleted.

16 changes: 10 additions & 6 deletions src/api/parser/test/e2e/parser-flow.test.js
@@ -4,11 +4,14 @@ const normalizeUrl = require('normalize-url');
const { loadFeedsIntoQueue, invalidateFeed } = require('../../src/parser');
const feedWorker = require('../../src/feed/worker');
const { feedQueue } = require('../../src/feed/queue');
const getWikiFeeds = require('../../src/utils/wiki-feed-parser');
const { getAllFeeds } = require('../../src/utils/supabase');

const urlToId = (url) => hash(normalizeUrl(url));

jest.mock('../../src/utils/wiki-feed-parser', () => jest.fn());
jest.mock('../../src/utils/supabase', () => ({
...jest.requireActual('../../src/utils/supabase'),
getAllFeeds: jest.fn(),
}));

const fetchData = async (url) => {
const res = await fetch(url);
@@ -36,9 +39,10 @@ const processFeeds = () => {

let satellite;

beforeAll(() => {
beforeAll(async () => {
satellite = new Satellite();
return processFeeds(); // start the feed queue for e2e test
await feedQueue.empty(); // remove jobs from the queue
await processFeeds(); // start the feed queue for e2e test
});

afterAll(() => Promise.all([feedQueue.close(), satellite.stop()]));
@@ -55,7 +59,7 @@ describe("Testing parser service's flow", () => {
url: 'http://localhost:8888/feed.xml',
},
];
getWikiFeeds.mockImplementation(() => Promise.resolve(valid)); // mock getWikiFeeds to return what feeds we want to test
getAllFeeds.mockImplementation(() => Promise.resolve(valid));
loadFeedsIntoQueue();
await waitForDrained();

@@ -83,7 +87,7 @@ describe("Testing parser service's flow", () => {
url: 'https://janehasinvalidfeed.com/feed',
},
];
getWikiFeeds.mockImplementation(() => Promise.resolve(invalid));
getAllFeeds.mockImplementation(() => Promise.resolve(invalid));
loadFeedsIntoQueue();
await waitForDrained();

62 changes: 0 additions & 62 deletions src/api/parser/test/wiki-feed-parser.test.js

This file was deleted.
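
The old wiki-feed-parser.test.js goes away without a direct unit-test replacement; the new helper is only exercised through the e2e flow above. A sketch of what a unit test for getAllFeeds() could look like, mocking @supabase/supabase-js so no network access is needed (the test itself is hypothetical; names and paths follow this PR):

// Hypothetical unit test, not part of this PR: mock @supabase/supabase-js so
// getAllFeeds() can be exercised without a running Supabase instance.
process.env.SUPABASE_URL = 'http://localhost/v1/supabase';
process.env.SERVICE_ROLE_KEY = 'test-service-role-key';

const mockRows = [
  {
    wiki_author_name: 'jdoe',
    url: 'https://example.com/feed.xml',
    telescope_profiles: { display_name: 'Jane Doe' },
  },
];

jest.mock('@supabase/supabase-js', () => ({
  createClient: jest.fn(() => ({
    from: jest.fn(() => ({
      select: jest.fn(() => Promise.resolve({ data: mockRows, error: null })),
    })),
  })),
}));

const { getAllFeeds } = require('../src/utils/supabase');

test('getAllFeeds() maps Supabase rows to { author, url } objects', async () => {
  const feeds = await getAllFeeds();
  expect(feeds).toEqual([{ author: 'Jane Doe', url: 'https://example.com/feed.xml' }]);
});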