diff --git a/.eslintrc.js b/.eslintrc.js
index 53d240fd75..50359b113b 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -1,6 +1,6 @@
 module.exports = {
-  extends: ['airbnb-base', 'prettier'],
-  plugins: ['prettier'],
+  extends: ['airbnb-base', 'prettier', 'plugin:promise/recommended'],
+  plugins: ['prettier', 'promise'],
   env: {
     jest: true,
     browser: true,
@@ -23,6 +23,36 @@ module.exports = {
      */
    'no-param-reassign': ['error', { props: false }],

+    /** Disallow unnecessary return await
+     * https://eslint.org/docs/rules/no-return-await
+     */
+    'no-return-await': ['error'],
+
+    /**
+     * Disallow using an async function as a Promise executor
+     * https://eslint.org/docs/rules/no-async-promise-executor
+     */
+    'no-async-promise-executor': ['error'],
+
+    /**
+     * Disallow await inside of loops
+     * https://eslint.org/docs/rules/no-await-in-loop
+     */
+    'no-await-in-loop': ['error'],
+
+    /**
+     * Disallow assignments that can lead to race conditions due to
+     * usage of await or yield
+     * https://eslint.org/docs/rules/require-atomic-updates
+     */
+    'require-atomic-updates': ['error'],
+
+    /**
+     * Disallow async functions which have no await expression
+     * https://eslint.org/docs/rules/require-await
+     */
+    'require-await': ['error'],
+
     /**
      * Require or disallow named function expressions
      * https://eslint.org/docs/rules/func-names
@@ -34,5 +64,20 @@ module.exports = {
      * https://eslint.org/docs/rules/func-names
      */
    'linebreak-style': 'off',
+
+    /**
+     * The following are eslint rules from eslint-plugin-promise
+     * https://github.com/xjamundx/eslint-plugin-promise
+     */
+
+    /**
+     * Prefer await to then() for reading Promise values
+     */
+    'promise/prefer-await-to-then': 'warn',
+
+    /**
+     * Prefer async/await to the callback pattern
+     */
+    'promise/prefer-await-to-callbacks': 'warn',
   },
 };
diff --git a/package.json b/package.json
index 3f895c6930..41c483df3c 100644
--- a/package.json
+++ b/package.json
@@ -91,6 +91,7 @@
     "eslint-config-prettier": "6.7.0",
     "eslint-plugin-import": "2.18.2",
     "eslint-plugin-prettier": "3.1.1",
+    "eslint-plugin-promise": "4.2.1",
     "hint": "6.0.0",
     "husky": "3.1.0",
     "jest": "24.9.0",
diff --git a/src/backend/feed/worker.js b/src/backend/feed/worker.js
index 43b667810a..b0ab935f33 100644
--- a/src/backend/feed/worker.js
+++ b/src/backend/feed/worker.js
@@ -18,7 +18,7 @@ exports.workerCallback = async function(job) {
   return articles.map(article => Post.fromArticle(article));
 };

-exports.start = async function() {
+exports.start = function() {
   // Start processing jobs from the feed queue...
   feedQueue.process(exports.workerCallback);
diff --git a/src/backend/index.js b/src/backend/index.js
index 2da87119e1..7c4fe70795 100644
--- a/src/backend/index.js
+++ b/src/backend/index.js
@@ -40,9 +40,7 @@ async function enqueueWikiFeed() {
 }

 enqueueWikiFeed()
-  .then(() => {
-    feedWorker.start();
-  })
+  .then(() => feedWorker.start())
   .catch(error => {
     log.error({ error }, 'Unable to enqueue wiki feeds');
   });
diff --git a/src/backend/utils/basic_analysis.js b/src/backend/utils/basic_analysis.js
index f2defa1fca..d3d71d39c7 100644
--- a/src/backend/utils/basic_analysis.js
+++ b/src/backend/utils/basic_analysis.js
@@ -71,18 +71,8 @@ module.exports = function analyzeText(text_) {

   // asynchonize function
   // to get a basic analysis information, getAsyAnalysis should be called
-  this.getAsyAnalysis = async function() {
-    return new Promise((res, rej) => {
-      if (isValidString) {
-        res(analysis);
-      } else {
-        res(textInfo);
-      }
-      if (isValidString === undefined) {
-        rej(textInfo);
-      }
-    });
+  this.getAsyAnalysis = function() {
+    return isValidString ? analysis : textInfo;
   };
-
   return this;
 };
diff --git a/src/backend/utils/email-sender.js b/src/backend/utils/email-sender.js
index 444700d489..6b36eae498 100644
--- a/src/backend/utils/email-sender.js
+++ b/src/backend/utils/email-sender.js
@@ -2,8 +2,6 @@ require('../lib/config');
 const nodemailer = require('nodemailer');
 const { logger } = require('./logger');

-const log = logger.child({ module: 'email-sender' });
-
 /* HOW TO USE
 Import this file - const sendEmail = require('./email-sender);
@@ -69,39 +67,36 @@ exports.verifyTransporter = function(transporter) {
   transporter.verify(err => {
     // If error then print to console
     if (err) {
-      log.error({ err }, 'Transporter connection failed.');
+      logger.error({ err }, 'Transporter connection failed.');
       return false;
     }
     // else print a ready message
-    log.info('Server is running properly');
+    logger.info('Server is running properly');
     return true;
   });
 };

 // Sends a message using the passed in parameters
 exports.sendMessage = async function(receipiants, subjectMessage, message) {
-  return new Promise((resolve, reject) => {
-    const transporter = this.createTransporter(
+  try {
+    const transporter = await this.createTransporter(
       process.env.NODEMAILER_SERVER,
       2222,
       false,
       process.env.NODEMAILER_USERNAME,
       process.env.NODEMAILER_PASSWORD
     );
-    const allGood = this.verifyTransporter(transporter);
-    if (!allGood) {
-      reject(new Error()); // Send promise.reject if an error occurs
+    if (!this.verifyTransporter(transporter)) {
+      throw new Error('Email transport could not be verified');
     }
     // Creates email for to be sent
     const mail = this.createMail(receipiants, subjectMessage, message);
     // Send the email with the email content
-    transporter.sendMail(mail, (err, info) => {
-      if (err) {
-        reject(err); // Send promise.reject if an error occurs
-      } else {
-        resolve(info.accepted); // Send promise.resolve if an error occurs
-      }
-    });
-  });
+    const result = await transporter.sendMail(mail);
+    return result.accepted;
+  } catch (error) {
+    logger.error({ error }, 'Unable to send email');
+    throw error;
+  }
 };
diff --git a/src/backend/utils/github-url.js b/src/backend/utils/github-url.js
index b74cb48008..4ab80be472 100644
--- a/src/backend/utils/github-url.js
+++ b/src/backend/utils/github-url.js
@@ -1,6 +1,7 @@
 require('../lib/config');
 const parseGithubUrl = require('parse-github-url');
 const fetch = require('node-fetch');
+const { logger } = require('./logger');

 const githubAPI = 'http://api.github.com';

@@ -56,69 +57,71 @@ exports.getGithubUrlData = async incomingUrl => {
     typeof process.env.GITHUB_TOKEN !== 'undefined' ? `?access_token=${process.env.GITHUB_TOKEN}` : '';

+  let data;
+  try {
+    const fetchResult = await fetch(`${githubAPI}${subUrl}`);
+    data = await fetchResult.json();
+  } catch (error) {
+    logger.error({ error }, 'Unable to fetch GitHub API results');
+    throw error;
+  }
+  let fetchedData;
+  /**
+   * Format the fetched data in a specific way depending on whether the url
+   * is for a repo, user or for a {pull request, issue}
+   */
+  // User
+  if (ghUrl.repo === null) {
+    const { login: user, avatar_url: avatarURL, name, company, blog, email, bio } = data;
-  return fetch(`${githubAPI}${subUrl}`)
-    .then(res => res.json())
-    .then(data => {
-      let fetchedData;
-
-      /**
-       * Format the fetched data in a specifi way depending if the url
-       * is for a repo, user or for a {pull request, issue}
-       */
-      // User
-      if (ghUrl.repo === null) {
-        const { login: user, avatar_url: avatarURL, name, company, blog, email, bio } = data;
-
-        fetchedData = {
-          user,
-          avatarURL,
-          name,
-          company,
-          blog,
-          email,
-          bio,
-        };
+    fetchedData = {
+      user,
+      avatarURL,
+      name,
+      company,
+      blog,
+      email,
+      bio,
+    };

-        // Repo
-      } else if (ghUrl.branch === 'master') {
-        const {
-          owner: { avatar_url: avatarURL },
-          description,
-          license,
-          open_issues: openIssues,
-          forks,
-          created_at: createdAt,
-          language,
-        } = data;
+    // Repo
+  } else if (ghUrl.branch === 'master') {
+    const {
+      owner: { avatar_url: avatarURL },
+      description,
+      license,
+      open_issues: openIssues,
+      forks,
+      created_at: createdAt,
+      language,
+    } = data;

-        fetchedData = {
-          avatarURL,
-          description,
-          license,
-          openIssues,
-          forks,
-          createdAt,
-          language,
-        };
+    fetchedData = {
+      avatarURL,
+      description,
+      license,
+      openIssues,
+      forks,
+      createdAt,
+      language,
+    };

-        // Issue or Pull Request
-      } else {
-        const {
-          user: { login, avatar_url: avatarURL },
-          body,
-          created_at: createdAt,
-        } = data;
+    // Issue or Pull Request
+  } else {
+    const {
+      user: { login, avatar_url: avatarURL },
+      body,
+      created_at: createdAt,
+    } = data;

-        fetchedData = {
-          login,
-          avatarURL,
-          body,
-          createdAt,
-          branch: ghUrl.branch,
-          repo: ghUrl.name,
-        };
-      }
-      return fetchedData;
-    });
+    fetchedData = {
+      login,
+      avatarURL,
+      body,
+      createdAt,
+      branch: ghUrl.branch,
+      repo: ghUrl.name,
+    };
+  }
+  return fetchedData;
 };
diff --git a/src/backend/utils/inactive-blog-filter.js b/src/backend/utils/inactive-blog-filter.js
index 911937772e..53bea602df 100644
--- a/src/backend/utils/inactive-blog-filter.js
+++ b/src/backend/utils/inactive-blog-filter.js
@@ -9,7 +9,6 @@ require('../lib/config.js');
 const { logger } = require('./logger');

 const fsPromises = fs.promises;
-const log = logger.child({ module: 'inactive-blog-filter' });

 /**
  * Condition for passing redlist some() check
@@ -39,17 +38,14 @@ function dateDiff(postDate) {
  */
 async function check(feedUrl, redlistPath = 'feeds-redlist.json') {
   // Read redlist file
-  return fsPromises
-    .readFile(redlistPath, 'utf-8')
-    .then(redListRaw => {
-      // Concat to array no matter what
-      const redList = [].concat(JSON.parse(redListRaw));
-      return Promise.resolve(redList.some(isRedlisted.bind(null, feedUrl)));
-    })
-    .catch(error => {
-      log.error({ error }, `failed to read ${redlistPath}`);
-      return Promise.reject(error);
-    });
+  try {
+    const redListRaw = await fsPromises.readFile(redlistPath, 'utf-8');
+    const redList = [].concat(JSON.parse(redListRaw));
+    return redList.some(redItem => isRedlisted(feedUrl, redItem));
+  } catch (error) {
+    logger.error({ error }, `failed to read ${redlistPath}`);
+    throw error;
+  }
 }

 /**
@@ -66,12 +62,13 @@ async function check(feedUrl, redlistPath = 'feeds-redlist.json') {
  */
 async function update(feedsPath = 'feeds.txt', redlistPath = 'feeds-redlist.json', parser = parse) {
   if (!feedsPath || !redlistPath) {
-    return Promise.reject(new Error('failed to update: bad filepath'));
+    throw new Error('failed to update: bad filepath');
   }

-  // Read the feeds list file
-  return fsPromises.readFile(feedsPath, 'utf-8').then(async lines => {
-    // Divide the file into separate lines
+  try {
+    const redlistUpdate = [];
+    // Read the feeds list file
+    const lines = await fsPromises.readFile(feedsPath, 'utf-8');
     const feedUrlList = lines
       .split(/\r?\n/)
       // Basic filtering to remove any ines that don't look like a feed URL
@@ -79,63 +76,53 @@
       // Convert this into an Object of the form expected by our queue
       .map(url => ({ url }));

-    const redlistUpdate = [];
     let linesRead = 0;
-
-    feedUrlList.forEach(async feedItem => {
-      const feed = await parser(feedItem);
-      const feedUrl = feedItem.url;
-
-      let recentPostDate = new Date();
-
-      if (feed && typeof feed[0] !== 'undefined') {
-        recentPostDate = new Date(feed[0].date);
-
-        // Check if the blog is inactive
-        // We convert the dateDiff(result) from ms in days
-        const timeDiff = Math.ceil(dateDiff(recentPostDate) / (1000 * 3600 * 24));
-
-        // BLOG_INACTIVE_TIME is supplied with a necessary default value, e.g. of 365 days
-        // See: https://github.com/Seneca-CDOT/telescope/issues/396
-        if (timeDiff > (process.env.BLOG_INACTIVE_TIME || 365)) {
-          log.info(`Blog at: ${feedUrl} is INACTIVE!`);
-
+    await Promise.all(
+      feedUrlList.map(async feedItem => {
+        const feed = await parser(feedItem);
+        const feedUrl = feedItem.url;
+        let recentPostDate = new Date();
+
+        if (feed && typeof feed[0] !== 'undefined') {
+          recentPostDate = new Date(feed[0].date);
+
+          // Check if the blog is inactive
+          // We convert the dateDiff(result) from ms in days
+          const timeDiff = Math.ceil(dateDiff(recentPostDate) / (1000 * 3600 * 24));
+
+          // BLOG_INACTIVE_TIME is supplied with a necessary default value, e.g. 365 days
+          // See: https://github.com/Seneca-CDOT/telescope/issues/396
+          if (timeDiff > (process.env.BLOG_INACTIVE_TIME || 365)) {
+            logger.info(`Blog at: ${feedUrl} is INACTIVE!`);
+
+            redlistUpdate.push({
+              url: feedUrl,
+              lastUpdate: feed[0].date,
+            });
+          }
+        } else {
+          // In case of invalid/ dead feeds
+          logger.info(`Blog at: ${feedUrl} HAS NOTHING TO SHARE!`);
           redlistUpdate.push({
             url: feedUrl,
-            lastUpdate: feed[0].date,
+            lastUpdate: null,
           });
         }
-      } else {
-        // In case of invalid/ dead feeds
-        log.info(`Blog at: ${feedUrl} HAS NOTHING TO SHARE!`);
-
-        redlistUpdate.push({
-          url: feedUrl,
-          lastUpdate: null,
-        });
-      }
-
-      // Use a counter to ensure all feeds are processed before writing redlist
-      linesRead += 1;
-
-      if (linesRead === feedUrlList.length) {
-        // Write the new feeds-redlist.json
-        const rlData = JSON.stringify(redlistUpdate, null, 2);
+        linesRead += 1;

-        await fsPromises
-          .writeFile(redlistPath, rlData)
-          .then(() => {
-            log.info(`wrote to ${redlistPath}: ${rlData}`);
-          })
-          .catch(error => {
-            log.error({ error }, `Cannot write to file ${redlistPath} `);
-            throw error;
-          });
-      }
-    });
-
-    return Promise.resolve(redlistUpdate);
-  });
+        if (linesRead === feedUrlList.length) {
+          // Write the new feeds-redlist.json
+          const rlData = JSON.stringify(redlistUpdate, null, 2);
+          await fsPromises.writeFile(redlistPath, rlData);
+          logger.info(`wrote to ${redlistPath}: ${rlData}`);
+        }
+      })
+    );
+    return redlistUpdate;
+  } catch (error) {
+    logger.error({ error });
+    throw error;
+  }
 }

 exports.check = check;
diff --git a/src/backend/utils/sentiment-analysis.js b/src/backend/utils/sentiment-analysis.js
index 81a84ccd56..1c3fb9ed58 100644
--- a/src/backend/utils/sentiment-analysis.js
+++ b/src/backend/utils/sentiment-analysis.js
@@ -2,12 +2,12 @@
  * negative or positve words being used in a post and return a summary of it
  * along with a score. The file uses a node module called sentiment to implement
  * the functionality of analyzing text of blogs. The function accepts plain
- * text as a parameter i.e text containg no HTML tags, and returns a promise
- * object whcih contains the result.
+ * text as a parameter, i.e. text containing no HTML tags, and returns an
+ * object which contains the result.
  */
 const Sentiment = require('sentiment');

-module.exports.run = async function(text) {
+module.exports.run = function(text) {
   const sentiment = new Sentiment();
-  return Promise.resolve(sentiment.analyze(text));
+  return sentiment.analyze(text);
 };
diff --git a/src/backend/utils/storage.js b/src/backend/utils/storage.js
index c5d76da493..b48ac58154 100644
--- a/src/backend/utils/storage.js
+++ b/src/backend/utils/storage.js
@@ -68,5 +68,5 @@

   getPostsCount: () => redis.zcard(POSTS),

-  getPost: async guid => redis.hgetall(guid),
+  getPost: guid => redis.hgetall(guid),
 };
diff --git a/src/backend/utils/summarize.js b/src/backend/utils/summarize.js
index 1677c4dc7e..1d025488f5 100644
--- a/src/backend/utils/summarize.js
+++ b/src/backend/utils/summarize.js
@@ -1,6 +1,10 @@
+/**
+ * https://www.npmjs.com/package/node-summarizer#desc
+ */
 const { SummarizerManager } = require('node-summarizer');

 module.exports = async function(text, numSentences = 3) {
   const summarizer = new SummarizerManager(text, numSentences);
-  return summarizer.getSummaryByRank().then(summaryObject => summaryObject.summary);
+  const summaryByRank = await summarizer.getSummaryByRank();
+  return summaryByRank.summary;
 };
diff --git a/src/backend/utils/word-counter.js b/src/backend/utils/word-counter.js
deleted file mode 100644
index 7df559f5b1..0000000000
--- a/src/backend/utils/word-counter.js
+++ /dev/null
@@ -1,5 +0,0 @@
-const wordcount = require('wordcount');
-
-exports.wordCounter = async function wordCounter(blogText) {
-  return Promise.resolve(wordcount(blogText));
-};
diff --git a/test/basic_analysis.test.js b/test/basic_analysis.test.js
index d5ed9d1909..c09639302f 100644
--- a/test/basic_analysis.test.js
+++ b/test/basic_analysis.test.js
@@ -55,9 +55,9 @@ describe('Basic analysis test 1', () => {
     data = await analyzeText(text2).getAsyAnalysis();
   });

-  test('the number of of word count is ', async () => expect(data.wordCount).toEqual(7));
-  test('the level of of readability is ', async () => expect(data.readability).toEqual('hard'));
-  test('the number of of reading time is ', async () => expect(data.readingTime).toEqual(0));
+  test('the number of word count is ', () => expect(data.wordCount).toEqual(7));
+  test('the level of readability is ', () => expect(data.readability).toEqual('hard'));
+  test('the number of reading time is ', () => expect(data.readingTime).toEqual(0));
 });

 describe('Basic analysis test 2', () => {
@@ -66,9 +66,9 @@
     data = await analyzeText(text3).getAsyAnalysis();
   });

-  test('the number of of word count is ', async () => expect(data.wordCount).toEqual(36));
-  test('the level of of readability is ', async () => expect(data.readability).toEqual('hard'));
-  test('the number of of reading time is ', async () => expect(data.readingTime).toEqual(0));
+  test('the number of word count is ', () => expect(data.wordCount).toEqual(36));
+  test('the level of readability is ', () => expect(data.readability).toEqual('hard'));
+  test('the number of reading time is ', () => expect(data.readingTime).toEqual(0));
 });

 describe('Basic analysis test 3', () => {
@@ -77,9 +77,9 @@
     data = await analyzeText(text4).getAsyAnalysis();
   });

-  test('the number of of word count is ', async () => expect(data.wordCount).toEqual(560));
-  test('the level of of readability is ', async () => expect(data.readability).toEqual('hard'));
-  test('the number of of reading time is ', async () => expect(data.readingTime).toEqual(3));
+  test('the number of word count is ', () => expect(data.wordCount).toEqual(560));
+  test('the level of readability is ', () => expect(data.readability).toEqual('hard'));
+  test('the number of reading time is ', () => expect(data.readingTime).toEqual(3));
 });

 describe('Basic analysis test 4', () => {
@@ -88,9 +88,9 @@
     data = await analyzeText(text5).getAsyAnalysis();
   });

-  test('the number of of word count is ', async () => expect(data.wordCount).toEqual(6254));
-  test('the level of of readability is ', async () => expect(data.readability).toEqual('hard'));
-  test('the number of of reading time is ', async () => expect(data.readingTime).toEqual(31));
+  test('the number of word count is ', () => expect(data.wordCount).toEqual(6254));
+  test('the level of readability is ', () => expect(data.readability).toEqual('hard'));
+  test('the number of reading time is ', () => expect(data.readingTime).toEqual(31));
 });

 describe('Basic analysis test 5', () => {
@@ -99,7 +99,7 @@
     data = await analyzeText(text6).getAsyAnalysis();
   });

-  test('the number of of word count is ', async () => expect(data.wordCount).toEqual(25648));
-  test('the level of of readability is ', async () => expect(data.readability).toEqual('hard'));
-  test('the number of of reading time is ', async () => expect(data.readingTime).toEqual(128));
+  test('the number of word count is ', () => expect(data.wordCount).toEqual(25648));
+  test('the level of readability is ', () => expect(data.readability).toEqual('hard'));
+  test('the number of reading time is ', () => expect(data.readingTime).toEqual(128));
 });
diff --git a/test/email-sender.test.js b/test/email-sender.test.js
index a94f40588a..b8aeb73b16 100644
--- a/test/email-sender.test.js
+++ b/test/email-sender.test.js
@@ -10,7 +10,11 @@ test.skip('Tests if sendMessage resolves with expected info', async () => {
   const testSubjectMessage = 'Test';
   const testHTML = 'Hello World';
   const testRecipient = process.env.NODEMAILER_TESTRECIPIENT;
-  const testReturnValue = emailsender.sendMessage(testRecipient, testSubjectMessage, testHTML);
+  const testReturnValue = await emailsender.sendMessage(
+    testRecipient,
+    testSubjectMessage,
+    testHTML
+  );
   // Tests if the expected resolve is correct
   expect(testReturnValue).resolves.toBe([testRecipient]);
 });
diff --git a/test/feed-queue.test.js b/test/feed-queue.test.js
index b21a8eecad..0d8cad1d36 100644
--- a/test/feed-queue.test.js
+++ b/test/feed-queue.test.js
@@ -7,5 +7,5 @@ afterAll(() => feedQueue.close());
 /**
  * feedQueue.add() returns a promise which is usually true unless the Redis server is offline.
  */
-test('able to add a job to the feed queue', async () =>
+test('able to add a job to the feed queue', () =>
   feedQueue.add('http://ajhooper.blogspot.com/feeds/posts/default'));
diff --git a/test/inactive-blog-filter.test.js b/test/inactive-blog-filter.test.js
index db9d26dd71..6681e1803c 100644
--- a/test/inactive-blog-filter.test.js
+++ b/test/inactive-blog-filter.test.js
@@ -36,31 +36,19 @@ describe('Redlisted feed checking', () => {
   it('should return false for bad feed URLs', async () => {
     const badURLs = [redlist[0].url.slice(0, -1), '', undefined];

-    badURLs.forEach(async url => {
-      await inactiveFilter
-        .check(url)
-        .then(isRedlisted => {
-          expect(isRedlisted).toBe(false);
-        })
-        .catch(err => {
-          log.error(err.message);
-          throw err;
-        });
-    });
+    await Promise.all(
+      badURLs.map(async url => {
+        expect(await inactiveFilter.check(url)).toBe(false);
+      })
+    );
   });

   it('should return true for all feed URLs in feeds-redlist.json', async () => {
-    redlist.forEach(async feed => {
-      await inactiveFilter
-        .check(feed.url)
-        .then(isRedlisted => {
-          expect(isRedlisted).toBe(true);
-        })
-        .catch(err => {
-          log.error(err.message);
-          throw err;
-        });
-    });
+    await Promise.all(
+      redlist.map(async feed => {
+        expect(await inactiveFilter.check(feed.url)).toBe(true);
+      })
+    );
   });

   it('should handle being passed a bad filename by rejecting with an error', async () => {
@@ -129,54 +117,56 @@ describe('Testing of update()', () => {
   }

   it('should not redlist active blogs', async () => {
-    return inactiveFilter
-      .update(pathToActiveFeeds, pathToMockRedList, mockFeedParser)
-      .then(data => {
-        expect(data.length).toEqual(0);
-      })
-      .catch(err => {
-        deleteMockRedlist();
-        log.error(err.message);
-        throw err;
-      });
+    try {
+      const data = await inactiveFilter.update(
+        pathToActiveFeeds,
+        pathToMockRedList,
+        mockFeedParser
+      );
+      expect(data.length).toEqual(0);
+    } catch (err) {
+      deleteMockRedlist();
+      log.error(err.message);
+      throw err;
+    }
   });

   it('should redlist inactive blogs', async () => {
     const inactiveRedlist = mockFeeds.filter(item => {
       return item.status === 'inactive';
     });
-    return inactiveFilter
-      .update(pathToInactiveFeeds, pathToMockRedList, mockFeedParser)
-      .then(data => {
-        expect(data.length).toEqual(inactiveRedlist.length);
-        for (let i = 0; i < inactiveRedlist.length; i += 1) {
-          expect(data[i].url).toBe(inactiveRedlist[i].url);
-        }
-      })
-      .catch(err => {
-        deleteMockRedlist();
-        log.error(err.message);
-        throw err;
-      });
+    try {
+      const data = await inactiveFilter.update(
+        pathToInactiveFeeds,
+        pathToMockRedList,
+        mockFeedParser
+      );
+      expect(data.length).toEqual(inactiveRedlist.length);
+      for (let i = 0; i < inactiveRedlist.length; i += 1) {
+        expect(data[i].url).toBe(inactiveRedlist[i].url);
+      }
+    } catch (err) {
+      deleteMockRedlist();
+      log.error(err.message);
+      throw err;
+    }
   });

   it('should redlist invalid or dead blogs', async () => {
     const deadRedlist = mockFeeds.filter(item => {
       return item.status === 'dead';
     });
-    return inactiveFilter
-      .update(pathToDeadFeeds, pathToMockRedList, mockFeedParser)
-      .then(data => {
-        expect(data.length).toEqual(deadRedlist.length);
-        for (let i = 0; i < deadRedlist.length; i += 1) {
-          expect(data[i].url).toBe(deadRedlist[i].url);
-        }
-      })
-      .catch(err => {
-        deleteMockRedlist();
-        log.error(err.message);
-        throw err;
-      });
+    try {
+      const data = await inactiveFilter.update(pathToDeadFeeds, pathToMockRedList, mockFeedParser);
+      expect(data.length).toEqual(deadRedlist.length);
+      for (let i = 0; i < deadRedlist.length; i += 1) {
+        expect(data[i].url).toBe(deadRedlist[i].url);
+      }
+    } catch (err) {
+      deleteMockRedlist();
+      log.error(err.message);
+      throw err;
+    }
   });

   it('should handle an invalid first argument by throwing an error', async () => {
diff --git a/test/posts.test.js b/test/posts.test.js
index 45729bc5a0..45d7f1aab6 100644
--- a/test/posts.test.js
+++ b/test/posts.test.js
@@ -24,9 +24,7 @@ describe('test /posts endpoint', () => {
     };
   });

-  beforeAll(() => {
-    Promise.all(posts.map(post => addPost(post)));
-  });
+  beforeAll(() => Promise.all(posts.map(post => addPost(post))));

   it('requests default number of items', async () => {
     const res = await request(app).get('/posts');
@@ -120,8 +118,8 @@ describe('test /posts/:guid responses', () => {

   // add the post to the storage
   beforeAll(async () => {
-    addedPost1.save();
-    addedPost2.save();
+    await addedPost1.save();
+    await addedPost2.save();
   });

   // tests
diff --git a/test/spam-checker.test.js b/test/spam-checker.test.js
index 7cfd52fdbc..b4d3fc13d8 100644
--- a/test/spam-checker.test.js
+++ b/test/spam-checker.test.js
@@ -14,32 +14,27 @@ describe('Spam detection checks', () => {
   });

   test('Should return true on first post (spam, title is detected as spam by spam-filter)', async () => {
-    checkForSpam(parsedContent[0]).then(result => {
-      expect(result).toBe(true);
-    });
+    const result = await checkForSpam(parsedContent[0]);
+    expect(result).toBe(true);
   });

   test('Should return false on second post (not spam)', async () => {
-    checkForSpam(parsedContent[1]).then(result => {
-      expect(result).toBe(false);
-    });
+    const result = await checkForSpam(parsedContent[1]);
+    expect(result).toBe(false);
   });

   test('Should return true on third post (spam, no title)', async () => {
-    checkForSpam(parsedContent[2]).then(result => {
-      expect(result).toBe(true);
-    });
+    const result = await checkForSpam(parsedContent[2]);
+    expect(result).toBe(true);
   });

   test('Should return true on fourth post (spam, all caps)', async () => {
-    checkForSpam(parsedContent[3]).then(result => {
-      expect(result).toBe(true);
-    });
+    const result = await checkForSpam(parsedContent[3]);
+    expect(result).toBe(true);
   });

   test('Should return true on fifth post (spam, less than 5 words)', async () => {
-    checkForSpam(parsedContent[4]).then(result => {
-      expect(result).toBe(true);
-    });
+    const result = await checkForSpam(parsedContent[4]);
+    expect(result).toBe(true);
   });
 });
diff --git a/test/storage.test.js b/test/storage.test.js
index 9a0706c15e..4534a11cc1 100644
--- a/test/storage.test.js
+++ b/test/storage.test.js
@@ -80,9 +80,7 @@ describe('Storage tests for posts', () => {
     site: 'foo',
   };

-  beforeAll(() => {
-    Promise.all([testPost, testPost2, testPost3].map(post => addPost(post)));
-  });
+  beforeAll(() => Promise.all([testPost, testPost2, testPost3].map(post => addPost(post))));

   it('should allow retrieving a post by guid after inserting', async () => {
     const result = await getPost(testPost.guid);
diff --git a/test/word-counter.test.js b/test/word-counter.test.js
deleted file mode 100644
index 3843c10254..0000000000
--- a/test/word-counter.test.js
+++ /dev/null
@@ -1,6 +0,0 @@
-const { wordCounter } = require('../src/backend/utils/word-counter');
-
-test('counts the number of words in a 5-word long sentence', () =>
-  wordCounter('one two three four five').then(data => {
-    expect(data).toBe(5);
-  }));