From 66eaf246e214163fcbfb2016fc57e0eecc4a2882 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lukas=20Spie=C3=9F?= Date: Thu, 18 Jan 2024 15:41:22 +0000 Subject: [PATCH 01/11] Remove unused .prettierrc --- .prettierrc | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 .prettierrc diff --git a/.prettierrc b/.prettierrc deleted file mode 100644 index 0cf184f6..00000000 --- a/.prettierrc +++ /dev/null @@ -1,3 +0,0 @@ -printWidth: 120 -singleQuote: true -semi: false From 55e31399c7d779009b5c6041121a482fb5305d03 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lukas=20Spie=C3=9F?= Date: Thu, 18 Jan 2024 15:41:45 +0000 Subject: [PATCH 02/11] Update ESLint --- package-lock.json | 843 ++++++++++++++++++++++++---------------------- package.json | 2 +- 2 files changed, 438 insertions(+), 407 deletions(-) diff --git a/package-lock.json b/package-lock.json index 483177c9..66ff351c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -62,7 +62,7 @@ "chai": "^4.2.0", "chai-as-promised": "^7.1.1", "chai-spies": "^1.0.0", - "eslint": "^7.5", + "eslint": "^8.56.0", "istanbul": "^0.4.5", "mocha": "^8.2.0", "nyc": "^15.0.0", @@ -70,6 +70,15 @@ "sinon": "^5.0.0" } }, + "node_modules/@aashutoshrathi/word-wrap": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", + "integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/@babel/code-frame": { "version": "7.12.11", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz", @@ -465,30 +474,63 @@ "spdx-license-ids": "^3.0.0" } }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", + "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.10.0", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.10.0.tgz", + "integrity": "sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA==", + "dev": true, + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, "node_modules/@eslint/eslintrc": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.3.tgz", - "integrity": "sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw==", + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", "dev": true, "dependencies": { "ajv": "^6.12.4", - "debug": "^4.1.1", - "espree": "^7.3.0", - "globals": "^13.9.0", - "ignore": "^4.0.6", + "debug": "^4.3.2", + "espree": "^9.6.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", "import-fresh": "^3.2.1", - "js-yaml": "^3.13.1", - "minimatch": "^3.0.4", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", "strip-json-comments": "^3.1.1" }, "engines": { - "node": "^10.12.0 || >=12.0.0" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": 
"https://opencollective.com/eslint" } }, + "node_modules/@eslint/eslintrc/node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, "node_modules/@eslint/eslintrc/node_modules/debug": { - "version": "4.3.3", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", - "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", "dev": true, "dependencies": { "ms": "2.1.2" @@ -503,9 +545,9 @@ } }, "node_modules/@eslint/eslintrc/node_modules/globals": { - "version": "13.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.12.0.tgz", - "integrity": "sha512-uS8X6lSKN2JumVoXrbUz+uG4BYG+eiawqm3qFcT7ammfbUHeCBoJMlHcec/S3krSk73/AE/f0szYFmgAA3kYZg==", + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", "dev": true, "dependencies": { "type-fest": "^0.20.2" @@ -517,6 +559,30 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/@eslint/eslintrc/node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/@eslint/eslintrc/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/@eslint/eslintrc/node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -547,24 +613,33 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/@eslint/js": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.56.0.tgz", + "integrity": "sha512-gMsVel9D7f2HLkBma9VbtzZRehRogVRfbr++f06nL2vnCGCNlzOD+/MUov/F4p8myyAHspEhVobgjpX64q5m6A==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, "node_modules/@humanwhocodes/config-array": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz", - "integrity": "sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==", + "version": "0.11.14", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz", + "integrity": "sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==", "dev": true, "dependencies": { - "@humanwhocodes/object-schema": "^1.2.0", - "debug": "^4.1.1", - "minimatch": "^3.0.4" + "@humanwhocodes/object-schema": "^2.0.2", + "debug": "^4.3.1", + "minimatch": "^3.0.5" }, "engines": { "node": ">=10.10.0" } }, 
"node_modules/@humanwhocodes/config-array/node_modules/debug": { - "version": "4.3.3", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", - "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", "dev": true, "dependencies": { "ms": "2.1.2" @@ -578,16 +653,41 @@ } } }, + "node_modules/@humanwhocodes/config-array/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/@humanwhocodes/config-array/node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, "node_modules/@humanwhocodes/object-schema": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", - "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.2.tgz", + "integrity": "sha512-6EwiSjwWYP7pTckG6I5eyFANjPhmPjUX9JRLUSfNPC7FX7zK9gyZAfUEaECL6ALTpGX5AjnBq3C9XmVWPitNpw==", "dev": true }, "node_modules/@istanbuljs/load-nyc-config": { @@ -698,6 +798,41 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, "node_modules/@sinonjs/commons": { 
"version": "1.8.3", "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.3.tgz", @@ -739,6 +874,12 @@ "integrity": "sha512-sL/cEvJWAnClXw0wHk85/2L0G6Sj8UB0Ctc1TEMbKSsmpRosqhwj9gWgFRZSrBr2f9tiXISwNhCPmlfqUqyb9Q==", "dev": true }, + "node_modules/@ungap/structured-clone": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", + "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", + "dev": true + }, "node_modules/@yarnpkg/lockfile": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz", @@ -770,9 +911,9 @@ } }, "node_modules/acorn": { - "version": "7.4.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", - "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "version": "8.11.3", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz", + "integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==", "dev": true, "bin": { "acorn": "bin/acorn" @@ -1009,15 +1150,6 @@ "node": ">=0.10.0" } }, - "node_modules/astral-regex": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", - "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, "node_modules/async": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/async/-/async-3.2.4.tgz", @@ -2460,18 +2592,6 @@ "once": "^1.4.0" } }, - "node_modules/enquirer": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", - "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", - "dev": true, - "dependencies": { - "ansi-colors": "^4.1.1" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/entities": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz", @@ -2569,106 +2689,86 @@ } }, "node_modules/eslint": { - "version": "7.32.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.32.0.tgz", - "integrity": "sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA==", - "dev": true, - "dependencies": { - "@babel/code-frame": "7.12.11", - "@eslint/eslintrc": "^0.4.3", - "@humanwhocodes/config-array": "^0.5.0", - "ajv": "^6.10.0", + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.56.0.tgz", + "integrity": "sha512-Go19xM6T9puCOWntie1/P997aXxFsOi37JIHRWI514Hc6ZnaHGKY9xFhrU65RT6CcBEzZoGG1e6Nq+DT04ZtZQ==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.56.0", + "@humanwhocodes/config-array": "^0.11.13", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", - "debug": "^4.0.1", + "debug": "^4.3.2", "doctrine": "^3.0.0", - "enquirer": "^2.3.5", "escape-string-regexp": "^4.0.0", - "eslint-scope": "^5.1.1", - "eslint-utils": "^2.1.0", - "eslint-visitor-keys": "^2.0.0", - "espree": "^7.3.1", - "esquery": "^1.4.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": 
"^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^6.0.1", - "functional-red-black-tree": "^1.0.1", - "glob-parent": "^5.1.2", - "globals": "^13.6.0", - "ignore": "^4.0.6", - "import-fresh": "^3.0.0", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "graphemer": "^1.4.0", + "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", - "js-yaml": "^3.13.1", + "is-path-inside": "^3.0.3", + "js-yaml": "^4.1.0", "json-stable-stringify-without-jsonify": "^1.0.1", "levn": "^0.4.1", "lodash.merge": "^4.6.2", - "minimatch": "^3.0.4", + "minimatch": "^3.1.2", "natural-compare": "^1.4.0", - "optionator": "^0.9.1", - "progress": "^2.0.0", - "regexpp": "^3.1.0", - "semver": "^7.2.1", - "strip-ansi": "^6.0.0", - "strip-json-comments": "^3.1.0", - "table": "^6.0.9", - "text-table": "^0.2.0", - "v8-compile-cache": "^2.0.3" + "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", + "text-table": "^0.2.0" }, "bin": { "eslint": "bin/eslint.js" }, "engines": { - "node": "^10.12.0 || >=12.0.0" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, "funding": { "url": "https://opencollective.com/eslint" } }, "node_modules/eslint-scope": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", - "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", "dev": true, "dependencies": { "esrecurse": "^4.3.0", - "estraverse": "^4.1.1" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/eslint-utils": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", - "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", - "dev": true, - "dependencies": { - "eslint-visitor-keys": "^1.1.0" + "estraverse": "^5.2.0" }, "engines": { - "node": ">=6" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, "funding": { - "url": "https://github.com/sponsors/mysticatea" - } - }, - "node_modules/eslint-utils/node_modules/eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", - "dev": true, - "engines": { - "node": ">=4" + "url": "https://opencollective.com/eslint" } }, "node_modules/eslint-visitor-keys": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", - "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", "dev": true, "engines": { - "node": ">=10" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, "node_modules/eslint/node_modules/ansi-regex": { @@ -2695,6 +2795,12 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, + "node_modules/eslint/node_modules/argparse": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, "node_modules/eslint/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -2772,10 +2878,38 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/eslint/node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, "node_modules/eslint/node_modules/globals": { - "version": "13.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.12.0.tgz", - "integrity": "sha512-uS8X6lSKN2JumVoXrbUz+uG4BYG+eiawqm3qFcT7ammfbUHeCBoJMlHcec/S3krSk73/AE/f0szYFmgAA3kYZg==", + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", "dev": true, "dependencies": { "type-fest": "^0.20.2" @@ -2796,6 +2930,18 @@ "node": ">=8" } }, + "node_modules/eslint/node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, "node_modules/eslint/node_modules/levn": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", @@ -2809,6 +2955,33 @@ "node": ">= 0.8.0" } }, + "node_modules/eslint/node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/eslint/node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -2816,22 +2989,52 @@ "dev": true }, "node_modules/eslint/node_modules/optionator": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", - "integrity": 
"sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", + "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", "dev": true, "dependencies": { + "@aashutoshrathi/word-wrap": "^1.2.3", "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", - "type-check": "^0.4.0", - "word-wrap": "^1.2.3" + "type-check": "^0.4.0" }, "engines": { "node": ">= 0.8.0" } }, + "node_modules/eslint/node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint/node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/eslint/node_modules/path-key": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", @@ -2850,21 +3053,6 @@ "node": ">= 0.8.0" } }, - "node_modules/eslint/node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/eslint/node_modules/shebang-command": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", @@ -2898,18 +3086,6 @@ "node": ">=8" } }, - "node_modules/eslint/node_modules/strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", - "dev": true, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/eslint/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -2962,26 +3138,20 @@ } }, "node_modules/espree": { - "version": "7.3.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-7.3.1.tgz", - "integrity": "sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==", + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", "dev": true, "dependencies": { - "acorn": "^7.4.0", - "acorn-jsx": "^5.3.1", - "eslint-visitor-keys": "^1.3.0" + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" }, "engines": { - "node": "^10.12.0 || >=12.0.0" - } - }, - 
"node_modules/espree/node_modules/eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", - "dev": true, - "engines": { - "node": ">=4" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, "node_modules/esprima": { @@ -2997,9 +3167,9 @@ } }, "node_modules/esquery": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", - "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", + "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", "dev": true, "dependencies": { "estraverse": "^5.1.0" @@ -3008,15 +3178,6 @@ "node": ">=0.10" } }, - "node_modules/esquery/node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true, - "engines": { - "node": ">=4.0" - } - }, "node_modules/esrecurse": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", @@ -3029,7 +3190,7 @@ "node": ">=4.0" } }, - "node_modules/esrecurse/node_modules/estraverse": { + "node_modules/estraverse": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", @@ -3038,15 +3199,6 @@ "node": ">=4.0" } }, - "node_modules/estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", - "dev": true, - "engines": { - "node": ">=4.0" - } - }, "node_modules/esutils": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.2.tgz", @@ -3394,6 +3546,15 @@ "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", "dev": true }, + "node_modules/fastq": { + "version": "1.16.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.16.0.tgz", + "integrity": "sha512-ifCoaXsDrsdkWTtiNJX5uzHDsrck5TzfKKDcuFFTIrrc/BS076qgEIfoIy1VeZqViznfKiysPYTh/QeHtnIsYA==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, "node_modules/fd-slicer": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", @@ -4025,6 +4186,12 @@ "resolved": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz", "integrity": "sha1-TK+tdrxi8C+gObL5Tpo906ORpyU=" }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true + }, "node_modules/growl": { "version": "1.10.5", "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", @@ -4299,9 +4466,9 @@ "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==" }, "node_modules/ignore": { - "version": "4.0.6", - "resolved": 
"https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", - "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.0.tgz", + "integrity": "sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg==", "dev": true, "engines": { "node": ">= 4" @@ -4512,9 +4679,9 @@ } }, "node_modules/is-glob": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", - "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "dev": true, "dependencies": { "is-extglob": "^2.1.1" @@ -4543,6 +4710,15 @@ "integrity": "sha1-iVJojF7C/9awPsyF52ngKQMINHA=", "dev": true }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/is-plain-obj": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", @@ -5217,12 +5393,6 @@ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, - "node_modules/lodash.truncate": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz", - "integrity": "sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM=", - "dev": true - }, "node_modules/log-symbols": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.0.0.tgz", @@ -5311,24 +5481,6 @@ "integrity": "sha512-l9x0+1offnKKIzYVjyXU2SiwhXDLekRzKyhnbyldPHvC7BvLPVpdNUNR2KeMAiCN2D/kLNttZgQD5WjSxuBx3Q==", "dev": true }, - "node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/lru-cache/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/ltgt": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/ltgt/-/ltgt-2.2.1.tgz", @@ -6932,15 +7084,6 @@ "node": ">=8" } }, - "node_modules/progress": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", - "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", - "dev": true, - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/promise-retry": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-1.1.1.tgz", @@ -7033,6 +7176,26 @@ "node": ">=0.6" } }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": 
"sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, "node_modules/randombytes": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", @@ -7120,18 +7283,6 @@ "node": ">=0.10.0" } }, - "node_modules/regexpp": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", - "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", - "dev": true, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - } - }, "node_modules/release-zalgo": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/release-zalgo/-/release-zalgo-1.0.0.tgz", @@ -7253,15 +7404,6 @@ "node": ">=0.10.0" } }, - "node_modules/require-from-string": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", - "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/require-main-filename": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", @@ -7305,6 +7447,16 @@ "node": "*" } }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, "node_modules/rimraf": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.0.tgz", @@ -7316,6 +7468,29 @@ "rimraf": "bin.js" } }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, "node_modules/safe-buffer": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", @@ -7548,65 +7723,6 @@ "node": ">=6" } }, - "node_modules/slice-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", - "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.0.0", - "astral-regex": "^2.0.0", - "is-fullwidth-code-point": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/slice-ansi?sponsor=1" - } - }, - "node_modules/slice-ansi/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": 
"sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/slice-ansi/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/slice-ansi/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "node_modules/slice-ansi/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, - "engines": { - "node": ">=8" - } - }, "node_modules/snapdragon": { "version": "0.8.2", "resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz", @@ -8044,88 +8160,6 @@ "node": ">=4" } }, - "node_modules/table": { - "version": "6.7.5", - "resolved": "https://registry.npmjs.org/table/-/table-6.7.5.tgz", - "integrity": "sha512-LFNeryOqiQHqCVKzhkymKwt6ozeRhlm8IL1mE8rNUurkir4heF6PzMyRgaTa4tlyPTGGgXuvVOF/OLWiH09Lqw==", - "dev": true, - "dependencies": { - "ajv": "^8.0.1", - "lodash.truncate": "^4.4.2", - "slice-ansi": "^4.0.0", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/table/node_modules/ajv": { - "version": "8.8.2", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.8.2.tgz", - "integrity": "sha512-x9VuX+R/jcFj1DHo/fCp99esgGDWiHENrKxaCENuCxpoMCmAt/COCGVDwA7kleEpEzJjDnvh3yGoOuLu0Dtllw==", - "dev": true, - "dependencies": { - "fast-deep-equal": "^3.1.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/table/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/table/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/table/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "dev": true - }, - "node_modules/table/node_modules/string-width": { - "version": "4.2.3", - "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/table/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/tar": { "version": "4.4.19", "resolved": "https://registry.npmjs.org/tar/-/tar-4.4.19.tgz", @@ -8569,12 +8603,6 @@ "node": ">= 0.4.0" } }, - "node_modules/v8-compile-cache": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", - "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==", - "dev": true - }, "node_modules/validator": { "version": "13.11.0", "resolved": "https://registry.npmjs.org/validator/-/validator-13.11.0.tgz", @@ -8674,15 +8702,6 @@ "resolved": "https://registry.npmjs.org/async/-/async-1.0.0.tgz", "integrity": "sha1-+PwEyjoTeErenhZBr5hXjPvWR6k=" }, - "node_modules/word-wrap": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", - "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/wordwrap": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", @@ -9042,6 +9061,18 @@ "buffer-crc32": "~0.2.3", "fd-slicer": "~1.1.0" } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } } } } diff --git a/package.json b/package.json index 63c929bb..09a84d2e 100644 --- a/package.json +++ b/package.json @@ -80,7 +80,7 @@ "chai": "^4.2.0", "chai-as-promised": "^7.1.1", "chai-spies": "^1.0.0", - "eslint": "^7.5", + "eslint": "^8.56.0", "istanbul": "^0.4.5", "mocha": "^8.2.0", "nyc": "^15.0.0", From 66381c5d7a641c4d0c74b2580ae1cb0cc70def51 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lukas=20Spie=C3=9F?= Date: Thu, 18 Jan 2024 15:41:56 +0000 Subject: [PATCH 03/11] Update ESLint config --- .eslintrc.json | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.eslintrc.json b/.eslintrc.json index 144b7fd0..26ce2f0f 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -1,13 +1,12 @@ { "env": { "browser": false, - "es2021": true, + "es2023": true, "node": true, "mocha": true }, "extends": "eslint:recommended", "parserOptions": { - "ecmaVersion": 2020, "sourceType": "module" }, "rules": { From 169c27c773fc8b4a34b523dc91ab29622cc8f290 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lukas=20Spie=C3=9F?= Date: Fri, 26 Jan 2024 17:00:33 +0100 Subject: [PATCH 04/11] Use ESLint's new flat config file --- .eslintrc.json | 17 ----------------- eslint.config.js | 24 ++++++++++++++++++++++++ 2 files changed, 24 insertions(+), 17 deletions(-) 
delete mode 100644 .eslintrc.json create mode 100644 eslint.config.js diff --git a/.eslintrc.json b/.eslintrc.json deleted file mode 100644 index 26ce2f0f..00000000 --- a/.eslintrc.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "env": { - "browser": false, - "es2023": true, - "node": true, - "mocha": true - }, - "extends": "eslint:recommended", - "parserOptions": { - "sourceType": "module" - }, - "rules": { - "quotes": ["error", "single"], - "semi": ["error", "never"], - "no-console": 0 - } -} diff --git a/eslint.config.js b/eslint.config.js new file mode 100644 index 00000000..ff3291eb --- /dev/null +++ b/eslint.config.js @@ -0,0 +1,24 @@ +const js = require('@eslint/js') +const globals = require('globals') +const eslintConfigPrettier = require('eslint-config-prettier') + +module.exports = [ + js.configs.recommended, + { + languageOptions: { + globals: { + ...globals.node, + ...globals.mocha + }, + parserOptions: { + sourceType: 'module' + } + }, + rules: { + quotes: ['error', 'single'], + semi: ['error', 'never'], + 'no-console': 'off' + } + }, + eslintConfigPrettier +] From 2c17da9bb24486d0985cb4acf04343cf78d4c970 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lukas=20Spie=C3=9F?= Date: Fri, 26 Jan 2024 17:03:45 +0100 Subject: [PATCH 05/11] Revert "Remove unused .prettierrc" This reverts commit a0fc32925bf631ec8611b709c7c2513d02f71fd7. --- .prettierrc | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .prettierrc diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 00000000..0cf184f6 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,3 @@ +printWidth: 120 +singleQuote: true +semi: false From 13fa2fd587d52643d5e260b973d9628253cc5785 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lukas=20Spie=C3=9F?= Date: Fri, 26 Jan 2024 17:07:25 +0100 Subject: [PATCH 06/11] Use JSON format for prettierrc --- .prettierrc | 3 --- .prettierrc.json | 5 +++++ 2 files changed, 5 insertions(+), 3 deletions(-) delete mode 100644 .prettierrc create mode 100644 .prettierrc.json diff --git a/.prettierrc b/.prettierrc deleted file mode 100644 index 0cf184f6..00000000 --- a/.prettierrc +++ /dev/null @@ -1,3 +0,0 @@ -printWidth: 120 -singleQuote: true -semi: false diff --git a/.prettierrc.json b/.prettierrc.json new file mode 100644 index 00000000..b3977820 --- /dev/null +++ b/.prettierrc.json @@ -0,0 +1,5 @@ +{ + "printWidth": 120, + "singleQuote": true, + "semi": false +} From c11038d298d06016aab4b5c5aee44ec49ff2446b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lukas=20Spie=C3=9F?= Date: Mon, 29 Jan 2024 13:30:04 +0100 Subject: [PATCH 07/11] Install prettier and update npm scripts --- package-lock.json | 29 +++++++++++++++++++++++++++++ package.json | 10 +++++++++- 2 files changed, 38 insertions(+), 1 deletion(-) diff --git a/package-lock.json b/package-lock.json index 66ff351c..a761351d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -63,9 +63,11 @@ "chai-as-promised": "^7.1.1", "chai-spies": "^1.0.0", "eslint": "^8.56.0", + "eslint-config-prettier": "^9.1.0", "istanbul": "^0.4.5", "mocha": "^8.2.0", "nyc": "^15.0.0", + "prettier": "3.2.4", "proxyquire": "^2.1.3", "sinon": "^5.0.0" } @@ -2743,6 +2745,18 @@ "url": "https://opencollective.com/eslint" } }, + "node_modules/eslint-config-prettier": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.1.0.tgz", + "integrity": "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==", + "dev": true, + "bin": { + "eslint-config-prettier": "bin/cli.js" + }, + 
"peerDependencies": { + "eslint": ">=7.0.0" + } + }, "node_modules/eslint-scope": { "version": "7.2.2", "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", @@ -7067,6 +7081,21 @@ "node": ">= 0.8.0" } }, + "node_modules/prettier": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.2.4.tgz", + "integrity": "sha512-FWu1oLHKCrtpO1ypU6J0SbK2d9Ckwysq6bHj/uaCP26DxrPpppCLQRGVuqAxSTvhF00AcvDRyYrLNW7ocBhFFQ==", + "dev": true, + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, "node_modules/process-nextick-args": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz", diff --git a/package.json b/package.json index 09a84d2e..fa90decf 100644 --- a/package.json +++ b/package.json @@ -5,10 +5,16 @@ "main": "./index.js", "scripts": { "start": "node ./index.js", - "test": "nyc mocha \"test/unit/**/*.js\" --timeout 20000 && eslint .", + "test": "npm run mocha && npm run lint", + "mocha": "nyc mocha \"test/unit/**/*.js\"", "local": "node --inspect-brk=0.0.0.0:9229 ./index.js", "integration": "mocha \"test/integration/**/*.js\" --timeout 20000", + "lint": "npm run prettier:check && npm run eslint", + "lint:fix": "npm run prettier:write && npm run eslint:fix", "eslint": "eslint .", + "eslint:fix": "eslint . --fix", + "prettier:check": "prettier . --check", + "prettier:write": "prettier . --write", "postinstall": "patch-package" }, "keywords": [ @@ -81,9 +87,11 @@ "chai-as-promised": "^7.1.1", "chai-spies": "^1.0.0", "eslint": "^8.56.0", + "eslint-config-prettier": "^9.1.0", "istanbul": "^0.4.5", "mocha": "^8.2.0", "nyc": "^15.0.0", + "prettier": "3.2.4", "proxyquire": "^2.1.3", "sinon": "^5.0.0" } From ad0355669b5df6783e933fdebd47bcb08a67457f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lukas=20Spie=C3=9F?= Date: Mon, 29 Jan 2024 13:31:18 +0100 Subject: [PATCH 08/11] Add Prettier and ESLint extensions for VSCode to recommendations --- .vscode/extensions.json | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .vscode/extensions.json diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 00000000..d7df89c9 --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,3 @@ +{ + "recommendations": ["esbenp.prettier-vscode", "dbaeumer.vscode-eslint"] +} From 1faa1dd2d960351194d4c5157149c2de5c396b8f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lukas=20Spie=C3=9F?= Date: Mon, 29 Jan 2024 15:10:04 +0100 Subject: [PATCH 09/11] Fix all formatting issues --- .vscode/settings.json | 4 +- CODE_OF_CONDUCT.md | 20 +- CONTRIBUTING.md | 2 +- README.md | 13 +- config/cdConfig.js | 40 +- config/cdMemoryConfig.js | 16 +- config/map.js | 32 +- dev-scripts/README.md | 2 +- docs/rampup.md | 66 +-- eslint.config.js | 12 +- ghcrawler/app.js | 6 +- ghcrawler/bin/www.js | 12 +- ghcrawler/crawlerFactory.js | 28 +- ghcrawler/index.js | 2 +- ghcrawler/lib/crawler.js | 43 +- ghcrawler/lib/crawlerService.js | 21 +- ghcrawler/lib/request.js | 4 +- ghcrawler/lib/traversalPolicy.js | 9 +- ghcrawler/memoryConfig.js | 14 +- ghcrawler/middleware/asyncMiddleware.js | 2 +- ghcrawler/middleware/sendHelper.js | 6 +- ghcrawler/providers/index.js | 8 +- .../providers/queuing/attenuatedQueue.js | 19 +- .../providers/queuing/inmemorycrawlqueue.js | 4 +- ghcrawler/providers/queuing/memoryFactory.js | 4 +- ghcrawler/providers/queuing/queueSet.js | 12 +- 
.../providers/queuing/scopedQueueSets.js | 46 +-- .../providers/queuing/storageBackedQueue.js | 22 +- ghcrawler/providers/queuing/storageQueue.js | 39 +- .../providers/queuing/storageQueueFactory.js | 4 +- .../providers/queuing/storageQueueManager.js | 2 +- .../providers/storage/azureBlobFactory.js | 2 +- ghcrawler/providers/storage/file.js | 10 +- .../providers/storage/inmemoryDocStore.js | 8 +- .../providers/storage/storageDocStore.js | 24 +- ghcrawler/routes/requests.js | 4 +- index.js | 2 +- lib/baseHandler.js | 8 +- lib/entitySpec.js | 14 +- lib/fetchResult.js | 11 +- lib/memoryCache.js | 2 +- lib/sourceDiscovery.js | 8 +- lib/utils.js | 45 +- providers/fetch/abstractFetch.js | 12 +- providers/fetch/condaFetch.js | 109 +++-- providers/fetch/cratesioFetch.js | 14 +- providers/fetch/debianFetch.js | 73 ++-- providers/fetch/dispatcher.js | 16 +- providers/fetch/gitCloner.js | 12 +- providers/fetch/goFetch.js | 25 +- providers/fetch/gradlePluginFetch.js | 13 +- providers/fetch/mavenBasedFetch.js | 387 +++++++++--------- providers/fetch/mavenGoogleFetch.js | 11 +- providers/fetch/mavencentralFetch.js | 12 +- providers/fetch/npmjsFetch.js | 6 +- providers/fetch/nugetFetch.js | 24 +- providers/fetch/packagistFetch.js | 19 +- providers/fetch/podFetch.js | 25 +- providers/fetch/pypiFetch.js | 10 +- providers/fetch/requestRetryWithDefaults.js | 2 +- providers/fetch/rubyGemsFetch.js | 10 +- providers/filter/filter.js | 2 +- providers/index.js | 10 +- providers/logging/insights.js | 9 +- providers/logging/logger.js | 2 +- .../abstractClearlyDefinedProcessor.js | 12 +- providers/process/abstractProcessor.js | 32 +- providers/process/component.js | 2 +- providers/process/composerExtract.js | 4 +- providers/process/condaExtract.js | 10 +- providers/process/condaSrcExtract.js | 2 +- providers/process/crateExtract.js | 2 +- providers/process/debExtract.js | 9 +- providers/process/debsrcExtract.js | 9 +- providers/process/fossology.js | 30 +- providers/process/fsfeReuse.js | 49 ++- providers/process/gemExtract.js | 2 +- providers/process/goExtract.js | 2 +- providers/process/licensee.js | 26 +- providers/process/mavenExtract.js | 4 +- providers/process/npmExtract.js | 4 +- providers/process/nugetExtract.js | 2 +- providers/process/package.js | 2 +- providers/process/podExtract.js | 2 +- providers/process/pypiExtract.js | 4 +- providers/process/scancode.js | 22 +- providers/process/source.js | 2 +- providers/process/sourceExtract.js | 2 +- providers/process/top.js | 71 ++-- providers/store/attachmentStore.js | 12 +- providers/store/attachmentStoreFactory.js | 4 +- providers/store/azureQueueStore.js | 2 +- providers/store/storeDispatcher.js | 16 +- providers/store/webhookDeltaStore.js | 6 +- template.env.json | 2 +- test/fixtures/conda/channeldata.json | 2 +- test/fixtures/conda/repodata.json | 2 +- test/fixtures/crates/bitflags.json | 26 +- test/fixtures/go/license.html | 10 +- test/fixtures/packagist/registryData.json | 19 +- test/fixtures/pod/registryData.json | 2 +- test/fixtures/pod/versions.json | 2 +- test/fixtures/pypi/registryData.json | 2 +- .../fixtures/pypi/registryData_dnspython.json | 2 +- test/fixtures/pypi/registryData_lgpl2.json | 2 +- test/unit/ghcrawler/crawlerFactoryTest.js | 6 +- test/unit/ghcrawler/lib/traversalPolicy.js | 2 +- test/unit/ghcrawler/queueSetTests.js | 27 +- test/unit/ghcrawler/requestTests.js | 2 +- test/unit/lib/entitySpecTests.js | 6 +- test/unit/lib/fetchResultTests.js | 5 +- test/unit/lib/memoryCacheTest.js | 4 +- test/unit/lib/sourceSpecTests.js | 4 +- 
test/unit/lib/utilsTests.js | 19 +- test/unit/providers/fetch/condaFetchTests.js | 48 +-- .../providers/fetch/cratesioFetchTests.js | 20 +- test/unit/providers/fetch/debianFetchTests.js | 28 +- test/unit/providers/fetch/dispatcherTests.js | 88 ++-- test/unit/providers/fetch/gitClonerTests.js | 16 +- test/unit/providers/fetch/goFetchTests.js | 83 ++-- .../providers/fetch/gradlePluginFetchTests.js | 33 +- .../providers/fetch/mavenBasedFetchTests.js | 2 +- .../providers/fetch/mavencentralFetchTests.js | 62 +-- .../providers/fetch/mavengoogleFetchTests.js | 60 +-- test/unit/providers/fetch/npmjsFetchTests.js | 10 +- test/unit/providers/fetch/nugetFetchTests.js | 10 +- .../providers/fetch/packagistFetchTests.js | 10 +- test/unit/providers/fetch/podFetchTests.js | 11 +- test/unit/providers/fetch/pypiFetchTests.js | 18 +- .../providers/fetch/rubyGemsFetchTests.js | 9 +- .../abstractClearylDefinedProcessorTests.js | 10 +- .../process/abstractProcessorTests.js | 22 +- .../providers/process/composerExtractTests.js | 28 +- .../providers/process/condaExtractTests.js | 16 +- .../providers/process/crateExtractTests.js | 36 +- .../unit/providers/process/debExtractTests.js | 10 +- test/unit/providers/process/fsfeReuseTests.js | 21 +- .../unit/providers/process/gemExtractTests.js | 4 +- test/unit/providers/process/goExtractTests.js | 20 +- test/unit/providers/process/licenseeTests.js | 14 +- .../providers/process/mavenExtractTests.js | 6 +- .../unit/providers/process/npmExtractTests.js | 26 +- .../providers/process/nugetExtractTests.js | 36 +- .../providers/process/pypiExtractTests.js | 2 +- test/unit/providers/process/scancodeTests.js | 24 +- test/unit/providers/process/sourceTests.js | 8 +- .../providers/queuing/scopedQueueSetsTests.js | 21 +- .../queuing/storageBackedQueueTest.js | 19 +- .../providers/store/attachmentStoreTests.js | 7 +- 149 files changed, 1451 insertions(+), 1334 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index 89dc9e86..8e282b70 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,10 +1,8 @@ // Place your settings in this file to overwrite default and user settings. 
{ - "jshint.options": { - "esnext": true - }, "editor.folding": false, "editor.tabSize": 2, + "editor.defaultFormatter": "esbenp.prettier-vscode", "editor.detectIndentation": false, "editor.formatOnSave": false, "editor.formatOnType": true, diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index da960baa..c4b57fba 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -8,19 +8,19 @@ In the interest of fostering an open and welcoming environment, we as contributo Examples of behavior that contributes to creating a positive environment include: -* Using welcoming and inclusive language -* Being respectful of differing viewpoints and experiences -* Gracefully accepting constructive criticism -* Focusing on what is best for the community -* Showing empathy towards other community members +- Using welcoming and inclusive language +- Being respectful of differing viewpoints and experiences +- Gracefully accepting constructive criticism +- Focusing on what is best for the community +- Showing empathy towards other community members Examples of unacceptable behavior by participants include: -* The use of sexualized language or imagery and unwelcome sexual attention or advances -* Trolling, insulting/derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or electronic address, without explicit permission -* Other conduct which could reasonably be considered inappropriate in a professional setting +- The use of sexualized language or imagery and unwelcome sexual attention or advances +- Trolling, insulting/derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or electronic address, without explicit permission +- Other conduct which could reasonably be considered inappropriate in a professional setting ## Our Responsibilities diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b9887a88..c9bea753 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -5,7 +5,7 @@ The ClearlyDefined project welcomes your suggestions and contributions! Before o ## Contribution Licensing -Most of our code is distributed under the terms of the [MIT license](LICENSE), and when you contribute code that you wrote to our repositories, +Most of our code is distributed under the terms of the [MIT license](LICENSE), and when you contribute code that you wrote to our repositories, you agree that you are contributing under those same terms. In addition, by submitting your contributions you are indicating that you have the right to submit those contributions under those terms. diff --git a/README.md b/README.md index 8d7a0f5c..793bedb9 100755 --- a/README.md +++ b/README.md @@ -81,16 +81,20 @@ Process the source, if any: The crawler's output is stored for use by the rest of the ClearlyDefined infrastructure -- it is not intended to be used directly by humans. Note that each tool's output is stored separately and the results of processing the component and the component source are also separated. ### More on `type` + The `type` in the request object typically corresponds to an internal processor in CD. -1. `component` is the most generic type. Internally, it is converted to a `package` or `source` request by the component processor. -2. `package` request is processed by the package processor and is further converted to a request with a specific type (`crate`, `deb`, `gem`, `go`, `maven`, `npm`, `nuget`, `composer`, `pod`, `pypi`). 
For a `package` typed request, if the mentioned specific binary package type is known, the specific type (e.g. `npm`) can be used (instead of `package`) in the harvest request and skip the conversion step. For example,
+
+1. `component` is the most generic type. Internally, it is converted to a `package` or `source` request by the component processor.
+2. `package` request is processed by the package processor and is further converted to a request with a specific type (`crate`, `deb`, `gem`, `go`, `maven`, `npm`, `nuget`, `composer`, `pod`, `pypi`). For a `package` typed request, if the specific binary package type is already known, the specific type (e.g. `npm`) can be used (instead of `package`) in the harvest request to skip the conversion step. For example,
+
 ```json
 {
   "type": "npm",
   "url": "cd:/npm/npmjs/-/redie/0.3.0"
 }
 ```
-3. `source` requests are processed by the source processor, which subsequently dispatches a `clearlydefined` typed request for the supported source types and other requests (one for each scanning tool). These are the more advanced scenarios where the request type and the coordinate type differ.
+
+3. `source` requests are processed by the source processor, which subsequently dispatches a `clearlydefined` typed request for the supported source types and other requests (one for each scanning tool). These are the more advanced scenarios where the request type and the coordinate type differ.
 
 # Configuration
 
@@ -238,7 +242,8 @@ Make sure you started the container with the 5000 port forwarded for this to wor
     -X POST \
     http://crawler:5000/requests
 
-    On windows:
+On Windows:
+
     curl -d "{\"type\":\"npm\", \"url\":\"cd:/npm/npmjs/-/redie/0.3.0\"}" -H "Content-Type: application/json" -H "X-token: secret" -X POST http://localhost:5000/requests
 
 Expose dashboard port:
diff --git a/config/cdConfig.js b/config/cdConfig.js
index 31869b54..7af14e0c 100644
--- a/config/cdConfig.js
+++ b/config/cdConfig.js
@@ -5,13 +5,13 @@ const config = require('painless-config')
 
 const cd_azblob = {
   connection: config.get('CRAWLER_AZBLOB_CONNECTION_STRING'),
-  container: config.get('CRAWLER_AZBLOB_CONTAINER_NAME')
+  container: config.get('CRAWLER_AZBLOB_CONTAINER_NAME'),
 }
 const githubToken = config.get('CRAWLER_GITHUB_TOKEN')
 const cd_file = {
-  location: config.get('FILE_STORE_LOCATION') || (process.platform === 'win32' ?
'c:/temp/cd' : '/tmp/cd'), } const crawlerStoreProvider = config.get('CRAWLER_STORE_PROVIDER') || 'cd(file)' const maxRequeueAttemptCount = config.get('CRAWLER_MAX_REQUEUE_ATTEMPTS') || 5 @@ -22,20 +22,20 @@ module.exports = { searchPath: [module], crawler: { count: 2, - maxRequeueAttemptCount + maxRequeueAttemptCount, }, filter: { provider: 'filter', - filter: {} + filter: {}, }, fetch: { dispatcher: 'cdDispatch', cdDispatch: { - fetched: { defaultTtlSeconds: fetchedCacheTtlSeconds } + fetched: { defaultTtlSeconds: fetchedCacheTtlSeconds }, }, cocoapods: { githubToken }, conda: { - cdFileLocation: cd_file.location + cdFileLocation: cd_file.location, }, cratesio: {}, debian: { cdFileLocation: cd_file.location }, @@ -48,7 +48,7 @@ module.exports = { nuget: {}, packagist: {}, pypi: {}, - rubygems: {} + rubygems: {}, }, process: { cdsource: {}, @@ -60,7 +60,7 @@ module.exports = { debsrc: {}, fossology: { disabled: true, - installDir: config.get('FOSSOLOGY_HOME') || '/mnt/c/git/fo/fossology/src/' + installDir: config.get('FOSSOLOGY_HOME') || '/mnt/c/git/fo/fossology/src/', }, gem: { githubToken }, go: { githubToken }, @@ -90,39 +90,39 @@ module.exports = { '--classify', '--generated', '--summary', - '--summary-key-files' + '--summary-key-files', // '--quiet' ], timeout: 1000, processes: 2, - format: '--json-pp' + format: '--json-pp', }, source: {}, - top: { githubToken } + top: { githubToken }, }, store: { dispatcher: crawlerStoreProvider, cdDispatch: {}, webhook: { url: config.get('CRAWLER_WEBHOOK_URL') || 'http://localhost:4000/webhook', - token: config.get('CRAWLER_WEBHOOK_TOKEN') + token: config.get('CRAWLER_WEBHOOK_TOKEN'), }, azqueue: { connectionString: cd_azblob.connection, - queueName: config.get('CRAWLER_HARVESTS_QUEUE_NAME') || 'harvests' + queueName: config.get('CRAWLER_HARVESTS_QUEUE_NAME') || 'harvests', }, 'cd(azblob)': cd_azblob, - 'cd(file)': cd_file + 'cd(file)': cd_file, }, deadletter: { provider: config.get('CRAWLER_DEADLETTER_PROVIDER') || crawlerStoreProvider, 'cd(azblob)': cd_azblob, - 'cd(file)': cd_file + 'cd(file)': cd_file, }, queue: { provider: config.get('CRAWLER_QUEUE_PROVIDER') || 'memory', memory: { - weights: { immediate: 3, soon: 2, normal: 3, later: 2 } + weights: { immediate: 3, soon: 2, normal: 3, later: 2 }, }, storageQueue: { weights: { immediate: 3, soon: 2, normal: 3, later: 2 }, @@ -132,8 +132,8 @@ module.exports = { visibilityTimeout_remainLocal: fetchedCacheTtlSeconds, maxDequeueCount: 5, attenuation: { - ttl: 3000 - } - } - } + ttl: 3000, + }, + }, + }, } diff --git a/config/cdMemoryConfig.js b/config/cdMemoryConfig.js index 7c11c4ff..03a58b1d 100644 --- a/config/cdMemoryConfig.js +++ b/config/cdMemoryConfig.js @@ -4,26 +4,26 @@ module.exports = { crawler: { count: 1, - maxRequeueAttemptCount: 5 + maxRequeueAttemptCount: 5, }, fetch: { - github: {} + github: {}, }, process: { scancode: {}, licensee: {}, - reuse: {} + reuse: {}, }, store: { - provider: 'memory' + provider: 'memory', }, deadletter: { - provider: 'memory' + provider: 'memory', }, queue: { provider: 'memory', memory: { - weights: { events: 10, immediate: 3, soon: 2, normal: 3, later: 2 } - } - } + weights: { events: 10, immediate: 3, soon: 2, normal: 3, later: 2 }, + }, + }, } diff --git a/config/map.js b/config/map.js index 63e7a312..5f11552a 100644 --- a/config/map.js +++ b/config/map.js @@ -22,7 +22,7 @@ const source = { licensee, reuse, scancode, - fossology + fossology, } const npm = { @@ -32,7 +32,7 @@ const npm = { licensee, reuse, scancode, - fossology + fossology, } const 
conda = {
@@ -42,7 +42,7 @@ const conda = {
   licensee,
   reuse,
   scancode,
-  fossology
+  fossology,
 }
 
 const crate = {
@@ -52,7 +52,7 @@ const crate = {
   licensee,
   reuse,
   scancode,
-  fossology
+  fossology,
 }
 
 const deb = {
@@ -62,7 +62,7 @@ const deb = {
   licensee,
   reuse,
   scancode,
-  fossology
+  fossology,
 }
 
 const go = {
@@ -72,7 +72,7 @@ const go = {
   licensee,
   reuse,
   scancode,
-  fossology
+  fossology,
 }
 
 const maven = {
@@ -82,7 +82,7 @@ const maven = {
   licensee,
   reuse,
   scancode,
-  fossology
+  fossology,
 }
 
 const nuget = {
@@ -91,7 +91,7 @@ const nuget = {
   clearlydefined,
   licensee,
   scancode,
-  reuse
+  reuse,
 }
 
 const pod = {
@@ -101,7 +101,7 @@ const pod = {
   licensee,
   reuse,
   scancode,
-  fossology
+  fossology,
 }
 
 const pypi = {
@@ -111,7 +111,7 @@ const pypi = {
   licensee,
   reuse,
   scancode,
-  fossology
+  fossology,
 }
 
 const composer = {
@@ -121,7 +121,7 @@ const composer = {
   licensee,
   reuse,
   scancode,
-  fossology
+  fossology,
 }
 
 const gem = {
@@ -131,7 +131,7 @@ const gem = {
   licensee,
   reuse,
   scancode,
-  fossology
+  fossology,
 }
 
 const _package = {
@@ -146,13 +146,13 @@ const _package = {
   pod,
   pypi,
   composer,
-  gem
+  gem,
 }
 
 const component = {
   _type: 'component',
   source,
-  package: _package
+  package: _package,
 }
 
 const entities = {
@@ -176,9 +176,9 @@ const entities = {
   composer,
   pod,
   pypi,
-  gem
+  gem,
 }
 
 module.exports = {
-  default: entities
+  default: entities,
 }
diff --git a/dev-scripts/README.md b/dev-scripts/README.md
index 57053dd9..f730835a 100644
--- a/dev-scripts/README.md
+++ b/dev-scripts/README.md
@@ -10,4 +10,4 @@ run.foo -- Runs the docker container, killing a previous run if it exists. Hosts
 
 ### Extra:
 
-debug.foo -- Does everything run does, but also pauses execution until a debugger is attached. Attach using vscode's profile.
\ No newline at end of file
+debug.foo -- Does everything run does, but also pauses execution until a debugger is attached. Attach using vscode's profile.
diff --git a/docs/rampup.md b/docs/rampup.md
index 1d5b4acf..bdd9bad2 100644
--- a/docs/rampup.md
+++ b/docs/rampup.md
@@ -5,58 +5,61 @@ These are suggested steps / tips to get familiar with the codebase:
 
 - Two branches: master/prod correspond to dev/prod
 
 0. Clone the repo
-0. Run `npm install`
-0. `npm test` to run tests
-0. Try `npm audit fix` for a simple contribution
-0. Open a PR to master
-   - AzDo will run clearlydefined.crawler pipeline: npm install / npm test
-   - After merge, crawler-pipeline will run, builds and pushes to ACR
-   - Release step: deploys to cdcrawler-dev app service, restarts (dev crawler is still app service)
-0. After successful dev deploy, can merge and push to prod branch
-   - Prod build pipeline will build and push to Docker Hub, no actual deploymen.
+1. Run `npm install`
+2. `npm test` to run tests
+3. Try `npm audit fix` for a simple contribution
+4. Open a PR to master
+   - AzDo will run clearlydefined.crawler pipeline: npm install / npm test
+   - After merge, crawler-pipeline will run, builds and pushes to ACR
+   - Release step: deploys to cdcrawler-dev app service, restarts (dev crawler is still app service)
+5. After successful dev deploy, can merge and push to prod branch
+   - Prod build pipeline will build and push to Docker Hub, no actual deployment.
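Stepping back to the `config/map.js` hunks above: each entry in that file pairs a package type with the set of processors the crawler runs for it. A minimal sketch of one entry's shape, not the repo's actual file: the `_type` field is assumed from the fully visible `component` entry, and empty objects stand in for the tool nodes (`licensee`, `reuse`, `scancode`, `fossology`) defined earlier in the real file:

```js
// Sketch only: the shape of a config/map.js entry, with stand-in tool nodes.
const licensee = {} // license text detection
const reuse = {} // FSFE REUSE metadata scan
const scancode = {} // ScanCode toolkit scan
const fossology = {} // FOSSology scan

const npm = {
  _type: 'npm', // assumed; mirrors the `_type` field on the visible `component` entry
  licensee,
  reuse,
  scancode,
  fossology, // trailing comma per the Prettier style this PR adopts
}

module.exports = { default: { npm } }
```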
 ## Dockerfile
-- based on node
-- installs scancode/lincesee, installs Ruby (for licensee),
-- Sets all env vars
-- Npm install with production
-- Then starts
-
+- based on node
+- installs scancode/licensee, installs Ruby (for licensee),
+- Sets all env vars
+- Npm install with production
+- Then starts
 
 ## Deployment
+
 - Image is pushed to: https://hub.docker.com/r/clearlydefined/crawler
 - Webhooks in docker hub for donated crawler resources, signals them to re-pull cthe crawler Docker image
 - There are also donated crawler resources that don't have a webhook. These poll, monitor, or pull the image regularly.
 - In effect: once crawler is pushed, will be deployed “eventually consistent” not all at once. Some versions of the old crawler and new crawler will be running at the same time.
 
 [Tools repo: run.sh](https://github.com/clearlydefined/tools/blob/master/run.sh)
+
 - Can be used for VM based crawlers
-- Cron job that checks for new docker crawler image, if new image: restart crawlers
-- Hardcoded # of docker containers, based on vcpu, based on experimentation
-- Where doe secrets come from? Not sure, need to investigate
+- Cron job that checks for new docker crawler image, if new image: restart crawlers
+- Hardcoded # of docker containers, based on vcpu, based on experimentation
+- Where do secrets come from? Not sure, need to investigate
 
 ## Local dev
+
 - If you want to run locally, you’ll need to install scancode/licensee on your local machine with paths/etc. Easier to run docker image.
 - There is a linux Dockerfile to build a container, that is the target environment
 - Look at quick start in [README](/README.md#quick-start)
 - Template.env.json has minimal settings: file storage provider, memory incoming queue
 - “Queueing work with crawler”: instructions once crawler is running
-  - Could bring up service and crawler, and send harvest to service
-  - Easier to work with just crawler, example post message in readme
+  - Could bring up service and crawler, and send harvest to service
+  - Easier to work with just crawler, example post message in readme (see the sketch below)
 - See “Build and run docker image locally” in readme, need config file
 - Run docker build command
 - To get dev config: go to portal: cdcrawler-dev, Settings->Configuration
 - Uses docker’s “env-file”, key/value environment vars, different than env.json
-- From dev, change *crawler/harvest azblob_container_name, queue prefix, harvests, queue name, to be your own personal names
+- From dev, change \*crawler/harvest azblob_container_name, queue prefix, harvests, queue name, to be your own personal names
 - Crawler_service_auth_token: the token needed for the harvest queue curl command
 - When you use the curl command directly on the crawler, it puts a message on its own queue.
You could just copy an existing harvest message from the storage queue, and put on your own named harvest queue
 
 ## Code
+
 - Background:
-  - ghcrawler was used to crawl github and store data
-  - CD crawler pulled ghcrawler in as a dependency, was then forked/modified in an upstream branch
-  - Now just a directory: ghcrawler/ with unused upstream code removed, refactored
+  - ghcrawler was used to crawl github and store data
+  - CD crawler pulled ghcrawler in as a dependency, was then forked/modified in an upstream branch
+  - Now just a directory: ghcrawler/ with unused upstream code removed, refactored
 - Important directories: providers/, providers/fetch, providers/process
 - Map.js: maps package types to code
 - First queues is a “component” type, then either queues “package” and/or “source” type
@@ -69,15 +72,16 @@ This project uses two tools to monitor (and fix) vulnerabilities in this project
 
 ### Dependabot
 
-* [Dependabot](https://docs.github.com/en/free-pro-team@latest/github/managing-security-vulnerabilities/about-dependabot-security-updates) is a GitHub Security Feature. It tracks vulnerabilities in several languages including JavaScript.
-* When Dependabot detects any vulnerabilities in the [GitHub Advisory Database](https://docs.github.com/en/free-pro-team@latest/github/managing-security-vulnerabilities/browsing-security-vulnerabilities-in-the-github-advisory-database), it sends a notification and may also open a pull request to fix the vulnerability.
-* Only project maintainers can see Dependabot alerts
+- [Dependabot](https://docs.github.com/en/free-pro-team@latest/github/managing-security-vulnerabilities/about-dependabot-security-updates) is a GitHub Security Feature. It tracks vulnerabilities in several languages including JavaScript.
+- When Dependabot detects any vulnerabilities in the [GitHub Advisory Database](https://docs.github.com/en/free-pro-team@latest/github/managing-security-vulnerabilities/browsing-security-vulnerabilities-in-the-github-advisory-database), it sends a notification and may also open a pull request to fix the vulnerability.
+- Only project maintainers can see Dependabot alerts
 
 ### Snyk
 
-* [Synk Open Source](https://solutions.snyk.io/snyk-academy/open-source) is similar to Dependabot, though not GitHub specific. It also tracks vulnerabilities in dependencies.
-* When Synk detects a vulnerability in the [Synk Intel Vulnerability Database](https://snyk.io/product/vulnerability-database/), it also opens a pull request with a fix for the vulnerability.
-* Everyone can see pull requests opened by Snyk, but only members of the Clearly Defined organization on Snyk can see details of the vulnerability.
-* If you do not have access to the Clearly Defined Snyk organization, reach out to @nellshamrell
+
+- [Snyk Open Source](https://solutions.snyk.io/snyk-academy/open-source) is similar to Dependabot, though not GitHub specific. It also tracks vulnerabilities in dependencies.
+- When Snyk detects a vulnerability in the [Snyk Intel Vulnerability Database](https://snyk.io/product/vulnerability-database/), it also opens a pull request with a fix for the vulnerability.
+- Everyone can see pull requests opened by Snyk, but only members of the Clearly Defined organization on Snyk can see details of the vulnerability.
+- If you do not have access to the Clearly Defined Snyk organization, reach out to @nellshamrell
 
 ### Why both?
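The "example post message in readme" mentioned in the Local dev notes above can also be scripted instead of pasted as a curl command. A minimal sketch in Node.js (18+, for the global `fetch`), assuming the defaults shown in the README: crawler listening on localhost:5000 with the `secret` auth token:

```js
// Sketch: queue one harvest request against a locally running crawler.
// Assumes the README defaults; the crawler replies 201 when the request is queued.
async function queueHarvest(type, url) {
  const response = await fetch('http://localhost:5000/requests', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', 'X-token': 'secret' },
    body: JSON.stringify({ type, url }),
  })
  if (response.status !== 201) throw new Error(`Queueing failed: ${response.status}`)
}

queueHarvest('npm', 'cd:/npm/npmjs/-/redie/0.3.0').catch(console.error)
```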
diff --git a/eslint.config.js b/eslint.config.js index ff3291eb..d4e56395 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -8,17 +8,17 @@ module.exports = [ languageOptions: { globals: { ...globals.node, - ...globals.mocha + ...globals.mocha, }, parserOptions: { - sourceType: 'module' - } + sourceType: 'module', + }, }, rules: { quotes: ['error', 'single'], semi: ['error', 'never'], - 'no-console': 'off' - } + 'no-console': 'off', + }, }, - eslintConfigPrettier + eslintConfigPrettier, ] diff --git a/ghcrawler/app.js b/ghcrawler/app.js index 20af0261..e50982c6 100644 --- a/ghcrawler/app.js +++ b/ghcrawler/app.js @@ -9,7 +9,7 @@ const morgan = require('morgan') const sendHelper = require('./middleware/sendHelper') function configureApp(service, logger) { - process.on('unhandledRejection', exception => logger.error('unhandledRejection', exception)) + process.on('unhandledRejection', (exception) => logger.error('unhandledRejection', exception)) auth.initialize(config.get('CRAWLER_SERVICE_AUTH_TOKEN') || 'secret', config.get('CRAWLER_SERVICE_FORCE_AUTH')) const app = express() @@ -44,11 +44,11 @@ function configureApp(service, logger) { // call the callback but with no args. An arg indicates an error. callback() }, - error => { + (error) => { console.log(`Service initialization error: ${error.message}`) console.dir(error) callback(error) - } + }, ) } diff --git a/ghcrawler/bin/www.js b/ghcrawler/bin/www.js index f3018445..fe076836 100644 --- a/ghcrawler/bin/www.js +++ b/ghcrawler/bin/www.js @@ -25,7 +25,7 @@ function run(service, logger) { const server = http.createServer(app) // initialize the apps (if they have async init functions) and start listening - init(app, error => { + init(app, (error) => { if (error) { console.log('Error initializing the Express app: ' + error) throw new Error(error) @@ -100,14 +100,16 @@ function run(service, logger) { * Event listener for HTTP server 'close' event. 
*/ function onClose() { - service.stop() - .then(() => { + service.stop().then( + () => { console.log('Server closed.') process.exit(0) - }, error => { + }, + (error) => { console.error(`Closing server: ${error}`) process.exit(1) - }) + }, + ) } /** diff --git a/ghcrawler/crawlerFactory.js b/ghcrawler/crawlerFactory.js index f3608c9b..184ac01e 100644 --- a/ghcrawler/crawlerFactory.js +++ b/ghcrawler/crawlerFactory.js @@ -21,14 +21,14 @@ class CrawlerFactory { const optionsProvider = defaults.provider || 'memory' const crawlerName = (defaults.crawler && defaults.crawler.name) || 'crawler' - searchPath.forEach(entry => providerSearchPath.push(entry)) + searchPath.forEach((entry) => providerSearchPath.push(entry)) const subsystemNames = ['crawler', 'filter', 'fetch', 'process', 'queue', 'store', 'deadletter', 'lock'] const crawlerPromise = CrawlerFactory.createRefreshingOptions( crawlerName, subsystemNames, defaults, - optionsProvider - ).then(options => { + optionsProvider, + ).then((options) => { logger.info('created all refreshingOptions') finalOptions = options const crawler = CrawlerFactory.createCrawler(options) @@ -55,8 +55,8 @@ class CrawlerFactory { locker = null, filter = null, fetchers = null, - processors = null - } = {} + processors = null, + } = {}, ) { logger.info('creating crawler') queues = queues || CrawlerFactory.createQueues(options.queue) @@ -84,7 +84,7 @@ class CrawlerFactory { const result = {} refreshingProvider = refreshingProvider.toLowerCase() await Promise.all( - subsystemNames.map(subsystemName => { + subsystemNames.map((subsystemName) => { // Any given subsytem may have a provider or may be a list of providers. If a particular provider is // identified then hook up just that set of options for refreshing. logger.info(`creating refreshing options ${subsystemName} with provider ${refreshingProvider}`) @@ -97,7 +97,7 @@ class CrawlerFactory { } else { throw new Error(`Invalid refreshing provider setting ${refreshingProvider}`) } - return config.getAll().then(values => { + return config.getAll().then((values) => { logger.info(`got refreshingOption values for ${subsystemName}`) // grab the right defaults. May need to drill down a level if the subsystem has a provider const trueDefaults = subProvider ? 
subDefaults[subProvider] || {} : subDefaults @@ -109,7 +109,7 @@ class CrawlerFactory { else result[subsystemName] = values }) }) - }) + }), ) return result } @@ -119,9 +119,9 @@ class CrawlerFactory { return config } await Promise.all( - Object.getOwnPropertyNames(defaults).map(optionName => { + Object.getOwnPropertyNames(defaults).map((optionName) => { return config._config.set(optionName, defaults[optionName]) - }) + }), ) return config._config.getAll() } @@ -130,7 +130,7 @@ class CrawlerFactory { logger.info('creating in memory refreshing config') const configStore = new RefreshingConfig.InMemoryConfigStore(values) const config = new RefreshingConfig.RefreshingConfig(configStore).withExtension( - new RefreshingConfig.InMemoryPubSubRefreshPolicyAndChangePublisher() + new RefreshingConfig.InMemoryPubSubRefreshPolicyAndChangePublisher(), ) return config } @@ -164,8 +164,8 @@ class CrawlerFactory { static _getNamedProviders(options, namespace, names, ...params) { return names - .filter(key => !['_config', 'logger', 'dispatcher', options.dispatcher].includes(key)) - .map(name => CrawlerFactory._getProvider(options, name, namespace, ...params)) + .filter((key) => !['_config', 'logger', 'dispatcher', options.dispatcher].includes(key)) + .map((name) => CrawlerFactory._getProvider(options, name, namespace, ...params)) } static createFilter(options, processors) { @@ -203,7 +203,7 @@ class CrawlerFactory { } static createNolock() { - return { lock: () => null, unlock: () => { } } + return { lock: () => null, unlock: () => {} } } static createQueues(options, provider = options.provider) { diff --git a/ghcrawler/index.js b/ghcrawler/index.js index 765b3cb1..9248b727 100644 --- a/ghcrawler/index.js +++ b/ghcrawler/index.js @@ -17,6 +17,6 @@ const VisitorMap = require('./lib/visitorMap') module.exports.run = (defaults, logger, searchPath, maps) => { const service = CrawlerFactory.createService(defaults, logger, searchPath) - Object.getOwnPropertyNames(maps).forEach(name => VisitorMap.register(name, maps[name])) + Object.getOwnPropertyNames(maps).forEach((name) => VisitorMap.register(name, maps[name])) www(service, logger) } diff --git a/ghcrawler/lib/crawler.js b/ghcrawler/lib/crawler.js index 7086c1c8..6d922bb2 100644 --- a/ghcrawler/lib/crawler.js +++ b/ghcrawler/lib/crawler.js @@ -19,7 +19,7 @@ const defaultOptions = { processingTtl: 60 * 1000, promiseTrace: false, requeueDelay: 5000, - deadletterPolicy: 'always' // Another option: excludeNotFound + deadletterPolicy: 'always', // Another option: excludeNotFound } class Crawler { @@ -262,7 +262,7 @@ class Crawler { return request } - request.getTrackedCleanups().forEach(cleanup => { + request.getTrackedCleanups().forEach((cleanup) => { try { cleanup() } catch (error) { @@ -277,23 +277,25 @@ class Crawler { const originalPromise = trackedPromises[i] originalPromise.then( - result => { + (result) => { completedPromises++ debug( - `_completeRequest(${loopName}:${request.toUniqueString()}): completed ${completedPromises} of ${trackedPromises.length - } promises (${failedPromises} failed)` + `_completeRequest(${loopName}:${request.toUniqueString()}): completed ${completedPromises} of ${ + trackedPromises.length + } promises (${failedPromises} failed)`, ) return result }, - error => { + (error) => { completedPromises++ failedPromises++ debug( - `_completeRequest(${loopName}:${request.toUniqueString()}): completed ${completedPromises} of ${trackedPromises.length - } promises (${failedPromises} failed)` + 
`_completeRequest(${loopName}:${request.toUniqueString()}): completed ${completedPromises} of ${ + trackedPromises.length + } promises (${failedPromises} failed)`, ) throw error - } + }, ) } debug(`_completeRequest(${loopName}:${request.toUniqueString()}): ${trackedPromises.length} tracked promises`) @@ -304,25 +306,25 @@ class Crawler { () => { return self._deleteFromQueue(request) }, - error => { + (error) => { debug(`_completeRequest(${loopName}:${request.toUniqueString()}): catch release lock`) self.logger.error(error) return self._abandonInQueue(request) - } + }, ) }, - error => { + (error) => { debug(`_completeRequest(${loopName}:${request.toUniqueString()}): catch tracked promises`) self.logger.error(error) return self._completeRequest(request, true) - } + }, ) return completeWork .then(() => { debug(`_completeRequest(${loopName}:${request.toUniqueString()}): exit (success)`) return request }) - .catch(error => { + .catch((error) => { debug(`_completeRequest(${loopName}:${request.toUniqueString()}): catch completeWork`) throw error }) @@ -387,7 +389,7 @@ class Crawler { return request } return handler.handle(request) - }).then(request => { + }).then((request) => { debug(`_fetch(${loopName}:${request.toUniqueString()}): exit (success - fetched)`) return request }) @@ -420,7 +422,7 @@ class Crawler { type: request.type, url: request.url, fetchedAt: DateTime.utc().toISO(), - links: {} + links: {}, } if (request.response) { if (request.response.headers) { @@ -468,7 +470,7 @@ class Crawler { return request } return this._logStartEnd('processing', request, () => { - return this._process(request).then(result => { + return this._process(request).then((result) => { debug(`_processDocument(${loopName}:${request.toUniqueString()}): exit (success)`) return result }) @@ -512,7 +514,6 @@ class Crawler { request.outcome = request.outcome || 'Traversed' } return request - } async _logStartEnd(name, request, work) { @@ -548,7 +549,7 @@ class Crawler { const start = Date.now() const documentToStore = this._buildDocumentToStore(request.document) - return this.store.upsert(documentToStore).then(upsert => { + return this.store.upsert(documentToStore).then((upsert) => { request.upsert = upsert request.addMeta({ write: Date.now() - start }) debug(`_storeDocument(${loopName}:${request.toUniqueString()}): exit (success)`) @@ -619,7 +620,7 @@ class Crawler { debug(`storeDeadletter(${loopName}:${request.toUniqueString()}): enter`) if (this.options.deadletterPolicy === 'excludeNotFound' && reason && reason.toLowerCase().includes('status 404')) { this.logger.info( - `storeDeadletter(${loopName}:${request.toUniqueString()}): not storing due to configured deadletter policy` + `storeDeadletter(${loopName}:${request.toUniqueString()}): not storing due to configured deadletter policy`, ) return request } @@ -653,7 +654,7 @@ class Crawler { _preFilter(requests) { const list = Array.isArray(requests) ? 
requests : [requests] - return list.filter(request => { + return list.filter((request) => { if (!request.url || !request.type) { this._storeDeadletter(request, `Attempt to queue malformed request ${request.toString()}`) return false diff --git a/ghcrawler/lib/crawlerService.js b/ghcrawler/lib/crawlerService.js index fec2719d..c7e329d8 100644 --- a/ghcrawler/lib/crawlerService.js +++ b/ghcrawler/lib/crawlerService.js @@ -37,7 +37,7 @@ class CrawlerService { } async ensureLoops(targetCount = this.options.crawler.count) { - this.loops = this.loops.filter(loop => loop.running()) + this.loops = this.loops.filter((loop) => loop.running()) const running = this.status() const delta = targetCount - running if (delta < 0) { @@ -61,8 +61,7 @@ class CrawlerService { } stop() { - return this.ensureLoops(0) - .then(() => this.crawler.done()) + return this.ensureLoops(0).then(() => this.crawler.done()) } queues() { @@ -99,10 +98,10 @@ class CrawlerService { for (let i = 0; i < count; i++) { result.push(queue.pop()) } - return Promise.all(result).then(requests => { - const filtered = requests.filter(request => request) - return Promise.all(filtered.map(request => (remove ? queue.done(request) : queue.abandon(request)))).then( - filtered + return Promise.all(result).then((requests) => { + const filtered = requests.filter((request) => request) + return Promise.all(filtered.map((request) => (remove ? queue.done(request) : queue.abandon(request)))).then( + filtered, ) }) } @@ -122,7 +121,7 @@ class CrawlerService { requeueDeadletter(url, queue) { const self = this return this.getDeadletter(url) - .then(document => { + .then((document) => { const request = Request.adopt(document).createRequeuable() request.attemptCount = 0 return self.crawler.queues.push([request], queue) @@ -138,7 +137,7 @@ class CrawlerService { _reconfigure(current, changes) { // if the loop count changed, make it so - if (changes.some(patch => patch.path === '/count')) { + if (changes.some((patch) => patch.path === '/count')) { return this.options.crawler.count.value > 0 ? this.run() : this.stop() } return null @@ -163,8 +162,8 @@ class CrawlerLoop { } this.state = 'running' // Create callback that when run, resolves a promise and completes this loop - const donePromise = new Promise(resolve => { - this.done = value => resolve(value) + const donePromise = new Promise((resolve) => { + this.done = (value) => resolve(value) this.options.done = this.done }) donePromise.finally(() => { diff --git a/ghcrawler/lib/request.js b/ghcrawler/lib/request.js index 241143b0..307722ac 100644 --- a/ghcrawler/lib/request.js +++ b/ghcrawler/lib/request.js @@ -131,7 +131,7 @@ class Request { return this } const toRemove = Array.isArray(cleanups) ? cleanups : [cleanups] - this.cleanups = this.cleanups.filter(item => !toRemove.includes(item)) + this.cleanups = this.cleanups.filter((item) => !toRemove.includes(item)) return this } @@ -192,7 +192,7 @@ class Request { queueRequests(requests, name = null, scope = null) { requests = Array.isArray(requests) ? 
requests : [requests] - const toQueue = requests.filter(request => !this.hasSeen(request)) + const toQueue = requests.filter((request) => !this.hasSeen(request)) this.track(this.crawler.queue(toQueue, name, scope)) } diff --git a/ghcrawler/lib/traversalPolicy.js b/ghcrawler/lib/traversalPolicy.js index 40764e5e..9acd2bc8 100644 --- a/ghcrawler/lib/traversalPolicy.js +++ b/ghcrawler/lib/traversalPolicy.js @@ -87,10 +87,7 @@ class TraversalPolicy { } static _hasExpired(processedAt, expiration = 0, unit = 'hours') { - return ( - !processedAt || - DateTime.now().diff(DateTime.fromISO(processedAt), unit)[unit] > expiration - ) + return !processedAt || DateTime.now().diff(DateTime.fromISO(processedAt), unit)[unit] > expiration } /** * A policy spec has the following form: [:<[scenario/]mapName[@path]]. That means a spec can be just @@ -220,7 +217,7 @@ class TraversalPolicy { originMutable: 'storage', storageOriginIfMissing: 'storage', mutables: mutablesValue, - originOnly: 'origin' + originOnly: 'origin', }[this.fetch] if (!result) { throw new Error(`Fetch policy misconfigured ${this.fetch}`) @@ -237,7 +234,7 @@ class TraversalPolicy { originStorage: 'origin', storageOriginIfMissing: 'origin', mutables: 'origin', - originOnly: null + originOnly: null, }[this.fetch] if (result === undefined) { throw new Error(`Fetch policy misconfigured ${this.fetch}`) diff --git a/ghcrawler/memoryConfig.js b/ghcrawler/memoryConfig.js index 15777ddb..b77e9b11 100644 --- a/ghcrawler/memoryConfig.js +++ b/ghcrawler/memoryConfig.js @@ -3,23 +3,23 @@ module.exports = { crawler: { - count: 1 + count: 1, }, fetch: {}, process: {}, store: { - provider: 'memory' + provider: 'memory', }, deadletter: { - provider: 'memory' + provider: 'memory', }, lock: { - provider: 'memory' + provider: 'memory', }, queue: { provider: 'memory', memory: { - weights: { immediate: 3, soon: 2, normal: 3, later: 2 } - } - } + weights: { immediate: 3, soon: 2, normal: 3, later: 2 }, + }, + }, } diff --git a/ghcrawler/middleware/asyncMiddleware.js b/ghcrawler/middleware/asyncMiddleware.js index 95f7de65..9d2dfd59 100644 --- a/ghcrawler/middleware/asyncMiddleware.js +++ b/ghcrawler/middleware/asyncMiddleware.js @@ -1,7 +1,7 @@ // Copyright (c) Microsoft Corporation and others. Licensed under the MIT license. 
// SPDX-License-Identifier: MIT -module.exports = func => async (request, response, next) => { +module.exports = (func) => async (request, response, next) => { try { await func(request, response, next) } catch (error) { diff --git a/ghcrawler/middleware/sendHelper.js b/ghcrawler/middleware/sendHelper.js index 5ed678f9..23d60ba6 100644 --- a/ghcrawler/middleware/sendHelper.js +++ b/ghcrawler/middleware/sendHelper.js @@ -5,15 +5,15 @@ const htmlencode = require('htmlencode').htmlEncode function create() { - return function(request, response, next) { + return function (request, response, next) { response.helpers = response.helpers || {} response.helpers.send = { context: { request: request, - response: response + response: response, }, noContent: noContent, - partialHtml: partialHtml + partialHtml: partialHtml, } next() } diff --git a/ghcrawler/providers/index.js b/ghcrawler/providers/index.js index cad24f75..55f6c56c 100644 --- a/ghcrawler/providers/index.js +++ b/ghcrawler/providers/index.js @@ -4,14 +4,14 @@ module.exports = { queue: { storageQueue: require('./queuing/storageQueueFactory'), - memory: require('./queuing/memoryFactory') + memory: require('./queuing/memoryFactory'), }, store: { memory: require('./storage/inmemoryDocStore'), file: require('./storage/file'), - azblob: require('./storage/azureBlobFactory') + azblob: require('./storage/azureBlobFactory'), }, lock: { - memory: require('./locker/memory') - } + memory: require('./locker/memory'), + }, } diff --git a/ghcrawler/providers/queuing/attenuatedQueue.js b/ghcrawler/providers/queuing/attenuatedQueue.js index 4d6b85fd..79dc13e0 100644 --- a/ghcrawler/providers/queuing/attenuatedQueue.js +++ b/ghcrawler/providers/queuing/attenuatedQueue.js @@ -13,12 +13,11 @@ class AttenuatedQueue extends NestedQueue { } done(request) { - return super.done(request) - .then(() => { - const key = this._getCacheKey(request) - const deleted = memoryCache.del(key) - if (deleted) this.logger.verbose(`Deleted ${key}`) - }) + return super.done(request).then(() => { + const key = this._getCacheKey(request) + const deleted = memoryCache.del(key) + if (deleted) this.logger.verbose(`Deleted ${key}`) + }) } push(requests) { @@ -26,10 +25,10 @@ class AttenuatedQueue extends NestedQueue { requests = Array.isArray(requests) ? requests : [requests] return Promise.all( requests.map( - qlimit(this.options.parallelPush || 1)(request => { + qlimit(this.options.parallelPush || 1)((request) => { return self._pushOne(request) - }) - ) + }), + ), ) } @@ -56,7 +55,7 @@ class AttenuatedQueue extends NestedQueue { } entry = { timestamp: Date.now(), - promise: this.queue.push(request) + promise: this.queue.push(request), } const ttl = (this.options.attenuation && this.options.attenuation.ttl) || 1000 memoryCache.put(key, entry, ttl) diff --git a/ghcrawler/providers/queuing/inmemorycrawlqueue.js b/ghcrawler/providers/queuing/inmemorycrawlqueue.js index 10b9bebf..b198e319 100644 --- a/ghcrawler/providers/queuing/inmemorycrawlqueue.js +++ b/ghcrawler/providers/queuing/inmemorycrawlqueue.js @@ -18,7 +18,7 @@ class InMemoryCrawlQueue { async push(requests) { requests = Array.isArray(requests) ? 
requests : [requests] - requests = requests.map(request => extend(true, {}, request)) + requests = requests.map((request) => extend(true, {}, request)) this.queue = this.queue.concat(requests) } @@ -60,7 +60,7 @@ class InMemoryCrawlQueue { async getInfo() { return { count: this.queue.length, - metricsName: this.name + metricsName: this.name, } } } diff --git a/ghcrawler/providers/queuing/memoryFactory.js b/ghcrawler/providers/queuing/memoryFactory.js index fbed68d6..861873f8 100644 --- a/ghcrawler/providers/queuing/memoryFactory.js +++ b/ghcrawler/providers/queuing/memoryFactory.js @@ -5,11 +5,11 @@ const CrawlerFactory = require('../../crawlerFactory') const AttenuatedQueue = require('./attenuatedQueue') const InMemoryCrawlQueue = require('./inmemorycrawlqueue') -module.exports = options => { +module.exports = (options) => { const manager = { createQueueChain: (name, options) => { return new AttenuatedQueue(new InMemoryCrawlQueue(name, options), options) - } + }, } return CrawlerFactory.createScopedQueueSets({ globalManager: manager, localManager: manager }, options) } diff --git a/ghcrawler/providers/queuing/queueSet.js b/ghcrawler/providers/queuing/queueSet.js index e6815d7f..07198518 100644 --- a/ghcrawler/providers/queuing/queueSet.js +++ b/ghcrawler/providers/queuing/queueSet.js @@ -28,7 +28,7 @@ class QueueSet { } _reconfigure(current, changes) { - if (changes.some(patch => patch.path.includes('/weights'))) { + if (changes.some((patch) => patch.path.includes('/weights'))) { this._startMap = this._createStartMap(this.options.weights) } return Promise.resolve() @@ -40,17 +40,17 @@ class QueueSet { subscribe() { return Promise.all( - this.queues.map(queue => { + this.queues.map((queue) => { return queue.subscribe() - }) + }), ) } unsubscribe() { return Promise.all( - this.queues.map(queue => { + this.queues.map((queue) => { return queue.unsubscribe() - }) + }), ) } @@ -65,7 +65,7 @@ class QueueSet { } async _pop(queue, request = null) { - const result = request || await queue.pop() + const result = request || (await queue.pop()) if (result && !result._originQueue) { result._originQueue = queue } diff --git a/ghcrawler/providers/queuing/scopedQueueSets.js b/ghcrawler/providers/queuing/scopedQueueSets.js index 392f6fcc..394defbb 100644 --- a/ghcrawler/providers/queuing/scopedQueueSets.js +++ b/ghcrawler/providers/queuing/scopedQueueSets.js @@ -7,7 +7,7 @@ class ScopedQueueSets { constructor(globalQueues, localQueues) { this._scopedQueues = { local: localQueues, - global: globalQueues + global: globalQueues, } } @@ -32,30 +32,29 @@ class ScopedQueueSets { subscribe() { return Promise.all( - Object.values(this._scopedQueues).map(queues => { + Object.values(this._scopedQueues).map((queues) => { return queues.subscribe() - }) + }), ) } unsubscribe() { return Promise.all( - Object.values(this._scopedQueues).map(queues => { + Object.values(this._scopedQueues).map((queues) => { return queues.unsubscribe() - }) + }), ) } pop() { - return this._scopedQueues.local.pop() - .then(request => { - if (request) { - //mark to retry on the global queues - request._retryQueue = request._originQueue.getName() - return request - } - return this._scopedQueues.global.pop() - }) + return this._scopedQueues.local.pop().then((request) => { + if (request) { + //mark to retry on the global queues + request._retryQueue = request._originQueue.getName() + return request + } + return this._scopedQueues.global.pop() + }) } done(request) { @@ -79,24 +78,25 @@ class ScopedQueueSets { } publish() { - const publishToGlobal 
= async localQueue => { + const publishToGlobal = async (localQueue) => { const localRequests = [] const info = await localQueue.getInfo() for (let count = info.count; count > 0; count--) { localRequests.push( - localQueue.pop() - .then(request => request && localQueue.done(request).then(() => request.createRequeuable())) - .then(request => request && this.push(request, localQueue.getName(), 'global'))) + localQueue + .pop() + .then((request) => request && localQueue.done(request).then(() => request.createRequeuable())) + .then((request) => request && this.push(request, localQueue.getName(), 'global')), + ) } debug(`publishing ${localRequests.length} to ${localQueue.getName()}`) return Promise.all(localRequests) } - return Promise.allSettled(this._scopedQueues.local.queues.map(publishToGlobal)) - .then(results => { - const found = results.find(result => result.status === 'rejected') - if (found) throw new Error(found.reason) - }) + return Promise.allSettled(this._scopedQueues.local.queues.map(publishToGlobal)).then((results) => { + const found = results.find((result) => result.status === 'rejected') + if (found) throw new Error(found.reason) + }) } } diff --git a/ghcrawler/providers/queuing/storageBackedQueue.js b/ghcrawler/providers/queuing/storageBackedQueue.js index 28c30146..f8324e26 100644 --- a/ghcrawler/providers/queuing/storageBackedQueue.js +++ b/ghcrawler/providers/queuing/storageBackedQueue.js @@ -7,7 +7,6 @@ const VISIBILITY_TIMEOUT_TO_REMAIN_ON_LOCAL_QUEUE = 8 * 60 * 60 // 8 hours const VISIBILITY_TIMEOUT_FOR_PROCESSING = 1 * 60 * 60 // 1 hours, similar to storage queue pop visibility timeout class StorageBackedQueue extends NestedQueue { - constructor(queue, storageQueue, options) { super(queue) this.options = options @@ -53,9 +52,7 @@ class StorageBackedQueue extends NestedQueue { } async done(request) { - await Promise.all([ - super.done(request), - this._doneInStorage(request)]) + await Promise.all([super.done(request), this._doneInStorage(request)]) } async _doneInStorage(request) { @@ -73,15 +70,11 @@ class StorageBackedQueue extends NestedQueue { } async subscribe() { - await Promise.all([ - super.subscribe(), - this._sharedStorageQueue.subscribe()]) + await Promise.all([super.subscribe(), this._sharedStorageQueue.subscribe()]) } async unsubscribe() { - const results = await Promise.allSettled([ - super.unsubscribe(), - this._sharedStorageQueue.unsubscribe()]) + const results = await Promise.allSettled([super.unsubscribe(), this._sharedStorageQueue.unsubscribe()]) this._throwIfError(results, 'Failed to unsubscribe') } @@ -89,7 +82,7 @@ class StorageBackedQueue extends NestedQueue { const deleteRequests = [] const info = await this.getInfo() for (let count = info.count; count > 0; count--) { - const deleteOne = super.pop().then(request => this.done(request)) + const deleteOne = super.pop().then((request) => this.done(request)) deleteRequests.push(deleteOne) } const results = await Promise.allSettled(deleteRequests) @@ -97,8 +90,9 @@ class StorageBackedQueue extends NestedQueue { } _throwIfError(results, message) { - const errors = results.filter(result => result.status === 'rejected') - .map(rejected => new Error(rejected.reason)) + const errors = results + .filter((result) => result.status === 'rejected') + .map((rejected) => new Error(rejected.reason)) if (errors.length) throw new AggregateError(errors, message) } @@ -109,7 +103,7 @@ class StorageBackedQueue extends NestedQueue { static create(queue, storageQueue, options = {}) { const defaultOptions = { 
visibilityTimeout_remainLocal: VISIBILITY_TIMEOUT_TO_REMAIN_ON_LOCAL_QUEUE, - visibilityTimeout: VISIBILITY_TIMEOUT_FOR_PROCESSING + visibilityTimeout: VISIBILITY_TIMEOUT_FOR_PROCESSING, } const optionsWithDefaults = { ...defaultOptions, ...options } return new StorageBackedQueue(queue, storageQueue, optionsWithDefaults) diff --git a/ghcrawler/providers/queuing/storageQueue.js b/ghcrawler/providers/queuing/storageQueue.js index e24d4505..4e3e8490 100644 --- a/ghcrawler/providers/queuing/storageQueue.js +++ b/ghcrawler/providers/queuing/storageQueue.js @@ -16,7 +16,7 @@ class StorageQueue { async subscribe() { return new Promise((resolve, reject) => { - this.client.createQueueIfNotExists(this.queueName, error => { + this.client.createQueueIfNotExists(this.queueName, (error) => { if (error) { return reject(error) } @@ -34,7 +34,7 @@ class StorageQueue { requests = Array.isArray(requests) ? requests : [requests] return Promise.all( requests.map( - qlimit(this.options.parallelPush || 1)(request => { + qlimit(this.options.parallelPush || 1)((request) => { const body = JSON.stringify(request) return new Promise((resolve, reject) => { this.client.createMessage(this.queueName, body, option, (error, queueMessageResult) => { @@ -45,8 +45,8 @@ class StorageQueue { resolve(this._buildMessageReceipt(queueMessageResult, request)) }) }) - }) - ) + }), + ), ) } @@ -55,7 +55,6 @@ class StorageQueue { return { _message } } - async pop() { const msgOptions = { numOfMessages: 1, visibilityTimeout: this.options.visibilityTimeout || 60 * 60 } return new Promise((resolve, reject) => { @@ -70,7 +69,7 @@ class StorageQueue { } if (this.options.maxDequeueCount && message.dequeueCount > this.options.maxDequeueCount) { this.logger.verbose('maxDequeueCount exceeded') - this.client.deleteMessage(this.queueName, message.messageId, message.popReceipt, error => { + this.client.deleteMessage(this.queueName, message.messageId, message.popReceipt, (error) => { if (error) return reject(error) resolve(null) }) @@ -90,7 +89,7 @@ class StorageQueue { return } return new Promise((resolve, reject) => { - this.client.deleteMessage(this.queueName, request._message.messageId, request._message.popReceipt, error => { + this.client.deleteMessage(this.queueName, request._message.messageId, request._message.popReceipt, (error) => { if (error) { return reject(error) } @@ -114,21 +113,27 @@ class StorageQueue { updateVisibilityTimeout(request, visibilityTimeout = 0) { return new Promise((resolve, reject) => { // visibilityTimeout is updated to 0 to unlock/unlease the message - this.client.updateMessage(this.queueName, request._message.messageId, request._message.popReceipt, visibilityTimeout, (error, result) => { - if (error) { - return reject(error) - } - this._log('NAKed', request._message.body) - resolve(this._buildMessageReceipt(result, request._message.body)) - }) + this.client.updateMessage( + this.queueName, + request._message.messageId, + request._message.popReceipt, + visibilityTimeout, + (error, result) => { + if (error) { + return reject(error) + } + this._log('NAKed', request._message.body) + resolve(this._buildMessageReceipt(result, request._message.body)) + }, + ) }) } async flush() { return new Promise((resolve, reject) => { - this.client.deleteQueue(this.queueName, error => { + this.client.deleteQueue(this.queueName, (error) => { if (error) return reject(error) - this.client.createQueueIfNotExists(this.queueName, error => { + this.client.createQueueIfNotExists(this.queueName, (error) => { if (error) return reject(error) 
resolve() }) @@ -137,7 +142,7 @@ class StorageQueue { } async getInfo() { - return new Promise(resolve => { + return new Promise((resolve) => { this.client.getQueueMetadata(this.queueName, (result, error) => { if (error) { this.logger.error(error) diff --git a/ghcrawler/providers/queuing/storageQueueFactory.js b/ghcrawler/providers/queuing/storageQueueFactory.js index e2a02bbc..44178570 100644 --- a/ghcrawler/providers/queuing/storageQueueFactory.js +++ b/ghcrawler/providers/queuing/storageQueueFactory.js @@ -5,9 +5,9 @@ const StorageQueueManager = require('./storageQueueManager') const CrawlerFactory = require('../../crawlerFactory') const StorageBackedInMemoryQueueManager = require('./storageBackedInMemoryQueueManager') -module.exports = options => { +module.exports = (options) => { const { connectionString } = options const storageQueueManager = new StorageQueueManager(connectionString, options) const localManager = new StorageBackedInMemoryQueueManager(storageQueueManager) - return CrawlerFactory.createScopedQueueSets({ globalManager: storageQueueManager, localManager}, options) + return CrawlerFactory.createScopedQueueSets({ globalManager: storageQueueManager, localManager }, options) } diff --git a/ghcrawler/providers/queuing/storageQueueManager.js b/ghcrawler/providers/queuing/storageQueueManager.js index 2f23a7c9..81b05917 100644 --- a/ghcrawler/providers/queuing/storageQueueManager.js +++ b/ghcrawler/providers/queuing/storageQueueManager.js @@ -22,7 +22,7 @@ class StorageQueueManager { } createQueue(name, options) { - const formatter = message => { + const formatter = (message) => { // make sure the message/request object is copied to enable deferral scenarios (i.e., the request is modified // and then put back on the queue) return Request.adopt(Object.assign({}, message.body)) diff --git a/ghcrawler/providers/storage/azureBlobFactory.js b/ghcrawler/providers/storage/azureBlobFactory.js index 2d2d5eb5..fbe0a2cd 100644 --- a/ghcrawler/providers/storage/azureBlobFactory.js +++ b/ghcrawler/providers/storage/azureBlobFactory.js @@ -4,7 +4,7 @@ const AzureStorage = require('azure-storage') const AzureStorageDocStore = require('./storageDocStore') -module.exports = options => { +module.exports = (options) => { options.logger.info('creating azure storage store') const { account, key, connection, container } = options const retryOperations = new AzureStorage.ExponentialRetryPolicyFilter() diff --git a/ghcrawler/providers/storage/file.js b/ghcrawler/providers/storage/file.js index b38c48c7..f8bb24ab 100644 --- a/ghcrawler/providers/storage/file.js +++ b/ghcrawler/providers/storage/file.js @@ -24,14 +24,14 @@ class FileStore { const filePath = this._getPath(urn) mkdirp.sync(path.dirname(filePath)) return new Promise((resolve, reject) => - fs.writeFile(filePath, JSON.stringify(document, null, 2), error => (error ? reject(error) : resolve(document))) + fs.writeFile(filePath, JSON.stringify(document, null, 2), (error) => (error ? reject(error) : resolve(document))), ) } async get(type, key) { const path = this._getPath(key) return new Promise((resolve, reject) => - fs.readFile(path, (error, data) => (error ? reject(error) : resolve(JSON.parse(data)))) + fs.readFile(path, (error, data) => (error ? 
reject(error) : resolve(JSON.parse(data)))), ) } @@ -42,7 +42,7 @@ class FileStore { } etag(type, key) { - return this.get(type, key).then(result => result._metadata.etag) + return this.get(type, key).then((result) => result._metadata.etag) } // list(type) { @@ -57,7 +57,7 @@ class FileStore { count(type) { // TODO likewise wrt list. Not sure this is needed - return this.list(type).then(results => { + return this.list(type).then((results) => { return results.length }) } @@ -65,4 +65,4 @@ class FileStore { close() {} } -module.exports = options => new FileStore(options) +module.exports = (options) => new FileStore(options) diff --git a/ghcrawler/providers/storage/inmemoryDocStore.js b/ghcrawler/providers/storage/inmemoryDocStore.js index dc4d8565..d6aaf068 100644 --- a/ghcrawler/providers/storage/inmemoryDocStore.js +++ b/ghcrawler/providers/storage/inmemoryDocStore.js @@ -48,10 +48,10 @@ class InmemoryDocStore { collection = {} } return Object.keys(collection) - .filter(key => { + .filter((key) => { return key.startsWith('urn:') ? true : false }) - .map(key => { + .map((key) => { const metadata = collection[key]._metadata return { version: metadata.version, @@ -61,7 +61,7 @@ class InmemoryDocStore { urn: metadata.links.self.href, fetchedAt: metadata.fetchedAt, processedAt: metadata.processedAt, - extra: metadata.extra + extra: metadata.extra, } }) } @@ -90,4 +90,4 @@ class InmemoryDocStore { } } -module.exports = options => new InmemoryDocStore(options) +module.exports = (options) => new InmemoryDocStore(options) diff --git a/ghcrawler/providers/storage/storageDocStore.js b/ghcrawler/providers/storage/storageDocStore.js index 52a8e266..819eb9c8 100644 --- a/ghcrawler/providers/storage/storageDocStore.js +++ b/ghcrawler/providers/storage/storageDocStore.js @@ -20,7 +20,7 @@ class AzureStorageDocStore { async _createContainer(name) { return new Promise((resolve, reject) => { - this.service.createContainerIfNotExists(name, error => { + this.service.createContainerIfNotExists(name, (error) => { if (error) { return reject(error) } @@ -38,7 +38,7 @@ class AzureStorageDocStore { url: document._metadata.url, urn: document._metadata.links.self.href, fetchedat: document._metadata.fetchedAt, - processedat: document._metadata.processedAt + processedat: document._metadata.processedAt, } if (document._metadata.extra) { blobMetadata.extra = JSON.stringify(document._metadata.extra) @@ -50,7 +50,7 @@ class AzureStorageDocStore { return new Promise((resolve, reject) => { dataStream .pipe(this.service.createWriteStreamToBlockBlob(this.name, blobName, options)) - .on('error', error => { + .on('error', (error) => { return reject(error) }) .on('finish', () => { @@ -76,7 +76,7 @@ class AzureStorageDocStore { // TODO: Consistency on whether key is a URL or URN async etag(type, key) { const blobName = this._getBlobNameFromKey(type, key) - return new Promise(resolve => { + return new Promise((resolve) => { this.service.getBlobMetadata(this.name, blobName, (error, blob) => { resolve(error ? 
null : blob.metadata.etag) }) @@ -95,7 +95,7 @@ class AzureStorageDocStore { continuationToken, { include: azure.BlobUtilities.BlobListingDetails.METADATA, - location: azure.StorageUtilities.LocationMode.PRIMARY_THEN_SECONDARY + location: azure.StorageUtilities.LocationMode.PRIMARY_THEN_SECONDARY, }, (error, response) => { if (error) { @@ -103,10 +103,11 @@ class AzureStorageDocStore { reject(error) } return resolve(response) - }) + }, + ) }) entries = entries.concat( - result.entries.map(entry => { + result.entries.map((entry) => { const blobMetadata = entry.metadata return { version: blobMetadata.version, @@ -116,9 +117,9 @@ class AzureStorageDocStore { urn: blobMetadata.urn, fetchedAt: blobMetadata.fetchedat, processedAt: blobMetadata.processedat, - extra: blobMetadata.extra ? JSON.parse(blobMetadata.extra) : undefined + extra: blobMetadata.extra ? JSON.parse(blobMetadata.extra) : undefined, } - }) + }), ) } while (continuationToken && entries.length < 10000) return entries @@ -129,7 +130,7 @@ class AzureStorageDocStore { this._ensureDeadletter(type) const blobName = this._getBlobNameFromKey(type, key) return new Promise((resolve, reject) => { - this.service.deleteBlob(this.name, blobName, error => { + this.service.deleteBlob(this.name, blobName, (error) => { if (error) { return reject(error) } @@ -162,7 +163,8 @@ class AzureStorageDocStore { reject(error) } return resolve(response) - }) + }, + ) }) entryCount += result.entries.length } while (continuationToken) diff --git a/ghcrawler/routes/requests.js b/ghcrawler/routes/requests.js index fb8db609..61e6a2d4 100644 --- a/ghcrawler/routes/requests.js +++ b/ghcrawler/routes/requests.js @@ -18,12 +18,12 @@ router.post( return response.sendStatus(404) } response.sendStatus(201) - }) + }), ) async function queueRequests(requestSpecs, queueName) { requestSpecs = Array.isArray(requestSpecs) ? requestSpecs : [requestSpecs] - const requests = requestSpecs.map(spec => rationalizeRequest(spec)) + const requests = requestSpecs.map((spec) => rationalizeRequest(spec)) try { return crawlerService.queue(requests, queueName) } catch (error) { diff --git a/index.js b/index.js index 4d264077..f572e85a 100644 --- a/index.js +++ b/index.js @@ -10,7 +10,7 @@ const uuid = require('node-uuid') const logger = require('./providers/logging/logger')({ crawlerId: config.get('CRAWLER_ID') || uuid.v4(), crawlerHost: config.get('CRAWLER_HOST'), - buildNumber: config.get('CRAWLER_BUILD_NUMBER') || 'local' + buildNumber: config.get('CRAWLER_BUILD_NUMBER') || 'local', }) run(defaults, logger, searchPath, maps) diff --git a/lib/baseHandler.js b/lib/baseHandler.js index 3f0921e8..c7de76ec 100644 --- a/lib/baseHandler.js +++ b/lib/baseHandler.js @@ -21,13 +21,13 @@ class BaseHandler { * @param {Request} request */ // eslint-disable-next-line no-unused-vars - handle(request) { } + handle(request) {} get tmpOptions() { const tmpBase = config.get('TEMPDIR') || (process.platform === 'win32' ? 'c:/temp/' : '/tmp/') return { unsafeCleanup: true, - template: tmpBase + 'cd-XXXXXX' + template: tmpBase + 'cd-XXXXXX', } } @@ -48,7 +48,7 @@ class BaseHandler { hash.end() resolve(hash.read()) }) - file.on('error', error => reject(error)) + file.on('error', (error) => reject(error)) file.pipe(hash) }) } @@ -74,7 +74,7 @@ class BaseHandler { if (versions.length === 0) return null if (versions.length === 1) return versions[0] return versions - .filter(v => !this.isPreReleaseVersion(v)) + .filter((v) => !this.isPreReleaseVersion(v)) .reduce((max, current) => (semver.gt(current, max) ? 
current : max), versions[0]) } diff --git a/lib/entitySpec.js b/lib/entitySpec.js index ddaaf6f8..ecb82de6 100644 --- a/lib/entitySpec.js +++ b/lib/entitySpec.js @@ -5,7 +5,7 @@ class EntitySpec { static fromUrl(url) { if (!url) return null const [, type, provider, namespace, name, revision, toolSpec] = url.match( - /.*:\/*([^/]+)\/([^/]+)\/([^/]+)\/([^/]+)\/?([^/]+)?(\/tool\/.+)?/ + /.*:\/*([^/]+)\/([^/]+)\/([^/]+)\/([^/]+)\/?([^/]+)?(\/tool\/.+)?/, ) const [, , toolName, toolVersion] = toolSpec ? toolSpec.split('/') : [] return new EntitySpec(type, provider, namespace, name, revision, toolName, toolVersion) @@ -21,7 +21,7 @@ class EntitySpec { spec.name, spec.revision, spec.tool, - spec.toolVersion + spec.toolVersion, ) } @@ -39,8 +39,9 @@ class EntitySpec { const revisionPart = this.revision ? `:revision:${this.revision}` : '' const toolVersionPart = this.toolVersion ? `:${this.toolVersion}` : '' const toolPart = this.tool ? `:tool:${this.tool}` : '' - return `urn:${this.type}:${this.provider}:${this.namespace || '-'}:${this.name - }${revisionPart}${toolPart}${toolVersionPart}` + return `urn:${this.type}:${this.provider}:${this.namespace || '-'}:${ + this.name + }${revisionPart}${toolPart}${toolVersionPart}` } toUrl() { @@ -51,8 +52,9 @@ class EntitySpec { const revisionPart = this.revision ? `/${this.revision}` : '' const toolVersionPart = this.toolVersion ? `/${this.toolVersion}` : '' const toolPart = this.tool ? `/tool/${this.tool}` : '' - return `${this.type}/${this.provider}/${this.namespace || '-'}/${this.name - }${revisionPart}${toolPart}${toolVersionPart}` + return `${this.type}/${this.provider}/${this.namespace || '-'}/${ + this.name + }${revisionPart}${toolPart}${toolVersionPart}` } } diff --git a/lib/fetchResult.js b/lib/fetchResult.js index 21ea4768..d8c8f5bb 100644 --- a/lib/fetchResult.js +++ b/lib/fetchResult.js @@ -4,7 +4,6 @@ const { cloneDeep } = require('lodash') class FetchResult { - constructor(url) { this.contentOrigin = 'origin' if (url) this.url = url @@ -24,8 +23,8 @@ class FetchResult { adoptCleanup(needCleanup, fromRequest) { if (!needCleanup) return this const cleanups = (Array.isArray(needCleanup) ? 
needCleanup : [needCleanup]) - .map(toCleanup => toCleanup.removeCallback) - .filter(item => item) + .map((toCleanup) => toCleanup.removeCallback) + .filter((item) => item) //transfer the clean up from request to fetchResult this.trackCleanup(cleanups) fromRequest?.removeCleanup(cleanups) @@ -33,7 +32,7 @@ class FetchResult { } cleanup(errorHandler) { - this._cleanups.forEach(cleanup => { + this._cleanups.forEach((cleanup) => { try { cleanup() } catch (error) { @@ -58,7 +57,7 @@ class FetchResult { } removeDependents(...toRemove) { - this._dependents = this._dependents.filter(item => !toRemove.includes(item)) + this._dependents = this._dependents.filter((item) => !toRemove.includes(item)) return this } @@ -73,4 +72,4 @@ class FetchResult { } } -module.exports = FetchResult \ No newline at end of file +module.exports = FetchResult diff --git a/lib/memoryCache.js b/lib/memoryCache.js index 807f4c31..12d64a67 100644 --- a/lib/memoryCache.js +++ b/lib/memoryCache.js @@ -39,4 +39,4 @@ class MemoryCache { } } -module.exports = MemoryCache \ No newline at end of file +module.exports = MemoryCache diff --git a/lib/sourceDiscovery.js b/lib/sourceDiscovery.js index a02d4997..09170169 100644 --- a/lib/sourceDiscovery.js +++ b/lib/sourceDiscovery.js @@ -49,11 +49,11 @@ async function discoverRevision(version, candidate, options) { function resolveGitHubLocations(locations) { const result = locations - .map(location => { + .map((location) => { var parsedUrl = location ? parseGitHubUrl(location) : null return parsedUrl && parsedUrl.owner && parsedUrl.name ? parsedUrl : null }) - .filter(e => e) + .filter((e) => e) return uniqWith(result, (a, b) => a.owner === b.owner && a.name === b.name) } @@ -62,7 +62,7 @@ function resolveGitHubLocations(locations) { // eslint-disable-next-line no-unused-vars async function discoverFromGitHubRefs(version, candidate, options) { const headers = { - 'User-Agent': 'clearlydefined/scanning' + 'User-Agent': 'clearlydefined/scanning', } const token = options.githubToken if (token) headers.Authorization = 'token ' + token @@ -77,7 +77,7 @@ async function discoverFromGitHubRefs(version, candidate, options) { retryDelay: 250, retryStrategy: request.RetryStrategies.HTTPOrNetworkError, tokenLowerBound: 10, - json: true + json: true, }) if (!refs) return null for (let i = 0; i < refs.length; i++) { diff --git a/lib/utils.js b/lib/utils.js index df2e395a..b01bd371 100644 --- a/lib/utils.js +++ b/lib/utils.js @@ -5,7 +5,7 @@ const { spawn } = require('child_process') const { intersection } = require('lodash') const dateTimeFormats = [ - 'EEE MMM d HH:mm:ss \'GMT\'ZZ yyyy' //in pom properties + "EEE MMM d HH:mm:ss 'GMT'ZZ yyyy", //in pom properties ] function normalizePath(path) { @@ -15,7 +15,7 @@ function normalizePath(path) { function normalizePaths(paths) { if (!Array.isArray(paths)) return paths - return paths.map(path => normalizePath(path)) + return paths.map((path) => normalizePath(path)) } function trimParents(path, parents) { @@ -29,7 +29,7 @@ function trimParents(path, parents) { function trimAllParents(paths, parents) { if (!Array.isArray(paths)) return paths - return paths.map(path => trimParents(path, parents)) + return paths.map((path) => trimParents(path, parents)) } function isGitFile(file) { @@ -52,27 +52,28 @@ function extractDate(dateAndTime, formats = dateTimeFormats) { if (!luxonResult.isValid) return null const instant = luxonResult.until(luxonResult) - const validStart = DateTime.fromISO('1950-01-01') + const validStart = DateTime.fromISO('1950-01-01') const 
validEnd = DateTime.now().plus({ days: 30 }) - return (instant.isBefore(validStart) || instant.isAfter(validEnd)) ? null : luxonResult + return instant.isBefore(validStart) || instant.isAfter(validEnd) ? null : luxonResult } function attachListeners(child, resolve, reject) { - let stdoutData = [], stderrData = [] + let stdoutData = [], + stderrData = [] - child.stdout.on('data', chunk => stdoutData.push(chunk)) - child.stderr.on('data', chunk => stderrData.push(chunk)) + child.stdout.on('data', (chunk) => stdoutData.push(chunk)) + child.stderr.on('data', (chunk) => stderrData.push(chunk)) child - .on('error', (err) => reject(err)) - .on('close', (code) => { - if (code === 0) resolve(stdoutData.join('')) - else { - const errorFromChild = new Error(stderrData.join('')) - errorFromChild.code = code - reject(errorFromChild) - } - }) + .on('error', (err) => reject(err)) + .on('close', (code) => { + if (code === 0) resolve(stdoutData.join('')) + else { + const errorFromChild = new Error(stderrData.join('')) + errorFromChild.code = code + reject(errorFromChild) + } + }) } function spawnPromisified(command, args, options) { @@ -82,4 +83,12 @@ function spawnPromisified(command, args, options) { }) } -module.exports = { normalizePath, normalizePaths, trimParents, trimAllParents, isGitFile, extractDate, spawnPromisified } +module.exports = { + normalizePath, + normalizePaths, + trimParents, + trimAllParents, + isGitFile, + extractDate, + spawnPromisified, +} diff --git a/providers/fetch/abstractFetch.js b/providers/fetch/abstractFetch.js index a6b583a1..808a3c66 100644 --- a/providers/fetch/abstractFetch.js +++ b/providers/fetch/abstractFetch.js @@ -27,14 +27,20 @@ class AbstractFetch extends BaseHandler { unzip(source, destination) { return new Promise((resolve, reject) => - extract(source, { dir: destination }, error => (error ? reject(error) : resolve())) + extract(source, { dir: destination }, (error) => (error ? 
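// Aside, not part of the patch: per attachListeners above, spawnPromisified
// resolves with the child's joined stdout on exit code 0 and otherwise rejects
// with an Error carrying stderr as its message and the exit code on .code.
// Hypothetical call: await spawnPromisified('git', ['--version'], {}) // => 'git version ...'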
reject(error) : resolve())), ) } decompress(source, destination) { return decompress(source, destination, { - filter: file => !file.path.endsWith('/'), - plugins: [decompressTar(), decompressTarbz2(), decompressTargz(), decompressTarxz(), decompressUnzip({ validateEntrySizes: false })] + filter: (file) => !file.path.endsWith('/'), + plugins: [ + decompressTar(), + decompressTarbz2(), + decompressTargz(), + decompressTarxz(), + decompressUnzip({ validateEntrySizes: false }), + ], }) } } diff --git a/providers/fetch/condaFetch.js b/providers/fetch/condaFetch.js index ad3b39f9..e67a7259 100644 --- a/providers/fetch/condaFetch.js +++ b/providers/fetch/condaFetch.js @@ -15,17 +15,17 @@ class CondaFetch extends AbstractFetch { this.channels = { 'anaconda-main': 'https://repo.anaconda.com/pkgs/main', 'anaconda-r': 'https://repo.anaconda.com/pkgs/r', - 'conda-forge': 'https://conda.anaconda.org/conda-forge' + 'conda-forge': 'https://conda.anaconda.org/conda-forge', } this.headers = { - 'User-Agent': 'clearlydefined.io crawler (clearlydefined@outlook.com)' + 'User-Agent': 'clearlydefined.io crawler (clearlydefined@outlook.com)', } this.CACHE_DURATION = 8 * 60 * 60 * 1000 // 8 hours } canHandle(request) { const spec = this.toSpec(request) - return spec && !!(this.channels[spec.provider]) + return spec && !!this.channels[spec.provider] } // {type: conda|condasrc}/{provider: anaconda-main|anaconda-r|conda-forge}/{architecture|-}/{package name}/[{version | }]-[{build version | }]/ @@ -53,14 +53,7 @@ class CondaFetch extends AbstractFetch { if (spec.type === 'condasrc') { return this._downloadCondaSourcePackage(spec, request, version, packageChannelData) } else { - return this._downloadCondaPackage( - spec, - request, - version, - buildVersion, - architecture, - packageChannelData - ) + return this._downloadCondaPackage(spec, request, version, buildVersion, architecture, packageChannelData) } } @@ -83,10 +76,10 @@ class CondaFetch extends AbstractFetch { const fetchResult = new FetchResult(request.url) fetchResult.document = { location: dir.name, - registryData: { 'channelData': packageChannelData, downloadUrl }, + registryData: { channelData: packageChannelData, downloadUrl }, releaseDate: new Date(packageChannelData.timestamp || 0).toISOString(), declaredLicenses: packageChannelData.license, - hashes + hashes, } fetchResult.casedSpec = clone(spec) request.fetchResult = fetchResult.adoptCleanup(dir, request) @@ -96,40 +89,55 @@ class CondaFetch extends AbstractFetch { _matchPackage(name, version, buildVersion, repoData) { let packageRepoEntries = [] let packageMatches = ([, packageData]) => { - return packageData.name === name && ((!version) || version === packageData.version) - && ((!buildVersion) || packageData.build.startsWith(buildVersion)) + return ( + packageData.name === name && + (!version || version === packageData.version) && + (!buildVersion || packageData.build.startsWith(buildVersion)) + ) } if (repoData['packages']) { - packageRepoEntries = packageRepoEntries.concat(Object.entries(repoData['packages']) - .filter(packageMatches) - .map(([packageFile, packageData]) => { return { packageFile, packageData } })) + packageRepoEntries = packageRepoEntries.concat( + Object.entries(repoData['packages']) + .filter(packageMatches) + .map(([packageFile, packageData]) => { + return { packageFile, packageData } + }), + ) } if (repoData['packages.conda']) { - packageRepoEntries = packageRepoEntries.concat(Object.entries(repoData['packages.conda']) - .filter(packageMatches) - .map(([packageFile, 
packageData]) => { return { packageFile, packageData } })) + packageRepoEntries = packageRepoEntries.concat( + Object.entries(repoData['packages.conda']) + .filter(packageMatches) + .map(([packageFile, packageData]) => { + return { packageFile, packageData } + }), + ) } packageRepoEntries.sort((a, b) => (b.packageData.timestamp || 0) - (a.packageData.timestamp || 0)) return packageRepoEntries } async _downloadCondaPackage(spec, request, version, buildVersion, architecture, packageChannelData) { - if (!architecture || architecture === '-' && packageChannelData.subdirs.length > 0) { + if (!architecture || (architecture === '-' && packageChannelData.subdirs.length > 0)) { // prefer no-arch if available architecture = packageChannelData.subdirs.includes('noarch') ? 'noarch' : packageChannelData.subdirs[0] this.logger.info(`No binary architecture specified for ${spec.name}, using architecture: ${architecture}`) } let repoData = undefined - if (!(packageChannelData.subdirs.find(x => x === architecture))) { + if (!packageChannelData.subdirs.find((x) => x === architecture)) { return request.markSkip(`Missing architecture ${architecture} for package ${spec.name} in channel`) } repoData = await this.getRepoData(this.channels[spec.provider], spec.provider, architecture) if (!repoData) { - return request.markSkip(`failed to fetch and parse repodata json file for channel ${spec.provider} in architecture ${architecture}`) + return request.markSkip( + `failed to fetch and parse repodata json file for channel ${spec.provider} in architecture ${architecture}`, + ) } let packageRepoEntries = this._matchPackage(spec.name, version, buildVersion, repoData) if (packageRepoEntries.length === 0) { - return request.markSkip(`Missing package with matching spec (version: ${version}, buildVersion: ${buildVersion}) in ${architecture} repository`) + return request.markSkip( + `Missing package with matching spec (version: ${version}, buildVersion: ${buildVersion}) in ${architecture} repository`, + ) } let packageRepoEntry = packageRepoEntries[0] let downloadUrl = new URL(`${this.channels[spec.provider]}/${architecture}/${packageRepoEntry.packageFile}`).href @@ -145,10 +153,10 @@ class CondaFetch extends AbstractFetch { const fetchResult = new FetchResult(request.url) fetchResult.document = { location: dir.name, - registryData: { 'channelData': packageChannelData, 'repoData': packageRepoEntry, downloadUrl }, + registryData: { channelData: packageChannelData, repoData: packageRepoEntry, downloadUrl }, releaseDate: new Date(packageRepoEntry.packageData.timestamp || 0).toISOString(), declaredLicenses: packageRepoEntry.packageData.license, - hashes + hashes, } fetchResult.casedSpec = clone(spec) request.fetchResult = fetchResult.adoptCleanup(dir, request) @@ -158,10 +166,12 @@ class CondaFetch extends AbstractFetch { async _downloadPackage(downloadUrl, destination) { return new Promise((resolve, reject) => { const options = { url: downloadUrl, headers: this.headers } - nodeRequest.get(options, (error, response) => { - if (error) return reject(error) - if (response.statusCode !== 200) return reject(new Error(`${response.statusCode} ${response.statusMessage}`)) - }).pipe(fs.createWriteStream(destination).on('finish', () => resolve())) + nodeRequest + .get(options, (error, response) => { + if (error) return reject(error) + if (response.statusCode !== 200) return reject(new Error(`${response.statusCode} ${response.statusMessage}`)) + }) + .pipe(fs.createWriteStream(destination).on('finish', () => resolve())) }) } @@ -169,14 
+179,19 @@ class CondaFetch extends AbstractFetch { if (!memCache.get(cacheKey)) { return new Promise((resolve, reject) => { const options = { url: sourceUrl, headers: this.headers } - nodeRequest.get(options, (error, response) => { - if (error) return reject(error) - if (response.statusCode !== 200) return reject(new Error(`${response.statusCode} ${response.statusMessage}`)) - }).pipe(fs.createWriteStream(fileDstLocation).on('finish', () => { - memCache.put(cacheKey, true, cacheDuration) - this.logger.info(`Conda: retrieved ${sourceUrl}. Stored data file at ${fileDstLocation}`) - return resolve() - })) + nodeRequest + .get(options, (error, response) => { + if (error) return reject(error) + if (response.statusCode !== 200) + return reject(new Error(`${response.statusCode} ${response.statusMessage}`)) + }) + .pipe( + fs.createWriteStream(fileDstLocation).on('finish', () => { + memCache.put(cacheKey, true, cacheDuration) + this.logger.info(`Conda: retrieved ${sourceUrl}. Stored data file at ${fileDstLocation}`) + return resolve() + }), + ) }) } } @@ -191,12 +206,22 @@ class CondaFetch extends AbstractFetch { } async getChannelData(condaChannelUrl, condaChannelID) { - return await this._fetchCachedJSONFile(`${condaChannelID}-channelDataFile`, `${condaChannelUrl}/channeldata.json`, this.CACHE_DURATION, `${this.packageMapFolder}/${condaChannelID}-channelDataFile.json`) + return await this._fetchCachedJSONFile( + `${condaChannelID}-channelDataFile`, + `${condaChannelUrl}/channeldata.json`, + this.CACHE_DURATION, + `${this.packageMapFolder}/${condaChannelID}-channelDataFile.json`, + ) } async getRepoData(condaChannelUrl, condaChannelID, architecture) { - return await this._fetchCachedJSONFile(`${condaChannelID}-repoDataFile-${architecture}`, `${condaChannelUrl}/${architecture}/repodata.json`, this.CACHE_DURATION, `${this.packageMapFolder}/${condaChannelID}-repoDataFile-${architecture}.json`) + return await this._fetchCachedJSONFile( + `${condaChannelID}-repoDataFile-${architecture}`, + `${condaChannelUrl}/${architecture}/repodata.json`, + this.CACHE_DURATION, + `${this.packageMapFolder}/${condaChannelID}-repoDataFile-${architecture}.json`, + ) } } -module.exports = options => new CondaFetch(options) +module.exports = (options) => new CondaFetch(options) diff --git a/providers/fetch/cratesioFetch.js b/providers/fetch/cratesioFetch.js index e0a5e9bf..d5ac02a3 100644 --- a/providers/fetch/cratesioFetch.js +++ b/providers/fetch/cratesioFetch.js @@ -35,7 +35,7 @@ class CratesioFetch extends AbstractFetch { releaseDate: version.created_at, location, hashes: await this.computeHashes(zip), - manifest: registryData.manifest + manifest: registryData.manifest, } if (version.crate) { fetchResult.casedSpec = clone(spec) @@ -52,17 +52,17 @@ class CratesioFetch extends AbstractFetch { registryData = await request({ url: `https://crates.io/api/v1/crates/${spec.name}`, json: true, - headers: { 'User-Agent': 'clearlydefined.io crawler (clearlydefined@outlook.com)' } + headers: { 'User-Agent': 'clearlydefined.io crawler (clearlydefined@outlook.com)' }, }) } catch (exception) { if (exception.statusCode !== 404) throw exception return null } if (!registryData.versions) return null - const version = spec.revision || this.getLatestVersion(registryData.versions.map(x => x.num)) + const version = spec.revision || this.getLatestVersion(registryData.versions.map((x) => x.num)) return { manifest: registryData.crate, - version: registryData.versions.find(x => x.num === version) + version: registryData.versions.find((x) => 
x.num === version), } } @@ -73,15 +73,15 @@ class CratesioFetch extends AbstractFetch { url: `https://crates.io${version.dl_path}`, json: false, encoding: null, - headers: { 'User-Agent': 'clearlydefined.io crawler (clearlydefined@outlook.com)' } + headers: { 'User-Agent': 'clearlydefined.io crawler (clearlydefined@outlook.com)' }, }).pipe( fs .createWriteStream(zip) .on('finish', () => resolve(null)) - .on('error', reject) + .on('error', reject), ) }) } } -module.exports = options => new CratesioFetch(options) +module.exports = (options) => new CratesioFetch(options) diff --git a/providers/fetch/debianFetch.js b/providers/fetch/debianFetch.js index 60496a25..29c3adf7 100644 --- a/providers/fetch/debianFetch.js +++ b/providers/fetch/debianFetch.js @@ -23,13 +23,13 @@ const readdir = promisify(fs.readdir) const readFile = promisify(fs.readFile) const providerMap = { - debian: 'http://ftp.debian.org/debian/' + debian: 'http://ftp.debian.org/debian/', } const packageFileMap = { url: 'http://ftp.debian.org/debian/indices/package-file.map.bz2', cacheKey: 'packageFileMap', - cacheDuration: 8 * 60 * 60 * 1000 // 8 hours + cacheDuration: 8 * 60 * 60 * 1000, // 8 hours } const metadataChangelogsUrl = 'https://metadata.ftp-master.debian.org/changelogs/' @@ -62,7 +62,14 @@ class DebianFetch extends AbstractFetch { const declaredLicenses = await this._getDeclaredLicenses(copyrightUrl) const fetchResult = new FetchResult(request.url) - fetchResult.document = this._createDocument({ dir, registryData, releaseDate, copyrightUrl, declaredLicenses, hashes }) + fetchResult.document = this._createDocument({ + dir, + registryData, + releaseDate, + copyrightUrl, + declaredLicenses, + hashes, + }) fetchResult.casedSpec = clone(spec) request.fetchResult = fetchResult.adoptCleanup(dir, request) return request @@ -92,7 +99,7 @@ class DebianFetch extends AbstractFetch { memCache.put(packageFileMap.cacheKey, true, packageFileMap.cacheDuration) return new Promise((resolve, reject) => { const dom = domain.create() - dom.on('error', error => { + dom.on('error', (error) => { memCache.del(packageFileMap.cacheKey) return reject(error) }) @@ -103,7 +110,7 @@ class DebianFetch extends AbstractFetch { .pipe(fs.createWriteStream(this.packageMapFileLocation)) .on('finish', () => { this.logger.info( - `Debian: retrieved ${packageFileMap.url}. Stored map file at ${this.packageMapFileLocation}` + `Debian: retrieved ${packageFileMap.url}. 
Stored map file at ${this.packageMapFileLocation}`, ) return resolve() }) @@ -121,7 +128,7 @@ class DebianFetch extends AbstractFetch { let entry = {} const lineReader = linebyline(this.packageMapFileLocation) lineReader - .on('line', line => { + .on('line', (line) => { if (line === '') { if ( [entry.Source, entry.Binary].includes(name) && @@ -139,7 +146,7 @@ class DebianFetch extends AbstractFetch { this.logger.info(`Debian: got ${relevantEntries.length} entries for ${spec.toUrl()}`) return resolve(relevantEntries) }) - .on('error', error => reject(error)) + .on('error', (error) => reject(error)) }) } @@ -152,7 +159,7 @@ class DebianFetch extends AbstractFetch { _ensureArchitecturePresenceForBinary(spec, registryData) { const { architecture } = this._fromSpec(spec) if (spec.type === 'deb' && !architecture) { - const randomBinaryArchitecture = (registryData.find(entry => entry.Architecture) || {}).Architecture + const randomBinaryArchitecture = (registryData.find((entry) => entry.Architecture) || {}).Architecture if (!randomBinaryArchitecture) return false spec.revision += '_' + randomBinaryArchitecture } @@ -163,14 +170,14 @@ class DebianFetch extends AbstractFetch { const isSrc = spec.type === 'debsrc' const { architecture } = this._fromSpec(spec) if (isSrc) { - const sourceAndPatches = registryData.filter(entry => !entry.Architecture && !entry.Path.endsWith('.dsc')) - const sourcePath = (sourceAndPatches.find(entry => entry.Path.includes('.orig.tar.')) || {}).Path + const sourceAndPatches = registryData.filter((entry) => !entry.Architecture && !entry.Path.endsWith('.dsc')) + const sourcePath = (sourceAndPatches.find((entry) => entry.Path.includes('.orig.tar.')) || {}).Path const source = sourcePath ? new URL(providerMap.debian + sourcePath).href : null - const patchPath = (sourceAndPatches.find(entry => !entry.Path.includes('.orig.tar.')) || {}).Path + const patchPath = (sourceAndPatches.find((entry) => !entry.Path.includes('.orig.tar.')) || {}).Path const patches = patchPath ? 
new URL(providerMap.debian + patchPath).href : null return { source, patches } } - const binary = new URL(providerMap.debian + registryData.find(entry => entry.Architecture === architecture).Path) + const binary = new URL(providerMap.debian + registryData.find((entry) => entry.Architecture === architecture).Path) .href return { binary } } @@ -205,7 +212,7 @@ class DebianFetch extends AbstractFetch { async _download(downloadUrl, destination) { return new Promise((resolve, reject) => { const dom = domain.create() - dom.on('error', error => reject(error)) + dom.on('error', (error) => reject(error)) dom.run(() => { nodeRequest .get(downloadUrl, (error, response) => { @@ -225,12 +232,9 @@ class DebianFetch extends AbstractFetch { reader.on('entry', (entry, next) => { const name = entry.fileName() const fullName = path.join(destination, name) - entry - .fileData() - .pipe(fs.createWriteStream(fullName)) - .on('finish', next) + entry.fileData().pipe(fs.createWriteStream(fullName)).on('finish', next) }) - reader.on('error', error => { + reader.on('error', (error) => { reject(error) }) reader.on('end', () => { @@ -263,12 +267,12 @@ class DebianFetch extends AbstractFetch { if (!locationStat.isDirectory()) return [location] const subdirs = await readdir(location) const files = await Promise.all( - subdirs.map(subdir => { + subdirs.map((subdir) => { const entry = path.resolve(location, subdir) return this._getFiles(entry) - }) + }), ) - return flatten(files).filter(x => x) + return flatten(files).filter((x) => x) } async _getSourceDirectoryName(location) { @@ -282,7 +286,7 @@ class DebianFetch extends AbstractFetch { const orderedPatches = (await readFile(patchesSeriesLocation)) .toString() .split('\n') - .filter(patch => patch && !patch.trim().startsWith('#') && !patch.trim().startsWith('|')) + .filter((patch) => patch && !patch.trim().startsWith('#') && !patch.trim().startsWith('|')) for (let patchFileName of orderedPatches) { const patchCommand = `patch -p01 -i ${path.join(patchesLocation, 'patches', patchFileName)}` try { @@ -296,7 +300,7 @@ class DebianFetch extends AbstractFetch { } _getCopyrightUrl(registryData) { - const entry = registryData.find(entry => entry.Source) + const entry = registryData.find((entry) => entry.Source) if (!entry) return null // Example: ./pool/main/0/0ad/0ad_0.0.17-1.debian.tar.xz -> main/0 const pathFragment = entry.Path.replace('./pool/', '').split('/').slice(0, 2).join('/') @@ -322,23 +326,26 @@ class DebianFetch extends AbstractFetch { // https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/#spdx _parseDeclaredLicenses(copyrightResponse) { const licensesSet = new Set() - const licenses = copyrightResponse.split('\n') - .filter(line => line.startsWith('License: ')) - .map(line => line.replace('License:', '').trim()) - .map(licenseId => { + const licenses = copyrightResponse + .split('\n') + .filter((line) => line.startsWith('License: ')) + .map((line) => line.replace('License:', '').trim()) + .map((licenseId) => { if (licenseId.includes('CPL') && !licenseId.includes('RSCPL')) licenseId = licenseId.replace('CPL', 'CPL-1.0') if (licenseId.toLowerCase().includes('expat')) licenseId = licenseId.replace(/expat/i, 'MIT') return licenseId }) // Over-simplified parsing of edge cases: - licenses.forEach(licenseId => { - if (licenseId.includes(' or ') && !licenseId.includes(',')) { // A or B and C => (A OR B AND C) + licenses.forEach((licenseId) => { + if (licenseId.includes(' or ') && !licenseId.includes(',')) { + // A or B and C => (A OR B AND C) licenseId = 
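// Worked example, not patch content, derivable from the replacements below:
// 'GPL-2+ or Artistic' contains ' or ' and no comma, so this first branch
// adds it to the set as '(GPL-2+ OR Artistic)'.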
licenseId.replace(' or ', ' OR ') licenseId = licenseId.replace(' and ', ' AND ') licensesSet.add('(' + licenseId + ')') - } else if (licenseId.includes(' or ') && licenseId.includes(',')) { // A or B, and C => (A OR B) AND C + } else if (licenseId.includes(' or ') && licenseId.includes(',')) { + // A or B, and C => (A OR B) AND C licenseId = licenseId.replace(' or ', ' OR ') - licenseId.split(' and ').forEach(part => { + licenseId.split(' and ').forEach((part) => { if (part.includes('OR') && part.endsWith(',')) { licensesSet.add('(' + part.replace(',', ')')) } else { @@ -346,7 +353,7 @@ class DebianFetch extends AbstractFetch { } }) } else if (licenseId.includes(' and ')) { - licenseId.split(' and ').forEach(part => licensesSet.add(part)) + licenseId.split(' and ').forEach((part) => licensesSet.add(part)) } else { licensesSet.add(licenseId) } @@ -355,4 +362,4 @@ class DebianFetch extends AbstractFetch { } } -module.exports = options => new DebianFetch(options) +module.exports = (options) => new DebianFetch(options) diff --git a/providers/fetch/dispatcher.js b/providers/fetch/dispatcher.js index fe527335..eec52ef4 100644 --- a/providers/fetch/dispatcher.js +++ b/providers/fetch/dispatcher.js @@ -66,18 +66,19 @@ class FetchDispatcher extends AbstractFetch { async _fetchResult(request, handler) { const cacheKey = this.toSpec(request).toUrlPath() - const fetchResult = this.fetched.get(cacheKey) || await this._fetchPromise(handler, request, cacheKey) + const fetchResult = this.fetched.get(cacheKey) || (await this._fetchPromise(handler, request, cacheKey)) fetchResult?.decorate(request) } _fetchPromise(handler, request, cacheKey) { - return this.inProgressFetches[cacheKey] || + return ( + this.inProgressFetches[cacheKey] || (this.inProgressFetches[cacheKey] = this._createFetchPromise(handler, request, cacheKey)) + ) } _createFetchPromise(handler, request, cacheKey) { - return this._fetch(handler, request, cacheKey) - .finally(() => delete this.inProgressFetches[cacheKey]) + return this._fetch(handler, request, cacheKey).finally(() => delete this.inProgressFetches[cacheKey]) } async _fetch(handler, request, cacheKey) { @@ -96,16 +97,17 @@ class FetchDispatcher extends AbstractFetch { cacheKey, fetchResult, this._cleanupResult.bind(this), - (key, result) => !result.isInUse()) + (key, result) => !result.isInUse(), + ) } _cleanupResult(key, result) { - result.cleanup(error => this.logger.info(`Cleanup Problem cleaning up after ${key} ${error.message}`)) + result.cleanup((error) => this.logger.info(`Cleanup Problem cleaning up after ${key} ${error.message}`)) } // get all the handler that apply to this request from the given list of handlers _getHandler(request, list) { - return list.filter(element => element.canHandle(request))[0] + return list.filter((element) => element.canHandle(request))[0] } } diff --git a/providers/fetch/gitCloner.js b/providers/fetch/gitCloner.js index c28f9ff1..43080240 100644 --- a/providers/fetch/gitCloner.js +++ b/providers/fetch/gitCloner.js @@ -9,7 +9,7 @@ const FetchResult = require('../../lib/fetchResult') const providerMap = { gitlab: 'https://gitlab.com', - github: 'https://github.com' + github: 'https://github.com', } class GitCloner extends AbstractFetch { @@ -54,7 +54,7 @@ class GitCloner extends AbstractFetch { return new Promise((resolve, reject) => { exec( `cd ${dirName} && git clone ${sourceUrl} --quiet && cd ${specName} ${reset} && git count-objects -v`, - (error, stdout) => (error ? 
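// Aside, not part of the patch: the shell pipeline above clones quietly,
// applies the optional ${reset} fragment (built elsewhere in this file) to pin
// the requested revision, and hands the 'git count-objects -v' output to
// _getRepoSize to report the repository size.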
reject(error) : resolve(this._getRepoSize(stdout))) + (error, stdout) => (error ? reject(error) : resolve(this._getRepoSize(stdout))), ) }) } @@ -62,7 +62,7 @@ class GitCloner extends AbstractFetch { _getDate(dirName, specName) { return new Promise((resolve, reject) => { exec(`cd ${dirName}/${specName} && git show -s --format=%ci`, (error, stdout) => - error ? reject(error) : resolve(new Date(stdout.trim())) + error ? reject(error) : resolve(new Date(stdout.trim())), ) }) } @@ -75,7 +75,7 @@ class GitCloner extends AbstractFetch { _getRevision(dirName, specName) { return new Promise((resolve, reject) => { exec(`cd ${dirName}/${specName} && git rev-parse HEAD`, (error, stdout) => - error ? reject(error) : resolve(stdout.trim()) + error ? reject(error) : resolve(stdout.trim()), ) }) } @@ -87,7 +87,7 @@ class GitCloner extends AbstractFetch { _deleteGitDatabase(dirName, specName) { return new Promise((resolve, reject) => { - rimraf(`${dirName}/${specName}/.git`, error => { + rimraf(`${dirName}/${specName}/.git`, (error) => { error ? reject(error) : resolve() }) }) @@ -99,4 +99,4 @@ class GitCloner extends AbstractFetch { } } -module.exports = options => new GitCloner(options) +module.exports = (options) => new GitCloner(options) diff --git a/providers/fetch/goFetch.js b/providers/fetch/goFetch.js index d8790100..ebcf8d2f 100644 --- a/providers/fetch/goFetch.js +++ b/providers/fetch/goFetch.js @@ -10,7 +10,7 @@ const { parse: spdxParser } = require('@clearlydefined/spdx') const FetchResult = require('../../lib/fetchResult') const providerMap = { - golang: 'https://proxy.golang.org' + golang: 'https://proxy.golang.org', } class GoFetch extends AbstractFetch { @@ -22,7 +22,7 @@ class GoFetch extends AbstractFetch { retryDelay: exponentialDelay, retryCondition: (err) => { return isNetworkOrIdempotentRequestError(err) || err.response?.status == 429 - } + }, }) this.options.http = options.http || axios } @@ -59,7 +59,10 @@ class GoFetch extends AbstractFetch { try { registryData = await this._getRegistryData(spec) } catch (err) { - if (err instanceof RequeueError && (request.attemptCount === undefined || request.attemptCount < this.options.maxRequeueAttemptCount)) { + if ( + err instanceof RequeueError && + (request.attemptCount === undefined || request.attemptCount < this.options.maxRequeueAttemptCount) + ) { return request.markRequeue('Throttled', err.message) } } @@ -102,13 +105,13 @@ class GoFetch extends AbstractFetch { } _replace_encodings(url) { - return `${url.replace(/%2f/ig, '/')}` + return `${url.replace(/%2f/gi, '/')}` } async _getArtifact(spec, destination) { const url = this._buildUrl(spec) - const status = await new Promise(resolve => { + const status = await new Promise((resolve) => { nodeRequest .get(url, (error, response) => { if (error) this.logger.error(this._google_proxy_error_string(error)) @@ -136,7 +139,7 @@ class GoFetch extends AbstractFetch { async _getRegistryData(spec) { const registryLicenseUrl = this._replace_encodings( - this._remove_blank_fields(`https://pkg.go.dev/${spec.namespace}/${spec.name}@${spec.revision}?tab=licenses`) + this._remove_blank_fields(`https://pkg.go.dev/${spec.namespace}/${spec.name}@${spec.revision}?tab=licenses`), ) try { // Based on this discussion https://github.com/golang/go/issues/36785, there is no API for pkg.go.dev for now. @@ -144,10 +147,10 @@ class GoFetch extends AbstractFetch { const root = htmlParser(response.data) // Here is the license html template file. 
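// Illustrative aside, not patch content: a minimal sketch of the scrape
// performed below, assuming node-html-parser's parse() is what the htmlParser
// import resolves to; the fixture markup is hypothetical.
//   const { parse } = require('node-html-parser')
//   const root = parse('<div id="#lic-0">MIT</div><div id="#lic-1">BSD-3-Clause</div>')
//   root.querySelectorAll('[id^=#lic-]').map((ele) => ele.textContent)
//   // => ['MIT', 'BSD-3-Clause']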
// https://github.com/golang/pkgsite/blob/master/static/frontend/unit/licenses/licenses.tmpl - const licenses = root.querySelectorAll('[id^=#lic-]').map(ele => ele.textContent) + const licenses = root.querySelectorAll('[id^=#lic-]').map((ele) => ele.textContent) if (this._validateLicenses(licenses)) { return { - licenses + licenses, } } else { this.logger.info(`Licenses from html could not be parsed. The licenses are ${JSON.stringify(licenses)}.`) @@ -164,7 +167,9 @@ class GoFetch extends AbstractFetch { this.logger.info(msg) throw new RequeueError(msg) } - this.logger.info(`Getting declared license from pkg.go.dev failed. ${JSON.stringify(err.response?.data || err.request || err.message)}`) + this.logger.info( + `Getting declared license from pkg.go.dev failed. ${JSON.stringify(err.response?.data || err.request || err.message)}`, + ) } } @@ -191,4 +196,4 @@ class RequeueError extends Error { } } -module.exports = options => new GoFetch(options) \ No newline at end of file +module.exports = (options) => new GoFetch(options) diff --git a/providers/fetch/gradlePluginFetch.js b/providers/fetch/gradlePluginFetch.js index abe7bd2f..f5d4da09 100644 --- a/providers/fetch/gradlePluginFetch.js +++ b/providers/fetch/gradlePluginFetch.js @@ -4,11 +4,13 @@ const MavenBasedFetch = require('./mavenBasedFetch') class GradlePluginFetch extends MavenBasedFetch { - constructor(options) { - super({ - 'gradleplugin': 'https://plugins.gradle.org/m2/' - }, options) + super( + { + gradleplugin: 'https://plugins.gradle.org/m2/', + }, + options, + ) } async _getPoms(spec, result = []) { @@ -16,7 +18,6 @@ class GradlePluginFetch extends MavenBasedFetch { //See https://docs.gradle.org/current/userguide/publishing_gradle_module_metadata.html return super._getPoms(spec, result) } - } -module.exports = options => new GradlePluginFetch(options) \ No newline at end of file +module.exports = (options) => new GradlePluginFetch(options) diff --git a/providers/fetch/mavenBasedFetch.js b/providers/fetch/mavenBasedFetch.js index 98d9b6c6..fcb21c29 100644 --- a/providers/fetch/mavenBasedFetch.js +++ b/providers/fetch/mavenBasedFetch.js @@ -1,194 +1,193 @@ -// (c) Copyright 2021, SAP SE and ClearlyDefined contributors. Licensed under the MIT license. 
-// SPDX-License-Identifier: MIT - -const AbstractFetch = require('./abstractFetch') -const requestPromise = require('request-promise-native') -const nodeRequest = require('request') -const { clone, get } = require('lodash') -const { promisify } = require('util') -const fs = require('fs') -const exists = promisify(fs.exists) -const readdir = promisify(fs.readdir) -const lstat = promisify(fs.lstat) -const path = require('path') -const parseString = promisify(require('xml2js').parseString) -const EntitySpec = require('../../lib/entitySpec') -const { extractDate } = require('../../lib/utils') -const FetchResult = require('../../lib/fetchResult') - -const extensionMap = { - sourcesJar: '-sources.jar', - pom: '.pom', - aar: '.aar', - jar: '.jar' -} - -const defaultHeaders = { headers: { 'User-Agent': 'clearlydefined.io crawler (clearlydefined@outlook.com)' } } - -class MavenBasedFetch extends AbstractFetch { - constructor(providerMap, options) { - super(options) - this._providerMap = { ...providerMap } - this._handleRequestPromise = options.requestPromise || requestPromise.defaults(defaultHeaders) - this._handleRequestStream = options.requestStream || nodeRequest.defaults(defaultHeaders).get - } - - canHandle(request) { - const spec = this.toSpec(request) - return !!this._providerMap[spec?.provider] - } - - async handle(request) { - const spec = this.toSpec(request) - if (!spec.revision) spec.revision = await this._getLatestVersion(spec) - if (!spec.namespace || !spec.revision) return this.markSkip(request) - // rewrite the request URL as it is used throughout the system to derive locations and urns etc. - request.url = spec.toUrl() - super.handle(request) - const poms = await this._getPoms(spec) - if (!poms.length) return this.markSkip(request) - const summary = this._mergePoms(poms) - const artifact = this.createTempFile(request) - const artifactResult = await this._getArtifact(spec, artifact.name) - if (!artifactResult) return this.markSkip(request) - const dir = this.createTempDir(request) - await this.decompress(artifact.name, dir.name) - const hashes = await this.computeHashes(artifact.name) - const releaseDate = await this._getReleaseDate(dir.name, spec) - - const fetchResult = new FetchResult(request.url) - fetchResult.document = this._createDocument(dir, releaseDate, hashes, poms, summary) - if (get(summary, 'groupId[0]') || get(summary, 'artifactId[0]')) { - fetchResult.casedSpec = clone(spec) - fetchResult.casedSpec.namespace = get(summary, 'groupId[0]') || spec.namespace - fetchResult.casedSpec.name = get(summary, 'artifactId[0]') || spec.name - } - request.fetchResult = fetchResult.adoptCleanup(dir, request) - return request - } - - async _getLatestVersion(spec) { - //Use Maven repository meta data model to get the latest version - //https://maven.apache.org/ref/3.2.5/maven-repository-metadata/repository-metadata.html#class_versioning - const url = `${this._buildBaseUrl(spec)}/maven-metadata.xml` - const response = await this._requestPromise({ url, json: false }) - if (!response) return null - const meta = await parseString(response) - return get(meta, 'metadata.versioning[0].release[0]') - } - - _createDocument(dir, releaseDate, hashes, poms, summary) { - return { location: dir.name, releaseDate, hashes, poms, summary } - } - - _buildBaseUrl(spec) { - const fullName = `${spec.namespace?.replace(/\./g, '/')}/${spec.name}` - return `${this._providerMap[spec.provider]}${fullName}` - } - - _buildUrl(spec, extension = extensionMap.jar) { - return 
`${this._buildBaseUrl(spec)}/${spec.revision}/${spec.name}-${spec.revision}${extension}` - } - - async _getArtifact(spec, destination) { - const extensions = spec.type === 'sourcearchive' ? [extensionMap.sourcesJar] : [extensionMap.jar, extensionMap.aar] - for (let extension of extensions) { - const url = this._buildUrl(spec, extension) - const status = await new Promise(resolve => { - this._handleRequestStream(url, (error, response) => { - if (error) this.logger.error(error) - if (response.statusCode !== 200) return resolve(false) - }) - .pipe(fs.createWriteStream(destination).on('finish', () => resolve(true))) - }) - if (status) return true - } - return false - } - - async _getPoms(spec, result = []) { - const pom = await this._getPom(spec) - const parentSpec = this._buildParentSpec(pom, spec) - if (parentSpec) await this._getPoms(parentSpec, result) - if (pom) result.push(pom) - return result - } - - async _getPom(spec) { - const url = this._buildUrl(spec, extensionMap.pom) - const content = await this._requestPromise({ url, json: false }) - if (!content) return null - const pom = await parseString(content) - // clean up some stuff we don't actually look at. - delete pom.project.build - delete pom.project.dependencies - delete pom.project.dependencyManagement - delete pom.project.modules - delete pom.project.profiles - return pom - } - - _buildParentSpec(pom, spec) { - if (!pom || !pom.project || !pom.project.parent) return null - const parent = pom.project.parent[0] - return new EntitySpec( - spec.type, - spec.provider, - parent.groupId[0].trim(), - parent.artifactId[0].trim(), - parent.version[0].trim() - ) - } - - _mergePoms(poms) { - if (!poms) return null - return [...poms].reduce((result, pom) => { - return { ...result, ...pom.project } - }, {}) - } - - async _getReleaseDate(dirName, spec) { - const location = path.join(dirName, `META-INF/${spec.type}/${spec.namespace}/${spec.name}/pom.properties`) - if (await exists(location)) { - const pomProperties = (await promisify(fs.readFile)(location)).toString().split('\n') - for (const line of pomProperties) { - const releaseDate = extractDate(line.slice(1)) - if (releaseDate) return releaseDate.toJSDate().toISOString() - } - } - //Get "File Data Last Modified" from the MANIFEST.MF file, and infer release date. - const manifest = path.join(dirName, 'META-INF/MANIFEST.MF') - if (await exists(manifest)) { - const stats = await fs.promises.stat(manifest) - return stats.mtime.toISOString() - } - - //For archives which do not contain the META-INF/MANIFEST.MF file, use mtime from any file - //in the decompressed directory to infer release date - const fileStat = await MavenBasedFetch._findAnyFileStat(dirName) - return fileStat?.mtime.toISOString() - } - - static async _findAnyFileStat(location) { - const locationStat = await lstat(location) - if (locationStat.isSymbolicLink()) return - if (locationStat.isFile()) return locationStat - - const subdirs = await readdir(location) - return subdirs.reduce((prev, subdir) => { - const entry = path.resolve(location, subdir) - return prev.then(result => result || MavenBasedFetch._findAnyFileStat(entry)) - }, Promise.resolve()) - } - - async _requestPromise(options) { - try { - return await this._handleRequestPromise(options) - } catch (error) { - if (error.statusCode === 404) return null - else throw error - } - } -} - -module.exports = MavenBasedFetch +// (c) Copyright 2021, SAP SE and ClearlyDefined contributors. Licensed under the MIT license. 
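// Aside, not part of the patch: in the re-added file below, _getPoms recurses
// into the parent spec before pushing the child's own POM, so _mergePoms
// spreads ancestors first and the child's project fields win. Hypothetical
// illustration: _mergePoms([parentPom, childPom]).version === childPom.project.version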
+// SPDX-License-Identifier: MIT + +const AbstractFetch = require('./abstractFetch') +const requestPromise = require('request-promise-native') +const nodeRequest = require('request') +const { clone, get } = require('lodash') +const { promisify } = require('util') +const fs = require('fs') +const exists = promisify(fs.exists) +const readdir = promisify(fs.readdir) +const lstat = promisify(fs.lstat) +const path = require('path') +const parseString = promisify(require('xml2js').parseString) +const EntitySpec = require('../../lib/entitySpec') +const { extractDate } = require('../../lib/utils') +const FetchResult = require('../../lib/fetchResult') + +const extensionMap = { + sourcesJar: '-sources.jar', + pom: '.pom', + aar: '.aar', + jar: '.jar', +} + +const defaultHeaders = { headers: { 'User-Agent': 'clearlydefined.io crawler (clearlydefined@outlook.com)' } } + +class MavenBasedFetch extends AbstractFetch { + constructor(providerMap, options) { + super(options) + this._providerMap = { ...providerMap } + this._handleRequestPromise = options.requestPromise || requestPromise.defaults(defaultHeaders) + this._handleRequestStream = options.requestStream || nodeRequest.defaults(defaultHeaders).get + } + + canHandle(request) { + const spec = this.toSpec(request) + return !!this._providerMap[spec?.provider] + } + + async handle(request) { + const spec = this.toSpec(request) + if (!spec.revision) spec.revision = await this._getLatestVersion(spec) + if (!spec.namespace || !spec.revision) return this.markSkip(request) + // rewrite the request URL as it is used throughout the system to derive locations and urns etc. + request.url = spec.toUrl() + super.handle(request) + const poms = await this._getPoms(spec) + if (!poms.length) return this.markSkip(request) + const summary = this._mergePoms(poms) + const artifact = this.createTempFile(request) + const artifactResult = await this._getArtifact(spec, artifact.name) + if (!artifactResult) return this.markSkip(request) + const dir = this.createTempDir(request) + await this.decompress(artifact.name, dir.name) + const hashes = await this.computeHashes(artifact.name) + const releaseDate = await this._getReleaseDate(dir.name, spec) + + const fetchResult = new FetchResult(request.url) + fetchResult.document = this._createDocument(dir, releaseDate, hashes, poms, summary) + if (get(summary, 'groupId[0]') || get(summary, 'artifactId[0]')) { + fetchResult.casedSpec = clone(spec) + fetchResult.casedSpec.namespace = get(summary, 'groupId[0]') || spec.namespace + fetchResult.casedSpec.name = get(summary, 'artifactId[0]') || spec.name + } + request.fetchResult = fetchResult.adoptCleanup(dir, request) + return request + } + + async _getLatestVersion(spec) { + //Use Maven repository meta data model to get the latest version + //https://maven.apache.org/ref/3.2.5/maven-repository-metadata/repository-metadata.html#class_versioning + const url = `${this._buildBaseUrl(spec)}/maven-metadata.xml` + const response = await this._requestPromise({ url, json: false }) + if (!response) return null + const meta = await parseString(response) + return get(meta, 'metadata.versioning[0].release[0]') + } + + _createDocument(dir, releaseDate, hashes, poms, summary) { + return { location: dir.name, releaseDate, hashes, poms, summary } + } + + _buildBaseUrl(spec) { + const fullName = `${spec.namespace?.replace(/\./g, '/')}/${spec.name}` + return `${this._providerMap[spec.provider]}${fullName}` + } + + _buildUrl(spec, extension = extensionMap.jar) { + return 
`${this._buildBaseUrl(spec)}/${spec.revision}/${spec.name}-${spec.revision}${extension}` + } + + async _getArtifact(spec, destination) { + const extensions = spec.type === 'sourcearchive' ? [extensionMap.sourcesJar] : [extensionMap.jar, extensionMap.aar] + for (let extension of extensions) { + const url = this._buildUrl(spec, extension) + const status = await new Promise((resolve) => { + this._handleRequestStream(url, (error, response) => { + if (error) this.logger.error(error) + if (response.statusCode !== 200) return resolve(false) + }).pipe(fs.createWriteStream(destination).on('finish', () => resolve(true))) + }) + if (status) return true + } + return false + } + + async _getPoms(spec, result = []) { + const pom = await this._getPom(spec) + const parentSpec = this._buildParentSpec(pom, spec) + if (parentSpec) await this._getPoms(parentSpec, result) + if (pom) result.push(pom) + return result + } + + async _getPom(spec) { + const url = this._buildUrl(spec, extensionMap.pom) + const content = await this._requestPromise({ url, json: false }) + if (!content) return null + const pom = await parseString(content) + // clean up some stuff we don't actually look at. + delete pom.project.build + delete pom.project.dependencies + delete pom.project.dependencyManagement + delete pom.project.modules + delete pom.project.profiles + return pom + } + + _buildParentSpec(pom, spec) { + if (!pom || !pom.project || !pom.project.parent) return null + const parent = pom.project.parent[0] + return new EntitySpec( + spec.type, + spec.provider, + parent.groupId[0].trim(), + parent.artifactId[0].trim(), + parent.version[0].trim(), + ) + } + + _mergePoms(poms) { + if (!poms) return null + return [...poms].reduce((result, pom) => { + return { ...result, ...pom.project } + }, {}) + } + + async _getReleaseDate(dirName, spec) { + const location = path.join(dirName, `META-INF/${spec.type}/${spec.namespace}/${spec.name}/pom.properties`) + if (await exists(location)) { + const pomProperties = (await promisify(fs.readFile)(location)).toString().split('\n') + for (const line of pomProperties) { + const releaseDate = extractDate(line.slice(1)) + if (releaseDate) return releaseDate.toJSDate().toISOString() + } + } + //Get "File Data Last Modified" from the MANIFEST.MF file, and infer release date. 
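// Annotation, not patch content: the pom.properties step above leans on the
// extractDate helper from lib/utils (reformatted earlier in this patch), whose
// "EEE MMM d HH:mm:ss 'GMT'ZZ yyyy" format targets the timestamp line Maven
// writes there. Sketch, with a hypothetical timestamp:
//   const { extractDate } = require('../../lib/utils')
//   // pom.properties date lines start with '#', hence line.slice(1) above
//   extractDate('Thu Jan 18 15:41:22 GMT+00:00 2024')?.toJSDate().toISOString()
//   // => '2024-01-18T15:41:22.000Z'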
+ const manifest = path.join(dirName, 'META-INF/MANIFEST.MF') + if (await exists(manifest)) { + const stats = await fs.promises.stat(manifest) + return stats.mtime.toISOString() + } + + //For archives which do not contain the META-INF/MANIFEST.MF file, use mtime from any file + //in the decompressed directory to infer release date + const fileStat = await MavenBasedFetch._findAnyFileStat(dirName) + return fileStat?.mtime.toISOString() + } + + static async _findAnyFileStat(location) { + const locationStat = await lstat(location) + if (locationStat.isSymbolicLink()) return + if (locationStat.isFile()) return locationStat + + const subdirs = await readdir(location) + return subdirs.reduce((prev, subdir) => { + const entry = path.resolve(location, subdir) + return prev.then((result) => result || MavenBasedFetch._findAnyFileStat(entry)) + }, Promise.resolve()) + } + + async _requestPromise(options) { + try { + return await this._handleRequestPromise(options) + } catch (error) { + if (error.statusCode === 404) return null + else throw error + } + } +} + +module.exports = MavenBasedFetch diff --git a/providers/fetch/mavenGoogleFetch.js b/providers/fetch/mavenGoogleFetch.js index 22f87078..af6c9c52 100644 --- a/providers/fetch/mavenGoogleFetch.js +++ b/providers/fetch/mavenGoogleFetch.js @@ -5,9 +5,12 @@ const MavenBasedFetch = require('./mavenBasedFetch') class MavenGoogleFetch extends MavenBasedFetch { constructor(options) { - super({ - mavengoogle: 'https://dl.google.com/android/maven2/' - }, options) + super( + { + mavengoogle: 'https://dl.google.com/android/maven2/', + }, + options, + ) } //The format for source url is: https://dl.google.com/android/maven2/groudId1/groupdId2/artifactId/revision/artifactId-revision-sources.jar // E.g.: https://maven.google.com/web/index.html#androidx.browser:browser:1.3.0 @@ -20,4 +23,4 @@ class MavenGoogleFetch extends MavenBasedFetch { } } -module.exports = options => new MavenGoogleFetch(options) +module.exports = (options) => new MavenGoogleFetch(options) diff --git a/providers/fetch/mavencentralFetch.js b/providers/fetch/mavencentralFetch.js index a17cc72d..661e6a47 100644 --- a/providers/fetch/mavencentralFetch.js +++ b/providers/fetch/mavencentralFetch.js @@ -5,11 +5,13 @@ const MavenBasedFetch = require('./mavenBasedFetch') const { get } = require('lodash') class MavenCentralFetch extends MavenBasedFetch { - constructor(options) { - super({ - mavencentral: 'https://search.maven.org/remotecontent?filepath=' - }, options) + super( + { + mavencentral: 'https://search.maven.org/remotecontent?filepath=', + }, + options, + ) } // query maven to get the latest version if we don't already have that. 
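// Sketch, not patch content: the solrsearch lookup the Example line below
// illustrates, with hypothetical group/artifact values g and a:
//   `https://search.maven.org/solrsearch/select?q=g:%22${g}%22+AND+a:%22${a}%22&rows=1&wt=json`
//   // g = 'org.eclipse', a = 'swt' reproduces the Example URL below, minus
//   // its v:%22...%22 version clause.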
// Example: https://search.maven.org/solrsearch/select?q=g:%22org.eclipse%22+AND+a:%22swt%22+AND+v:%223.3.0-v3346%22&rows=1&wt=json @@ -30,4 +32,4 @@ class MavenCentralFetch extends MavenBasedFetch { } } -module.exports = options => new MavenCentralFetch(options) +module.exports = (options) => new MavenCentralFetch(options) diff --git a/providers/fetch/npmjsFetch.js b/providers/fetch/npmjsFetch.js index 8eb7861b..bd6958f6 100644 --- a/providers/fetch/npmjsFetch.js +++ b/providers/fetch/npmjsFetch.js @@ -9,7 +9,7 @@ const { clone, get } = require('lodash') const FetchResult = require('../../lib/fetchResult') const providerMap = { - npmjs: 'https://registry.npmjs.com' + npmjs: 'https://registry.npmjs.com', } class NpmFetch extends AbstractFetch { @@ -63,7 +63,7 @@ class NpmFetch extends AbstractFetch { try { registryData = await requestPromise({ url: `${baseUrl}/${encodeURIComponent(fullName).replace('%40', '@')}`, // npmjs doesn't handle the escaped version - json: true + json: true, }) } catch (exception) { if (exception.statusCode !== 404) throw exception @@ -109,4 +109,4 @@ class NpmFetch extends AbstractFetch { } } -module.exports = options => new NpmFetch(options) +module.exports = (options) => new NpmFetch(options) diff --git a/providers/fetch/nugetFetch.js b/providers/fetch/nugetFetch.js index f9b5780f..8d5214b5 100644 --- a/providers/fetch/nugetFetch.js +++ b/providers/fetch/nugetFetch.js @@ -11,7 +11,7 @@ const requestRetry = require('requestretry').defaults({ maxAttempts: 3, fullResp const FetchResult = require('../../lib/fetchResult') const providerMap = { - nuget: 'https://api.nuget.org' + nuget: 'https://api.nuget.org', } class NuGetFetch extends AbstractFetch { @@ -50,7 +50,7 @@ class NuGetFetch extends AbstractFetch { location, metadataLocation, releaseDate: registryData ? new Date(registryData.published).toISOString() : null, - hashes: await this.computeHashes(zip) + hashes: await this.computeHashes(zip), } if (manifest.licenseUrl) { await this._downloadLicense({ dirName: location, licenseUrl: manifest.licenseUrl }) @@ -71,7 +71,7 @@ class NuGetFetch extends AbstractFetch { // https://api.nuget.org/v3/registration5-gz-semver2/microsoft.powershell.native/7.0.0-preview.1.json const { body, statusCode } = await requestRetry.get( `${baseUrl}/v3/registration5-gz-semver2/${spec.name.toLowerCase()}/${spec.revision}.json`, - { gzip: true } + { gzip: true }, ) return statusCode !== 200 || !body ? null : JSON.parse(body) } @@ -79,8 +79,10 @@ class NuGetFetch extends AbstractFetch { // https://docs.microsoft.com/en-us/nuget/reference/package-versioning#normalized-version-numbers _normalizeVersion(version) { const parts = version.split('-') - const trimmed = parts[0].split('.').map(part => trimStart(part, '0') || '0') - return [(trimmed[3] === '0' ? trimmed.slice(0, 3) : trimmed).join('.'), ...parts.slice(1)].filter(x => x).join('-') + const trimmed = parts[0].split('.').map((part) => trimStart(part, '0') || '0') + return [(trimmed[3] === '0' ? 
trimmed.slice(0, 3) : trimmed).join('.'), ...parts.slice(1)] + .filter((x) => x) + .join('-') } async _getLatestVersion(name) { @@ -88,11 +90,11 @@ class NuGetFetch extends AbstractFetch { // Example: https://api.nuget.org/v3-flatcontainer/moq/index.json const baseUrl = providerMap.nuget const { body, statusCode } = await requestRetry.get(`${baseUrl}/v3-flatcontainer/${name}/index.json`, { - json: true + json: true, }) // If statusCode is not 200, XML may be returned if (statusCode === 200 && body.versions) { - const versions = body.versions.filter(version => !version.includes('build')) + const versions = body.versions.filter((version) => !version.includes('build')) return versions[versions.length - 1] // the versions are already sorted } return null @@ -120,7 +122,7 @@ class NuGetFetch extends AbstractFetch { // https://docs.microsoft.com/en-us/nuget/api/package-base-address-resource#download-package-manifest-nuspec // Example: https://api.nuget.org/v3-flatcontainer/newtonsoft.json/11.0.1/newtonsoft.json.nuspec const { body, statusCode } = await requestRetry.get( - `https://api.nuget.org/v3-flatcontainer/${spec.name.toLowerCase()}/${spec.revision}/${spec.name.toLowerCase()}.nuspec` + `https://api.nuget.org/v3-flatcontainer/${spec.name.toLowerCase()}/${spec.revision}/${spec.name.toLowerCase()}.nuspec`, ) if (statusCode !== 200) return null return body @@ -130,11 +132,11 @@ class NuGetFetch extends AbstractFetch { const location = { manifest: path.join(dir.name, 'manifest.json'), nuspec: path.join(dir.name, 'nuspec.xml'), - latestNuspec: latestNuspec ? path.join(dir.name, 'latestNuspec.xml') : null + latestNuspec: latestNuspec ? path.join(dir.name, 'latestNuspec.xml') : null, } await Promise.all([ promisify(fs.writeFile)(location.manifest, JSON.stringify(manifest)), - promisify(fs.writeFile)(location.nuspec, nuspec) + promisify(fs.writeFile)(location.nuspec, nuspec), ]) if (latestNuspec) { await promisify(fs.writeFile)(location.latestNuspec, latestNuspec) @@ -152,4 +154,4 @@ class NuGetFetch extends AbstractFetch { } } -module.exports = options => new NuGetFetch(options) +module.exports = (options) => new NuGetFetch(options) diff --git a/providers/fetch/packagistFetch.js b/providers/fetch/packagistFetch.js index 153f2af4..dfd8825a 100644 --- a/providers/fetch/packagistFetch.js +++ b/providers/fetch/packagistFetch.js @@ -11,7 +11,7 @@ const readdir = promisify(fs.readdir) const FetchResult = require('../../lib/fetchResult') const providerMap = { - packagist: 'https://repo.packagist.org/' + packagist: 'https://repo.packagist.org/', } class PackagistFetch extends AbstractFetch { @@ -42,7 +42,7 @@ class PackagistFetch extends AbstractFetch { let registryData const baseUrl = providerMap.packagist const { body, statusCode } = await requestRetry.get(`${baseUrl}/p/${spec.namespace}/${spec.name}.json`, { - json: true + json: true, }) if (statusCode !== 200 || !body) return null registryData = body @@ -63,13 +63,14 @@ class PackagistFetch extends AbstractFetch { const options = { url: distUrl, headers: { - 'User-Agent': 'clearlydefined.io crawler (clearlydefined@outlook.com)' - } + 'User-Agent': 'clearlydefined.io crawler (clearlydefined@outlook.com)', + }, } - nodeRequest.get(options, (error, response) => { - if (error) return reject(error) - if (response.statusCode !== 200) reject(new Error(`${response.statusCode} ${response.statusMessage}`)) - }) + nodeRequest + .get(options, (error, response) => { + if (error) return reject(error) + if (response.statusCode !== 200) reject(new 
Error(`${response.statusCode} ${response.statusMessage}`)) + }) .pipe(fs.createWriteStream(destination).on('finish', () => resolve(null))) }) } @@ -84,4 +85,4 @@ class PackagistFetch extends AbstractFetch { } } -module.exports = options => new PackagistFetch(options) +module.exports = (options) => new PackagistFetch(options) diff --git a/providers/fetch/podFetch.js b/providers/fetch/podFetch.js index f30890ba..5e3665c5 100644 --- a/providers/fetch/podFetch.js +++ b/providers/fetch/podFetch.js @@ -13,7 +13,7 @@ const FetchResult = require('../../lib/fetchResult') const services = { trunk: 'https://trunk.cocoapods.org/api/v1', - specs: 'https://raw.githubusercontent.com/CocoaPods/Specs/master' + specs: 'https://raw.githubusercontent.com/CocoaPods/Specs/master', } class PodFetch extends AbstractFetch { @@ -43,7 +43,7 @@ class PodFetch extends AbstractFetch { fetchResult.document = { location: location, registryData: registryData, - releaseDate: version.created_at + releaseDate: version.created_at, } if (registryData.name) { @@ -60,9 +60,9 @@ class PodFetch extends AbstractFetch { registryData = await request({ url: `${services.specs}/Specs/${this._masterRepoPathFragment(spec, [1, 1, 1])}/${spec.name}.podspec.json`, headers: { - Authorization: this.options.githubToken ? `token ${this.options.githubToken}` : '' + Authorization: this.options.githubToken ? `token ${this.options.githubToken}` : '', }, - json: true + json: true, }) } catch (exception) { if (exception.statusCode !== 404) throw exception @@ -97,7 +97,7 @@ class PodFetch extends AbstractFetch { await this.decompress(archive, output) resolve(output) }) - .on('error', reject) + .on('error', reject), ) }) } @@ -120,25 +120,25 @@ class PodFetch extends AbstractFetch { const cloneCommands = [ `git -C "${dir.name}" clone ${cloneOptions.join(' ')} ${repo} "${outputDirName}"`, - `git -C "${output}" reset --quiet --hard ${rev}` + `git -C "${output}" reset --quiet --hard ${rev}`, ] return new Promise((resolve, reject) => { - exec(cloneCommands.join(' && '), error => (error ? reject(error) : resolve(output))) + exec(cloneCommands.join(' && '), (error) => (error ? 
reject(error) : resolve(output))) }) } async _getVersion(spec) { // Example: https://trunk.cocoapods.org/api/v1/pods/SwiftLCS const { body, statusCode } = await requestRetry.get(`${services.trunk}/pods/${spec.name}`, { - json: true + json: true, }) if (statusCode === 200 && body.versions) { const versions = body.versions if (spec.revision) { - return versions.find(version => version.name === spec.revision) + return versions.find((version) => version.name === spec.revision) } else { return versions[versions.length - 1] // the versions are already sorted } @@ -151,10 +151,7 @@ class PodFetch extends AbstractFetch { // Ported from: https://www.rubydoc.info/gems/cocoapods-core/Pod%2FSource%2FMetadata:path_fragment let prefixes if (prefixLengths.length > 0) { - let hashedName = crypto - .createHash('md5') - .update(spec.name) - .digest('hex') + let hashedName = crypto.createHash('md5').update(spec.name).digest('hex') prefixes = prefixLengths.map(function (length) { const prefix = hashedName.slice(0, length) hashedName = hashedName.substring(length) @@ -174,4 +171,4 @@ class PodFetch extends AbstractFetch { } } -module.exports = options => new PodFetch(options) +module.exports = (options) => new PodFetch(options) diff --git a/providers/fetch/pypiFetch.js b/providers/fetch/pypiFetch.js index fa7d8731..09b687ac 100644 --- a/providers/fetch/pypiFetch.js +++ b/providers/fetch/pypiFetch.js @@ -10,7 +10,7 @@ const { findLastKey, get, find, clone } = require('lodash') const FetchResult = require('../../lib/fetchResult') const providerMap = { - pypi: 'https://pypi.python.org' + pypi: 'https://pypi.python.org', } class PyPiFetch extends AbstractFetch { @@ -47,7 +47,7 @@ class PyPiFetch extends AbstractFetch { async _getRegistryData(spec) { const baseUrl = providerMap.pypi const { body, statusCode } = await requestRetry.get(`${baseUrl}/pypi/${spec.name}/json`, { - json: true + json: true, }) if (statusCode !== 200 || !body) return null return body @@ -66,7 +66,7 @@ class PyPiFetch extends AbstractFetch { _extractReleaseDate(spec, registryData) { const releaseTypes = get(registryData, ['releases', spec.revision]) - const release = find(releaseTypes, entry => { + const release = find(releaseTypes, (entry) => { return entry.url && entry.url.length > 6 && entry.url.slice(-6) === 'tar.gz' }) if (!release) return @@ -95,7 +95,7 @@ class PyPiFetch extends AbstractFetch { async _getPackage(spec, registryData, destination) { const releaseTypes = get(registryData, ['releases', spec.revision]) - const release = find(releaseTypes, entry => entry.url?.endsWith('tar.gz') || entry.url?.endsWith('zip')) + const release = find(releaseTypes, (entry) => entry.url?.endsWith('tar.gz') || entry.url?.endsWith('zip')) if (!release) return false return new Promise((resolve, reject) => { @@ -109,4 +109,4 @@ class PyPiFetch extends AbstractFetch { } } -module.exports = options => new PyPiFetch(options) +module.exports = (options) => new PyPiFetch(options) diff --git a/providers/fetch/requestRetryWithDefaults.js b/providers/fetch/requestRetryWithDefaults.js index e04b4012..741cad1d 100644 --- a/providers/fetch/requestRetryWithDefaults.js +++ b/providers/fetch/requestRetryWithDefaults.js @@ -1,3 +1,3 @@ const requestRetryWithDefaults = require('requestretry').defaults({ maxAttempts: 3, fullResponse: true }) -module.exports = requestRetryWithDefaults \ No newline at end of file +module.exports = requestRetryWithDefaults diff --git a/providers/fetch/rubyGemsFetch.js b/providers/fetch/rubyGemsFetch.js index c3838c70..4e1bb0a6 100644 --- 
a/providers/fetch/rubyGemsFetch.js
+++ b/providers/fetch/rubyGemsFetch.js
@@ -12,7 +12,7 @@ const FetchResult = require('../../lib/fetchResult')
 const { extractDate } = require('../../lib/utils')
 
 const providerMap = {
-  rubyGems: 'https://rubygems.org'
+  rubyGems: 'https://rubygems.org',
 }
 
 class RubyGemsFetch extends AbstractFetch {
@@ -49,7 +49,7 @@ class RubyGemsFetch extends AbstractFetch {
   async _getRegistryData(spec) {
     const baseUrl = providerMap.rubyGems
     const { body, statusCode } = await requestRetry.get(`${baseUrl}/api/v1/gems/${spec.name}.json`, {
-      json: true
+      json: true,
     })
     return statusCode === 200 && body ? body : null
   }
@@ -77,8 +77,8 @@ class RubyGemsFetch extends AbstractFetch {
     await new Promise((resolve, reject) => {
       fs.createReadStream(`${dirName}/metadata.gz`)
         .pipe(zlib.createGunzip())
-        .on('data', data => {
-          fs.writeFile(`${dirName}/metadata.txt`, data, error => {
+        .on('data', (data) => {
+          fs.writeFile(`${dirName}/metadata.txt`, data, (error) => {
             if (error) return reject(error)
             return resolve()
           })
@@ -105,4 +105,4 @@
   }
 }
 
-module.exports = options => new RubyGemsFetch(options)
+module.exports = (options) => new RubyGemsFetch(options)
diff --git a/providers/filter/filter.js b/providers/filter/filter.js
index e38fc855..5f23dc04 100644
--- a/providers/filter/filter.js
+++ b/providers/filter/filter.js
@@ -23,7 +23,7 @@ class StandardFilter extends AbstractProcessor {
   }
 
   _getProcessor(request) {
-    return this.processors.filter(processor => processor.canHandle(request))[0]
+    return this.processors.filter((processor) => processor.canHandle(request))[0]
   }
 }
diff --git a/providers/index.js b/providers/index.js
index e73ac303..3c772e42 100644
--- a/providers/index.js
+++ b/providers/index.js
@@ -7,7 +7,7 @@ const providers = require('../ghcrawler').providers
 module.exports = {
   filter: {
     provider: 'filter',
-    filter: require('./filter/filter')
+    filter: require('./filter/filter'),
   },
   fetch: {
     cdDispatch: require('./fetch/dispatcher'),
@@ -24,7 +24,7 @@ module.exports = {
     npmjs: require('./fetch/npmjsFetch'),
     nuget: require('./fetch/nugetFetch'),
     pypi: require('./fetch/pypiFetch'),
-    rubygems: require('./fetch/rubyGemsFetch')
+    rubygems: require('./fetch/rubyGemsFetch'),
   },
   process: {
     cdsource: require('./process/sourceExtract'),
@@ -48,13 +48,13 @@ module.exports = {
     scancode: require('./process/scancode'),
     fossology: require('./process/fossology'),
     source: require('./process/source').processor,
-    top: require('./process/top')
+    top: require('./process/top'),
   },
   store: {
     cdDispatch: require('./store/storeDispatcher'),
     webhook: require('./store/webhookDeltaStore'),
     azqueue: require('./store/azureQueueStore'),
     'cd(azblob)': AttachmentStoreFactory(providers.store.azblob),
-    'cd(file)': AttachmentStoreFactory(providers.store.file)
-  }
+    'cd(file)': AttachmentStoreFactory(providers.store.file),
+  },
 }
diff --git a/providers/logging/insights.js b/providers/logging/insights.js
index 26d9e408..7d194f99 100644
--- a/providers/logging/insights.js
+++ b/providers/logging/insights.js
@@ -15,11 +15,7 @@ class Insights {
     if (appInsights.defaultClient instanceof Insights) return
     if (!key || key === 'mock') appInsights.defaultClient = new Insights(tattoos, null, echo)
     else {
-      appInsights
-        .setup(key)
-        .setAutoCollectPerformance(false)
-        .setAutoCollectDependencies(false)
-        .start()
+      appInsights.setup(key).setAutoCollectPerformance(false).setAutoCollectDependencies(false).start()
       appInsights.defaultClient = new Insights(tattoos,
appInsights.defaultClient, echo) } } @@ -43,8 +39,7 @@ class Insights { const severities = ['V', 'I', 'W', 'E', 'C'] const propertyString = JSON.stringify(traceTelemetry.properties) if (this.client) this.client.trackTrace(traceTelemetry) - if (this.echo) - console.log(`[${severities[traceTelemetry.severity]}] ${traceTelemetry.message} ${propertyString}`) + if (this.echo) console.log(`[${severities[traceTelemetry.severity]}] ${traceTelemetry.message} ${propertyString}`) } tattoo(telemetry) { diff --git a/providers/logging/logger.js b/providers/logging/logger.js index 147880e5..73f4ad42 100644 --- a/providers/logging/logger.js +++ b/providers/logging/logger.js @@ -14,7 +14,7 @@ function factory(tattoos) { insights: appInsights, treatErrorsAsExceptions: true, exitOnError: false, - level: 'info' + level: 'info', }) return result } diff --git a/providers/process/abstractClearlyDefinedProcessor.js b/providers/process/abstractClearlyDefinedProcessor.js index 506d860c..154d6749 100644 --- a/providers/process/abstractClearlyDefinedProcessor.js +++ b/providers/process/abstractClearlyDefinedProcessor.js @@ -39,13 +39,13 @@ class AbstractClearlyDefinedProcessor extends AbstractProcessor { const fileList = await this.filterFiles(location) const files = await Promise.all( fileList.map( - throat(10, async file => { + throat(10, async (file) => { if (this._isInterestinglyNamed(file, interestingRoot)) await this.attachFiles(request.document, [file], location) const hashes = await this.computeHashes(path.join(location, file)) return { path: file, hashes } - }) - ) + }), + ), ) request.document.files = files } @@ -62,7 +62,7 @@ class AbstractClearlyDefinedProcessor extends AbstractProcessor { 'NOTICE', 'NOTICES', 'CONTRIBUTORS', - 'PATENTS' + 'PATENTS', ] const extensions = ['.MD', '.HTML', '.TXT'] const extension = path.extname(name) @@ -74,11 +74,11 @@ class AbstractClearlyDefinedProcessor extends AbstractProcessor { async _computeSize(location) { let count = 0 const bytes = await du(location, { - filter: file => { + filter: (file) => { if (isGitFile(file)) return false count++ return true - } + }, }) return { k: Math.round(bytes / 1024), count } } diff --git a/providers/process/abstractProcessor.js b/providers/process/abstractProcessor.js index 4ccb5d32..b6766698 100644 --- a/providers/process/abstractProcessor.js +++ b/providers/process/abstractProcessor.js @@ -16,7 +16,7 @@ class AbstractProcessor extends BaseHandler { constructor(options) { super(options) this._schemaVersion = this.aggregateVersions( - this._collectClasses().map(entry => entry.schemaVersion || entry.toolVersion) + this._collectClasses().map((entry) => entry.schemaVersion || entry.toolVersion), ) } @@ -66,19 +66,17 @@ class AbstractProcessor extends BaseHandler { if (!version) return result if (typeof version !== 'string') throw new Error(`Invalid processor version ${version}`) const parts = version.split('.') - if (parts.length !== 3 || parts.some(part => isNaN(+part))) throw new Error(`${errorRoot}: ${version}`) + if (parts.length !== 3 || parts.some((part) => isNaN(+part))) throw new Error(`${errorRoot}: ${version}`) for (let i = 0; i < 3; i++) result[i] += +parts[i] return result }, - [0, 0, 0] + [0, 0, 0], ) .join('.') } _computeToken(content) { - return shajs('sha256') - .update(content) - .digest('hex') + return shajs('sha256').update(content).digest('hex') } /** @@ -92,7 +90,7 @@ class AbstractProcessor extends BaseHandler { if (!files || !files.length) return if (!document._attachments) Object.defineProperty(document, 
'_attachments', { value: [], enumerable: false }) document.attachments = document.attachments || [] - files.forEach(file => { + files.forEach((file) => { const fullPath = path.join(location, file) const attachment = fs.readFileSync(fullPath, 'utf8') const token = this._computeToken(attachment) @@ -114,12 +112,12 @@ class AbstractProcessor extends BaseHandler { if (!locationStat.isDirectory()) return [location] const subdirs = await readdir(location) const files = await Promise.all( - subdirs.map(subdir => { + subdirs.map((subdir) => { const entry = path.resolve(location, subdir) return this.getFiles(entry) - }) + }), ) - return flatten(files).filter(x => x) + return flatten(files).filter((x) => x) } /** @@ -132,14 +130,14 @@ class AbstractProcessor extends BaseHandler { async getFolders(location, ignorePaths = []) { const subdirs = await readdir(location) const folders = await Promise.all( - subdirs.map(async subdir => { + subdirs.map(async (subdir) => { const entry = path.resolve(location, subdir) const entryStat = await lstat(entry) if (entryStat.isSymbolicLink() || !entryStat.isDirectory()) return [] return [entry, ...(await this.getFolders(entry))] - }) + }), ) - return flatten(folders).filter(folder => folder && !ignorePaths.some(ignorePath => folder.includes(ignorePath))) + return flatten(folders).filter((folder) => folder && !ignorePaths.some((ignorePath) => folder.includes(ignorePath))) } /** @@ -150,8 +148,8 @@ class AbstractProcessor extends BaseHandler { */ async filterFiles(location) { const fullList = await this.getFiles(location) - const filteredList = fullList.filter(file => file && !isGitFile(file)) - return trimAllParents(filteredList, location).filter(x => x) + const filteredList = fullList.filter((file) => file && !isGitFile(file)) + return trimAllParents(filteredList, location).filter((x) => x) } shouldFetch() { @@ -224,8 +222,8 @@ class AbstractProcessor extends BaseHandler { } addLocalToolTasks(request, ...tools) { - const toolList = tools.length ? tools : ['licensee', 'scancode', 'reuse'/*, 'fossology'*/] - toolList.forEach(tool => this.linkAndQueueTool(request, tool, undefined, 'local')) + const toolList = tools.length ? 
tools : ['licensee', 'scancode', 'reuse' /*, 'fossology'*/] + toolList.forEach((tool) => this.linkAndQueueTool(request, tool, undefined, 'local')) } } diff --git a/providers/process/component.js b/providers/process/component.js index c8f29058..ef58f0df 100644 --- a/providers/process/component.js +++ b/providers/process/component.js @@ -24,4 +24,4 @@ class ComponentProcessor extends AbstractProcessor { } } -module.exports = options => new ComponentProcessor(options) +module.exports = (options) => new ComponentProcessor(options) diff --git a/providers/process/composerExtract.js b/providers/process/composerExtract.js index 46efc7ab..fa6a512c 100644 --- a/providers/process/composerExtract.js +++ b/providers/process/composerExtract.js @@ -61,7 +61,7 @@ class ComposerExtract extends AbstractClearlyDefinedProcessor { if (typeof manifest.bugs === 'string' && manifest.bugs.startsWith('http')) candidateUrls.push(manifest.bugs) else candidateUrls.push(manifest.bugs.url) } - return candidateUrls.filter(e => e) + return candidateUrls.filter((e) => e) } async _discoverSource(manifest, registryManifest) { @@ -72,7 +72,7 @@ class ComposerExtract extends AbstractClearlyDefinedProcessor { // TODO lookup source discovery in a set of services that have their own configuration return this.sourceFinder(registryManifest.version, candidates, { githubToken: this.options.githubToken, - logger: this.logger + logger: this.logger, }) } diff --git a/providers/process/condaExtract.js b/providers/process/condaExtract.js index b7b1f9a3..7dd4c856 100644 --- a/providers/process/condaExtract.js +++ b/providers/process/condaExtract.js @@ -41,12 +41,12 @@ class CondaExtract extends AbstractClearlyDefinedProcessor { registryData.channelData.home, registryData.channelData.dev_url, registryData.channelData.doc_url, - registryData.channelData.doc_source_url].filter(e => e) + registryData.channelData.doc_source_url, + ].filter((e) => e) let sourceInfo = undefined - const githubSource = await this.sourceFinder( - registryData.repoData.packageData.version, sourceCandidates, { + const githubSource = await this.sourceFinder(registryData.repoData.packageData.version, sourceCandidates, { githubToken: this.options.githubToken, - logger: this.logger + logger: this.logger, }) if (githubSource) { sourceInfo = githubSource @@ -60,4 +60,4 @@ class CondaExtract extends AbstractClearlyDefinedProcessor { } } -module.exports = (options, sourceFinder) => new CondaExtract(options, sourceFinder || sourceDiscovery) \ No newline at end of file +module.exports = (options, sourceFinder) => new CondaExtract(options, sourceFinder || sourceDiscovery) diff --git a/providers/process/condaSrcExtract.js b/providers/process/condaSrcExtract.js index a91cbb1d..97d071d9 100644 --- a/providers/process/condaSrcExtract.js +++ b/providers/process/condaSrcExtract.js @@ -22,4 +22,4 @@ class CondaSrcExtract extends AbstractClearlyDefinedProcessor { } } -module.exports = (options) => new CondaSrcExtract(options) \ No newline at end of file +module.exports = (options) => new CondaSrcExtract(options) diff --git a/providers/process/crateExtract.js b/providers/process/crateExtract.js index 2e6a3c97..539a6ad5 100644 --- a/providers/process/crateExtract.js +++ b/providers/process/crateExtract.js @@ -42,7 +42,7 @@ class CrateExtract extends AbstractClearlyDefinedProcessor { _discoverSource(manifest, registryData) { return this.sourceFinder(registryData.num, [manifest.repository, manifest.homepage, manifest.documentation], { githubToken: this.options.githubToken, - logger: 
this.logger + logger: this.logger, }) } } diff --git a/providers/process/debExtract.js b/providers/process/debExtract.js index 49799414..890d41db 100644 --- a/providers/process/debExtract.js +++ b/providers/process/debExtract.js @@ -39,14 +39,19 @@ class DebExtract extends AbstractClearlyDefinedProcessor { _createDocument(request, spec, registryData) { const { releaseDate, copyrightUrl, declaredLicenses } = request.document - request.document = merge(this.clone(request.document), { registryData, releaseDate, copyrightUrl, declaredLicenses }) + request.document = merge(this.clone(request.document), { + registryData, + releaseDate, + copyrightUrl, + declaredLicenses, + }) const sourceInfo = this._discoverSource(spec, registryData) if (sourceInfo) request.document.sourceInfo = sourceInfo } _discoverSource(spec, registryData) { const [revision, architecture] = spec.revision.split('_') - const source = (registryData.find(entry => entry.Architecture === architecture) || {}).Source + const source = (registryData.find((entry) => entry.Architecture === architecture) || {}).Source if (source) { const result = SourceSpec.fromObject(spec) result.type = 'debsrc' diff --git a/providers/process/debsrcExtract.js b/providers/process/debsrcExtract.js index ed93e515..051eeb09 100644 --- a/providers/process/debsrcExtract.js +++ b/providers/process/debsrcExtract.js @@ -18,8 +18,13 @@ class DebSrcExtract extends AbstractClearlyDefinedProcessor { await super.handle(request) // Re-arrange these fields to be at the end const { releaseDate, registryData, copyrightUrl, declaredLicenses } = request.document - request.document = merge(this.clone(request.document), { releaseDate, registryData, copyrightUrl, declaredLicenses }) + request.document = merge(this.clone(request.document), { + releaseDate, + registryData, + copyrightUrl, + declaredLicenses, + }) } } -module.exports = options => new DebSrcExtract(options) +module.exports = (options) => new DebSrcExtract(options) diff --git a/providers/process/fossology.js b/providers/process/fossology.js index 0f47ce84..68ebef31 100644 --- a/providers/process/fossology.js +++ b/providers/process/fossology.js @@ -51,19 +51,19 @@ class FossologyProcessor extends AbstractProcessor { async _runNomos(request) { const parameters = [] - const result = await new Promise(resolve => { + const result = await new Promise((resolve) => { let data = '' const nomos = spawn(`${this.options.installDir}/nomos/agent/nomossa`, [ '-ld', request.document.location, - ...parameters + ...parameters, ]) - nomos.stdout.on('data', chunk => { + nomos.stdout.on('data', (chunk) => { if (data) data += chunk else data = chunk }) nomos - .on('error', error => { + .on('error', (error) => { this.logger.error(error) resolve(null) }) @@ -73,7 +73,7 @@ class FossologyProcessor extends AbstractProcessor { }) const output = { contentType: 'text/plain', - content: result.replace(new RegExp(`${request.document.location}/`, 'g'), '') + content: result.replace(new RegExp(`${request.document.location}/`, 'g'), ''), } return { version: this._nomosVersion, parameters: parameters.join(' '), output } } @@ -93,8 +93,8 @@ class FossologyProcessor extends AbstractProcessor { async _runCopyright(request, files, root) { const parameters = ['-J'] - const output = await this._visitFiles(files, file => - this._runCopyrightOnFile(request, path.join(root, file), parameters) + const output = await this._visitFiles(files, (file) => + this._runCopyrightOnFile(request, path.join(root, file), parameters), ) return { version: 
this._copyrightVersion, parameters, output }
   }
 
@@ -104,7 +104,7 @@
       const { stdout } = await execFile(
         `${this.options.installDir}/copyright/agent/copyright`,
         ['--files', file, ...parameters],
-        { cwd: `${this.options.installDir}/copyright/agent` }
+        { cwd: `${this.options.installDir}/copyright/agent` },
       )
       return stdout
     } catch (error) {
@@ -120,21 +120,21 @@
     const chunkSize = 500
     const output = {
       contentType: 'text/plain',
-      content: ''
+      content: '',
     }
     for (let i = 0; i < files.length; i += chunkSize) {
-      const fileArguments = files.slice(i, i + chunkSize).map(file => path.join(root, file))
-      const result = await new Promise(resolve => {
+      const fileArguments = files.slice(i, i + chunkSize).map((file) => path.join(root, file))
+      const result = await new Promise((resolve) => {
         let data = ''
         const monk = spawn(`${this.options.installDir}/monk/agent/monk`, [...parameters, ...fileArguments], {
-          cwd: `${this.options.installDir}/monk/agent`
+          cwd: `${this.options.installDir}/monk/agent`,
         })
-        monk.stdout.on('data', chunk => {
+        monk.stdout.on('data', (chunk) => {
           if (data) data += chunk
           else data = chunk
         })
         monk
-          .on('error', error => {
+          .on('error', (error) => {
             this.logger.error(error)
             resolve(null)
           })
@@ -193,4 +193,4 @@
   }
 }
 
-module.exports = options => new FossologyProcessor(options)
+module.exports = (options) => new FossologyProcessor(options)
diff --git a/providers/process/fsfeReuse.js b/providers/process/fsfeReuse.js
index d7d93a54..ed7a12ac 100644
--- a/providers/process/fsfeReuse.js
+++ b/providers/process/fsfeReuse.js
@@ -5,7 +5,10 @@ const AbstractProcessor = require('./abstractProcessor')
 const { promisify } = require('util')
 const execFile = promisify(require('child_process').execFile)
 const { merge } = require('lodash')
-const { readdirSync, promises: { readFile } } = require('fs')
+const {
+  readdirSync,
+  promises: { readFile },
+} = require('fs')
 
 class FsfeReuseProcessor extends AbstractProcessor {
   constructor(options) {
@@ -39,13 +42,17 @@
     if (!record) return
     const location = request.document.location
     request.document = merge(this.clone(request.document), { reuse: record })
-    this.attachFiles(request.document, record.licenses.map(file => file.filePath), location)
+    this.attachFiles(
+      request.document,
+      record.licenses.map((file) => file.filePath),
+      location,
+    )
   }
 
   async _run(request) {
     const root = request.document.location
-    const { name: outFileName } = this.createTempFile(request)
-    const parameters = [('spdx'), '-o', outFileName]
+    const { name: outFileName } = this.createTempFile(request)
+    const parameters = ['spdx', '-o', outFileName]
     try {
       await execFile('reuse', parameters, { cwd: root })
       const out = await readFile(outFileName, 'utf8')
@@ -53,7 +60,10 @@
     const results = { metadata: {}, files: [], licenses: this._getLicenses(request) }
     // REUSE SPDX results are grouped in sections that are separated with two newlines
     // The first result group contains generic result metadata, the following ones represent a file each. We process both variants in a single loop...
-      out.trim().split(/\n\n/).forEach((spdxResult, entryIndex) => this._handleResultSection(spdxResult, entryIndex, results))
+      out
+        .trim()
+        .split(/\n\n/)
+        .forEach((spdxResult, entryIndex) => this._handleResultSection(spdxResult, entryIndex, results))
       return results
     } catch (error) {
       request.markDead('Error', error ? error.message : 'REUSE run failed')
     }
   }
 
   _handleResultSection(spdxResult, entryIndex, results) {
     const spdxResultFile = {}
     const spdxRawValues = spdxResult.split(/\n/)
     // Each line represents a single result attribute
-    spdxRawValues.forEach(spdxRawValue => this._handleResultAttribute(spdxRawValue, entryIndex, results, spdxResultFile))
+    spdxRawValues.forEach((spdxRawValue) =>
+      this._handleResultAttribute(spdxRawValue, entryIndex, results, spdxResultFile),
+    )
     // Generic metadata was already added to results.metadata
     // In case we have file metadata, all attributes are read now and information can be added to the file results
     if (entryIndex > 0) {
@@ -75,7 +87,11 @@
   _handleResultAttribute(spdxRawValue, entryIndex, results, spdxResultFile) {
     const spdxMatchResult = spdxRawValue.match(/((?<first_key>\w+):\s)((?<second_key>\w+):\s)?(?<spdx_value>.+)/)
     if (spdxMatchResult !== null) {
-      const spdxResultValue = { key: spdxMatchResult.groups.first_key, secondaryKey: spdxMatchResult.groups.second_key, spdxValue: spdxMatchResult.groups.spdx_value.replace(/(<\/?([^>]+)>)/g, '') }
+      const spdxResultValue = {
+        key: spdxMatchResult.groups.first_key,
+        secondaryKey: spdxMatchResult.groups.second_key,
+        spdxValue: spdxMatchResult.groups.spdx_value.replace(/(<\/?([^>]+)>)/g, ''),
+      }
       // First result section contains generic metadata, any other section attributes for a particular file
       if (entryIndex === 0) {
         this._addMetadataAttribute(spdxResultValue, results)
@@ -88,7 +104,8 @@
   _addMetadataAttribute(spdxResultValue, results) {
     // Relationship attributes are ignored on purpose as they won't be used later and would only consume memory...
     if (spdxResultValue.key !== 'Relationship') {
-      results.metadata[spdxResultValue.key + (spdxResultValue.secondaryKey ? spdxResultValue.secondaryKey : '')] = spdxResultValue.spdxValue
+      results.metadata[spdxResultValue.key + (spdxResultValue.secondaryKey ? spdxResultValue.secondaryKey : '')] =
+        spdxResultValue.spdxValue
     }
   }
@@ -103,7 +120,8 @@
     if (spdxResultValue.key === 'FileCopyrightText' && attributeValue.startsWith('SPDX-FileCopyrightText: ')) {
       attributeValue = attributeValue.substring(24)
     }
-    spdxResultFile[spdxResultValue.key + (spdxResultValue.secondaryKey ? spdxResultValue.secondaryKey : '')] = attributeValue
+    spdxResultFile[spdxResultValue.key + (spdxResultValue.secondaryKey ?
spdxResultValue.secondaryKey : '')] = + attributeValue } _getLicenses(request) { @@ -111,9 +129,10 @@ class FsfeReuseProcessor extends AbstractProcessor { const licensesDir = 'LICENSES' try { const licenseFiles = readdirSync(request.document.location + '/' + licensesDir) - licenseFiles.forEach(file => { + licenseFiles.forEach((file) => { licenses.push({ - filePath: licensesDir + '/' + file, spdxId: file.substring(0, file.indexOf('.txt')) + filePath: licensesDir + '/' + file, + spdxId: file.substring(0, file.indexOf('.txt')), }) }) } catch (error) { @@ -125,20 +144,20 @@ class FsfeReuseProcessor extends AbstractProcessor { _detectVersion() { if (this._versionPromise !== undefined) return this._versionPromise this._versionPromise = execFile('reuse', ['--version']) - .then(result => { + .then((result) => { const reuseRegex = /reuse\s+(\d+\.\d+(\.\d+)?)/i this._toolVersion = result.stdout.trim().match(reuseRegex)[1] this._schemaVersion = this.aggregateVersions( [this._schemaVersion, this.toolVersion, this.configVersion], - 'Invalid REUSE version' + 'Invalid REUSE version', ) return this._schemaVersion }) - .catch(error => { + .catch((error) => { if (error) this.logger.log(`Could not detect version of REUSE: ${error.message}`) }) return this._versionPromise } } -module.exports = options => new FsfeReuseProcessor(options) +module.exports = (options) => new FsfeReuseProcessor(options) diff --git a/providers/process/gemExtract.js b/providers/process/gemExtract.js index 328f8009..d421fba4 100644 --- a/providers/process/gemExtract.js +++ b/providers/process/gemExtract.js @@ -44,7 +44,7 @@ class GemExtract extends AbstractClearlyDefinedProcessor { candidates.push(get(registryData, 'homepage_uri')) candidates.push(get(registryData, 'mailing_list_uri')) candidates.push(get(registryData, 'source_code_uri')) - const allCandidates = candidates.filter(e => e) + const allCandidates = candidates.filter((e) => e) return this.sourceFinder(version, allCandidates, { githubToken: this.options.githubToken, logger: this.logger }) } diff --git a/providers/process/goExtract.js b/providers/process/goExtract.js index 1cc04964..1035dea2 100644 --- a/providers/process/goExtract.js +++ b/providers/process/goExtract.js @@ -31,4 +31,4 @@ class GoExtract extends AbstractClearlyDefinedProcessor { } } -module.exports = (options, sourceFinder) => new GoExtract(options, sourceFinder || sourceDiscovery) \ No newline at end of file +module.exports = (options, sourceFinder) => new GoExtract(options, sourceFinder || sourceDiscovery) diff --git a/providers/process/licensee.js b/providers/process/licensee.js index dbe401c8..42cb9905 100644 --- a/providers/process/licensee.js +++ b/providers/process/licensee.js @@ -41,7 +41,7 @@ class LicenseeProcessor extends AbstractProcessor { if (!record) return const location = request.document.location request.document = merge(this.clone(request.document), { licensee: record }) - const toAttach = record.output.content.matched_files.map(file => file.filename) + const toAttach = record.output.content.matched_files.map((file) => file.filename) this.attachFiles(request.document, toAttach, location) } @@ -51,18 +51,18 @@ class LicenseeProcessor extends AbstractProcessor { const subfolders = await this.getFolders(root, ['/.git']) const paths = ['', ...trimAllParents(subfolders, root)] try { - const results = (await Promise.all( - paths.map(throat(10, path => this._runOnFolder(path, root, parameters))) - )).filter(x => x) - const licenses = uniqBy(flatten(results.map(result => result.licenses)), 
'spdx_id') - const matched_files = flatten(results.map(result => result.matched_files)) + const results = ( + await Promise.all(paths.map(throat(10, (path) => this._runOnFolder(path, root, parameters)))) + ).filter((x) => x) + const licenses = uniqBy(flatten(results.map((result) => result.licenses)), 'spdx_id') + const matched_files = flatten(results.map((result) => result.matched_files)) return { version: this.toolVersion, parameters: parameters, output: { contentType: 'application/json', - content: { licenses, matched_files } - } + content: { licenses, matched_files }, + }, } } catch (exception) { request.markDead('Error', exception ? exception.message : 'Licensee run failed') @@ -75,7 +75,7 @@ class LicenseeProcessor extends AbstractProcessor { const stdout = await this._runLicensee(parameters, path.join(root, folder)) if (!stdout.trim()) return const result = JSON.parse(stdout) - result.matched_files.forEach(file => (file.filename = `${folder ? folder + '/' : ''}${file.filename}`)) + result.matched_files.forEach((file) => (file.filename = `${folder ? folder + '/' : ''}${file.filename}`)) return result } catch (error) { // Licensee fails with code = 1 if there are no license files found in the given folder. @@ -93,19 +93,19 @@ class LicenseeProcessor extends AbstractProcessor { _detectVersion() { if (this._versionPromise !== undefined) return this._versionPromise this._versionPromise = execFile('licensee', ['version']) - .then(result => { + .then((result) => { this._toolVersion = result.stdout.trim() this._schemaVersion = this.aggregateVersions( [this._schemaVersion, this.toolVersion, this.configVersion], - 'Invalid Licensee version' + 'Invalid Licensee version', ) return this._schemaVersion }) - .catch(error => { + .catch((error) => { if (error) this.logger.log(`Could not detect version of Licensee: ${error.message}`) }) return this._versionPromise } } -module.exports = options => new LicenseeProcessor(options) +module.exports = (options) => new LicenseeProcessor(options) diff --git a/providers/process/mavenExtract.js b/providers/process/mavenExtract.js index 0b24777d..8c7ee7b2 100644 --- a/providers/process/mavenExtract.js +++ b/providers/process/mavenExtract.js @@ -42,7 +42,7 @@ class MavenExtract extends AbstractClearlyDefinedProcessor { _discoverCandidateSourceLocations(manifest) { const candidateUrls = [] candidateUrls.push(get(manifest, 'summary.scm.0.url.0')) - return candidateUrls.filter(e => e) + return candidateUrls.filter((e) => e) } async _discoverSource(spec, manifest) { @@ -50,7 +50,7 @@ class MavenExtract extends AbstractClearlyDefinedProcessor { // TODO lookup source discovery in a set of services that have their own configuration const githubSource = await this.sourceFinder(spec.revision, manifestCandidates, { githubToken: this.options.githubToken, - logger: this.logger + logger: this.logger, }) if (githubSource) return githubSource // didn't find any source in GitHub so make up a sources url to try if the registry thinks there is source diff --git a/providers/process/npmExtract.js b/providers/process/npmExtract.js index a3e99f92..7f55651e 100644 --- a/providers/process/npmExtract.js +++ b/providers/process/npmExtract.js @@ -62,7 +62,7 @@ class NpmExtract extends AbstractClearlyDefinedProcessor { if (typeof manifest.bugs === 'string' && manifest.bugs.startsWith('http')) candidateUrls.push(manifest.bugs) else candidateUrls.push(manifest.bugs.url) } - return candidateUrls.filter(e => e) + return candidateUrls.filter((e) => e) } async _discoverSource(manifest, 
registryManifest) { @@ -73,7 +73,7 @@ class NpmExtract extends AbstractClearlyDefinedProcessor { // TODO lookup source discovery in a set of services that have their own configuration return this.sourceFinder(registryManifest.version, candidates, { githubToken: this.options.githubToken, - logger: this.logger + logger: this.logger, }) } diff --git a/providers/process/nugetExtract.js b/providers/process/nugetExtract.js index 76958734..e4de1a55 100644 --- a/providers/process/nugetExtract.js +++ b/providers/process/nugetExtract.js @@ -78,7 +78,7 @@ class NuGetExtract extends AbstractClearlyDefinedProcessor { const candidates = [...nuspecCandidates, ...manifestCandidates, ...latestNuspecCandidates] return this.sourceFinder(manifest.version, candidates, { githubToken: this.options.githubToken, - logger: this.logger + logger: this.logger, }) } diff --git a/providers/process/package.js b/providers/process/package.js index ef526920..9b7790f5 100644 --- a/providers/process/package.js +++ b/providers/process/package.js @@ -24,4 +24,4 @@ class PackageProcessor extends AbstractProcessor { } } -module.exports = { processor: options => new PackageProcessor(options), supportedTypes } +module.exports = { processor: (options) => new PackageProcessor(options), supportedTypes } diff --git a/providers/process/podExtract.js b/providers/process/podExtract.js index ff3e73a5..dc9121d8 100644 --- a/providers/process/podExtract.js +++ b/providers/process/podExtract.js @@ -61,7 +61,7 @@ class PodExtract extends AbstractClearlyDefinedProcessor { // there is no way to pass the branch/tag/commit we have in the manifest return this.sourceFinder(registryData.version, sources, { githubToken: this.options.githubToken, - logger: this.logger + logger: this.logger, }) } } diff --git a/providers/process/pypiExtract.js b/providers/process/pypiExtract.js index 278de711..e2764112 100644 --- a/providers/process/pypiExtract.js +++ b/providers/process/pypiExtract.js @@ -45,14 +45,14 @@ class PyPiExtract extends AbstractClearlyDefinedProcessor { candidates.push(get(registryData, 'info.package_url')) candidates.push(get(registryData, 'info.project_url')) candidates.push(get(registryData, 'info.release_url')) - const allCandidates = candidates.filter(e => e) + const allCandidates = candidates.filter((e) => e) return this.sourceFinder(revision, allCandidates, { githubToken: this.options.githubToken, logger: this.logger }) } async _createDocument(request, spec, registryData) { request.document = merge(this.clone(request.document), { registryData, - declaredLicense: request.document.declaredLicense + declaredLicense: request.document.declaredLicense, }) const sourceInfo = await this._discoverSource(spec.revision, registryData) if (sourceInfo) request.document.sourceInfo = sourceInfo diff --git a/providers/process/scancode.js b/providers/process/scancode.js index 592bdd21..537dce98 100644 --- a/providers/process/scancode.js +++ b/providers/process/scancode.js @@ -44,14 +44,14 @@ class ScanCodeProcessor extends AbstractProcessor { async _runScancode(request, file) { this.logger.info( - `Analyzing ${request.toString()} using ScanCode. input: ${request.document.location} output: ${file.name}` + `Analyzing ${request.toString()} using ScanCode. 
input: ${request.document.location} output: ${file.name}`, ) const { options, timeout, processes, format } = this.options const parameters = [...options, '--timeout', timeout.toString(), '-n', processes.toString(), format] try { await execFile(`${this.options.installDir}/scancode`, [...parameters, file.name, request.document.location], { cwd: this.options.installDir, - maxBuffer: 5 * 1024 * 1024 + maxBuffer: 5 * 1024 * 1024, }) } catch (error) { // TODO see if the new version of ScanCode has a better way of differentiating errors @@ -66,13 +66,13 @@ class ScanCodeProcessor extends AbstractProcessor { const output = JSON.parse(fs.readFileSync(outputFile)) // Pick files that are potentially whole licenses. We can be reasonably agressive here // and the summarizers etc will further refine what makes it into the final definitions - const licenses = output.files.filter(file => file.is_license_text).map(file => file.path) + const licenses = output.files.filter((file) => file.is_license_text).map((file) => file.path) this.attachFiles(document, licenses, root) // Pick files that represent whole packages. We can be reasonably agressive here // and the summarizers etc will further refine what makes it into the final definitions const packages = output.files.reduce((result, file) => { - file.packages.forEach(entry => { + file.packages.forEach((entry) => { // in this case the manifest_path contains a subpath pointing to the corresponding file if (file.type === 'directory' && entry.manifest_path) result.push(`${file.path ? file.path + '/' : ''}${entry.manifest_path}`) @@ -93,23 +93,23 @@ class ScanCodeProcessor extends AbstractProcessor { _hasRealErrors(resultFile) { const results = JSON.parse(fs.readFileSync(resultFile)) return results.files.some( - file => + (file) => file.scan_errors && - file.scan_errors.some(error => { + file.scan_errors.some((error) => { return !( error.includes('ERROR: Processing interrupted: timeout after') || error.includes('ValueError:') || error.includes('package.json') || error.includes('UnicodeDecodeError') ) - }) + }), ) } _detectVersion() { if (this._versionPromise) return this._versionPromise this._versionPromise = execFile(`${this.options.installDir}/scancode`, ['--version']) - .then(result => { + .then((result) => { this.logger.info('Detecting ScanCode version') const raw_output = result.stdout @@ -117,15 +117,15 @@ class ScanCodeProcessor extends AbstractProcessor { this._toolVersion = scancode_line.replace('ScanCode version ', '').trim() this._schemaVersion = this.aggregateVersions( [this._schemaVersion, this.toolVersion, this.configVersion], - 'Invalid ScanCode version' + 'Invalid ScanCode version', ) return this._schemaVersion }) - .catch(error => { + .catch((error) => { this.logger.log(`Could not detect version of ScanCode: ${error.message} `) }) return this._versionPromise } } -module.exports = options => new ScanCodeProcessor(options) +module.exports = (options) => new ScanCodeProcessor(options) diff --git a/providers/process/source.js b/providers/process/source.js index 45cf5330..078a6f37 100644 --- a/providers/process/source.js +++ b/providers/process/source.js @@ -23,4 +23,4 @@ class SourceProcessor extends AbstractProcessor { } } -module.exports = { processor: options => new SourceProcessor(options), supportedTypes } +module.exports = { processor: (options) => new SourceProcessor(options), supportedTypes } diff --git a/providers/process/sourceExtract.js b/providers/process/sourceExtract.js index 7addffe6..bf26ff5e 100644 --- 
a/providers/process/sourceExtract.js +++ b/providers/process/sourceExtract.js @@ -29,4 +29,4 @@ class SourceExtract extends AbstractClearlyDefinedProcessor { } } -module.exports = options => new SourceExtract(options) +module.exports = (options) => new SourceExtract(options) diff --git a/providers/process/top.js b/providers/process/top.js index 3d6f8dce..a6473c6e 100644 --- a/providers/process/top.js +++ b/providers/process/top.js @@ -18,9 +18,21 @@ class TopProcessor extends AbstractProcessor { return ( request.type === 'top' && spec && - ['anaconda-main', 'anaconda-r', 'npmjs', 'cocoapods', 'conda-forge', 'cratesio', 'mavencentral', 'mavengoogle', 'nuget', 'github', 'pypi', 'composer', 'debian'].includes( - spec.provider - ) + [ + 'anaconda-main', + 'anaconda-r', + 'npmjs', + 'cocoapods', + 'conda-forge', + 'cratesio', + 'mavencentral', + 'mavengoogle', + 'nuget', + 'github', + 'pypi', + 'composer', + 'debian', + ].includes(spec.provider) ) } @@ -78,10 +90,10 @@ class TopProcessor extends AbstractProcessor { const initialOffset = Math.floor(start / 36) * 36 for (let offset = initialOffset; offset < end; offset += 36) { const response = await requestRetry.get(`https://www.npmjs.com/browse/depended?offset=${offset}`, { - headers: { 'x-spiferack': 1 } + headers: { 'x-spiferack': 1 }, }) const packages = response.packages || [] - const requestsPage = packages.map(pkg => { + const requestsPage = packages.map((pkg) => { let [namespace, name] = pkg.name.split('/') if (!name) { name = namespace @@ -143,10 +155,10 @@ class TopProcessor extends AbstractProcessor { for (let offset = start; offset < end; offset += 100) { const page = offset / 100 + 1 const response = await requestRetry.get( - `https://crates.io/api/v1/crates?page=${page}&per_page=100&sort=downloads` + `https://crates.io/api/v1/crates?page=${page}&per_page=100&sort=downloads`, ) const requestsPage = response.crates.map( - x => new Request('package', `cd:/crate/cratesio/-/${x.name}/${x.max_version}`) + (x) => new Request('package', `cd:/crate/cratesio/-/${x.name}/${x.max_version}`), ) await request.queueRequests(requestsPage) console.log(`Queued ${requestsPage.length} Crate packages. Offset: ${offset}`) @@ -173,7 +185,7 @@ class TopProcessor extends AbstractProcessor { const condaFetch = CondaFetch({ logger: this.logger, - cdFileLocation: config.get('FILE_STORE_LOCATION') || (process.platform === 'win32' ? 'c:/temp/cd' : '/tmp/cd') + cdFileLocation: config.get('FILE_STORE_LOCATION') || (process.platform === 'win32' ? 'c:/temp/cd' : '/tmp/cd'), }) if (!condaFetch.channels[spec.provider]) return request.markSkip(`Unrecognized conda channel ${spec.provider}`) @@ -184,25 +196,30 @@ class TopProcessor extends AbstractProcessor { if (spec.type === 'conda') { for (let subdir of channelData.subdirs) { let repoData = await condaFetch.getRepoData(channelUrl, spec.provider, subdir) - let repoCoordinates = Object.entries(repoData.packages). 
- map(([, packageData]) => `cd:/conda/${spec.provider}/${subdir}/${packageData.name}/${packageData.version}-${packageData.build}/` - ) + let repoCoordinates = Object.entries(repoData.packages).map( + ([, packageData]) => + `cd:/conda/${spec.provider}/${subdir}/${packageData.name}/${packageData.version}-${packageData.build}/`, + ) packagesCoordinates = packagesCoordinates.concat(repoCoordinates) if (start < packagesCoordinates.length && end <= packagesCoordinates.length) { break } } } else { - packagesCoordinates = Object.entries(channelData.packages).map(([packageName, packageData]) => `cd:/condasrc/${spec.provider}/-/${packageName}/${packageData.version}/`) + packagesCoordinates = Object.entries(channelData.packages).map( + ([packageName, packageData]) => `cd:/condasrc/${spec.provider}/-/${packageName}/${packageData.version}/`, + ) } let slicedCoordinates = packagesCoordinates.slice(start, end) this.logger.info( - `Conda top - coordinates: ${packagesCoordinates.length}, start: ${start}, end: ${end}, sliced: ${slicedCoordinates.length}` + `Conda top - coordinates: ${packagesCoordinates.length}, start: ${start}, end: ${end}, sliced: ${slicedCoordinates.length}`, ) - await request.queueRequests(slicedCoordinates.map(coord => new Request(spec.type === 'conda' ? 'package' : 'source', coord))) + await request.queueRequests( + slicedCoordinates.map((coord) => new Request(spec.type === 'conda' ? 'package' : 'source', coord)), + ) return request.markNoSave() } @@ -252,7 +269,7 @@ class TopProcessor extends AbstractProcessor { start = start && start >= 0 ? ++start : 1 // Exclude header from CSV file end = end && end > 0 ? ++end : fileLines.length const lines = fileLines.slice(start, end) - const requests = lines.map(line => { + const requests = lines.map((line) => { let [, groupId, artifactId] = line.split(',') groupId = groupId.substring(1, groupId.length - 1) // Remove quotes artifactId = artifactId.substring(1, artifactId.length - 1) @@ -269,7 +286,7 @@ class TopProcessor extends AbstractProcessor { start = start && start >= 0 ? ++start : 1 // Exclude header from CSV file end = end && end > 0 ? 
++end : fileLines.length const lines = fileLines.slice(start, end) - const requests = lines.map(line => { + const requests = lines.map((line) => { let [, groupId, artifactId] = line.split(',') groupId = groupId.substring(1, groupId.length - 1) // Remove quotes artifactId = artifactId.substring(1, artifactId.length - 1) @@ -300,9 +317,9 @@ class TopProcessor extends AbstractProcessor { if (!end || end - start <= 0) end = start + 1000 for (let offset = start; offset < end; offset += pageSize) { const topComponents = await requestRetry.get( - `https://api-v2v3search-0.nuget.org/query?prerelease=false&skip=${offset}&take=${pageSize}` + `https://api-v2v3search-0.nuget.org/query?prerelease=false&skip=${offset}&take=${pageSize}`, ) - const requests = topComponents.data.map(component => { + const requests = topComponents.data.map((component) => { return new Request('package', `cd:/nuget/nuget/-/${component.id}`) }) await request.queueRequests(requests) @@ -322,18 +339,18 @@ class TopProcessor extends AbstractProcessor { async _processAllGitHubOrgRepos(request) { const { namespace } = this.toSpec(request) const headers = { - 'User-Agent': 'clearlydefined/scanning' + 'User-Agent': 'clearlydefined/scanning', } const token = this.options.githubToken if (token) headers.Authorization = 'token ' + token const repos = await ghrequestor.getAll(`https://api.github.com/orgs/${namespace}/repos`, { headers, - tokenLowerBound: 10 + tokenLowerBound: 10, }) const requests = [] for (let i = 0; i < repos.length; i++) { const commits = await requestRetry.get(`https://api.github.com/repos/${namespace}/${repos[i].name}/commits`, { - headers + headers, }) if (commits.length > 0) { requests.push(new Request('source', `cd:/git/github/${namespace}/${repos[i].name}/${commits[0].sha}`)) @@ -361,15 +378,15 @@ class TopProcessor extends AbstractProcessor { if (!end || end - start <= 0) end = start + 100 const debianFetch = DebianFetch({ logger: this.logger, - cdFileLocation: config.get('FILE_STORE_LOCATION') || (process.platform === 'win32' ? 'c:/temp/cd' : '/tmp/cd') + cdFileLocation: config.get('FILE_STORE_LOCATION') || (process.platform === 'win32' ? 
'c:/temp/cd' : '/tmp/cd'), }) await debianFetch._getPackageMapFile() const packagesCoordinates = await this._getDebianPackagesCoordinates(debianFetch) const slicedCoordinates = packagesCoordinates.slice(start, end) this.logger.info( - `Debian top - coordinates: ${packagesCoordinates.length}, start: ${start}, end: ${end}, sliced: ${slicedCoordinates.length}` + `Debian top - coordinates: ${packagesCoordinates.length}, start: ${start}, end: ${end}, sliced: ${slicedCoordinates.length}`, ) - const requests = slicedCoordinates.map(coordinate => new Request('package', coordinate)) + const requests = slicedCoordinates.map((coordinate) => new Request('package', coordinate)) await request.queueRequests(requests) return request.markNoSave() } @@ -380,7 +397,7 @@ class TopProcessor extends AbstractProcessor { const lineReader = linebyline(debianFetch.packageMapFileLocation) let entry = {} lineReader - .on('line', line => { + .on('line', (line) => { if (line === '') { const architecture = entry.Architecture const binary = entry.Binary @@ -397,11 +414,11 @@ class TopProcessor extends AbstractProcessor { .on('end', () => { return resolve(coordinates) }) - .on('error', error => reject(error)) + .on('error', (error) => reject(error)) }) } // TODO: Implement _processTopPackagists } -module.exports = options => new TopProcessor(options) +module.exports = (options) => new TopProcessor(options) diff --git a/providers/store/attachmentStore.js b/providers/store/attachmentStore.js index dd45a8ba..109b5dde 100644 --- a/providers/store/attachmentStore.js +++ b/providers/store/attachmentStore.js @@ -16,7 +16,7 @@ class AttachmentStore { upsert(document) { const documentPromise = this.baseStore.upsert(document) if (!document._attachments) return documentPromise - const attachmentPromises = document._attachments.map(entry => { + const attachmentPromises = document._attachments.map((entry) => { return this.baseStore.upsert({ _metadata: { type: 'attachment', @@ -24,14 +24,14 @@ class AttachmentStore { links: { self: { href: `urn:attachment:${entry.token}`, - type: 'resource' - } + type: 'resource', + }, }, fetchedAt: get(document, '_metadata.fetchedAt'), processedAt: get(document, '_metadata.processedAt'), - version: '1' + version: '1', }, - attachment: Buffer.from(entry.attachment).toString() + attachment: Buffer.from(entry.attachment).toString(), }) }) attachmentPromises.push(documentPromise) @@ -63,4 +63,4 @@ class AttachmentStore { } } -module.exports = options => new AttachmentStore(options) +module.exports = (options) => new AttachmentStore(options) diff --git a/providers/store/attachmentStoreFactory.js b/providers/store/attachmentStoreFactory.js index d3804de3..608835ad 100644 --- a/providers/store/attachmentStoreFactory.js +++ b/providers/store/attachmentStoreFactory.js @@ -3,6 +3,6 @@ const AttachmentStore = require('./attachmentStore') -module.exports = realFactory => { - return options => AttachmentStore({ ...options, baseStore: realFactory(options) }) +module.exports = (realFactory) => { + return (options) => AttachmentStore({ ...options, baseStore: realFactory(options) }) } diff --git a/providers/store/azureQueueStore.js b/providers/store/azureQueueStore.js index b1e0e461..2ecce3be 100644 --- a/providers/store/azureQueueStore.js +++ b/providers/store/azureQueueStore.js @@ -47,4 +47,4 @@ class AzureStorageQueue { } } -module.exports = options => new AzureStorageQueue(options) +module.exports = (options) => new AzureStorageQueue(options) diff --git a/providers/store/storeDispatcher.js 
b/providers/store/storeDispatcher.js index 367d3ddf..f064f000 100644 --- a/providers/store/storeDispatcher.js +++ b/providers/store/storeDispatcher.js @@ -8,35 +8,35 @@ class StoreDispatcher { } connect() { - return this._perform(store => store.connect()) + return this._perform((store) => store.connect()) } upsert(document) { - return this._perform(store => store.upsert(document)) + return this._perform((store) => store.upsert(document)) } get(type, key) { - return this._perform(store => store.get(type, key), true) + return this._perform((store) => store.get(type, key), true) } etag(type, key) { - return this._perform(store => store.etag(type, key), true) + return this._perform((store) => store.etag(type, key), true) } list(type) { - return this._perform(store => store.list(type), true) + return this._perform((store) => store.list(type), true) } count(type) { - return this._perform(store => store.count(type), true) + return this._perform((store) => store.count(type), true) } close() { - return this._perform(store => store.close()) + return this._perform((store) => store.close()) } delete(type, key) { - return this._perform(store => store.delete(type, key)) + return this._perform((store) => store.delete(type, key)) } async _perform(operation, first = false) { diff --git a/providers/store/webhookDeltaStore.js b/providers/store/webhookDeltaStore.js index baf5f71d..469f24ef 100644 --- a/providers/store/webhookDeltaStore.js +++ b/providers/store/webhookDeltaStore.js @@ -22,9 +22,9 @@ class WebhookDeltaStore { json: true, body: pick(document, '_metadata'), headers: { - 'x-crawler': this.options.token || 'secret' + 'x-crawler': this.options.token || 'secret', }, - resolveWithFullResponse: true + resolveWithFullResponse: true, } try { const response = await request(options) @@ -60,4 +60,4 @@ class WebhookDeltaStore { } } -module.exports = options => new WebhookDeltaStore(options) +module.exports = (options) => new WebhookDeltaStore(options) diff --git a/template.env.json b/template.env.json index 106f62ad..166dc7e1 100644 --- a/template.env.json +++ b/template.env.json @@ -10,4 +10,4 @@ "========== Crawler Queue settings ==========": "", "CRAWLER_QUEUE_PROVIDER": "memory" -} \ No newline at end of file +} diff --git a/test/fixtures/conda/channeldata.json b/test/fixtures/conda/channeldata.json index 3d4005e9..6ee0e523 100644 --- a/test/fixtures/conda/channeldata.json +++ b/test/fixtures/conda/channeldata.json @@ -35,4 +35,4 @@ "version": "3.3.1" } } -} \ No newline at end of file +} diff --git a/test/fixtures/conda/repodata.json b/test/fixtures/conda/repodata.json index baba4903..48bcbd1d 100644 --- a/test/fixtures/conda/repodata.json +++ b/test/fixtures/conda/repodata.json @@ -130,4 +130,4 @@ "version": "3.0.2" } } -} \ No newline at end of file +} diff --git a/test/fixtures/crates/bitflags.json b/test/fixtures/crates/bitflags.json index f4802f49..87219c6c 100644 --- a/test/fixtures/crates/bitflags.json +++ b/test/fixtures/crates/bitflags.json @@ -4,30 +4,8 @@ "name": "bitflags", "updated_at": "2018-08-21T19:55:12.284583+00:00", "versions": [ - 104810, - 90918, - 90315, - 71130, - 64852, - 55072, - 53976, - 48660, - 48642, - 45450, - 45376, - 27090, - 25901, - 23166, - 20770, - 20767, - 18188, - 12713, - 12706, - 12247, - 11144, - 11138, - 4788, - 2989 + 104810, 90918, 90315, 71130, 64852, 55072, 53976, 48660, 48642, 45450, 45376, 27090, 25901, 23166, 20770, 20767, + 18188, 12713, 12706, 12247, 11144, 11138, 4788, 2989 ], "keywords": ["bit", "bitflags", "flags", "bitmask"], "categories": 
["no-std"], diff --git a/test/fixtures/go/license.html b/test/fixtures/go/license.html index 3a8dae07..ea645a60 100644 --- a/test/fixtures/go/license.html +++ b/test/fixtures/go/license.html @@ -1,10 +1,11 @@

-Apache-2.0
+Apache-2.0
 
 This is not legal advice. Read disclaimer.
 
-                                 Apache License
+    
+                                 Apache License
                       Version 2.0, January 2004
                     http://www.apache.org/licenses/
     
@@ -14,9 +15,10 @@
 
 BSD-2-Clause, BSD-3-Clause, HPND
 
 This is not legal advice. Read disclaimer.
 
-Copyright (c) 2013-2019 Tommi Virtanen.
+    
+Copyright (c) 2013-2019 Tommi Virtanen.
       Copyright (c) 2009, 2011, 2012 The Go Authors.
       All rights reserved.
     
-
\ No newline at end of file + diff --git a/test/fixtures/packagist/registryData.json b/test/fixtures/packagist/registryData.json index e5bf0e5b..99f7b10f 100644 --- a/test/fixtures/packagist/registryData.json +++ b/test/fixtures/packagist/registryData.json @@ -2,23 +2,12 @@ "manifest": { "name": "symfony/polyfill-mbstring", "description": "Symfony polyfill for the Mbstring extension", - "keywords": [ - "mbstring", - "compatibility", - "portable", - "polyfill", - "shim" - ], + "keywords": ["mbstring", "compatibility", "portable", "polyfill", "shim"], "homepage": "https://symfony.com", "version": "v1.11.0", "version_normalized": "1.11.0.0", - "license": [ - "MIT" - ], - "authors": [ - [], - [] - ], + "license": ["MIT"], + "authors": [[], []], "source": { "type": "git", "url": "https://github.com/symfony/polyfill-mbstring.git", @@ -48,4 +37,4 @@ "uid": 2850406 }, "releaseDate": "2019-02-06T07:57:58+00:00" -} \ No newline at end of file +} diff --git a/test/fixtures/pod/registryData.json b/test/fixtures/pod/registryData.json index 69704063..ad7283ff 100644 --- a/test/fixtures/pod/registryData.json +++ b/test/fixtures/pod/registryData.json @@ -29,4 +29,4 @@ } } ] -} \ No newline at end of file +} diff --git a/test/fixtures/pod/versions.json b/test/fixtures/pod/versions.json index 9413d5fa..5b1a8ecd 100644 --- a/test/fixtures/pod/versions.json +++ b/test/fixtures/pod/versions.json @@ -46,4 +46,4 @@ "created_at": "2014-05-22 00:58:35 UTC" } ] -} \ No newline at end of file +} diff --git a/test/fixtures/pypi/registryData.json b/test/fixtures/pypi/registryData.json index 347371ce..4ef1c52e 100644 --- a/test/fixtures/pypi/registryData.json +++ b/test/fixtures/pypi/registryData.json @@ -183,4 +183,4 @@ } ], "vulnerabilities": [] -} \ No newline at end of file +} diff --git a/test/fixtures/pypi/registryData_dnspython.json b/test/fixtures/pypi/registryData_dnspython.json index bfc0e935..763e90d9 100644 --- a/test/fixtures/pypi/registryData_dnspython.json +++ b/test/fixtures/pypi/registryData_dnspython.json @@ -651,4 +651,4 @@ } ], "vulnerabilities": [] -} \ No newline at end of file +} diff --git a/test/fixtures/pypi/registryData_lgpl2.json b/test/fixtures/pypi/registryData_lgpl2.json index fd698284..00c71492 100644 --- a/test/fixtures/pypi/registryData_lgpl2.json +++ b/test/fixtures/pypi/registryData_lgpl2.json @@ -1474,4 +1474,4 @@ } ], "vulnerabilities": [] -} \ No newline at end of file +} diff --git a/test/unit/ghcrawler/crawlerFactoryTest.js b/test/unit/ghcrawler/crawlerFactoryTest.js index 7d8107cc..452819d3 100644 --- a/test/unit/ghcrawler/crawlerFactoryTest.js +++ b/test/unit/ghcrawler/crawlerFactoryTest.js @@ -7,7 +7,6 @@ const CrawlerFactory = require('../../../ghcrawler/crawlerFactory') const MemoryFactory = require('../../../ghcrawler/providers/queuing/memoryFactory') describe('create scopedQueueSets', () => { - before(() => { sinon.stub(CrawlerFactory, '_getProvider').callsFake((options, provider = options.provider) => { const opts = options[provider] || {} @@ -25,12 +24,11 @@ describe('create scopedQueueSets', () => { provider: 'memory', memory: { _config: { on: sinon.stub() }, - weights: { immediate: 3, soon: 2, normal: 3, later: 2 } - } + weights: { immediate: 3, soon: 2, normal: 3, later: 2 }, + }, } const queues = CrawlerFactory.createQueues(queueOptions) expect(queues).to.be.ok expect(queueOptions.memory._config.on.calledTwice).to.be.true }) - }) diff --git a/test/unit/ghcrawler/lib/traversalPolicy.js b/test/unit/ghcrawler/lib/traversalPolicy.js index 91238450..d044d2f0 100644 --- 
a/test/unit/ghcrawler/lib/traversalPolicy.js +++ b/test/unit/ghcrawler/lib/traversalPolicy.js @@ -30,4 +30,4 @@ describe('Test hasExpired', () => { const future = DateTime.now().plus({ hours: 2 }).toISO() expect(hasExpired(future, 1)).to.be.false }) -}) \ No newline at end of file +}) diff --git a/test/unit/ghcrawler/queueSetTests.js b/test/unit/ghcrawler/queueSetTests.js index aa8c91a7..f151894d 100644 --- a/test/unit/ghcrawler/queueSetTests.js +++ b/test/unit/ghcrawler/queueSetTests.js @@ -15,7 +15,7 @@ describe('QueueSet construction', () => { describe('QueueSet weighting', () => { it('should create a simple startMap', () => { - const set = new QueueSet([createBaseQueue('1'), createBaseQueue('2')], createOptions({ '1': 3, '2': 2 })) + const set = new QueueSet([createBaseQueue('1'), createBaseQueue('2')], createOptions({ 1: 3, 2: 2 })) expect(set.startMap.length).to.be.equal(5) expect(set.startMap[0]).to.be.equal(0) expect(set.startMap[1]).to.be.equal(0) @@ -25,7 +25,7 @@ describe('QueueSet weighting', () => { }) it('should create a default startMap if no weights given', () => { - const set = new QueueSet([createBaseQueue('1'), createBaseQueue('2')], { _config: { on: () => { } } }) + const set = new QueueSet([createBaseQueue('1'), createBaseQueue('2')], { _config: { on: () => {} } }) expect(set.startMap.length).to.be.equal(2) expect(set.startMap[0]).to.be.equal(0) expect(set.startMap[1]).to.be.equal(1) @@ -37,10 +37,10 @@ describe('QueueSet weighting', () => { it('should pop other queue if nothing available', async () => { const priority = createBaseQueue('priority', { - pop: async () => new Request('priority', 'http://test') + pop: async () => new Request('priority', 'http://test'), }) const normal = createBaseQueue('normal', { - pop: async () => null + pop: async () => null, }) const queues = createBaseQueues([priority, normal], null, [1, 1]) queues.popCount = 1 @@ -57,8 +57,7 @@ describe('QueueSet weighting', () => { describe('QueueSet pushing', () => { it('should accept a simple request into a named queue', async () => { const priority = createBaseQueue('priority', { - push: async () => null - + push: async () => null, }) const normal = createBaseQueue('normal') const queues = createBaseQueues([priority, normal]) @@ -72,10 +71,10 @@ describe('QueueSet pushing', () => { it('should throw when pushing into an unknown queue', async () => { const priority = createBaseQueue('priority', { - push: async () => null + push: async () => null, }) const normal = createBaseQueue('normal', { - push: async () => null + push: async () => null, }) const queues = createBaseQueues([priority, normal]) const request = new Request('test', 'http://test') @@ -98,8 +97,8 @@ describe('QueueSet originQueue management', () => { describe('QueueSet subscription management', () => { it('should subscribe all', () => { - const priority = createBaseQueue('priority', { subscribe: () => { } }) - const normal = createBaseQueue('normal', { subscribe: () => { } }) + const priority = createBaseQueue('priority', { subscribe: () => {} }) + const normal = createBaseQueue('normal', { subscribe: () => {} }) const queues = createBaseQueues([priority, normal]) sinon.spy(priority, 'subscribe') sinon.spy(normal, 'subscribe') @@ -111,8 +110,8 @@ describe('QueueSet subscription management', () => { }) it('should unsubscribe all', () => { - const priority = createBaseQueue('priority', { unsubscribe: () => { } }) - const normal = createBaseQueue('normal', { unsubscribe: () => { } }) + const priority = createBaseQueue('priority', { 
unsubscribe: () => {} }) + const normal = createBaseQueue('normal', { unsubscribe: () => {} }) const queues = createBaseQueues([priority, normal]) sinon.spy(priority, 'unsubscribe') sinon.spy(normal, 'unsubscribe') @@ -127,7 +126,7 @@ describe('QueueSet subscription management', () => { function createOptions(weights) { return { weights: weights, - _config: { on: () => { } } + _config: { on: () => {} }, } } @@ -137,7 +136,7 @@ function createBaseQueues(queues, weights = null) { function createBaseQueue( name, - { pop = null, push = null, done = null, abandon = null, subscribe = null, unsubscribe = null } = {} + { pop = null, push = null, done = null, abandon = null, subscribe = null, unsubscribe = null } = {}, ) { const result = { name: name } result.getName = () => { diff --git a/test/unit/ghcrawler/requestTests.js b/test/unit/ghcrawler/requestTests.js index 0b8290f6..00ecad65 100644 --- a/test/unit/ghcrawler/requestTests.js +++ b/test/unit/ghcrawler/requestTests.js @@ -6,7 +6,7 @@ const expect = require('chai').expect const Request = require('../../../ghcrawler/lib/request.js') describe('Request context/qualifier', () => { - it('will not queueRoot if none transitivity', () => { }) + it('will not queueRoot if none transitivity', () => {}) }) describe('Request link management', () => { diff --git a/test/unit/lib/entitySpecTests.js b/test/unit/lib/entitySpecTests.js index 683440e8..f76ef559 100644 --- a/test/unit/lib/entitySpecTests.js +++ b/test/unit/lib/entitySpecTests.js @@ -16,10 +16,12 @@ describe('entitySpec', () => { }) it('creates an EntitySpec from a Maven url', () => { - const entityFromUrl = EntitySpec.fromUrl('cd:/maven/mavencentral/org.eclipse.xtext/org.eclipse.xtext.common.types/2.25.0') + const entityFromUrl = EntitySpec.fromUrl( + 'cd:/maven/mavencentral/org.eclipse.xtext/org.eclipse.xtext.common.types/2.25.0', + ) expect(entityFromUrl.namespace).to.eq('org.eclipse.xtext') expect(entityFromUrl.name).to.eq('org.eclipse.xtext.common.types') expect(entityFromUrl.revision).to.eq('2.25.0') }) -}) \ No newline at end of file +}) diff --git a/test/unit/lib/fetchResultTests.js b/test/unit/lib/fetchResultTests.js index 9c1a089d..ad97dad1 100644 --- a/test/unit/lib/fetchResultTests.js +++ b/test/unit/lib/fetchResultTests.js @@ -62,7 +62,8 @@ describe('fetchResult', () => { const request = new Request('test', 'http://test').trackCleanup([ dir1.removeCallback, dir2.removeCallback, - { removeCallback: sinon.stub() }]) + { removeCallback: sinon.stub() }, + ]) expect(request.getTrackedCleanups().length).to.be.equal(3) fetchResult.adoptCleanup([dir1, dir2], request) @@ -171,4 +172,4 @@ describe('fetchResult', () => { expect(fetchResult.isInUse()).to.be.false }) }) -}) \ No newline at end of file +}) diff --git a/test/unit/lib/memoryCacheTest.js b/test/unit/lib/memoryCacheTest.js index b4616f11..638cc180 100644 --- a/test/unit/lib/memoryCacheTest.js +++ b/test/unit/lib/memoryCacheTest.js @@ -106,11 +106,11 @@ describe('cache timeout callback', () => { expect(false).to.be.true done() } - callCount ++ + callCount++ } cache.withVerify(verifyNotExpiredFirstThenExpired) cache.setWithConditionalExpiry('a', 'A', afterExpire, shouldExpire) expect(cache.get('a')).to.be.equal('A') }) -}) \ No newline at end of file +}) diff --git a/test/unit/lib/sourceSpecTests.js b/test/unit/lib/sourceSpecTests.js index 96a02ee4..9b682be6 100644 --- a/test/unit/lib/sourceSpecTests.js +++ b/test/unit/lib/sourceSpecTests.js @@ -13,7 +13,9 @@ describe('sourceSpec', () => { it('creates maven url/urn', () => { const spec 
= new SourceSpec('maven', 'mavengoogle', 'androidx.activity', 'activity', '1.3.0-alpha05') - expect(spec.toUrl()).to.eq('https://dl.google.com/android/maven2/androidx/activity/activity/1.3.0-alpha05/activity-1.3.0-alpha05.jar') + expect(spec.toUrl()).to.eq( + 'https://dl.google.com/android/maven2/androidx/activity/activity/1.3.0-alpha05/activity-1.3.0-alpha05.jar', + ) expect(spec.toUrn()).to.eq('urn:maven:mavengoogle:androidx.activity:activity:revision:1.3.0-alpha05') }) diff --git a/test/unit/lib/utilsTests.js b/test/unit/lib/utilsTests.js index b3cf5028..adc12b7b 100644 --- a/test/unit/lib/utilsTests.js +++ b/test/unit/lib/utilsTests.js @@ -3,7 +3,15 @@ const chai = require('chai') const chaiAsPromised = require('chai-as-promised') -const { normalizePath, normalizePaths, trimParents, trimAllParents, extractDate, spawnPromisified, isGitFile } = require('../../../lib/utils') +const { + normalizePath, + normalizePaths, + trimParents, + trimAllParents, + extractDate, + spawnPromisified, + isGitFile, +} = require('../../../lib/utils') const { promisify } = require('util') const execFile = promisify(require('child_process').execFile) chai.use(chaiAsPromised) @@ -63,13 +71,12 @@ describe('Util isGitFile', () => { ['/tmp/tempX/package/src', false], ['.git', true], ['/tmp/tempX/package/.git', true], - ['/tmp/tempX/package/.git/hooks/pre-merge-commit.sample', true] + ['/tmp/tempX/package/.git/hooks/pre-merge-commit.sample', true], ]) entries.forEach((expected, file) => { it(`should return ${expected} for isGitFile given '${file}'`, () => expect(isGitFile(file)).to.eq(expected)) }) - }) describe('Util extractDate', () => { @@ -105,9 +112,8 @@ describe('Util extractDate', () => { }) describe('test spawnPromisified ', () => { - it('should handle spawn + command successfully', async () => { - const { stdout: expected} = await execFile('ls', ['-l']) + const { stdout: expected } = await execFile('ls', ['-l']) const actual = await spawnPromisified('ls', ['-l']) expect(actual).to.be.equal(expected) }) @@ -129,7 +135,7 @@ describe('test spawnPromisified ', () => { it('should handle output more than 5MB', async () => { const largeFile = 'test/fixtures/debian/0ad_0.0.17-1_armhf.deb' const execFilePromise = execFile('cat', [largeFile, largeFile], { - maxBuffer: 5 * 1024 * 1024 + maxBuffer: 5 * 1024 * 1024, }) await expect(execFilePromise).to.be.rejectedWith('stdout maxBuffer length exceeded') @@ -145,4 +151,3 @@ async function getError(promise) { return error } } - diff --git a/test/unit/providers/fetch/condaFetchTests.js b/test/unit/providers/fetch/condaFetchTests.js index 72544cae..04eff259 100644 --- a/test/unit/providers/fetch/condaFetchTests.js +++ b/test/unit/providers/fetch/condaFetchTests.js @@ -8,7 +8,7 @@ const Request = require('../../../../ghcrawler/lib/request.js') describe('condaFetch utilities', () => { let fetch = CondaFetch({ logger: { info: sinon.stub() }, - cdFileLocation: 'test/fixtures/conda/fragment' + cdFileLocation: 'test/fixtures/conda/fragment', }) let repoData = JSON.parse(fs.readFileSync('test/fixtures/conda/repodata.json')) @@ -19,7 +19,9 @@ describe('condaFetch utilities', () => { it('matches packages in repodata.packages.conda correctly', () => { expect(fetch._matchPackage('21cmfast', '3.0.2', 'py37h48b2cff_0', repoData).length).to.greaterThan(0) - expect(fetch._matchPackage('21cmfast', '3.0.2', 'py37h48b2cff_0', repoData)[0].packageData.build).to.equal('py37h48b2cff_0') + expect(fetch._matchPackage('21cmfast', '3.0.2', 'py37h48b2cff_0', 
repoData)[0].packageData.build).to.equal( + 'py37h48b2cff_0', + ) }) it('matches the latest package when version not specified', () => { @@ -36,15 +38,11 @@ describe('condaFetch', () => { beforeEach(() => { fetch = CondaFetch({ logger: { info: sinon.stub() }, - cdFileLocation: 'test/fixtures/conda/fragment' + cdFileLocation: 'test/fixtures/conda/fragment', }) - fetch.getChannelData = sinon.stub().resolves( - JSON.parse(fs.readFileSync('test/fixtures/conda/channeldata.json')) - ) + fetch.getChannelData = sinon.stub().resolves(JSON.parse(fs.readFileSync('test/fixtures/conda/channeldata.json'))) - fetch.getRepoData = sinon.stub().resolves( - JSON.parse(fs.readFileSync('test/fixtures/conda/repodata.json')) - ) + fetch.getRepoData = sinon.stub().resolves(JSON.parse(fs.readFileSync('test/fixtures/conda/repodata.json'))) fetch._downloadPackage = sinon.stub().callsFake((downloadUrl, destination) => { expect(downloadUrl).to.contains('https://conda.anaconda.org/conda-forge/') @@ -57,7 +55,7 @@ describe('condaFetch', () => { expect(result.url).to.be.contains('cd:/conda/conda-forge/linux-64/21cmfast/3.0.2') expect(result.document.hashes).to.be.deep.equal({ sha1: '9b2f4958826956be03cf3793dbdb663a53a8a1f1', - sha256: '1154fceeb5c4ee9bb97d245713ac21eb1910237c724d2b7103747215663273c2' + sha256: '1154fceeb5c4ee9bb97d245713ac21eb1910237c724d2b7103747215663273c2', }) expect(result.document.location).to.be.a.string expect(result.document.releaseDate).to.match(/\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z/) @@ -106,7 +104,9 @@ describe('condaFetch', () => { }) it('fetch package with version, architecture, and build version', async () => { - const result = await fetch.handle(new Request('test', 'cd:/conda/conda-forge/linux-64/21cmfast/3.0.2-py37hd45b216_1')) + const result = await fetch.handle( + new Request('test', 'cd:/conda/conda-forge/linux-64/21cmfast/3.0.2-py37hd45b216_1'), + ) verifyFetch(result.fetchResult) }) @@ -116,14 +116,20 @@ describe('condaFetch', () => { }) it('reports failed package matching', async () => { - const result = await fetch.handle(new Request('test', 'cd:/conda/conda-forge/linux-64/21cmfast/3.0.2-py9999_invalid')) - expect(result.outcome).to.equal('Missing package with matching spec (version: 3.0.2, buildVersion: py9999_invalid) in linux-64 repository') + const result = await fetch.handle( + new Request('test', 'cd:/conda/conda-forge/linux-64/21cmfast/3.0.2-py9999_invalid'), + ) + expect(result.outcome).to.equal( + 'Missing package with matching spec (version: 3.0.2, buildVersion: py9999_invalid) in linux-64 repository', + ) }) it('reports failed repodata fetching and parsing', async () => { fetch.getRepoData = sinon.stub().resolves(null) const result = await fetch.handle(new Request('test', 'cd:/conda/conda-forge/linux-64/21cmfast/3.0.2')) - expect(result.outcome).to.equal('failed to fetch and parse repodata json file for channel conda-forge in architecture linux-64') + expect(result.outcome).to.equal( + 'failed to fetch and parse repodata json file for channel conda-forge in architecture linux-64', + ) }) it('reports failed channeldata fetching and parsing', async () => { @@ -138,15 +144,11 @@ describe('condaSrcFetch', () => { beforeEach(() => { fetch = CondaFetch({ logger: { info: sinon.stub() }, - cdFileLocation: 'test/fixtures/conda/fragment' + cdFileLocation: 'test/fixtures/conda/fragment', }) - fetch.getChannelData = sinon.stub().resolves( - JSON.parse(fs.readFileSync('test/fixtures/conda/channeldata.json')) - ) + fetch.getChannelData = 
sinon.stub().resolves(JSON.parse(fs.readFileSync('test/fixtures/conda/channeldata.json'))) - fetch.getRepoData = sinon.stub().resolves( - JSON.parse(fs.readFileSync('test/fixtures/conda/repodata.json')) - ) + fetch.getRepoData = sinon.stub().resolves(JSON.parse(fs.readFileSync('test/fixtures/conda/repodata.json'))) fetch._downloadPackage = sinon.stub().callsFake((downloadUrl, destination) => { expect(downloadUrl).to.equal('https://pypi.io/packages/source/2/21cmFAST/21cmFAST-3.3.1.tar.gz') @@ -158,7 +160,7 @@ describe('condaSrcFetch', () => { expect(result.url).to.be.contains('cd:/condasrc/conda-forge/-/21cmfast/3.3.1') expect(result.document.hashes).to.be.deep.equal({ sha1: '92ec2a84d2377426ff51ad3b07a75921245c8881', - sha256: '96f5809d111a8a137c25758fa3f41586ea44cecba7ae191518767895afc7b3c6' + sha256: '96f5809d111a8a137c25758fa3f41586ea44cecba7ae191518767895afc7b3c6', }) expect(result.document.location).to.be.a.string expect(result.document.releaseDate).to.match(/\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z/) @@ -188,4 +190,4 @@ describe('condaSrcFetch', () => { const downloadPackageStub = async (file, destination) => { await promisify(fs.copyFile)(file, destination) -} \ No newline at end of file +} diff --git a/test/unit/providers/fetch/cratesioFetchTests.js b/test/unit/providers/fetch/cratesioFetchTests.js index 7bea8ea1..b502790a 100644 --- a/test/unit/providers/fetch/cratesioFetchTests.js +++ b/test/unit/providers/fetch/cratesioFetchTests.js @@ -14,8 +14,8 @@ let Fetch const hashes = { 'bitflags-1.0.4.crate': { sha1: 'fbc1ce9fa176ed7a7e15cfc6d1f6c2389f536361', - sha256: '228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12' - } + sha256: '228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12', + }, } function pickFile(url) { @@ -25,7 +25,7 @@ function pickFile(url) { describe('crateFetch workflow', () => { beforeEach(() => { - const requestPromiseStub = options => { + const requestPromiseStub = (options) => { if (options && options.url) { if (options.url.includes('error')) throw new Error('yikes') if (options.url.includes('missing')) throw { statusCode: 404 } @@ -39,7 +39,7 @@ describe('crateFetch workflow', () => { return response } Fetch = proxyquire('../../../../providers/fetch/cratesioFetch', { - 'request-promise-native': requestPromiseStub + 'request-promise-native': requestPromiseStub, }) }) @@ -62,7 +62,7 @@ describe('crateFetch workflow', () => { const handler = setup() handler._getRegistryData = () => { return { - version: { num: '1.0.4', dl_path: 'error' } + version: { num: '1.0.4', dl_path: 'error' }, } } const request = new Request('test', 'cd:/crate/cratesio/-/bitflags/1.0.4') @@ -79,7 +79,7 @@ describe('crateFetch workflow', () => { const handler = setup() handler._getRegistryData = () => { return { - version: { num: '1.0.4', dl_path: 'missing' } + version: { num: '1.0.4', dl_path: 'missing' }, } } const request = new Request('test', 'cd:/crate/cratesio/-/bitflags/1.0.4') @@ -125,7 +125,7 @@ describe('crateFetch', () => { const crateFetch = mockCrateFetch({ registryData: () => { return { manifest: null, version: null } - } + }, }) const request = new Request('crate', 'cd:/crate/cratesio/-/name/0.1.0') await crateFetch.handle(request) @@ -139,7 +139,7 @@ describe('crateFetch', () => { const crateFetch = mockCrateFetch({ registryData: () => { return { manifest: {}, version: { num: '0.5.0', crate: 'name' } } - } + }, }) const request = await crateFetch.handle(new Request('crate', 'cd:/crate/cratesio/-/name/0.1.0')) request.fetchResult.copyTo(request) 
@@ -150,7 +150,7 @@ describe('crateFetch', () => { const crateFetch = mockCrateFetch({ registryData: () => { return { manifest: {}, version: { num: '0.1.0', crate: 'name' } } - } + }, }) const request = await crateFetch.handle(new Request('crate', 'cd:/crate/cratesio/-/naME/0.1.0')) request.fetchResult.copyTo(request) @@ -165,7 +165,7 @@ function mockCrateFetch(options) { return { name: '/tmp' } } crateFetch._getPackage = () => '/tmp/crate' - crateFetch.decompress = () => { } + crateFetch.decompress = () => {} crateFetch.computeHashes = () => { return { sha1: '42' } } diff --git a/test/unit/providers/fetch/debianFetchTests.js b/test/unit/providers/fetch/debianFetchTests.js index bb0b4592..106f1c0e 100644 --- a/test/unit/providers/fetch/debianFetchTests.js +++ b/test/unit/providers/fetch/debianFetchTests.js @@ -16,18 +16,18 @@ describe('Debian utility functions', () => { expect((await fetch._getDataFromPackageMapFile(spec('deb', 'debian', '0ad', '0.0.17-1_armhf'))).length).to.equal(9) expect((await fetch._getDataFromPackageMapFile(spec('debsrc', 'debian', '0ad', '0.0.17-1'))).length).to.equal(9) expect( - (await fetch._getDataFromPackageMapFile(spec('deb', 'debian', '0ad', '0.0.23-1~bpo9+1_amd64'))).length + (await fetch._getDataFromPackageMapFile(spec('deb', 'debian', '0ad', '0.0.23-1~bpo9+1_amd64'))).length, ).to.equal(3) expect( - (await fetch._getDataFromPackageMapFile(spec('debsrc', 'debian', '0ad', '0.0.23-1~bpo9+1'))).length + (await fetch._getDataFromPackageMapFile(spec('debsrc', 'debian', '0ad', '0.0.23-1~bpo9+1'))).length, ).to.equal(3) expect((await fetch._getDataFromPackageMapFile(spec('deb', 'debian', 'amiwm', '0.21pl2-1_amd64'))).length).to.equal( - 7 + 7, ) expect((await fetch._getDataFromPackageMapFile(spec('debsrc', 'debian', 'amiwm', '0.21pl2-1'))).length).to.equal(7) expect( - (await fetch._getDataFromPackageMapFile(spec('deb', 'debian', 'non-existant', 'non-existant'))).length + (await fetch._getDataFromPackageMapFile(spec('deb', 'debian', 'non-existant', 'non-existant'))).length, ).to.equal(0) }) @@ -44,15 +44,15 @@ describe('Debian utility functions', () => { const spec1 = spec('deb', 'debian', '0ad', '0.0.17-1_armhf') const registryData1 = await fetch._getDataFromPackageMapFile(spec1) expect(fetch._getDownloadUrls(spec1, registryData1).binary).to.equal( - 'http://ftp.debian.org/debian/pool/main/0/0ad/0ad_0.0.17-1_armhf.deb' + 'http://ftp.debian.org/debian/pool/main/0/0ad/0ad_0.0.17-1_armhf.deb', ) const spec2 = spec('debsrc', 'debian', '0ad', '0.0.17-1') const registryData2 = await fetch._getDataFromPackageMapFile(spec1) expect(fetch._getDownloadUrls(spec2, registryData2).source).to.equal( - 'http://ftp.debian.org/debian/pool/main/0/0ad/0ad_0.0.17.orig.tar.xz' + 'http://ftp.debian.org/debian/pool/main/0/0ad/0ad_0.0.17.orig.tar.xz', ) expect(fetch._getDownloadUrls(spec2, registryData2).patches).to.equal( - 'http://ftp.debian.org/debian/pool/main/0/0ad/0ad_0.0.17-1.debian.tar.xz' + 'http://ftp.debian.org/debian/pool/main/0/0ad/0ad_0.0.17-1.debian.tar.xz', ) }) @@ -80,17 +80,17 @@ describe('Debian utility functions', () => { 'public-domain', 'MPL-1.1', 'GPL-2.0', - 'LGPL-2.1' + 'LGPL-2.1', ]) // Edge cases: expect(fetch._parseDeclaredLicenses('License: GPL-1+ or Artistic')).to.deep.equal(['(GPL-1+ OR Artistic)']) expect(fetch._parseDeclaredLicenses('License: GPL-2+ and BSD-3-clause')).to.deep.equal(['GPL-2+', 'BSD-3-clause']) expect(fetch._parseDeclaredLicenses('License: GPL-2+ or Artistic-2.0, and BSD-3-clause')).to.deep.equal([ '(GPL-2+ OR Artistic-2.0)', - 
'BSD-3-clause' + 'BSD-3-clause', ]) expect(fetch._parseDeclaredLicenses('License: Expat or Artistic and Artistic-2.0')).to.deep.equal([ - '(MIT OR Artistic AND Artistic-2.0)' + '(MIT OR Artistic AND Artistic-2.0)', ]) }) }) @@ -98,8 +98,8 @@ describe('Debian utility functions', () => { const hashes = { '0ad_0.0.17-1_armhf.deb': { sha1: '18dc18cb6397aa968408e554f3ff0e2010554b0d', - sha256: '2906a834ca562152afbf2f25315727608c4b25566960cf9ee8b15e8110850fb8' - } + sha256: '2906a834ca562152afbf2f25315727608c4b25566960cf9ee8b15e8110850fb8', + }, } describe('Debian fetching', () => { @@ -130,7 +130,7 @@ describe('Debian fetching', () => { expect(request.document.hashes.sha256).to.be.equal(hashes['0ad_0.0.17-1_armhf.deb']['sha256']) expect(request.document.releaseDate.getFullYear()).to.be.equal(2014) expect(request.document.copyrightUrl).to.be.equal( - 'https://metadata.ftp-master.debian.org/changelogs/main/0/0ad/0ad_0.0.17-1_copyright' + 'https://metadata.ftp-master.debian.org/changelogs/main/0/0ad/0ad_0.0.17-1_copyright', ) expect(request.document.declaredLicenses).to.deep.equal(['MIT', 'BSD-3-clause']) }) @@ -159,6 +159,6 @@ function spec(type, provider, name, revision) { namespace, name, revision, - toUrl: () => `cd:/${type}/${provider}/${namespace}/${name}/${revision}` + toUrl: () => `cd:/${type}/${provider}/${namespace}/${name}/${revision}`, } } diff --git a/test/unit/providers/fetch/dispatcherTests.js b/test/unit/providers/fetch/dispatcherTests.js index 3fa4a8ee..4af2ea14 100644 --- a/test/unit/providers/fetch/dispatcherTests.js +++ b/test/unit/providers/fetch/dispatcherTests.js @@ -31,7 +31,7 @@ describe('fetchDispatcher', () => { const processorsStub = [{ canHandle: () => true, shouldFetch: () => false }] const fetchDispatcher = FetchDispatcher({}, {}, {}, processorsStub) const request = {} - chai.spy.on(request, 'markNoSave', () => { }) + chai.spy.on(request, 'markNoSave', () => {}) await fetchDispatcher.handle(request) expect(request.markNoSave).to.have.been.called.once }) @@ -48,7 +48,6 @@ describe('fetchDispatcher', () => { }) describe('fetchDispatcher cache fetch result', () => { - let resultCache let inProgressPromiseCache @@ -58,7 +57,7 @@ describe('fetchDispatcher cache fetch result', () => { }) afterEach(() => { - Object.values(resultCache).forEach(fetched => fetched.cleanup()) + Object.values(resultCache).forEach((fetched) => fetched.cleanup()) }) function setupDispatcher(fetcher) { @@ -66,13 +65,21 @@ describe('fetchDispatcher cache fetch result', () => { const processorsStub = [{ canHandle: () => true, shouldFetch: () => true, getUrnFor: () => 'documentkey' }] const filterStub = { shouldFetchMissing: () => true, shouldFetch: () => true } const options = { logger: { info: sinon.stub(), debug: sinon.stub() } } - return FetchDispatcher(options, storeStub, [fetcher], processorsStub, filterStub, mockResultCache(resultCache), inProgressPromiseCache) + return FetchDispatcher( + options, + storeStub, + [fetcher], + processorsStub, + filterStub, + mockResultCache(resultCache), + inProgressPromiseCache, + ) } function mockResultCache(cache) { return { - get: key => cache[key], - setWithConditionalExpiry : (key, value) => cache[key] = value, + get: (key) => cache[key], + setWithConditionalExpiry: (key, value) => (cache[key] = value), } } @@ -105,7 +112,7 @@ describe('fetchDispatcher cache fetch result', () => { describe('cache maven fetch result', () => { function setupMavenFetch() { - const fileSupplier = url => { + const fileSupplier = (url) => { let fileName if 
(url.includes('solrsearch')) fileName = 'swt-3.3.0-v3346.json' if (url.endsWith('.pom')) fileName = 'swt-3.3.0-v3346.pom' @@ -116,7 +123,7 @@ describe('fetchDispatcher cache fetch result', () => { return MavenFetch({ logger: { log: sinon.stub() }, requestPromise: createRequestPromiseStub(fileSupplier), - requestStream: createGetStub(fileSupplier) + requestStream: createGetStub(fileSupplier), }) } @@ -175,7 +182,9 @@ describe('fetchDispatcher cache fetch result', () => { }) it('cached result same as fetched', async () => { - pypiFetch._getRegistryData = sinon.stub().resolves(JSON.parse(fs.readFileSync('test/fixtures/pypi/registryData.json'))) + pypiFetch._getRegistryData = sinon + .stub() + .resolves(JSON.parse(fs.readFileSync('test/fixtures/pypi/registryData.json'))) const fetchDispatcher = setupDispatcher(pypiFetch) await verifyFetchAndCache(fetchDispatcher, 'cd:/pypi/pypi/-/backports.ssl-match-hostname/3.7.0.1') }) @@ -191,13 +200,12 @@ describe('fetchDispatcher cache fetch result', () => { }) describe('cache NpmFetch result', () => { - const npmRegistryRequestStub = () => { const version = '0.3.0' return { manifest: { version }, versions: { [version]: { test: true } }, - time: { [version]: '42' } + time: { [version]: '42' }, } } @@ -205,11 +213,12 @@ describe('fetchDispatcher cache fetch result', () => { beforeEach(() => { const NpmFetch = proxyquire('../../../../providers/fetch/npmjsFetch', { - 'request-promise-native': npmRegistryRequestStub + 'request-promise-native': npmRegistryRequestStub, }) const npmFetch = NpmFetch({ logger: { log: sinon.stub() } }) - npmFetch._getPackage = sinon.stub().callsFake(async (spec, destination) => - await getPacakgeStub('test/fixtures/npm/redie-0.3.0.tgz', destination)) + npmFetch._getPackage = sinon + .stub() + .callsFake(async (spec, destination) => await getPacakgeStub('test/fixtures/npm/redie-0.3.0.tgz', destination)) fetchDispatcher = setupDispatcher(npmFetch) }) @@ -229,8 +238,9 @@ describe('fetchDispatcher cache fetch result', () => { version: '0.5.1', gem_uri: 'https://rubygems.org/gems/small-0.5.1.gem', }) - rubyGemsFetch._getPackage = sinon.stub().callsFake(async (spec, destination) => - await getPacakgeStub('test/fixtures/ruby/small-0.5.1.gem', destination)) + rubyGemsFetch._getPackage = sinon + .stub() + .callsFake(async (spec, destination) => await getPacakgeStub('test/fixtures/ruby/small-0.5.1.gem', destination)) fetchDispatcher = setupDispatcher(rubyGemsFetch) }) @@ -245,10 +255,18 @@ describe('fetchDispatcher cache fetch result', () => { beforeEach(() => { const packagistFetch = PackagistFetch({ logger: { log: sinon.stub() } }) - packagistFetch._getRegistryData = sinon.stub().resolves( - JSON.parse(fs.readFileSync('test/fixtures/packagist/registryData.json'))) - packagistFetch._getPackage = sinon.stub().callsFake(async (spec, registryData, destination) => - await getPacakgeStub('test/fixtures/composer/symfony-polyfill-mbstring-v1.11.0-0-gfe5e94c.zip', destination)) + packagistFetch._getRegistryData = sinon + .stub() + .resolves(JSON.parse(fs.readFileSync('test/fixtures/packagist/registryData.json'))) + packagistFetch._getPackage = sinon + .stub() + .callsFake( + async (spec, registryData, destination) => + await getPacakgeStub( + 'test/fixtures/composer/symfony-polyfill-mbstring-v1.11.0-0-gfe5e94c.zip', + destination, + ), + ) fetchDispatcher = setupDispatcher(packagistFetch) }) @@ -259,7 +277,7 @@ describe('fetchDispatcher cache fetch result', () => { }) describe('cache CrateioFetch result', () => { - const requestPromiseStub = 
options => { + const requestPromiseStub = (options) => { const body = fs.readFileSync('test/fixtures/crates/bitflags.json') if (options && options.json) return JSON.parse(body) const response = new PassThrough() @@ -273,7 +291,7 @@ describe('fetchDispatcher cache fetch result', () => { beforeEach(() => { const CrateioFetch = proxyquire('../../../../providers/fetch/cratesioFetch', { - 'request-promise-native': requestPromiseStub + 'request-promise-native': requestPromiseStub, }) const packagistFetch = CrateioFetch({ logger: { log: sinon.stub() } }) fetchDispatcher = setupDispatcher(packagistFetch) @@ -290,7 +308,7 @@ describe('fetchDispatcher cache fetch result', () => { beforeEach(() => { const DebianFetch = proxyquire('../../../../providers/fetch/debianFetch', { - 'memory-cache': memCacheStub + 'memory-cache': memCacheStub, }) const fetch = DebianFetch({ logger: { info: sinon.stub() }, cdFileLocation: 'test/fixtures/debian/fragment' }) fetch._download = async (downloadUrl, destination) => @@ -316,8 +334,8 @@ describe('fetchDispatcher cache fetch result', () => { const successHttpStub = { get: sinon.stub().returns({ status: 200, - data: httpContent - }) + data: httpContent, + }), } let fetchDispatcher @@ -325,7 +343,7 @@ describe('fetchDispatcher cache fetch result', () => { beforeEach(() => { const GoFetch = proxyquire('../../../../providers/fetch/goFetch', { request: { get: createGetStub(fileSupplier) }, - 'request-promise-native': createRequestPromiseStub(fileSupplier) + 'request-promise-native': createRequestPromiseStub(fileSupplier), }) const fetch = GoFetch({ logger: { info: sinon.stub() }, http: successHttpStub }) fetchDispatcher = setupDispatcher(fetch) @@ -366,8 +384,8 @@ describe('fetchDispatcher cache fetch result', () => { requestretry: { defaults: () => { return { get: requestPromiseStub } - } - } + }, + }, }) const fetch = NugetFetch({ logger: { info: sinon.stub() } }) fetchDispatcher = setupDispatcher(fetch) @@ -385,9 +403,9 @@ describe('fetchDispatcher cache fetch result', () => { requestretry: { defaults: () => { return { get: sinon.stub().resolves({ body: loadJson('pod/versions.json'), statusCode: 200 }) } - } + }, }, - 'request-promise-native': sinon.stub().resolves(loadJson('pod/registryData.json')) + 'request-promise-native': sinon.stub().resolves(loadJson('pod/registryData.json')), }) const fetch = PodFetch({ logger: { info: sinon.stub() } }) fetch._getPackage = sinon.stub().resolves('/tmp/cd-pYKk9q/SwiftLCS-1.0') @@ -400,8 +418,8 @@ describe('fetchDispatcher cache fetch result', () => { }) }) -const createRequestPromiseStub = fileSupplier => { - return options => { +const createRequestPromiseStub = (fileSupplier) => { + return (options) => { if (options.url) { if (options.url.includes('error')) throw new Error('yikes') if (options.url.includes('code')) throw { statusCode: 500, message: 'Code' } @@ -412,7 +430,7 @@ const createRequestPromiseStub = fileSupplier => { } } -const createGetStub = fileSupplier => { +const createGetStub = (fileSupplier) => { return (url, callback) => { const response = new PassThrough() const file = `test/fixtures/${fileSupplier(url)}` @@ -431,6 +449,6 @@ const getPacakgeStub = async (file, destination) => { await promisify(fs.copyFile)(file, destination) } -const loadJson = fileName => { +const loadJson = (fileName) => { return JSON.parse(fs.readFileSync(`test/fixtures/${fileName}`)) -} \ No newline at end of file +} diff --git a/test/unit/providers/fetch/gitClonerTests.js b/test/unit/providers/fetch/gitClonerTests.js index 
0c963525..2d49696e 100644 --- a/test/unit/providers/fetch/gitClonerTests.js +++ b/test/unit/providers/fetch/gitClonerTests.js @@ -8,12 +8,18 @@ const github_stub = 'https://github.com/' const cloner = gitCloner({}) describe('building git urls', () => { it('builds a gitlab url', () => { - expect(cloner._buildUrl(spec('git', 'gitlab', 'namespace', 'repo', 'abc123'))).to.equal(gitlab_stub + 'namespace/repo.git') - expect(cloner._buildUrl(spec('git', 'gitlab', 'name.space.thing', 'repo', 'abc123'))).to.equal(gitlab_stub + 'name/space/thing/repo.git') + expect(cloner._buildUrl(spec('git', 'gitlab', 'namespace', 'repo', 'abc123'))).to.equal( + gitlab_stub + 'namespace/repo.git', + ) + expect(cloner._buildUrl(spec('git', 'gitlab', 'name.space.thing', 'repo', 'abc123'))).to.equal( + gitlab_stub + 'name/space/thing/repo.git', + ) }) it('builds a github url', () => { - expect(cloner._buildUrl(spec('git', 'github', 'namespace', 'repo', 'abc123'))).to.equal(github_stub + 'namespace/repo.git') + expect(cloner._buildUrl(spec('git', 'github', 'namespace', 'repo', 'abc123'))).to.equal( + github_stub + 'namespace/repo.git', + ) }) }) @@ -32,7 +38,9 @@ describe('fetch result', () => { expect(request.url).to.be.equal('cd:/git/github/palantir/refreshable/deef80a18aa929943e5dab1dba7276c231c84519') expect(request.meta.gitSize).to.be.equal(532) expect(request.contentOrigin).to.be.equal('origin') - expect(request.casedSpec.toUrl()).to.be.equal('cd:/git/github/palantir/refreshable/deef80a18aa929943e5dab1dba7276c231c84519') + expect(request.casedSpec.toUrl()).to.be.equal( + 'cd:/git/github/palantir/refreshable/deef80a18aa929943e5dab1dba7276c231c84519', + ) expect(request.document.size).to.be.equal(532) expect(request.document.releaseDate.toISOString()).to.be.equal('2021-04-08T13:27:49.000Z') expect(request.getTrackedCleanups().length).to.be.equal(0) diff --git a/test/unit/providers/fetch/goFetchTests.js b/test/unit/providers/fetch/goFetchTests.js index 237d9819..9893433f 100644 --- a/test/unit/providers/fetch/goFetchTests.js +++ b/test/unit/providers/fetch/goFetchTests.js @@ -14,26 +14,34 @@ const goBaseURL = 'https://proxy.golang.org/' describe('Go utility functions', () => { it('builds URLs', () => { const fetch = GoFetch({}) - expect(fetch._buildUrl(spec('go', 'golang', 'cloud.google.com', 'go', 'v0.56.0'))).to.equal(goBaseURL + 'cloud.google.com/go/@v/v0.56.0.zip') - expect(fetch._buildUrl(spec('go', 'golang', 'cloud.google.com', 'go', 'v0.56.0'), '.mod')).to.equal(goBaseURL + 'cloud.google.com/go/@v/v0.56.0.mod') - expect(fetch._buildUrl(spec('go', 'golang', '-', 'collectd.org', 'v0.5.0'))).to.equal(goBaseURL + 'collectd.org/@v/v0.5.0.zip') - expect(fetch._buildUrl(spec('go', 'golang', 'github.com%2fAzure%2fazure-event-hubs-go', 'v3', 'v3.2.0'))).to.equal(goBaseURL + 'github.com/Azure/azure-event-hubs-go/v3/@v/v3.2.0.zip') - expect(fetch._buildUrl(spec('go', 'golang', 'github.com%2FAzure%2Fazure-event-hubs-go', 'v3', 'v3.2.0'))).to.equal(goBaseURL + 'github.com/Azure/azure-event-hubs-go/v3/@v/v3.2.0.zip') + expect(fetch._buildUrl(spec('go', 'golang', 'cloud.google.com', 'go', 'v0.56.0'))).to.equal( + goBaseURL + 'cloud.google.com/go/@v/v0.56.0.zip', + ) + expect(fetch._buildUrl(spec('go', 'golang', 'cloud.google.com', 'go', 'v0.56.0'), '.mod')).to.equal( + goBaseURL + 'cloud.google.com/go/@v/v0.56.0.mod', + ) + expect(fetch._buildUrl(spec('go', 'golang', '-', 'collectd.org', 'v0.5.0'))).to.equal( + goBaseURL + 'collectd.org/@v/v0.5.0.zip', + ) + expect(fetch._buildUrl(spec('go', 'golang', 
'github.com%2fAzure%2fazure-event-hubs-go', 'v3', 'v3.2.0'))).to.equal( + goBaseURL + 'github.com/Azure/azure-event-hubs-go/v3/@v/v3.2.0.zip', + ) + expect(fetch._buildUrl(spec('go', 'golang', 'github.com%2FAzure%2Fazure-event-hubs-go', 'v3', 'v3.2.0'))).to.equal( + goBaseURL + 'github.com/Azure/azure-event-hubs-go/v3/@v/v3.2.0.zip', + ) }) }) - const hashes = { 'v1.3.0.zip': { sha1: '270d80279fca2d21c401dd40b6fc6370c41bfd94', - sha256: '03872ee7d6747bc2ee0abadbd4eb09e60f6df17d0a6142264abe8a8a00af50e7' - } + sha256: '03872ee7d6747bc2ee0abadbd4eb09e60f6df17d0a6142264abe8a8a00af50e7', + }, } let Fetch function pickArtifact(url) { - if (url.endsWith('.mod')) return 'v1.3.0.mod' if (url.endsWith('.info')) return 'v1.3.0.info' if (url.endsWith('.zip')) return 'v1.3.0.zip' @@ -45,7 +53,7 @@ describe('Go Proxy fetching', () => { let successHttpStub beforeEach(() => { - const requestPromiseStub = options => { + const requestPromiseStub = (options) => { if (options.url) { expect(options.url).to.contain(goBaseURL) if (options.url.includes('error')) throw new Error('yikes') @@ -75,8 +83,8 @@ describe('Go Proxy fetching', () => { successHttpStub = { get: sinon.stub().returns({ status: 200, - data: httpContent - }) + data: httpContent, + }), } Fetch = proxyquire('../../../../providers/fetch/goFetch', { request: { get: getStub }, @@ -155,12 +163,14 @@ describe('Go Proxy fetching', () => { info: sinon.spy(), }, http: { - get: sinon.stub().throws(merge(new Error(), { - response: { - status: 429 - } - })) - } + get: sinon.stub().throws( + merge(new Error(), { + response: { + status: 429, + }, + }), + ), + }, }) const request = await handler.handle(new Request('test', 'cd:/go/golang/rsc.io/quote/v1.3.0')) expect(request.processControl).to.equal('requeue') @@ -173,12 +183,14 @@ describe('Go Proxy fetching', () => { info: sinon.spy(), }, http: { - get: sinon.stub().throws(merge(new Error(), { - response: { - status: 429 - } - })) - } + get: sinon.stub().throws( + merge(new Error(), { + response: { + status: 429, + }, + }), + ), + }, }) let request = new Request('test', 'cd:/go/golang/rsc.io/quote/v1.3.0') request.attemptCount = 5 @@ -193,12 +205,14 @@ describe('Go Proxy fetching', () => { info: sinon.spy(), }, http: { - get: sinon.stub().throws(merge(new Error(), { - response: { - status: 404 - } - })) - } + get: sinon.stub().throws( + merge(new Error(), { + response: { + status: 404, + }, + }), + ), + }, }) const request = await handler.handle(new Request('test', 'cd:/go/golang/rsc.io/quote/v1.3.0')) expect(request.fetchResult.document.registryData?.licenses).to.be.undefined @@ -214,17 +228,16 @@ describe('Go Proxy fetching', () => { http: { get: sinon.stub().returns({ status: 200, - data: - `
+ data: `

[garbled fixture: the stubbed HTTP response here is an HTML license page whose markup was stripped in extraction; its recoverable text content was "Apache-2.0" and "HTML has changed"]

-
` - }) - } +
`, + }), + }, }) const request = await handler.handle(new Request('test', 'cd:/go/golang/rsc.io/quote/v1.3.0')) expect(request.fetchResult.document.registryData?.licenses).to.be.undefined diff --git a/test/unit/providers/fetch/gradlePluginFetchTests.js b/test/unit/providers/fetch/gradlePluginFetchTests.js index 11fd0983..023b3d7f 100644 --- a/test/unit/providers/fetch/gradlePluginFetchTests.js +++ b/test/unit/providers/fetch/gradlePluginFetchTests.js @@ -9,19 +9,18 @@ const GradlePluginFetch = require('../../../../providers/fetch/gradlePluginFetch const Request = require('../../../../ghcrawler').request describe('Gradle plugin fetch', () => { - describe('look up latest version in maven meta data', () => { const spec = { type: 'maven', provider: 'gradleplugin', namespace: 'io.github.lognet', - name: 'grpc-spring-boot-starter-gradle-plugin' + name: 'grpc-spring-boot-starter-gradle-plugin', } it('get latest version from maven meta data', async () => { const gradleFetch = GradlePluginFetch({ logger: { log: sinon.stub() }, - requestPromise: sinon.stub().resolves(fs.readFileSync('test/fixtures/maven/maven-metadata.xml')) + requestPromise: sinon.stub().resolves(fs.readFileSync('test/fixtures/maven/maven-metadata.xml')), }) const latest = await gradleFetch._getLatestVersion(spec) expect(latest).to.be.eq('4.5.10') @@ -30,7 +29,7 @@ describe('Gradle plugin fetch', () => { it('no latest version', async () => { const gradleFetch = GradlePluginFetch({ logger: { log: sinon.stub() }, - requestPromise: sinon.stub().resolves('') + requestPromise: sinon.stub().resolves(''), }) const latest = await gradleFetch._getLatestVersion(spec) expect(latest).to.be.null @@ -39,7 +38,7 @@ describe('Gradle plugin fetch', () => { it('no maven meta data found', async () => { const gradleFetch = GradlePluginFetch({ logger: { log: sinon.stub() }, - requestPromise: sinon.stub().rejects({ statusCode: 404 }) + requestPromise: sinon.stub().rejects({ statusCode: 404 }), }) const latest = await gradleFetch._getLatestVersion(spec) expect(latest).to.be.null @@ -50,8 +49,8 @@ describe('Gradle plugin fetch', () => { const hashes = { 'swt-3.3.0-v3346.jar': { sha1: 'd886a6db6b7195911516896feebe3a5d1dddfd46', - sha256: '18a3a53a27df164d4db56d0f7f5da2edd25995418d5538f40eb4018347fe1354' - } + sha256: '18a3a53a27df164d4db56d0f7f5da2edd25995418d5538f40eb4018347fe1354', + }, } function pickArtifact(url) { @@ -78,7 +77,7 @@ describe('Gradle plugin fetch', () => { let handler beforeEach(() => { - const requestPromiseStub = options => { + const requestPromiseStub = (options) => { const content = contentFromFile(options.url) return options.json ? 
JSON.parse(content) : content } @@ -92,7 +91,7 @@ describe('Gradle plugin fetch', () => { handler = GradlePluginFetch({ logger: { log: sinon.stub(), error: sinon.stub() }, requestPromise: requestPromiseStub, - requestStream: getStub + requestStream: getStub, }) }) @@ -100,7 +99,7 @@ describe('Gradle plugin fetch', () => { const url = handler._buildBaseUrl({ type: 'maven', provider: 'gradleplugin', - name: 'grpc-spring-boot-starter-gradle-plugin' + name: 'grpc-spring-boot-starter-gradle-plugin', }) //should not fail expect(url).not.to.be.undefined @@ -127,7 +126,9 @@ describe('Gradle plugin fetch', () => { }) it('test success with sourcearchive', async () => { - const request = await handler.handle(new Request('test', 'cd:/sourcearchive/gradleplugin/org.eclipse/swt/3.3.0-v3344')) + const request = await handler.handle( + new Request('test', 'cd:/sourcearchive/gradleplugin/org.eclipse/swt/3.3.0-v3344'), + ) verifySuccess(request.fetchResult) expect(request.fetchResult.casedSpec.revision).to.equal('3.3.0-v3344') expect(request.fetchResult.document.location).to.be.a('string') @@ -142,7 +143,9 @@ describe('Gradle plugin fetch', () => { it('handle no pom found', async () => { handler._handleRequestPromise = sinon.stub().rejects({ statusCode: 404 }) - const request = await handler.handle(new Request('test', 'cd:/sourcearchive/gradleplugin/org.eclipse/swt/3.3.0-v3344')) + const request = await handler.handle( + new Request('test', 'cd:/sourcearchive/gradleplugin/org.eclipse/swt/3.3.0-v3344'), + ) expect(request.processControl).to.be.equal('skip') }) @@ -153,8 +156,10 @@ describe('Gradle plugin fetch', () => { response.end() return response } - const request = await handler.handle(new Request('test', 'cd:/sourcearchive/gradleplugin/org.eclipse/swt/3.3.0-v3344')) + const request = await handler.handle( + new Request('test', 'cd:/sourcearchive/gradleplugin/org.eclipse/swt/3.3.0-v3344'), + ) expect(request.processControl).to.be.equal('skip') }) }) -}) \ No newline at end of file +}) diff --git a/test/unit/providers/fetch/mavenBasedFetchTests.js b/test/unit/providers/fetch/mavenBasedFetchTests.js index 8c7eaf6f..5b5a826e 100644 --- a/test/unit/providers/fetch/mavenBasedFetchTests.js +++ b/test/unit/providers/fetch/mavenBasedFetchTests.js @@ -16,4 +16,4 @@ describe('MavenBasedFetch', () => { expect(file.mtime.toISOString().includes('2022-02-24')) }) }) -}) \ No newline at end of file +}) diff --git a/test/unit/providers/fetch/mavencentralFetchTests.js b/test/unit/providers/fetch/mavencentralFetchTests.js index bd027fa3..2759b62e 100644 --- a/test/unit/providers/fetch/mavencentralFetchTests.js +++ b/test/unit/providers/fetch/mavencentralFetchTests.js @@ -17,10 +17,10 @@ describe('Maven Central utility functions', () => { expect(fetch._buildUrl(spec('maven', 'g1', 'a1', '1.2.3'), '.pom')).to.equal(stub + 'g1/a1/1.2.3/a1-1.2.3.pom') expect(fetch._buildUrl(spec('maven', 'g1', 'a1', '1.2.3'))).to.equal(stub + 'g1/a1/1.2.3/a1-1.2.3.jar') expect(fetch._buildUrl(spec('sourcearchive', 'g1', 'a1', '1.2.3'), '-sources.jar')).to.equal( - stub + 'g1/a1/1.2.3/a1-1.2.3-sources.jar' + stub + 'g1/a1/1.2.3/a1-1.2.3-sources.jar', ) expect(fetch._buildUrl(spec('maven', 'com.g1', 'a1.foo', '1.2.3'))).to.equal( - stub + 'com/g1/a1.foo/1.2.3/a1.foo-1.2.3.jar' + stub + 'com/g1/a1.foo/1.2.3/a1.foo-1.2.3.jar', ) expect(fetch._buildUrl(spec('maven', 'g1', 'a1', '1.2.3'), '.jar')).to.equal(stub + 'g1/a1/1.2.3/a1-1.2.3.jar') expect(fetch._buildUrl(spec('maven', 'g1', 'a1', '1.2.3'), '.aar')).to.equal(stub + 
'g1/a1/1.2.3/a1-1.2.3.aar') @@ -34,11 +34,12 @@ describe('Maven Central utility functions', () => { it('gets releaseDate from pomProperties', async () => { const fetch = MavenFetch({ logger: { log: sinon.stub() }, - requestPromise: sinon.stub().resolves({}) + requestPromise: sinon.stub().resolves({}), }) sinon.replace(fs, 'exists', (loc, cb) => cb(true)) sinon.replace(fs, 'readFile', (loc, cb) => - cb(null, '#Generated by Maven\n#Fri May 13 12:26:22 GMT+01:00 2011\ngroupId=g1\nartifactId=a1\nversion=1.2.3')) + cb(null, '#Generated by Maven\n#Fri May 13 12:26:22 GMT+01:00 2011\ngroupId=g1\nartifactId=a1\nversion=1.2.3'), + ) const date = await fetch._getReleaseDate('/tmp/', spec('maven', 'g1', 'a1', '1.2.3')) expect(date).to.eq('2011-05-13T11:26:22.000Z') @@ -52,8 +53,8 @@ function spec(type, namespace, name, revision) { const hashes = { 'swt-3.3.0-v3346.jar': { sha1: 'd886a6db6b7195911516896feebe3a5d1dddfd46', - sha256: '18a3a53a27df164d4db56d0f7f5da2edd25995418d5538f40eb4018347fe1354' - } + sha256: '18a3a53a27df164d4db56d0f7f5da2edd25995418d5538f40eb4018347fe1354', + }, } function pickArtifact(url) { @@ -64,11 +65,10 @@ function pickArtifact(url) { } describe('MavenCentral fetching', () => { - let handler beforeEach(() => { - const requestPromiseStub = options => { + const requestPromiseStub = (options) => { if (options.url) { if (options.url.includes('error')) throw new Error('yikes') if (options.url.includes('code')) throw { statusCode: 500, message: 'Code' } @@ -94,7 +94,7 @@ describe('MavenCentral fetching', () => { handler = MavenFetch({ logger: { log: sinon.stub() }, requestPromise: requestPromiseStub, - requestStream: getStub + requestStream: getStub, }) }) @@ -115,17 +115,17 @@ describe('MavenCentral fetching', () => { it('handles download error', async () => { handler._getPoms = () => [dummyPom1] - handler.decompress = () => { } - handler.computeHashes = () => { } + handler.decompress = () => {} + handler.computeHashes = () => {} handler.createTempDir = () => { return { name: '/tmp/' } } handler.createTempFile = () => { return { - name: '/tmp/random' + name: '/tmp/random', } } - handler._getArtifact = () => { } + handler._getArtifact = () => {} try { const result = await handler.handle(new Request('test', 'cd:/maven/mavencentral/org.eclipse/error/3.3.0-v3344')) expect(result.outcome).to.eq('Missing ') @@ -171,12 +171,12 @@ const dummyPom1 = { license: [ { name: ['Eclipse Public License - v 1.0'], - url: ['http://www.eclipse.org/org/documents/epl-v10.html'] - } - ] - } - ] - } + url: ['http://www.eclipse.org/org/documents/epl-v10.html'], + }, + ], + }, + ], + }, } const dummyPom2 = { @@ -189,12 +189,12 @@ const dummyPom2 = { license: [ { name: ['Eclipse Public License - v 1.0'], - url: ['http://www.eclipse.org/org/documents/epl-v10.html'] - } - ] - } - ] - } + url: ['http://www.eclipse.org/org/documents/epl-v10.html'], + }, + ], + }, + ], + }, } const dummyMerged = { @@ -205,12 +205,12 @@ const dummyMerged = { license: [ { name: ['Eclipse Public License - v 1.0'], - url: ['http://www.eclipse.org/org/documents/epl-v10.html'] - } - ] - } + url: ['http://www.eclipse.org/org/documents/epl-v10.html'], + }, + ], + }, ], modelVersion: ['4.0.0'], name: ['Standard Widget Toolkit'], - version: ['3.3.0-v3346'] -} \ No newline at end of file + version: ['3.3.0-v3346'], +} diff --git a/test/unit/providers/fetch/mavengoogleFetchTests.js b/test/unit/providers/fetch/mavengoogleFetchTests.js index 99294db1..4dc52bbe 100644 --- a/test/unit/providers/fetch/mavengoogleFetchTests.js +++ 
b/test/unit/providers/fetch/mavengoogleFetchTests.js @@ -17,10 +17,10 @@ describe('Maven Google utility functions', () => { expect(fetch._buildUrl(spec('maven', 'g1', 'a1', '1.2.3'), '.pom')).to.equal(stub + 'g1/a1/1.2.3/a1-1.2.3.pom') expect(fetch._buildUrl(spec('maven', 'g1', 'a1', '1.2.3'))).to.equal(stub + 'g1/a1/1.2.3/a1-1.2.3.jar') expect(fetch._buildUrl(spec('sourcearchive', 'g1', 'a1', '1.2.3'), '-sources.jar')).to.equal( - stub + 'g1/a1/1.2.3/a1-1.2.3-sources.jar' + stub + 'g1/a1/1.2.3/a1-1.2.3-sources.jar', ) expect(fetch._buildUrl(spec('maven', 'com.g1', 'a1.foo', '1.2.3'))).to.equal( - stub + 'com/g1/a1.foo/1.2.3/a1.foo-1.2.3.jar' + stub + 'com/g1/a1.foo/1.2.3/a1.foo-1.2.3.jar', ) expect(fetch._buildUrl(spec('maven', 'g1', 'a1', '1.2.3'), '.jar')).to.equal(stub + 'g1/a1/1.2.3/a1-1.2.3.jar') expect(fetch._buildUrl(spec('maven', 'g1', 'a1', '1.2.3'), '.aar')).to.equal(stub + 'g1/a1/1.2.3/a1-1.2.3.aar') @@ -36,7 +36,8 @@ describe('Maven Google utility functions', () => { const fs = require('fs') sinon.replace(fs, 'exists', (loc, cb) => cb(true)) sinon.replace(fs, 'readFile', (loc, cb) => - cb(null, '#Generated by Maven\n#Fri May 13 12:26:22 GMT+01:00 2011\ngroupId=g1\nartifactId=a1\nversion=1.2.3')) + cb(null, '#Generated by Maven\n#Fri May 13 12:26:22 GMT+01:00 2011\ngroupId=g1\nartifactId=a1\nversion=1.2.3'), + ) const date = await fetch._getReleaseDate('/tmp/', spec('maven', 'g1', 'a1', '1.2.3')) expect(date).to.eq('2011-05-13T11:26:22.000Z') @@ -50,8 +51,8 @@ function spec(type, namespace, name, revision) { const hashes = { 'swt-3.3.0-v3346.jar': { sha1: 'd886a6db6b7195911516896feebe3a5d1dddfd46', - sha256: '18a3a53a27df164d4db56d0f7f5da2edd25995418d5538f40eb4018347fe1354' - } + sha256: '18a3a53a27df164d4db56d0f7f5da2edd25995418d5538f40eb4018347fe1354', + }, } function pickArtifact(url) { @@ -62,11 +63,10 @@ function pickArtifact(url) { } describe('MavenGoogle fetching', () => { - let handler beforeEach(() => { - const requestPromiseStub = options => { + const requestPromiseStub = (options) => { if (options.url) { if (options.url.includes('error')) throw new Error('yikes') if (options.url.includes('code')) throw { statusCode: 500, message: 'Code' } @@ -92,7 +92,7 @@ describe('MavenGoogle fetching', () => { handler = MavenGoogleFetch({ logger: { log: sinon.stub() }, requestPromise: requestPromiseStub, - requestStream: getStub + requestStream: getStub, }) }) @@ -113,17 +113,17 @@ describe('MavenGoogle fetching', () => { it('handles download error', async () => { handler._getPoms = () => [dummyPom1] - handler.decompress = () => { } - handler.computeHashes = () => { } + handler.decompress = () => {} + handler.computeHashes = () => {} handler.createTempDir = () => { return { name: '/tmp/' } } handler.createTempFile = () => { return { - name: '/tmp/random' + name: '/tmp/random', } } - handler._getArtifact = () => { } + handler._getArtifact = () => {} try { const result = await handler.handle(new Request('test', 'cd:/maven/mavengoogle/org.eclipse/error/3.3.0-v3344')) expect(result.outcome).to.eq('Missing ') @@ -169,12 +169,12 @@ const dummyPom1 = { license: [ { name: ['Eclipse Public License - v 1.0'], - url: ['http://www.eclipse.org/org/documents/epl-v10.html'] - } - ] - } - ] - } + url: ['http://www.eclipse.org/org/documents/epl-v10.html'], + }, + ], + }, + ], + }, } const dummyPom2 = { @@ -187,12 +187,12 @@ const dummyPom2 = { license: [ { name: ['Eclipse Public License - v 1.0'], - url: ['http://www.eclipse.org/org/documents/epl-v10.html'] - } - ] - } - ] - } + url: 
['http://www.eclipse.org/org/documents/epl-v10.html'], + }, + ], + }, + ], + }, } const dummyMerged = { @@ -203,12 +203,12 @@ const dummyMerged = { license: [ { name: ['Eclipse Public License - v 1.0'], - url: ['http://www.eclipse.org/org/documents/epl-v10.html'] - } - ] - } + url: ['http://www.eclipse.org/org/documents/epl-v10.html'], + }, + ], + }, ], modelVersion: ['4.0.0'], name: ['Standard Widget Toolkit'], - version: ['3.3.0-v3346'] -} \ No newline at end of file + version: ['3.3.0-v3346'], +} diff --git a/test/unit/providers/fetch/npmjsFetchTests.js b/test/unit/providers/fetch/npmjsFetchTests.js index ddc6653a..b8277d6a 100644 --- a/test/unit/providers/fetch/npmjsFetchTests.js +++ b/test/unit/providers/fetch/npmjsFetchTests.js @@ -46,14 +46,14 @@ let Fetch const hashes = { 'redie-0.3.0.tgz': { sha1: '48581317ac174ac269c398ff946d6c4779145374', - sha256: '66185c319680ee41268217c2467e314019e8ba4ea4d8374335fbe29e64a8d19f' - } + sha256: '66185c319680ee41268217c2467e314019e8ba4ea4d8374335fbe29e64a8d19f', + }, } describe('', () => { beforeEach(() => { const resultBox = {} - const requestPromiseStub = options => { + const requestPromiseStub = (options) => { if (options.url) { if (options.url.includes('regError')) throw new Error('yikes') if (options.url.includes('missing')) throw { statusCode: 404 } @@ -73,7 +73,7 @@ describe('', () => { } Fetch = proxyquire('../../../../providers/fetch/npmjsFetch', { request: { get: getStub }, - 'request-promise-native': requestPromiseStub + 'request-promise-native': requestPromiseStub, }) Fetch._resultBox = resultBox }) @@ -133,7 +133,7 @@ function createRegistryData(version) { return { manifest: { version }, versions: { [version]: { test: true } }, - time: { [version]: '42' } + time: { [version]: '42' }, } } diff --git a/test/unit/providers/fetch/nugetFetchTests.js b/test/unit/providers/fetch/nugetFetchTests.js index feab0dd5..5ae1c900 100644 --- a/test/unit/providers/fetch/nugetFetchTests.js +++ b/test/unit/providers/fetch/nugetFetchTests.js @@ -33,8 +33,8 @@ let Fetch const hashes = { 'xunit.core.2.4.1.nupkg': { sha1: '362ec34f3358c23e2effa87ecfc5de1c4292d60a', - sha256: '2a05200082483c7439550e05881fa2e6ed895d26319af30257ccd73f891ccbda' - } + sha256: '2a05200082483c7439550e05881fa2e6ed895d26319af30257ccd73f891ccbda', + }, } function pickFile(url) { @@ -66,7 +66,7 @@ describe('', () => { const requestRetryStub = { defaults: () => { return { get } - } + }, } Fetch = proxyquire('../../../../providers/fetch/nugetFetch', { requestretry: requestRetryStub }) }) @@ -128,8 +128,8 @@ describe('', () => { } handler._getManifest = () => '{}' handler._getNuspec = () => '{}' - handler._createTempDir = () => { } - handler._persistMetadata = () => { } + handler._createTempDir = () => {} + handler._persistMetadata = () => {} try { await handler.handle(new Request('test', 'cd:/nuget/nuget/-/xunit.core/2.4.1')) expect(false).to.be.true diff --git a/test/unit/providers/fetch/packagistFetchTests.js b/test/unit/providers/fetch/packagistFetchTests.js index 43d944f2..06173b20 100644 --- a/test/unit/providers/fetch/packagistFetchTests.js +++ b/test/unit/providers/fetch/packagistFetchTests.js @@ -14,14 +14,14 @@ let Fetch const hashes = { 'symfony-polyfill-mbstring-v1.11.0-0-gfe5e94c.zip': { sha1: '8d24c52e593042529ba86549d9920eb4d9649763', - sha256: '797a607b7ea7dad62f78a56f3687f2b2108d221b0682d0ea1386db61714dc8a2' - } + sha256: '797a607b7ea7dad62f78a56f3687f2b2108d221b0682d0ea1386db61714dc8a2', + }, } describe('packagistFetch', () => { beforeEach(() => { const resultBox = {} 
- const requestPromiseStub = options => { + const requestPromiseStub = (options) => { if (options.url) { if (options.url.includes('regError')) throw new Error('Invalid url') if (options.url.includes('missing')) throw { statusCode: 404 } @@ -41,7 +41,7 @@ describe('packagistFetch', () => { } Fetch = proxyquire('../../../../providers/fetch/packagistFetch', { request: { get: getStub }, - 'request-promise-native': requestPromiseStub + 'request-promise-native': requestPromiseStub, }) Fetch._resultBox = resultBox }) @@ -62,7 +62,7 @@ describe('packagistFetch', () => { request.fetchResult.copyTo(request) expect(request.document.hashes.sha1).to.be.equal(hashes['symfony-polyfill-mbstring-v1.11.0-0-gfe5e94c.zip']['sha1']) expect(request.document.hashes.sha256).to.be.equal( - hashes['symfony-polyfill-mbstring-v1.11.0-0-gfe5e94c.zip']['sha256'] + hashes['symfony-polyfill-mbstring-v1.11.0-0-gfe5e94c.zip']['sha256'], ) expect(request.document.dirRoot).to.be.equal('symfony-polyfill-mbstring-fe5e94c') expect(request.document.releaseDate).to.equal('2019-02-06T07:57:58+00:00') diff --git a/test/unit/providers/fetch/podFetchTests.js b/test/unit/providers/fetch/podFetchTests.js index bbb3b7ca..b8884400 100644 --- a/test/unit/providers/fetch/podFetchTests.js +++ b/test/unit/providers/fetch/podFetchTests.js @@ -6,8 +6,7 @@ const proxyquire = require('proxyquire') const Request = require('../../../../ghcrawler/lib/request.js') describe('podFetch', () => { - - const loadJson = fileName => { + const loadJson = (fileName) => { return JSON.parse(fs.readFileSync(`test/fixtures/pod/${fileName}`)) } @@ -15,11 +14,11 @@ describe('podFetch', () => { requestretry: { defaults: () => { return { - get: sinon.stub().resolves({ body: loadJson('versions.json'), statusCode: 200 }) + get: sinon.stub().resolves({ body: loadJson('versions.json'), statusCode: 200 }), } - } + }, }, - 'request-promise-native': sinon.stub().resolves(loadJson('registryData.json')) + 'request-promise-native': sinon.stub().resolves(loadJson('registryData.json')), }) let fetch @@ -46,4 +45,4 @@ describe('podFetch', () => { expect(result.document.releaseDate).to.be.equal('2019-04-10 00:22:10 UTC') expect(result.casedSpec.toUrl()).to.be.equal('cd:/pod/cocoapods/-/SwiftLCS/1.3.4') }) -}) \ No newline at end of file +}) diff --git a/test/unit/providers/fetch/pypiFetchTests.js b/test/unit/providers/fetch/pypiFetchTests.js index f431f916..9e369568 100644 --- a/test/unit/providers/fetch/pypiFetchTests.js +++ b/test/unit/providers/fetch/pypiFetchTests.js @@ -50,7 +50,7 @@ describe('pypiFetch handle function', () => { expect(result.document.releaseDate).to.be.equal('2019-01-12T22:25:58') expect(result.document.hashes).to.be.deep.equal({ sha1: 'd886a6db6b7195911516896feebe3a5d1dddfd46', - sha256: '18a3a53a27df164d4db56d0f7f5da2edd25995418d5538f40eb4018347fe1354' + sha256: '18a3a53a27df164d4db56d0f7f5da2edd25995418d5538f40eb4018347fe1354', }) }) @@ -58,9 +58,9 @@ describe('pypiFetch handle function', () => { // release information in the registry data is empty requestGetStub.returns({ body: { - 'releases': { '1.10.0': [] } + releases: { '1.10.0': [] }, }, - statusCode: 200 + statusCode: 200, }) let result = await fetch.handle(new Request('pypi', 'cd:/pypi/pypi/-/dnspython/1.10.0')) @@ -91,7 +91,7 @@ describe('pypiFetch handle function', () => { 'GNU Lesser General Public License v3.0': 'LGPL-3.0-or-later', 'GNU LGPL v3.0': 'LGPL-3.0-or-later', '(LGPL)': 'LGPL-3.0-or-later', - 'LGLP3': 'LGPL-3.0-or-later', + LGLP3: 'LGPL-3.0-or-later', 'LGPL 2.1': 'LGPL-2.1-only', 
'LGPL 3': 'LGPL-3.0-or-later',
       'LGPL 3.0': 'LGPL-3.0-or-later',
@@ -99,23 +99,23 @@ describe('pypiFetch handle function', () => {
       'LGPL v2': 'LGPL-2.0-only',
       'LGPL v2+': 'LGPL-2.0-or-later',
       'LGPL v3': 'LGPL-3.0-or-later',
-      'LGPL': 'LGPL-3.0-or-later',
+      LGPL: 'LGPL-3.0-or-later',
       'LGPL-2': 'LGPL-2.0-only',
       'LGPL-3': 'LGPL-3.0-or-later',
       'LGPL.v3': 'LGPL-3.0-or-later',
-      'LGPL2': 'LGPL-2.0-only',
+      LGPL2: 'LGPL-2.0-only',
       'LGPL2.1': 'LGPL-2.1-only',
       'LGPL2.1+': 'LGPL-2.1-or-later',
-      'LGPL3': 'LGPL-3.0-or-later',
+      LGPL3: 'LGPL-3.0-or-later',
       'LGPL3+': 'LGPL-3.0-or-later',
       'LGPL3.0': 'LGPL-3.0-or-later',
       'LGPL:': 'LGPL-3.0-or-later',
       'LGPLv2.1': 'LGPL-2.1-only',
-      'LGPLv3': 'LGPL-3.0-or-later',
+      LGPLv3: 'LGPL-3.0-or-later',
       'LGPLv3+': 'LGPL-3.0-or-later',
       'LGPL-2.0+': 'LGPL-2.0-or-later',
       'LGPL-2.1+': 'LGPL-2.1-or-later',
-      'LGPL-3.0+': 'LGPL-3.0-or-later'
+      'LGPL-3.0+': 'LGPL-3.0-or-later',
     }
     for (const [key, value] of Object.entries(conversions)) {
       expect(spdxCorrect(key)).to.be.equal(value)
diff --git a/test/unit/providers/fetch/rubyGemsFetchTests.js b/test/unit/providers/fetch/rubyGemsFetchTests.js
index 387ca54a..110bee9f 100644
--- a/test/unit/providers/fetch/rubyGemsFetchTests.js
+++ b/test/unit/providers/fetch/rubyGemsFetchTests.js
@@ -14,8 +14,9 @@ describe('rubyGemsFetch', () => {
       version: '0.5.1',
       gem_uri: 'https://rubygems.org/gems/small-0.5.1.gem',
     })
-    fetch._getPackage = sinon.stub().callsFake((spec, destination) =>
-      getPacakgeStub('test/fixtures/ruby/small-0.5.1.gem', destination))
+    fetch._getPackage = sinon
+      .stub()
+      .callsFake((spec, destination) => getPacakgeStub('test/fixtures/ruby/small-0.5.1.gem', destination))
   })

   function verifyFetch(result) {
@@ -23,7 +24,7 @@ describe('rubyGemsFetch', () => {
     expect(result.casedSpec.toUrl()).to.be.equal('cd:/ruby/rubygems/-/small/0.5.1')
     expect(result.document.hashes).to.be.deep.equal({
       sha1: 'f343d34992fffa1e4abbb1a2bfae45fcf49123ba',
-      sha256: '2b5e4ba4e915e897d6fe9392c1cd1f5a21f8e7963679fb23f0a1953124772da0'
+      sha256: '2b5e4ba4e915e897d6fe9392c1cd1f5a21f8e7963679fb23f0a1953124772da0',
     })
     expect(result.document.releaseDate).to.contain('2012-05-21')
   }
@@ -41,4 +42,4 @@ describe('rubyGemsFetch', () => {

 const getPacakgeStub = async (file, destination) => {
   await promisify(fs.copyFile)(file, destination)
-}
\ No newline at end of file
+}
diff --git a/test/unit/providers/process/abstractClearylDefinedProcessorTests.js b/test/unit/providers/process/abstractClearylDefinedProcessorTests.js
index 9795efe1..6dfd5ba3 100644
--- a/test/unit/providers/process/abstractClearylDefinedProcessorTests.js
+++ b/test/unit/providers/process/abstractClearylDefinedProcessorTests.js
@@ -10,13 +10,13 @@ describe('AbstractClearlyDefinedProcessor interesting file identification', () =
   it('finds files it should', () => {
     const files = ['license', 'License.md', 'LICENSE.HTML', 'LICENSE.txt']
     const processor = new AbstractCDProcessor({})
-    files.forEach(file => expect(processor._isInterestinglyNamed(file)).to.be.true)
+    files.forEach((file) => expect(processor._isInterestinglyNamed(file)).to.be.true)
   })

   it('does not find files it should not', () => {
     const files = ['licenser', 'Licenset.md', 'test.HTML', 'LICENSE.doc']
     const processor = new AbstractCDProcessor({})
-    files.forEach(file => expect(processor._isInterestinglyNamed(file)).to.be.false)
+    files.forEach((file) => expect(processor._isInterestinglyNamed(file)).to.be.false)
   })
 })

@@ -31,8 +31,8 @@ describe('AbstractClearlyDefinedProcessor add files', () => {
     const document = { location: '/test' }
await processor._addFiles({ document }) expect(document.files.length).to.be.equal(2) - expect(document.files.map(file => file.path)).to.have.members(['license', 'package/notice.txt']) - expect(document.files.every(file => file.hashes.sha1 === '42')).to.be.true + expect(document.files.map((file) => file.path)).to.have.members(['license', 'package/notice.txt']) + expect(document.files.every((file) => file.hashes.sha1 === '42')).to.be.true expect(processor.attachFiles.callCount).to.be.equal(2) }) @@ -43,7 +43,7 @@ describe('AbstractClearlyDefinedProcessor add files', () => { processor.computeHashes = sinon.stub() const document = { location: 'c:\\test' } await processor._addFiles({ document }) - expect(document.files.map(file => file.path)).to.have.members(['license', 'package/notice.txt']) + expect(document.files.map((file) => file.path)).to.have.members(['license', 'package/notice.txt']) }) it('handles no files', async () => { diff --git a/test/unit/providers/process/abstractProcessorTests.js b/test/unit/providers/process/abstractProcessorTests.js index f6f51cf8..f6b0e9d1 100644 --- a/test/unit/providers/process/abstractProcessorTests.js +++ b/test/unit/providers/process/abstractProcessorTests.js @@ -64,13 +64,13 @@ describe('AbstractProcessor aggregateVersions', () => { describe('AbstractProcessor attach files', () => { beforeEach(() => { const fsStub = { - readFileSync: path => { + readFileSync: (path) => { path = path.replace(/\\/g, '/') return `${path.startsWith('/test') ? path.slice(6) : path} attachment` - } + }, } const handlerClass = proxyquire('../../../../providers/process/abstractProcessor', { - fs: fsStub + fs: fsStub, }) Handler = new handlerClass({}) }) @@ -83,10 +83,10 @@ describe('AbstractProcessor attach files', () => { request.document = { _metadata: { links: {} } } request.crawler = { queue: sinon.stub() } request.track = sinon.stub() - Object.getOwnPropertyNames(map).forEach(name => VisitorMap.register(name, map[name])) + Object.getOwnPropertyNames(map).forEach((name) => VisitorMap.register(name, map[name])) new AbstractProcessor({}).linkAndQueueTool(request, 'licensee') expect(request.document._metadata.links.licensee.href).to.be.equal( - 'urn:npm:npmjs:-:redie:revision:0.3.0:tool:licensee' + 'urn:npm:npmjs:-:redie:revision:0.3.0:tool:licensee', ) expect(request.document._metadata.links.licensee.type).to.be.equal('collection') expect(request.crawler.queue.calledOnce).to.be.true @@ -99,7 +99,7 @@ describe('AbstractProcessor attach files', () => { request.document = { _metadata: { links: {} } } request.crawler = { queue: sinon.stub() } request.track = sinon.stub() - Object.getOwnPropertyNames(map).forEach(name => VisitorMap.register(name, map[name])) + Object.getOwnPropertyNames(map).forEach((name) => VisitorMap.register(name, map[name])) new AbstractProcessor({}).linkAndQueue(request, 'source') expect(request.document._metadata.links.source.href).to.be.equal('urn:npm:npmjs:-:redie:revision:0.3.0') expect(request.document._metadata.links.source.type).to.be.equal('resource') @@ -113,7 +113,7 @@ describe('AbstractProcessor attach files', () => { request.document = { _metadata: { links: {} } } request.crawler = { queue: sinon.stub() } request.track = sinon.stub() - Object.getOwnPropertyNames(map).forEach(name => VisitorMap.register(name, map[name])) + Object.getOwnPropertyNames(map).forEach((name) => VisitorMap.register(name, map[name])) new AbstractProcessor({}).addSelfLink(request) 
expect(request.document._metadata.links.self.href).to.be.equal('urn:npm:npmjs:-:redie:revision:0.3.0') expect(request.document._metadata.links.self.type).to.be.equal('resource') @@ -182,11 +182,7 @@ describe('link and queue local tasks', () => { const request = new Request('npm', 'cd:/npm/npmjs/-/redie/0.3.0') processor.addLocalToolTasks(request) expect(processor.linkAndQueueTool.callCount).to.be.equal(3) - expect(processor.linkAndQueueTool.args.map(call => call[1])).to.have.members([ - 'licensee', - 'scancode', - 'reuse' - ]) + expect(processor.linkAndQueueTool.args.map((call) => call[1])).to.have.members(['licensee', 'scancode', 'reuse']) }) }) @@ -228,7 +224,7 @@ describe('AbstractProcessor get interesting files', () => { function validateAttachedFile(name, list, checkContent = false) { const attachment = `${name} attachment` const token = Handler._computeToken(attachment) - const entry = find(list, entry => entry.path === name) + const entry = find(list, (entry) => entry.path === name) expect(!!entry).to.be.true expect(entry.token).to.eq(token) if (checkContent) expect(entry.attachment).to.eq(attachment) diff --git a/test/unit/providers/process/composerExtractTests.js b/test/unit/providers/process/composerExtractTests.js index 4b6a8cfe..8c392b76 100644 --- a/test/unit/providers/process/composerExtractTests.js +++ b/test/unit/providers/process/composerExtractTests.js @@ -13,17 +13,17 @@ const hashes = { 'symfony/polyfill-mbstring-1.11.0': { 'symfony-polyfill-mbstring-fe5e94c/LICENSE': { sha1: '53a47cd3f3fee7cd8179a19d7741da412eed9de7', - sha256: 'a718d662afdccd5db0c47543119dfa62b2d8b0dfd2d6d44a5e14397cb574e52b' + sha256: 'a718d662afdccd5db0c47543119dfa62b2d8b0dfd2d6d44a5e14397cb574e52b', }, 'symfony-polyfill-mbstring-fe5e94c/README.md': { sha1: 'c20aaad7bd777b2c7839c363a7a8dfd15f6cca63', - sha256: '74a6cefb78dc6b1447f9686cc2a062112027c8d2a39c4da66fd43f0f2bf76c3f' + sha256: '74a6cefb78dc6b1447f9686cc2a062112027c8d2a39c4da66fd43f0f2bf76c3f', }, 'symfony-polyfill-mbstring-fe5e94c/composer.json': { sha1: '9005581bb58110bc5525c70693f9d79d8fe76616', - sha256: 'a81f24d2da5637b570ebb8999e48d6e145887c37109dd553d3c04f4e6d3980bf' - } - } + sha256: 'a81f24d2da5637b570ebb8999e48d6e145887c37109dd553d3c04f4e6d3980bf', + }, + }, } describe('PHP processing', () => { @@ -37,7 +37,7 @@ describe('PHP processing', () => { const files = request.document.files expect(request.document).to.be.not.null - files.forEach(file => { + files.forEach((file) => { if (file.path.includes('LICENSE')) { expect(file.hashes.sha1).to.be.equal(hashes['symfony/polyfill-mbstring-1.11.0'][file.path].sha1) expect(file.hashes.sha256).to.be.equal(hashes['symfony/polyfill-mbstring-1.11.0'][file.path].sha256) @@ -50,10 +50,10 @@ describe('PHP processing', () => { } }) expect(processor.linkAndQueueTool.callCount).to.be.equal(3) - expect(processor.linkAndQueueTool.args.map(call => call[1])).to.have.members([ + expect(processor.linkAndQueueTool.args.map((call) => call[1])).to.have.members([ 'licensee', - 'scancode', /*, 'fossology'*/ - 'reuse' + 'scancode' /*, 'fossology'*/, + 'reuse', ]) expect(request.document.attachments.length).to.eq(1) expect(request.document.summaryInfo.count).to.be.equal(8) @@ -64,7 +64,7 @@ describe('PHP processing', () => { }) async function setup() { - const processor = composerExtract({ logger: { info: () => { } } }, () => { }) + const processor = composerExtract({ logger: { info: () => {} } }, () => {}) processor._detectLicenses = () => 'MIT' processor.linkAndQueueTool = sinon.stub() const request = 
createRequest() @@ -72,7 +72,7 @@ async function setup() { request.document.location = dir.name await new AbstractFetch({}).decompress( 'test/fixtures/composer/symfony-polyfill-mbstring-v1.11.0-0-gfe5e94c.zip', - dir.name + dir.name, ) return { processor, request } } @@ -86,14 +86,14 @@ function createRequest() { describe('composerExtract source discovery', () => { it('discovers source candidates', async () => { - const processor = composerExtract({ logger: { info: () => { } } }, () => { }) + const processor = composerExtract({ logger: { info: () => {} } }, () => {}) const manifest = { source: { url: 'one' }, homepage: 'two', bugs: 'http://three' } const candidates = processor._discoverCandidateSourceLocations(manifest) expect(candidates).to.have.members(['one', 'two', 'http://three']) }) it('discovers source candidates with odd structures', async () => { - const processor = composerExtract({ logger: { info: () => { } } }, () => { }) + const processor = composerExtract({ logger: { info: () => {} } }, () => {}) const manifest = { source: { url: 'one' }, homepage: ['two', 'three'], bugs: { url: 'four' } } const candidates = processor._discoverCandidateSourceLocations(manifest) expect(candidates.length).to.eq(3) @@ -154,7 +154,7 @@ function sourceDiscovery() { const githubResults = { 'http://repo': createSourceSpec('repo'), 'http://url': createSourceSpec('url'), - 'http://bugs': createSourceSpec('bugs') + 'http://bugs': createSourceSpec('bugs'), } function createManifest(repo, url, homepage, bugs) { diff --git a/test/unit/providers/process/condaExtractTests.js b/test/unit/providers/process/condaExtractTests.js index a34a9588..ca4cf7cc 100644 --- a/test/unit/providers/process/condaExtractTests.js +++ b/test/unit/providers/process/condaExtractTests.js @@ -16,10 +16,10 @@ describe('Conda processing', () => { await processor.handle(request) expect(processor.linkAndQueueTool.callCount).to.be.equal(3) - expect(processor.linkAndQueueTool.args.map(call => call[1])).to.have.members([ + expect(processor.linkAndQueueTool.args.map((call) => call[1])).to.have.members([ 'licensee', 'scancode', - 'reuse' /*, 'fossology'*/ + 'reuse' /*, 'fossology'*/, ]) expect(processor.linkAndQueue.callCount).to.be.equal(1) expect(processor.linkAndQueue.args[0][1]).to.equal('source') @@ -28,7 +28,7 @@ describe('Conda processing', () => { }) async function setup() { - const processor = CondaExtract({ logger: { info: () => { } } }, () => { }) + const processor = CondaExtract({ logger: { info: () => {} } }, () => {}) processor.linkAndQueueTool = sinon.stub() const request = createRequest() const dir = processor.createTempDir(request) @@ -45,17 +45,17 @@ function createRequest() { provider: 'conda-forge', namespace: '-', name: '21cmfast', - revision: 'linux-64--3.0.2' + revision: 'linux-64--3.0.2', }, registryData: { downloadUrl: '21cmfast', channelData: {}, repoData: { packageData: { - 'version': '3.0.2' - } - } - } + version: '3.0.2', + }, + }, + }, } request.processMode = 'process' return request diff --git a/test/unit/providers/process/crateExtractTests.js b/test/unit/providers/process/crateExtractTests.js index 6a58947c..e78813f6 100644 --- a/test/unit/providers/process/crateExtractTests.js +++ b/test/unit/providers/process/crateExtractTests.js @@ -12,45 +12,45 @@ const hashes = { 'bitflags-1.0.4': { '.gitignore': { sha1: '3254b5d5538166f1fd5a0bb41f7f3d3bbd455c56', - sha256: 'f9b1ca6ae27d1c18215265024629a8960c31379f206d9ed20f64e0b2dcf79805' + sha256: 'f9b1ca6ae27d1c18215265024629a8960c31379f206d9ed20f64e0b2dcf79805', }, 
'CHANGELOG.md': { sha1: '87b1447fcb5155a5ba3bc476c6b870799bed78c7', - sha256: 'b9f503da2d3c91b0a244f1dc853d975f971f782b209ea52cd4cd98705e6e2749' + sha256: 'b9f503da2d3c91b0a244f1dc853d975f971f782b209ea52cd4cd98705e6e2749', }, 'CODE_OF_CONDUCT.md': { sha1: '82ce99058d5f84f3c3c2f548e7674de67d786e83', - sha256: '42634d0f6d922f49857175af991802822f7f920487aefa2ee250a50d12251a66' + sha256: '42634d0f6d922f49857175af991802822f7f920487aefa2ee250a50d12251a66', }, 'Cargo.toml': { sha1: '116f829c6f5099f58b7c7ef6d11655e93d35e34f', - sha256: '0234b6f827764ca093d897126b45505be0996e67860d61caeab696d092ffb781' + sha256: '0234b6f827764ca093d897126b45505be0996e67860d61caeab696d092ffb781', }, 'Cargo.toml.orig': { sha1: '810c9f23ba089372b992496166cdec13733959fc', - sha256: 'b2512e34fec0b32dabd8a2d4339ed22c9d1a3697f525f25500020bbd6f020456' + sha256: 'b2512e34fec0b32dabd8a2d4339ed22c9d1a3697f525f25500020bbd6f020456', }, 'LICENSE-APACHE': { sha1: '5798832c31663cedc1618d18544d445da0295229', - sha256: 'a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2' + sha256: 'a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2', }, 'LICENSE-MIT': { sha1: '9f3c36d2b7d381d9cf382a00166f3fbd06783636', - sha256: '6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb' + sha256: '6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb', }, 'README.md': { sha1: 'efd05ffa19723f822a85c5b76bda239be1d1aee1', - sha256: '6b236f8b62c82f189fabce0756e01a2c0ab1f32cb84cad9ff3c96b2ce5282bda' + sha256: '6b236f8b62c82f189fabce0756e01a2c0ab1f32cb84cad9ff3c96b2ce5282bda', }, 'src/example_generated.rs': { sha1: '6f1ac32232c5519998c87432f356c0090ef09b76', - sha256: 'e43eb59e90f317f38d436670a6067d2fd9eb35fb319fe716184e4a04e24ed1b2' + sha256: 'e43eb59e90f317f38d436670a6067d2fd9eb35fb319fe716184e4a04e24ed1b2', }, 'src/lib.rs': { sha1: '731ff4783523618c1e98b064d716fa5768dbac54', - sha256: '5751eb6fbb8cb97d8accd0846493168d9b5acff1f8d64435d4da8ad7dbf36b4d' - } - } + sha256: '5751eb6fbb8cb97d8accd0846493168d9b5acff1f8d64435d4da8ad7dbf36b4d', + }, + }, } describe('Crate processing', () => { @@ -63,15 +63,15 @@ describe('Crate processing', () => { await processor.handle(request) const files = request.document.files expect(request.document).to.be.not.null - files.forEach(file => { + files.forEach((file) => { expect(file.hashes.sha1).to.be.equal(hashes['bitflags-1.0.4'][file.path].sha1) expect(file.hashes.sha256).to.be.equal(hashes['bitflags-1.0.4'][file.path].sha256) }) expect(processor.linkAndQueueTool.callCount).to.be.equal(3) - expect(processor.linkAndQueueTool.args.map(call => call[1])).to.have.members([ + expect(processor.linkAndQueueTool.args.map((call) => call[1])).to.have.members([ 'licensee', 'scancode', - 'reuse' /*, 'fossology'*/ + 'reuse' /*, 'fossology'*/, ]) expect(request.document.summaryInfo.count).to.be.equal(10) expect(processor.linkAndQueue.callCount).to.be.equal(1) @@ -81,7 +81,7 @@ describe('Crate processing', () => { }) async function setup() { - const processor = CrateExtract({ logger: {} }, () => { }) + const processor = CrateExtract({ logger: {} }, () => {}) processor._detectLicenses = () => 'MIT' processor.linkAndQueueTool = sinon.stub() const request = createRequest() @@ -99,8 +99,8 @@ function createRequest() { manifest: { homepage: 'https://github.com/bitflags/bitflags', documentation: 'https://docs.rs/bitflags', - repository: 'https://github.com/bitflags/bitflags' - } + repository: 'https://github.com/bitflags/bitflags', + }, } request.processMode = 'process' return request diff --git 
a/test/unit/providers/process/debExtractTests.js b/test/unit/providers/process/debExtractTests.js index 74b4bbff..99c9de3a 100644 --- a/test/unit/providers/process/debExtractTests.js +++ b/test/unit/providers/process/debExtractTests.js @@ -14,10 +14,10 @@ describe('Debian processing', () => { expect(request.document.sourceInfo.type).to.equal('debsrc') expect(processor.linkAndQueueTool.callCount).to.be.equal(3) - expect(processor.linkAndQueueTool.args.map(call => call[1])).to.have.members([ + expect(processor.linkAndQueueTool.args.map((call) => call[1])).to.have.members([ 'licensee', 'scancode', - 'reuse' /*, 'fossology'*/ + 'reuse' /*, 'fossology'*/, ]) expect(processor.linkAndQueue.callCount).to.be.equal(1) expect(processor.linkAndQueue.args[0][1]).to.equal('source') @@ -26,7 +26,7 @@ describe('Debian processing', () => { }) async function setup() { - const processor = debianExtract({ logger: { info: () => { } } }, () => { }) + const processor = debianExtract({ logger: { info: () => {} } }, () => {}) processor.linkAndQueueTool = sinon.stub() const request = createRequest() const dir = processor.createTempDir(request) @@ -43,9 +43,9 @@ function createRequest() { provider: 'debian', namespace: '-', name: '0ad', - revision: '0.0.17-1' + revision: '0.0.17-1', }, - registryData: [{ Architecture: 'armhf', Source: '0ad' }] + registryData: [{ Architecture: 'armhf', Source: '0ad' }], } request.processMode = 'process' return request diff --git a/test/unit/providers/process/fsfeReuseTests.js b/test/unit/providers/process/fsfeReuseTests.js index b9162c75..300b31b9 100644 --- a/test/unit/providers/process/fsfeReuseTests.js +++ b/test/unit/providers/process/fsfeReuseTests.js @@ -21,7 +21,10 @@ describe('FSFE REUSE software process', () => { expect(document.reuse.metadata.CreatorTool).to.equal('reuse-0.13') expect(document.reuse.files.length).to.equal(4) expect(document.attachments.length).to.equal(2) - expect(document.reuse.licenses).to.eql([{ filePath: 'LICENSES/Apache-2.0.txt', spdxId: 'Apache-2.0' }, { filePath: 'LICENSES/CC-BY-3.0.txt', spdxId: 'CC-BY-3.0' }]) + expect(document.reuse.licenses).to.eql([ + { filePath: 'LICENSES/Apache-2.0.txt', spdxId: 'Apache-2.0' }, + { filePath: 'LICENSES/CC-BY-3.0.txt', spdxId: 'CC-BY-3.0' }, + ]) let readmeFound = false let securityFound = false let helloWorldFound = false @@ -31,19 +34,25 @@ describe('FSFE REUSE software process', () => { readmeFound = true expect(document.reuse.files[i].LicenseConcluded).to.equal('NOASSERTION') expect(document.reuse.files[i].LicenseInfoInFile).to.equal('Apache-2.0') - expect(document.reuse.files[i].FileCopyrightText).to.equal('1982-2021 SAP SE or an SAP affiliate company and ospo-reuse contributors') + expect(document.reuse.files[i].FileCopyrightText).to.equal( + '1982-2021 SAP SE or an SAP affiliate company and ospo-reuse contributors', + ) } if (document.reuse.files[i].FileName === 'SECURITY.md') { securityFound = true expect(document.reuse.files[i].LicenseConcluded).to.equal('NOASSERTION') expect(document.reuse.files[i].LicenseInfoInFile).to.equal('Beerware') - expect(document.reuse.files[i].FileCopyrightText).to.equal('2013-2017 SAP SE or an SAP affiliate company and ospo-reuse contributors') + expect(document.reuse.files[i].FileCopyrightText).to.equal( + '2013-2017 SAP SE or an SAP affiliate company and ospo-reuse contributors', + ) } if (document.reuse.files[i].FileName === 'ospo-reuse/src/main/java/com/sap/ospo-reuse/HelloWorld.java') { helloWorldFound = true 
expect(document.reuse.files[i].LicenseConcluded).to.equal('NOASSERTION') expect(document.reuse.files[i].LicenseInfoInFile).to.equal('GPL-3.0-or-later') - expect(document.reuse.files[i].FileCopyrightText).to.equal('2019-2021 SAP SE or an SAP affiliate company and ospo-reuse contributors') + expect(document.reuse.files[i].FileCopyrightText).to.equal( + '2019-2021 SAP SE or an SAP affiliate company and ospo-reuse contributors', + ) } if (document.reuse.files[i].FileName === 'ospo-reuse/src/test/java/com/sap/ospo-reuse/TestsHelloWorld.java') { testHelloWorldFound = true @@ -90,7 +99,7 @@ describe('FSFE REUSE software process', () => { return callbackOrOptions(resultBox.versionError, { stdout: resultBox.versionResult }) } callback(resultBox.error, {}) - } + }, } const fsStub = { readdirSync: () => resultBox.licensesDirectory } Handler = proxyquire('../../../../providers/process/fsfeReuse', { child_process: processStub, fs: fsStub }) @@ -110,7 +119,7 @@ function setup(fixture, error, versionError) { Handler._resultBox.error = error Handler._resultBox.versionError = versionError const processor = Handler(options) - processor.createTempFile = sinon.stub().returns({ name: `test/fixtures/fsfeReuse/${fixture}/output.txt`}) + processor.createTempFile = sinon.stub().returns({ name: `test/fixtures/fsfeReuse/${fixture}/output.txt` }) //processor.attachFiles = sinon.stub() return { request: testRequest, processor } } diff --git a/test/unit/providers/process/gemExtractTests.js b/test/unit/providers/process/gemExtractTests.js index 33e6e20e..37ec1b43 100644 --- a/test/unit/providers/process/gemExtractTests.js +++ b/test/unit/providers/process/gemExtractTests.js @@ -47,7 +47,7 @@ const githubResults = { 'http://gem': createSourceSpec('gem'), 'http://home': createSourceSpec('home'), 'http://mail': createSourceSpec('mail'), - 'http://source': createSourceSpec('source') + 'http://source': createSourceSpec('source'), } function createManifest( @@ -57,7 +57,7 @@ function createManifest( gem_uri, homepage_uri, mailing_list_uri, - source_code_uri + source_code_uri, ) { return { bug_tracker_uri, changelog_uri, documentation_uri, gem_uri, homepage_uri, mailing_list_uri, source_code_uri } } diff --git a/test/unit/providers/process/goExtractTests.js b/test/unit/providers/process/goExtractTests.js index d3922f40..17b306d0 100644 --- a/test/unit/providers/process/goExtractTests.js +++ b/test/unit/providers/process/goExtractTests.js @@ -28,17 +28,13 @@ describe('Go processing', () => { await processor.handle(request) expect(processor.linkAndQueueTool.callCount).to.be.equal(3) - expect(processor.linkAndQueueTool.args.map(call => call[1])).to.have.members([ - 'licensee', - 'scancode', - 'reuse' - ]) + expect(processor.linkAndQueueTool.args.map((call) => call[1])).to.have.members(['licensee', 'scancode', 'reuse']) expect(request.document.registryData.licenses).to.be.deep.equal(licenses) }) }) async function setup() { - const processor = GoExtract({ logger: {} }, () => { }) + const processor = GoExtract({ logger: {} }, () => {}) processor.linkAndQueueTool = sinon.stub() const request = createRequest() const dir = processor.createTempDir(request) @@ -55,11 +51,11 @@ function createRequest() { provider: 'golang', namespace: 'rsc.io', name: 'quote', - revision: '1.5.2' + revision: '1.5.2', }, registryData: { - licenses - } + licenses, + }, } request.processMode = 'process' return request @@ -74,11 +70,11 @@ function createInvalidRequest() { provider: 'golang', namespace: 'rsc.io', name: 'quote', - revision: '1.5.2' - } + 
revision: '1.5.2', + }, } request.processMode = 'process' return request } -const licenses = ['Apache-2.0', 'BSD-2-Clause, BSD-3-Clause, HPND'] \ No newline at end of file +const licenses = ['Apache-2.0', 'BSD-2-Clause, BSD-3-Clause, HPND'] diff --git a/test/unit/providers/process/licenseeTests.js b/test/unit/providers/process/licenseeTests.js index 8a901221..62b20d6a 100644 --- a/test/unit/providers/process/licenseeTests.js +++ b/test/unit/providers/process/licenseeTests.js @@ -22,7 +22,7 @@ describe('Licensee process', () => { 'LICENSE', 'package.json', 'subfolder/LICENSE.foo', - 'subfolder/LICENSE.bar' + 'subfolder/LICENSE.bar', ]) expect(processor.attachFiles.args[0][2]).to.equal(path.resolve('test/fixtures/licensee/9.10.1/folder1')) }) @@ -51,19 +51,19 @@ describe('Licensee process', () => { expect(request.processControl).to.equal('skip') }) - beforeEach(function() { + beforeEach(function () { const resultBox = { error: null, versionResult: '1.2.0', versionError: null } const processStub = { execFile: (command, parameters, callbackOrOptions) => { if (parameters.includes('version')) return callbackOrOptions(resultBox.versionError, { stdout: resultBox.versionResult }) - } + }, } Handler = proxyquire('../../../../providers/process/licensee', { child_process: processStub }) Handler._resultBox = resultBox }) - afterEach(function() { + afterEach(function () { sandbox.restore() }) }) @@ -76,9 +76,9 @@ function setup(fixture, error, versionError) { Handler._resultBox.error = error Handler._resultBox.versionError = versionError const processor = Handler(options) - processor._runLicensee = error ? - sinon.stub().rejects(error) : - (parameters, inputFolder) => Promise.resolve(fs.readFileSync(`${inputFolder}/output.json`).toString()) + processor._runLicensee = error + ? 
sinon.stub().rejects(error) + : (parameters, inputFolder) => Promise.resolve(fs.readFileSync(`${inputFolder}/output.json`).toString()) processor.attachFiles = sinon.stub() return { request: testRequest, processor } } diff --git a/test/unit/providers/process/mavenExtractTests.js b/test/unit/providers/process/mavenExtractTests.js index 114bd283..932aef3d 100644 --- a/test/unit/providers/process/mavenExtractTests.js +++ b/test/unit/providers/process/mavenExtractTests.js @@ -38,7 +38,7 @@ describe('mavenExtract source discovery', () => { it('handles maven google', async () => { const spec = new EntitySpec('maven', 'mavengoogle', 'testorg', 'test', '42') - const extractor = extract({}, () => { }) + const extractor = extract({}, () => {}) const sourceLocation = await extractor._discoverSource(spec) expect(sourceLocation.revision).to.eq('42') expect(sourceLocation.type).to.eq('sourcearchive') @@ -49,7 +49,7 @@ describe('mavenExtract source discovery', () => { it('falls back to maven central', async () => { const spec = createSpec('test') - const extractor = extract({}, () => { }) + const extractor = extract({}, () => {}) const sourceLocation = await extractor._discoverSource(spec) expect(sourceLocation.revision).to.eq('42') expect(sourceLocation.type).to.eq('sourcearchive') @@ -66,7 +66,7 @@ function sourceDiscovery() { } const githubResults = { - 'http://url': createSourceSpec('url') + 'http://url': createSourceSpec('url'), } function createManifest(url) { diff --git a/test/unit/providers/process/npmExtractTests.js b/test/unit/providers/process/npmExtractTests.js index 03b8a456..165ebe75 100644 --- a/test/unit/providers/process/npmExtractTests.js +++ b/test/unit/providers/process/npmExtractTests.js @@ -13,21 +13,21 @@ const hashes = { 'redie-0.3.0': { 'package/LICENSE': { sha1: '6401e7f1f46654117270c4860a263d3c4d6df1eb', - sha256: '42c7def049b7ef692085ca9bdf5984d439d3291922e02cb112d5cd1287b3cc56' + sha256: '42c7def049b7ef692085ca9bdf5984d439d3291922e02cb112d5cd1287b3cc56', }, 'package/README.md': { sha1: 'f137a2544ac6b3589796fbd7dee87a35858f8d75', - sha256: 'df3005370ff27872f241341dd11089951e099786a2b7e949262ab2ed5b3e4237' + sha256: 'df3005370ff27872f241341dd11089951e099786a2b7e949262ab2ed5b3e4237', }, 'package/index.js': { sha1: '7561b32ffa21eeb8ca1c12a5e76ec28d718c3dfd', - sha256: 'b83c7eeef19b2f4be9a8947db0bedc4ef43a15746e9c9b6f14e491f68bd2db60' + sha256: 'b83c7eeef19b2f4be9a8947db0bedc4ef43a15746e9c9b6f14e491f68bd2db60', }, 'package/package.json': { sha1: '74c5c9c1de88406c3d08272bfb6fe57055625fc9', - sha256: '7bf06a09d2b1c79b2cad7820a97e3887749418e6c53da1f7fb7f1b7c430e386d' - } - } + sha256: '7bf06a09d2b1c79b2cad7820a97e3887749418e6c53da1f7fb7f1b7c430e386d', + }, + }, } describe('NPM processing', () => { @@ -40,15 +40,15 @@ describe('NPM processing', () => { await processor.handle(request) const files = request.document.files expect(request.document).to.be.not.null - files.forEach(file => { + files.forEach((file) => { expect(file.hashes.sha1).to.be.equal(hashes['redie-0.3.0'][file.path].sha1) expect(file.hashes.sha256).to.be.equal(hashes['redie-0.3.0'][file.path].sha256) }) expect(processor.linkAndQueueTool.callCount).to.be.equal(3) - expect(processor.linkAndQueueTool.args.map(call => call[1])).to.have.members([ + expect(processor.linkAndQueueTool.args.map((call) => call[1])).to.have.members([ 'licensee', 'scancode', - 'reuse' /*, 'fossology'*/ + 'reuse' /*, 'fossology'*/, ]) expect(request.document.attachments.length).to.eq(2) expect(request.document._attachments.length).to.eq(2) @@ -60,7 
+60,7 @@ describe('NPM processing', () => { }) async function setup() { - const processor = npmExtract({ logger: {} }, () => { }) + const processor = npmExtract({ logger: {} }, () => {}) processor._detectLicenses = () => 'MIT' processor.linkAndQueueTool = sinon.stub() const request = createRequest() @@ -79,14 +79,14 @@ function createRequest() { describe('npmExtract source discovery', () => { it('discovers source candidates', async () => { - const processor = npmExtract({ logger: { info: () => { } } }, () => { }) + const processor = npmExtract({ logger: { info: () => {} } }, () => {}) const manifest = { repository: { url: 'one' }, url: 'two', homepage: 'three', bugs: 'http://four' } const candidates = processor._discoverCandidateSourceLocations(manifest) expect(candidates).to.have.members(['one', 'two', 'three', 'http://four']) }) it('discovers source candidates with odd structures', async () => { - const processor = npmExtract({ logger: { info: () => { } } }, () => { }) + const processor = npmExtract({ logger: { info: () => {} } }, () => {}) const manifest = { repository: { url: 'one' }, url: 'two', homepage: ['three', 'four'], bugs: { url: 'five' } } const candidates = processor._discoverCandidateSourceLocations(manifest) expect(candidates).to.have.members(['one', 'two', 'three', 'five']) @@ -155,7 +155,7 @@ function sourceDiscovery() { const githubResults = { 'http://repo': createSourceSpec('repo'), 'http://url': createSourceSpec('url'), - 'http://bugs': createSourceSpec('bugs') + 'http://bugs': createSourceSpec('bugs'), } function createManifest(repo, url, homepage, bugs) { diff --git a/test/unit/providers/process/nugetExtractTests.js b/test/unit/providers/process/nugetExtractTests.js index dafd8432..dcdcee5d 100644 --- a/test/unit/providers/process/nugetExtractTests.js +++ b/test/unit/providers/process/nugetExtractTests.js @@ -13,41 +13,41 @@ const hashes = { 'xunit.core.2.4.1': { '.signature.p7s': { sha1: 'cfdbf40dc9729d51621609c440b0aab6e82ca62c', - sha256: '83a8224a271c8340855d80baa7169604a0d60c914e3a852b6423b3c54124e2e7' + sha256: '83a8224a271c8340855d80baa7169604a0d60c914e3a852b6423b3c54124e2e7', }, '[Content_Types].xml': { sha1: '5e7b5e8e973dfb200d56e6894978cf4652c431dc', - sha256: 'b5a90ff27fec02ae69707b8a1bbe2bd069b47519daeface707303722fbf6e01e' + sha256: 'b5a90ff27fec02ae69707b8a1bbe2bd069b47519daeface707303722fbf6e01e', }, 'xunit.core.nuspec': { sha1: 'c05dad55561e3c2df400b8b13c944590b15ee98c', - sha256: '2c411d7ef591767dfc42910d6cad592d77a3ce4c4d4333b8477c1465e936af10' + sha256: '2c411d7ef591767dfc42910d6cad592d77a3ce4c4d4333b8477c1465e936af10', }, '_rels/.rels': { sha1: 'b5515c2da3422faba0848fe256a5b6ec4afca732', - sha256: '0c3ee1caf5de49929c8be1050b5d13e7e97130f008749a0a4c38da292cfe049e' + sha256: '0c3ee1caf5de49929c8be1050b5d13e7e97130f008749a0a4c38da292cfe049e', }, 'build/xunit.core.props': { sha1: '9cce282dd8f38294df68a8945988572b07f7298b', - sha256: '91d72e308289a3b92f4ea16357f3d893c6552e5af256838cb5372b45f2ad2856' + sha256: '91d72e308289a3b92f4ea16357f3d893c6552e5af256838cb5372b45f2ad2856', }, 'build/xunit.core.targets': { sha1: '04727e3c2a540f437c37d20e4e6cb872618c7e81', - sha256: '5ee8e74529a707ebf9c86904a38d4d0aaadea70e991b0c61697246fa7adbb71d' + sha256: '5ee8e74529a707ebf9c86904a38d4d0aaadea70e991b0c61697246fa7adbb71d', }, 'buildMultiTargeting/xunit.core.props': { sha1: '9cce282dd8f38294df68a8945988572b07f7298b', - sha256: '91d72e308289a3b92f4ea16357f3d893c6552e5af256838cb5372b45f2ad2856' + sha256: 
'91d72e308289a3b92f4ea16357f3d893c6552e5af256838cb5372b45f2ad2856', }, 'buildMultiTargeting/xunit.core.targets': { sha1: '04727e3c2a540f437c37d20e4e6cb872618c7e81', - sha256: '5ee8e74529a707ebf9c86904a38d4d0aaadea70e991b0c61697246fa7adbb71d' + sha256: '5ee8e74529a707ebf9c86904a38d4d0aaadea70e991b0c61697246fa7adbb71d', }, 'package/services/metadata/core-properties/929de7b81e6f4062812c1a95465898c7.psmdcp': { sha1: '2cc94ae30faf15ea01ddd2aa49fbf581a7005b2a', - sha256: 'd4a95f4d4c7f23c17942fecac5cac2bb9dd8a41dfc9fcb57adbf20ab1b64841f' - } - } + sha256: 'd4a95f4d4c7f23c17942fecac5cac2bb9dd8a41dfc9fcb57adbf20ab1b64841f', + }, + }, } describe('NuGet processing', () => { @@ -60,16 +60,12 @@ describe('NuGet processing', () => { await processor.handle(request) const files = request.document.files expect(request.document).to.be.not.null - files.forEach(file => { + files.forEach((file) => { expect(file.hashes.sha1).to.be.equal(hashes['xunit.core.2.4.1'][file.path].sha1) expect(file.hashes.sha256).to.be.equal(hashes['xunit.core.2.4.1'][file.path].sha256) }) expect(processor.linkAndQueueTool.callCount).to.be.equal(3) - expect(processor.linkAndQueueTool.args.map(call => call[1])).to.have.members([ - 'licensee', - 'scancode', - 'reuse' - ]) + expect(processor.linkAndQueueTool.args.map((call) => call[1])).to.have.members(['licensee', 'scancode', 'reuse']) expect(request.document.summaryInfo.count).to.be.equal(9) expect(processor.linkAndQueue.callCount).to.be.equal(1) expect(processor.linkAndQueue.args[0][1]).to.equal('source') @@ -78,13 +74,13 @@ describe('NuGet processing', () => { }) async function setup() { - const processor = extract({ logger: {} }, () => { }) + const processor = extract({ logger: {} }, () => {}) processor.linkAndQueueTool = sinon.stub() const request = createRequest() const dir = processor.createTempDir(request) request.document.metadataLocation = { manifest: 'test/fixtures/nuget/xunit.core.2.4.1.catalog.json', - nuspec: 'test/fixtures/nuget/xunit.core.2.4.1.nuspec' + nuspec: 'test/fixtures/nuget/xunit.core.2.4.1.nuspec', } request.document.location = `${dir.name}/nupkg` await new AbstractFetch({}).decompress('test/fixtures/nuget/xunit.core.2.4.1.nupkg', `${dir.name}/nupkg`) @@ -144,7 +140,7 @@ function sourceDiscovery() { const githubResults = { 'http://repo': createSourceSpec('repo'), 'http://project': createSourceSpec('project'), - 'http://license': createSourceSpec('license') + 'http://license': createSourceSpec('license'), } function createManifest(repo, projectUrl, licenseUrl) { diff --git a/test/unit/providers/process/pypiExtractTests.js b/test/unit/providers/process/pypiExtractTests.js index 8a03dbc0..0d4c37b9 100644 --- a/test/unit/providers/process/pypiExtractTests.js +++ b/test/unit/providers/process/pypiExtractTests.js @@ -47,7 +47,7 @@ const githubResults = { 'http://home': createSourceSpec('home'), 'http://package': createSourceSpec('package'), 'http://project': createSourceSpec('project'), - 'http://release': createSourceSpec('release') + 'http://release': createSourceSpec('release'), } function createManifest(bugtrack_url, docs_url, download_url, home_page, package_url, project_url, release_url) { diff --git a/test/unit/providers/process/scancodeTests.js b/test/unit/providers/process/scancodeTests.js index 5aa2bc93..5b90f4f3 100644 --- a/test/unit/providers/process/scancodeTests.js +++ b/test/unit/providers/process/scancodeTests.js @@ -14,19 +14,19 @@ let Handler describe('ScanCode misc', () => { it('differentiates real errors', () => { Handler._resultBox.result = { - 
files: [{ scan_errors: ['ValueError: this is a test'] }, { scan_errors: ['bogus package.json'] }] + files: [{ scan_errors: ['ValueError: this is a test'] }, { scan_errors: ['bogus package.json'] }], } expect(Handler._hasRealErrors()).to.be.false Handler._resultBox.result = { - files: [{ scan_errors: ['Yikes. Tragedy has struck'] }, { scan_errors: ['Panic'] }] + files: [{ scan_errors: ['Yikes. Tragedy has struck'] }, { scan_errors: ['Panic'] }], } expect(Handler._hasRealErrors()).to.be.true Handler._resultBox.result = { - files: [] + files: [], } expect(Handler._hasRealErrors()).to.be.false Handler._resultBox.result = { - files: [{}] + files: [{}], } expect(Handler._hasRealErrors()).to.be.false }) @@ -34,12 +34,12 @@ describe('ScanCode misc', () => { beforeEach(() => { const resultBox = {} const fsStub = { - readFileSync: () => JSON.stringify(resultBox.result) + readFileSync: () => JSON.stringify(resultBox.result), } const handlerFactory = proxyquire('../../../../providers/process/scancode', { - fs: fsStub + fs: fsStub, }) - Handler = handlerFactory({ logger: { log: () => { } } }) + Handler = handlerFactory({ logger: { log: () => {} } }) Handler._resultBox = resultBox }) @@ -53,19 +53,19 @@ describe('ScanCode process', () => { const { request, processor } = setup('2.9.8/gem.json') await processor.handle(request) expect(request.document._metadata.toolVersion).to.equal('1.2.0') - expect(flatten(processor.attachFiles.args.map(x => x[1]))).to.have.members([]) + expect(flatten(processor.attachFiles.args.map((x) => x[1]))).to.have.members([]) }) it('should handle simple npms', async () => { const { request, processor } = setup('2.9.8/npm-basic.json') await processor.handle(request) - expect(flatten(processor.attachFiles.args.map(x => x[1]))).to.have.members(['package/package.json']) + expect(flatten(processor.attachFiles.args.map((x) => x[1]))).to.have.members(['package/package.json']) }) it('should handle large npms', async () => { const { request, processor } = setup('2.9.8/npm-large.json') await processor.handle(request) - expect(flatten(processor.attachFiles.args.map(x => x[1]))).to.have.members(['package/package.json']) + expect(flatten(processor.attachFiles.args.map((x) => x[1]))).to.have.members(['package/package.json']) }) it('should skip if ScanCode not found', async () => { @@ -91,7 +91,7 @@ describe('ScanCode process', () => { if (parameters.includes('--version')) return callbackOrOptions(resultBox.versionError, { stdout: resultBox.versionResult }) callback(resultBox.error) - } + }, } Handler = proxyquire('../../../../providers/process/scancode', { child_process: processStub }) Handler._resultBox = resultBox @@ -108,7 +108,7 @@ function setup(fixture, error, versionError) { timeout: 200, processes: 2, format: 'json', - logger: { log: sinon.stub(), info: sinon.stub() } + logger: { log: sinon.stub(), info: sinon.stub() }, } const testRequest = new request('npm', 'cd:/npm/npmjs/-/test/1.1') testRequest.document = { _metadata: { links: {} }, location: '/test' } diff --git a/test/unit/providers/process/sourceTests.js b/test/unit/providers/process/sourceTests.js index 2d243f6e..47102fe3 100644 --- a/test/unit/providers/process/sourceTests.js +++ b/test/unit/providers/process/sourceTests.js @@ -17,11 +17,11 @@ describe('Source processing', () => { processor.handle(request) expect(processor.linkAndQueueTool.callCount).to.be.equal(4) - expect(processor.linkAndQueueTool.args.map(call => call[1])).to.have.members([ + expect(processor.linkAndQueueTool.args.map((call) => call[1])).to.have.members([ 
'clearlydefined', 'licensee', 'scancode', - 'reuse' + 'reuse', ]) }) }) @@ -29,7 +29,7 @@ describe('Source processing', () => { function mockRequest(url) { const request = new Request('source', url) request.document = { - _metadata: { links: {} } + _metadata: { links: {} }, } return request -} \ No newline at end of file +} diff --git a/test/unit/providers/queuing/scopedQueueSetsTests.js b/test/unit/providers/queuing/scopedQueueSetsTests.js index 10859f47..783957c3 100644 --- a/test/unit/providers/queuing/scopedQueueSetsTests.js +++ b/test/unit/providers/queuing/scopedQueueSetsTests.js @@ -11,7 +11,6 @@ const QueueSet = require('../../../../ghcrawler/providers/queuing/queueSet.js') const EventEmitter = require('events') describe('scopedQueueSets', () => { - describe('subscription management', () => { let scopedQueues let globalQueues @@ -20,7 +19,7 @@ describe('scopedQueueSets', () => { function createQueues() { return { subscribe: sinon.stub(), - unsubscribe: sinon.stub() + unsubscribe: sinon.stub(), } } @@ -131,7 +130,7 @@ describe('scopedQueueSets', () => { function mockPopReturn(fromQueue) { const queue = { - getName: sinon.stub().returns(fromQueue) + getName: sinon.stub().returns(fromQueue), } return poppedRequest(queue) } @@ -172,11 +171,11 @@ describe('scopedQueueSets', () => { globalQueue = mockQueue('normal') globalQueues = { getQueue: sinon.stub().returns(globalQueue), - pop: sinon.stub().resolves(poppedRequest(globalQueue)) + pop: sinon.stub().resolves(poppedRequest(globalQueue)), } localQueue = mockQueue('normal') localQueues = { - pop: sinon.stub() + pop: sinon.stub(), } scopedQueues = new ScopedQueueSets(globalQueues, localQueues) }) @@ -212,12 +211,12 @@ describe('scopedQueueSets', () => { beforeEach(() => { globalQueue = mockQueue('normal') globalQueues = { - getQueue: () => globalQueue + getQueue: () => globalQueue, } localQueue = mockQueue('normal') localQueue.pop.resolves(poppedRequest(localQueue)) localQueues = { - queues: [localQueue] + queues: [localQueue], } scopedQueues = new ScopedQueueSets(globalQueues, localQueues) @@ -287,7 +286,7 @@ describe('integration test with AttenuatedQueue and InMemoryCrawlQueue', () => { _config: new EventEmitter(), logger: { verbose: sinon.stub(), - } + }, } queueSets = createScopedQueueSets(queueName, options) scopedQueues = new ScopedQueueSets(queueSets.global, queueSets.local) @@ -460,7 +459,7 @@ function createScopedQueueSets(queueName, options) { return { global: new QueueSet([global], options), - local: new QueueSet([local], options) + local: new QueueSet([local], options), } } @@ -492,6 +491,6 @@ function mockQueue(fromQueue) { getName: sinon.stub().returns(fromQueue), push: sinon.stub().resolves(), done: sinon.stub().resolves(), - pop: sinon.stub() + pop: sinon.stub(), } -} \ No newline at end of file +} diff --git a/test/unit/providers/queuing/storageBackedQueueTest.js b/test/unit/providers/queuing/storageBackedQueueTest.js index 403e23c6..52986637 100644 --- a/test/unit/providers/queuing/storageBackedQueueTest.js +++ b/test/unit/providers/queuing/storageBackedQueueTest.js @@ -16,8 +16,8 @@ describe('storageBackedQueue', () => { const createTestQueue = (memoryQueue, storageQueue) => { const options = { logger: { - verbose: sinon.stub() - } + verbose: sinon.stub(), + }, } return new StorageBackedQueue(memoryQueue, storageQueue, options) } @@ -31,7 +31,6 @@ describe('storageBackedQueue', () => { }) describe('subscribe', async () => { - beforeEach(() => { memoryQueueStub.subscribe.resolves() storageQueueStub.subscribe.resolves() 
@@ -52,7 +51,6 @@ describe('storageBackedQueue', () => { }) }) - describe('unsubscribe', async () => { beforeEach(() => { memoryQueueStub.unsubscribe.resolves() @@ -76,7 +74,6 @@ describe('storageBackedQueue', () => { }) describe('push', async () => { - beforeEach(() => { memoryQueueStub.push.resolves() storageQueueStub.push.resolves([]) @@ -101,7 +98,6 @@ describe('storageBackedQueue', () => { }) describe('pop', async () => { - it('should be able pop empty', async () => { memoryQueueStub.pop.resolves(undefined) const popped = await testQueue.pop() @@ -189,9 +185,8 @@ describe('storageBackedQueue', () => { }) describe('flush', async () => { - beforeEach(() => { - memoryQueueStub.getInfo.resolves({count: 1}) + memoryQueueStub.getInfo.resolves({ count: 1 }) memoryQueueStub.pop.resolves(new Request('test', 'http://test')) memoryQueueStub.done.resolves() storageQueueStub.updateVisibilityTimeout.rejects('should not be called') @@ -233,11 +228,7 @@ describe('storageBackedQueue', () => { .resolves(new Request('test1', 'http://test')) .onSecondCall() .resolves(new Request('test2', 'http://test')) - storageQueueStub.done - .onFirstCall() - .rejects(new Error('test')) - .onSecondCall() - .resolves() + storageQueueStub.done.onFirstCall().rejects(new Error('test')).onSecondCall().resolves() storageQueueStub.isMessageNotFound.returns(false) await expect(testQueue.flush()).to.be.rejectedWith('Failed to flush') @@ -255,5 +246,5 @@ const createQueueStub = () => ({ push: sinon.stub(), pop: sinon.stub(), done: sinon.stub(), - getInfo: sinon.stub() + getInfo: sinon.stub(), }) diff --git a/test/unit/providers/store/attachmentStoreTests.js b/test/unit/providers/store/attachmentStoreTests.js index ec307b72..ee8e825b 100644 --- a/test/unit/providers/store/attachmentStoreTests.js +++ b/test/unit/providers/store/attachmentStoreTests.js @@ -11,7 +11,10 @@ describe('AttachmentStore', () => { const { store } = setup() const document = { _metadata: { type: 'test', fetchedAt: 'now', processedAt: 'then', extra: 'value' }, - _attachments: [{ token: '42', attachment: '42 attachment' }, { token: '13', attachment: '13 attachment' }] + _attachments: [ + { token: '42', attachment: '42 attachment' }, + { token: '13', attachment: '13 attachment' }, + ], } await store.upsert(document) const baseStore = store.baseStore @@ -48,7 +51,7 @@ describe('AttachmentStore', () => { it('works with no attachments', async () => { const { store } = setup() const document = { - _metadata: { type: 'test', fetchedAt: 'now', processedAt: 'then', extra: 'value' } + _metadata: { type: 'test', fetchedAt: 'now', processedAt: 'then', extra: 'value' }, } await store.upsert(document) const baseStore = store.baseStore From 29fd50a2c8abb39d3f40d8b4c819ac7cd2e1f0c0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lukas=20Spie=C3=9F?= Date: Wed, 31 Jan 2024 15:20:28 +0100 Subject: [PATCH 10/11] Update Prettier config to minimize changes and follow styleguide --- .prettierrc.json | 4 +- config/cdConfig.js | 40 +++++------ config/cdMemoryConfig.js | 16 ++--- config/map.js | 32 ++++----- eslint.config.js | 12 ++-- ghcrawler/app.js | 6 +- ghcrawler/bin/www.js | 6 +- ghcrawler/crawlerFactory.js | 26 ++++---- ghcrawler/index.js | 2 +- ghcrawler/lib/crawler.js | 36 +++++----- ghcrawler/lib/crawlerService.js | 18 ++--- ghcrawler/lib/request.js | 4 +- ghcrawler/lib/traversalPolicy.js | 4 +- ghcrawler/memoryConfig.js | 14 ++-- ghcrawler/middleware/asyncMiddleware.js | 2 +- ghcrawler/middleware/sendHelper.js | 4 +- ghcrawler/providers/index.js | 8 +-- 
.../providers/queuing/attenuatedQueue.js | 8 +-- .../providers/queuing/inmemorycrawlqueue.js | 4 +- ghcrawler/providers/queuing/memoryFactory.js | 4 +- ghcrawler/providers/queuing/queueSet.js | 10 +-- .../providers/queuing/scopedQueueSets.js | 22 +++---- .../providers/queuing/storageBackedQueue.js | 8 +-- ghcrawler/providers/queuing/storageQueue.js | 20 +++--- .../providers/queuing/storageQueueFactory.js | 2 +- .../providers/queuing/storageQueueManager.js | 2 +- .../providers/storage/azureBlobFactory.js | 2 +- ghcrawler/providers/storage/file.js | 10 +-- .../providers/storage/inmemoryDocStore.js | 8 +-- .../providers/storage/storageDocStore.js | 22 +++---- ghcrawler/routes/requests.js | 4 +- index.js | 2 +- lib/baseHandler.js | 6 +- lib/entitySpec.js | 4 +- lib/fetchResult.js | 8 +-- lib/sourceDiscovery.js | 8 +-- lib/utils.js | 16 ++--- providers/fetch/abstractFetch.js | 8 +-- providers/fetch/condaFetch.js | 26 ++++---- providers/fetch/cratesioFetch.js | 14 ++-- providers/fetch/debianFetch.js | 52 +++++++-------- providers/fetch/dispatcher.js | 6 +- providers/fetch/gitCloner.js | 12 ++-- providers/fetch/goFetch.js | 18 ++--- providers/fetch/gradlePluginFetch.js | 6 +- providers/fetch/mavenBasedFetch.js | 8 +-- providers/fetch/mavenGoogleFetch.js | 6 +- providers/fetch/mavencentralFetch.js | 6 +- providers/fetch/npmjsFetch.js | 6 +- providers/fetch/nugetFetch.js | 24 ++++--- providers/fetch/packagistFetch.js | 10 +-- providers/fetch/podFetch.js | 20 +++--- providers/fetch/pypiFetch.js | 10 +-- providers/fetch/rubyGemsFetch.js | 10 +-- providers/filter/filter.js | 2 +- providers/index.js | 10 +-- providers/logging/logger.js | 2 +- .../abstractClearlyDefinedProcessor.js | 12 ++-- providers/process/abstractProcessor.js | 26 ++++---- providers/process/component.js | 2 +- providers/process/composerExtract.js | 4 +- providers/process/condaExtract.js | 6 +- providers/process/condaSrcExtract.js | 2 +- providers/process/crateExtract.js | 2 +- providers/process/debExtract.js | 4 +- providers/process/debsrcExtract.js | 4 +- providers/process/fossology.js | 30 ++++----- providers/process/fsfeReuse.js | 24 +++---- providers/process/gemExtract.js | 2 +- providers/process/licensee.js | 24 +++---- providers/process/mavenExtract.js | 4 +- providers/process/npmExtract.js | 4 +- providers/process/nugetExtract.js | 2 +- providers/process/package.js | 2 +- providers/process/podExtract.js | 2 +- providers/process/pypiExtract.js | 4 +- providers/process/scancode.js | 22 +++---- providers/process/source.js | 2 +- providers/process/sourceExtract.js | 2 +- providers/process/top.js | 46 ++++++------- providers/store/attachmentStore.js | 12 ++-- providers/store/attachmentStoreFactory.js | 4 +- providers/store/azureQueueStore.js | 2 +- providers/store/storeDispatcher.js | 16 ++--- providers/store/webhookDeltaStore.js | 6 +- test/unit/ghcrawler/crawlerFactoryTest.js | 4 +- test/unit/ghcrawler/queueSetTests.js | 16 ++--- test/unit/lib/entitySpecTests.js | 2 +- test/unit/lib/fetchResultTests.js | 2 +- test/unit/lib/memoryCacheTest.js | 12 ++-- test/unit/lib/sourceSpecTests.js | 2 +- test/unit/lib/utilsTests.js | 6 +- test/unit/providers/fetch/condaFetchTests.js | 20 +++--- .../providers/fetch/cratesioFetchTests.js | 18 ++--- test/unit/providers/fetch/debianFetchTests.js | 28 ++++---- test/unit/providers/fetch/dispatcherTests.js | 53 +++++++-------- test/unit/providers/fetch/gitClonerTests.js | 8 +-- test/unit/providers/fetch/goFetchTests.js | 66 +++++++++---------- .../providers/fetch/gradlePluginFetchTests.js | 24 
+++---- .../providers/fetch/mavencentralFetchTests.js | 52 +++++++-------- .../providers/fetch/mavengoogleFetchTests.js | 50 +++++++------- test/unit/providers/fetch/npmjsFetchTests.js | 10 +-- test/unit/providers/fetch/nugetFetchTests.js | 6 +- .../providers/fetch/packagistFetchTests.js | 10 +-- test/unit/providers/fetch/podFetchTests.js | 8 +-- test/unit/providers/fetch/pypiFetchTests.js | 8 +-- .../providers/fetch/rubyGemsFetchTests.js | 4 +- .../abstractClearylDefinedProcessorTests.js | 10 +-- .../process/abstractProcessorTests.js | 18 ++--- .../providers/process/composerExtractTests.js | 20 +++--- .../providers/process/condaExtractTests.js | 14 ++-- .../providers/process/crateExtractTests.js | 34 +++++----- .../unit/providers/process/debExtractTests.js | 8 +-- test/unit/providers/process/fsfeReuseTests.js | 10 +-- .../unit/providers/process/gemExtractTests.js | 4 +- test/unit/providers/process/goExtractTests.js | 12 ++-- test/unit/providers/process/licenseeTests.js | 4 +- .../providers/process/mavenExtractTests.js | 2 +- .../unit/providers/process/npmExtractTests.js | 20 +++--- .../providers/process/nugetExtractTests.js | 30 ++++----- .../providers/process/pypiExtractTests.js | 2 +- test/unit/providers/process/scancodeTests.js | 22 +++---- test/unit/providers/process/sourceTests.js | 6 +- .../providers/queuing/scopedQueueSetsTests.js | 20 +++--- .../queuing/storageBackedQueueTest.js | 6 +- .../providers/store/attachmentStoreTests.js | 6 +- 126 files changed, 792 insertions(+), 797 deletions(-) diff --git a/.prettierrc.json b/.prettierrc.json index b3977820..449ecaa3 100644 --- a/.prettierrc.json +++ b/.prettierrc.json @@ -1,5 +1,7 @@ { + "arrowParens": "avoid", "printWidth": 120, "singleQuote": true, - "semi": false + "semi": false, + "trailingComma": "none" } diff --git a/config/cdConfig.js b/config/cdConfig.js index 7af14e0c..31869b54 100644 --- a/config/cdConfig.js +++ b/config/cdConfig.js @@ -5,13 +5,13 @@ const config = require('painless-config') const cd_azblob = { connection: config.get('CRAWLER_AZBLOB_CONNECTION_STRING'), - container: config.get('CRAWLER_AZBLOB_CONTAINER_NAME'), + container: config.get('CRAWLER_AZBLOB_CONTAINER_NAME') } const githubToken = config.get('CRAWLER_GITHUB_TOKEN') const cd_file = { - location: config.get('FILE_STORE_LOCATION') || (process.platform === 'win32' ? 'c:/temp/cd' : '/tmp/cd'), + location: config.get('FILE_STORE_LOCATION') || (process.platform === 'win32' ? 
'c:/temp/cd' : '/tmp/cd')
 }
 const crawlerStoreProvider = config.get('CRAWLER_STORE_PROVIDER') || 'cd(file)'
 const maxRequeueAttemptCount = config.get('CRAWLER_MAX_REQUEUE_ATTEMPTS') || 5
@@ -22,20 +22,20 @@ module.exports = {
   searchPath: [module],
   crawler: {
     count: 2,
-    maxRequeueAttemptCount,
+    maxRequeueAttemptCount
   },
   filter: {
     provider: 'filter',
-    filter: {},
+    filter: {}
   },
   fetch: {
     dispatcher: 'cdDispatch',
     cdDispatch: {
-      fetched: { defaultTtlSeconds: fetchedCacheTtlSeconds },
+      fetched: { defaultTtlSeconds: fetchedCacheTtlSeconds }
     },
     cocoapods: { githubToken },
     conda: {
-      cdFileLocation: cd_file.location,
+      cdFileLocation: cd_file.location
     },
     cratesio: {},
     debian: { cdFileLocation: cd_file.location },
@@ -48,7 +48,7 @@ module.exports = {
     nuget: {},
     packagist: {},
     pypi: {},
-    rubygems: {},
+    rubygems: {}
   },
   process: {
     cdsource: {},
@@ -60,7 +60,7 @@ module.exports = {
     debsrc: {},
     fossology: {
       disabled: true,
-      installDir: config.get('FOSSOLOGY_HOME') || '/mnt/c/git/fo/fossology/src/',
+      installDir: config.get('FOSSOLOGY_HOME') || '/mnt/c/git/fo/fossology/src/'
     },
     gem: { githubToken },
     go: { githubToken },
@@ -90,39 +90,39 @@ module.exports = {
         '--classify',
         '--generated',
         '--summary',
-        '--summary-key-files',
+        '--summary-key-files'
         // '--quiet'
       ],
       timeout: 1000,
       processes: 2,
-      format: '--json-pp',
+      format: '--json-pp'
     },
     source: {},
-    top: { githubToken },
+    top: { githubToken }
   },
   store: {
     dispatcher: crawlerStoreProvider,
     cdDispatch: {},
     webhook: {
       url: config.get('CRAWLER_WEBHOOK_URL') || 'http://localhost:4000/webhook',
-      token: config.get('CRAWLER_WEBHOOK_TOKEN'),
+      token: config.get('CRAWLER_WEBHOOK_TOKEN')
     },
     azqueue: {
       connectionString: cd_azblob.connection,
-      queueName: config.get('CRAWLER_HARVESTS_QUEUE_NAME') || 'harvests',
+      queueName: config.get('CRAWLER_HARVESTS_QUEUE_NAME') || 'harvests'
     },
     'cd(azblob)': cd_azblob,
-    'cd(file)': cd_file,
+    'cd(file)': cd_file
   },
   deadletter: {
     provider: config.get('CRAWLER_DEADLETTER_PROVIDER') || crawlerStoreProvider,
     'cd(azblob)': cd_azblob,
-    'cd(file)': cd_file,
+    'cd(file)': cd_file
   },
   queue: {
     provider: config.get('CRAWLER_QUEUE_PROVIDER') || 'memory',
     memory: {
-      weights: { immediate: 3, soon: 2, normal: 3, later: 2 },
+      weights: { immediate: 3, soon: 2, normal: 3, later: 2 }
     },
     storageQueue: {
       weights: { immediate: 3, soon: 2, normal: 3, later: 2 },
@@ -132,8 +132,8 @@ module.exports = {
       visibilityTimeout_remainLocal: fetchedCacheTtlSeconds,
       maxDequeueCount: 5,
       attenuation: {
-        ttl: 3000,
-      },
-    },
-  },
+        ttl: 3000
+      }
+    }
+  }
 }
diff --git a/config/cdMemoryConfig.js b/config/cdMemoryConfig.js
index 03a58b1d..7c11c4ff 100644
--- a/config/cdMemoryConfig.js
+++ b/config/cdMemoryConfig.js
@@ -4,26 +4,26 @@
 module.exports = {
   crawler: {
     count: 1,
-    maxRequeueAttemptCount: 5,
+    maxRequeueAttemptCount: 5
   },
   fetch: {
-    github: {},
+    github: {}
   },
   process: {
     scancode: {},
     licensee: {},
-    reuse: {},
+    reuse: {}
   },
   store: {
-    provider: 'memory',
+    provider: 'memory'
  },
   deadletter: {
-    provider: 'memory',
+    provider: 'memory'
   },
   queue: {
     provider: 'memory',
     memory: {
-      weights: { events: 10, immediate: 3, soon: 2, normal: 3, later: 2 },
-    },
-  },
+      weights: { events: 10, immediate: 3, soon: 2, normal: 3, later: 2 }
+    }
+  }
 }
diff --git a/config/map.js b/config/map.js
index 5f11552a..63e7a312 100644
--- a/config/map.js
+++ b/config/map.js
@@ -22,7 +22,7 @@ const source = {
   licensee,
   reuse,
   scancode,
-  fossology,
+  fossology
 }
 
 const npm = {
@@ -32,7 +32,7 @@ const npm = {
   licensee,
   reuse,
   scancode,
-  fossology,
+  fossology
 }
 
 const conda = {
@@ -42,7 +42,7 @@ const conda = {
   licensee,
   reuse,
   scancode,
-  fossology,
+  fossology
 }
 
 const crate = {
@@ -52,7 +52,7 @@ const crate = {
   licensee,
   reuse,
   scancode,
-  fossology,
+  fossology
 }
 
 const deb = {
@@ -62,7 +62,7 @@ const deb = {
   licensee,
   reuse,
   scancode,
-  fossology,
+  fossology
 }
 
 const go = {
@@ -72,7 +72,7 @@ const go = {
   licensee,
   reuse,
   scancode,
-  fossology,
+  fossology
 }
 
 const maven = {
@@ -82,7 +82,7 @@ const maven = {
   licensee,
   reuse,
   scancode,
-  fossology,
+  fossology
 }
 
 const nuget = {
@@ -91,7 +91,7 @@ const nuget = {
   clearlydefined,
   licensee,
   scancode,
-  reuse,
+  reuse
 }
 
 const pod = {
@@ -101,7 +101,7 @@ const pod = {
   licensee,
   reuse,
   scancode,
-  fossology,
+  fossology
 }
 
 const pypi = {
@@ -111,7 +111,7 @@ const pypi = {
   licensee,
   reuse,
   scancode,
-  fossology,
+  fossology
 }
 
 const composer = {
@@ -121,7 +121,7 @@ const composer = {
   licensee,
   reuse,
   scancode,
-  fossology,
+  fossology
 }
 
 const gem = {
@@ -131,7 +131,7 @@ const gem = {
   licensee,
   reuse,
   scancode,
-  fossology,
+  fossology
 }
 
 const _package = {
@@ -146,13 +146,13 @@ const _package = {
   pod,
   pypi,
   composer,
-  gem,
+  gem
 }
 
 const component = {
   _type: 'component',
   source,
-  package: _package,
+  package: _package
 }
 
 const entities = {
@@ -176,9 +176,9 @@ const entities = {
   composer,
   pod,
   pypi,
-  gem,
+  gem
 }
 
 module.exports = {
-  default: entities,
+  default: entities
 }
diff --git a/eslint.config.js b/eslint.config.js
index d4e56395..ff3291eb 100644
--- a/eslint.config.js
+++ b/eslint.config.js
@@ -8,17 +8,17 @@ module.exports = [
     languageOptions: {
       globals: {
         ...globals.node,
-        ...globals.mocha,
+        ...globals.mocha
       },
       parserOptions: {
-        sourceType: 'module',
-      },
+        sourceType: 'module'
+      }
     },
     rules: {
       quotes: ['error', 'single'],
       semi: ['error', 'never'],
-      'no-console': 'off',
-    },
+      'no-console': 'off'
+    }
   },
-  eslintConfigPrettier,
+  eslintConfigPrettier
 ]
diff --git a/ghcrawler/app.js b/ghcrawler/app.js
index e50982c6..20af0261 100644
--- a/ghcrawler/app.js
+++ b/ghcrawler/app.js
@@ -9,7 +9,7 @@ const morgan = require('morgan')
 const sendHelper = require('./middleware/sendHelper')
 
 function configureApp(service, logger) {
-  process.on('unhandledRejection', (exception) => logger.error('unhandledRejection', exception))
+  process.on('unhandledRejection', exception => logger.error('unhandledRejection', exception))
   auth.initialize(config.get('CRAWLER_SERVICE_AUTH_TOKEN') || 'secret', config.get('CRAWLER_SERVICE_FORCE_AUTH'))
   const app = express()
 
@@ -44,11 +44,11 @@ function configureApp(service, logger) {
       // call the callback but with no args. An arg indicates an error.
callback() }, - (error) => { + error => { console.log(`Service initialization error: ${error.message}`) console.dir(error) callback(error) - }, + } ) } diff --git a/ghcrawler/bin/www.js b/ghcrawler/bin/www.js index fe076836..996b2d97 100644 --- a/ghcrawler/bin/www.js +++ b/ghcrawler/bin/www.js @@ -25,7 +25,7 @@ function run(service, logger) { const server = http.createServer(app) // initialize the apps (if they have async init functions) and start listening - init(app, (error) => { + init(app, error => { if (error) { console.log('Error initializing the Express app: ' + error) throw new Error(error) @@ -105,10 +105,10 @@ function run(service, logger) { console.log('Server closed.') process.exit(0) }, - (error) => { + error => { console.error(`Closing server: ${error}`) process.exit(1) - }, + } ) } diff --git a/ghcrawler/crawlerFactory.js b/ghcrawler/crawlerFactory.js index 184ac01e..2879e585 100644 --- a/ghcrawler/crawlerFactory.js +++ b/ghcrawler/crawlerFactory.js @@ -21,14 +21,14 @@ class CrawlerFactory { const optionsProvider = defaults.provider || 'memory' const crawlerName = (defaults.crawler && defaults.crawler.name) || 'crawler' - searchPath.forEach((entry) => providerSearchPath.push(entry)) + searchPath.forEach(entry => providerSearchPath.push(entry)) const subsystemNames = ['crawler', 'filter', 'fetch', 'process', 'queue', 'store', 'deadletter', 'lock'] const crawlerPromise = CrawlerFactory.createRefreshingOptions( crawlerName, subsystemNames, defaults, - optionsProvider, - ).then((options) => { + optionsProvider + ).then(options => { logger.info('created all refreshingOptions') finalOptions = options const crawler = CrawlerFactory.createCrawler(options) @@ -55,8 +55,8 @@ class CrawlerFactory { locker = null, filter = null, fetchers = null, - processors = null, - } = {}, + processors = null + } = {} ) { logger.info('creating crawler') queues = queues || CrawlerFactory.createQueues(options.queue) @@ -84,7 +84,7 @@ class CrawlerFactory { const result = {} refreshingProvider = refreshingProvider.toLowerCase() await Promise.all( - subsystemNames.map((subsystemName) => { + subsystemNames.map(subsystemName => { // Any given subsytem may have a provider or may be a list of providers. If a particular provider is // identified then hook up just that set of options for refreshing. logger.info(`creating refreshing options ${subsystemName} with provider ${refreshingProvider}`) @@ -97,7 +97,7 @@ class CrawlerFactory { } else { throw new Error(`Invalid refreshing provider setting ${refreshingProvider}`) } - return config.getAll().then((values) => { + return config.getAll().then(values => { logger.info(`got refreshingOption values for ${subsystemName}`) // grab the right defaults. May need to drill down a level if the subsystem has a provider const trueDefaults = subProvider ? 
subDefaults[subProvider] || {} : subDefaults @@ -109,7 +109,7 @@ class CrawlerFactory { else result[subsystemName] = values }) }) - }), + }) ) return result } @@ -119,9 +119,9 @@ class CrawlerFactory { return config } await Promise.all( - Object.getOwnPropertyNames(defaults).map((optionName) => { + Object.getOwnPropertyNames(defaults).map(optionName => { return config._config.set(optionName, defaults[optionName]) - }), + }) ) return config._config.getAll() } @@ -130,7 +130,7 @@ class CrawlerFactory { logger.info('creating in memory refreshing config') const configStore = new RefreshingConfig.InMemoryConfigStore(values) const config = new RefreshingConfig.RefreshingConfig(configStore).withExtension( - new RefreshingConfig.InMemoryPubSubRefreshPolicyAndChangePublisher(), + new RefreshingConfig.InMemoryPubSubRefreshPolicyAndChangePublisher() ) return config } @@ -164,8 +164,8 @@ class CrawlerFactory { static _getNamedProviders(options, namespace, names, ...params) { return names - .filter((key) => !['_config', 'logger', 'dispatcher', options.dispatcher].includes(key)) - .map((name) => CrawlerFactory._getProvider(options, name, namespace, ...params)) + .filter(key => !['_config', 'logger', 'dispatcher', options.dispatcher].includes(key)) + .map(name => CrawlerFactory._getProvider(options, name, namespace, ...params)) } static createFilter(options, processors) { diff --git a/ghcrawler/index.js b/ghcrawler/index.js index 9248b727..765b3cb1 100644 --- a/ghcrawler/index.js +++ b/ghcrawler/index.js @@ -17,6 +17,6 @@ const VisitorMap = require('./lib/visitorMap') module.exports.run = (defaults, logger, searchPath, maps) => { const service = CrawlerFactory.createService(defaults, logger, searchPath) - Object.getOwnPropertyNames(maps).forEach((name) => VisitorMap.register(name, maps[name])) + Object.getOwnPropertyNames(maps).forEach(name => VisitorMap.register(name, maps[name])) www(service, logger) } diff --git a/ghcrawler/lib/crawler.js b/ghcrawler/lib/crawler.js index 6d922bb2..69553802 100644 --- a/ghcrawler/lib/crawler.js +++ b/ghcrawler/lib/crawler.js @@ -19,7 +19,7 @@ const defaultOptions = { processingTtl: 60 * 1000, promiseTrace: false, requeueDelay: 5000, - deadletterPolicy: 'always', // Another option: excludeNotFound + deadletterPolicy: 'always' // Another option: excludeNotFound } class Crawler { @@ -262,7 +262,7 @@ class Crawler { return request } - request.getTrackedCleanups().forEach((cleanup) => { + request.getTrackedCleanups().forEach(cleanup => { try { cleanup() } catch (error) { @@ -277,25 +277,25 @@ class Crawler { const originalPromise = trackedPromises[i] originalPromise.then( - (result) => { + result => { completedPromises++ debug( `_completeRequest(${loopName}:${request.toUniqueString()}): completed ${completedPromises} of ${ trackedPromises.length - } promises (${failedPromises} failed)`, + } promises (${failedPromises} failed)` ) return result }, - (error) => { + error => { completedPromises++ failedPromises++ debug( `_completeRequest(${loopName}:${request.toUniqueString()}): completed ${completedPromises} of ${ trackedPromises.length - } promises (${failedPromises} failed)`, + } promises (${failedPromises} failed)` ) throw error - }, + } ) } debug(`_completeRequest(${loopName}:${request.toUniqueString()}): ${trackedPromises.length} tracked promises`) @@ -306,25 +306,25 @@ class Crawler { () => { return self._deleteFromQueue(request) }, - (error) => { + error => { debug(`_completeRequest(${loopName}:${request.toUniqueString()}): catch release lock`) self.logger.error(error) 
return self._abandonInQueue(request) - }, + } ) }, - (error) => { + error => { debug(`_completeRequest(${loopName}:${request.toUniqueString()}): catch tracked promises`) self.logger.error(error) return self._completeRequest(request, true) - }, + } ) return completeWork .then(() => { debug(`_completeRequest(${loopName}:${request.toUniqueString()}): exit (success)`) return request }) - .catch((error) => { + .catch(error => { debug(`_completeRequest(${loopName}:${request.toUniqueString()}): catch completeWork`) throw error }) @@ -389,7 +389,7 @@ class Crawler { return request } return handler.handle(request) - }).then((request) => { + }).then(request => { debug(`_fetch(${loopName}:${request.toUniqueString()}): exit (success - fetched)`) return request }) @@ -422,7 +422,7 @@ class Crawler { type: request.type, url: request.url, fetchedAt: DateTime.utc().toISO(), - links: {}, + links: {} } if (request.response) { if (request.response.headers) { @@ -470,7 +470,7 @@ class Crawler { return request } return this._logStartEnd('processing', request, () => { - return this._process(request).then((result) => { + return this._process(request).then(result => { debug(`_processDocument(${loopName}:${request.toUniqueString()}): exit (success)`) return result }) @@ -549,7 +549,7 @@ class Crawler { const start = Date.now() const documentToStore = this._buildDocumentToStore(request.document) - return this.store.upsert(documentToStore).then((upsert) => { + return this.store.upsert(documentToStore).then(upsert => { request.upsert = upsert request.addMeta({ write: Date.now() - start }) debug(`_storeDocument(${loopName}:${request.toUniqueString()}): exit (success)`) @@ -620,7 +620,7 @@ class Crawler { debug(`storeDeadletter(${loopName}:${request.toUniqueString()}): enter`) if (this.options.deadletterPolicy === 'excludeNotFound' && reason && reason.toLowerCase().includes('status 404')) { this.logger.info( - `storeDeadletter(${loopName}:${request.toUniqueString()}): not storing due to configured deadletter policy`, + `storeDeadletter(${loopName}:${request.toUniqueString()}): not storing due to configured deadletter policy` ) return request } @@ -654,7 +654,7 @@ class Crawler { _preFilter(requests) { const list = Array.isArray(requests) ? requests : [requests] - return list.filter((request) => { + return list.filter(request => { if (!request.url || !request.type) { this._storeDeadletter(request, `Attempt to queue malformed request ${request.toString()}`) return false diff --git a/ghcrawler/lib/crawlerService.js b/ghcrawler/lib/crawlerService.js index c7e329d8..a6a2743e 100644 --- a/ghcrawler/lib/crawlerService.js +++ b/ghcrawler/lib/crawlerService.js @@ -37,7 +37,7 @@ class CrawlerService { } async ensureLoops(targetCount = this.options.crawler.count) { - this.loops = this.loops.filter((loop) => loop.running()) + this.loops = this.loops.filter(loop => loop.running()) const running = this.status() const delta = targetCount - running if (delta < 0) { @@ -98,10 +98,10 @@ class CrawlerService { for (let i = 0; i < count; i++) { result.push(queue.pop()) } - return Promise.all(result).then((requests) => { - const filtered = requests.filter((request) => request) - return Promise.all(filtered.map((request) => (remove ? queue.done(request) : queue.abandon(request)))).then( - filtered, + return Promise.all(result).then(requests => { + const filtered = requests.filter(request => request) + return Promise.all(filtered.map(request => (remove ? 
queue.done(request) : queue.abandon(request)))).then( + filtered ) }) } @@ -121,7 +121,7 @@ class CrawlerService { requeueDeadletter(url, queue) { const self = this return this.getDeadletter(url) - .then((document) => { + .then(document => { const request = Request.adopt(document).createRequeuable() request.attemptCount = 0 return self.crawler.queues.push([request], queue) @@ -137,7 +137,7 @@ class CrawlerService { _reconfigure(current, changes) { // if the loop count changed, make it so - if (changes.some((patch) => patch.path === '/count')) { + if (changes.some(patch => patch.path === '/count')) { return this.options.crawler.count.value > 0 ? this.run() : this.stop() } return null @@ -162,8 +162,8 @@ class CrawlerLoop { } this.state = 'running' // Create callback that when run, resolves a promise and completes this loop - const donePromise = new Promise((resolve) => { - this.done = (value) => resolve(value) + const donePromise = new Promise(resolve => { + this.done = value => resolve(value) this.options.done = this.done }) donePromise.finally(() => { diff --git a/ghcrawler/lib/request.js b/ghcrawler/lib/request.js index 307722ac..241143b0 100644 --- a/ghcrawler/lib/request.js +++ b/ghcrawler/lib/request.js @@ -131,7 +131,7 @@ class Request { return this } const toRemove = Array.isArray(cleanups) ? cleanups : [cleanups] - this.cleanups = this.cleanups.filter((item) => !toRemove.includes(item)) + this.cleanups = this.cleanups.filter(item => !toRemove.includes(item)) return this } @@ -192,7 +192,7 @@ class Request { queueRequests(requests, name = null, scope = null) { requests = Array.isArray(requests) ? requests : [requests] - const toQueue = requests.filter((request) => !this.hasSeen(request)) + const toQueue = requests.filter(request => !this.hasSeen(request)) this.track(this.crawler.queue(toQueue, name, scope)) } diff --git a/ghcrawler/lib/traversalPolicy.js b/ghcrawler/lib/traversalPolicy.js index 9acd2bc8..badb8679 100644 --- a/ghcrawler/lib/traversalPolicy.js +++ b/ghcrawler/lib/traversalPolicy.js @@ -217,7 +217,7 @@ class TraversalPolicy { originMutable: 'storage', storageOriginIfMissing: 'storage', mutables: mutablesValue, - originOnly: 'origin', + originOnly: 'origin' }[this.fetch] if (!result) { throw new Error(`Fetch policy misconfigured ${this.fetch}`) @@ -234,7 +234,7 @@ class TraversalPolicy { originStorage: 'origin', storageOriginIfMissing: 'origin', mutables: 'origin', - originOnly: null, + originOnly: null }[this.fetch] if (result === undefined) { throw new Error(`Fetch policy misconfigured ${this.fetch}`) diff --git a/ghcrawler/memoryConfig.js b/ghcrawler/memoryConfig.js index b77e9b11..15777ddb 100644 --- a/ghcrawler/memoryConfig.js +++ b/ghcrawler/memoryConfig.js @@ -3,23 +3,23 @@ module.exports = { crawler: { - count: 1, + count: 1 }, fetch: {}, process: {}, store: { - provider: 'memory', + provider: 'memory' }, deadletter: { - provider: 'memory', + provider: 'memory' }, lock: { - provider: 'memory', + provider: 'memory' }, queue: { provider: 'memory', memory: { - weights: { immediate: 3, soon: 2, normal: 3, later: 2 }, - }, - }, + weights: { immediate: 3, soon: 2, normal: 3, later: 2 } + } + } } diff --git a/ghcrawler/middleware/asyncMiddleware.js b/ghcrawler/middleware/asyncMiddleware.js index 9d2dfd59..95f7de65 100644 --- a/ghcrawler/middleware/asyncMiddleware.js +++ b/ghcrawler/middleware/asyncMiddleware.js @@ -1,7 +1,7 @@ // Copyright (c) Microsoft Corporation and others. Licensed under the MIT license. 
// SPDX-License-Identifier: MIT -module.exports = (func) => async (request, response, next) => { +module.exports = func => async (request, response, next) => { try { await func(request, response, next) } catch (error) { diff --git a/ghcrawler/middleware/sendHelper.js b/ghcrawler/middleware/sendHelper.js index 23d60ba6..ec9ec4c8 100644 --- a/ghcrawler/middleware/sendHelper.js +++ b/ghcrawler/middleware/sendHelper.js @@ -10,10 +10,10 @@ function create() { response.helpers.send = { context: { request: request, - response: response, + response: response }, noContent: noContent, - partialHtml: partialHtml, + partialHtml: partialHtml } next() } diff --git a/ghcrawler/providers/index.js b/ghcrawler/providers/index.js index 55f6c56c..cad24f75 100644 --- a/ghcrawler/providers/index.js +++ b/ghcrawler/providers/index.js @@ -4,14 +4,14 @@ module.exports = { queue: { storageQueue: require('./queuing/storageQueueFactory'), - memory: require('./queuing/memoryFactory'), + memory: require('./queuing/memoryFactory') }, store: { memory: require('./storage/inmemoryDocStore'), file: require('./storage/file'), - azblob: require('./storage/azureBlobFactory'), + azblob: require('./storage/azureBlobFactory') }, lock: { - memory: require('./locker/memory'), - }, + memory: require('./locker/memory') + } } diff --git a/ghcrawler/providers/queuing/attenuatedQueue.js b/ghcrawler/providers/queuing/attenuatedQueue.js index 79dc13e0..125dbaee 100644 --- a/ghcrawler/providers/queuing/attenuatedQueue.js +++ b/ghcrawler/providers/queuing/attenuatedQueue.js @@ -25,10 +25,10 @@ class AttenuatedQueue extends NestedQueue { requests = Array.isArray(requests) ? requests : [requests] return Promise.all( requests.map( - qlimit(this.options.parallelPush || 1)((request) => { + qlimit(this.options.parallelPush || 1)(request => { return self._pushOne(request) - }), - ), + }) + ) ) } @@ -55,7 +55,7 @@ class AttenuatedQueue extends NestedQueue { } entry = { timestamp: Date.now(), - promise: this.queue.push(request), + promise: this.queue.push(request) } const ttl = (this.options.attenuation && this.options.attenuation.ttl) || 1000 memoryCache.put(key, entry, ttl) diff --git a/ghcrawler/providers/queuing/inmemorycrawlqueue.js b/ghcrawler/providers/queuing/inmemorycrawlqueue.js index b198e319..10b9bebf 100644 --- a/ghcrawler/providers/queuing/inmemorycrawlqueue.js +++ b/ghcrawler/providers/queuing/inmemorycrawlqueue.js @@ -18,7 +18,7 @@ class InMemoryCrawlQueue { async push(requests) { requests = Array.isArray(requests) ? 
requests : [requests] - requests = requests.map((request) => extend(true, {}, request)) + requests = requests.map(request => extend(true, {}, request)) this.queue = this.queue.concat(requests) } @@ -60,7 +60,7 @@ class InMemoryCrawlQueue { async getInfo() { return { count: this.queue.length, - metricsName: this.name, + metricsName: this.name } } } diff --git a/ghcrawler/providers/queuing/memoryFactory.js b/ghcrawler/providers/queuing/memoryFactory.js index 861873f8..fbed68d6 100644 --- a/ghcrawler/providers/queuing/memoryFactory.js +++ b/ghcrawler/providers/queuing/memoryFactory.js @@ -5,11 +5,11 @@ const CrawlerFactory = require('../../crawlerFactory') const AttenuatedQueue = require('./attenuatedQueue') const InMemoryCrawlQueue = require('./inmemorycrawlqueue') -module.exports = (options) => { +module.exports = options => { const manager = { createQueueChain: (name, options) => { return new AttenuatedQueue(new InMemoryCrawlQueue(name, options), options) - }, + } } return CrawlerFactory.createScopedQueueSets({ globalManager: manager, localManager: manager }, options) } diff --git a/ghcrawler/providers/queuing/queueSet.js b/ghcrawler/providers/queuing/queueSet.js index 07198518..3df4efd1 100644 --- a/ghcrawler/providers/queuing/queueSet.js +++ b/ghcrawler/providers/queuing/queueSet.js @@ -28,7 +28,7 @@ class QueueSet { } _reconfigure(current, changes) { - if (changes.some((patch) => patch.path.includes('/weights'))) { + if (changes.some(patch => patch.path.includes('/weights'))) { this._startMap = this._createStartMap(this.options.weights) } return Promise.resolve() @@ -40,17 +40,17 @@ class QueueSet { subscribe() { return Promise.all( - this.queues.map((queue) => { + this.queues.map(queue => { return queue.subscribe() - }), + }) ) } unsubscribe() { return Promise.all( - this.queues.map((queue) => { + this.queues.map(queue => { return queue.unsubscribe() - }), + }) ) } diff --git a/ghcrawler/providers/queuing/scopedQueueSets.js b/ghcrawler/providers/queuing/scopedQueueSets.js index 394defbb..9c2c6876 100644 --- a/ghcrawler/providers/queuing/scopedQueueSets.js +++ b/ghcrawler/providers/queuing/scopedQueueSets.js @@ -7,7 +7,7 @@ class ScopedQueueSets { constructor(globalQueues, localQueues) { this._scopedQueues = { local: localQueues, - global: globalQueues, + global: globalQueues } } @@ -32,22 +32,22 @@ class ScopedQueueSets { subscribe() { return Promise.all( - Object.values(this._scopedQueues).map((queues) => { + Object.values(this._scopedQueues).map(queues => { return queues.subscribe() - }), + }) ) } unsubscribe() { return Promise.all( - Object.values(this._scopedQueues).map((queues) => { + Object.values(this._scopedQueues).map(queues => { return queues.unsubscribe() - }), + }) ) } pop() { - return this._scopedQueues.local.pop().then((request) => { + return this._scopedQueues.local.pop().then(request => { if (request) { //mark to retry on the global queues request._retryQueue = request._originQueue.getName() @@ -78,23 +78,23 @@ class ScopedQueueSets { } publish() { - const publishToGlobal = async (localQueue) => { + const publishToGlobal = async localQueue => { const localRequests = [] const info = await localQueue.getInfo() for (let count = info.count; count > 0; count--) { localRequests.push( localQueue .pop() - .then((request) => request && localQueue.done(request).then(() => request.createRequeuable())) - .then((request) => request && this.push(request, localQueue.getName(), 'global')), + .then(request => request && localQueue.done(request).then(() => request.createRequeuable())) + 
.then(request => request && this.push(request, localQueue.getName(), 'global')) ) } debug(`publishing ${localRequests.length} to ${localQueue.getName()}`) return Promise.all(localRequests) } - return Promise.allSettled(this._scopedQueues.local.queues.map(publishToGlobal)).then((results) => { - const found = results.find((result) => result.status === 'rejected') + return Promise.allSettled(this._scopedQueues.local.queues.map(publishToGlobal)).then(results => { + const found = results.find(result => result.status === 'rejected') if (found) throw new Error(found.reason) }) } diff --git a/ghcrawler/providers/queuing/storageBackedQueue.js b/ghcrawler/providers/queuing/storageBackedQueue.js index f8324e26..b5de7f3d 100644 --- a/ghcrawler/providers/queuing/storageBackedQueue.js +++ b/ghcrawler/providers/queuing/storageBackedQueue.js @@ -82,7 +82,7 @@ class StorageBackedQueue extends NestedQueue { const deleteRequests = [] const info = await this.getInfo() for (let count = info.count; count > 0; count--) { - const deleteOne = super.pop().then((request) => this.done(request)) + const deleteOne = super.pop().then(request => this.done(request)) deleteRequests.push(deleteOne) } const results = await Promise.allSettled(deleteRequests) @@ -90,9 +90,7 @@ class StorageBackedQueue extends NestedQueue { } _throwIfError(results, message) { - const errors = results - .filter((result) => result.status === 'rejected') - .map((rejected) => new Error(rejected.reason)) + const errors = results.filter(result => result.status === 'rejected').map(rejected => new Error(rejected.reason)) if (errors.length) throw new AggregateError(errors, message) } @@ -103,7 +101,7 @@ class StorageBackedQueue extends NestedQueue { static create(queue, storageQueue, options = {}) { const defaultOptions = { visibilityTimeout_remainLocal: VISIBILITY_TIMEOUT_TO_REMAIN_ON_LOCAL_QUEUE, - visibilityTimeout: VISIBILITY_TIMEOUT_FOR_PROCESSING, + visibilityTimeout: VISIBILITY_TIMEOUT_FOR_PROCESSING } const optionsWithDefaults = { ...defaultOptions, ...options } return new StorageBackedQueue(queue, storageQueue, optionsWithDefaults) diff --git a/ghcrawler/providers/queuing/storageQueue.js b/ghcrawler/providers/queuing/storageQueue.js index 4e3e8490..c52e1257 100644 --- a/ghcrawler/providers/queuing/storageQueue.js +++ b/ghcrawler/providers/queuing/storageQueue.js @@ -16,7 +16,7 @@ class StorageQueue { async subscribe() { return new Promise((resolve, reject) => { - this.client.createQueueIfNotExists(this.queueName, (error) => { + this.client.createQueueIfNotExists(this.queueName, error => { if (error) { return reject(error) } @@ -34,7 +34,7 @@ class StorageQueue { requests = Array.isArray(requests) ? 
requests : [requests] return Promise.all( requests.map( - qlimit(this.options.parallelPush || 1)((request) => { + qlimit(this.options.parallelPush || 1)(request => { const body = JSON.stringify(request) return new Promise((resolve, reject) => { this.client.createMessage(this.queueName, body, option, (error, queueMessageResult) => { @@ -45,8 +45,8 @@ class StorageQueue { resolve(this._buildMessageReceipt(queueMessageResult, request)) }) }) - }), - ), + }) + ) ) } @@ -69,7 +69,7 @@ class StorageQueue { } if (this.options.maxDequeueCount && message.dequeueCount > this.options.maxDequeueCount) { this.logger.verbose('maxDequeueCount exceeded') - this.client.deleteMessage(this.queueName, message.messageId, message.popReceipt, (error) => { + this.client.deleteMessage(this.queueName, message.messageId, message.popReceipt, error => { if (error) return reject(error) resolve(null) }) @@ -89,7 +89,7 @@ class StorageQueue { return } return new Promise((resolve, reject) => { - this.client.deleteMessage(this.queueName, request._message.messageId, request._message.popReceipt, (error) => { + this.client.deleteMessage(this.queueName, request._message.messageId, request._message.popReceipt, error => { if (error) { return reject(error) } @@ -124,16 +124,16 @@ class StorageQueue { } this._log('NAKed', request._message.body) resolve(this._buildMessageReceipt(result, request._message.body)) - }, + } ) }) } async flush() { return new Promise((resolve, reject) => { - this.client.deleteQueue(this.queueName, (error) => { + this.client.deleteQueue(this.queueName, error => { if (error) return reject(error) - this.client.createQueueIfNotExists(this.queueName, (error) => { + this.client.createQueueIfNotExists(this.queueName, error => { if (error) return reject(error) resolve() }) @@ -142,7 +142,7 @@ class StorageQueue { } async getInfo() { - return new Promise((resolve) => { + return new Promise(resolve => { this.client.getQueueMetadata(this.queueName, (result, error) => { if (error) { this.logger.error(error) diff --git a/ghcrawler/providers/queuing/storageQueueFactory.js b/ghcrawler/providers/queuing/storageQueueFactory.js index 44178570..b345b27c 100644 --- a/ghcrawler/providers/queuing/storageQueueFactory.js +++ b/ghcrawler/providers/queuing/storageQueueFactory.js @@ -5,7 +5,7 @@ const StorageQueueManager = require('./storageQueueManager') const CrawlerFactory = require('../../crawlerFactory') const StorageBackedInMemoryQueueManager = require('./storageBackedInMemoryQueueManager') -module.exports = (options) => { +module.exports = options => { const { connectionString } = options const storageQueueManager = new StorageQueueManager(connectionString, options) const localManager = new StorageBackedInMemoryQueueManager(storageQueueManager) diff --git a/ghcrawler/providers/queuing/storageQueueManager.js b/ghcrawler/providers/queuing/storageQueueManager.js index 81b05917..2f23a7c9 100644 --- a/ghcrawler/providers/queuing/storageQueueManager.js +++ b/ghcrawler/providers/queuing/storageQueueManager.js @@ -22,7 +22,7 @@ class StorageQueueManager { } createQueue(name, options) { - const formatter = (message) => { + const formatter = message => { // make sure the message/request object is copied to enable deferral scenarios (i.e., the request is modified // and then put back on the queue) return Request.adopt(Object.assign({}, message.body)) diff --git a/ghcrawler/providers/storage/azureBlobFactory.js b/ghcrawler/providers/storage/azureBlobFactory.js index fbe0a2cd..2d2d5eb5 100644 --- 
a/ghcrawler/providers/storage/azureBlobFactory.js +++ b/ghcrawler/providers/storage/azureBlobFactory.js @@ -4,7 +4,7 @@ const AzureStorage = require('azure-storage') const AzureStorageDocStore = require('./storageDocStore') -module.exports = (options) => { +module.exports = options => { options.logger.info('creating azure storage store') const { account, key, connection, container } = options const retryOperations = new AzureStorage.ExponentialRetryPolicyFilter() diff --git a/ghcrawler/providers/storage/file.js b/ghcrawler/providers/storage/file.js index f8bb24ab..b38c48c7 100644 --- a/ghcrawler/providers/storage/file.js +++ b/ghcrawler/providers/storage/file.js @@ -24,14 +24,14 @@ class FileStore { const filePath = this._getPath(urn) mkdirp.sync(path.dirname(filePath)) return new Promise((resolve, reject) => - fs.writeFile(filePath, JSON.stringify(document, null, 2), (error) => (error ? reject(error) : resolve(document))), + fs.writeFile(filePath, JSON.stringify(document, null, 2), error => (error ? reject(error) : resolve(document))) ) } async get(type, key) { const path = this._getPath(key) return new Promise((resolve, reject) => - fs.readFile(path, (error, data) => (error ? reject(error) : resolve(JSON.parse(data)))), + fs.readFile(path, (error, data) => (error ? reject(error) : resolve(JSON.parse(data)))) ) } @@ -42,7 +42,7 @@ class FileStore { } etag(type, key) { - return this.get(type, key).then((result) => result._metadata.etag) + return this.get(type, key).then(result => result._metadata.etag) } // list(type) { @@ -57,7 +57,7 @@ class FileStore { count(type) { // TODO likewise wrt list. Not sure this is needed - return this.list(type).then((results) => { + return this.list(type).then(results => { return results.length }) } @@ -65,4 +65,4 @@ class FileStore { close() {} } -module.exports = (options) => new FileStore(options) +module.exports = options => new FileStore(options) diff --git a/ghcrawler/providers/storage/inmemoryDocStore.js b/ghcrawler/providers/storage/inmemoryDocStore.js index d6aaf068..dc4d8565 100644 --- a/ghcrawler/providers/storage/inmemoryDocStore.js +++ b/ghcrawler/providers/storage/inmemoryDocStore.js @@ -48,10 +48,10 @@ class InmemoryDocStore { collection = {} } return Object.keys(collection) - .filter((key) => { + .filter(key => { return key.startsWith('urn:') ? 
true : false }) - .map((key) => { + .map(key => { const metadata = collection[key]._metadata return { version: metadata.version, @@ -61,7 +61,7 @@ class InmemoryDocStore { urn: metadata.links.self.href, fetchedAt: metadata.fetchedAt, processedAt: metadata.processedAt, - extra: metadata.extra, + extra: metadata.extra } }) } @@ -90,4 +90,4 @@ class InmemoryDocStore { } } -module.exports = (options) => new InmemoryDocStore(options) +module.exports = options => new InmemoryDocStore(options) diff --git a/ghcrawler/providers/storage/storageDocStore.js b/ghcrawler/providers/storage/storageDocStore.js index 819eb9c8..8131dfa5 100644 --- a/ghcrawler/providers/storage/storageDocStore.js +++ b/ghcrawler/providers/storage/storageDocStore.js @@ -20,7 +20,7 @@ class AzureStorageDocStore { async _createContainer(name) { return new Promise((resolve, reject) => { - this.service.createContainerIfNotExists(name, (error) => { + this.service.createContainerIfNotExists(name, error => { if (error) { return reject(error) } @@ -38,7 +38,7 @@ class AzureStorageDocStore { url: document._metadata.url, urn: document._metadata.links.self.href, fetchedat: document._metadata.fetchedAt, - processedat: document._metadata.processedAt, + processedat: document._metadata.processedAt } if (document._metadata.extra) { blobMetadata.extra = JSON.stringify(document._metadata.extra) @@ -50,7 +50,7 @@ class AzureStorageDocStore { return new Promise((resolve, reject) => { dataStream .pipe(this.service.createWriteStreamToBlockBlob(this.name, blobName, options)) - .on('error', (error) => { + .on('error', error => { return reject(error) }) .on('finish', () => { @@ -76,7 +76,7 @@ class AzureStorageDocStore { // TODO: Consistency on whether key is a URL or URN async etag(type, key) { const blobName = this._getBlobNameFromKey(type, key) - return new Promise((resolve) => { + return new Promise(resolve => { this.service.getBlobMetadata(this.name, blobName, (error, blob) => { resolve(error ? null : blob.metadata.etag) }) @@ -95,7 +95,7 @@ class AzureStorageDocStore { continuationToken, { include: azure.BlobUtilities.BlobListingDetails.METADATA, - location: azure.StorageUtilities.LocationMode.PRIMARY_THEN_SECONDARY, + location: azure.StorageUtilities.LocationMode.PRIMARY_THEN_SECONDARY }, (error, response) => { if (error) { @@ -103,11 +103,11 @@ class AzureStorageDocStore { reject(error) } return resolve(response) - }, + } ) }) entries = entries.concat( - result.entries.map((entry) => { + result.entries.map(entry => { const blobMetadata = entry.metadata return { version: blobMetadata.version, @@ -117,9 +117,9 @@ class AzureStorageDocStore { urn: blobMetadata.urn, fetchedAt: blobMetadata.fetchedat, processedAt: blobMetadata.processedat, - extra: blobMetadata.extra ? JSON.parse(blobMetadata.extra) : undefined, + extra: blobMetadata.extra ? 
JSON.parse(blobMetadata.extra) : undefined } - }), + }) ) } while (continuationToken && entries.length < 10000) return entries @@ -130,7 +130,7 @@ class AzureStorageDocStore { this._ensureDeadletter(type) const blobName = this._getBlobNameFromKey(type, key) return new Promise((resolve, reject) => { - this.service.deleteBlob(this.name, blobName, (error) => { + this.service.deleteBlob(this.name, blobName, error => { if (error) { return reject(error) } @@ -163,7 +163,7 @@ class AzureStorageDocStore { reject(error) } return resolve(response) - }, + } ) }) entryCount += result.entries.length diff --git a/ghcrawler/routes/requests.js b/ghcrawler/routes/requests.js index 61e6a2d4..fb8db609 100644 --- a/ghcrawler/routes/requests.js +++ b/ghcrawler/routes/requests.js @@ -18,12 +18,12 @@ router.post( return response.sendStatus(404) } response.sendStatus(201) - }), + }) ) async function queueRequests(requestSpecs, queueName) { requestSpecs = Array.isArray(requestSpecs) ? requestSpecs : [requestSpecs] - const requests = requestSpecs.map((spec) => rationalizeRequest(spec)) + const requests = requestSpecs.map(spec => rationalizeRequest(spec)) try { return crawlerService.queue(requests, queueName) } catch (error) { diff --git a/index.js b/index.js index f572e85a..4d264077 100644 --- a/index.js +++ b/index.js @@ -10,7 +10,7 @@ const uuid = require('node-uuid') const logger = require('./providers/logging/logger')({ crawlerId: config.get('CRAWLER_ID') || uuid.v4(), crawlerHost: config.get('CRAWLER_HOST'), - buildNumber: config.get('CRAWLER_BUILD_NUMBER') || 'local', + buildNumber: config.get('CRAWLER_BUILD_NUMBER') || 'local' }) run(defaults, logger, searchPath, maps) diff --git a/lib/baseHandler.js b/lib/baseHandler.js index c7de76ec..f9a706a4 100644 --- a/lib/baseHandler.js +++ b/lib/baseHandler.js @@ -27,7 +27,7 @@ class BaseHandler { const tmpBase = config.get('TEMPDIR') || (process.platform === 'win32' ? 'c:/temp/' : '/tmp/') return { unsafeCleanup: true, - template: tmpBase + 'cd-XXXXXX', + template: tmpBase + 'cd-XXXXXX' } } @@ -48,7 +48,7 @@ class BaseHandler { hash.end() resolve(hash.read()) }) - file.on('error', (error) => reject(error)) + file.on('error', error => reject(error)) file.pipe(hash) }) } @@ -74,7 +74,7 @@ class BaseHandler { if (versions.length === 0) return null if (versions.length === 1) return versions[0] return versions - .filter((v) => !this.isPreReleaseVersion(v)) + .filter(v => !this.isPreReleaseVersion(v)) .reduce((max, current) => (semver.gt(current, max) ? current : max), versions[0]) } diff --git a/lib/entitySpec.js b/lib/entitySpec.js index ecb82de6..e513d23f 100644 --- a/lib/entitySpec.js +++ b/lib/entitySpec.js @@ -5,7 +5,7 @@ class EntitySpec { static fromUrl(url) { if (!url) return null const [, type, provider, namespace, name, revision, toolSpec] = url.match( - /.*:\/*([^/]+)\/([^/]+)\/([^/]+)\/([^/]+)\/?([^/]+)?(\/tool\/.+)?/, + /.*:\/*([^/]+)\/([^/]+)\/([^/]+)\/([^/]+)\/?([^/]+)?(\/tool\/.+)?/ ) const [, , toolName, toolVersion] = toolSpec ? toolSpec.split('/') : [] return new EntitySpec(type, provider, namespace, name, revision, toolName, toolVersion) @@ -21,7 +21,7 @@ class EntitySpec { spec.name, spec.revision, spec.tool, - spec.toolVersion, + spec.toolVersion ) } diff --git a/lib/fetchResult.js b/lib/fetchResult.js index d8c8f5bb..6c306873 100644 --- a/lib/fetchResult.js +++ b/lib/fetchResult.js @@ -23,8 +23,8 @@ class FetchResult { adoptCleanup(needCleanup, fromRequest) { if (!needCleanup) return this const cleanups = (Array.isArray(needCleanup) ? 
needCleanup : [needCleanup]) - .map((toCleanup) => toCleanup.removeCallback) - .filter((item) => item) + .map(toCleanup => toCleanup.removeCallback) + .filter(item => item) //transfer the clean up from request to fetchResult this.trackCleanup(cleanups) fromRequest?.removeCleanup(cleanups) @@ -32,7 +32,7 @@ class FetchResult { } cleanup(errorHandler) { - this._cleanups.forEach((cleanup) => { + this._cleanups.forEach(cleanup => { try { cleanup() } catch (error) { @@ -57,7 +57,7 @@ class FetchResult { } removeDependents(...toRemove) { - this._dependents = this._dependents.filter((item) => !toRemove.includes(item)) + this._dependents = this._dependents.filter(item => !toRemove.includes(item)) return this } diff --git a/lib/sourceDiscovery.js b/lib/sourceDiscovery.js index 09170169..a02d4997 100644 --- a/lib/sourceDiscovery.js +++ b/lib/sourceDiscovery.js @@ -49,11 +49,11 @@ async function discoverRevision(version, candidate, options) { function resolveGitHubLocations(locations) { const result = locations - .map((location) => { + .map(location => { var parsedUrl = location ? parseGitHubUrl(location) : null return parsedUrl && parsedUrl.owner && parsedUrl.name ? parsedUrl : null }) - .filter((e) => e) + .filter(e => e) return uniqWith(result, (a, b) => a.owner === b.owner && a.name === b.name) } @@ -62,7 +62,7 @@ function resolveGitHubLocations(locations) { // eslint-disable-next-line no-unused-vars async function discoverFromGitHubRefs(version, candidate, options) { const headers = { - 'User-Agent': 'clearlydefined/scanning', + 'User-Agent': 'clearlydefined/scanning' } const token = options.githubToken if (token) headers.Authorization = 'token ' + token @@ -77,7 +77,7 @@ async function discoverFromGitHubRefs(version, candidate, options) { retryDelay: 250, retryStrategy: request.RetryStrategies.HTTPOrNetworkError, tokenLowerBound: 10, - json: true, + json: true }) if (!refs) return null for (let i = 0; i < refs.length; i++) { diff --git a/lib/utils.js b/lib/utils.js index b01bd371..b455a31e 100644 --- a/lib/utils.js +++ b/lib/utils.js @@ -5,7 +5,7 @@ const { spawn } = require('child_process') const { intersection } = require('lodash') const dateTimeFormats = [ - "EEE MMM d HH:mm:ss 'GMT'ZZ yyyy", //in pom properties + "EEE MMM d HH:mm:ss 'GMT'ZZ yyyy" //in pom properties ] function normalizePath(path) { @@ -15,7 +15,7 @@ function normalizePath(path) { function normalizePaths(paths) { if (!Array.isArray(paths)) return paths - return paths.map((path) => normalizePath(path)) + return paths.map(path => normalizePath(path)) } function trimParents(path, parents) { @@ -29,7 +29,7 @@ function trimParents(path, parents) { function trimAllParents(paths, parents) { if (!Array.isArray(paths)) return paths - return paths.map((path) => trimParents(path, parents)) + return paths.map(path => trimParents(path, parents)) } function isGitFile(file) { @@ -61,12 +61,12 @@ function attachListeners(child, resolve, reject) { let stdoutData = [], stderrData = [] - child.stdout.on('data', (chunk) => stdoutData.push(chunk)) - child.stderr.on('data', (chunk) => stderrData.push(chunk)) + child.stdout.on('data', chunk => stdoutData.push(chunk)) + child.stderr.on('data', chunk => stderrData.push(chunk)) child - .on('error', (err) => reject(err)) - .on('close', (code) => { + .on('error', err => reject(err)) + .on('close', code => { if (code === 0) resolve(stdoutData.join('')) else { const errorFromChild = new Error(stderrData.join('')) @@ -90,5 +90,5 @@ module.exports = { trimAllParents, isGitFile, extractDate, - 
spawnPromisified, + spawnPromisified } diff --git a/providers/fetch/abstractFetch.js b/providers/fetch/abstractFetch.js index 808a3c66..29263e7f 100644 --- a/providers/fetch/abstractFetch.js +++ b/providers/fetch/abstractFetch.js @@ -27,20 +27,20 @@ class AbstractFetch extends BaseHandler { unzip(source, destination) { return new Promise((resolve, reject) => - extract(source, { dir: destination }, (error) => (error ? reject(error) : resolve())), + extract(source, { dir: destination }, error => (error ? reject(error) : resolve())) ) } decompress(source, destination) { return decompress(source, destination, { - filter: (file) => !file.path.endsWith('/'), + filter: file => !file.path.endsWith('/'), plugins: [ decompressTar(), decompressTarbz2(), decompressTargz(), decompressTarxz(), - decompressUnzip({ validateEntrySizes: false }), - ], + decompressUnzip({ validateEntrySizes: false }) + ] }) } } diff --git a/providers/fetch/condaFetch.js b/providers/fetch/condaFetch.js index e67a7259..b56ad869 100644 --- a/providers/fetch/condaFetch.js +++ b/providers/fetch/condaFetch.js @@ -15,10 +15,10 @@ class CondaFetch extends AbstractFetch { this.channels = { 'anaconda-main': 'https://repo.anaconda.com/pkgs/main', 'anaconda-r': 'https://repo.anaconda.com/pkgs/r', - 'conda-forge': 'https://conda.anaconda.org/conda-forge', + 'conda-forge': 'https://conda.anaconda.org/conda-forge' } this.headers = { - 'User-Agent': 'clearlydefined.io crawler (clearlydefined@outlook.com)', + 'User-Agent': 'clearlydefined.io crawler (clearlydefined@outlook.com)' } this.CACHE_DURATION = 8 * 60 * 60 * 1000 // 8 hours } @@ -79,7 +79,7 @@ class CondaFetch extends AbstractFetch { registryData: { channelData: packageChannelData, downloadUrl }, releaseDate: new Date(packageChannelData.timestamp || 0).toISOString(), declaredLicenses: packageChannelData.license, - hashes, + hashes } fetchResult.casedSpec = clone(spec) request.fetchResult = fetchResult.adoptCleanup(dir, request) @@ -101,7 +101,7 @@ class CondaFetch extends AbstractFetch { .filter(packageMatches) .map(([packageFile, packageData]) => { return { packageFile, packageData } - }), + }) ) } if (repoData['packages.conda']) { @@ -110,7 +110,7 @@ class CondaFetch extends AbstractFetch { .filter(packageMatches) .map(([packageFile, packageData]) => { return { packageFile, packageData } - }), + }) ) } packageRepoEntries.sort((a, b) => (b.packageData.timestamp || 0) - (a.packageData.timestamp || 0)) @@ -124,19 +124,19 @@ class CondaFetch extends AbstractFetch { this.logger.info(`No binary architecture specified for ${spec.name}, using architecture: ${architecture}`) } let repoData = undefined - if (!packageChannelData.subdirs.find((x) => x === architecture)) { + if (!packageChannelData.subdirs.find(x => x === architecture)) { return request.markSkip(`Missing architecture ${architecture} for package ${spec.name} in channel`) } repoData = await this.getRepoData(this.channels[spec.provider], spec.provider, architecture) if (!repoData) { return request.markSkip( - `failed to fetch and parse repodata json file for channel ${spec.provider} in architecture ${architecture}`, + `failed to fetch and parse repodata json file for channel ${spec.provider} in architecture ${architecture}` ) } let packageRepoEntries = this._matchPackage(spec.name, version, buildVersion, repoData) if (packageRepoEntries.length === 0) { return request.markSkip( - `Missing package with matching spec (version: ${version}, buildVersion: ${buildVersion}) in ${architecture} repository`, + `Missing package with matching 
spec (version: ${version}, buildVersion: ${buildVersion}) in ${architecture} repository` ) } let packageRepoEntry = packageRepoEntries[0] @@ -156,7 +156,7 @@ class CondaFetch extends AbstractFetch { registryData: { channelData: packageChannelData, repoData: packageRepoEntry, downloadUrl }, releaseDate: new Date(packageRepoEntry.packageData.timestamp || 0).toISOString(), declaredLicenses: packageRepoEntry.packageData.license, - hashes, + hashes } fetchResult.casedSpec = clone(spec) request.fetchResult = fetchResult.adoptCleanup(dir, request) @@ -190,7 +190,7 @@ class CondaFetch extends AbstractFetch { memCache.put(cacheKey, true, cacheDuration) this.logger.info(`Conda: retrieved ${sourceUrl}. Stored data file at ${fileDstLocation}`) return resolve() - }), + }) ) }) } @@ -210,7 +210,7 @@ class CondaFetch extends AbstractFetch { `${condaChannelID}-channelDataFile`, `${condaChannelUrl}/channeldata.json`, this.CACHE_DURATION, - `${this.packageMapFolder}/${condaChannelID}-channelDataFile.json`, + `${this.packageMapFolder}/${condaChannelID}-channelDataFile.json` ) } @@ -219,9 +219,9 @@ class CondaFetch extends AbstractFetch { `${condaChannelID}-repoDataFile-${architecture}`, `${condaChannelUrl}/${architecture}/repodata.json`, this.CACHE_DURATION, - `${this.packageMapFolder}/${condaChannelID}-repoDataFile-${architecture}.json`, + `${this.packageMapFolder}/${condaChannelID}-repoDataFile-${architecture}.json` ) } } -module.exports = (options) => new CondaFetch(options) +module.exports = options => new CondaFetch(options) diff --git a/providers/fetch/cratesioFetch.js b/providers/fetch/cratesioFetch.js index d5ac02a3..e0a5e9bf 100644 --- a/providers/fetch/cratesioFetch.js +++ b/providers/fetch/cratesioFetch.js @@ -35,7 +35,7 @@ class CratesioFetch extends AbstractFetch { releaseDate: version.created_at, location, hashes: await this.computeHashes(zip), - manifest: registryData.manifest, + manifest: registryData.manifest } if (version.crate) { fetchResult.casedSpec = clone(spec) @@ -52,17 +52,17 @@ class CratesioFetch extends AbstractFetch { registryData = await request({ url: `https://crates.io/api/v1/crates/${spec.name}`, json: true, - headers: { 'User-Agent': 'clearlydefined.io crawler (clearlydefined@outlook.com)' }, + headers: { 'User-Agent': 'clearlydefined.io crawler (clearlydefined@outlook.com)' } }) } catch (exception) { if (exception.statusCode !== 404) throw exception return null } if (!registryData.versions) return null - const version = spec.revision || this.getLatestVersion(registryData.versions.map((x) => x.num)) + const version = spec.revision || this.getLatestVersion(registryData.versions.map(x => x.num)) return { manifest: registryData.crate, - version: registryData.versions.find((x) => x.num === version), + version: registryData.versions.find(x => x.num === version) } } @@ -73,15 +73,15 @@ class CratesioFetch extends AbstractFetch { url: `https://crates.io${version.dl_path}`, json: false, encoding: null, - headers: { 'User-Agent': 'clearlydefined.io crawler (clearlydefined@outlook.com)' }, + headers: { 'User-Agent': 'clearlydefined.io crawler (clearlydefined@outlook.com)' } }).pipe( fs .createWriteStream(zip) .on('finish', () => resolve(null)) - .on('error', reject), + .on('error', reject) ) }) } } -module.exports = (options) => new CratesioFetch(options) +module.exports = options => new CratesioFetch(options) diff --git a/providers/fetch/debianFetch.js b/providers/fetch/debianFetch.js index 29c3adf7..64c83029 100644 --- a/providers/fetch/debianFetch.js +++ 
b/providers/fetch/debianFetch.js @@ -23,13 +23,13 @@ const readdir = promisify(fs.readdir) const readFile = promisify(fs.readFile) const providerMap = { - debian: 'http://ftp.debian.org/debian/', + debian: 'http://ftp.debian.org/debian/' } const packageFileMap = { url: 'http://ftp.debian.org/debian/indices/package-file.map.bz2', cacheKey: 'packageFileMap', - cacheDuration: 8 * 60 * 60 * 1000, // 8 hours + cacheDuration: 8 * 60 * 60 * 1000 // 8 hours } const metadataChangelogsUrl = 'https://metadata.ftp-master.debian.org/changelogs/' @@ -68,7 +68,7 @@ class DebianFetch extends AbstractFetch { releaseDate, copyrightUrl, declaredLicenses, - hashes, + hashes }) fetchResult.casedSpec = clone(spec) request.fetchResult = fetchResult.adoptCleanup(dir, request) @@ -99,7 +99,7 @@ class DebianFetch extends AbstractFetch { memCache.put(packageFileMap.cacheKey, true, packageFileMap.cacheDuration) return new Promise((resolve, reject) => { const dom = domain.create() - dom.on('error', (error) => { + dom.on('error', error => { memCache.del(packageFileMap.cacheKey) return reject(error) }) @@ -110,7 +110,7 @@ class DebianFetch extends AbstractFetch { .pipe(fs.createWriteStream(this.packageMapFileLocation)) .on('finish', () => { this.logger.info( - `Debian: retrieved ${packageFileMap.url}. Stored map file at ${this.packageMapFileLocation}`, + `Debian: retrieved ${packageFileMap.url}. Stored map file at ${this.packageMapFileLocation}` ) return resolve() }) @@ -128,7 +128,7 @@ class DebianFetch extends AbstractFetch { let entry = {} const lineReader = linebyline(this.packageMapFileLocation) lineReader - .on('line', (line) => { + .on('line', line => { if (line === '') { if ( [entry.Source, entry.Binary].includes(name) && @@ -146,7 +146,7 @@ class DebianFetch extends AbstractFetch { this.logger.info(`Debian: got ${relevantEntries.length} entries for ${spec.toUrl()}`) return resolve(relevantEntries) }) - .on('error', (error) => reject(error)) + .on('error', error => reject(error)) }) } @@ -159,7 +159,7 @@ class DebianFetch extends AbstractFetch { _ensureArchitecturePresenceForBinary(spec, registryData) { const { architecture } = this._fromSpec(spec) if (spec.type === 'deb' && !architecture) { - const randomBinaryArchitecture = (registryData.find((entry) => entry.Architecture) || {}).Architecture + const randomBinaryArchitecture = (registryData.find(entry => entry.Architecture) || {}).Architecture if (!randomBinaryArchitecture) return false spec.revision += '_' + randomBinaryArchitecture } @@ -170,14 +170,14 @@ class DebianFetch extends AbstractFetch { const isSrc = spec.type === 'debsrc' const { architecture } = this._fromSpec(spec) if (isSrc) { - const sourceAndPatches = registryData.filter((entry) => !entry.Architecture && !entry.Path.endsWith('.dsc')) - const sourcePath = (sourceAndPatches.find((entry) => entry.Path.includes('.orig.tar.')) || {}).Path + const sourceAndPatches = registryData.filter(entry => !entry.Architecture && !entry.Path.endsWith('.dsc')) + const sourcePath = (sourceAndPatches.find(entry => entry.Path.includes('.orig.tar.')) || {}).Path const source = sourcePath ? new URL(providerMap.debian + sourcePath).href : null - const patchPath = (sourceAndPatches.find((entry) => !entry.Path.includes('.orig.tar.')) || {}).Path + const patchPath = (sourceAndPatches.find(entry => !entry.Path.includes('.orig.tar.')) || {}).Path const patches = patchPath ? 
new URL(providerMap.debian + patchPath).href : null return { source, patches } } - const binary = new URL(providerMap.debian + registryData.find((entry) => entry.Architecture === architecture).Path) + const binary = new URL(providerMap.debian + registryData.find(entry => entry.Architecture === architecture).Path) .href return { binary } } @@ -212,7 +212,7 @@ class DebianFetch extends AbstractFetch { async _download(downloadUrl, destination) { return new Promise((resolve, reject) => { const dom = domain.create() - dom.on('error', (error) => reject(error)) + dom.on('error', error => reject(error)) dom.run(() => { nodeRequest .get(downloadUrl, (error, response) => { @@ -234,7 +234,7 @@ class DebianFetch extends AbstractFetch { const fullName = path.join(destination, name) entry.fileData().pipe(fs.createWriteStream(fullName)).on('finish', next) }) - reader.on('error', (error) => { + reader.on('error', error => { reject(error) }) reader.on('end', () => { @@ -267,12 +267,12 @@ class DebianFetch extends AbstractFetch { if (!locationStat.isDirectory()) return [location] const subdirs = await readdir(location) const files = await Promise.all( - subdirs.map((subdir) => { + subdirs.map(subdir => { const entry = path.resolve(location, subdir) return this._getFiles(entry) - }), + }) ) - return flatten(files).filter((x) => x) + return flatten(files).filter(x => x) } async _getSourceDirectoryName(location) { @@ -286,7 +286,7 @@ class DebianFetch extends AbstractFetch { const orderedPatches = (await readFile(patchesSeriesLocation)) .toString() .split('\n') - .filter((patch) => patch && !patch.trim().startsWith('#') && !patch.trim().startsWith('|')) + .filter(patch => patch && !patch.trim().startsWith('#') && !patch.trim().startsWith('|')) for (let patchFileName of orderedPatches) { const patchCommand = `patch -p01 -i ${path.join(patchesLocation, 'patches', patchFileName)}` try { @@ -300,7 +300,7 @@ class DebianFetch extends AbstractFetch { } _getCopyrightUrl(registryData) { - const entry = registryData.find((entry) => entry.Source) + const entry = registryData.find(entry => entry.Source) if (!entry) return null // Example: ./pool/main/0/0ad/0ad_0.0.17-1.debian.tar.xz -> main/0 const pathFragment = entry.Path.replace('./pool/', '').split('/').slice(0, 2).join('/') @@ -328,15 +328,15 @@ class DebianFetch extends AbstractFetch { const licensesSet = new Set() const licenses = copyrightResponse .split('\n') - .filter((line) => line.startsWith('License: ')) - .map((line) => line.replace('License:', '').trim()) - .map((licenseId) => { + .filter(line => line.startsWith('License: ')) + .map(line => line.replace('License:', '').trim()) + .map(licenseId => { if (licenseId.includes('CPL') && !licenseId.includes('RSCPL')) licenseId = licenseId.replace('CPL', 'CPL-1.0') if (licenseId.toLowerCase().includes('expat')) licenseId = licenseId.replace(/expat/i, 'MIT') return licenseId }) // Over-simplified parsing of edge cases: - licenses.forEach((licenseId) => { + licenses.forEach(licenseId => { if (licenseId.includes(' or ') && !licenseId.includes(',')) { // A or B and C => (A OR B AND C) licenseId = licenseId.replace(' or ', ' OR ') @@ -345,7 +345,7 @@ class DebianFetch extends AbstractFetch { } else if (licenseId.includes(' or ') && licenseId.includes(',')) { // A or B, and C => (A OR B) AND C licenseId = licenseId.replace(' or ', ' OR ') - licenseId.split(' and ').forEach((part) => { + licenseId.split(' and ').forEach(part => { if (part.includes('OR') && part.endsWith(',')) { licensesSet.add('(' + part.replace(',', 
')')) } else { @@ -353,7 +353,7 @@ class DebianFetch extends AbstractFetch { } }) } else if (licenseId.includes(' and ')) { - licenseId.split(' and ').forEach((part) => licensesSet.add(part)) + licenseId.split(' and ').forEach(part => licensesSet.add(part)) } else { licensesSet.add(licenseId) } @@ -362,4 +362,4 @@ class DebianFetch extends AbstractFetch { } } -module.exports = (options) => new DebianFetch(options) +module.exports = options => new DebianFetch(options) diff --git a/providers/fetch/dispatcher.js b/providers/fetch/dispatcher.js index eec52ef4..815dccbe 100644 --- a/providers/fetch/dispatcher.js +++ b/providers/fetch/dispatcher.js @@ -97,17 +97,17 @@ class FetchDispatcher extends AbstractFetch { cacheKey, fetchResult, this._cleanupResult.bind(this), - (key, result) => !result.isInUse(), + (key, result) => !result.isInUse() ) } _cleanupResult(key, result) { - result.cleanup((error) => this.logger.info(`Cleanup Problem cleaning up after ${key} ${error.message}`)) + result.cleanup(error => this.logger.info(`Cleanup Problem cleaning up after ${key} ${error.message}`)) } // get all the handler that apply to this request from the given list of handlers _getHandler(request, list) { - return list.filter((element) => element.canHandle(request))[0] + return list.filter(element => element.canHandle(request))[0] } } diff --git a/providers/fetch/gitCloner.js b/providers/fetch/gitCloner.js index 43080240..c28f9ff1 100644 --- a/providers/fetch/gitCloner.js +++ b/providers/fetch/gitCloner.js @@ -9,7 +9,7 @@ const FetchResult = require('../../lib/fetchResult') const providerMap = { gitlab: 'https://gitlab.com', - github: 'https://github.com', + github: 'https://github.com' } class GitCloner extends AbstractFetch { @@ -54,7 +54,7 @@ class GitCloner extends AbstractFetch { return new Promise((resolve, reject) => { exec( `cd ${dirName} && git clone ${sourceUrl} --quiet && cd ${specName} ${reset} && git count-objects -v`, - (error, stdout) => (error ? reject(error) : resolve(this._getRepoSize(stdout))), + (error, stdout) => (error ? reject(error) : resolve(this._getRepoSize(stdout))) ) }) } @@ -62,7 +62,7 @@ class GitCloner extends AbstractFetch { _getDate(dirName, specName) { return new Promise((resolve, reject) => { exec(`cd ${dirName}/${specName} && git show -s --format=%ci`, (error, stdout) => - error ? reject(error) : resolve(new Date(stdout.trim())), + error ? reject(error) : resolve(new Date(stdout.trim())) ) }) } @@ -75,7 +75,7 @@ class GitCloner extends AbstractFetch { _getRevision(dirName, specName) { return new Promise((resolve, reject) => { exec(`cd ${dirName}/${specName} && git rev-parse HEAD`, (error, stdout) => - error ? reject(error) : resolve(stdout.trim()), + error ? reject(error) : resolve(stdout.trim()) ) }) } @@ -87,7 +87,7 @@ class GitCloner extends AbstractFetch { _deleteGitDatabase(dirName, specName) { return new Promise((resolve, reject) => { - rimraf(`${dirName}/${specName}/.git`, (error) => { + rimraf(`${dirName}/${specName}/.git`, error => { error ? 
diff --git a/providers/fetch/gitCloner.js b/providers/fetch/gitCloner.js
index 43080240..c28f9ff1 100644
--- a/providers/fetch/gitCloner.js
+++ b/providers/fetch/gitCloner.js
@@ -9,7 +9,7 @@ const FetchResult = require('../../lib/fetchResult')

 const providerMap = {
   gitlab: 'https://gitlab.com',
-  github: 'https://github.com',
+  github: 'https://github.com'
 }

 class GitCloner extends AbstractFetch {
@@ -54,7 +54,7 @@ class GitCloner extends AbstractFetch {
     return new Promise((resolve, reject) => {
       exec(
         `cd ${dirName} && git clone ${sourceUrl} --quiet && cd ${specName} ${reset} && git count-objects -v`,
-        (error, stdout) => (error ? reject(error) : resolve(this._getRepoSize(stdout))),
+        (error, stdout) => (error ? reject(error) : resolve(this._getRepoSize(stdout)))
       )
     })
   }
@@ -62,7 +62,7 @@ class GitCloner extends AbstractFetch {
   _getDate(dirName, specName) {
     return new Promise((resolve, reject) => {
       exec(`cd ${dirName}/${specName} && git show -s --format=%ci`, (error, stdout) =>
-        error ? reject(error) : resolve(new Date(stdout.trim())),
+        error ? reject(error) : resolve(new Date(stdout.trim()))
       )
     })
   }
@@ -75,7 +75,7 @@ class GitCloner extends AbstractFetch {
   _getRevision(dirName, specName) {
     return new Promise((resolve, reject) => {
       exec(`cd ${dirName}/${specName} && git rev-parse HEAD`, (error, stdout) =>
-        error ? reject(error) : resolve(stdout.trim()),
+        error ? reject(error) : resolve(stdout.trim())
       )
     })
   }
@@ -87,7 +87,7 @@ class GitCloner extends AbstractFetch {

   _deleteGitDatabase(dirName, specName) {
     return new Promise((resolve, reject) => {
-      rimraf(`${dirName}/${specName}/.git`, (error) => {
+      rimraf(`${dirName}/${specName}/.git`, error => {
         error ? reject(error) : resolve()
       })
     })
@@ -99,4 +99,4 @@
   }
 }

-module.exports = (options) => new GitCloner(options)
+module.exports = options => new GitCloner(options)
diff --git a/providers/fetch/goFetch.js b/providers/fetch/goFetch.js
index ebcf8d2f..e666168b 100644
--- a/providers/fetch/goFetch.js
+++ b/providers/fetch/goFetch.js
@@ -10,7 +10,7 @@ const { parse: spdxParser } = require('@clearlydefined/spdx')
 const FetchResult = require('../../lib/fetchResult')

 const providerMap = {
-  golang: 'https://proxy.golang.org',
+  golang: 'https://proxy.golang.org'
 }

 class GoFetch extends AbstractFetch {
@@ -20,9 +20,9 @@ class GoFetch extends AbstractFetch {
     axiosRetry(axios, {
       retries: 5,
       retryDelay: exponentialDelay,
-      retryCondition: (err) => {
+      retryCondition: err => {
         return isNetworkOrIdempotentRequestError(err) || err.response?.status == 429
-      },
+      }
     })
     this.options.http = options.http || axios
   }
@@ -111,7 +111,7 @@ class GoFetch extends AbstractFetch {
   async _getArtifact(spec, destination) {
     const url = this._buildUrl(spec)

-    const status = await new Promise((resolve) => {
+    const status = await new Promise(resolve => {
       nodeRequest
         .get(url, (error, response) => {
           if (error) this.logger.error(this._google_proxy_error_string(error))
@@ -139,7 +139,7 @@ class GoFetch extends AbstractFetch {
   async _getRegistryData(spec) {
     const registryLicenseUrl = this._replace_encodings(
-      this._remove_blank_fields(`https://pkg.go.dev/${spec.namespace}/${spec.name}@${spec.revision}?tab=licenses`),
+      this._remove_blank_fields(`https://pkg.go.dev/${spec.namespace}/${spec.name}@${spec.revision}?tab=licenses`)
     )
     try {
       // Based on this discussion https://github.com/golang/go/issues/36785, there is no API for pkg.go.dev for now.
@@ -147,10 +147,10 @@ class GoFetch extends AbstractFetch {
       const root = htmlParser(response.data)
       // Here is the license html template file.
       // https://github.com/golang/pkgsite/blob/master/static/frontend/unit/licenses/licenses.tmpl
-      const licenses = root.querySelectorAll('[id^=#lic-]').map((ele) => ele.textContent)
+      const licenses = root.querySelectorAll('[id^=#lic-]').map(ele => ele.textContent)
       if (this._validateLicenses(licenses)) {
         return {
-          licenses,
+          licenses
         }
       } else {
         this.logger.info(`Licenses from html could not be parsed. The licenses are ${JSON.stringify(licenses)}.`)
@@ -168,7 +168,7 @@ class GoFetch extends AbstractFetch {
         throw new RequeueError(msg)
       }
       this.logger.info(
-        `Getting declared license from pkg.go.dev failed. ${JSON.stringify(err.response?.data || err.request || err.message)}`,
+        `Getting declared license from pkg.go.dev failed. ${JSON.stringify(err.response?.data || err.request || err.message)}`
       )
     }
   }
@@ -196,4 +196,4 @@ class RequeueError extends Error {
   }
 }

-module.exports = (options) => new GoFetch(options)
+module.exports = options => new GoFetch(options)
diff --git a/providers/fetch/gradlePluginFetch.js b/providers/fetch/gradlePluginFetch.js
index f5d4da09..7a567de4 100644
--- a/providers/fetch/gradlePluginFetch.js
+++ b/providers/fetch/gradlePluginFetch.js
@@ -7,9 +7,9 @@ class GradlePluginFetch extends MavenBasedFetch {
   constructor(options) {
     super(
       {
-        gradleplugin: 'https://plugins.gradle.org/m2/',
+        gradleplugin: 'https://plugins.gradle.org/m2/'
       },
-      options,
+      options
     )
   }

@@ -20,4 +20,4 @@
   }
 }

-module.exports = (options) => new GradlePluginFetch(options)
+module.exports = options => new GradlePluginFetch(options)
diff --git a/providers/fetch/mavenBasedFetch.js b/providers/fetch/mavenBasedFetch.js
index fcb21c29..a505cbfc 100644
--- a/providers/fetch/mavenBasedFetch.js
+++ b/providers/fetch/mavenBasedFetch.js
@@ -20,7 +20,7 @@ const extensionMap = {
   sourcesJar: '-sources.jar',
   pom: '.pom',
   aar: '.aar',
-  jar: '.jar',
+  jar: '.jar'
 }

 const defaultHeaders = { headers: { 'User-Agent': 'clearlydefined.io crawler (clearlydefined@outlook.com)' } }
@@ -94,7 +94,7 @@ class MavenBasedFetch extends AbstractFetch {
     const extensions = spec.type === 'sourcearchive' ? [extensionMap.sourcesJar] : [extensionMap.jar, extensionMap.aar]
     for (let extension of extensions) {
       const url = this._buildUrl(spec, extension)
-      const status = await new Promise((resolve) => {
+      const status = await new Promise(resolve => {
         this._handleRequestStream(url, (error, response) => {
           if (error) this.logger.error(error)
           if (response.statusCode !== 200) return resolve(false)
@@ -135,7 +135,7 @@ class MavenBasedFetch extends AbstractFetch {
       spec.provider,
       parent.groupId[0].trim(),
       parent.artifactId[0].trim(),
-      parent.version[0].trim(),
+      parent.version[0].trim()
     )
   }

@@ -176,7 +176,7 @@ class MavenBasedFetch extends AbstractFetch {
     const subdirs = await readdir(location)
     return subdirs.reduce((prev, subdir) => {
       const entry = path.resolve(location, subdir)
-      return prev.then((result) => result || MavenBasedFetch._findAnyFileStat(entry))
+      return prev.then(result => result || MavenBasedFetch._findAnyFileStat(entry))
     }, Promise.resolve())
   }

diff --git a/providers/fetch/mavenGoogleFetch.js b/providers/fetch/mavenGoogleFetch.js
index af6c9c52..e2f24bf9 100644
--- a/providers/fetch/mavenGoogleFetch.js
+++ b/providers/fetch/mavenGoogleFetch.js
@@ -7,9 +7,9 @@ class MavenGoogleFetch extends MavenBasedFetch {
   constructor(options) {
     super(
       {
-        mavengoogle: 'https://dl.google.com/android/maven2/',
+        mavengoogle: 'https://dl.google.com/android/maven2/'
       },
-      options,
+      options
     )
   }
   //The format for source url is: https://dl.google.com/android/maven2/groudId1/groupdId2/artifactId/revision/artifactId-revision-sources.jar
@@ -23,4 +23,4 @@
   }
 }

-module.exports = (options) => new MavenGoogleFetch(options)
+module.exports = options => new MavenGoogleFetch(options)
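// For context: MavenBasedFetch._findAnyFileStat above chains subdirectory
// probes with reduce(...).then(...) so they run sequentially and the first
// truthy result short-circuits the rest. The same pattern in isolation, with
// a stubbed probe in place of the real stat call:
const probe = name => Promise.resolve(name.startsWith('maven') ? name : null) // stand-in
const findFirst = entries =>
  entries.reduce((prev, entry) => prev.then(result => result || probe(entry)), Promise.resolve())

findFirst(['gradle', 'maven-metadata', 'pom']).then(console.log) // => maven-metadata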
diff --git a/providers/fetch/mavencentralFetch.js b/providers/fetch/mavencentralFetch.js
index 661e6a47..8215fb1c 100644
--- a/providers/fetch/mavencentralFetch.js
+++ b/providers/fetch/mavencentralFetch.js
@@ -8,9 +8,9 @@ class MavenCentralFetch extends MavenBasedFetch {
   constructor(options) {
     super(
       {
-        mavencentral: 'https://search.maven.org/remotecontent?filepath=',
+        mavencentral: 'https://search.maven.org/remotecontent?filepath='
       },
-      options,
+      options
     )
   }
   // query maven to get the latest version if we don't already have that.
@@ -32,4 +32,4 @@
   }
 }

-module.exports = (options) => new MavenCentralFetch(options)
+module.exports = options => new MavenCentralFetch(options)
diff --git a/providers/fetch/npmjsFetch.js b/providers/fetch/npmjsFetch.js
index bd6958f6..8eb7861b 100644
--- a/providers/fetch/npmjsFetch.js
+++ b/providers/fetch/npmjsFetch.js
@@ -9,7 +9,7 @@ const { clone, get } = require('lodash')
 const FetchResult = require('../../lib/fetchResult')

 const providerMap = {
-  npmjs: 'https://registry.npmjs.com',
+  npmjs: 'https://registry.npmjs.com'
 }

 class NpmFetch extends AbstractFetch {
@@ -63,7 +63,7 @@ class NpmFetch extends AbstractFetch {
     try {
       registryData = await requestPromise({
         url: `${baseUrl}/${encodeURIComponent(fullName).replace('%40', '@')}`, // npmjs doesn't handle the escaped version
-        json: true,
+        json: true
       })
     } catch (exception) {
       if (exception.statusCode !== 404) throw exception
@@ -109,4 +109,4 @@
   }
 }

-module.exports = (options) => new NpmFetch(options)
+module.exports = options => new NpmFetch(options)
diff --git a/providers/fetch/nugetFetch.js b/providers/fetch/nugetFetch.js
index 8d5214b5..f9b5780f 100644
--- a/providers/fetch/nugetFetch.js
+++ b/providers/fetch/nugetFetch.js
@@ -11,7 +11,7 @@ const requestRetry = require('requestretry').defaults({ maxAttempts: 3, fullResp
 const FetchResult = require('../../lib/fetchResult')

 const providerMap = {
-  nuget: 'https://api.nuget.org',
+  nuget: 'https://api.nuget.org'
 }

 class NuGetFetch extends AbstractFetch {
@@ -50,7 +50,7 @@ class NuGetFetch extends AbstractFetch {
       location,
       metadataLocation,
       releaseDate: registryData ? new Date(registryData.published).toISOString() : null,
-      hashes: await this.computeHashes(zip),
+      hashes: await this.computeHashes(zip)
     }
     if (manifest.licenseUrl) {
       await this._downloadLicense({ dirName: location, licenseUrl: manifest.licenseUrl })
@@ -71,7 +71,7 @@ class NuGetFetch extends AbstractFetch {
     // https://api.nuget.org/v3/registration5-gz-semver2/microsoft.powershell.native/7.0.0-preview.1.json
     const { body, statusCode } = await requestRetry.get(
       `${baseUrl}/v3/registration5-gz-semver2/${spec.name.toLowerCase()}/${spec.revision}.json`,
-      { gzip: true },
+      { gzip: true }
     )
     return statusCode !== 200 || !body ? null : JSON.parse(body)
   }
@@ -79,10 +79,8 @@ class NuGetFetch extends AbstractFetch {
   // https://docs.microsoft.com/en-us/nuget/reference/package-versioning#normalized-version-numbers
   _normalizeVersion(version) {
     const parts = version.split('-')
-    const trimmed = parts[0].split('.').map((part) => trimStart(part, '0') || '0')
-    return [(trimmed[3] === '0' ? trimmed.slice(0, 3) : trimmed).join('.'), ...parts.slice(1)]
-      .filter((x) => x)
-      .join('-')
+    const trimmed = parts[0].split('.').map(part => trimStart(part, '0') || '0')
+    return [(trimmed[3] === '0' ? trimmed.slice(0, 3) : trimmed).join('.'), ...parts.slice(1)].filter(x => x).join('-')
   }

   async _getLatestVersion(name) {
@@ -90,11 +88,11 @@ class NuGetFetch extends AbstractFetch {
     // Example: https://api.nuget.org/v3-flatcontainer/moq/index.json
     const baseUrl = providerMap.nuget
     const { body, statusCode } = await requestRetry.get(`${baseUrl}/v3-flatcontainer/${name}/index.json`, {
-      json: true,
+      json: true
     })
     // If statusCode is not 200, XML may be returned
     if (statusCode === 200 && body.versions) {
-      const versions = body.versions.filter((version) => !version.includes('build'))
+      const versions = body.versions.filter(version => !version.includes('build'))
       return versions[versions.length - 1] // the versions are already sorted
     }
     return null
@@ -122,7 +120,7 @@ class NuGetFetch extends AbstractFetch {
     // https://docs.microsoft.com/en-us/nuget/api/package-base-address-resource#download-package-manifest-nuspec
     // Example: https://api.nuget.org/v3-flatcontainer/newtonsoft.json/11.0.1/newtonsoft.json.nuspec
     const { body, statusCode } = await requestRetry.get(
-      `https://api.nuget.org/v3-flatcontainer/${spec.name.toLowerCase()}/${spec.revision}/${spec.name.toLowerCase()}.nuspec`,
+      `https://api.nuget.org/v3-flatcontainer/${spec.name.toLowerCase()}/${spec.revision}/${spec.name.toLowerCase()}.nuspec`
     )
     if (statusCode !== 200) return null
     return body
@@ -132,11 +130,11 @@ class NuGetFetch extends AbstractFetch {
     const location = {
       manifest: path.join(dir.name, 'manifest.json'),
       nuspec: path.join(dir.name, 'nuspec.xml'),
-      latestNuspec: latestNuspec ? path.join(dir.name, 'latestNuspec.xml') : null,
+      latestNuspec: latestNuspec ? path.join(dir.name, 'latestNuspec.xml') : null
     }
     await Promise.all([
       promisify(fs.writeFile)(location.manifest, JSON.stringify(manifest)),
-      promisify(fs.writeFile)(location.nuspec, nuspec),
+      promisify(fs.writeFile)(location.nuspec, nuspec)
     ])
     if (latestNuspec) {
       await promisify(fs.writeFile)(location.latestNuspec, latestNuspec)
@@ -154,4 +152,4 @@
   }
 }

-module.exports = (options) => new NuGetFetch(options)
+module.exports = options => new NuGetFetch(options)
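// A worked, standalone sketch of the NuGet version normalization implemented
// in _normalizeVersion above (assumes lodash, which the fetcher already uses):
const { trimStart } = require('lodash')
const normalizeVersion = version => {
  const parts = version.split('-')
  const trimmed = parts[0].split('.').map(part => trimStart(part, '0') || '0')
  return [(trimmed[3] === '0' ? trimmed.slice(0, 3) : trimmed).join('.'), ...parts.slice(1)].filter(x => x).join('-')
}
console.log(normalizeVersion('1.05.2.0-beta.1')) // => 1.5.2-beta.1 (leading zeros trimmed, fourth ".0" dropped)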
diff --git a/providers/fetch/packagistFetch.js b/providers/fetch/packagistFetch.js
index dfd8825a..8f8ce338 100644
--- a/providers/fetch/packagistFetch.js
+++ b/providers/fetch/packagistFetch.js
@@ -11,7 +11,7 @@ const readdir = promisify(fs.readdir)
 const FetchResult = require('../../lib/fetchResult')

 const providerMap = {
-  packagist: 'https://repo.packagist.org/',
+  packagist: 'https://repo.packagist.org/'
 }

 class PackagistFetch extends AbstractFetch {
@@ -42,7 +42,7 @@ class PackagistFetch extends AbstractFetch {
     let registryData
     const baseUrl = providerMap.packagist
     const { body, statusCode } = await requestRetry.get(`${baseUrl}/p/${spec.namespace}/${spec.name}.json`, {
-      json: true,
+      json: true
    })
     if (statusCode !== 200 || !body) return null
     registryData = body
@@ -63,8 +63,8 @@ class PackagistFetch extends AbstractFetch {
       const options = {
         url: distUrl,
         headers: {
-          'User-Agent': 'clearlydefined.io crawler (clearlydefined@outlook.com)',
-        },
+          'User-Agent': 'clearlydefined.io crawler (clearlydefined@outlook.com)'
+        }
       }
       nodeRequest
         .get(options, (error, response) => {
@@ -85,4 +85,4 @@
   }
 }

-module.exports = (options) => new PackagistFetch(options)
+module.exports = options => new PackagistFetch(options)
diff --git a/providers/fetch/podFetch.js b/providers/fetch/podFetch.js
index 5e3665c5..dd6dcc91 100644
--- a/providers/fetch/podFetch.js
+++ b/providers/fetch/podFetch.js
@@ -13,7 +13,7 @@ const FetchResult = require('../../lib/fetchResult')

 const services = {
   trunk: 'https://trunk.cocoapods.org/api/v1',
-  specs: 'https://raw.githubusercontent.com/CocoaPods/Specs/master',
+  specs: 'https://raw.githubusercontent.com/CocoaPods/Specs/master'
 }

 class PodFetch extends AbstractFetch {
@@ -43,7 +43,7 @@ class PodFetch extends AbstractFetch {
     fetchResult.document = {
       location: location,
       registryData: registryData,
-      releaseDate: version.created_at,
+      releaseDate: version.created_at
     }

     if (registryData.name) {
@@ -60,9 +60,9 @@ class PodFetch extends AbstractFetch {
       registryData = await request({
         url: `${services.specs}/Specs/${this._masterRepoPathFragment(spec, [1, 1, 1])}/${spec.name}.podspec.json`,
         headers: {
-          Authorization: this.options.githubToken ? `token ${this.options.githubToken}` : '',
+          Authorization: this.options.githubToken ? `token ${this.options.githubToken}` : ''
         },
-        json: true,
+        json: true
       })
     } catch (exception) {
       if (exception.statusCode !== 404) throw exception
@@ -97,7 +97,7 @@ class PodFetch extends AbstractFetch {
             await this.decompress(archive, output)
             resolve(output)
           })
-          .on('error', reject),
+          .on('error', reject)
       )
     })
   }
@@ -120,25 +120,25 @@ class PodFetch extends AbstractFetch {

     const cloneCommands = [
       `git -C "${dir.name}" clone ${cloneOptions.join(' ')} ${repo} "${outputDirName}"`,
-      `git -C "${output}" reset --quiet --hard ${rev}`,
+      `git -C "${output}" reset --quiet --hard ${rev}`
     ]

     return new Promise((resolve, reject) => {
-      exec(cloneCommands.join(' && '), (error) => (error ? reject(error) : resolve(output)))
+      exec(cloneCommands.join(' && '), error => (error ? reject(error) : resolve(output)))
     })
   }

   async _getVersion(spec) {
     // Example: https://trunk.cocoapods.org/api/v1/pods/SwiftLCS
     const { body, statusCode } = await requestRetry.get(`${services.trunk}/pods/${spec.name}`, {
-      json: true,
+      json: true
     })

     if (statusCode === 200 && body.versions) {
       const versions = body.versions
       if (spec.revision) {
-        return versions.find((version) => version.name === spec.revision)
+        return versions.find(version => version.name === spec.revision)
       } else {
         return versions[versions.length - 1] // the versions are already sorted
       }
@@ -171,4 +171,4 @@
   }
 }

-module.exports = (options) => new PodFetch(options)
+module.exports = options => new PodFetch(options)
diff --git a/providers/fetch/pypiFetch.js b/providers/fetch/pypiFetch.js
index 09b687ac..fa7d8731 100644
--- a/providers/fetch/pypiFetch.js
+++ b/providers/fetch/pypiFetch.js
@@ -10,7 +10,7 @@ const { findLastKey, get, find, clone } = require('lodash')
 const FetchResult = require('../../lib/fetchResult')

 const providerMap = {
-  pypi: 'https://pypi.python.org',
+  pypi: 'https://pypi.python.org'
 }

 class PyPiFetch extends AbstractFetch {
@@ -47,7 +47,7 @@ class PyPiFetch extends AbstractFetch {
   async _getRegistryData(spec) {
     const baseUrl = providerMap.pypi
     const { body, statusCode } = await requestRetry.get(`${baseUrl}/pypi/${spec.name}/json`, {
-      json: true,
+      json: true
     })
     if (statusCode !== 200 || !body) return null
     return body
@@ -66,7 +66,7 @@ class PyPiFetch extends AbstractFetch {
   _extractReleaseDate(spec, registryData) {
     const releaseTypes = get(registryData, ['releases', spec.revision])
-    const release = find(releaseTypes, (entry) => {
+    const release = find(releaseTypes, entry => {
       return entry.url && entry.url.length > 6 && entry.url.slice(-6) === 'tar.gz'
     })
     if (!release) return
@@ -95,7 +95,7 @@ class PyPiFetch extends AbstractFetch {
   async _getPackage(spec, registryData, destination) {
     const releaseTypes = get(registryData, ['releases', spec.revision])
-    const release = find(releaseTypes, (entry) => entry.url?.endsWith('tar.gz') || entry.url?.endsWith('zip'))
+    const release = find(releaseTypes, entry => entry.url?.endsWith('tar.gz') || entry.url?.endsWith('zip'))
     if (!release) return false

     return new Promise((resolve, reject) => {
@@ -109,4 +109,4 @@
   }
 }

-module.exports = (options) => new PyPiFetch(options)
+module.exports = options => new PyPiFetch(options)
diff --git a/providers/fetch/rubyGemsFetch.js b/providers/fetch/rubyGemsFetch.js
index 4e1bb0a6..c3838c70 100644
--- a/providers/fetch/rubyGemsFetch.js
+++ b/providers/fetch/rubyGemsFetch.js
@@ -12,7 +12,7 @@ const FetchResult = require('../../lib/fetchResult')
 const { extractDate } = require('../../lib/utils')

 const providerMap = {
-  rubyGems: 'https://rubygems.org',
+  rubyGems: 'https://rubygems.org'
 }

 class RubyGemsFetch extends AbstractFetch {
@@ -49,7 +49,7 @@ class RubyGemsFetch extends AbstractFetch {
   async _getRegistryData(spec) {
     const baseUrl = providerMap.rubyGems
     const { body, statusCode } = await requestRetry.get(`${baseUrl}/api/v1/gems/${spec.name}.json`, {
-      json: true,
+      json: true
     })
     return statusCode === 200 && body ? body : null
   }
@@ -77,8 +77,8 @@ class RubyGemsFetch extends AbstractFetch {
     await new Promise((resolve, reject) => {
       fs.createReadStream(`${dirName}/metadata.gz`)
         .pipe(zlib.createGunzip())
-        .on('data', (data) => {
-          fs.writeFile(`${dirName}/metadata.txt`, data, (error) => {
+        .on('data', data => {
+          fs.writeFile(`${dirName}/metadata.txt`, data, error => {
             if (error) return reject(error)
             return resolve()
           })
@@ -105,4 +105,4 @@
   }
 }

-module.exports = (options) => new RubyGemsFetch(options)
+module.exports = options => new RubyGemsFetch(options)
diff --git a/providers/filter/filter.js b/providers/filter/filter.js
index 5f23dc04..e38fc855 100644
--- a/providers/filter/filter.js
+++ b/providers/filter/filter.js
@@ -23,7 +23,7 @@ class StandardFilter extends AbstractProcessor {
   }

   _getProcessor(request) {
-    return this.processors.filter((processor) => processor.canHandle(request))[0]
+    return this.processors.filter(processor => processor.canHandle(request))[0]
   }
 }
diff --git a/providers/index.js b/providers/index.js
index 3c772e42..e73ac303 100644
--- a/providers/index.js
+++ b/providers/index.js
@@ -7,7 +7,7 @@ const providers = require('../ghcrawler').providers
 module.exports = {
   filter: {
     provider: 'filter',
-    filter: require('./filter/filter'),
+    filter: require('./filter/filter')
   },
   fetch: {
     cdDispatch: require('./fetch/dispatcher'),
@@ -24,7 +24,7 @@ module.exports = {
     npmjs: require('./fetch/npmjsFetch'),
     nuget: require('./fetch/nugetFetch'),
     pypi: require('./fetch/pypiFetch'),
-    rubygems: require('./fetch/rubyGemsFetch'),
+    rubygems: require('./fetch/rubyGemsFetch')
   },
   process: {
     cdsource: require('./process/sourceExtract'),
@@ -48,13 +48,13 @@ module.exports = {
     scancode: require('./process/scancode'),
     fossology: require('./process/fossology'),
     source: require('./process/source').processor,
-    top: require('./process/top'),
+    top: require('./process/top')
   },
   store: {
     cdDispatch: require('./store/storeDispatcher'),
     webhook: require('./store/webhookDeltaStore'),
     azqueue: require('./store/azureQueueStore'),
     'cd(azblob)': AttachmentStoreFactory(providers.store.azblob),
-    'cd(file)': AttachmentStoreFactory(providers.store.file),
-  },
+    'cd(file)': AttachmentStoreFactory(providers.store.file)
+  }
 }
diff --git a/providers/logging/logger.js b/providers/logging/logger.js
index 73f4ad42..147880e5 100644
--- a/providers/logging/logger.js
+++ b/providers/logging/logger.js
@@ -14,7 +14,7 @@ function factory(tattoos) {
     insights: appInsights,
     treatErrorsAsExceptions: true,
     exitOnError: false,
-    level: 'info',
+    level: 'info'
   })
   return result
 }
diff --git a/providers/process/abstractClearlyDefinedProcessor.js b/providers/process/abstractClearlyDefinedProcessor.js
index 154d6749..506d860c 100644
--- a/providers/process/abstractClearlyDefinedProcessor.js
+++ b/providers/process/abstractClearlyDefinedProcessor.js
@@ -39,13 +39,13 @@ class AbstractClearlyDefinedProcessor extends AbstractProcessor {
     const fileList = await this.filterFiles(location)
     const files = await Promise.all(
       fileList.map(
-        throat(10, async (file) => {
+        throat(10, async file => {
           if (this._isInterestinglyNamed(file, interestingRoot))
             await this.attachFiles(request.document, [file], location)
           const hashes = await this.computeHashes(path.join(location, file))
           return { path: file, hashes }
-        }),
-      ),
+        })
+      )
     )
     request.document.files = files
   }
@@ -62,7 +62,7 @@ class AbstractClearlyDefinedProcessor extends AbstractProcessor {
       'NOTICE',
       'NOTICES',
       'CONTRIBUTORS',
-      'PATENTS',
+      'PATENTS'
     ]
     const extensions = ['.MD', '.HTML', '.TXT']
     const extension = path.extname(name)
@@ -74,11 +74,11 @@ class AbstractClearlyDefinedProcessor extends AbstractProcessor {
   async _computeSize(location) {
     let count = 0
     const bytes = await du(location, {
-      filter: (file) => {
+      filter: file => {
         if (isGitFile(file)) return false
         count++
         return true
-      },
+      }
     })
     return { k: Math.round(bytes / 1024), count }
   }
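// For context: the file-hashing hunk above wraps its per-file work in
// throat(10, ...) so at most ten operations run concurrently. The same pattern
// in isolation (hashFile is a stub, not the crawler's implementation):
const throat = require('throat')
const hashFile = async file => ({ path: file, hashes: { sha1: '(stub)' } })
const files = ['LICENSE', 'README.md', 'package.json']
Promise.all(files.map(throat(10, hashFile))).then(console.log)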
diff --git a/providers/process/abstractProcessor.js b/providers/process/abstractProcessor.js
index b6766698..f81fb2ea 100644
--- a/providers/process/abstractProcessor.js
+++ b/providers/process/abstractProcessor.js
@@ -16,7 +16,7 @@ class AbstractProcessor extends BaseHandler {
   constructor(options) {
     super(options)
     this._schemaVersion = this.aggregateVersions(
-      this._collectClasses().map((entry) => entry.schemaVersion || entry.toolVersion),
+      this._collectClasses().map(entry => entry.schemaVersion || entry.toolVersion)
     )
   }

@@ -66,11 +66,11 @@ class AbstractProcessor extends BaseHandler {
         if (!version) return result
         if (typeof version !== 'string') throw new Error(`Invalid processor version ${version}`)
         const parts = version.split('.')
-        if (parts.length !== 3 || parts.some((part) => isNaN(+part))) throw new Error(`${errorRoot}: ${version}`)
+        if (parts.length !== 3 || parts.some(part => isNaN(+part))) throw new Error(`${errorRoot}: ${version}`)
         for (let i = 0; i < 3; i++) result[i] += +parts[i]
         return result
       },
-      [0, 0, 0],
+      [0, 0, 0]
     )
       .join('.')
   }
@@ -90,7 +90,7 @@ class AbstractProcessor extends BaseHandler {
     if (!files || !files.length) return
     if (!document._attachments) Object.defineProperty(document, '_attachments', { value: [], enumerable: false })
     document.attachments = document.attachments || []
-    files.forEach((file) => {
+    files.forEach(file => {
       const fullPath = path.join(location, file)
       const attachment = fs.readFileSync(fullPath, 'utf8')
       const token = this._computeToken(attachment)
@@ -112,12 +112,12 @@ class AbstractProcessor extends BaseHandler {
     if (!locationStat.isDirectory()) return [location]
     const subdirs = await readdir(location)
     const files = await Promise.all(
-      subdirs.map((subdir) => {
+      subdirs.map(subdir => {
         const entry = path.resolve(location, subdir)
         return this.getFiles(entry)
-      }),
+      })
     )
-    return flatten(files).filter((x) => x)
+    return flatten(files).filter(x => x)
   }

   /**
@@ -130,14 +130,14 @@ class AbstractProcessor extends BaseHandler {
   async getFolders(location, ignorePaths = []) {
     const subdirs = await readdir(location)
     const folders = await Promise.all(
-      subdirs.map(async (subdir) => {
+      subdirs.map(async subdir => {
         const entry = path.resolve(location, subdir)
         const entryStat = await lstat(entry)
         if (entryStat.isSymbolicLink() || !entryStat.isDirectory()) return []
         return [entry, ...(await this.getFolders(entry))]
-      }),
+      })
     )
-    return flatten(folders).filter((folder) => folder && !ignorePaths.some((ignorePath) => folder.includes(ignorePath)))
+    return flatten(folders).filter(folder => folder && !ignorePaths.some(ignorePath => folder.includes(ignorePath)))
   }

   /**
@@ -148,8 +148,8 @@ class AbstractProcessor extends BaseHandler {
    */
   async filterFiles(location) {
     const fullList = await this.getFiles(location)
-    const filteredList = fullList.filter((file) => file && !isGitFile(file))
-    return trimAllParents(filteredList, location).filter((x) => x)
+    const filteredList = fullList.filter(file => file && !isGitFile(file))
+    return trimAllParents(filteredList, location).filter(x => x)
   }

   shouldFetch() {
@@ -223,7 +223,7 @@ class AbstractProcessor extends BaseHandler {
   addLocalToolTasks(request, ...tools) {
     const toolList = tools.length ? tools : ['licensee', 'scancode', 'reuse' /*, 'fossology'*/]
-    toolList.forEach((tool) => this.linkAndQueueTool(request, tool, undefined, 'local'))
+    toolList.forEach(tool => this.linkAndQueueTool(request, tool, undefined, 'local'))
   }
 }
diff --git a/providers/process/component.js b/providers/process/component.js
index ef58f0df..c8f29058 100644
--- a/providers/process/component.js
+++ b/providers/process/component.js
@@ -24,4 +24,4 @@
   }
 }

-module.exports = (options) => new ComponentProcessor(options)
+module.exports = options => new ComponentProcessor(options)
diff --git a/providers/process/composerExtract.js b/providers/process/composerExtract.js
index fa6a512c..46efc7ab 100644
--- a/providers/process/composerExtract.js
+++ b/providers/process/composerExtract.js
@@ -61,7 +61,7 @@ class ComposerExtract extends AbstractClearlyDefinedProcessor {
       if (typeof manifest.bugs === 'string' && manifest.bugs.startsWith('http')) candidateUrls.push(manifest.bugs)
       else candidateUrls.push(manifest.bugs.url)
     }
-    return candidateUrls.filter((e) => e)
+    return candidateUrls.filter(e => e)
   }

   async _discoverSource(manifest, registryManifest) {
@@ -72,7 +72,7 @@ class ComposerExtract extends AbstractClearlyDefinedProcessor {
     // TODO lookup source discovery in a set of services that have their own configuration
     return this.sourceFinder(registryManifest.version, candidates, {
       githubToken: this.options.githubToken,
-      logger: this.logger,
+      logger: this.logger
     })
   }
diff --git a/providers/process/condaExtract.js b/providers/process/condaExtract.js
index 7dd4c856..93855881 100644
--- a/providers/process/condaExtract.js
+++ b/providers/process/condaExtract.js
@@ -41,12 +41,12 @@ class CondaExtract extends AbstractClearlyDefinedProcessor {
       registryData.channelData.home,
       registryData.channelData.dev_url,
       registryData.channelData.doc_url,
-      registryData.channelData.doc_source_url,
-    ].filter((e) => e)
+      registryData.channelData.doc_source_url
+    ].filter(e => e)
     let sourceInfo = undefined
     const githubSource = await this.sourceFinder(registryData.repoData.packageData.version, sourceCandidates, {
       githubToken: this.options.githubToken,
-      logger: this.logger,
+      logger: this.logger
     })
     if (githubSource) {
       sourceInfo = githubSource
diff --git a/providers/process/condaSrcExtract.js b/providers/process/condaSrcExtract.js
index 97d071d9..eeab6857 100644
--- a/providers/process/condaSrcExtract.js
+++ b/providers/process/condaSrcExtract.js
@@ -22,4 +22,4 @@
   }
 }

-module.exports = (options) => new CondaSrcExtract(options)
+module.exports = options => new CondaSrcExtract(options)
diff --git a/providers/process/crateExtract.js b/providers/process/crateExtract.js
index 539a6ad5..2e6a3c97 100644
--- a/providers/process/crateExtract.js
+++ b/providers/process/crateExtract.js
@@ -42,7 +42,7 @@ class CrateExtract extends AbstractClearlyDefinedProcessor {
   _discoverSource(manifest, registryData) {
     return this.sourceFinder(registryData.num, [manifest.repository, manifest.homepage, manifest.documentation], {
       githubToken: this.options.githubToken,
-      logger: this.logger,
+      logger: this.logger
     })
   }
 }
diff --git a/providers/process/debExtract.js b/providers/process/debExtract.js
index 890d41db..1c107192 100644
--- a/providers/process/debExtract.js
+++ b/providers/process/debExtract.js
@@ -43,7 +43,7 @@ class DebExtract extends AbstractClearlyDefinedProcessor {
       registryData,
       releaseDate,
       copyrightUrl,
-      declaredLicenses,
+      declaredLicenses
     })
     const sourceInfo = this._discoverSource(spec, registryData)
     if (sourceInfo) request.document.sourceInfo = sourceInfo
@@ -51,7 +51,7 @@ class DebExtract extends AbstractClearlyDefinedProcessor {

   _discoverSource(spec, registryData) {
     const [revision, architecture] = spec.revision.split('_')
-    const source = (registryData.find((entry) => entry.Architecture === architecture) || {}).Source
+    const source = (registryData.find(entry => entry.Architecture === architecture) || {}).Source
     if (source) {
       const result = SourceSpec.fromObject(spec)
       result.type = 'debsrc'
diff --git a/providers/process/debsrcExtract.js b/providers/process/debsrcExtract.js
index 051eeb09..0f481d2a 100644
--- a/providers/process/debsrcExtract.js
+++ b/providers/process/debsrcExtract.js
@@ -22,9 +22,9 @@ class DebSrcExtract extends AbstractClearlyDefinedProcessor {
       releaseDate,
       registryData,
       copyrightUrl,
-      declaredLicenses,
+      declaredLicenses
     })
   }
 }

-module.exports = (options) => new DebSrcExtract(options)
+module.exports = options => new DebSrcExtract(options)
diff --git a/providers/process/fossology.js b/providers/process/fossology.js
index 68ebef31..0f47ce84 100644
--- a/providers/process/fossology.js
+++ b/providers/process/fossology.js
@@ -51,19 +51,19 @@ class FossologyProcessor extends AbstractProcessor {
   async _runNomos(request) {
     const parameters = []
-    const result = await new Promise((resolve) => {
+    const result = await new Promise(resolve => {
       let data = ''
       const nomos = spawn(`${this.options.installDir}/nomos/agent/nomossa`, [
         '-ld',
         request.document.location,
-        ...parameters,
+        ...parameters
       ])
-      nomos.stdout.on('data', (chunk) => {
+      nomos.stdout.on('data', chunk => {
         if (data) data += chunk
         else data = chunk
       })
       nomos
-        .on('error', (error) => {
+        .on('error', error => {
           this.logger.error(error)
           resolve(null)
         })
@@ -73,7 +73,7 @@ class FossologyProcessor extends AbstractProcessor {
     })
     const output = {
       contentType: 'text/plain',
-      content: result.replace(new RegExp(`${request.document.location}/`, 'g'), ''),
+      content: result.replace(new RegExp(`${request.document.location}/`, 'g'), '')
     }
     return { version: this._nomosVersion, parameters: parameters.join(' '), output }
   }
@@ -93,8 +93,8 @@ class FossologyProcessor extends AbstractProcessor {
   async _runCopyright(request, files, root) {
     const parameters = ['-J']
-    const output = await this._visitFiles(files, (file) =>
-      this._runCopyrightOnFile(request, path.join(root, file), parameters),
+    const output = await this._visitFiles(files, file =>
+      this._runCopyrightOnFile(request, path.join(root, file), parameters)
     )
     return { version: this._copyrightVersion, parameters, output }
   }
@@ -104,7 +104,7 @@ class FossologyProcessor extends AbstractProcessor {
       const { stdout } = await execFile(
         `${this.options.installDir}/copyright/agent/copyright`,
         ['--files', file, ...parameters],
-        { cwd: `${this.options.installDir}/copyright/agent` },
+        { cwd: `${this.options.installDir}/copyright/agent` }
       )
       return stdout
     } catch (error) {
@@ -120,21 +120,21 @@ class FossologyProcessor extends AbstractProcessor {
     const chunkSize = 500
     const output = {
       contentType: 'text/plain',
-      content: '',
+      content: ''
     }
     for (let i = 0; i < files.length; i += chunkSize) {
-      const fileArguments = files.slice(i, i + chunkSize).map((file) => path.join(root, file))
-      const result = await new Promise((resolve) => {
+      const fileArguments = files.slice(i, i + chunkSize).map(file => path.join(root, file))
+      const result = await new Promise(resolve => {
         let data = ''
         const monk = spawn(`${this.options.installDir}/monk/agent/monk`, [...parameters, ...fileArguments], {
-          cwd: `${this.options.installDir}/monk/agent`,
+          cwd: `${this.options.installDir}/monk/agent`
         })
-        monk.stdout.on('data', (chunk) => {
+        monk.stdout.on('data', chunk => {
           if (data) data += chunk
           else data = chunk
         })
         monk
-          .on('error', (error) => {
+          .on('error', error => {
             this.logger.error(error)
             resolve(null)
           })
@@ -193,4 +193,4 @@
   }
 }

-module.exports = (options) => new FossologyProcessor(options)
+module.exports = options => new FossologyProcessor(options)
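// For context: the monk runner above feeds files to the agent in slices of
// 500 (chunkSize), presumably to keep each spawned command line manageable.
// The slicing itself, as a standalone sketch:
const chunk = (items, size) => {
  const batches = []
  for (let i = 0; i < items.length; i += size) batches.push(items.slice(i, i + size))
  return batches
}
console.log(chunk(['a', 'b', 'c', 'd', 'e'], 2)) // => [ [ 'a', 'b' ], [ 'c', 'd' ], [ 'e' ] ]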
diff --git a/providers/process/fsfeReuse.js b/providers/process/fsfeReuse.js
index ed7a12ac..ccdb4339 100644
--- a/providers/process/fsfeReuse.js
+++ b/providers/process/fsfeReuse.js
@@ -7,7 +7,7 @@ const execFile = promisify(require('child_process').execFile)
 const { merge } = require('lodash')
 const {
   readdirSync,
-  promises: { readFile },
+  promises: { readFile }
 } = require('fs')

 class FsfeReuseProcessor extends AbstractProcessor {
@@ -44,8 +44,8 @@ class FsfeReuseProcessor extends AbstractProcessor {
     request.document = merge(this.clone(request.document), { reuse: record })
     this.attachFiles(
       request.document,
-      record.licenses.map((file) => file.filePath),
-      location,
+      record.licenses.map(file => file.filePath),
+      location
     )
   }

@@ -74,8 +74,8 @@ class FsfeReuseProcessor extends AbstractProcessor {
       const spdxResultFile = {}
       const spdxRawValues = spdxResult.split(/\n/) // Each line represents a single result attribute
-      spdxRawValues.forEach((spdxRawValue) =>
-        this._handleResultAttribute(spdxRawValue, entryIndex, results, spdxResultFile),
+      spdxRawValues.forEach(spdxRawValue =>
+        this._handleResultAttribute(spdxRawValue, entryIndex, results, spdxResultFile)
       )
       // Generic metadata was already added to results.metadata
       // In case we have file metadata, all attributes are read now and information can be added to the file results
@@ -90,7 +90,7 @@ class FsfeReuseProcessor extends AbstractProcessor {
     const spdxResultValue = {
       key: spdxMatchResult.groups.first_key,
       secondaryKey: spdxMatchResult.groups.second_key,
-      spdxValue: spdxMatchResult.groups.spdx_value.replace(/(<\/?([^>]+)>)/g, ''),
+      spdxValue: spdxMatchResult.groups.spdx_value.replace(/(<\/?([^>]+)>)/g, '')
     }
     // First result section contains generic metadata, any other section attributes for a particular file
     if (entryIndex === 0) {
@@ -129,10 +129,10 @@ class FsfeReuseProcessor extends AbstractProcessor {
     const licensesDir = 'LICENSES'
     try {
       const licenseFiles = readdirSync(request.document.location + '/' + licensesDir)
-      licenseFiles.forEach((file) => {
+      licenseFiles.forEach(file => {
         licenses.push({
           filePath: licensesDir + '/' + file,
-          spdxId: file.substring(0, file.indexOf('.txt')),
+          spdxId: file.substring(0, file.indexOf('.txt'))
         })
       })
     } catch (error) {
@@ -144,20 +144,20 @@ class FsfeReuseProcessor extends AbstractProcessor {
   _detectVersion() {
     if (this._versionPromise !== undefined) return this._versionPromise
     this._versionPromise = execFile('reuse', ['--version'])
-      .then((result) => {
+      .then(result => {
        const reuseRegex = /reuse\s+(\d+\.\d+(\.\d+)?)/i
         this._toolVersion = result.stdout.trim().match(reuseRegex)[1]
         this._schemaVersion = this.aggregateVersions(
           [this._schemaVersion, this.toolVersion, this.configVersion],
-          'Invalid REUSE version',
+          'Invalid REUSE version'
         )
         return this._schemaVersion
       })
-      .catch((error) => {
+      .catch(error => {
         if (error) this.logger.log(`Could not detect version of REUSE: ${error.message}`)
       })
     return this._versionPromise
   }
 }

-module.exports = (options) => new FsfeReuseProcessor(options)
+module.exports = options => new FsfeReuseProcessor(options)
diff --git a/providers/process/gemExtract.js b/providers/process/gemExtract.js
index d421fba4..328f8009 100644
--- a/providers/process/gemExtract.js
+++ b/providers/process/gemExtract.js
@@ -44,7 +44,7 @@ class GemExtract extends AbstractClearlyDefinedProcessor {
     candidates.push(get(registryData, 'homepage_uri'))
     candidates.push(get(registryData, 'mailing_list_uri'))
     candidates.push(get(registryData, 'source_code_uri'))
-    const allCandidates = candidates.filter((e) => e)
+    const allCandidates = candidates.filter(e => e)
     return this.sourceFinder(version, allCandidates, { githubToken: this.options.githubToken, logger: this.logger })
   }
diff --git a/providers/process/licensee.js b/providers/process/licensee.js
index 42cb9905..516cbf18 100644
--- a/providers/process/licensee.js
+++ b/providers/process/licensee.js
@@ -41,7 +41,7 @@ class LicenseeProcessor extends AbstractProcessor {
     if (!record) return
     const location = request.document.location
     request.document = merge(this.clone(request.document), { licensee: record })
-    const toAttach = record.output.content.matched_files.map((file) => file.filename)
+    const toAttach = record.output.content.matched_files.map(file => file.filename)
     this.attachFiles(request.document, toAttach, location)
   }

@@ -52,17 +52,17 @@ class LicenseeProcessor extends AbstractProcessor {
     const paths = ['', ...trimAllParents(subfolders, root)]
     try {
       const results = (
-        await Promise.all(paths.map(throat(10, (path) => this._runOnFolder(path, root, parameters))))
-      ).filter((x) => x)
-      const licenses = uniqBy(flatten(results.map((result) => result.licenses)), 'spdx_id')
-      const matched_files = flatten(results.map((result) => result.matched_files))
+        await Promise.all(paths.map(throat(10, path => this._runOnFolder(path, root, parameters))))
+      ).filter(x => x)
+      const licenses = uniqBy(flatten(results.map(result => result.licenses)), 'spdx_id')
+      const matched_files = flatten(results.map(result => result.matched_files))
       return {
         version: this.toolVersion,
         parameters: parameters,
         output: {
           contentType: 'application/json',
-          content: { licenses, matched_files },
-        },
+          content: { licenses, matched_files }
+        }
       }
     } catch (exception) {
       request.markDead('Error', exception ? exception.message : 'Licensee run failed')
@@ -75,7 +75,7 @@ class LicenseeProcessor extends AbstractProcessor {
       const stdout = await this._runLicensee(parameters, path.join(root, folder))
       if (!stdout.trim()) return
       const result = JSON.parse(stdout)
-      result.matched_files.forEach((file) => (file.filename = `${folder ? folder + '/' : ''}${file.filename}`))
+      result.matched_files.forEach(file => (file.filename = `${folder ? folder + '/' : ''}${file.filename}`))
       return result
     } catch (error) {
       // Licensee fails with code = 1 if there are no license files found in the given folder.
@@ -93,19 +93,19 @@ class LicenseeProcessor extends AbstractProcessor {
   _detectVersion() {
     if (this._versionPromise !== undefined) return this._versionPromise
     this._versionPromise = execFile('licensee', ['version'])
-      .then((result) => {
+      .then(result => {
         this._toolVersion = result.stdout.trim()
         this._schemaVersion = this.aggregateVersions(
           [this._schemaVersion, this.toolVersion, this.configVersion],
-          'Invalid Licensee version',
+          'Invalid Licensee version'
         )
         return this._schemaVersion
       })
-      .catch((error) => {
+      .catch(error => {
         if (error) this.logger.log(`Could not detect version of Licensee: ${error.message}`)
       })
     return this._versionPromise
   }
 }

-module.exports = (options) => new LicenseeProcessor(options)
+module.exports = options => new LicenseeProcessor(options)
diff --git a/providers/process/mavenExtract.js b/providers/process/mavenExtract.js
index 8c7ee7b2..0b24777d 100644
--- a/providers/process/mavenExtract.js
+++ b/providers/process/mavenExtract.js
@@ -42,7 +42,7 @@ class MavenExtract extends AbstractClearlyDefinedProcessor {
   _discoverCandidateSourceLocations(manifest) {
     const candidateUrls = []
     candidateUrls.push(get(manifest, 'summary.scm.0.url.0'))
-    return candidateUrls.filter((e) => e)
+    return candidateUrls.filter(e => e)
   }

   async _discoverSource(spec, manifest) {
@@ -50,7 +50,7 @@ class MavenExtract extends AbstractClearlyDefinedProcessor {
     // TODO lookup source discovery in a set of services that have their own configuration
     const githubSource = await this.sourceFinder(spec.revision, manifestCandidates, {
       githubToken: this.options.githubToken,
-      logger: this.logger,
+      logger: this.logger
     })
     if (githubSource) return githubSource
     // didn't find any source in GitHub so make up a sources url to try if the registry thinks there is source
diff --git a/providers/process/npmExtract.js b/providers/process/npmExtract.js
index 7f55651e..a3e99f92 100644
--- a/providers/process/npmExtract.js
+++ b/providers/process/npmExtract.js
@@ -62,7 +62,7 @@ class NpmExtract extends AbstractClearlyDefinedProcessor {
       if (typeof manifest.bugs === 'string' && manifest.bugs.startsWith('http')) candidateUrls.push(manifest.bugs)
       else candidateUrls.push(manifest.bugs.url)
     }
-    return candidateUrls.filter((e) => e)
+    return candidateUrls.filter(e => e)
   }

   async _discoverSource(manifest, registryManifest) {
@@ -73,7 +73,7 @@ class NpmExtract extends AbstractClearlyDefinedProcessor {
     // TODO lookup source discovery in a set of services that have their own configuration
     return this.sourceFinder(registryManifest.version, candidates, {
       githubToken: this.options.githubToken,
-      logger: this.logger,
+      logger: this.logger
     })
   }
diff --git a/providers/process/nugetExtract.js b/providers/process/nugetExtract.js
index e4de1a55..76958734 100644
--- a/providers/process/nugetExtract.js
+++ b/providers/process/nugetExtract.js
@@ -78,7 +78,7 @@ class NuGetExtract extends AbstractClearlyDefinedProcessor {
     const candidates = [...nuspecCandidates, ...manifestCandidates, ...latestNuspecCandidates]
     return this.sourceFinder(manifest.version, candidates, {
       githubToken: this.options.githubToken,
-      logger: this.logger,
+      logger: this.logger
     })
   }

diff --git a/providers/process/package.js b/providers/process/package.js
index 9b7790f5..ef526920 100644
--- a/providers/process/package.js
+++ b/providers/process/package.js
@@ -24,4 +24,4 @@
   }
 }

-module.exports = { processor: (options) => new PackageProcessor(options), supportedTypes }
+module.exports = { processor: options => new PackageProcessor(options), supportedTypes }
diff --git a/providers/process/podExtract.js b/providers/process/podExtract.js
index dc9121d8..ff3e73a5 100644
--- a/providers/process/podExtract.js
+++ b/providers/process/podExtract.js
@@ -61,7 +61,7 @@ class PodExtract extends AbstractClearlyDefinedProcessor {
     // there is no way to pass the branch/tag/commit we have in the manifest
     return this.sourceFinder(registryData.version, sources, {
       githubToken: this.options.githubToken,
-      logger: this.logger,
+      logger: this.logger
     })
   }
 }
diff --git a/providers/process/pypiExtract.js b/providers/process/pypiExtract.js
index e2764112..278de711 100644
--- a/providers/process/pypiExtract.js
+++ b/providers/process/pypiExtract.js
@@ -45,14 +45,14 @@ class PyPiExtract extends AbstractClearlyDefinedProcessor {
     candidates.push(get(registryData, 'info.package_url'))
     candidates.push(get(registryData, 'info.project_url'))
     candidates.push(get(registryData, 'info.release_url'))
-    const allCandidates = candidates.filter((e) => e)
+    const allCandidates = candidates.filter(e => e)
     return this.sourceFinder(revision, allCandidates, { githubToken: this.options.githubToken, logger: this.logger })
   }

   async _createDocument(request, spec, registryData) {
     request.document = merge(this.clone(request.document), {
       registryData,
-      declaredLicense: request.document.declaredLicense,
+      declaredLicense: request.document.declaredLicense
     })
     const sourceInfo = await this._discoverSource(spec.revision, registryData)
     if (sourceInfo) request.document.sourceInfo = sourceInfo
diff --git a/providers/process/scancode.js b/providers/process/scancode.js
index 537dce98..592bdd21 100644
--- a/providers/process/scancode.js
+++ b/providers/process/scancode.js
@@ -44,14 +44,14 @@ class ScanCodeProcessor extends AbstractProcessor {

   async _runScancode(request, file) {
     this.logger.info(
-      `Analyzing ${request.toString()} using ScanCode. input: ${request.document.location} output: ${file.name}`,
+      `Analyzing ${request.toString()} using ScanCode. input: ${request.document.location} output: ${file.name}`
     )
     const { options, timeout, processes, format } = this.options
     const parameters = [...options, '--timeout', timeout.toString(), '-n', processes.toString(), format]
     try {
       await execFile(`${this.options.installDir}/scancode`, [...parameters, file.name, request.document.location], {
         cwd: this.options.installDir,
-        maxBuffer: 5 * 1024 * 1024,
+        maxBuffer: 5 * 1024 * 1024
       })
     } catch (error) {
       // TODO see if the new version of ScanCode has a better way of differentiating errors
@@ -66,13 +66,13 @@ class ScanCodeProcessor extends AbstractProcessor {
     const output = JSON.parse(fs.readFileSync(outputFile))
     // Pick files that are potentially whole licenses. We can be reasonably agressive here
     // and the summarizers etc will further refine what makes it into the final definitions
-    const licenses = output.files.filter((file) => file.is_license_text).map((file) => file.path)
+    const licenses = output.files.filter(file => file.is_license_text).map(file => file.path)
     this.attachFiles(document, licenses, root)

     // Pick files that represent whole packages. We can be reasonably agressive here
     // and the summarizers etc will further refine what makes it into the final definitions
     const packages = output.files.reduce((result, file) => {
-      file.packages.forEach((entry) => {
+      file.packages.forEach(entry => {
         // in this case the manifest_path contains a subpath pointing to the corresponding file
         if (file.type === 'directory' && entry.manifest_path)
           result.push(`${file.path ? file.path + '/' : ''}${entry.manifest_path}`)
@@ -93,23 +93,23 @@ class ScanCodeProcessor extends AbstractProcessor {
   _hasRealErrors(resultFile) {
     const results = JSON.parse(fs.readFileSync(resultFile))
     return results.files.some(
-      (file) =>
+      file =>
         file.scan_errors &&
-        file.scan_errors.some((error) => {
+        file.scan_errors.some(error => {
           return !(
             error.includes('ERROR: Processing interrupted: timeout after') ||
             error.includes('ValueError:') ||
             error.includes('package.json') ||
             error.includes('UnicodeDecodeError')
           )
-        }),
+        })
     )
   }

   _detectVersion() {
     if (this._versionPromise) return this._versionPromise
     this._versionPromise = execFile(`${this.options.installDir}/scancode`, ['--version'])
-      .then((result) => {
+      .then(result => {
         this.logger.info('Detecting ScanCode version')

         const raw_output = result.stdout
@@ -117,15 +117,15 @@ class ScanCodeProcessor extends AbstractProcessor {
         this._toolVersion = scancode_line.replace('ScanCode version ', '').trim()
         this._schemaVersion = this.aggregateVersions(
           [this._schemaVersion, this.toolVersion, this.configVersion],
-          'Invalid ScanCode version',
+          'Invalid ScanCode version'
         )
         return this._schemaVersion
       })
-      .catch((error) => {
+      .catch(error => {
         this.logger.log(`Could not detect version of ScanCode: ${error.message} `)
       })
     return this._versionPromise
   }
 }

-module.exports = (options) => new ScanCodeProcessor(options)
+module.exports = options => new ScanCodeProcessor(options)
diff --git a/providers/process/source.js b/providers/process/source.js
index 078a6f37..45cf5330 100644
--- a/providers/process/source.js
+++ b/providers/process/source.js
@@ -23,4 +23,4 @@
   }
 }

-module.exports = { processor: (options) => new SourceProcessor(options), supportedTypes }
+module.exports = { processor: options => new SourceProcessor(options), supportedTypes }
diff --git a/providers/process/sourceExtract.js b/providers/process/sourceExtract.js
index bf26ff5e..7addffe6 100644
--- a/providers/process/sourceExtract.js
+++ b/providers/process/sourceExtract.js
@@ -29,4 +29,4 @@
   }
 }

-module.exports = (options) => new SourceExtract(options)
+module.exports = options => new SourceExtract(options)
diff --git a/providers/process/top.js b/providers/process/top.js
index a6473c6e..238e8c72 100644
--- a/providers/process/top.js
+++ b/providers/process/top.js
@@ -31,7 +31,7 @@ class TopProcessor extends AbstractProcessor {
         'github',
         'pypi',
         'composer',
-        'debian',
+        'debian'
       ].includes(spec.provider)
     )
   }
@@ -90,10 +90,10 @@ class TopProcessor extends AbstractProcessor {
     const initialOffset = Math.floor(start / 36) * 36
     for (let offset = initialOffset; offset < end; offset += 36) {
       const response = await requestRetry.get(`https://www.npmjs.com/browse/depended?offset=${offset}`, {
-        headers: { 'x-spiferack': 1 },
+        headers: { 'x-spiferack': 1 }
       })
       const packages = response.packages || []
-      const requestsPage = packages.map((pkg) => {
+      const requestsPage = packages.map(pkg => {
         let [namespace, name] = pkg.name.split('/')
         if (!name) {
           name = namespace
@@ -155,10 +155,10 @@ class TopProcessor extends AbstractProcessor {
     for (let offset = start; offset < end; offset += 100) {
       const page = offset / 100 + 1
       const response = await requestRetry.get(
-        `https://crates.io/api/v1/crates?page=${page}&per_page=100&sort=downloads`,
+        `https://crates.io/api/v1/crates?page=${page}&per_page=100&sort=downloads`
       )
       const requestsPage = response.crates.map(
-        (x) => new Request('package', `cd:/crate/cratesio/-/${x.name}/${x.max_version}`),
+        x => new Request('package', `cd:/crate/cratesio/-/${x.name}/${x.max_version}`)
       )
       await request.queueRequests(requestsPage)
       console.log(`Queued ${requestsPage.length} Crate packages. Offset: ${offset}`)
@@ -185,7 +185,7 @@ class TopProcessor extends AbstractProcessor {
     const condaFetch = CondaFetch({
       logger: this.logger,
-      cdFileLocation: config.get('FILE_STORE_LOCATION') || (process.platform === 'win32' ? 'c:/temp/cd' : '/tmp/cd'),
+      cdFileLocation: config.get('FILE_STORE_LOCATION') || (process.platform === 'win32' ? 'c:/temp/cd' : '/tmp/cd')
     })

     if (!condaFetch.channels[spec.provider]) return request.markSkip(`Unrecognized conda channel ${spec.provider}`)
@@ -198,7 +198,7 @@ class TopProcessor extends AbstractProcessor {
         let repoData = await condaFetch.getRepoData(channelUrl, spec.provider, subdir)
         let repoCoordinates = Object.entries(repoData.packages).map(
           ([, packageData]) =>
-            `cd:/conda/${spec.provider}/${subdir}/${packageData.name}/${packageData.version}-${packageData.build}/`,
+            `cd:/conda/${spec.provider}/${subdir}/${packageData.name}/${packageData.version}-${packageData.build}/`
         )
         packagesCoordinates = packagesCoordinates.concat(repoCoordinates)
         if (start < packagesCoordinates.length && end <= packagesCoordinates.length) {
@@ -207,18 +207,18 @@ class TopProcessor extends AbstractProcessor {
       }
     } else {
       packagesCoordinates = Object.entries(channelData.packages).map(
-        ([packageName, packageData]) => `cd:/condasrc/${spec.provider}/-/${packageName}/${packageData.version}/`,
+        ([packageName, packageData]) => `cd:/condasrc/${spec.provider}/-/${packageName}/${packageData.version}/`
       )
     }

     let slicedCoordinates = packagesCoordinates.slice(start, end)
     this.logger.info(
-      `Conda top - coordinates: ${packagesCoordinates.length}, start: ${start}, end: ${end}, sliced: ${slicedCoordinates.length}`,
+      `Conda top - coordinates: ${packagesCoordinates.length}, start: ${start}, end: ${end}, sliced: ${slicedCoordinates.length}`
     )

     await request.queueRequests(
-      slicedCoordinates.map((coord) => new Request(spec.type === 'conda' ? 'package' : 'source', coord)),
+      slicedCoordinates.map(coord => new Request(spec.type === 'conda' ? 'package' : 'source', coord))
     )
     return request.markNoSave()
   }
@@ -269,7 +269,7 @@ class TopProcessor extends AbstractProcessor {
     start = start && start >= 0 ? ++start : 1 // Exclude header from CSV file
     end = end && end > 0 ? ++end : fileLines.length
     const lines = fileLines.slice(start, end)
-    const requests = lines.map((line) => {
+    const requests = lines.map(line => {
       let [, groupId, artifactId] = line.split(',')
       groupId = groupId.substring(1, groupId.length - 1) // Remove quotes
       artifactId = artifactId.substring(1, artifactId.length - 1)
@@ -286,7 +286,7 @@ class TopProcessor extends AbstractProcessor {
     start = start && start >= 0 ? ++start : 1 // Exclude header from CSV file
     end = end && end > 0 ? ++end : fileLines.length
     const lines = fileLines.slice(start, end)
-    const requests = lines.map((line) => {
+    const requests = lines.map(line => {
       let [, groupId, artifactId] = line.split(',')
       groupId = groupId.substring(1, groupId.length - 1) // Remove quotes
       artifactId = artifactId.substring(1, artifactId.length - 1)
@@ -317,9 +317,9 @@ class TopProcessor extends AbstractProcessor {
     if (!end || end - start <= 0) end = start + 1000
     for (let offset = start; offset < end; offset += pageSize) {
       const topComponents = await requestRetry.get(
-        `https://api-v2v3search-0.nuget.org/query?prerelease=false&skip=${offset}&take=${pageSize}`,
+        `https://api-v2v3search-0.nuget.org/query?prerelease=false&skip=${offset}&take=${pageSize}`
      )
-      const requests = topComponents.data.map((component) => {
+      const requests = topComponents.data.map(component => {
         return new Request('package', `cd:/nuget/nuget/-/${component.id}`)
       })
       await request.queueRequests(requests)
@@ -339,18 +339,18 @@ class TopProcessor extends AbstractProcessor {
   async _processAllGitHubOrgRepos(request) {
     const { namespace } = this.toSpec(request)
     const headers = {
-      'User-Agent': 'clearlydefined/scanning',
+      'User-Agent': 'clearlydefined/scanning'
     }
     const token = this.options.githubToken
     if (token) headers.Authorization = 'token ' + token
     const repos = await ghrequestor.getAll(`https://api.github.com/orgs/${namespace}/repos`, {
       headers,
-      tokenLowerBound: 10,
+      tokenLowerBound: 10
     })
     const requests = []
     for (let i = 0; i < repos.length; i++) {
       const commits = await requestRetry.get(`https://api.github.com/repos/${namespace}/${repos[i].name}/commits`, {
-        headers,
+        headers
       })
       if (commits.length > 0) {
         requests.push(new Request('source', `cd:/git/github/${namespace}/${repos[i].name}/${commits[0].sha}`))
@@ -378,15 +378,15 @@ class TopProcessor extends AbstractProcessor {
     if (!end || end - start <= 0) end = start + 100
     const debianFetch = DebianFetch({
       logger: this.logger,
-      cdFileLocation: config.get('FILE_STORE_LOCATION') || (process.platform === 'win32' ? 'c:/temp/cd' : '/tmp/cd'),
+      cdFileLocation: config.get('FILE_STORE_LOCATION') || (process.platform === 'win32' ? 'c:/temp/cd' : '/tmp/cd')
     })
     await debianFetch._getPackageMapFile()
     const packagesCoordinates = await this._getDebianPackagesCoordinates(debianFetch)
     const slicedCoordinates = packagesCoordinates.slice(start, end)
     this.logger.info(
-      `Debian top - coordinates: ${packagesCoordinates.length}, start: ${start}, end: ${end}, sliced: ${slicedCoordinates.length}`,
+      `Debian top - coordinates: ${packagesCoordinates.length}, start: ${start}, end: ${end}, sliced: ${slicedCoordinates.length}`
     )
-    const requests = slicedCoordinates.map((coordinate) => new Request('package', coordinate))
+    const requests = slicedCoordinates.map(coordinate => new Request('package', coordinate))
     await request.queueRequests(requests)
     return request.markNoSave()
   }
@@ -397,7 +397,7 @@ class TopProcessor extends AbstractProcessor {
       const lineReader = linebyline(debianFetch.packageMapFileLocation)
       let entry = {}
       lineReader
-        .on('line', (line) => {
+        .on('line', line => {
           if (line === '') {
             const architecture = entry.Architecture
             const binary = entry.Binary
@@ -414,11 +414,11 @@ class TopProcessor extends AbstractProcessor {
         .on('end', () => {
           return resolve(coordinates)
         })
-        .on('error', (error) => reject(error))
+        .on('error', error => reject(error))
     })
   }

   // TODO: Implement _processTopPackagists
 }

-module.exports = (options) => new TopProcessor(options)
+module.exports = options => new TopProcessor(options)
diff --git a/providers/store/attachmentStore.js b/providers/store/attachmentStore.js
index 109b5dde..dd45a8ba 100644
--- a/providers/store/attachmentStore.js
+++ b/providers/store/attachmentStore.js
@@ -16,7 +16,7 @@ class AttachmentStore {
   upsert(document) {
     const documentPromise = this.baseStore.upsert(document)
     if (!document._attachments) return documentPromise
-    const attachmentPromises = document._attachments.map((entry) => {
+    const attachmentPromises = document._attachments.map(entry => {
       return this.baseStore.upsert({
         _metadata: {
           type: 'attachment',
@@ -24,14 +24,14 @@ class AttachmentStore {
           links: {
             self: {
               href: `urn:attachment:${entry.token}`,
-              type: 'resource',
-            },
+              type: 'resource'
+            }
           },
           fetchedAt: get(document, '_metadata.fetchedAt'),
           processedAt: get(document, '_metadata.processedAt'),
-          version: '1',
+          version: '1'
         },
-        attachment: Buffer.from(entry.attachment).toString(),
+        attachment: Buffer.from(entry.attachment).toString()
       })
     })
     attachmentPromises.push(documentPromise)
@@ -63,4 +63,4 @@
   }
 }

-module.exports = (options) => new AttachmentStore(options)
+module.exports = options => new AttachmentStore(options)
diff --git a/providers/store/attachmentStoreFactory.js b/providers/store/attachmentStoreFactory.js
index 608835ad..d3804de3 100644
--- a/providers/store/attachmentStoreFactory.js
+++ b/providers/store/attachmentStoreFactory.js
@@ -3,6 +3,6 @@

 const AttachmentStore = require('./attachmentStore')

-module.exports = (realFactory) => {
-  return (options) => AttachmentStore({ ...options, baseStore: realFactory(options) })
+module.exports = realFactory => {
+  return options => AttachmentStore({ ...options, baseStore: realFactory(options) })
 }
diff --git a/providers/store/azureQueueStore.js b/providers/store/azureQueueStore.js
index 2ecce3be..b1e0e461 100644
--- a/providers/store/azureQueueStore.js
+++ b/providers/store/azureQueueStore.js
@@ -47,4 +47,4 @@
   }
 }

-module.exports = (options) => new AzureStorageQueue(options)
+module.exports = options => new AzureStorageQueue(options)
b/providers/store/storeDispatcher.js index f064f000..367d3ddf 100644 --- a/providers/store/storeDispatcher.js +++ b/providers/store/storeDispatcher.js @@ -8,35 +8,35 @@ class StoreDispatcher { } connect() { - return this._perform((store) => store.connect()) + return this._perform(store => store.connect()) } upsert(document) { - return this._perform((store) => store.upsert(document)) + return this._perform(store => store.upsert(document)) } get(type, key) { - return this._perform((store) => store.get(type, key), true) + return this._perform(store => store.get(type, key), true) } etag(type, key) { - return this._perform((store) => store.etag(type, key), true) + return this._perform(store => store.etag(type, key), true) } list(type) { - return this._perform((store) => store.list(type), true) + return this._perform(store => store.list(type), true) } count(type) { - return this._perform((store) => store.count(type), true) + return this._perform(store => store.count(type), true) } close() { - return this._perform((store) => store.close()) + return this._perform(store => store.close()) } delete(type, key) { - return this._perform((store) => store.delete(type, key)) + return this._perform(store => store.delete(type, key)) } async _perform(operation, first = false) { diff --git a/providers/store/webhookDeltaStore.js b/providers/store/webhookDeltaStore.js index 469f24ef..baf5f71d 100644 --- a/providers/store/webhookDeltaStore.js +++ b/providers/store/webhookDeltaStore.js @@ -22,9 +22,9 @@ class WebhookDeltaStore { json: true, body: pick(document, '_metadata'), headers: { - 'x-crawler': this.options.token || 'secret', + 'x-crawler': this.options.token || 'secret' }, - resolveWithFullResponse: true, + resolveWithFullResponse: true } try { const response = await request(options) @@ -60,4 +60,4 @@ class WebhookDeltaStore { } } -module.exports = (options) => new WebhookDeltaStore(options) +module.exports = options => new WebhookDeltaStore(options) diff --git a/test/unit/ghcrawler/crawlerFactoryTest.js b/test/unit/ghcrawler/crawlerFactoryTest.js index 452819d3..8c2c012d 100644 --- a/test/unit/ghcrawler/crawlerFactoryTest.js +++ b/test/unit/ghcrawler/crawlerFactoryTest.js @@ -24,8 +24,8 @@ describe('create scopedQueueSets', () => { provider: 'memory', memory: { _config: { on: sinon.stub() }, - weights: { immediate: 3, soon: 2, normal: 3, later: 2 }, - }, + weights: { immediate: 3, soon: 2, normal: 3, later: 2 } + } } const queues = CrawlerFactory.createQueues(queueOptions) expect(queues).to.be.ok diff --git a/test/unit/ghcrawler/queueSetTests.js b/test/unit/ghcrawler/queueSetTests.js index f151894d..13677eaa 100644 --- a/test/unit/ghcrawler/queueSetTests.js +++ b/test/unit/ghcrawler/queueSetTests.js @@ -37,10 +37,10 @@ describe('QueueSet weighting', () => { it('should pop other queue if nothing available', async () => { const priority = createBaseQueue('priority', { - pop: async () => new Request('priority', 'http://test'), + pop: async () => new Request('priority', 'http://test') }) const normal = createBaseQueue('normal', { - pop: async () => null, + pop: async () => null }) const queues = createBaseQueues([priority, normal], null, [1, 1]) queues.popCount = 1 @@ -57,7 +57,7 @@ describe('QueueSet weighting', () => { describe('QueueSet pushing', () => { it('should accept a simple request into a named queue', async () => { const priority = createBaseQueue('priority', { - push: async () => null, + push: async () => null }) const normal = createBaseQueue('normal') const queues = createBaseQueues([priority, 
normal]) @@ -71,10 +71,10 @@ describe('QueueSet pushing', () => { it('should throw when pushing into an unknown queue', async () => { const priority = createBaseQueue('priority', { - push: async () => null, + push: async () => null }) const normal = createBaseQueue('normal', { - push: async () => null, + push: async () => null }) const queues = createBaseQueues([priority, normal]) const request = new Request('test', 'http://test') @@ -86,7 +86,7 @@ describe('QueueSet pushing', () => { describe('QueueSet originQueue management', () => { it('should set originQueue on pop', async () => { const priority = createBaseQueue('priority', { - pop: async () => new Request('test', 'http://test'), + pop: async () => new Request('test', 'http://test') }) const queues = createBaseQueues([priority]) @@ -126,7 +126,7 @@ describe('QueueSet subscription management', () => { function createOptions(weights) { return { weights: weights, - _config: { on: () => {} }, + _config: { on: () => {} } } } @@ -136,7 +136,7 @@ function createBaseQueues(queues, weights = null) { function createBaseQueue( name, - { pop = null, push = null, done = null, abandon = null, subscribe = null, unsubscribe = null } = {}, + { pop = null, push = null, done = null, abandon = null, subscribe = null, unsubscribe = null } = {} ) { const result = { name: name } result.getName = () => { diff --git a/test/unit/lib/entitySpecTests.js b/test/unit/lib/entitySpecTests.js index f76ef559..009a60d8 100644 --- a/test/unit/lib/entitySpecTests.js +++ b/test/unit/lib/entitySpecTests.js @@ -17,7 +17,7 @@ describe('entitySpec', () => { it('creates an EntitySpec from a Maven url', () => { const entityFromUrl = EntitySpec.fromUrl( - 'cd:/maven/mavencentral/org.eclipse.xtext/org.eclipse.xtext.common.types/2.25.0', + 'cd:/maven/mavencentral/org.eclipse.xtext/org.eclipse.xtext.common.types/2.25.0' ) expect(entityFromUrl.namespace).to.eq('org.eclipse.xtext') diff --git a/test/unit/lib/fetchResultTests.js b/test/unit/lib/fetchResultTests.js index ad97dad1..5133d3ba 100644 --- a/test/unit/lib/fetchResultTests.js +++ b/test/unit/lib/fetchResultTests.js @@ -62,7 +62,7 @@ describe('fetchResult', () => { const request = new Request('test', 'http://test').trackCleanup([ dir1.removeCallback, dir2.removeCallback, - { removeCallback: sinon.stub() }, + { removeCallback: sinon.stub() } ]) expect(request.getTrackedCleanups().length).to.be.equal(3) diff --git a/test/unit/lib/memoryCacheTest.js b/test/unit/lib/memoryCacheTest.js index 638cc180..42f61f7c 100644 --- a/test/unit/lib/memoryCacheTest.js +++ b/test/unit/lib/memoryCacheTest.js @@ -38,21 +38,21 @@ describe('cache timeout callback', () => { done() } - it('should expire', (done) => { + it('should expire', done => { cache.withVerify(verifyExpired.bind(undefined, done)) cache.set('a', 'A') expect(cache.get('a')).to.be.equal('A') }) - it('should expire with no condition', (done) => { + it('should expire with no condition', done => { cache.withVerify(verifyExpired.bind(undefined, done)) cache.setWithConditionalExpiry('a', 'A') expect(cache.get('a')).to.be.equal('A') }) - it('should trigger callback after expiry', (done) => { + it('should trigger callback after expiry', done => { const afterExpire = sinon.stub() cache.withVerify(verifyExpired.bind(undefined, done, afterExpire)) @@ -60,7 +60,7 @@ describe('cache timeout callback', () => { expect(cache.get('a')).to.be.equal('A') }) - it('should expire from condition', (done) => { + it('should expire from condition', done => { const afterExpire = sinon.stub() 
     cache.withVerify(verifyExpired.bind(undefined, done, afterExpire))

@@ -68,7 +68,7 @@ describe('cache timeout callback', () => {
     expect(cache.get('a')).to.be.equal('A')
   })

-  it('should not expire from condition', (done) => {
+  it('should not expire from condition', done => {
     const afterExpire = sinon.stub()
     const verifyNotExpired = () => {
       expect(afterExpire.called).to.be.false
@@ -84,7 +84,7 @@ describe('cache timeout callback', () => {
     expect(cache.get('a')).to.be.equal('A')
   })

-  it('should not expire 1st time and then expire 2nd time', (done) => {
+  it('should not expire 1st time and then expire 2nd time', done => {
     const afterExpire = sinon.stub()
     let callCount = 0

diff --git a/test/unit/lib/sourceSpecTests.js b/test/unit/lib/sourceSpecTests.js
index 9b682be6..1b70577b 100644
--- a/test/unit/lib/sourceSpecTests.js
+++ b/test/unit/lib/sourceSpecTests.js
@@ -14,7 +14,7 @@ describe('sourceSpec', () => {
   it('creates maven url/urn', () => {
     const spec = new SourceSpec('maven', 'mavengoogle', 'androidx.activity', 'activity', '1.3.0-alpha05')
     expect(spec.toUrl()).to.eq(
-      'https://dl.google.com/android/maven2/androidx/activity/activity/1.3.0-alpha05/activity-1.3.0-alpha05.jar',
+      'https://dl.google.com/android/maven2/androidx/activity/activity/1.3.0-alpha05/activity-1.3.0-alpha05.jar'
     )
     expect(spec.toUrn()).to.eq('urn:maven:mavengoogle:androidx.activity:activity:revision:1.3.0-alpha05')
   })
diff --git a/test/unit/lib/utilsTests.js b/test/unit/lib/utilsTests.js
index adc12b7b..92ee1020 100644
--- a/test/unit/lib/utilsTests.js
+++ b/test/unit/lib/utilsTests.js
@@ -10,7 +10,7 @@ const {
   trimAllParents,
   extractDate,
   spawnPromisified,
-  isGitFile,
+  isGitFile
 } = require('../../../lib/utils')
 const { promisify } = require('util')
 const execFile = promisify(require('child_process').execFile)
@@ -71,7 +71,7 @@ describe('Util isGitFile', () => {
     ['/tmp/tempX/package/src', false],
     ['.git', true],
     ['/tmp/tempX/package/.git', true],
-    ['/tmp/tempX/package/.git/hooks/pre-merge-commit.sample', true],
+    ['/tmp/tempX/package/.git/hooks/pre-merge-commit.sample', true]
   ])

   entries.forEach((expected, file) => {
@@ -135,7 +135,7 @@ describe('test spawnPromisified ', () => {
   it('should handle output more than 5MB', async () => {
     const largeFile = 'test/fixtures/debian/0ad_0.0.17-1_armhf.deb'
     const execFilePromise = execFile('cat', [largeFile, largeFile], {
-      maxBuffer: 5 * 1024 * 1024,
+      maxBuffer: 5 * 1024 * 1024
     })

     await expect(execFilePromise).to.be.rejectedWith('stdout maxBuffer length exceeded')
diff --git a/test/unit/providers/fetch/condaFetchTests.js b/test/unit/providers/fetch/condaFetchTests.js
index 04eff259..1d891429 100644
--- a/test/unit/providers/fetch/condaFetchTests.js
+++ b/test/unit/providers/fetch/condaFetchTests.js
@@ -8,7 +8,7 @@ const Request = require('../../../../ghcrawler/lib/request.js')
 describe('condaFetch utilities', () => {
   let fetch = CondaFetch({
     logger: { info: sinon.stub() },
-    cdFileLocation: 'test/fixtures/conda/fragment',
+    cdFileLocation: 'test/fixtures/conda/fragment'
   })

   let repoData = JSON.parse(fs.readFileSync('test/fixtures/conda/repodata.json'))
@@ -20,7 +20,7 @@ describe('condaFetch utilities', () => {
   it('matches packages in repodata.packages.conda correctly', () => {
     expect(fetch._matchPackage('21cmfast', '3.0.2', 'py37h48b2cff_0', repoData).length).to.greaterThan(0)
     expect(fetch._matchPackage('21cmfast', '3.0.2', 'py37h48b2cff_0', repoData)[0].packageData.build).to.equal(
-      'py37h48b2cff_0',
+      'py37h48b2cff_0'
     )
   })

@@ -38,7 +38,7 @@ describe('condaFetch', () => {
   beforeEach(() => {
     fetch = CondaFetch({
       logger: { info: sinon.stub() },
-      cdFileLocation: 'test/fixtures/conda/fragment',
+      cdFileLocation: 'test/fixtures/conda/fragment'
     })

     fetch.getChannelData = sinon.stub().resolves(JSON.parse(fs.readFileSync('test/fixtures/conda/channeldata.json')))
@@ -55,7 +55,7 @@ describe('condaFetch', () => {
     expect(result.url).to.be.contains('cd:/conda/conda-forge/linux-64/21cmfast/3.0.2')
     expect(result.document.hashes).to.be.deep.equal({
       sha1: '9b2f4958826956be03cf3793dbdb663a53a8a1f1',
-      sha256: '1154fceeb5c4ee9bb97d245713ac21eb1910237c724d2b7103747215663273c2',
+      sha256: '1154fceeb5c4ee9bb97d245713ac21eb1910237c724d2b7103747215663273c2'
     })
     expect(result.document.location).to.be.a.string
     expect(result.document.releaseDate).to.match(/\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z/)
@@ -105,7 +105,7 @@ describe('condaFetch', () => {

   it('fetch package with version, architecture, and build version', async () => {
     const result = await fetch.handle(
-      new Request('test', 'cd:/conda/conda-forge/linux-64/21cmfast/3.0.2-py37hd45b216_1'),
+      new Request('test', 'cd:/conda/conda-forge/linux-64/21cmfast/3.0.2-py37hd45b216_1')
     )
     verifyFetch(result.fetchResult)
   })
@@ -117,10 +117,10 @@ describe('condaFetch', () => {

   it('reports failed package matching', async () => {
     const result = await fetch.handle(
-      new Request('test', 'cd:/conda/conda-forge/linux-64/21cmfast/3.0.2-py9999_invalid'),
+      new Request('test', 'cd:/conda/conda-forge/linux-64/21cmfast/3.0.2-py9999_invalid')
     )
     expect(result.outcome).to.equal(
-      'Missing package with matching spec (version: 3.0.2, buildVersion: py9999_invalid) in linux-64 repository',
+      'Missing package with matching spec (version: 3.0.2, buildVersion: py9999_invalid) in linux-64 repository'
     )
   })

@@ -128,7 +128,7 @@ describe('condaFetch', () => {
     fetch.getRepoData = sinon.stub().resolves(null)
     const result = await fetch.handle(new Request('test', 'cd:/conda/conda-forge/linux-64/21cmfast/3.0.2'))
     expect(result.outcome).to.equal(
-      'failed to fetch and parse repodata json file for channel conda-forge in architecture linux-64',
+      'failed to fetch and parse repodata json file for channel conda-forge in architecture linux-64'
     )
   })

@@ -144,7 +144,7 @@ describe('condaSrcFetch', () => {
   beforeEach(() => {
     fetch = CondaFetch({
       logger: { info: sinon.stub() },
-      cdFileLocation: 'test/fixtures/conda/fragment',
+      cdFileLocation: 'test/fixtures/conda/fragment'
     })

     fetch.getChannelData = sinon.stub().resolves(JSON.parse(fs.readFileSync('test/fixtures/conda/channeldata.json')))
@@ -160,7 +160,7 @@ describe('condaSrcFetch', () => {
     expect(result.url).to.be.contains('cd:/condasrc/conda-forge/-/21cmfast/3.3.1')
     expect(result.document.hashes).to.be.deep.equal({
       sha1: '92ec2a84d2377426ff51ad3b07a75921245c8881',
-      sha256: '96f5809d111a8a137c25758fa3f41586ea44cecba7ae191518767895afc7b3c6',
+      sha256: '96f5809d111a8a137c25758fa3f41586ea44cecba7ae191518767895afc7b3c6'
     })
     expect(result.document.location).to.be.a.string
     expect(result.document.releaseDate).to.match(/\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z/)
diff --git a/test/unit/providers/fetch/cratesioFetchTests.js b/test/unit/providers/fetch/cratesioFetchTests.js
index b502790a..7dac184e 100644
--- a/test/unit/providers/fetch/cratesioFetchTests.js
+++ b/test/unit/providers/fetch/cratesioFetchTests.js
@@ -14,8 +14,8 @@ let Fetch
 const hashes = {
   'bitflags-1.0.4.crate': {
     sha1: 'fbc1ce9fa176ed7a7e15cfc6d1f6c2389f536361',
-    sha256: '228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12',
-  },
+    sha256: '228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12'
+  }
 }

 function pickFile(url) {
@@ -25,7 +25,7 @@ function pickFile(url) {

 describe('crateFetch workflow', () => {
   beforeEach(() => {
-    const requestPromiseStub = (options) => {
+    const requestPromiseStub = options => {
       if (options && options.url) {
         if (options.url.includes('error')) throw new Error('yikes')
         if (options.url.includes('missing')) throw { statusCode: 404 }
@@ -39,7 +39,7 @@ describe('crateFetch workflow', () => {
       return response
     }
     Fetch = proxyquire('../../../../providers/fetch/cratesioFetch', {
-      'request-promise-native': requestPromiseStub,
+      'request-promise-native': requestPromiseStub
     })
   })

@@ -62,7 +62,7 @@ describe('crateFetch workflow', () => {
     const handler = setup()
     handler._getRegistryData = () => {
       return {
-        version: { num: '1.0.4', dl_path: 'error' },
+        version: { num: '1.0.4', dl_path: 'error' }
       }
     }
     const request = new Request('test', 'cd:/crate/cratesio/-/bitflags/1.0.4')
@@ -79,7 +79,7 @@ describe('crateFetch workflow', () => {
     const handler = setup()
     handler._getRegistryData = () => {
       return {
-        version: { num: '1.0.4', dl_path: 'missing' },
+        version: { num: '1.0.4', dl_path: 'missing' }
       }
     }
     const request = new Request('test', 'cd:/crate/cratesio/-/bitflags/1.0.4')
@@ -125,7 +125,7 @@ describe('crateFetch', () => {
     const crateFetch = mockCrateFetch({
       registryData: () => {
         return { manifest: null, version: null }
-      },
+      }
     })
     const request = new Request('crate', 'cd:/crate/cratesio/-/name/0.1.0')
     await crateFetch.handle(request)
@@ -139,7 +139,7 @@ describe('crateFetch', () => {
     const crateFetch = mockCrateFetch({
       registryData: () => {
         return { manifest: {}, version: { num: '0.5.0', crate: 'name' } }
-      },
+      }
     })
     const request = await crateFetch.handle(new Request('crate', 'cd:/crate/cratesio/-/name/0.1.0'))
     request.fetchResult.copyTo(request)
@@ -150,7 +150,7 @@ describe('crateFetch', () => {
     const crateFetch = mockCrateFetch({
       registryData: () => {
         return { manifest: {}, version: { num: '0.1.0', crate: 'name' } }
-      },
+      }
     })
     const request = await crateFetch.handle(new Request('crate', 'cd:/crate/cratesio/-/naME/0.1.0'))
     request.fetchResult.copyTo(request)
diff --git a/test/unit/providers/fetch/debianFetchTests.js b/test/unit/providers/fetch/debianFetchTests.js
index 106f1c0e..bb0b4592 100644
--- a/test/unit/providers/fetch/debianFetchTests.js
+++ b/test/unit/providers/fetch/debianFetchTests.js
@@ -16,18 +16,18 @@ describe('Debian utility functions', () => {
     expect((await fetch._getDataFromPackageMapFile(spec('deb', 'debian', '0ad', '0.0.17-1_armhf'))).length).to.equal(9)
     expect((await fetch._getDataFromPackageMapFile(spec('debsrc', 'debian', '0ad', '0.0.17-1'))).length).to.equal(9)
     expect(
-      (await fetch._getDataFromPackageMapFile(spec('deb', 'debian', '0ad', '0.0.23-1~bpo9+1_amd64'))).length,
+      (await fetch._getDataFromPackageMapFile(spec('deb', 'debian', '0ad', '0.0.23-1~bpo9+1_amd64'))).length
     ).to.equal(3)
     expect(
-      (await fetch._getDataFromPackageMapFile(spec('debsrc', 'debian', '0ad', '0.0.23-1~bpo9+1'))).length,
+      (await fetch._getDataFromPackageMapFile(spec('debsrc', 'debian', '0ad', '0.0.23-1~bpo9+1'))).length
     ).to.equal(3)
     expect((await fetch._getDataFromPackageMapFile(spec('deb', 'debian', 'amiwm', '0.21pl2-1_amd64'))).length).to.equal(
-      7,
+      7
     )
     expect((await fetch._getDataFromPackageMapFile(spec('debsrc', 'debian', 'amiwm', '0.21pl2-1'))).length).to.equal(7)
     expect(
-      (await fetch._getDataFromPackageMapFile(spec('deb', 'debian', 'non-existant', 'non-existant'))).length,
+      (await fetch._getDataFromPackageMapFile(spec('deb', 'debian', 'non-existant', 'non-existant'))).length
     ).to.equal(0)
   })

@@ -44,15 +44,15 @@ describe('Debian utility functions', () => {
     const spec1 = spec('deb', 'debian', '0ad', '0.0.17-1_armhf')
     const registryData1 = await fetch._getDataFromPackageMapFile(spec1)
     expect(fetch._getDownloadUrls(spec1, registryData1).binary).to.equal(
-      'http://ftp.debian.org/debian/pool/main/0/0ad/0ad_0.0.17-1_armhf.deb',
+      'http://ftp.debian.org/debian/pool/main/0/0ad/0ad_0.0.17-1_armhf.deb'
     )
     const spec2 = spec('debsrc', 'debian', '0ad', '0.0.17-1')
     const registryData2 = await fetch._getDataFromPackageMapFile(spec1)
     expect(fetch._getDownloadUrls(spec2, registryData2).source).to.equal(
-      'http://ftp.debian.org/debian/pool/main/0/0ad/0ad_0.0.17.orig.tar.xz',
+      'http://ftp.debian.org/debian/pool/main/0/0ad/0ad_0.0.17.orig.tar.xz'
     )
     expect(fetch._getDownloadUrls(spec2, registryData2).patches).to.equal(
-      'http://ftp.debian.org/debian/pool/main/0/0ad/0ad_0.0.17-1.debian.tar.xz',
+      'http://ftp.debian.org/debian/pool/main/0/0ad/0ad_0.0.17-1.debian.tar.xz'
     )
   })

@@ -80,17 +80,17 @@ describe('Debian utility functions', () => {
       'public-domain',
       'MPL-1.1',
       'GPL-2.0',
-      'LGPL-2.1',
+      'LGPL-2.1'
     ])
     // Edge cases:
     expect(fetch._parseDeclaredLicenses('License: GPL-1+ or Artistic')).to.deep.equal(['(GPL-1+ OR Artistic)'])
     expect(fetch._parseDeclaredLicenses('License: GPL-2+ and BSD-3-clause')).to.deep.equal(['GPL-2+', 'BSD-3-clause'])
     expect(fetch._parseDeclaredLicenses('License: GPL-2+ or Artistic-2.0, and BSD-3-clause')).to.deep.equal([
       '(GPL-2+ OR Artistic-2.0)',
-      'BSD-3-clause',
+      'BSD-3-clause'
     ])
     expect(fetch._parseDeclaredLicenses('License: Expat or Artistic and Artistic-2.0')).to.deep.equal([
-      '(MIT OR Artistic AND Artistic-2.0)',
+      '(MIT OR Artistic AND Artistic-2.0)'
     ])
   })
 })

@@ -98,8 +98,8 @@ describe('Debian utility functions', () => {
 const hashes = {
   '0ad_0.0.17-1_armhf.deb': {
     sha1: '18dc18cb6397aa968408e554f3ff0e2010554b0d',
-    sha256: '2906a834ca562152afbf2f25315727608c4b25566960cf9ee8b15e8110850fb8',
-  },
+    sha256: '2906a834ca562152afbf2f25315727608c4b25566960cf9ee8b15e8110850fb8'
+  }
 }

 describe('Debian fetching', () => {
@@ -130,7 +130,7 @@ describe('Debian fetching', () => {
     expect(request.document.hashes.sha256).to.be.equal(hashes['0ad_0.0.17-1_armhf.deb']['sha256'])
     expect(request.document.releaseDate.getFullYear()).to.be.equal(2014)
     expect(request.document.copyrightUrl).to.be.equal(
-      'https://metadata.ftp-master.debian.org/changelogs/main/0/0ad/0ad_0.0.17-1_copyright',
+      'https://metadata.ftp-master.debian.org/changelogs/main/0/0ad/0ad_0.0.17-1_copyright'
     )
     expect(request.document.declaredLicenses).to.deep.equal(['MIT', 'BSD-3-clause'])
   })
@@ -159,6 +159,6 @@ function spec(type, provider, name, revision) {
     namespace,
     name,
     revision,
-    toUrl: () => `cd:/${type}/${provider}/${namespace}/${name}/${revision}`,
+    toUrl: () => `cd:/${type}/${provider}/${namespace}/${name}/${revision}`
   }
 }
diff --git a/test/unit/providers/fetch/dispatcherTests.js b/test/unit/providers/fetch/dispatcherTests.js
index 4af2ea14..20f58c9a 100644
--- a/test/unit/providers/fetch/dispatcherTests.js
+++ b/test/unit/providers/fetch/dispatcherTests.js
@@ -57,7 +57,7 @@ describe('fetchDispatcher cache fetch result', () => {
   })

   afterEach(() => {
-    Object.values(resultCache).forEach((fetched) => fetched.cleanup())
+    Object.values(resultCache).forEach(fetched => fetched.cleanup())
   })

   function setupDispatcher(fetcher) {
@@ -72,14 +72,14 @@
       processorsStub,
       filterStub,
       mockResultCache(resultCache),
-      inProgressPromiseCache,
+      inProgressPromiseCache
     )
   }

   function mockResultCache(cache) {
     return {
-      get: (key) => cache[key],
-      setWithConditionalExpiry: (key, value) => (cache[key] = value),
+      get: key => cache[key],
+      setWithConditionalExpiry: (key, value) => (cache[key] = value)
     }
   }

@@ -112,7 +112,7 @@ describe('fetchDispatcher cache fetch result', () => {

   describe('cache maven fetch result', () => {
     function setupMavenFetch() {
-      const fileSupplier = (url) => {
+      const fileSupplier = url => {
         let fileName
         if (url.includes('solrsearch')) fileName = 'swt-3.3.0-v3346.json'
         if (url.endsWith('.pom')) fileName = 'swt-3.3.0-v3346.pom'
@@ -123,7 +123,7 @@ describe('fetchDispatcher cache fetch result', () => {
       return MavenFetch({
         logger: { log: sinon.stub() },
         requestPromise: createRequestPromiseStub(fileSupplier),
-        requestStream: createGetStub(fileSupplier),
+        requestStream: createGetStub(fileSupplier)
       })
     }

@@ -205,7 +205,7 @@ describe('fetchDispatcher cache fetch result', () => {
       return {
         manifest: { version },
         versions: { [version]: { test: true } },
-        time: { [version]: '42' },
+        time: { [version]: '42' }
       }
     }

@@ -213,7 +213,7 @@ describe('fetchDispatcher cache fetch result', () => {
     beforeEach(() => {
       const NpmFetch = proxyquire('../../../../providers/fetch/npmjsFetch', {
-        'request-promise-native': npmRegistryRequestStub,
+        'request-promise-native': npmRegistryRequestStub
       })
       const npmFetch = NpmFetch({ logger: { log: sinon.stub() } })
       npmFetch._getPackage = sinon
@@ -236,7 +236,7 @@ describe('fetchDispatcher cache fetch result', () => {
       rubyGemsFetch._getRegistryData = sinon.stub().resolves({
         name: 'small',
         version: '0.5.1',
-        gem_uri: 'https://rubygems.org/gems/small-0.5.1.gem',
+        gem_uri: 'https://rubygems.org/gems/small-0.5.1.gem'
       })
       rubyGemsFetch._getPackage = sinon
         .stub()
@@ -262,10 +262,7 @@ describe('fetchDispatcher cache fetch result', () => {
         .stub()
         .callsFake(
           async (spec, registryData, destination) =>
-            await getPacakgeStub(
-              'test/fixtures/composer/symfony-polyfill-mbstring-v1.11.0-0-gfe5e94c.zip',
-              destination,
-            ),
+            await getPacakgeStub('test/fixtures/composer/symfony-polyfill-mbstring-v1.11.0-0-gfe5e94c.zip', destination)
         )

       fetchDispatcher = setupDispatcher(packagistFetch)
@@ -277,7 +274,7 @@ describe('fetchDispatcher cache fetch result', () => {
   })

   describe('cache CrateioFetch result', () => {
-    const requestPromiseStub = (options) => {
+    const requestPromiseStub = options => {
       const body = fs.readFileSync('test/fixtures/crates/bitflags.json')
       if (options && options.json) return JSON.parse(body)
       const response = new PassThrough()
@@ -291,7 +288,7 @@ describe('fetchDispatcher cache fetch result', () => {
     beforeEach(() => {
       const CrateioFetch = proxyquire('../../../../providers/fetch/cratesioFetch', {
-        'request-promise-native': requestPromiseStub,
+        'request-promise-native': requestPromiseStub
       })
       const packagistFetch = CrateioFetch({ logger: { log: sinon.stub() } })
       fetchDispatcher = setupDispatcher(packagistFetch)
@@ -308,7 +305,7 @@ describe('fetchDispatcher cache fetch result', () => {
     beforeEach(() => {
       const DebianFetch = proxyquire('../../../../providers/fetch/debianFetch', {
-        'memory-cache': memCacheStub,
+        'memory-cache': memCacheStub
       })
       const fetch = DebianFetch({ logger: { info: sinon.stub() }, cdFileLocation: 'test/fixtures/debian/fragment' })
       fetch._download = async (downloadUrl, destination) =>
@@ -334,8 +331,8 @@ describe('fetchDispatcher cache fetch result', () => {
     const successHttpStub = {
       get: sinon.stub().returns({
         status: 200,
-        data: httpContent,
-      }),
+        data: httpContent
+      })
     }

     let fetchDispatcher

     beforeEach(() => {
       const GoFetch = proxyquire('../../../../providers/fetch/goFetch', {
         request: { get: createGetStub(fileSupplier) },
-        'request-promise-native': createRequestPromiseStub(fileSupplier),
+        'request-promise-native': createRequestPromiseStub(fileSupplier)
       })
       const fetch = GoFetch({ logger: { info: sinon.stub() }, http: successHttpStub })
       fetchDispatcher = setupDispatcher(fetch)
@@ -355,7 +352,7 @@ describe('fetchDispatcher cache fetch result', () => {
   })

   describe('cache NugetFetch result', () => {
-    const fileSupplier = (url) => {
+    const fileSupplier = url => {
       let fileName = null
       if (url.includes('catalog')) fileName = 'xunit.core.2.4.1.catalog.json'
       if (url.endsWith('index.json')) fileName = 'xunit.core.index.json'
@@ -384,8 +381,8 @@ describe('fetchDispatcher cache fetch result', () => {
         requestretry: {
           defaults: () => {
             return { get: requestPromiseStub }
-          },
-        },
+          }
+        }
       })
       const fetch = NugetFetch({ logger: { info: sinon.stub() } })
       fetchDispatcher = setupDispatcher(fetch)
@@ -403,9 +400,9 @@ describe('fetchDispatcher cache fetch result', () => {
         requestretry: {
           defaults: () => {
             return { get: sinon.stub().resolves({ body: loadJson('pod/versions.json'), statusCode: 200 }) }
-          },
+          }
         },
-        'request-promise-native': sinon.stub().resolves(loadJson('pod/registryData.json')),
+        'request-promise-native': sinon.stub().resolves(loadJson('pod/registryData.json'))
       })
       const fetch = PodFetch({ logger: { info: sinon.stub() } })
       fetch._getPackage = sinon.stub().resolves('/tmp/cd-pYKk9q/SwiftLCS-1.0')
@@ -418,8 +415,8 @@ describe('fetchDispatcher cache fetch result', () => {
   })
 })

-const createRequestPromiseStub = (fileSupplier) => {
-  return (options) => {
+const createRequestPromiseStub = fileSupplier => {
+  return options => {
     if (options.url) {
       if (options.url.includes('error')) throw new Error('yikes')
       if (options.url.includes('code')) throw { statusCode: 500, message: 'Code' }
@@ -430,7 +427,7 @@ const createRequestPromiseStub = (fileSupplier) => {
   }
 }

-const createGetStub = (fileSupplier) => {
+const createGetStub = fileSupplier => {
   return (url, callback) => {
     const response = new PassThrough()
     const file = `test/fixtures/${fileSupplier(url)}`
@@ -449,6 +446,6 @@ const getPacakgeStub = async (file, destination) => {
   await promisify(fs.copyFile)(file, destination)
 }

-const loadJson = (fileName) => {
+const loadJson = fileName => {
   return JSON.parse(fs.readFileSync(`test/fixtures/${fileName}`))
 }
diff --git a/test/unit/providers/fetch/gitClonerTests.js b/test/unit/providers/fetch/gitClonerTests.js
index 2d49696e..59da2d04 100644
--- a/test/unit/providers/fetch/gitClonerTests.js
+++ b/test/unit/providers/fetch/gitClonerTests.js
@@ -9,16 +9,16 @@ const cloner = gitCloner({})
 describe('building git urls', () => {
   it('builds a gitlab url', () => {
     expect(cloner._buildUrl(spec('git', 'gitlab', 'namespace', 'repo', 'abc123'))).to.equal(
-      gitlab_stub + 'namespace/repo.git',
+      gitlab_stub + 'namespace/repo.git'
     )
     expect(cloner._buildUrl(spec('git', 'gitlab', 'name.space.thing', 'repo', 'abc123'))).to.equal(
-      gitlab_stub + 'name/space/thing/repo.git',
+      gitlab_stub + 'name/space/thing/repo.git'
     )
   })

   it('builds a github url', () => {
     expect(cloner._buildUrl(spec('git', 'github', 'namespace', 'repo', 'abc123'))).to.equal(
-      github_stub + 'namespace/repo.git',
+      github_stub + 'namespace/repo.git'
     )
   })
 })
@@ -39,7 +39,7 @@ describe('fetch result', () => {
     expect(request.meta.gitSize).to.be.equal(532)
     expect(request.contentOrigin).to.be.equal('origin')
     expect(request.casedSpec.toUrl()).to.be.equal(
-      'cd:/git/github/palantir/refreshable/deef80a18aa929943e5dab1dba7276c231c84519',
+      'cd:/git/github/palantir/refreshable/deef80a18aa929943e5dab1dba7276c231c84519'
     )
     expect(request.document.size).to.be.equal(532)
     expect(request.document.releaseDate.toISOString()).to.be.equal('2021-04-08T13:27:49.000Z')
diff --git a/test/unit/providers/fetch/goFetchTests.js b/test/unit/providers/fetch/goFetchTests.js
index 9893433f..7425e708 100644
--- a/test/unit/providers/fetch/goFetchTests.js
+++ b/test/unit/providers/fetch/goFetchTests.js
@@ -15,19 +15,19 @@ describe('Go utility functions', () => {
   it('builds URLs', () => {
     const fetch = GoFetch({})
     expect(fetch._buildUrl(spec('go', 'golang', 'cloud.google.com', 'go', 'v0.56.0'))).to.equal(
-      goBaseURL + 'cloud.google.com/go/@v/v0.56.0.zip',
+      goBaseURL + 'cloud.google.com/go/@v/v0.56.0.zip'
     )
     expect(fetch._buildUrl(spec('go', 'golang', 'cloud.google.com', 'go', 'v0.56.0'), '.mod')).to.equal(
-      goBaseURL + 'cloud.google.com/go/@v/v0.56.0.mod',
+      goBaseURL + 'cloud.google.com/go/@v/v0.56.0.mod'
     )
     expect(fetch._buildUrl(spec('go', 'golang', '-', 'collectd.org', 'v0.5.0'))).to.equal(
-      goBaseURL + 'collectd.org/@v/v0.5.0.zip',
+      goBaseURL + 'collectd.org/@v/v0.5.0.zip'
     )
     expect(fetch._buildUrl(spec('go', 'golang', 'github.com%2fAzure%2fazure-event-hubs-go', 'v3', 'v3.2.0'))).to.equal(
-      goBaseURL + 'github.com/Azure/azure-event-hubs-go/v3/@v/v3.2.0.zip',
+      goBaseURL + 'github.com/Azure/azure-event-hubs-go/v3/@v/v3.2.0.zip'
     )
     expect(fetch._buildUrl(spec('go', 'golang', 'github.com%2FAzure%2Fazure-event-hubs-go', 'v3', 'v3.2.0'))).to.equal(
-      goBaseURL + 'github.com/Azure/azure-event-hubs-go/v3/@v/v3.2.0.zip',
+      goBaseURL + 'github.com/Azure/azure-event-hubs-go/v3/@v/v3.2.0.zip'
     )
   })
 })
@@ -35,8 +35,8 @@ describe('Go utility functions', () => {
 const hashes = {
   'v1.3.0.zip': {
     sha1: '270d80279fca2d21c401dd40b6fc6370c41bfd94',
-    sha256: '03872ee7d6747bc2ee0abadbd4eb09e60f6df17d0a6142264abe8a8a00af50e7',
-  },
+    sha256: '03872ee7d6747bc2ee0abadbd4eb09e60f6df17d0a6142264abe8a8a00af50e7'
+  }
 }

 let Fetch
@@ -53,7 +53,7 @@ describe('Go Proxy fetching', () => {
   let successHttpStub

   beforeEach(() => {
-    const requestPromiseStub = (options) => {
+    const requestPromiseStub = options => {
       if (options.url) {
         expect(options.url).to.contain(goBaseURL)
         if (options.url.includes('error')) throw new Error('yikes')
@@ -83,12 +83,12 @@ describe('Go Proxy fetching', () => {
     successHttpStub = {
       get: sinon.stub().returns({
         status: 200,
-        data: httpContent,
-      }),
+        data: httpContent
+      })
     }
     Fetch = proxyquire('../../../../providers/fetch/goFetch', {
       request: { get: getStub },
-      'request-promise-native': requestPromiseStub,
+      'request-promise-native': requestPromiseStub
     })
   })

@@ -160,17 +160,17 @@ describe('Go Proxy fetching', () => {
     const handler = Fetch({
       logger: {
         log: sinon.spy(),
-        info: sinon.spy(),
+        info: sinon.spy()
       },
       http: {
         get: sinon.stub().throws(
           merge(new Error(), {
             response: {
-              status: 429,
-            },
-          }),
-        ),
-      },
+              status: 429
+            }
+          })
+        )
+      }
     })
     const request = await handler.handle(new Request('test', 'cd:/go/golang/rsc.io/quote/v1.3.0'))
     expect(request.processControl).to.equal('requeue')
@@ -180,17 +180,17 @@ describe('Go Proxy fetching', () => {
     const handler = Fetch({
       logger: {
         log: sinon.spy(),
-        info: sinon.spy(),
+        info: sinon.spy()
       },
       http: {
         get: sinon.stub().throws(
           merge(new Error(), {
             response: {
-              status: 429,
-            },
-          }),
-        ),
-      },
+              status: 429
+            }
+          })
+        )
+      }
     })
     let request = new Request('test', 'cd:/go/golang/rsc.io/quote/v1.3.0')
     request.attemptCount = 5
@@ -202,17 +202,17 @@ describe('Go Proxy fetching', () => {
     const handler = Fetch({
       logger: {
         log: sinon.spy(),
-        info: sinon.spy(),
+        info: sinon.spy()
       },
       http: {
         get: sinon.stub().throws(
           merge(new Error(), {
             response: {
-              status: 404,
-            },
-          }),
-        ),
-      },
+              status: 404
+            }
+          })
+        )
+      }
     })
     const request = await handler.handle(new Request('test', 'cd:/go/golang/rsc.io/quote/v1.3.0'))
     expect(request.fetchResult.document.registryData?.licenses).to.be.undefined
@@ -223,7 +223,7 @@ describe('Go Proxy fetching', () => {
     const handler = Fetch({
       logger: {
         log: sinon.spy(),
-        info,
+        info
       },
       http: {
         get: sinon.stub().returns({
@@ -235,9 +235,9 @@ describe('Go Proxy fetching', () => {
           HTML has changed
-          `,
-        }),
-      },
+          `
+        })
+      }
     })
     const request = await handler.handle(new Request('test', 'cd:/go/golang/rsc.io/quote/v1.3.0'))
     expect(request.fetchResult.document.registryData?.licenses).to.be.undefined
diff --git a/test/unit/providers/fetch/gradlePluginFetchTests.js b/test/unit/providers/fetch/gradlePluginFetchTests.js
index 023b3d7f..50bbf1bd 100644
--- a/test/unit/providers/fetch/gradlePluginFetchTests.js
+++ b/test/unit/providers/fetch/gradlePluginFetchTests.js
@@ -14,13 +14,13 @@ describe('Gradle plugin fetch', () => {
     type: 'maven',
     provider: 'gradleplugin',
     namespace: 'io.github.lognet',
-    name: 'grpc-spring-boot-starter-gradle-plugin',
+    name: 'grpc-spring-boot-starter-gradle-plugin'
   }

   it('get latest version from maven meta data', async () => {
     const gradleFetch = GradlePluginFetch({
       logger: { log: sinon.stub() },
-      requestPromise: sinon.stub().resolves(fs.readFileSync('test/fixtures/maven/maven-metadata.xml')),
+      requestPromise: sinon.stub().resolves(fs.readFileSync('test/fixtures/maven/maven-metadata.xml'))
     })
     const latest = await gradleFetch._getLatestVersion(spec)
     expect(latest).to.be.eq('4.5.10')
@@ -29,7 +29,7 @@ describe('Gradle plugin fetch', () => {
   it('no latest version', async () => {
     const gradleFetch = GradlePluginFetch({
       logger: { log: sinon.stub() },
-      requestPromise: sinon.stub().resolves(''),
+      requestPromise: sinon.stub().resolves('')
     })
     const latest = await gradleFetch._getLatestVersion(spec)
     expect(latest).to.be.null
@@ -38,7 +38,7 @@ describe('Gradle plugin fetch', () => {
   it('no maven meta data found', async () => {
     const gradleFetch = GradlePluginFetch({
       logger: { log: sinon.stub() },
-      requestPromise: sinon.stub().rejects({ statusCode: 404 }),
+      requestPromise: sinon.stub().rejects({ statusCode: 404 })
     })
     const latest = await gradleFetch._getLatestVersion(spec)
     expect(latest).to.be.null
@@ -49,8 +49,8 @@ describe('Gradle plugin fetch', () => {
 const hashes = {
   'swt-3.3.0-v3346.jar': {
     sha1: 'd886a6db6b7195911516896feebe3a5d1dddfd46',
-    sha256: '18a3a53a27df164d4db56d0f7f5da2edd25995418d5538f40eb4018347fe1354',
-  },
+    sha256: '18a3a53a27df164d4db56d0f7f5da2edd25995418d5538f40eb4018347fe1354'
+  }
 }

 function pickArtifact(url) {
@@ -77,7 +77,7 @@ describe('Gradle plugin fetch', () => {
   let handler

   beforeEach(() => {
-    const requestPromiseStub = (options) => {
+    const requestPromiseStub = options => {
       const content = contentFromFile(options.url)
       return options.json ? JSON.parse(content) : content
     }
@@ -91,7 +91,7 @@ describe('Gradle plugin fetch', () => {
     handler = GradlePluginFetch({
       logger: { log: sinon.stub(), error: sinon.stub() },
       requestPromise: requestPromiseStub,
-      requestStream: getStub,
+      requestStream: getStub
     })
   })

@@ -99,7 +99,7 @@ describe('Gradle plugin fetch', () => {
     const url = handler._buildBaseUrl({
       type: 'maven',
       provider: 'gradleplugin',
-      name: 'grpc-spring-boot-starter-gradle-plugin',
+      name: 'grpc-spring-boot-starter-gradle-plugin'
     })
     //should not fail
     expect(url).not.to.be.undefined
@@ -127,7 +127,7 @@ describe('Gradle plugin fetch', () => {

   it('test success with sourcearchive', async () => {
     const request = await handler.handle(
-      new Request('test', 'cd:/sourcearchive/gradleplugin/org.eclipse/swt/3.3.0-v3344'),
+      new Request('test', 'cd:/sourcearchive/gradleplugin/org.eclipse/swt/3.3.0-v3344')
     )
     verifySuccess(request.fetchResult)
     expect(request.fetchResult.casedSpec.revision).to.equal('3.3.0-v3344')
@@ -144,7 +144,7 @@ describe('Gradle plugin fetch', () => {
   it('handle no pom found', async () => {
     handler._handleRequestPromise = sinon.stub().rejects({ statusCode: 404 })
     const request = await handler.handle(
-      new Request('test', 'cd:/sourcearchive/gradleplugin/org.eclipse/swt/3.3.0-v3344'),
+      new Request('test', 'cd:/sourcearchive/gradleplugin/org.eclipse/swt/3.3.0-v3344')
     )
     expect(request.processControl).to.be.equal('skip')
   })
@@ -157,7 +157,7 @@ describe('Gradle plugin fetch', () => {
       return response
     }
     const request = await handler.handle(
-      new Request('test', 'cd:/sourcearchive/gradleplugin/org.eclipse/swt/3.3.0-v3344'),
+      new Request('test', 'cd:/sourcearchive/gradleplugin/org.eclipse/swt/3.3.0-v3344')
     )
     expect(request.processControl).to.be.equal('skip')
   })
diff --git a/test/unit/providers/fetch/mavencentralFetchTests.js b/test/unit/providers/fetch/mavencentralFetchTests.js
index 2759b62e..1531e5b2 100644
--- a/test/unit/providers/fetch/mavencentralFetchTests.js
+++ b/test/unit/providers/fetch/mavencentralFetchTests.js
@@ -17,10 +17,10 @@ describe('Maven Central utility functions', () => {
     expect(fetch._buildUrl(spec('maven', 'g1', 'a1', '1.2.3'), '.pom')).to.equal(stub + 'g1/a1/1.2.3/a1-1.2.3.pom')
     expect(fetch._buildUrl(spec('maven', 'g1', 'a1', '1.2.3'))).to.equal(stub + 'g1/a1/1.2.3/a1-1.2.3.jar')
     expect(fetch._buildUrl(spec('sourcearchive', 'g1', 'a1', '1.2.3'), '-sources.jar')).to.equal(
-      stub + 'g1/a1/1.2.3/a1-1.2.3-sources.jar',
+      stub + 'g1/a1/1.2.3/a1-1.2.3-sources.jar'
     )
     expect(fetch._buildUrl(spec('maven', 'com.g1', 'a1.foo', '1.2.3'))).to.equal(
-      stub + 'com/g1/a1.foo/1.2.3/a1.foo-1.2.3.jar',
+      stub + 'com/g1/a1.foo/1.2.3/a1.foo-1.2.3.jar'
     )
     expect(fetch._buildUrl(spec('maven', 'g1', 'a1', '1.2.3'), '.jar')).to.equal(stub + 'g1/a1/1.2.3/a1-1.2.3.jar')
     expect(fetch._buildUrl(spec('maven', 'g1', 'a1', '1.2.3'), '.aar')).to.equal(stub + 'g1/a1/1.2.3/a1-1.2.3.aar')
@@ -34,11 +34,11 @@ describe('Maven Central utility functions', () => {
   it('gets releaseDate from pomProperties', async () => {
     const fetch = MavenFetch({
       logger: { log: sinon.stub() },
-      requestPromise: sinon.stub().resolves({}),
+      requestPromise: sinon.stub().resolves({})
     })
     sinon.replace(fs, 'exists', (loc, cb) => cb(true))
     sinon.replace(fs, 'readFile', (loc, cb) =>
-      cb(null, '#Generated by Maven\n#Fri May 13 12:26:22 GMT+01:00 2011\ngroupId=g1\nartifactId=a1\nversion=1.2.3'),
+      cb(null, '#Generated by Maven\n#Fri May 13 12:26:22 GMT+01:00 2011\ngroupId=g1\nartifactId=a1\nversion=1.2.3')
     )
     const date = await fetch._getReleaseDate('/tmp/', spec('maven', 'g1', 'a1', '1.2.3'))
@@ -53,8 +53,8 @@ function spec(type, namespace, name, revision) {
 const hashes = {
   'swt-3.3.0-v3346.jar': {
     sha1: 'd886a6db6b7195911516896feebe3a5d1dddfd46',
-    sha256: '18a3a53a27df164d4db56d0f7f5da2edd25995418d5538f40eb4018347fe1354',
-  },
+    sha256: '18a3a53a27df164d4db56d0f7f5da2edd25995418d5538f40eb4018347fe1354'
+  }
 }

 function pickArtifact(url) {
@@ -68,7 +68,7 @@ describe('MavenCentral fetching', () => {
   let handler

   beforeEach(() => {
-    const requestPromiseStub = (options) => {
+    const requestPromiseStub = options => {
       if (options.url) {
         if (options.url.includes('error')) throw new Error('yikes')
         if (options.url.includes('code')) throw { statusCode: 500, message: 'Code' }
@@ -94,7 +94,7 @@ describe('MavenCentral fetching', () => {
     handler = MavenFetch({
       logger: { log: sinon.stub() },
       requestPromise: requestPromiseStub,
-      requestStream: getStub,
+      requestStream: getStub
     })
   })

@@ -122,7 +122,7 @@ describe('MavenCentral fetching', () => {
     }
     handler.createTempFile = () => {
       return {
-        name: '/tmp/random',
+        name: '/tmp/random'
       }
     }
     handler._getArtifact = () => {}
@@ -171,12 +171,12 @@ const dummyPom1 = {
         license: [
           {
             name: ['Eclipse Public License - v 1.0'],
-            url: ['http://www.eclipse.org/org/documents/epl-v10.html'],
-          },
-        ],
-      },
-    ],
-  },
+            url: ['http://www.eclipse.org/org/documents/epl-v10.html']
+          }
+        ]
+      }
+    ]
+  }
 }

 const dummyPom2 = {
@@ -189,12 +189,12 @@ const dummyPom2 = {
         license: [
           {
             name: ['Eclipse Public License - v 1.0'],
-            url: ['http://www.eclipse.org/org/documents/epl-v10.html'],
-          },
-        ],
-      },
-    ],
-  },
+            url: ['http://www.eclipse.org/org/documents/epl-v10.html']
+          }
+        ]
+      }
+    ]
+  }
 }

 const dummyMerged = {
@@ -205,12 +205,12 @@ const dummyMerged = {
         license: [
           {
             name: ['Eclipse Public License - v 1.0'],
-            url: ['http://www.eclipse.org/org/documents/epl-v10.html'],
-          },
-        ],
-      },
+            url: ['http://www.eclipse.org/org/documents/epl-v10.html']
+          }
+        ]
+      }
     ],
   modelVersion: ['4.0.0'],
   name: ['Standard Widget Toolkit'],
-  version: ['3.3.0-v3346'],
+  version: ['3.3.0-v3346']
 }
diff --git a/test/unit/providers/fetch/mavengoogleFetchTests.js b/test/unit/providers/fetch/mavengoogleFetchTests.js
index 4dc52bbe..099b4f2d 100644
--- a/test/unit/providers/fetch/mavengoogleFetchTests.js
+++ b/test/unit/providers/fetch/mavengoogleFetchTests.js
@@ -17,10 +17,10 @@ describe('Maven Google utility functions', () => {
     expect(fetch._buildUrl(spec('maven', 'g1', 'a1', '1.2.3'), '.pom')).to.equal(stub + 'g1/a1/1.2.3/a1-1.2.3.pom')
     expect(fetch._buildUrl(spec('maven', 'g1', 'a1', '1.2.3'))).to.equal(stub + 'g1/a1/1.2.3/a1-1.2.3.jar')
     expect(fetch._buildUrl(spec('sourcearchive', 'g1', 'a1', '1.2.3'), '-sources.jar')).to.equal(
-      stub + 'g1/a1/1.2.3/a1-1.2.3-sources.jar',
+      stub + 'g1/a1/1.2.3/a1-1.2.3-sources.jar'
     )
     expect(fetch._buildUrl(spec('maven', 'com.g1', 'a1.foo', '1.2.3'))).to.equal(
-      stub + 'com/g1/a1.foo/1.2.3/a1.foo-1.2.3.jar',
+      stub + 'com/g1/a1.foo/1.2.3/a1.foo-1.2.3.jar'
    )
     expect(fetch._buildUrl(spec('maven', 'g1', 'a1', '1.2.3'), '.jar')).to.equal(stub + 'g1/a1/1.2.3/a1-1.2.3.jar')
     expect(fetch._buildUrl(spec('maven', 'g1', 'a1', '1.2.3'), '.aar')).to.equal(stub + 'g1/a1/1.2.3/a1-1.2.3.aar')
@@ -36,7 +36,7 @@ describe('Maven Google utility functions', () => {
     const fs = require('fs')
     sinon.replace(fs, 'exists', (loc, cb) => cb(true))
     sinon.replace(fs, 'readFile', (loc, cb) =>
-      cb(null, '#Generated by Maven\n#Fri May 13 12:26:22 GMT+01:00 2011\ngroupId=g1\nartifactId=a1\nversion=1.2.3'),
+      cb(null, '#Generated by Maven\n#Fri May 13 12:26:22 GMT+01:00 2011\ngroupId=g1\nartifactId=a1\nversion=1.2.3')
     )
     const date = await fetch._getReleaseDate('/tmp/', spec('maven', 'g1', 'a1', '1.2.3'))
@@ -51,8 +51,8 @@ function spec(type, namespace, name, revision) {
 const hashes = {
   'swt-3.3.0-v3346.jar': {
     sha1: 'd886a6db6b7195911516896feebe3a5d1dddfd46',
-    sha256: '18a3a53a27df164d4db56d0f7f5da2edd25995418d5538f40eb4018347fe1354',
-  },
+    sha256: '18a3a53a27df164d4db56d0f7f5da2edd25995418d5538f40eb4018347fe1354'
+  }
 }

 function pickArtifact(url) {
@@ -66,7 +66,7 @@ describe('MavenGoogle fetching', () => {
   let handler

   beforeEach(() => {
-    const requestPromiseStub = (options) => {
+    const requestPromiseStub = options => {
       if (options.url) {
         if (options.url.includes('error')) throw new Error('yikes')
         if (options.url.includes('code')) throw { statusCode: 500, message: 'Code' }
@@ -92,7 +92,7 @@ describe('MavenGoogle fetching', () => {
     handler = MavenGoogleFetch({
       logger: { log: sinon.stub() },
       requestPromise: requestPromiseStub,
-      requestStream: getStub,
+      requestStream: getStub
     })
   })

@@ -120,7 +120,7 @@ describe('MavenGoogle fetching', () => {
     }
     handler.createTempFile = () => {
       return {
-        name: '/tmp/random',
+        name: '/tmp/random'
       }
     }
     handler._getArtifact = () => {}
@@ -169,12 +169,12 @@ const dummyPom1 = {
         license: [
           {
             name: ['Eclipse Public License - v 1.0'],
-            url: ['http://www.eclipse.org/org/documents/epl-v10.html'],
-          },
-        ],
-      },
-    ],
-  },
+            url: ['http://www.eclipse.org/org/documents/epl-v10.html']
+          }
+        ]
+      }
+    ]
+  }
 }

 const dummyPom2 = {
@@ -187,12 +187,12 @@ const dummyPom2 = {
         license: [
           {
             name: ['Eclipse Public License - v 1.0'],
-            url: ['http://www.eclipse.org/org/documents/epl-v10.html'],
-          },
-        ],
-      },
-    ],
-  },
+            url: ['http://www.eclipse.org/org/documents/epl-v10.html']
+          }
+        ]
+      }
+    ]
+  }
 }

 const dummyMerged = {
@@ -203,12 +203,12 @@ const dummyMerged = {
         license: [
           {
             name: ['Eclipse Public License - v 1.0'],
-            url: ['http://www.eclipse.org/org/documents/epl-v10.html'],
-          },
-        ],
-      },
+            url: ['http://www.eclipse.org/org/documents/epl-v10.html']
+          }
+        ]
+      }
     ],
   modelVersion: ['4.0.0'],
   name: ['Standard Widget Toolkit'],
-  version: ['3.3.0-v3346'],
+  version: ['3.3.0-v3346']
 }
diff --git a/test/unit/providers/fetch/npmjsFetchTests.js b/test/unit/providers/fetch/npmjsFetchTests.js
index b8277d6a..ddc6653a 100644
--- a/test/unit/providers/fetch/npmjsFetchTests.js
+++ b/test/unit/providers/fetch/npmjsFetchTests.js
@@ -46,14 +46,14 @@ let Fetch
 const hashes = {
   'redie-0.3.0.tgz': {
     sha1: '48581317ac174ac269c398ff946d6c4779145374',
-    sha256: '66185c319680ee41268217c2467e314019e8ba4ea4d8374335fbe29e64a8d19f',
-  },
+    sha256: '66185c319680ee41268217c2467e314019e8ba4ea4d8374335fbe29e64a8d19f'
+  }
 }

 describe('', () => {
   beforeEach(() => {
     const resultBox = {}
-    const requestPromiseStub = (options) => {
+    const requestPromiseStub = options => {
       if (options.url) {
         if (options.url.includes('regError')) throw new Error('yikes')
         if (options.url.includes('missing')) throw { statusCode: 404 }
@@ -73,7 +73,7 @@ describe('', () => {
     }
     Fetch = proxyquire('../../../../providers/fetch/npmjsFetch', {
       request: { get: getStub },
-      'request-promise-native': requestPromiseStub,
+      'request-promise-native': requestPromiseStub
     })
     Fetch._resultBox = resultBox
   })
@@ -133,7 +133,7 @@ function createRegistryData(version) {
   return {
     manifest: { version },
     versions: { [version]: { test: true } },
-    time: { [version]: '42' },
+    time: { [version]: '42' }
   }
 }
diff --git a/test/unit/providers/fetch/nugetFetchTests.js b/test/unit/providers/fetch/nugetFetchTests.js
index 5ae1c900..454bef99 100644
--- a/test/unit/providers/fetch/nugetFetchTests.js
+++ b/test/unit/providers/fetch/nugetFetchTests.js
@@ -33,8 +33,8 @@ let Fetch
 const hashes = {
   'xunit.core.2.4.1.nupkg': {
     sha1: '362ec34f3358c23e2effa87ecfc5de1c4292d60a',
-    sha256: '2a05200082483c7439550e05881fa2e6ed895d26319af30257ccd73f891ccbda',
-  },
+    sha256: '2a05200082483c7439550e05881fa2e6ed895d26319af30257ccd73f891ccbda'
+  }
 }

 function pickFile(url) {
@@ -66,7 +66,7 @@ describe('', () => {
     const requestRetryStub = {
       defaults: () => {
         return { get }
-      },
+      }
     }
     Fetch = proxyquire('../../../../providers/fetch/nugetFetch', { requestretry: requestRetryStub })
   })
diff --git a/test/unit/providers/fetch/packagistFetchTests.js b/test/unit/providers/fetch/packagistFetchTests.js
index 06173b20..43d944f2 100644
--- a/test/unit/providers/fetch/packagistFetchTests.js
+++ b/test/unit/providers/fetch/packagistFetchTests.js
@@ -14,14 +14,14 @@ let Fetch
 const hashes = {
   'symfony-polyfill-mbstring-v1.11.0-0-gfe5e94c.zip': {
     sha1: '8d24c52e593042529ba86549d9920eb4d9649763',
-    sha256: '797a607b7ea7dad62f78a56f3687f2b2108d221b0682d0ea1386db61714dc8a2',
-  },
+    sha256: '797a607b7ea7dad62f78a56f3687f2b2108d221b0682d0ea1386db61714dc8a2'
+  }
 }

 describe('packagistFetch', () => {
   beforeEach(() => {
     const resultBox = {}
-    const requestPromiseStub = (options) => {
+    const requestPromiseStub = options => {
       if (options.url) {
         if (options.url.includes('regError')) throw new Error('Invalid url')
         if (options.url.includes('missing')) throw { statusCode: 404 }
@@ -41,7 +41,7 @@ describe('packagistFetch', () => {
     }
     Fetch = proxyquire('../../../../providers/fetch/packagistFetch', {
       request: { get: getStub },
-      'request-promise-native': requestPromiseStub,
+      'request-promise-native': requestPromiseStub
     })
     Fetch._resultBox = resultBox
   })
@@ -62,7 +62,7 @@ describe('packagistFetch', () => {
     request.fetchResult.copyTo(request)
     expect(request.document.hashes.sha1).to.be.equal(hashes['symfony-polyfill-mbstring-v1.11.0-0-gfe5e94c.zip']['sha1'])
     expect(request.document.hashes.sha256).to.be.equal(
-      hashes['symfony-polyfill-mbstring-v1.11.0-0-gfe5e94c.zip']['sha256'],
+      hashes['symfony-polyfill-mbstring-v1.11.0-0-gfe5e94c.zip']['sha256']
     )
     expect(request.document.dirRoot).to.be.equal('symfony-polyfill-mbstring-fe5e94c')
     expect(request.document.releaseDate).to.equal('2019-02-06T07:57:58+00:00')
diff --git a/test/unit/providers/fetch/podFetchTests.js b/test/unit/providers/fetch/podFetchTests.js
index b8884400..5a8a9a2a 100644
--- a/test/unit/providers/fetch/podFetchTests.js
+++ b/test/unit/providers/fetch/podFetchTests.js
@@ -6,7 +6,7 @@ const proxyquire = require('proxyquire')
 const Request = require('../../../../ghcrawler/lib/request.js')

 describe('podFetch', () => {
-  const loadJson = (fileName) => {
+  const loadJson = fileName => {
     return JSON.parse(fs.readFileSync(`test/fixtures/pod/${fileName}`))
   }

@@ -14,11 +14,11 @@ describe('podFetch', () => {
     requestretry: {
       defaults: () => {
         return {
-          get: sinon.stub().resolves({ body: loadJson('versions.json'), statusCode: 200 }),
+          get: sinon.stub().resolves({ body: loadJson('versions.json'), statusCode: 200 })
         }
-      },
+      }
     },
-    'request-promise-native': sinon.stub().resolves(loadJson('registryData.json')),
+    'request-promise-native': sinon.stub().resolves(loadJson('registryData.json'))
   })

   let fetch
diff --git a/test/unit/providers/fetch/pypiFetchTests.js b/test/unit/providers/fetch/pypiFetchTests.js
index 9e369568..2d6e2da5 100644
--- a/test/unit/providers/fetch/pypiFetchTests.js
+++ b/test/unit/providers/fetch/pypiFetchTests.js
@@ -50,7 +50,7 @@ describe('pypiFetch handle function', () => {
     expect(result.document.releaseDate).to.be.equal('2019-01-12T22:25:58')
     expect(result.document.hashes).to.be.deep.equal({
       sha1: 'd886a6db6b7195911516896feebe3a5d1dddfd46',
-      sha256: '18a3a53a27df164d4db56d0f7f5da2edd25995418d5538f40eb4018347fe1354',
+      sha256: '18a3a53a27df164d4db56d0f7f5da2edd25995418d5538f40eb4018347fe1354'
     })
   })

@@ -58,9 +58,9 @@ describe('pypiFetch handle function', () => {
     // release information in the registry data is empty
     requestGetStub.returns({
       body: {
-        releases: { '1.10.0': [] },
+        releases: { '1.10.0': [] }
       },
-      statusCode: 200,
+      statusCode: 200
     })

     let result = await fetch.handle(new Request('pypi', 'cd:/pypi/pypi/-/dnspython/1.10.0'))
@@ -115,7 +115,7 @@ describe('pypiFetch handle function', () => {
     'LGPLv3+': 'LGPL-3.0-or-later',
     'LGPL-2.0+': 'LGPL-2.0-or-later',
     'LGPL-2.1+': 'LGPL-2.1-or-later',
-    'LGPL-3.0+': 'LGPL-3.0-or-later',
+    'LGPL-3.0+': 'LGPL-3.0-or-later'
   }
   for (const [key, value] of Object.entries(conversions)) {
     expect(spdxCorrect(key)).to.be.equal(value)
diff --git a/test/unit/providers/fetch/rubyGemsFetchTests.js b/test/unit/providers/fetch/rubyGemsFetchTests.js
index 110bee9f..12d9622a 100644
--- a/test/unit/providers/fetch/rubyGemsFetchTests.js
+++ b/test/unit/providers/fetch/rubyGemsFetchTests.js
@@ -12,7 +12,7 @@ describe('rubyGemsFetch', () => {
     fetch._getRegistryData = sinon.stub().resolves({
       name: 'small',
       version: '0.5.1',
-      gem_uri: 'https://rubygems.org/gems/small-0.5.1.gem',
+      gem_uri: 'https://rubygems.org/gems/small-0.5.1.gem'
     })
     fetch._getPackage = sinon
       .stub()
@@ -24,7 +24,7 @@ describe('rubyGemsFetch', () => {
     expect(result.casedSpec.toUrl()).to.be.equal('cd:/ruby/rubygems/-/small/0.5.1')
     expect(result.document.hashes).to.be.deep.equal({
       sha1: 'f343d34992fffa1e4abbb1a2bfae45fcf49123ba',
-      sha256: '2b5e4ba4e915e897d6fe9392c1cd1f5a21f8e7963679fb23f0a1953124772da0',
+      sha256: '2b5e4ba4e915e897d6fe9392c1cd1f5a21f8e7963679fb23f0a1953124772da0'
     })
     expect(result.document.releaseDate).to.contain('2012-05-21')
   }
diff --git a/test/unit/providers/process/abstractClearylDefinedProcessorTests.js b/test/unit/providers/process/abstractClearylDefinedProcessorTests.js
index 6dfd5ba3..9795efe1 100644
--- a/test/unit/providers/process/abstractClearylDefinedProcessorTests.js
+++ b/test/unit/providers/process/abstractClearylDefinedProcessorTests.js
@@ -10,13 +10,13 @@ describe('AbstractClearlyDefinedProcessor interesting file identification', () =
   it('finds files it should', () => {
     const files = ['license', 'License.md', 'LICENSE.HTML', 'LICENSE.txt']
     const processor = new AbstractCDProcessor({})
-    files.forEach((file) => expect(processor._isInterestinglyNamed(file)).to.be.true)
+    files.forEach(file => expect(processor._isInterestinglyNamed(file)).to.be.true)
   })

   it('does not fine files it should not', () => {
     const files = ['licenser', 'Licenset.md', 'test.HTML', 'LICENSE.doc']
     const processor = new AbstractCDProcessor({})
-    files.forEach((file) => expect(processor._isInterestinglyNamed(file)).to.be.false)
+    files.forEach(file => expect(processor._isInterestinglyNamed(file)).to.be.false)
   })
 })

@@ -31,8 +31,8 @@ describe('AbstractClearlyDefinedProcessor add files', () => {
     const document = { location: '/test' }
     await processor._addFiles({ document })
     expect(document.files.length).to.be.equal(2)
-    expect(document.files.map((file) => file.path)).to.have.members(['license', 'package/notice.txt'])
-    expect(document.files.every((file) => file.hashes.sha1 === '42')).to.be.true
+    expect(document.files.map(file => file.path)).to.have.members(['license', 'package/notice.txt'])
+    expect(document.files.every(file => file.hashes.sha1 === '42')).to.be.true
     expect(processor.attachFiles.callCount).to.be.equal(2)
   })

@@ -43,7 +43,7 @@ describe('AbstractClearlyDefinedProcessor add files', () => {
     processor.computeHashes = sinon.stub()
     const document = { location: 'c:\\test' }
     await processor._addFiles({ document })
-    expect(document.files.map((file) => file.path)).to.have.members(['license', 'package/notice.txt'])
+    expect(document.files.map(file => file.path)).to.have.members(['license', 'package/notice.txt'])
   })

   it('handles no files', async () => {
diff --git a/test/unit/providers/process/abstractProcessorTests.js b/test/unit/providers/process/abstractProcessorTests.js
index f6b0e9d1..b1519b96 100644
--- a/test/unit/providers/process/abstractProcessorTests.js
+++ b/test/unit/providers/process/abstractProcessorTests.js
@@ -64,13 +64,13 @@ describe('AbstractProcessor aggregateVersions', () => {
 describe('AbstractProcessor attach files', () => {
   beforeEach(() => {
     const fsStub = {
-      readFileSync: (path) => {
+      readFileSync: path => {
         path = path.replace(/\\/g, '/')
         return `${path.startsWith('/test') ? path.slice(6) : path} attachment`
-      },
+      }
     }
     const handlerClass = proxyquire('../../../../providers/process/abstractProcessor', {
-      fs: fsStub,
+      fs: fsStub
     })
     Handler = new handlerClass({})
   })
@@ -83,10 +83,10 @@ describe('AbstractProcessor attach files', () => {
     request.document = { _metadata: { links: {} } }
     request.crawler = { queue: sinon.stub() }
     request.track = sinon.stub()
-    Object.getOwnPropertyNames(map).forEach((name) => VisitorMap.register(name, map[name]))
+    Object.getOwnPropertyNames(map).forEach(name => VisitorMap.register(name, map[name]))
     new AbstractProcessor({}).linkAndQueueTool(request, 'licensee')
     expect(request.document._metadata.links.licensee.href).to.be.equal(
-      'urn:npm:npmjs:-:redie:revision:0.3.0:tool:licensee',
+      'urn:npm:npmjs:-:redie:revision:0.3.0:tool:licensee'
     )
     expect(request.document._metadata.links.licensee.type).to.be.equal('collection')
     expect(request.crawler.queue.calledOnce).to.be.true
@@ -99,7 +99,7 @@ describe('AbstractProcessor attach files', () => {
     request.document = { _metadata: { links: {} } }
     request.crawler = { queue: sinon.stub() }
     request.track = sinon.stub()
-    Object.getOwnPropertyNames(map).forEach((name) => VisitorMap.register(name, map[name]))
+    Object.getOwnPropertyNames(map).forEach(name => VisitorMap.register(name, map[name]))
     new AbstractProcessor({}).linkAndQueue(request, 'source')
     expect(request.document._metadata.links.source.href).to.be.equal('urn:npm:npmjs:-:redie:revision:0.3.0')
     expect(request.document._metadata.links.source.type).to.be.equal('resource')
@@ -113,7 +113,7 @@ describe('AbstractProcessor attach files', () => {
     request.document = { _metadata: { links: {} } }
     request.crawler = { queue: sinon.stub() }
     request.track = sinon.stub()
-    Object.getOwnPropertyNames(map).forEach((name) => VisitorMap.register(name, map[name]))
+    Object.getOwnPropertyNames(map).forEach(name => VisitorMap.register(name, map[name]))
     new AbstractProcessor({}).addSelfLink(request)
     expect(request.document._metadata.links.self.href).to.be.equal('urn:npm:npmjs:-:redie:revision:0.3.0')
     expect(request.document._metadata.links.self.type).to.be.equal('resource')
@@ -182,7 +182,7 @@ describe('link and queue local tasks', () => {
     const request = new Request('npm', 'cd:/npm/npmjs/-/redie/0.3.0')
     processor.addLocalToolTasks(request)
     expect(processor.linkAndQueueTool.callCount).to.be.equal(3)
-    expect(processor.linkAndQueueTool.args.map((call) => call[1])).to.have.members(['licensee', 'scancode', 'reuse'])
+    expect(processor.linkAndQueueTool.args.map(call => call[1])).to.have.members(['licensee', 'scancode', 'reuse'])
   })
 })

@@ -224,7 +224,7 @@ describe('AbstractProcessor get interesting files', () => {
 function validateAttachedFile(name, list, checkContent = false) {
   const attachment = `${name} attachment`
   const token = Handler._computeToken(attachment)
-  const entry = find(list, (entry) => entry.path === name)
+  const entry = find(list, entry => entry.path === name)
   expect(!!entry).to.be.true
   expect(entry.token).to.eq(token)
   if (checkContent) expect(entry.attachment).to.eq(attachment)
diff --git a/test/unit/providers/process/composerExtractTests.js b/test/unit/providers/process/composerExtractTests.js
index 8c392b76..1854f1c0 100644
--- a/test/unit/providers/process/composerExtractTests.js
+++ b/test/unit/providers/process/composerExtractTests.js
@@ -13,17 +13,17 @@ const hashes = {
   'symfony/polyfill-mbstring-1.11.0': {
     'symfony-polyfill-mbstring-fe5e94c/LICENSE': {
       sha1: '53a47cd3f3fee7cd8179a19d7741da412eed9de7',
-      sha256: 'a718d662afdccd5db0c47543119dfa62b2d8b0dfd2d6d44a5e14397cb574e52b',
+      sha256: 'a718d662afdccd5db0c47543119dfa62b2d8b0dfd2d6d44a5e14397cb574e52b'
     },
     'symfony-polyfill-mbstring-fe5e94c/README.md': {
       sha1: 'c20aaad7bd777b2c7839c363a7a8dfd15f6cca63',
-      sha256: '74a6cefb78dc6b1447f9686cc2a062112027c8d2a39c4da66fd43f0f2bf76c3f',
+      sha256: '74a6cefb78dc6b1447f9686cc2a062112027c8d2a39c4da66fd43f0f2bf76c3f'
     },
     'symfony-polyfill-mbstring-fe5e94c/composer.json': {
       sha1: '9005581bb58110bc5525c70693f9d79d8fe76616',
-      sha256: 'a81f24d2da5637b570ebb8999e48d6e145887c37109dd553d3c04f4e6d3980bf',
-    },
-  },
+      sha256: 'a81f24d2da5637b570ebb8999e48d6e145887c37109dd553d3c04f4e6d3980bf'
+    }
+  }
 }

 describe('PHP processing', () => {
@@ -37,7 +37,7 @@ describe('PHP processing', () => {
     const files = request.document.files

     expect(request.document).to.be.not.null
-    files.forEach((file) => {
+    files.forEach(file => {
       if (file.path.includes('LICENSE')) {
         expect(file.hashes.sha1).to.be.equal(hashes['symfony/polyfill-mbstring-1.11.0'][file.path].sha1)
         expect(file.hashes.sha256).to.be.equal(hashes['symfony/polyfill-mbstring-1.11.0'][file.path].sha256)
@@ -50,10 +50,10 @@ describe('PHP processing', () => {
       }
     })
     expect(processor.linkAndQueueTool.callCount).to.be.equal(3)
-    expect(processor.linkAndQueueTool.args.map((call) => call[1])).to.have.members([
+    expect(processor.linkAndQueueTool.args.map(call => call[1])).to.have.members([
       'licensee',
       'scancode' /*, 'fossology'*/,
-      'reuse',
+      'reuse'
     ])
     expect(request.document.attachments.length).to.eq(1)
     expect(request.document.summaryInfo.count).to.be.equal(8)
@@ -72,7 +72,7 @@ async function setup() {
   request.document.location = dir.name
   await new AbstractFetch({}).decompress(
     'test/fixtures/composer/symfony-polyfill-mbstring-v1.11.0-0-gfe5e94c.zip',
-    dir.name,
+    dir.name
   )
   return { processor, request }
 }
@@ -154,7 +154,7 @@ function sourceDiscovery() {
 const githubResults = {
   'http://repo': createSourceSpec('repo'),
   'http://url': createSourceSpec('url'),
-  'http://bugs': createSourceSpec('bugs'),
+  'http://bugs': createSourceSpec('bugs')
 }

 function createManifest(repo, url, homepage, bugs) {
diff --git a/test/unit/providers/process/condaExtractTests.js b/test/unit/providers/process/condaExtractTests.js
index ca4cf7cc..a9a8123e 100644
--- a/test/unit/providers/process/condaExtractTests.js
+++ b/test/unit/providers/process/condaExtractTests.js
@@ -16,10 +16,10 @@ describe('Conda processing', () => {
     await processor.handle(request)

     expect(processor.linkAndQueueTool.callCount).to.be.equal(3)
-    expect(processor.linkAndQueueTool.args.map((call) => call[1])).to.have.members([
+    expect(processor.linkAndQueueTool.args.map(call => call[1])).to.have.members([
       'licensee',
       'scancode',
-      'reuse' /*, 'fossology'*/,
+      'reuse' /*, 'fossology'*/
     ])
     expect(processor.linkAndQueue.callCount).to.be.equal(1)
     expect(processor.linkAndQueue.args[0][1]).to.equal('source')
@@ -45,17 +45,17 @@ function createRequest() {
       provider: 'conda-forge',
       namespace: '-',
       name: '21cmfast',
-      revision: 'linux-64--3.0.2',
+      revision: 'linux-64--3.0.2'
     },
     registryData: {
       downloadUrl: '21cmfast',
       channelData: {},
       repoData: {
         packageData: {
-          version: '3.0.2',
-        },
-      },
-    },
+          version: '3.0.2'
+        }
+      }
+    }
   }
   request.processMode = 'process'
   return request
diff --git a/test/unit/providers/process/crateExtractTests.js b/test/unit/providers/process/crateExtractTests.js
index e78813f6..4db0bf5b 100644
--- a/test/unit/providers/process/crateExtractTests.js
+++ b/test/unit/providers/process/crateExtractTests.js
@@ -12,45 +12,45 @@ const hashes = {
   'bitflags-1.0.4': {
     '.gitignore': {
       sha1: '3254b5d5538166f1fd5a0bb41f7f3d3bbd455c56',
-      sha256: 'f9b1ca6ae27d1c18215265024629a8960c31379f206d9ed20f64e0b2dcf79805',
+      sha256: 'f9b1ca6ae27d1c18215265024629a8960c31379f206d9ed20f64e0b2dcf79805'
     },
     'CHANGELOG.md': {
       sha1: '87b1447fcb5155a5ba3bc476c6b870799bed78c7',
-      sha256: 'b9f503da2d3c91b0a244f1dc853d975f971f782b209ea52cd4cd98705e6e2749',
+      sha256: 'b9f503da2d3c91b0a244f1dc853d975f971f782b209ea52cd4cd98705e6e2749'
     },
     'CODE_OF_CONDUCT.md': {
       sha1: '82ce99058d5f84f3c3c2f548e7674de67d786e83',
-      sha256: '42634d0f6d922f49857175af991802822f7f920487aefa2ee250a50d12251a66',
+      sha256: '42634d0f6d922f49857175af991802822f7f920487aefa2ee250a50d12251a66'
     },
     'Cargo.toml': {
       sha1: '116f829c6f5099f58b7c7ef6d11655e93d35e34f',
-      sha256: '0234b6f827764ca093d897126b45505be0996e67860d61caeab696d092ffb781',
+      sha256: '0234b6f827764ca093d897126b45505be0996e67860d61caeab696d092ffb781'
     },
     'Cargo.toml.orig': {
       sha1: '810c9f23ba089372b992496166cdec13733959fc',
-      sha256: 'b2512e34fec0b32dabd8a2d4339ed22c9d1a3697f525f25500020bbd6f020456',
+      sha256: 'b2512e34fec0b32dabd8a2d4339ed22c9d1a3697f525f25500020bbd6f020456'
     },
     'LICENSE-APACHE': {
       sha1: '5798832c31663cedc1618d18544d445da0295229',
-      sha256: 'a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2',
+      sha256: 'a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2'
     },
     'LICENSE-MIT': {
       sha1: '9f3c36d2b7d381d9cf382a00166f3fbd06783636',
-      sha256: '6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb',
+      sha256: '6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb'
     },
     'README.md': {
       sha1: 'efd05ffa19723f822a85c5b76bda239be1d1aee1',
-      sha256: '6b236f8b62c82f189fabce0756e01a2c0ab1f32cb84cad9ff3c96b2ce5282bda',
+      sha256: '6b236f8b62c82f189fabce0756e01a2c0ab1f32cb84cad9ff3c96b2ce5282bda'
     },
     'src/example_generated.rs': {
       sha1: '6f1ac32232c5519998c87432f356c0090ef09b76',
-      sha256: 'e43eb59e90f317f38d436670a6067d2fd9eb35fb319fe716184e4a04e24ed1b2',
+      sha256: 'e43eb59e90f317f38d436670a6067d2fd9eb35fb319fe716184e4a04e24ed1b2'
     },
     'src/lib.rs': {
       sha1: '731ff4783523618c1e98b064d716fa5768dbac54',
-      sha256: '5751eb6fbb8cb97d8accd0846493168d9b5acff1f8d64435d4da8ad7dbf36b4d',
-    },
-  },
+      sha256: '5751eb6fbb8cb97d8accd0846493168d9b5acff1f8d64435d4da8ad7dbf36b4d'
+    }
+  }
 }

 describe('Crate processing', () => {
@@ -63,15 +63,15 @@ describe('Crate processing', () => {
     await processor.handle(request)
     const files = request.document.files
     expect(request.document).to.be.not.null
-    files.forEach((file) => {
+    files.forEach(file => {
       expect(file.hashes.sha1).to.be.equal(hashes['bitflags-1.0.4'][file.path].sha1)
       expect(file.hashes.sha256).to.be.equal(hashes['bitflags-1.0.4'][file.path].sha256)
     })
     expect(processor.linkAndQueueTool.callCount).to.be.equal(3)
-    expect(processor.linkAndQueueTool.args.map((call) => call[1])).to.have.members([
+    expect(processor.linkAndQueueTool.args.map(call => call[1])).to.have.members([
       'licensee',
       'scancode',
-      'reuse' /*, 'fossology'*/,
+      'reuse' /*, 'fossology'*/
     ])
     expect(request.document.summaryInfo.count).to.be.equal(10)
     expect(processor.linkAndQueue.callCount).to.be.equal(1)
@@ -99,8 +99,8 @@ function createRequest() {
     manifest: {
       homepage: 'https://github.com/bitflags/bitflags',
       documentation: 'https://docs.rs/bitflags',
-      repository: 'https://github.com/bitflags/bitflags',
-    },
+      repository: 'https://github.com/bitflags/bitflags'
+    }
   }
   request.processMode = 'process'
   return request
diff --git a/test/unit/providers/process/debExtractTests.js b/test/unit/providers/process/debExtractTests.js
index 99c9de3a..6798a5a5 100644
--- a/test/unit/providers/process/debExtractTests.js
+++ b/test/unit/providers/process/debExtractTests.js
@@ -14,10 +14,10 @@ describe('Debian processing', () => {
     expect(request.document.sourceInfo.type).to.equal('debsrc')

     expect(processor.linkAndQueueTool.callCount).to.be.equal(3)
-    expect(processor.linkAndQueueTool.args.map((call) => call[1])).to.have.members([
+    expect(processor.linkAndQueueTool.args.map(call => call[1])).to.have.members([
       'licensee',
       'scancode',
-      'reuse' /*, 'fossology'*/,
+      'reuse' /*, 'fossology'*/
     ])
     expect(processor.linkAndQueue.callCount).to.be.equal(1)
     expect(processor.linkAndQueue.args[0][1]).to.equal('source')
@@ -43,9 +43,9 @@ function createRequest() {
       provider: 'debian',
       namespace: '-',
       name: '0ad',
-      revision: '0.0.17-1',
+      revision: '0.0.17-1'
     },
-    registryData: [{ Architecture: 'armhf', Source: '0ad' }],
+    registryData: [{ Architecture: 'armhf', Source: '0ad' }]
   }
   request.processMode = 'process'
   return request
diff --git a/test/unit/providers/process/fsfeReuseTests.js b/test/unit/providers/process/fsfeReuseTests.js
index 300b31b9..629c715c 100644
--- a/test/unit/providers/process/fsfeReuseTests.js
+++ b/test/unit/providers/process/fsfeReuseTests.js
@@ -23,7 +23,7 @@ describe('FSFE REUSE software process', () => {
     expect(document.attachments.length).to.equal(2)
     expect(document.reuse.licenses).to.eql([
       { filePath: 'LICENSES/Apache-2.0.txt', spdxId: 'Apache-2.0' },
-      { filePath: 'LICENSES/CC-BY-3.0.txt', spdxId: 'CC-BY-3.0' },
+      { filePath: 'LICENSES/CC-BY-3.0.txt', spdxId: 'CC-BY-3.0' }
     ])
     let readmeFound = false
     let securityFound = false
@@ -35,7 +35,7 @@ describe('FSFE REUSE software process', () => {
         expect(document.reuse.files[i].LicenseConcluded).to.equal('NOASSERTION')
         expect(document.reuse.files[i].LicenseInfoInFile).to.equal('Apache-2.0')
         expect(document.reuse.files[i].FileCopyrightText).to.equal(
-          '1982-2021 SAP SE or an SAP affiliate company and ospo-reuse contributors',
+          '1982-2021 SAP SE or an SAP affiliate company and ospo-reuse contributors'
         )
       }
       if (document.reuse.files[i].FileName === 'SECURITY.md') {
expect(document.reuse.files[i].LicenseConcluded).to.equal('NOASSERTION') expect(document.reuse.files[i].LicenseInfoInFile).to.equal('Beerware') expect(document.reuse.files[i].FileCopyrightText).to.equal( - '2013-2017 SAP SE or an SAP affiliate company and ospo-reuse contributors', + '2013-2017 SAP SE or an SAP affiliate company and ospo-reuse contributors' ) } if (document.reuse.files[i].FileName === 'ospo-reuse/src/main/java/com/sap/ospo-reuse/HelloWorld.java') { @@ -51,7 +51,7 @@ describe('FSFE REUSE software process', () => { expect(document.reuse.files[i].LicenseConcluded).to.equal('NOASSERTION') expect(document.reuse.files[i].LicenseInfoInFile).to.equal('GPL-3.0-or-later') expect(document.reuse.files[i].FileCopyrightText).to.equal( - '2019-2021 SAP SE or an SAP affiliate company and ospo-reuse contributors', + '2019-2021 SAP SE or an SAP affiliate company and ospo-reuse contributors' ) } if (document.reuse.files[i].FileName === 'ospo-reuse/src/test/java/com/sap/ospo-reuse/TestsHelloWorld.java') { @@ -99,7 +99,7 @@ describe('FSFE REUSE software process', () => { return callbackOrOptions(resultBox.versionError, { stdout: resultBox.versionResult }) } callback(resultBox.error, {}) - }, + } } const fsStub = { readdirSync: () => resultBox.licensesDirectory } Handler = proxyquire('../../../../providers/process/fsfeReuse', { child_process: processStub, fs: fsStub }) diff --git a/test/unit/providers/process/gemExtractTests.js b/test/unit/providers/process/gemExtractTests.js index 37ec1b43..33e6e20e 100644 --- a/test/unit/providers/process/gemExtractTests.js +++ b/test/unit/providers/process/gemExtractTests.js @@ -47,7 +47,7 @@ const githubResults = { 'http://gem': createSourceSpec('gem'), 'http://home': createSourceSpec('home'), 'http://mail': createSourceSpec('mail'), - 'http://source': createSourceSpec('source'), + 'http://source': createSourceSpec('source') } function createManifest( @@ -57,7 +57,7 @@ function createManifest( gem_uri, homepage_uri, mailing_list_uri, - source_code_uri, + source_code_uri ) { return { bug_tracker_uri, changelog_uri, documentation_uri, gem_uri, homepage_uri, mailing_list_uri, source_code_uri } } diff --git a/test/unit/providers/process/goExtractTests.js b/test/unit/providers/process/goExtractTests.js index 17b306d0..cd29ba3b 100644 --- a/test/unit/providers/process/goExtractTests.js +++ b/test/unit/providers/process/goExtractTests.js @@ -28,7 +28,7 @@ describe('Go processing', () => { await processor.handle(request) expect(processor.linkAndQueueTool.callCount).to.be.equal(3) - expect(processor.linkAndQueueTool.args.map((call) => call[1])).to.have.members(['licensee', 'scancode', 'reuse']) + expect(processor.linkAndQueueTool.args.map(call => call[1])).to.have.members(['licensee', 'scancode', 'reuse']) expect(request.document.registryData.licenses).to.be.deep.equal(licenses) }) }) @@ -51,11 +51,11 @@ function createRequest() { provider: 'golang', namespace: 'rsc.io', name: 'quote', - revision: '1.5.2', + revision: '1.5.2' }, registryData: { - licenses, - }, + licenses + } } request.processMode = 'process' return request @@ -70,8 +70,8 @@ function createInvalidRequest() { provider: 'golang', namespace: 'rsc.io', name: 'quote', - revision: '1.5.2', - }, + revision: '1.5.2' + } } request.processMode = 'process' return request diff --git a/test/unit/providers/process/licenseeTests.js b/test/unit/providers/process/licenseeTests.js index 62b20d6a..572e4608 100644 --- a/test/unit/providers/process/licenseeTests.js +++ b/test/unit/providers/process/licenseeTests.js @@ 
-22,7 +22,7 @@ describe('Licensee process', () => { 'LICENSE', 'package.json', 'subfolder/LICENSE.foo', - 'subfolder/LICENSE.bar', + 'subfolder/LICENSE.bar' ]) expect(processor.attachFiles.args[0][2]).to.equal(path.resolve('test/fixtures/licensee/9.10.1/folder1')) }) @@ -57,7 +57,7 @@ describe('Licensee process', () => { execFile: (command, parameters, callbackOrOptions) => { if (parameters.includes('version')) return callbackOrOptions(resultBox.versionError, { stdout: resultBox.versionResult }) - }, + } } Handler = proxyquire('../../../../providers/process/licensee', { child_process: processStub }) Handler._resultBox = resultBox diff --git a/test/unit/providers/process/mavenExtractTests.js b/test/unit/providers/process/mavenExtractTests.js index 932aef3d..b7d20e81 100644 --- a/test/unit/providers/process/mavenExtractTests.js +++ b/test/unit/providers/process/mavenExtractTests.js @@ -66,7 +66,7 @@ function sourceDiscovery() { } const githubResults = { - 'http://url': createSourceSpec('url'), + 'http://url': createSourceSpec('url') } function createManifest(url) { diff --git a/test/unit/providers/process/npmExtractTests.js b/test/unit/providers/process/npmExtractTests.js index 165ebe75..91c721d9 100644 --- a/test/unit/providers/process/npmExtractTests.js +++ b/test/unit/providers/process/npmExtractTests.js @@ -13,21 +13,21 @@ const hashes = { 'redie-0.3.0': { 'package/LICENSE': { sha1: '6401e7f1f46654117270c4860a263d3c4d6df1eb', - sha256: '42c7def049b7ef692085ca9bdf5984d439d3291922e02cb112d5cd1287b3cc56', + sha256: '42c7def049b7ef692085ca9bdf5984d439d3291922e02cb112d5cd1287b3cc56' }, 'package/README.md': { sha1: 'f137a2544ac6b3589796fbd7dee87a35858f8d75', - sha256: 'df3005370ff27872f241341dd11089951e099786a2b7e949262ab2ed5b3e4237', + sha256: 'df3005370ff27872f241341dd11089951e099786a2b7e949262ab2ed5b3e4237' }, 'package/index.js': { sha1: '7561b32ffa21eeb8ca1c12a5e76ec28d718c3dfd', - sha256: 'b83c7eeef19b2f4be9a8947db0bedc4ef43a15746e9c9b6f14e491f68bd2db60', + sha256: 'b83c7eeef19b2f4be9a8947db0bedc4ef43a15746e9c9b6f14e491f68bd2db60' }, 'package/package.json': { sha1: '74c5c9c1de88406c3d08272bfb6fe57055625fc9', - sha256: '7bf06a09d2b1c79b2cad7820a97e3887749418e6c53da1f7fb7f1b7c430e386d', - }, - }, + sha256: '7bf06a09d2b1c79b2cad7820a97e3887749418e6c53da1f7fb7f1b7c430e386d' + } + } } describe('NPM processing', () => { @@ -40,15 +40,15 @@ describe('NPM processing', () => { await processor.handle(request) const files = request.document.files expect(request.document).to.be.not.null - files.forEach((file) => { + files.forEach(file => { expect(file.hashes.sha1).to.be.equal(hashes['redie-0.3.0'][file.path].sha1) expect(file.hashes.sha256).to.be.equal(hashes['redie-0.3.0'][file.path].sha256) }) expect(processor.linkAndQueueTool.callCount).to.be.equal(3) - expect(processor.linkAndQueueTool.args.map((call) => call[1])).to.have.members([ + expect(processor.linkAndQueueTool.args.map(call => call[1])).to.have.members([ 'licensee', 'scancode', - 'reuse' /*, 'fossology'*/, + 'reuse' /*, 'fossology'*/ ]) expect(request.document.attachments.length).to.eq(2) expect(request.document._attachments.length).to.eq(2) @@ -155,7 +155,7 @@ function sourceDiscovery() { const githubResults = { 'http://repo': createSourceSpec('repo'), 'http://url': createSourceSpec('url'), - 'http://bugs': createSourceSpec('bugs'), + 'http://bugs': createSourceSpec('bugs') } function createManifest(repo, url, homepage, bugs) { diff --git a/test/unit/providers/process/nugetExtractTests.js b/test/unit/providers/process/nugetExtractTests.js 
index dcdcee5d..e02c243c 100644 --- a/test/unit/providers/process/nugetExtractTests.js +++ b/test/unit/providers/process/nugetExtractTests.js @@ -13,41 +13,41 @@ const hashes = { 'xunit.core.2.4.1': { '.signature.p7s': { sha1: 'cfdbf40dc9729d51621609c440b0aab6e82ca62c', - sha256: '83a8224a271c8340855d80baa7169604a0d60c914e3a852b6423b3c54124e2e7', + sha256: '83a8224a271c8340855d80baa7169604a0d60c914e3a852b6423b3c54124e2e7' }, '[Content_Types].xml': { sha1: '5e7b5e8e973dfb200d56e6894978cf4652c431dc', - sha256: 'b5a90ff27fec02ae69707b8a1bbe2bd069b47519daeface707303722fbf6e01e', + sha256: 'b5a90ff27fec02ae69707b8a1bbe2bd069b47519daeface707303722fbf6e01e' }, 'xunit.core.nuspec': { sha1: 'c05dad55561e3c2df400b8b13c944590b15ee98c', - sha256: '2c411d7ef591767dfc42910d6cad592d77a3ce4c4d4333b8477c1465e936af10', + sha256: '2c411d7ef591767dfc42910d6cad592d77a3ce4c4d4333b8477c1465e936af10' }, '_rels/.rels': { sha1: 'b5515c2da3422faba0848fe256a5b6ec4afca732', - sha256: '0c3ee1caf5de49929c8be1050b5d13e7e97130f008749a0a4c38da292cfe049e', + sha256: '0c3ee1caf5de49929c8be1050b5d13e7e97130f008749a0a4c38da292cfe049e' }, 'build/xunit.core.props': { sha1: '9cce282dd8f38294df68a8945988572b07f7298b', - sha256: '91d72e308289a3b92f4ea16357f3d893c6552e5af256838cb5372b45f2ad2856', + sha256: '91d72e308289a3b92f4ea16357f3d893c6552e5af256838cb5372b45f2ad2856' }, 'build/xunit.core.targets': { sha1: '04727e3c2a540f437c37d20e4e6cb872618c7e81', - sha256: '5ee8e74529a707ebf9c86904a38d4d0aaadea70e991b0c61697246fa7adbb71d', + sha256: '5ee8e74529a707ebf9c86904a38d4d0aaadea70e991b0c61697246fa7adbb71d' }, 'buildMultiTargeting/xunit.core.props': { sha1: '9cce282dd8f38294df68a8945988572b07f7298b', - sha256: '91d72e308289a3b92f4ea16357f3d893c6552e5af256838cb5372b45f2ad2856', + sha256: '91d72e308289a3b92f4ea16357f3d893c6552e5af256838cb5372b45f2ad2856' }, 'buildMultiTargeting/xunit.core.targets': { sha1: '04727e3c2a540f437c37d20e4e6cb872618c7e81', - sha256: '5ee8e74529a707ebf9c86904a38d4d0aaadea70e991b0c61697246fa7adbb71d', + sha256: '5ee8e74529a707ebf9c86904a38d4d0aaadea70e991b0c61697246fa7adbb71d' }, 'package/services/metadata/core-properties/929de7b81e6f4062812c1a95465898c7.psmdcp': { sha1: '2cc94ae30faf15ea01ddd2aa49fbf581a7005b2a', - sha256: 'd4a95f4d4c7f23c17942fecac5cac2bb9dd8a41dfc9fcb57adbf20ab1b64841f', - }, - }, + sha256: 'd4a95f4d4c7f23c17942fecac5cac2bb9dd8a41dfc9fcb57adbf20ab1b64841f' + } + } } describe('NuGet processing', () => { @@ -60,12 +60,12 @@ describe('NuGet processing', () => { await processor.handle(request) const files = request.document.files expect(request.document).to.be.not.null - files.forEach((file) => { + files.forEach(file => { expect(file.hashes.sha1).to.be.equal(hashes['xunit.core.2.4.1'][file.path].sha1) expect(file.hashes.sha256).to.be.equal(hashes['xunit.core.2.4.1'][file.path].sha256) }) expect(processor.linkAndQueueTool.callCount).to.be.equal(3) - expect(processor.linkAndQueueTool.args.map((call) => call[1])).to.have.members(['licensee', 'scancode', 'reuse']) + expect(processor.linkAndQueueTool.args.map(call => call[1])).to.have.members(['licensee', 'scancode', 'reuse']) expect(request.document.summaryInfo.count).to.be.equal(9) expect(processor.linkAndQueue.callCount).to.be.equal(1) expect(processor.linkAndQueue.args[0][1]).to.equal('source') @@ -80,7 +80,7 @@ async function setup() { const dir = processor.createTempDir(request) request.document.metadataLocation = { manifest: 'test/fixtures/nuget/xunit.core.2.4.1.catalog.json', - nuspec: 'test/fixtures/nuget/xunit.core.2.4.1.nuspec', + nuspec: 
'test/fixtures/nuget/xunit.core.2.4.1.nuspec' } request.document.location = `${dir.name}/nupkg` await new AbstractFetch({}).decompress('test/fixtures/nuget/xunit.core.2.4.1.nupkg', `${dir.name}/nupkg`) @@ -140,7 +140,7 @@ function sourceDiscovery() { const githubResults = { 'http://repo': createSourceSpec('repo'), 'http://project': createSourceSpec('project'), - 'http://license': createSourceSpec('license'), + 'http://license': createSourceSpec('license') } function createManifest(repo, projectUrl, licenseUrl) { diff --git a/test/unit/providers/process/pypiExtractTests.js b/test/unit/providers/process/pypiExtractTests.js index 0d4c37b9..8a03dbc0 100644 --- a/test/unit/providers/process/pypiExtractTests.js +++ b/test/unit/providers/process/pypiExtractTests.js @@ -47,7 +47,7 @@ const githubResults = { 'http://home': createSourceSpec('home'), 'http://package': createSourceSpec('package'), 'http://project': createSourceSpec('project'), - 'http://release': createSourceSpec('release'), + 'http://release': createSourceSpec('release') } function createManifest(bugtrack_url, docs_url, download_url, home_page, package_url, project_url, release_url) { diff --git a/test/unit/providers/process/scancodeTests.js b/test/unit/providers/process/scancodeTests.js index 5b90f4f3..2eedf34c 100644 --- a/test/unit/providers/process/scancodeTests.js +++ b/test/unit/providers/process/scancodeTests.js @@ -14,19 +14,19 @@ let Handler describe('ScanCode misc', () => { it('differentiates real errors', () => { Handler._resultBox.result = { - files: [{ scan_errors: ['ValueError: this is a test'] }, { scan_errors: ['bogus package.json'] }], + files: [{ scan_errors: ['ValueError: this is a test'] }, { scan_errors: ['bogus package.json'] }] } expect(Handler._hasRealErrors()).to.be.false Handler._resultBox.result = { - files: [{ scan_errors: ['Yikes. Tragedy has struck'] }, { scan_errors: ['Panic'] }], + files: [{ scan_errors: ['Yikes. 
Tragedy has struck'] }, { scan_errors: ['Panic'] }] } expect(Handler._hasRealErrors()).to.be.true Handler._resultBox.result = { - files: [], + files: [] } expect(Handler._hasRealErrors()).to.be.false Handler._resultBox.result = { - files: [{}], + files: [{}] } expect(Handler._hasRealErrors()).to.be.false }) @@ -34,10 +34,10 @@ describe('ScanCode misc', () => { beforeEach(() => { const resultBox = {} const fsStub = { - readFileSync: () => JSON.stringify(resultBox.result), + readFileSync: () => JSON.stringify(resultBox.result) } const handlerFactory = proxyquire('../../../../providers/process/scancode', { - fs: fsStub, + fs: fsStub }) Handler = handlerFactory({ logger: { log: () => {} } }) Handler._resultBox = resultBox @@ -53,19 +53,19 @@ describe('ScanCode process', () => { const { request, processor } = setup('2.9.8/gem.json') await processor.handle(request) expect(request.document._metadata.toolVersion).to.equal('1.2.0') - expect(flatten(processor.attachFiles.args.map((x) => x[1]))).to.have.members([]) + expect(flatten(processor.attachFiles.args.map(x => x[1]))).to.have.members([]) }) it('should handle simple npms', async () => { const { request, processor } = setup('2.9.8/npm-basic.json') await processor.handle(request) - expect(flatten(processor.attachFiles.args.map((x) => x[1]))).to.have.members(['package/package.json']) + expect(flatten(processor.attachFiles.args.map(x => x[1]))).to.have.members(['package/package.json']) }) it('should handle large npms', async () => { const { request, processor } = setup('2.9.8/npm-large.json') await processor.handle(request) - expect(flatten(processor.attachFiles.args.map((x) => x[1]))).to.have.members(['package/package.json']) + expect(flatten(processor.attachFiles.args.map(x => x[1]))).to.have.members(['package/package.json']) }) it('should skip if ScanCode not found', async () => { @@ -91,7 +91,7 @@ describe('ScanCode process', () => { if (parameters.includes('--version')) return callbackOrOptions(resultBox.versionError, { stdout: resultBox.versionResult }) callback(resultBox.error) - }, + } } Handler = proxyquire('../../../../providers/process/scancode', { child_process: processStub }) Handler._resultBox = resultBox @@ -108,7 +108,7 @@ function setup(fixture, error, versionError) { timeout: 200, processes: 2, format: 'json', - logger: { log: sinon.stub(), info: sinon.stub() }, + logger: { log: sinon.stub(), info: sinon.stub() } } const testRequest = new request('npm', 'cd:/npm/npmjs/-/test/1.1') testRequest.document = { _metadata: { links: {} }, location: '/test' } diff --git a/test/unit/providers/process/sourceTests.js b/test/unit/providers/process/sourceTests.js index 47102fe3..b229764b 100644 --- a/test/unit/providers/process/sourceTests.js +++ b/test/unit/providers/process/sourceTests.js @@ -17,11 +17,11 @@ describe('Source processing', () => { processor.handle(request) expect(processor.linkAndQueueTool.callCount).to.be.equal(4) - expect(processor.linkAndQueueTool.args.map((call) => call[1])).to.have.members([ + expect(processor.linkAndQueueTool.args.map(call => call[1])).to.have.members([ 'clearlydefined', 'licensee', 'scancode', - 'reuse', + 'reuse' ]) }) }) @@ -29,7 +29,7 @@ describe('Source processing', () => { function mockRequest(url) { const request = new Request('source', url) request.document = { - _metadata: { links: {} }, + _metadata: { links: {} } } return request } diff --git a/test/unit/providers/queuing/scopedQueueSetsTests.js b/test/unit/providers/queuing/scopedQueueSetsTests.js index 783957c3..f7c80d1d 100644 --- 
a/test/unit/providers/queuing/scopedQueueSetsTests.js +++ b/test/unit/providers/queuing/scopedQueueSetsTests.js @@ -19,7 +19,7 @@ describe('scopedQueueSets', () => { function createQueues() { return { subscribe: sinon.stub(), - unsubscribe: sinon.stub(), + unsubscribe: sinon.stub() } } @@ -130,7 +130,7 @@ describe('scopedQueueSets', () => { function mockPopReturn(fromQueue) { const queue = { - getName: sinon.stub().returns(fromQueue), + getName: sinon.stub().returns(fromQueue) } return poppedRequest(queue) } @@ -171,11 +171,11 @@ describe('scopedQueueSets', () => { globalQueue = mockQueue('normal') globalQueues = { getQueue: sinon.stub().returns(globalQueue), - pop: sinon.stub().resolves(poppedRequest(globalQueue)), + pop: sinon.stub().resolves(poppedRequest(globalQueue)) } localQueue = mockQueue('normal') localQueues = { - pop: sinon.stub(), + pop: sinon.stub() } scopedQueues = new ScopedQueueSets(globalQueues, localQueues) }) @@ -211,12 +211,12 @@ describe('scopedQueueSets', () => { beforeEach(() => { globalQueue = mockQueue('normal') globalQueues = { - getQueue: () => globalQueue, + getQueue: () => globalQueue } localQueue = mockQueue('normal') localQueue.pop.resolves(poppedRequest(localQueue)) localQueues = { - queues: [localQueue], + queues: [localQueue] } scopedQueues = new ScopedQueueSets(globalQueues, localQueues) @@ -285,8 +285,8 @@ describe('integration test with AttenuatedQueue and InMemoryCrawlQueue', () => { options = { _config: new EventEmitter(), logger: { - verbose: sinon.stub(), - }, + verbose: sinon.stub() + } } queueSets = createScopedQueueSets(queueName, options) scopedQueues = new ScopedQueueSets(queueSets.global, queueSets.local) @@ -459,7 +459,7 @@ function createScopedQueueSets(queueName, options) { return { global: new QueueSet([global], options), - local: new QueueSet([local], options), + local: new QueueSet([local], options) } } @@ -491,6 +491,6 @@ function mockQueue(fromQueue) { getName: sinon.stub().returns(fromQueue), push: sinon.stub().resolves(), done: sinon.stub().resolves(), - pop: sinon.stub(), + pop: sinon.stub() } } diff --git a/test/unit/providers/queuing/storageBackedQueueTest.js b/test/unit/providers/queuing/storageBackedQueueTest.js index 52986637..c4bdc706 100644 --- a/test/unit/providers/queuing/storageBackedQueueTest.js +++ b/test/unit/providers/queuing/storageBackedQueueTest.js @@ -16,8 +16,8 @@ describe('storageBackedQueue', () => { const createTestQueue = (memoryQueue, storageQueue) => { const options = { logger: { - verbose: sinon.stub(), - }, + verbose: sinon.stub() + } } return new StorageBackedQueue(memoryQueue, storageQueue, options) } @@ -246,5 +246,5 @@ const createQueueStub = () => ({ push: sinon.stub(), pop: sinon.stub(), done: sinon.stub(), - getInfo: sinon.stub(), + getInfo: sinon.stub() }) diff --git a/test/unit/providers/store/attachmentStoreTests.js b/test/unit/providers/store/attachmentStoreTests.js index ee8e825b..c1f7e900 100644 --- a/test/unit/providers/store/attachmentStoreTests.js +++ b/test/unit/providers/store/attachmentStoreTests.js @@ -13,8 +13,8 @@ describe('AttachmentStore', () => { _metadata: { type: 'test', fetchedAt: 'now', processedAt: 'then', extra: 'value' }, _attachments: [ { token: '42', attachment: '42 attachment' }, - { token: '13', attachment: '13 attachment' }, - ], + { token: '13', attachment: '13 attachment' } + ] } await store.upsert(document) const baseStore = store.baseStore @@ -51,7 +51,7 @@ describe('AttachmentStore', () => { it('works with no attachments', async () => { const { store } = setup() 
const document = { - _metadata: { type: 'test', fetchedAt: 'now', processedAt: 'then', extra: 'value' }, + _metadata: { type: 'test', fetchedAt: 'now', processedAt: 'then', extra: 'value' } } await store.upsert(document) const baseStore = store.baseStore From b97c0854c3f49ebc12fdbb3467c90ab1d4c532e0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lukas=20Spie=C3=9F?= Date: Wed, 14 Feb 2024 13:12:09 +0100 Subject: [PATCH 11/11] Remove formatting rules from ESLint --- eslint.config.js | 2 -- 1 file changed, 2 deletions(-) diff --git a/eslint.config.js b/eslint.config.js index ff3291eb..c22296eb 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -15,8 +15,6 @@ module.exports = [ } }, rules: { - quotes: ['error', 'single'], - semi: ['error', 'never'], 'no-console': 'off' } },
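
Note: with the `quotes` and `semi` entries deleted by this patch, the flat config shown in the hunk above no longer enforces any formatting. A minimal sketch of what the resulting eslint.config.js could look like after the change — the `languageOptions` values and the surrounding shape are assumptions for illustration (only the `rules` block and closing braces are visible as context in the hunk), not the repository's actual file:

    // eslint.config.js — post-patch sketch (ESLint >=8.56 flat config).
    // ecmaVersion/sourceType are assumed values, not taken from the diff;
    // the project appears to use require/module.exports, hence 'commonjs'.
    module.exports = [
      {
        languageOptions: {
          ecmaVersion: 2022,      // assumption: not shown in the hunk
          sourceType: 'commonjs'  // assumption: matches module.exports usage
        },
        rules: {
          // quotes/semi removed: quoting and semicolon style are no longer
          // ESLint errors, leaving only non-stylistic rules here
          'no-console': 'off'
        }
      }
    ]

This keeps lint output focused on correctness issues, consistent with the earlier patches in this series that reformatted the test suite and dropped the unused .prettierrc.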