diff --git a/lib/utils/get-file-hash.js b/lib/utils/get-file-hash.js
new file mode 100644
index 0000000..03f7742
--- /dev/null
+++ b/lib/utils/get-file-hash.js
@@ -0,0 +1,12 @@
+const fs = require('fs')
+const crypto = require('crypto')
+
+module.exports = async (pathToFile, algorithm = 'sha256') => {
+  return await new Promise((resolve, reject) => {
+    const hash = crypto.createHash(algorithm)
+    const rs = fs.createReadStream(pathToFile)
+    rs.on('error', reject)
+    rs.on('data', chunk => hash.update(chunk))
+    rs.on('end', () => resolve(hash.digest('hex')))
+  })
+}
\ No newline at end of file
diff --git a/package.json b/package.json
index 7a7e430..e7c7c94 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "ardent-collector",
-  "version": "1.54.0",
+  "version": "1.54.1",
   "description": "Ardent Collector saves data submitted to EDDN",
   "main": "index.js",
   "scripts": {
diff --git a/scripts/compress-backups.js b/scripts/compress-backups.js
index bb4a12d..02d5b58 100644
--- a/scripts/compress-backups.js
+++ b/scripts/compress-backups.js
@@ -1,37 +1,24 @@
 const path = require('path')
 const fs = require('fs')
 const { execSync } = require('child_process')
-const crypto = require('crypto')
+const getFileHash = require('../lib/utils/get-file-hash')
 const byteSize = require('byte-size')
 const { ARDENT_BACKUP_DIR } = require('../lib/consts')
 
-const pathToLocationsDbBackup = path.join(ARDENT_BACKUP_DIR, '/locations.db')
-const pathToTradeDbBackup = path.join(ARDENT_BACKUP_DIR, '/trade.db')
-const pathToStationsDbBackup = path.join(ARDENT_BACKUP_DIR, '/stations.db')
-const pathToSystemsDbBackup = path.join(ARDENT_BACKUP_DIR, '/systems.db')
+const pathToBackupDownloadManifest = path.join(ARDENT_BACKUP_DIR, 'backup-downloads.json')
 
-async function getFileHash (pathToFile) {
-  return await new Promise((resolve, reject) => {
-    const hash = crypto.createHash('sha256')
-    const rs = fs.createReadStream(pathToFile)
-    rs.on('error', reject)
-    rs.on('data', chunk => hash.update(chunk))
-    rs.on('end', () => resolve(hash.digest('hex')))
-  })
- }
+const databasesToBackup = [
+  path.join(ARDENT_BACKUP_DIR, '/locations.db'),
+  path.join(ARDENT_BACKUP_DIR, '/trade.db'),
+  path.join(ARDENT_BACKUP_DIR, '/stations.db'),
+  path.join(ARDENT_BACKUP_DIR, '/systems.db')
+]
 
- ;(async () => {
+;(async () => {
   console.log('Compressing backups …')
   console.time('Compressed backups')
 
-  const compressedBackups = {}
-
-  const databasesToBackup = [
-    pathToLocationsDbBackup,
-    pathToTradeDbBackup,
-    pathToStationsDbBackup,
-    pathToSystemsDbBackup
-  ]
+  const backupDownloadManifest = {}
 
   for (const pathToDatabase of databasesToBackup) {
     console.log(`Compressing ${path.basename(pathToDatabase)} …`)
@@ -47,7 +34,7 @@ async function getFileHash (pathToFile) {
     console.log(`Created ${path.basename(pathToOutput)} (${byteSize(newSize)}), saved ${byteSize(oldSize - newSize)}`)
     console.timeEnd(`Compressed ${path.basename(pathToDatabase)}`)
     try {
-      compressedBackups[path.basename(pathToDatabase)] = {
+      backupDownloadManifest[path.basename(pathToDatabase)] = {
         name: path.basename(pathToDatabase),
         url: `https://downloads.ardent-industry.com/${path.basename(pathToOutput)}`,
         size: newSize,
@@ -60,7 +47,8 @@
   }
 
   // Update list of compressed backups available for download
-  fs.writeFileSync(path.join(ARDENT_BACKUP_DIR, 'backup-downloads.json'), JSON.stringify(compressedBackups, null, 2))
+  fs.writeFileSync(pathToBackupDownloadManifest, JSON.stringify(backupDownloadManifest, null, 2))
+  console.log(`Saved backup download manifest to ${pathToBackupDownloadManifest}`)
 
   console.timeEnd('Compressed backups')
 })()
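Note on the extracted helper: `lib/utils/get-file-hash.js` streams the file through `crypto.createHash` rather than buffering it, so hashing stays memory-cheap even for multi-gigabyte database backups, and the digest algorithm is now a parameter. A minimal usage sketch (the backup path here is illustrative):

```js
const getFileHash = require('./lib/utils/get-file-hash')

;(async () => {
  // Defaults to SHA-256, the digest used by the backup pipeline
  const sha256 = await getFileHash('/path/to/backups/trade.db')

  // Any algorithm accepted by crypto.createHash() can be passed explicitly
  const sha512 = await getFileHash('/path/to/backups/trade.db', 'sha512')

  console.log({ sha256, sha512 })
})()
```

The same function now serves both ends of the pipeline: compress-backups.js builds the manifest entries, and download.js (below) verifies each downloaded file against the manifest's `sha256` field.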
diff --git a/scripts/download.js b/scripts/download.js
index 7608452..513585c 100644
--- a/scripts/download.js
+++ b/scripts/download.js
@@ -1,28 +1,24 @@
 const fs = require('fs')
-const { mkdir, rm, rmdir } = require('fs/promises')
+const { mkdir, rm } = require('fs/promises')
 const { Readable } = require('stream')
 const { finished } = require('stream/promises')
+const { execSync } = require('child_process')
 const path = require('path')
 const byteSize = require('byte-size')
+const getFileHash = require('../lib/utils/get-file-hash')
 const { ARDENT_DATA_DIR } = require('../lib/consts')
 
 const TMP_DOWNLOAD_DIR = path.join(ARDENT_DATA_DIR, 'tmp')
-const BASE_URL = 'https://downloads.ardent-industry.com/'
-const FILES = [
-  'systems.db',
-  'trade.db',
-  'stations.db',
-  'locations.db'
-]
-
-async function download (url, destination) {
-  if (fs.existsSync(destination)) await rm(destination)
+const BACKUP_DOWNLOAD_MANIFEST = 'https://downloads.ardent-industry.com/backup-downloads.json'
+
+async function download (url, pathToDestination) {
+  if (fs.existsSync(pathToDestination)) await rm(pathToDestination)
   const res = await fetch(url)
-  const fileStream = fs.createWriteStream(destination, { flags: 'wx' })
+  const fileStream = fs.createWriteStream(pathToDestination, { flags: 'wx' })
   await finished(Readable.fromWeb(res.body).pipe(fileStream))
 }
 
-function saveDownload (copyFrom, copyTo) {
+function syncToDataDir (copyFrom, copyTo) {
   fs.rmSync(copyTo, { force: true })
   fs.rmSync(`${copyTo}-journal`, { force: true })
   fs.rmSync(`${copyTo}-shm`, { force: true })
@@ -33,18 +29,37 @@
 (async () => {
   if (!fs.existsSync(ARDENT_DATA_DIR)) await mkdir(ARDENT_DATA_DIR)
   if (!fs.existsSync(TMP_DOWNLOAD_DIR)) await mkdir(TMP_DOWNLOAD_DIR)
+  const res = await fetch(BACKUP_DOWNLOAD_MANIFEST)
+  const files = await res.json()
 
-  for (const fileName of FILES) {
-    const downloadDestination = path.resolve(TMP_DOWNLOAD_DIR, fileName)
-    const saveDestination = path.resolve(ARDENT_DATA_DIR, fileName)
-    const url = `${BASE_URL}${fileName}`
-    console.log(`Downloading ${url} …`)
-    await download(url, downloadDestination)
-    const stats = fs.statSync(downloadDestination)
-    console.log(` * Download of ${fileName} complete (${(byteSize(stats.size))})`)
-    saveDownload(downloadDestination, saveDestination)
-    console.log(` * Saved to ${saveDestination}`)
-  }
+  for (const f in files) {
+    const file = files[f]
+
+    const pathToDownload = path.resolve(TMP_DOWNLOAD_DIR, path.basename(file.url))
+    const pathToUncompressedFile = path.resolve(TMP_DOWNLOAD_DIR, file.name)
+
+    console.log(`Downloading ${file.url} (${(byteSize(file.size))}) …`)
+    console.time(`Downloaded ${path.basename(file.url)}`)
+    await download(file.url, pathToDownload)
+    console.timeEnd(`Downloaded ${path.basename(file.url)}`)
 
-  rmdir(TMP_DOWNLOAD_DIR)
+    const checksum = await getFileHash(pathToDownload)
+    if (checksum === file.sha256) {
+      console.log(`Checksum verified: ${file.sha256}`)
+    } else {
+      throw new Error(`Checksum did not match expected value\nExpected: ${file.sha256}\nActual: ${checksum}`)
+    }
+
+    console.log(`Uncompressing ${path.basename(file.url)} …`)
+    console.time(`Uncompressed ${path.basename(file.url)}`)
+    // execSync is synchronous and throws on failure; it does not accept a callback
+    execSync(`gzip -df ${pathToDownload}`)
+    const { size } = fs.statSync(pathToUncompressedFile)
+    console.log(`Uncompressed file size is ${(byteSize(size))}`)
+    console.timeEnd(`Uncompressed ${path.basename(file.url)}`)
+
+    console.time(`Saved ${file.name} to ${ARDENT_DATA_DIR}`)
+    syncToDataDir(pathToUncompressedFile, path.resolve(ARDENT_DATA_DIR, file.name))
+    console.timeEnd(`Saved ${file.name} to ${ARDENT_DATA_DIR}`)
+  }
 })()
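A note on the uncompress step: shelling out to `gzip -df` assumes a gzip binary on the PATH and that the download path never needs shell quoting. Not part of this change, but if portability ever matters, the same step could be done in-process with Node's built-in zlib; a rough sketch under those assumptions (the `gunzip` helper name is hypothetical):

```js
const fs = require('fs')
const zlib = require('zlib')
const { pipeline } = require('stream/promises')

// Stream-decompress a downloaded .gz file, then remove the compressed
// original so the end state matches what `gzip -d` leaves behind
async function gunzip (pathToCompressedFile, pathToUncompressedFile) {
  await pipeline(
    fs.createReadStream(pathToCompressedFile),
    zlib.createGunzip(),
    fs.createWriteStream(pathToUncompressedFile)
  )
  fs.rmSync(pathToCompressedFile)
}
```

Used as `await gunzip(pathToDownload, pathToUncompressedFile)` in place of the execSync call, it also surfaces failures as rejected promises rather than a thrown child-process error.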