Refactor download script to use new backup download manifest
The new script pulls the backup download manifest, downloads the compressed databases it lists, verifies their checksums, then uncompresses them and saves them to the data dir.

This makes it much quicker and easier to sync the latest databases to development systems.
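For reference, the manifest written by scripts/compress-backups.js (below) is a JSON object keyed by database filename. An entry looks roughly like this; the values are illustrative, and the compressed filename is assumed to end in .gz based on the gzip -df call in scripts/download.js:

{
  "trade.db": {
    "name": "trade.db",
    "url": "https://downloads.ardent-industry.com/trade.db.gz",
    "size": 123456789,
    "sha256": "<64-character hex digest>"
  }
}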
iaincollins committed Sep 24, 2024
1 parent ea8f84e commit c3ad814
Showing 4 changed files with 67 additions and 51 deletions.
12 changes: 12 additions & 0 deletions lib/utils/get-file-hash.js
@@ -0,0 +1,12 @@
+const fs = require('fs')
+const crypto = require('crypto')
+
+module.exports = async (pathToFile, algorithm = 'sha256') => {
+  return await new Promise((resolve, reject) => {
+    const hash = crypto.createHash(algorithm)
+    const rs = fs.createReadStream(pathToFile)
+    rs.on('error', reject)
+    rs.on('data', chunk => hash.update(chunk))
+    rs.on('end', () => resolve(hash.digest('hex')))
+  })
+}
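The helper streams the file through the hash rather than reading it into memory, which matters for multi-gigabyte database files. A minimal usage sketch (the path is illustrative):

const getFileHash = require('./lib/utils/get-file-hash')

// Defaults to sha256; a different algorithm can be passed as the second argument
getFileHash('/var/ardent/backup/trade.db')
  .then(hash => console.log(hash)) // logs a hex digest, e.g. 'e3b0c442…'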
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
 {
   "name": "ardent-collector",
-  "version": "1.54.0",
+  "version": "1.54.1",
   "description": "Ardent Collector saves data submitted to EDDN",
   "main": "index.js",
   "scripts": {
38 changes: 13 additions & 25 deletions scripts/compress-backups.js
@@ -1,37 +1,24 @@
 const path = require('path')
 const fs = require('fs')
 const { execSync } = require('child_process')
-const crypto = require('crypto')
+const getFileHash = require('../lib/utils/get-file-hash')
 const byteSize = require('byte-size')
 
 const { ARDENT_BACKUP_DIR } = require('../lib/consts')
 
-const pathToLocationsDbBackup = path.join(ARDENT_BACKUP_DIR, '/locations.db')
-const pathToTradeDbBackup = path.join(ARDENT_BACKUP_DIR, '/trade.db')
-const pathToStationsDbBackup = path.join(ARDENT_BACKUP_DIR, '/stations.db')
-const pathToSystemsDbBackup = path.join(ARDENT_BACKUP_DIR, '/systems.db')
-
-async function getFileHash (pathToFile) {
-  return await new Promise((resolve, reject) => {
-    const hash = crypto.createHash('sha256')
-    const rs = fs.createReadStream(pathToFile)
-    rs.on('error', reject)
-    rs.on('data', chunk => hash.update(chunk))
-    rs.on('end', () => resolve(hash.digest('hex')))
-  })
-}
+const pathToBackupDownloadManifest = path.join(ARDENT_BACKUP_DIR, 'backup-downloads.json')
+
+const databasesToBackup = [
+  path.join(ARDENT_BACKUP_DIR, '/locations.db'),
+  path.join(ARDENT_BACKUP_DIR, '/trade.db'),
+  path.join(ARDENT_BACKUP_DIR, '/stations.db'),
+  path.join(ARDENT_BACKUP_DIR, '/systems.db')
+]
 
 ;(async () => {
   console.log('Compressing backups …')
   console.time('Compressed backups')
-  const compressedBackups = {}
-
-  const databasesToBackup = [
-    pathToLocationsDbBackup,
-    pathToTradeDbBackup,
-    pathToStationsDbBackup,
-    pathToSystemsDbBackup
-  ]
+  const backupDownloadManifest = {}
 
   for (const pathToDatabase of databasesToBackup) {
     console.log(`Compressing ${path.basename(pathToDatabase)} …`)
@@ -47,7 +34,7 @@ async function getFileHash (pathToFile)
     console.log(`Created ${path.basename(pathToOutput)} (${byteSize(newSize)}), saved ${byteSize(oldSize - newSize)}`)
     console.timeEnd(`Compressed ${path.basename(pathToDatabase)}`)
     try {
-      compressedBackups[path.basename(pathToDatabase)] = {
+      backupDownloadManifest[path.basename(pathToDatabase)] = {
         name: path.basename(pathToDatabase),
         url: `https://downloads.ardent-industry.com/${path.basename(pathToOutput)}`,
         size: newSize,
@@ -60,7 +47,8 @@
   }
 
   // Update list of compressed backups available for download
-  fs.writeFileSync(path.join(ARDENT_BACKUP_DIR, 'backup-downloads.json'), JSON.stringify(compressedBackups, null, 2))
+  fs.writeFileSync(pathToBackupDownloadManifest, JSON.stringify(backupDownloadManifest, null, 2))
+  console.log(`Saved backup download manifest to ${pathToBackupDownloadManifest}`)
 
   console.timeEnd('Compressed backups')

66 changes: 41 additions & 25 deletions scripts/download.js
@@ -1,28 +1,24 @@
 const fs = require('fs')
-const { mkdir, rm, rmdir } = require('fs/promises')
+const { mkdir, rm } = require('fs/promises')
 const { Readable } = require('stream')
 const { finished } = require('stream/promises')
+const { execSync } = require('child_process')
 const path = require('path')
 const byteSize = require('byte-size')
+const getFileHash = require('../lib/utils/get-file-hash')
 
 const { ARDENT_DATA_DIR } = require('../lib/consts')
 const TMP_DOWNLOAD_DIR = path.join(ARDENT_DATA_DIR, 'tmp')
-const BASE_URL = 'https://downloads.ardent-industry.com/'
-const FILES = [
-  'systems.db',
-  'trade.db',
-  'stations.db',
-  'locations.db'
-]
-
-async function download (url, destination) {
-  if (fs.existsSync(destination)) await rm(destination)
+const BACKUP_DOWNLOAD_MANIFEST = 'https://downloads.ardent-industry.com/backup-downloads.json'
+
+async function download (url, pathToDestination) {
+  if (fs.existsSync(pathToDestination)) await rm(pathToDestination)
   const res = await fetch(url)
-  const fileStream = fs.createWriteStream(destination, { flags: 'wx' })
+  const fileStream = fs.createWriteStream(pathToDestination, { flags: 'wx' })
   await finished(Readable.fromWeb(res.body).pipe(fileStream))
 }
 
-function saveDownload (copyFrom, copyTo) {
+function syncToDataDir (copyFrom, copyTo) {
   fs.rmSync(copyTo, { force: true })
   fs.rmSync(`${copyTo}-journal`, { force: true })
   fs.rmSync(`${copyTo}-shm`, { force: true })
@@ -33,18 +29,38 @@ function saveDownload (copyFrom, copyTo)
 ;(async () => {
   if (!fs.existsSync(ARDENT_DATA_DIR)) await mkdir(ARDENT_DATA_DIR)
   if (!fs.existsSync(TMP_DOWNLOAD_DIR)) await mkdir(TMP_DOWNLOAD_DIR)
+  const res = await fetch(BACKUP_DOWNLOAD_MANIFEST)
+  const files = await res.json()
 
-  for (const fileName of FILES) {
-    const downloadDestination = path.resolve(TMP_DOWNLOAD_DIR, fileName)
-    const saveDestination = path.resolve(ARDENT_DATA_DIR, fileName)
-    const url = `${BASE_URL}${fileName}`
-    console.log(`Downloading ${url} …`)
-    await download(url, downloadDestination)
-    const stats = fs.statSync(downloadDestination)
-    console.log(` * Download of ${fileName} complete (${(byteSize(stats.size))})`)
-    saveDownload(downloadDestination, saveDestination)
-    console.log(` * Saved to ${saveDestination}`)
-  }
-
-  rmdir(TMP_DOWNLOAD_DIR)
+  for (const f in files) {
+    const file = files[f]
+
+    const pathToDownload = path.resolve(TMP_DOWNLOAD_DIR, path.basename(file.url))
+    const pathToUncompressedFile = path.resolve(TMP_DOWNLOAD_DIR, file.name)
+
+    console.log(`Downloading ${file.url} (${(byteSize(file.size))}) …`)
+    console.time(`Downloaded ${path.basename(file.url)}`)
+    await download(file.url, pathToDownload)
+    console.timeEnd(`Downloaded ${path.basename(file.url)}`)
+
+    const checksum = await getFileHash(pathToDownload)
+    if (checksum === file.sha256) {
+      console.log(`Checksum verified: ${file.sha256}`)
+    } else {
+      throw new Error(`Checksum did not match expected value\nExpected: ${file.sha256}\nActual: ${checksum}`)
+    }
+
+    console.log(`Uncompressing ${path.basename(file.url)} …`)
+    console.time(`Uncompressed ${path.basename(file.url)}`)
+    execSync(`gzip -df ${pathToDownload}`)
+    const { size } = fs.statSync(pathToUncompressedFile)
+    console.log(`Uncompressed file size is ${(byteSize(size))}`)
+    console.timeEnd(`Uncompressed ${path.basename(file.url)}`)
+
+    console.time(`Saved ${file.name} to ${ARDENT_DATA_DIR}`)
+    syncToDataDir(pathToUncompressedFile, path.resolve(ARDENT_DATA_DIR, file.name))
+    console.timeEnd(`Saved ${file.name} to ${ARDENT_DATA_DIR}`)
+  }
 })()
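For anyone syncing backups by hand, the same check the script performs can be reproduced in a few lines. This is a sketch rather than part of the commit, assuming a compressed file has already been downloaded to pathToDownload:

const getFileHash = require('./lib/utils/get-file-hash')

async function verifyDownload (pathToDownload, name) {
  // Fetch the manifest and look up the expected checksum by database filename
  const res = await fetch('https://downloads.ardent-industry.com/backup-downloads.json')
  const manifest = await res.json()
  const checksum = await getFileHash(pathToDownload)
  if (checksum !== manifest[name].sha256) throw new Error(`Checksum mismatch for ${name}`)
  console.log(`Checksum verified: ${checksum}`)
}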
