Skip to content

Commit

Permalink
Add compressed backups
Browse files Browse the repository at this point in the history
Storage has recently been expanded so there is now enough space to also hold compressed backups, which has made this possible.

Previously there wasn't quite enough space to safely store live data, a live backup, an old compressed version and a new compressed version.

This should make it much quicker to download backups. Once this is deployed, the downloads page on the site will be updated to link to the compressed versions; for now uncompressed versions will still be kept available.
  • Loading branch information
iaincollins committed Sep 24, 2024
1 parent a49cc0f commit 5c3c72d
Show file tree
Hide file tree
Showing 2 changed files with 28 additions and 1 deletion.
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "ardent-collector",
"version": "1.52.1",
"version": "1.53.0",
"description": "Ardent Collector saves data submitted to EDDN",
"main": "index.js",
"scripts": {
Expand Down
27 changes: 27 additions & 0 deletions scripts/backup.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ const checkDiskSpace = require('check-disk-space').default
const fastFolderSizeSync = require('fast-folder-size/sync')
const byteSize = require('byte-size')
const SqlLiteDatabase = require('better-sqlite3')
const { execSync } = require('child_process')

const {
ARDENT_DATA_DIR,
Expand Down Expand Up @@ -63,6 +64,12 @@ const MIN_ROWS_FOR_BACKUP_VALIDATION = 100
backupDatabase(systemsDb, pathToSystemsDbBackup)
verifyResults.push(verifyBackup(pathToSystemsDbBackup, ['systems'], TEN_MB_IN_BYTES))

const compressedBackups = []
compressedBackups.push(compressDatabase(pathToLocationsDbBackup))
compressedBackups.push(compressDatabase(pathToTradeDbBackup))
compressedBackups.push(compressDatabase(pathToStationsDbBackup))
compressedBackups.push(compressDatabase(pathToSystemsDbBackup))

console.timeEnd('Backup complete')
writeBackupLog(`Completed backup at ${new Date().toISOString()}`)

Expand All @@ -79,6 +86,7 @@ const MIN_ROWS_FOR_BACKUP_VALIDATION = 100
dataDirSizeInBytes,
freeDiskSpaceInBytes,
databases: verifyResults,
compressedBackups,
timestamp: new Date().toISOString()
}
fs.writeFileSync(path.join(ARDENT_DATA_DIR, 'backup.json'), JSON.stringify(backupReport, null, 2))
Expand Down Expand Up @@ -135,3 +143,22 @@ function verifyBackup (pathToBackupTargetLocation, tables, minDbSizeInBytes) {

return result
}

/**
 * Compress a database backup file with gzip, writing `<db>.gz` alongside it.
 *
 * Writes to `<db>.tmp.gz` first and then renames, so an interrupted
 * compression run never clobbers an existing good `.gz` archive.
 *
 * @param {string} pathToDatabase - Path to the (uncompressed) backup file.
 * @returns {{name: string, size: number}} Basename and byte size of the
 *   compressed output, for inclusion in the backup report.
 * @throws If gzip exits non-zero or the rename/stat calls fail.
 */
function compressDatabase (pathToDatabase) {
  console.log(`Compressing ${path.basename(pathToDatabase)} …`)
  console.time(`Compressed ${path.basename(pathToDatabase)}`)
  const pathToOutput = `${pathToDatabase}.gz`
  const pathToTmpOutput = `${pathToDatabase}.tmp.gz`
  // NOTE: execSync is synchronous and throws on failure; it takes no
  // callback (that is exec's API — the original callback was dead code).
  // Paths are quoted so spaces/shell metacharacters don't break the command.
  execSync(`gzip -cf "${pathToDatabase}" > "${pathToTmpOutput}"`)
  // Atomically promote the finished temp file to the final .gz name.
  fs.renameSync(pathToTmpOutput, pathToOutput)
  const { size: oldSize } = fs.statSync(pathToDatabase)
  const { size: newSize } = fs.statSync(pathToOutput)
  console.log(`Saved compressed backup to ${path.basename(pathToOutput)} (${byteSize(newSize)}), saved ${byteSize(oldSize - newSize)}`)
  console.timeEnd(`Compressed ${path.basename(pathToDatabase)}`)
  return {
    name: path.basename(pathToOutput),
    size: newSize
  }
}

0 comments on commit 5c3c72d

Please sign in to comment.