From ca21a7cf9737a0926431d8d4fb9d0bf743371b91 Mon Sep 17 00:00:00 2001 From: Nazar Duchak Date: Mon, 24 Aug 2020 16:24:24 +0300 Subject: [PATCH] feat: finish backup tests --- .tasegir.js | 2 +- config/default.json5 | 2 +- config/test.json5 | 4 +- src/index.ts | 13 +--- src/sequelize.ts | 2 +- src/utils.ts | 22 +++--- test/db-back-up-job.spec.ts | 144 ++++++++++++++++++++++++++++++++++-- 7 files changed, 153 insertions(+), 36 deletions(-) diff --git a/.tasegir.js b/.tasegir.js index d04bdb31..e22d76de 100644 --- a/.tasegir.js +++ b/.tasegir.js @@ -4,7 +4,7 @@ module.exports = { }, depCheck: { ignore: [ - 'tasegir', 'reflect-metadata', '@types/*', 'sqlite3', '@oclif/*', + 'cross-env', 'tasegir', 'reflect-metadata', '@types/*', 'sqlite3', '@oclif/*', ] } } diff --git a/config/default.json5 b/config/default.json5 index 1367b641..675a1249 100644 --- a/config/default.json5 +++ b/config/default.json5 @@ -1,6 +1,6 @@ // For full syntax see /src/types.ts::Config interface { - db: 'sqlite:db.sqlite', + db: 'db.sqlite', // CORS setting, please consult https://expressjs.com/en/resources/middleware/cors.html for more details cors: { diff --git a/config/test.json5 b/config/test.json5 index fe62e19f..cb3606ce 100644 --- a/config/test.json5 +++ b/config/test.json5 @@ -1,6 +1,6 @@ { - db: 'sqlite://db_test.sqlite', - dbBackUp: { block: 10, path: 'db-backup' }, + db: 'db_test.sqlite', + dbBackUp: { blocks: 10, path: 'db-backup' }, blockchain: { provider: 'ws://localhost:8545', waitBlockCountBeforeConfirmationRemoved: 10 diff --git a/src/index.ts b/src/index.ts index 90becdcf..bd5a07e5 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,18 +1,7 @@ import { loggingFactory } from './logger' import { appFactory } from './app' -import config from 'config' (async function (): Promise { const logger = loggingFactory() - const app = await appFactory() - const port = config.get('port') - const server = app.listen(port) - - process.on('unhandledRejection', err => - logger.error(`Unhandled 
Rejection at: ${err}`) - ) - - server.on('listening', () => - logger.info('Feathers application started on port %d', port) - ) + const app = await appFactory({ appResetCallBack: () => { throw new Error('Reset callback not implemented') } }) })() diff --git a/src/sequelize.ts b/src/sequelize.ts index 48e6d0bb..f63656bf 100644 --- a/src/sequelize.ts +++ b/src/sequelize.ts @@ -31,7 +31,7 @@ export function sequelizeFactory (): Sequelize { transactionType: 'IMMEDIATE' } - return new Sequelize(config.get('db'), dbSettings) + return new Sequelize(`sqlite:${config.get('db')}`, dbSettings) } export function BigNumberStringType (propName: string): Partial { diff --git a/src/utils.ts b/src/utils.ts index ae155bff..79b8c1c3 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -213,11 +213,11 @@ export abstract class BaseCLICommand extends Command { type BackUpEntry = { name: string, block: { hash: string, number: BigNumber } } function parseBackUps (backUpName: string): BackUpEntry { - const [block, name] = backUpName.split('.')[0].split('-')[0] + const [block] = backUpName.split('.')[0].split('-') const [hash, blockNumber] = block.split(':') return { - name: name, + name: backUpName, block: { number: new BigNumber(blockNumber), hash } } } @@ -232,7 +232,7 @@ function getBackUps (): BackUpEntry[] { .map(parseBackUps) .sort( (a: Record, b: Record) => - a.block.number.gt(b.block.number) ? 1 : -1 + a.block.number.gt(b.block.number) ? 
-1 : 1 ) } @@ -274,14 +274,16 @@ export async function restoreDb (): Promise { } export class DbBackUpJob { - private newBlockEmitter: AutoStartStopEventEmitter - private backUpConfig: DbBackUpConfig + private readonly db: string + private readonly newBlockEmitter: AutoStartStopEventEmitter + private readonly backUpConfig: DbBackUpConfig constructor (newBlockEmitter: AutoStartStopEventEmitter) { if (!config.has('dbBackUp')) { throw new Error('DB Backup config not exist') } - this.backUpConfig = config.get<{ blocks: number, path: string }>('dbBackUp') + this.backUpConfig = config.get('dbBackUp') + this.db = config.get('db') const eventEmittersConfirmations = this.getEventEmittersConfigs() const invalidConfirmation = eventEmittersConfirmations.find(c => c.config.confirmations && c.config.confirmations > this.backUpConfig.blocks) @@ -298,13 +300,11 @@ export class DbBackUpJob { } private async backupHandler (block: BlockHeader): Promise { - const db = config.get('db') const [lastBackUp, previousBackUp] = getBackUps() - if (new BigNumber(block.number).minus(this.backUpConfig.blocks).gte(lastBackUp.block.number)) { + if (!lastBackUp || new BigNumber(block.number).minus(this.backUpConfig.blocks).gte(lastBackUp.block.number)) { // copy and rename current db - await fs.promises.copyFile(db, this.backUpConfig.path) - await fs.promises.rename(path.resolve(this.backUpConfig.path, db), path.resolve(this.backUpConfig.path, `${block.hash}:${block.number}-${db}`)) + await fs.promises.copyFile(this.db, path.resolve(this.backUpConfig.path, `${block.hash}:${block.number}-${this.db}`)) // remove the oldest version if (previousBackUp) { @@ -342,7 +342,7 @@ export class DbBackUpJob { } public run (): void { - this.newBlockEmitter.on(NEW_BLOCK_EVENT_NAME, this.backupHandler) + this.newBlockEmitter.on(NEW_BLOCK_EVENT_NAME, this.backupHandler.bind(this)) } public stop (): void { diff --git a/test/db-back-up-job.spec.ts b/test/db-back-up-job.spec.ts index b3175a5e..45d4a44e 100644 --- 
a/test/db-back-up-job.spec.ts +++ b/test/db-back-up-job.spec.ts @@ -1,23 +1,44 @@ +import path from 'path' import fs from 'fs' import config from 'config' import chai from 'chai' import sinonChai from 'sinon-chai' import { Eth } from 'web3-eth' -import { Substitute } from '@fluffy-spoon/substitute' +import { Substitute, Arg } from '@fluffy-spoon/substitute' import { DbBackUpJob } from '../src/utils' import { getNewBlockEmitter } from '../src/blockchain/utils' +import { blockMock, sleep } from './utils' +import { NEW_BLOCK_EVENT_NAME } from '../src/blockchain/new-block-emitters' +import { DbBackUpConfig } from '../src/definitions' chai.use(sinonChai) const expect = chai.expect -describe('DB back up job', () => { +function rmDir (folder: string) { + if (fs.existsSync(folder)) { + for (const file of fs.readdirSync(folder)) { + fs.unlinkSync(path.join(folder, file)) + } + + fs.rmdirSync(config.get('dbBackUp').path, { recursive: true }) + } +} + +describe('DB backUp/restore', () => { const configBackUp = { db: config.get('db'), dbBackUp: config.get('dbBackUp') } - const eth = Substitute.for() - const newBlockEmitter = getNewBlockEmitter(eth) - describe('DbBackUpJob constructor', () => { - afterEach(() => config.util.extendDeep(config, configBackUp)) + afterEach(() => { + config.util.extendDeep(config, configBackUp) + + rmDir(config.get('dbBackUp').path) + }) + + describe('Back Up', () => { + const eth = Substitute.for() + const newBlockEmitter = getNewBlockEmitter(eth) + + beforeEach(() => rmDir(config.get('dbBackUp').path)) it('should throw error if "dbBackUp" not in config', () => { // @ts-ignore @@ -29,10 +50,15 @@ describe('DB back up job', () => { Error, 'DB Backup config not exist' ) + + // @ts-ignore + config.util.extendDeep(config, configBackUp) }) it('should throw error if confirmation range greater the backup range', () => { // @ts-ignore - config.util.extendDeep(config, { dbBackUp: { blocks: 2 }, rns: { enabled: true, owner: { eventsEmitter: { confirmations: 
5 } } } }) + config.util.extendDeep(config, { + rns: { enabled: true, owner: { eventsEmitter: { confirmations: 11 } } } + }) const invalidConfirmation = { name: 'rns.owner' } @@ -40,16 +66,118 @@ describe('DB back up job', () => { Error, `Invalid db backup configuration. Number of backup blocks should be greater then confirmation blocks for ${invalidConfirmation.name} service` ) + + // @ts-ignore + config.util.extendDeep(config, { rns: { enabled: false } }) }) it('should create back-up folder if not exist', () => { const dbPath = config.get('dbBackUp.path') - if (fs.existsSync(dbPath)) fs.rmdirSync(dbPath, { recursive: true }) expect(fs.existsSync(dbPath)).to.be.false() const job = new DbBackUpJob(newBlockEmitter) expect(fs.existsSync(dbPath)).to.be.true() }) + it('should make backup if not exist', async () => { + eth.getBlock(Arg.all()).returns(Promise.resolve(blockMock(10))) + const emitter = getNewBlockEmitter(eth) + const backUpPath = config.get('dbBackUp').path + + const job = new DbBackUpJob(emitter) + + job.run() + await sleep(300) + + // should have one db back up already + const files = fs.readdirSync(backUpPath) + expect(files.length).to.be.eql(1) + expect(files).to.be.eql([`0x123:${10}-${config.get('db')}`]) + }) + it('should not make backup if blocks condition not met', async () => { + const eth = Substitute.for() + eth.getBlock(Arg.all()).returns(Promise.resolve(blockMock(10))) + const emitter = getNewBlockEmitter(eth) + const backUpPath = config.get('dbBackUp').path + + const job = new DbBackUpJob(emitter) + + job.run() + await sleep(300) + + // should have one db back up already + const files = fs.readdirSync(backUpPath) + expect(files.length).to.be.eql(1) + expect(files).to.be.eql([`0x123:${10}-${config.get('db')}`]) + + // should skip this block as it's not met condition + emitter.emit(NEW_BLOCK_EVENT_NAME, blockMock(13)) + await sleep(300) + + const files2 = fs.readdirSync(backUpPath) + expect(files2.length).to.be.eql(1) + 
expect(files2).to.be.eql([`0x123:${10}-${config.get('db')}`]) + }) + it('should add second backup', async () => { + const eth = Substitute.for() + eth.getBlock(Arg.all()).returns(Promise.resolve(blockMock(10))) + const emitter = getNewBlockEmitter(eth) + const backups = [] + const backUpPath = config.get('dbBackUp').path + const job = new DbBackUpJob(emitter) + + job.run() + await sleep(300) + + // should have one db back up already + const files = fs.readdirSync(backUpPath) + backups.push(`0x123:${10}-${config.get('db')}`) + expect(files.length).to.be.eql(1) + expect(files).to.be.eql(backups) + + // should add another back up + emitter.emit(NEW_BLOCK_EVENT_NAME, blockMock(30)) + await sleep(300) + + const files2 = fs.readdirSync(backUpPath) + backups.push(`0x123:${30}-${config.get('db')}`) + expect(files2.length).to.be.eql(2) + expect(files2).to.be.eql(backups) + }) + it('should replace oldest back-up with fresh one', async () => { + const eth = Substitute.for() + eth.getBlock(Arg.all()).returns(Promise.resolve(blockMock(10))) + const emitter = getNewBlockEmitter(eth) + const backups = [] + const backUpPath = config.get('dbBackUp').path + const job = new DbBackUpJob(emitter) + + job.run() + await sleep(300) + + // should have one db back up already + const files = fs.readdirSync(backUpPath) + backups.push(`0x123:${10}-${config.get('db')}`) + expect(files.length).to.be.eql(1) + expect(files).to.be.eql(backups) + + // should add another back up + emitter.emit(NEW_BLOCK_EVENT_NAME, blockMock(30)) + await sleep(300) + + const files2 = fs.readdirSync(backUpPath) + backups.push(`0x123:${30}-${config.get('db')}`) + expect(files2.length).to.be.eql(2) + expect(files2).to.be.eql(backups) + + // should replace the oldest backup with fresh one + emitter.emit(NEW_BLOCK_EVENT_NAME, blockMock(45)) + await sleep(300) + + const files3 = fs.readdirSync(backUpPath) + backups.push(`0x123:${45}-${config.get('db')}`) + expect(files3.length).to.be.eql(2) + 
expect(files3).to.be.eql(backups.slice(1)) + }) }) })