diff --git a/framework/src/engine/endpoint/chain.ts b/framework/src/engine/endpoint/chain.ts index 69674084a6..ae7b36c747 100644 --- a/framework/src/engine/endpoint/chain.ts +++ b/framework/src/engine/endpoint/chain.ts @@ -148,7 +148,7 @@ export class ChainEndpoint { throw new Error('Invalid parameters. ids must be a non empty array.'); } if (!ids.every(id => isHexString(id))) { - throw new Error('Invalid parameters. id must a valid hex string.'); + throw new Error('Invalid parameters. id must be a valid hex string.'); } const transactions = []; try { diff --git a/framework/test/unit/engine/endpoint/chain.spec.ts b/framework/test/unit/engine/endpoint/chain.spec.ts index d00cf9d4f5..5625c2dfac 100644 --- a/framework/test/unit/engine/endpoint/chain.spec.ts +++ b/framework/test/unit/engine/endpoint/chain.spec.ts @@ -12,9 +12,16 @@ * Removal or modification of this copyright notice is prohibited. */ -import { Event, StateStore } from '@liskhq/lisk-chain'; +import { + Block, + BlockAssets, + BlockHeader, + Event, + StateStore, + Transaction, +} from '@liskhq/lisk-chain'; import { utils } from '@liskhq/lisk-cryptography'; -import { Batch, Database, InMemoryDatabase } from '@liskhq/lisk-db'; +import { Batch, Database, InMemoryDatabase, NotFoundError } from '@liskhq/lisk-db'; import { EMPTY_KEY, MODULE_STORE_PREFIX_BFT, @@ -24,12 +31,51 @@ import { import { bftParametersSchema, bftVotesSchema } from '../../../../src/engine/bft/schemas'; import { ChainEndpoint } from '../../../../src/engine/endpoint/chain'; import { createRequestContext } from '../../../utils/mocks/endpoint'; +import * as bftUtils from '../../../../src/engine/bft/utils'; describe('Chain endpoint', () => { const DEFAULT_INTERVAL = 10; let stateStore: StateStore; let endpoint: ChainEndpoint; let db: InMemoryDatabase; + const transaction = new Transaction({ + module: 'token', + command: 'transfer', + fee: BigInt(613000), + params: utils.getRandomBytes(100), + nonce: BigInt(2), + senderPublicKey: 
utils.getRandomBytes(32), + signatures: [utils.getRandomBytes(64)], + }); + const blockAsset = new BlockAssets(); + const getBlockAttrs = (attrs?: Record) => ({ + version: 1, + timestamp: 1009988, + height: 1009988, + previousBlockID: Buffer.from('4a462ea57a8c9f72d866c09770e5ec70cef18727', 'hex'), + stateRoot: Buffer.from('7f9d96a09a3fd17f3478eb7bef3a8bda00e1238b', 'hex'), + transactionRoot: Buffer.from('b27ca21f40d44113c2090ca8f05fb706c54e87dd', 'hex'), + assetRoot: Buffer.from('b27ca21f40d44113c2090ca8f05fb706c54e87dd', 'hex'), + eventRoot: Buffer.from( + '30dda4fbc395828e5a9f2f8824771e434fce4945a1e7820012440d09dd1e2b6d', + 'hex', + ), + generatorAddress: Buffer.from('be63fb1c0426573352556f18b21efd5b6183c39c', 'hex'), + maxHeightPrevoted: 1000988, + maxHeightGenerated: 1000988, + impliesMaxPrevotes: true, + validatorsHash: utils.hash(Buffer.alloc(0)), + aggregateCommit: { + height: 0, + aggregationBits: Buffer.alloc(0), + certificateSignature: Buffer.alloc(0), + }, + signature: Buffer.from('6da88e2fd4435e26e02682435f108002ccc3ddd5', 'hex'), + ...attrs, + }); + const blockHeader = new BlockHeader(getBlockAttrs()); + const block = new Block(blockHeader, [transaction], blockAsset); + const validBlockID = '215b667a32a5cd51a94c9c2046c11fffb08c65748febec099451e3b164452b'; beforeEach(() => { stateStore = new StateStore(new InMemoryDatabase()); @@ -37,7 +83,12 @@ describe('Chain endpoint', () => { chain: { dataAccess: { getEvents: jest.fn(), + getBlockByID: jest.fn(), + getBlockByHeight: jest.fn(), + getBlocksByHeightBetween: jest.fn(), + getTransactionByID: jest.fn(), }, + lastBlock: block, } as any, bftMethod: { getSlotNumber: jest.fn().mockReturnValue(0), @@ -229,4 +280,254 @@ describe('Chain endpoint', () => { expect(list[1].nextAllocatedTime - list[0].nextAllocatedTime).toBe(DEFAULT_INTERVAL); }); }); + + describe('getBlockByID', () => { + it('should throw if provided block id is not valid', async () => { + await expect( + 
endpoint.getBlockByID(createRequestContext({ id: 'invalid id' })), + ).rejects.toThrow('Invalid parameters. id must be a valid hex string.'); + }); + + it('should return the block if provided id is valid', async () => { + jest.spyOn(endpoint['_chain'].dataAccess, 'getBlockByID').mockResolvedValue(block); + await expect( + endpoint.getBlockByID( + createRequestContext({ + id: validBlockID, + }), + ), + ).resolves.toEqual(block.toJSON()); + }); + }); + + describe('getBlocksByIDs', () => { + it('should throw if the provided block ids is an empty array or not a valid array', async () => { + await expect(endpoint.getBlocksByIDs(createRequestContext({ ids: [] }))).rejects.toThrow( + 'Invalid parameters. ids must be a non empty array.', + ); + + await expect( + endpoint.getBlocksByIDs(createRequestContext({ ids: 'not an array' })), + ).rejects.toThrow('Invalid parameters. ids must be a non empty array.'); + }); + + it('should throw if any of the provided block ids is not valid', async () => { + await expect( + endpoint.getBlocksByIDs(createRequestContext({ ids: [validBlockID, 'invalid id'] })), + ).rejects.toThrow('Invalid parameters. 
id must be a valid hex string.'); + }); + + it('should return empty result if the provided block ids are not found', async () => { + jest.spyOn(endpoint['_chain'].dataAccess, 'getBlockByID').mockImplementation(() => { + throw new NotFoundError(); + }); + + await expect( + endpoint.getBlocksByIDs(createRequestContext({ ids: [validBlockID] })), + ).resolves.toEqual([]); + }); + + it('should throw if dataAccess throws an error other than NotFoundError', async () => { + jest.spyOn(endpoint['_chain'].dataAccess, 'getBlockByID').mockImplementation(() => { + throw new Error(); + }); + + await expect( + endpoint.getBlocksByIDs(createRequestContext({ ids: [validBlockID] })), + ).rejects.toThrow(); + }); + + it('should return a collection of blocks', async () => { + jest.spyOn(endpoint['_chain'].dataAccess, 'getBlockByID').mockResolvedValue(block); + + await expect( + endpoint.getBlocksByIDs(createRequestContext({ ids: [validBlockID] })), + ).resolves.toEqual([block.toJSON()]); + }); + }); + + describe('getBlockByHeight', () => { + it('should throw if provided height is invalid', async () => { + await expect( + endpoint.getBlockByHeight(createRequestContext({ height: 'incorrect height' })), + ).rejects.toThrow('Invalid parameters. height must be a number.'); + }); + + it('should return a block if the provided height is valid', async () => { + jest.spyOn(endpoint['_chain'].dataAccess, 'getBlockByHeight').mockResolvedValue(block); + + await expect(endpoint.getBlockByHeight(createRequestContext({ height: 1 }))).resolves.toEqual( + block.toJSON(), + ); + }); + }); + + describe('getBlocksByHeightBetween', () => { + it('should throw if provided heights are invalid', async () => { + await expect( + endpoint.getBlocksByHeightBetween( + createRequestContext({ from: 'incorrect height', to: 10 }), + ), + ).rejects.toThrow('Invalid parameters. 
from and to must be a number.'); + + await expect( + endpoint.getBlocksByHeightBetween( + createRequestContext({ from: 1, to: 'incorrect height' }), + ), + ).rejects.toThrow('Invalid parameters. from and to must be a number.'); + }); + + it('should return a collection of blocks', async () => { + jest + .spyOn(endpoint['_chain'].dataAccess, 'getBlocksByHeightBetween') + .mockResolvedValue([block]); + + await expect( + endpoint.getBlocksByHeightBetween(createRequestContext({ from: 1, to: 10 })), + ).resolves.toEqual([block.toJSON()]); + }); + }); + + describe('getLastBlock', () => { + it('should return the last block', () => { + expect(endpoint.getLastBlock()).toEqual(block.toJSON()); + }); + }); + + describe('getTransactionByID', () => { + it('should throw if provided id is not valid', async () => { + await expect( + endpoint.getTransactionByID(createRequestContext({ id: 'invalid id' })), + ).rejects.toThrow('Invalid parameters. id must be a valid hex string.'); + }); + + it('should return a transaction if provided id is valid', async () => { + jest + .spyOn(endpoint['_chain'].dataAccess, 'getTransactionByID') + .mockResolvedValue(transaction); + + await expect( + endpoint.getTransactionByID(createRequestContext({ id: transaction.id.toString('hex') })), + ).resolves.toEqual(transaction.toJSON()); + }); + }); + + describe('getTransactionsByIDs', () => { + it('should throw if provided ids is empty or not an array', async () => { + await expect( + endpoint.getTransactionsByIDs(createRequestContext({ ids: [] })), + ).rejects.toThrow('Invalid parameters. ids must be a non empty array'); + + await expect( + endpoint.getTransactionsByIDs(createRequestContext({ ids: 'invalid id' })), + ).rejects.toThrow('Invalid parameters. 
ids must be a non empty array'); + }); + + it('should throw if any of the provided ids are not valid', async () => { + await expect( + endpoint.getTransactionsByIDs(createRequestContext({ ids: [validBlockID, 'invalid ID'] })), + ).rejects.toThrow('Invalid parameters. id must be a valid hex string.'); + }); + + it('should return a collection of transactions', async () => { + jest + .spyOn(endpoint['_chain'].dataAccess, 'getTransactionByID') + .mockResolvedValue(transaction); + + await expect( + endpoint.getTransactionsByIDs( + createRequestContext({ ids: [transaction.id.toString('hex')] }), + ), + ).resolves.toEqual([transaction.toJSON()]); + }); + }); + + describe('getTransactionsByHeight', () => { + it('should throw if provided height is invalid', async () => { + await expect( + endpoint.getTransactionsByHeight(createRequestContext({ height: 'invalid height' })), + ).rejects.toThrow('Invalid parameters. height must be zero or a positive number.'); + + await expect( + endpoint.getTransactionsByHeight(createRequestContext({ height: -1 })), + ).rejects.toThrow('Invalid parameters. height must be zero or a positive number.'); + }); + + it('should return a collection of transactions in the block at the provided height', async () => { + jest.spyOn(endpoint['_chain'].dataAccess, 'getBlockByHeight').mockResolvedValue(block); + + await expect( + endpoint.getTransactionsByHeight(createRequestContext({ height: 1 })), + ).resolves.toEqual(block.transactions.map(t => t.toJSON())); + }); + }); + + describe('getAssetsByHeight', () => { + it('should throw if provided height is invalid', async () => { + await expect( + endpoint.getAssetsByHeight(createRequestContext({ height: 'invalid height' })), + ).rejects.toThrow('Invalid parameters. height must be zero or a positive number.'); + + await expect( + endpoint.getAssetsByHeight(createRequestContext({ height: -1 })), + ).rejects.toThrow('Invalid parameters. 
height must be zero or a positive number.'); + }); + + it('should return block assets at the provided height', async () => { + jest.spyOn(endpoint['_chain'].dataAccess, 'getBlockByHeight').mockResolvedValue(block); + + await expect( + endpoint.getAssetsByHeight(createRequestContext({ height: 1 })), + ).resolves.toEqual(block.assets.toJSON()); + }); + }); + + describe('areHeadersContradicting', () => { + it('should throw if provided parameters are not valid', async () => { + await expect( + endpoint.areHeadersContradicting( + createRequestContext({ + header1: 'header1', + header2: blockHeader.getBytes().toString('hex'), + }), + ), + ).rejects.toThrow(`'.header1' must match format "hex"`); + + await expect( + endpoint.areHeadersContradicting( + createRequestContext({ + header1: block.getBytes().toString('hex'), + header2: blockHeader.getBytes().toString(), + }), + ), + ).rejects.toThrow(`'.header2' must match format "hex"`); + }); + + it('should invalidate if both headers have same id', async () => { + await expect( + endpoint.areHeadersContradicting( + createRequestContext({ + header1: blockHeader.getBytes().toString('hex'), + header2: blockHeader.getBytes().toString('hex'), + }), + ), + ).resolves.toEqual({ valid: false }); + }); + + it('should invoke areDistinctHeadersContradicting for the provided headers', async () => { + const contradictingBlockHeader = new BlockHeader(getBlockAttrs({ version: 2 })); + + jest.spyOn(bftUtils, 'areDistinctHeadersContradicting').mockReturnValue(false); + await expect( + endpoint.areHeadersContradicting( + createRequestContext({ + header1: blockHeader.getBytes().toString('hex'), + header2: contradictingBlockHeader.getBytes().toString('hex'), + }), + ), + ).resolves.toEqual({ valid: false }); + + expect(bftUtils.areDistinctHeadersContradicting).toHaveBeenCalledTimes(1); + }); + }); + }); diff --git a/framework/test/unit/modules/interoperability/internal_method.spec.ts 
b/framework/test/unit/modules/interoperability/internal_method.spec.ts index 739c6b3060..7719564558 100644 --- a/framework/test/unit/modules/interoperability/internal_method.spec.ts +++ b/framework/test/unit/modules/interoperability/internal_method.spec.ts @@ -12,8 +12,8 @@ * Removal or modification of this copyright notice is prohibited. */ -import { utils as cryptoUtils } from '@liskhq/lisk-cryptography'; import * as cryptography from '@liskhq/lisk-cryptography'; +import { bls, utils as cryptoUtils } from '@liskhq/lisk-cryptography'; import { regularMerkleTree } from '@liskhq/lisk-tree'; import { codec } from '@liskhq/lisk-codec'; import { SparseMerkleTree } from '@liskhq/lisk-db'; @@ -21,15 +21,18 @@ import { validator } from '@liskhq/lisk-validator'; import { BLS_PUBLIC_KEY_LENGTH, BLS_SIGNATURE_LENGTH, + CROSS_CHAIN_COMMAND_CHANNEL_TERMINATED, CCMStatusCode, CROSS_CHAIN_COMMAND_REGISTRATION, EMPTY_BYTES, EMPTY_HASH, HASH_LENGTH, + MAX_NUM_VALIDATORS, MAX_UINT64, MESSAGE_TAG_CERTIFICATE, MIN_RETURN_FEE_PER_BYTE_BEDDOWS, MODULE_NAME_INTEROPERABILITY, + EMPTY_FEE_ADDRESS, } from '../../../../src/modules/interoperability/constants'; import { MainchainInteroperabilityInternalMethod } from '../../../../src/modules/interoperability/mainchain/internal_method'; import * as utils from '../../../../src/modules/interoperability/utils'; @@ -40,6 +43,7 @@ import { testing, CCMsg, OwnChainAccount, + ChainStatus, } from '../../../../src'; import { PrefixedStateReadWriter } from '../../../../src/state_machine/prefixed_state_read_writer'; import { InMemoryPrefixedStateDB } from '../../../../src/testing/in_memory_prefixed_state'; @@ -76,6 +80,7 @@ import { crossChainUpdateTransactionParams, } from '../../../../src/modules/interoperability/schemas'; import { InvalidSMTVerificationEvent } from '../../../../src/modules/interoperability/events/invalid_smt_verification'; +import { CCM_STATUS_OK } from '../../../../src/modules/token/constants'; describe('Base interoperability internal 
method', () => { const interopMod = new MainchainInteroperabilityModule(); @@ -192,10 +197,12 @@ describe('Base interoperability internal method', () => { jest.spyOn(channelDataSubstore, 'set'); outboxRootSubstore = interopMod.stores.get(OutboxRootStore); jest.spyOn(outboxRootSubstore, 'set'); + jest.spyOn(outboxRootSubstore, 'del'); terminatedOutboxSubstore = interopMod.stores.get(TerminatedOutboxStore); chainValidatorsSubstore = interopMod.stores.get(ChainValidatorsStore); // jest.spyOn(terminatedOutboxSubstore, 'set'); chainDataSubstore = interopMod.stores.get(ChainAccountStore); + jest.spyOn(chainDataSubstore, 'set'); terminatedStateSubstore = interopMod.stores.get(TerminatedStateStore); mainchainInteroperabilityInternalMethod = new MainchainInteroperabilityInternalMethod( @@ -221,6 +228,8 @@ describe('Base interoperability internal method', () => { describe('appendToInboxTree', () => { it('should update the channel store with the new inbox tree info', async () => { + const { inbox: originalInbox } = await channelDataSubstore.get(methodContext, chainID); + // Act await mainchainInteroperabilityInternalMethod.appendToInboxTree( methodContext, @@ -233,11 +242,22 @@ describe('Base interoperability internal method', () => { ...channelData, inbox: updatedInboxTree, }); + + const { inbox } = await channelDataSubstore.get(methodContext, chainID); + expect(inbox.size).toBe(originalInbox.size + 1); + + expect(regularMerkleTree.calculateMerkleRoot).toHaveBeenCalledWith({ + value: cryptoUtils.hash(appendData), + appendPath: originalInbox.appendPath, + size: originalInbox.size, + }); }); }); describe('appendToOutboxTree', () => { it('should update the channel store with the new outbox tree info', async () => { + const { outbox: originalOutbox } = await channelDataSubstore.get(methodContext, chainID); + // Act await mainchainInteroperabilityInternalMethod.appendToOutboxTree( methodContext, @@ -250,6 +270,15 @@ describe('Base interoperability internal method', () => { 
...channelData, outbox: updatedOutboxTree, }); + + const { outbox } = await channelDataSubstore.get(methodContext, chainID); + expect(outbox.size).toBe(originalOutbox.size + 1); + + expect(regularMerkleTree.calculateMerkleRoot).toHaveBeenCalledWith({ + value: cryptoUtils.hash(appendData), + appendPath: originalOutbox.appendPath, + size: originalOutbox.size, + }); }); }); @@ -262,6 +291,11 @@ describe('Base interoperability internal method', () => { expect(outboxRootSubstore.set).toHaveBeenCalledWith(expect.anything(), chainID, { root: updatedOutboxTree.root, }); + + const { outbox } = await channelDataSubstore.get(methodContext, chainID); + expect(outbox.size).toBe(outboxTree.size + 1); + expect(outbox.root).toStrictEqual(updatedOutboxTree.root); + expect(outbox.appendPath).toStrictEqual(updatedOutboxTree.appendPath); }); }); @@ -357,6 +391,12 @@ describe('Base interoperability internal method', () => { initialized: true, }, ); + + expect(chainDataSubstore.set).toHaveBeenCalledWith(crossChainMessageContext, chainId, { + ...chainAccount, + status: ChainStatus.TERMINATED, + }); + expect(outboxRootSubstore.del).toHaveBeenCalledWith(crossChainMessageContext, chainId); }); it('should set appropriate terminated state for chain id in the terminatedState sub store if chain account exists for the id but state root is not provided', async () => { @@ -377,6 +417,15 @@ describe('Base interoperability internal method', () => { mainchainStateRoot: EMPTY_HASH, initialized: true, }); + + expect(chainDataSubstore.set).toHaveBeenCalledWith(crossChainMessageContext, chainId, { + ...chainAccount, + status: ChainStatus.TERMINATED, + }); + + expect(outboxRootSubstore.del).toHaveBeenCalledWith(crossChainMessageContext, chainId); + expect(chainAccountUpdatedEvent.log).toHaveBeenCalled(); + expect(terminatedStateCreatedEvent.log).toHaveBeenCalled(); }); it('should throw error if chain account does not exist for the id and ownchain account id is mainchain id', async () => { @@ -392,6 
+441,10 @@ describe('Base interoperability internal method', () => { chainIdNew, ), ).rejects.toThrow('Chain to be terminated is not valid'); + + await expect(terminatedStateSubstore.has(crossChainMessageContext, chainIdNew)).resolves.toBe( + false, + ); }); it('should set appropriate terminated state for chain id if chain account does not exist for the id and stateRoot is EMPTY_HASH', async () => { @@ -507,10 +560,19 @@ describe('Base interoperability internal method', () => { ), ).toBeUndefined(); - expect(mainchainInteroperabilityInternalMethod.sendInternal).toHaveBeenCalled(); + expect(mainchainInteroperabilityInternalMethod.sendInternal).toHaveBeenCalledWith( + crossChainMessageContext, + EMPTY_FEE_ADDRESS, + MODULE_NAME_INTEROPERABILITY, + CROSS_CHAIN_COMMAND_CHANNEL_TERMINATED, + SIDECHAIN_ID, + BigInt(0), + CCM_STATUS_OK, + EMPTY_BYTES, + ); expect( mainchainInteroperabilityInternalMethod.createTerminatedStateAccount, - ).toHaveBeenCalled(); + ).toHaveBeenCalledWith(crossChainMessageContext, SIDECHAIN_ID); }); }); @@ -684,36 +746,32 @@ describe('Base interoperability internal method', () => { certificate: codec.encode(certificateSchema, defaultCertificate), }; - await interopMod.stores.get(ChainAccountStore).set(storeContext, ccuParams.sendingChainID, { - lastCertificate: { - height: 20, - stateRoot: cryptoUtils.getRandomBytes(HASH_LENGTH), - timestamp: 99, - validatorsHash: cryptoUtils.getRandomBytes(HASH_LENGTH), - }, - name: 'chain1', - status: 1, - }); - - await mainchainInteroperabilityInternalMethod.updateCertificate(methodContext, ccu); - - const updatedChainAccount = { + const certificate = { lastCertificate: { height: defaultCertificate.height, stateRoot: defaultCertificate.stateRoot, timestamp: defaultCertificate.timestamp, validatorsHash: defaultCertificate.validatorsHash, }, + name: 'chain1', + status: 1, }; + + await interopMod.stores + .get(ChainAccountStore) + .set(storeContext, ccuParams.sendingChainID, certificate); + + await 
mainchainInteroperabilityInternalMethod.updateCertificate(methodContext, ccu); + expect(interopMod.stores.get(ChainAccountStore).set).toHaveBeenCalledWith( expect.anything(), ccu.sendingChainID, - expect.objectContaining(updatedChainAccount), + certificate, ); expect(interopMod.events.get(ChainAccountUpdatedEvent).log).toHaveBeenCalledWith( expect.anything(), ccu.sendingChainID, - expect.objectContaining(updatedChainAccount), + certificate, ); expect(validator.validate).toHaveBeenCalledWith( certificateSchema, @@ -791,6 +849,99 @@ describe('Base interoperability internal method', () => { }); describe('verifyValidatorsUpdate', () => { + it('should reject if length of bftWeightsUpdateBitmap is too large', async () => { + const ccu = { + ...ccuParams, + certificate: codec.encode(certificateSchema, defaultCertificate), + activeValidatorsUpdate: { + blsKeysUpdate: [ + Buffer.from([0, 0, 0, 0]), + Buffer.from([0, 0, 0, 1]), + Buffer.from([0, 0, 3, 0]), + ], + bftWeightsUpdate: [BigInt(1), BigInt(3), BigInt(4), BigInt(3)], + bftWeightsUpdateBitmap: Buffer.from([0, 7]), + }, + }; + + const activeValidators = [{ blsKey: Buffer.from([0, 0, 2, 0]), bftWeight: BigInt(2) }]; + await chainValidatorsSubstore.set(methodContext, ccu.sendingChainID, { + activeValidators, + certificateThreshold: BigInt(1), + }); + + // the bitmap for 1 active validator and 3 new validators fits into one byte + const expectedBitmapLength = 1; + + await expect( + mainchainInteroperabilityInternalMethod.verifyValidatorsUpdate(methodContext, ccu), + ).rejects.toThrow(`Invalid bftWeightsUpdateBitmap. 
Expected length ${expectedBitmapLength}.`); + }); + + it('should reject if the validator list returned by calculateNewActiveValidators is empty', async () => { + const ccu = { + ...ccuParams, + certificate: codec.encode(certificateSchema, defaultCertificate), + activeValidatorsUpdate: { + blsKeysUpdate: [ + Buffer.from([0, 0, 0, 0]), + Buffer.from([0, 0, 0, 1]), + Buffer.from([0, 0, 3, 0]), + ], + bftWeightsUpdate: [BigInt(1), BigInt(3), BigInt(4)], + // 7 corresponds to 0111 + bftWeightsUpdateBitmap: Buffer.from([7]), + }, + }; + + const existingKey = Buffer.from([0, 2, 3, 0]); + await chainValidatorsSubstore.set(methodContext, ccu.sendingChainID, { + activeValidators: [{ blsKey: existingKey, bftWeight: BigInt(2) }], + certificateThreshold: BigInt(1), + }); + jest.spyOn(utils, 'calculateNewActiveValidators').mockReturnValue([]); + + await expect( + mainchainInteroperabilityInternalMethod.verifyValidatorsUpdate(methodContext, ccu), + ).rejects.toThrow( + `Invalid validators array. It must have at least 1 element and at most ${MAX_NUM_VALIDATORS} elements.`, + ); + }); + + it('should reject if the validator list returned by calculateNewActiveValidators has more than MAX_NUM_VALIDATORS entries', async () => { + const ccu = { + ...ccuParams, + certificate: codec.encode(certificateSchema, defaultCertificate), + activeValidatorsUpdate: { + blsKeysUpdate: [ + Buffer.from([0, 0, 0, 0]), + Buffer.from([0, 0, 0, 1]), + Buffer.from([0, 0, 3, 0]), + ], + bftWeightsUpdate: [BigInt(1), BigInt(3), BigInt(4)], + // 7 corresponds to 0111 + bftWeightsUpdateBitmap: Buffer.from([7]), + }, + }; + + const existingKey = Buffer.from([0, 2, 3, 0]); + await chainValidatorsSubstore.set(methodContext, ccu.sendingChainID, { + activeValidators: [{ blsKey: existingKey, bftWeight: BigInt(2) }], + certificateThreshold: BigInt(1), + }); + const newValidators = Array.from(new Array(MAX_NUM_VALIDATORS + 1)).map((_, index) => ({ + blsKey: Buffer.from([0, 0, 0, index]), + bftWeight: BigInt(1), + })); 
+ jest.spyOn(utils, 'calculateNewActiveValidators').mockReturnValue(newValidators); + + await expect( + mainchainInteroperabilityInternalMethod.verifyValidatorsUpdate(methodContext, ccu), + ).rejects.toThrow( + `Invalid validators array. It must have at least 1 element and at most ${MAX_NUM_VALIDATORS} elements.`, + ); + }); + it('should reject if the certificate is empty', async () => { const ccu = { ...ccuParams, @@ -1136,10 +1287,11 @@ describe('Base interoperability internal method', () => { }; const existingKey = Buffer.from([0, 2, 3, 0]); - await chainValidatorsSubstore.set(methodContext, ccu.sendingChainID, { + const chainValidator = { activeValidators: [{ blsKey: existingKey, bftWeight: BigInt(2) }], certificateThreshold: BigInt(1), - }); + }; + await chainValidatorsSubstore.set(methodContext, ccu.sendingChainID, chainValidator); const newValidators = [ { blsKey: Buffer.from([0, 0, 0, 0]), bftWeight: BigInt(1) }, { blsKey: Buffer.from([0, 0, 0, 1]), bftWeight: BigInt(3) }, @@ -1152,6 +1304,12 @@ describe('Base interoperability internal method', () => { await expect( mainchainInteroperabilityInternalMethod.verifyValidatorsUpdate(methodContext, ccu), ).resolves.toBeUndefined(); + expect(utils.calculateNewActiveValidators).toHaveBeenCalledWith( + chainValidator.activeValidators, + ccu.activeValidatorsUpdate.blsKeysUpdate, + ccu.activeValidatorsUpdate.bftWeightsUpdate, + ccu.activeValidatorsUpdate.bftWeightsUpdateBitmap, + ); expect(validator.validate).toHaveBeenCalledWith( certificateSchema, expect.toBeObject() as Certificate, @@ -1193,7 +1351,7 @@ describe('Base interoperability internal method', () => { await interopMod.stores .get(ChainValidatorsStore) .set(methodContext, txParams.sendingChainID, { - certificateThreshold: BigInt(99), + certificateThreshold: BigInt(txParams.certificateThreshold), activeValidators: [], }); }); @@ -1267,11 +1425,25 @@ describe('Base interoperability internal method', () => { ); }); - it('should resolve when certificate is 
valid', async () => { + // (1): validatorsHash in certificate and state store are equal + // (2): there is a proper validators update in the CCU + it('should resolve when validatorsHash in certificate and state store are equal', async () => { const certificate: Certificate = { ...defaultCertificate, timestamp: 1000, }; + + await interopMod.stores.get(ChainAccountStore).set(methodContext, txParams.sendingChainID, { + lastCertificate: { + height: 100, + timestamp: 10, + stateRoot: cryptoUtils.getRandomBytes(HASH_LENGTH), + validatorsHash: defaultCertificate.validatorsHash, + }, + name: 'rand', + status: 0, + }); + const encodedCertificate = codec.encode(certificateSchema, certificate); await expect( mainchainInteroperabilityInternalMethod.verifyCertificate( @@ -1288,6 +1460,46 @@ describe('Base interoperability internal method', () => { expect.toBeObject() as Certificate, ); }); + + it('should resolve when validatorsHash is NOT equal, but validators are updated', async () => { + const certificate: Certificate = { + ...defaultCertificate, + timestamp: 1000, + }; + + await interopMod.stores.get(ChainAccountStore).set(methodContext, txParams.sendingChainID, { + lastCertificate: { + height: 100, + timestamp: 10, + stateRoot: cryptoUtils.getRandomBytes(HASH_LENGTH), + validatorsHash: cryptoUtils.getRandomBytes(HASH_LENGTH), + }, + name: 'rand', + status: 0, + }); + + const encodedCertificate = codec.encode(certificateSchema, certificate); + await expect( + mainchainInteroperabilityInternalMethod.verifyCertificate( + methodContext, + { + ...{ + ...txParams, + activeValidatorsUpdate: { + ...txParams.activeValidatorsUpdate, + bftWeightsUpdateBitmap: Buffer.alloc(1), + }, + }, + certificate: encodedCertificate, + }, + 1001, + ), + ).resolves.toBeUndefined(); + expect(validator.validate).toHaveBeenCalledWith( + certificateSchema, + expect.toBeObject() as Certificate, + ); + }); }); describe('verifyCertificateSignature', () => { @@ -1373,6 +1585,38 @@ describe('Base 
interoperability internal method', () => { expect.toBeObject() as Certificate, ); }); + + it('should resolve correctly when validators store is NOT sorted', async () => { + jest.spyOn(cryptography.bls, 'verifyWeightedAggSig').mockReturnValue(true); + + await interopMod.stores + .get(ChainValidatorsStore) + .set(methodContext, txParams.sendingChainID, { + ...chainValidators, + activeValidators: [ + activeValidators[activeValidators.length - 1], + ...activeValidators.slice(1, activeValidators.length - 1), + activeValidators[0], + ], + }); + + await expect( + mainchainInteroperabilityInternalMethod.verifyCertificateSignature(methodContext, txParams), + ).resolves.toBeUndefined(); + + expect(cryptography.bls.verifyWeightedAggSig).toHaveBeenCalledTimes(1); + + expect(bls.verifyWeightedAggSig).toHaveBeenCalledWith( + activeValidators.map(activeValidator => activeValidator.blsKey), + expect.anything(), + expect.anything(), + expect.anything(), + expect.anything(), + expect.anything(), + activeValidators.map(activeValidator => activeValidator.bftWeight), + expect.anything(), + ); + }); }); describe('verifyOutboxRootWitness', () => { @@ -1622,23 +1866,30 @@ describe('Base interoperability internal method', () => { .spyOn(regularMerkleTree, 'calculateRootFromRightWitness') .mockReturnValue(channelData.partnerChainOutboxRoot); + const params = { + ...crossChainUpdateParams, + inboxUpdate: { + crossChainMessages: [], + messageWitnessHashes: [cryptoUtils.getRandomBytes(32)], + outboxRootWitness: { + bitmap: Buffer.alloc(0), + siblingHashes: [], + }, + }, + certificate: Buffer.alloc(0), + }; await expect( mainchainInteroperabilityInternalMethod.verifyPartnerChainOutboxRoot( commandExecuteContext as any, - { - ...crossChainUpdateParams, - inboxUpdate: { - crossChainMessages: [], - messageWitnessHashes: [], - outboxRootWitness: { - bitmap: Buffer.alloc(0), - siblingHashes: [], - }, - }, - certificate: Buffer.alloc(0), - }, + params, ), ).resolves.toBeUndefined(); + + 
expect(regularMerkleTree.calculateRootFromRightWitness).toHaveBeenCalledWith( + channelData.inbox.size, + channelData.inbox.appendPath, + params.inboxUpdate.messageWitnessHashes, + ); }); it('should resolve when certificate provides valid inclusion proof', async () => { @@ -1655,8 +1906,11 @@ describe('Base interoperability internal method', () => { ), ).resolves.toBeUndefined(); + // outboxKey = STORE_PREFIX_INTEROPERABILITY + SUBSTORE_PREFIX_OUTBOX_ROOT + sha256(OWN_CHAIN_ID) + // https://github.com/LiskHQ/lips/blob/main/proposals/lip-0053.md#verifypartnerchainoutboxroot const outboxKey = Buffer.concat([ - interopMod.stores.get(OutboxRootStore).key, + Buffer.from('83ed0d25', 'hex'), + Buffer.from('0000', 'hex'), cryptoUtils.hash(ownChainAccount.chainID), ]); expect(SparseMerkleTree.prototype.verify).toHaveBeenCalledWith( @@ -1677,6 +1931,42 @@ describe('Base interoperability internal method', () => { certificateSchema, expect.toBeObject() as Certificate, ); + + expect(regularMerkleTree.calculateRootFromRightWitness).toHaveBeenCalledWith( + updatedInboxTree.size, + updatedInboxTree.appendPath, + crossChainUpdateParams.inboxUpdate.messageWitnessHashes, + ); + }); + + it('should resolve correctly when crossChainMessages is non-empty', async () => { + jest.spyOn(SparseMerkleTree.prototype, 'verify').mockResolvedValue(false); + jest + .spyOn(regularMerkleTree, 'calculateRootFromRightWitness') + .mockReturnValue(channelData.partnerChainOutboxRoot); + + const params = { + ...crossChainUpdateParams, + inboxUpdate: { + crossChainMessages: [cryptoUtils.getRandomBytes(32), cryptoUtils.getRandomBytes(32)], + messageWitnessHashes: [cryptoUtils.getRandomBytes(32)], + outboxRootWitness: { + bitmap: Buffer.alloc(0), + siblingHashes: [], + }, + }, + certificate: Buffer.alloc(0), + }; + await expect( + mainchainInteroperabilityInternalMethod.verifyPartnerChainOutboxRoot( + commandExecuteContext as any, + params, + ), + ).resolves.toBeUndefined(); + + 
expect(regularMerkleTree.calculateMerkleRoot).toHaveBeenCalledTimes( + params.inboxUpdate.crossChainMessages.length, + ); }); }); }); diff --git a/framework/test/unit/modules/interoperability/mainchain/internal_method.spec.ts b/framework/test/unit/modules/interoperability/mainchain/internal_method.spec.ts index 0ac861e974..e6530f7a54 100644 --- a/framework/test/unit/modules/interoperability/mainchain/internal_method.spec.ts +++ b/framework/test/unit/modules/interoperability/mainchain/internal_method.spec.ts @@ -144,5 +144,20 @@ describe('Mainchain interoperability internal method', () => { expect(isLive).toBe(false); }); + + it(`should return true when chain account exists, status is ${ChainStatus.ACTIVE} and liveness requirement IS fulfilled`, async () => { + await chainDataSubstore.set(context, chainID, { + ...chainAccount, + status: ChainStatus.ACTIVE, + }); + + const isLive = await mainchainInteroperabilityInternalMethod.isLive( + context, + chainID, + timestamp, + ); + + expect(isLive).toBe(true); + }); }); }); diff --git a/framework/test/unit/modules/interoperability/sidechain/internal_method.spec.ts b/framework/test/unit/modules/interoperability/sidechain/internal_method.spec.ts index 91fc93ff2e..1b40e992e7 100644 --- a/framework/test/unit/modules/interoperability/sidechain/internal_method.spec.ts +++ b/framework/test/unit/modules/interoperability/sidechain/internal_method.spec.ts @@ -100,7 +100,19 @@ describe('Sidechain interoperability store', () => { expect(isLive).toBe(false); }); - it('should return true if chain is not terminated', async () => { + it('should return true if status is ACTIVE or REGISTERED', async () => { + for (const status of [ChainStatus.ACTIVE, ChainStatus.REGISTERED]) { + await chainDataSubstore.set(context, chainID, { + ...chainAccount, + status, + }); + const isLive = await sidechainInteroperabilityInternalMethod.isLive(context, chainID); + + expect(isLive).toBe(true); + } + }); + + it('should return true if chain account and 
terminated chain account do not exist', async () => { const isLive = await sidechainInteroperabilityInternalMethod.isLive(context, chainID); expect(isLive).toBe(true);