diff --git a/README.md b/README.md index 91819ef..d631862 100644 --- a/README.md +++ b/README.md @@ -183,13 +183,12 @@ to help contributors better understand how the project is organized. - ``/bin`` Contains the CLI script for the ``ethereumjs`` command - ``/docs`` Contains auto-generated API docs as well as other supporting documentation -- ``/lib/blockchain`` Contains the ``Chain``, ``BlockPool`` and ``HeaderPool`` classes. +- ``/lib/blockchain`` Contains the ``Chain`` class. - ``/lib/net`` Contains all of the network layer classes including ``Peer``, ``Protocol`` and its subclasses, ``Server`` and its subclasses, and ``PeerPool``. -- ``/lib/service`` Contains the various services. Currently, only ``EthereumService`` is implemented. -- ``/lib/handler`` Contains the various message handlers +- ``/lib/service`` Contains the main Ethereum services (``FastEthereumService`` and ``LightEthereumService``) - ``/lib/rpc`` Contains the RPC server (optionally) embedded in the client. -- ``/lib/sync`` Contains the various chain synchronizers +- ``/lib/sync`` Contains the various chain synchronizers and ``Fetcher`` helpers. - ``/tests`` Contains test cases, testing helper functions, mocks and test data **Components** @@ -198,11 +197,6 @@ to help contributors better understand how the project is organized. ``ethereumjs-blockchain``. It handles creation of the data directory, provides basic blockchain operations and maintains an updated current state of the blockchain, including current height, total difficulty, and latest block. -- ``BlockPool`` [**In Progress**] This class holds segments of the blockchain that have been downloaded -from other peers. Once valid, sequential segments are available, they are automatically added to the -blockchain - - ``HeaderPool`` [**In Progress**] This is a subclass of ``BlockPool`` that holds header segments instead of - block segments. It is useful for light syncs when downloading sequential headers in parallel. - ``Server`` This class represents a server that discovers new peers and handles incoming and dropped connections. When a new peer connects, the ``Server`` class will negotiate protocols and emit a ``connected`` event with a new ``Peer``instance. The peer will have properties corresponding to each protocol. For example, @@ -224,14 +218,15 @@ low level ethereum protocols such as ``eth/62``, ``eth/62`` and ``les/2``. Subcl and ``removed`` events when new peers are added and removed and also emit the ``message`` event whenever any of the peers in the pool emit a message. Each ``Service`` has an associated ``PeerPool`` and they are used primarily by ``Synchronizer``s to help with blockchain synchronization. - ``Synchronizer`` Subclasses of this class implements a specific blockchain synchronization strategy. They -also make use of subclasses of the ``Fetcher`` class that help fetch headers and bodies from pool peers. +also make use of subclasses of the ``Fetcher`` class that help fetch headers and bodies from pool peers. The fetchers internally make use of streams to handle things like queuing and backpressure. - ``FastSynchronizer`` [**In Progress**] Implements fast syncing of the blockchain - ``LightSynchronizer`` [**In Progress**] Implements light syncing of the blockchain - ``Handler`` Subclasses of this class implements a protocol message handler. Handlers respond to incoming requests from peers. - ``EthHandler`` [**In Progress**] Handles incoming ETH requests - ``LesHandler`` [**In Progress**] Handles incoming LES requests -- ``Service`` Subclasses of ``Service`` will implement specific functionality of a ``Node``. For example, the ``EthereumService`` will synchronize the blockchain using the fast or light sync protocols. Each service must specify which protocols it needs and define a ``start()`` and ``stop()`` function. - - ``EthereumService`` [**In Progress**] Implementation of an ethereum fast sync and light sync node. +- ``Service`` Subclasses of ``Service`` will implement specific functionality of a ``Node``. For example, the ``EthereumService`` subclasses will synchronize the blockchain using the fast or light sync protocols. Each service must specify which protocols it needs and define a ``start()`` and ``stop()`` function. + - ``FastEthereumService`` [**In Progress**] Implementation of ethereum fast sync. + - ``LightEthereumService`` [**In Progress**] Implementation of ethereum light sync. - ``WhisperService`` [**Not Started**] Implementation of an ethereum whisper node. - ``Node`` [**In Progress**] Represents the top-level ethereum node, and is responsible for managing the lifecycle of included services. - ``RPCManager`` [**In Progress**] Implements an embedded JSON-RPC server to handle incoming RPC requests. diff --git a/bin/cli.js b/bin/cli.js index 46fb206..faf6645 100755 --- a/bin/cli.js +++ b/bin/cli.js @@ -60,6 +60,16 @@ const args = require('yargs') choices: [ 'error', 'warn', 'info', 'debug' ], default: 'info' }, + 'minPeers': { + describe: 'Peers needed before syncing', + number: true, + default: 2 + }, + 'maxPeers': { + describe: 'Maximum peers to sync with', + number: true, + default: 25 + }, 'params': { describe: 'Path to chain parameters json file', coerce: path.resolve @@ -79,8 +89,8 @@ async function runNode (options) { node.on('listening', details => { logger.info(`Listener up transport=${details.transport} url=${details.url}`) }) - node.on('synchronized', (stats) => { - logger.info(`Synchronized ${stats.count} ${stats.type === 'light' ? 'headers' : 'blocks'}`) + node.on('synchronized', () => { + logger.info('Synchronized') }) logger.info(`Connecting to network: ${options.common.chainName()}`) await node.open() @@ -125,7 +135,9 @@ async function run () { lightserv: args.lightserv, db: level(dataDir), rpcport: args.rpcport, - rpcaddr: args.rpcaddr + rpcaddr: args.rpcaddr, + minPeers: args.minPeers, + maxPeers: args.maxPeers } const node = await runNode(options) const server = args.rpc ? runRpcServer(node, options) : null diff --git a/browser/index.js b/browser/index.js index f69c909..f4a2f08 100644 --- a/browser/index.js +++ b/browser/index.js @@ -5,13 +5,6 @@ const level = require('level-browserify') // Blockchain exports.Chain = require('../lib/blockchain/chain') -exports.BlockPool = require('../lib/blockchain/blockpool') -exports.HeaderPool = require('../lib/blockchain/headerpool') - -// Handler -exports.Handler = require('../lib/handler/handler') -exports.EthHandler = require('../lib/handler/ethhandler') -exports.LesHandler = require('../lib/handler/leshandler') // Peer exports.Peer = require('../lib/net/peer/peer') @@ -36,7 +29,8 @@ exports.Node = require('../lib/node') // Service exports.Service = require('../lib/service/service') -exports.EthereumService = require('../lib/service/ethereumservice') +exports.FastEthereumService = require('../lib/service/fastethereumservice') +exports.LightEthereumService = require('../lib/service/lightethereumservice') // Synchronizer exports.Synchronizer = require('../lib/sync/sync') diff --git a/docs/API.md b/docs/API.md index d21b9fc..78a9530 100644 --- a/docs/API.md +++ b/docs/API.md @@ -3,8 +3,6 @@
number
- * [.open()](#module_blockchain.BlockPool+open) ⇒ Promise
- * [.add(blocks)](#module_blockchain.BlockPool+add) ⇒ Promise
* [.Chain](#module_blockchain.Chain)
* [new Chain(options)](#new_module_blockchain.Chain_new)
* [.networkId](#module_blockchain.Chain+networkId) : number
@@ -67,58 +62,6 @@
* [.getLatestHeader()](#module_blockchain.Chain+getLatestHeader) ⇒ Promise
* [.getLatestBlock()](#module_blockchain.Chain+getLatestBlock) ⇒ Promise
* [.getTd(hash)](#module_blockchain.Chain+getTd) ⇒ Promise
- * [.HeaderPool](#module_blockchain.HeaderPool)
- * [.add(headers)](#module_blockchain.HeaderPool+add) ⇒ Promise
-
-
-
-### blockchain.BlockPool
-Pool of blockchain segments
-
-**Kind**: static class of [blockchain
](#module_blockchain)
-
-* [.BlockPool](#module_blockchain.BlockPool)
- * [new BlockPool(options)](#new_module_blockchain.BlockPool_new)
- * [.size](#module_blockchain.BlockPool+size) : number
- * [.open()](#module_blockchain.BlockPool+open) ⇒ Promise
- * [.add(blocks)](#module_blockchain.BlockPool+add) ⇒ Promise
-
-
-
-#### new BlockPool(options)
-Create new block pool
-
-
-| Param | Type | Description |
-| --- | --- | --- |
-| options | Object
| constructor parameters |
-| options.chain | Chain
| blockchain |
-| [options.logger] | Logger
| Logger instance |
-
-
-
-#### blockPool.size : number
-Size of pool
-
-**Kind**: instance property of [BlockPool
](#module_blockchain.BlockPool)
-
-
-#### blockPool.open() ⇒ Promise
-Open block pool and wait for blockchain to open
-
-**Kind**: instance method of [BlockPool
](#module_blockchain.BlockPool)
-
-
-#### blockPool.add(blocks) ⇒ Promise
-Add a blockchain segment to the pool. Returns a promise that resolves once
-the segment has been added to the pool. Segments are automatically inserted
-into the blockchain once prior gaps are filled.
-
-**Kind**: instance method of [BlockPool
](#module_blockchain.BlockPool)
-
-| Param | Type | Description |
-| --- | --- | --- |
-| blocks | Array.<Block>
| list of sequential blocks |
@@ -264,177 +207,6 @@ Gets total difficulty for a block
| --- | --- | --- |
| hash | Buffer
| block hash |
-
-
-### blockchain.HeaderPool
-Pool of headerchain segments
-
-**Kind**: static class of [blockchain
](#module_blockchain)
-
-
-#### headerPool.add(headers) ⇒ Promise
-Add a headerchain segment to the pool. Returns a promise that resolves once
-the segment has been added to the pool. Segments are automatically inserted
-into the blockchain once prior gaps are filled.
-
-**Kind**: instance method of [HeaderPool
](#module_blockchain.HeaderPool)
-
-| Param | Type | Description |
-| --- | --- | --- |
-| headers | Array.<Header>
| list of sequential headers |
-
-
-
-## handler
-
-* [handler](#module_handler)
- * [.EthHandler](#module_handler.EthHandler)
- * [.event](#module_handler.EthHandler+event) ⇒ string
- * [.handle(message, peer)](#module_handler.EthHandler+handle) ⇒ Promise
- * [.Handler](#module_handler.Handler)
- * [new Handler(options)](#new_module_handler.Handler_new)
- * [.event](#module_handler.Handler+event) ⇒ string
- * [.start()](#module_handler.Handler+start)
- * [.stop()](#module_handler.Handler+stop)
- * [.handle(message, peer)](#module_handler.Handler+handle) ⇒ Promise
- * [.LesHandler](#module_handler.LesHandler)
- * [new LesHandler(options)](#new_module_handler.LesHandler_new)
- * [.event](#module_handler.LesHandler+event) ⇒ string
- * [.handle(message, peer)](#module_handler.LesHandler+handle) ⇒ Promise
-
-
-
-### handler.EthHandler
-ETH protocol handler
-
-**Kind**: static class of [handler
](#module_handler)
-
-* [.EthHandler](#module_handler.EthHandler)
- * [.event](#module_handler.EthHandler+event) ⇒ string
- * [.handle(message, peer)](#module_handler.EthHandler+handle) ⇒ Promise
-
-
-
-#### ethHandler.event ⇒ string
-Message event to listen for
-
-**Kind**: instance property of [EthHandler
](#module_handler.EthHandler)
-**Returns**: string
- name of message event
-
-
-#### ethHandler.handle(message, peer) ⇒ Promise
-Handles incoming ETH request from connected peer
-
-**Kind**: instance method of [EthHandler
](#module_handler.EthHandler)
-
-| Param | Type | Description |
-| --- | --- | --- |
-| message | Object
| message object |
-| peer | Peer
| peer |
-
-
-
-### handler.Handler
-Base class for protocol handlers
-
-**Kind**: static class of [handler
](#module_handler)
-
-* [.Handler](#module_handler.Handler)
- * [new Handler(options)](#new_module_handler.Handler_new)
- * [.event](#module_handler.Handler+event) ⇒ string
- * [.start()](#module_handler.Handler+start)
- * [.stop()](#module_handler.Handler+stop)
- * [.handle(message, peer)](#module_handler.Handler+handle) ⇒ Promise
-
-
-
-#### new Handler(options)
-Create new handler
-
-
-| Param | Type | Description |
-| --- | --- | --- |
-| options | Object
| constructor parameters |
-| options.pool | PeerPool
| peer pool |
-| options.chain | Chain
| blockchain |
-| [options.logger] | Logger
| Logger instance |
-
-
-
-#### handler.event ⇒ string
-Message event to listen for
-
-**Kind**: instance property of [Handler
](#module_handler.Handler)
-**Returns**: string
- name of message event
-
-
-#### handler.start()
-Start handler
-
-**Kind**: instance method of [Handler
](#module_handler.Handler)
-
-
-#### handler.stop()
-Stop handler
-
-**Kind**: instance method of [Handler
](#module_handler.Handler)
-
-
-#### handler.handle(message, peer) ⇒ Promise
-Handles incoming request from connected peer
-
-**Kind**: instance method of [Handler
](#module_handler.Handler)
-
-| Param | Type | Description |
-| --- | --- | --- |
-| message | Object
| message object |
-| peer | Peer
| peer |
-
-
-
-### handler.LesHandler
-LES protocol handler
-
-**Kind**: static class of [handler
](#module_handler)
-
-* [.LesHandler](#module_handler.LesHandler)
- * [new LesHandler(options)](#new_module_handler.LesHandler_new)
- * [.event](#module_handler.LesHandler+event) ⇒ string
- * [.handle(message, peer)](#module_handler.LesHandler+handle) ⇒ Promise
-
-
-
-#### new LesHandler(options)
-Create new handler
-
-
-| Param | Type | Description |
-| --- | --- | --- |
-| options | Object
| constructor parameters |
-| options.pool | PeerPool
| peer pool |
-| options.chain | Chain
| blockchain |
-| options.flow | FlowControl
| flow control manager |
-| [options.logger] | Logger
| Logger instance |
-
-
-
-#### lesHandler.event ⇒ string
-Message event to listen for
-
-**Kind**: instance property of [LesHandler
](#module_handler.LesHandler)
-**Returns**: string
- name of message event
-
-
-#### lesHandler.handle(message, peer) ⇒ Promise
-Handles incoming LES requests from connected peer
-
-**Kind**: instance method of [LesHandler
](#module_handler.LesHandler)
-
-| Param | Type | Description |
-| --- | --- | --- |
-| message | Object
| message object |
-| peer | Peer
| peer |
-
## net/peer
@@ -668,6 +440,7 @@ Libp2p Bundle
* [.PeerPool](#module_net.PeerPool)
* [new PeerPool(options)](#new_module_net.PeerPool_new)
* [.peers](#module_net.PeerPool+peers) : Array.<Peer>
+ * [.size](#module_net.PeerPool+size) : number
* [.open()](#module_net.PeerPool+open) ⇒ Promise
* [.close()](#module_net.PeerPool+close) ⇒ Promise
* [.contains(peer)](#module_net.PeerPool+contains) ⇒ boolean
@@ -687,6 +460,7 @@ Pool of connected peers
* [.PeerPool](#module_net.PeerPool)
* [new PeerPool(options)](#new_module_net.PeerPool_new)
* [.peers](#module_net.PeerPool+peers) : Array.<Peer>
+ * [.size](#module_net.PeerPool+size) : number
* [.open()](#module_net.PeerPool+open) ⇒ Promise
* [.close()](#module_net.PeerPool+close) ⇒ Promise
* [.contains(peer)](#module_net.PeerPool+contains) ⇒ boolean
@@ -701,17 +475,24 @@ Pool of connected peers
Create new peer pool
-| Param | Type | Description |
-| --- | --- | --- |
-| options | Object
| constructor parameters |
-| options.servers | Array.<Server>
| servers to aggregate peers from |
-| [options.logger] | Logger
| logger instance |
+| Param | Type | Default | Description |
+| --- | --- | --- | --- |
+| options | Object
| | constructor parameters |
+| options.servers | Array.<Server>
| | servers to aggregate peers from |
+| [options.maxPeers] | number
| 25
| maximum peers allowed |
+| [options.logger] | Logger
| | logger instance |
#### peerPool.peers : Array.<Peer>
Connected peers
+**Kind**: instance property of [PeerPool
](#module_net.PeerPool)
+
+
+#### peerPool.size : number
+Number of peers in pool
+
**Kind**: instance property of [PeerPool
](#module_net.PeerPool)
@@ -1658,10 +1439,19 @@ middleware for parameters validation
* [.EthereumService](#module_service.EthereumService)
* [new EthereumService(options)](#new_module_service.EthereumService_new)
* [.name](#module_service.EthereumService+name) : string
- * [.protocols](#module_service.EthereumService+protocols) : Array.<Protocol>
* [.open()](#module_service.EthereumService+open) ⇒ Promise
* [.start()](#module_service.EthereumService+start) ⇒ Promise
* [.stop()](#module_service.EthereumService+stop) ⇒ Promise
+ * [.FastEthereumService](#module_service.FastEthereumService)
+ * [new FastEthereumService(options)](#new_module_service.FastEthereumService_new)
+ * [.protocols](#module_service.FastEthereumService+protocols) : Array.<Protocol>
+ * [.handle(message, protocol, peer)](#module_service.FastEthereumService+handle) ⇒ Promise
+ * [.handleEth(message, peer)](#module_service.FastEthereumService+handleEth) ⇒ Promise
+ * [.handleLes(message, peer)](#module_service.FastEthereumService+handleLes) ⇒ Promise
+ * [.LightEthereumService](#module_service.LightEthereumService)
+ * [new LightEthereumService(options)](#new_module_service.LightEthereumService_new)
+ * [.protocols](#module_service.LightEthereumService+protocols) : Array.<Protocol>
+ * [.handle(message, protocol, peer)](#module_service.LightEthereumService+handle) ⇒ Promise
* [.Service](#module_service.Service)
* [new Service(options)](#new_module_service.Service_new)
* [.name](#module_service.Service+name) : string
@@ -1670,6 +1460,7 @@ middleware for parameters validation
* [.close()](#module_service.Service+close) ⇒ Promise
* [.start()](#module_service.Service+start) ⇒ Promise
* [.stop()](#module_service.Service+stop) ⇒ Promise
+ * [.handle(message, protocol, peer)](#module_service.Service+handle) ⇒ Promise
@@ -1681,7 +1472,6 @@ Ethereum service
* [.EthereumService](#module_service.EthereumService)
* [new EthereumService(options)](#new_module_service.EthereumService_new)
* [.name](#module_service.EthereumService+name) : string
- * [.protocols](#module_service.EthereumService+protocols) : Array.<Protocol>
* [.open()](#module_service.EthereumService+open) ⇒ Promise
* [.start()](#module_service.EthereumService+start) ⇒ Promise
* [.stop()](#module_service.EthereumService+stop) ⇒ Promise
@@ -1696,11 +1486,13 @@ Create new ETH service
| --- | --- | --- | --- |
| options | Object
| | constructor parameters |
| options.servers | Array.<Server>
| | servers to run service on |
-| [options.syncmode] | string
| "light"
| synchronization mode ('fast' or 'light') |
-| [options.lightserv] | boolean
| false
| serve LES requests |
| [options.chain] | Chain
| | blockchain |
+| [options.db] | LevelDB
|
| blockchain database |
| [options.common] | Common
| | ethereum network name |
-| [options.interval] | number
| | sync interval |
+| [options.minPeers] | number
| 3
| number of peers needed before syncing |
+| [options.maxPeers] | number
| 25
| maximum peers allowed |
+| [options.timeout] | number
| | protocol timeout |
+| [options.interval] | number
| | sync retry interval |
| [options.logger] | Logger
| | logger instance |
@@ -1710,12 +1502,6 @@ Service name
**Kind**: instance property of [EthereumService
](#module_service.EthereumService)
**Access**: protected
-
-
-#### ethereumService.protocols : Array.<Protocol>
-Returns all protocols required by this service
-
-**Kind**: instance property of [EthereumService
](#module_service.EthereumService)
#### ethereumService.open() ⇒ Promise
@@ -1735,6 +1521,129 @@ that resolves once the service is started and blockchain is in sync.
Stop service. Interrupts blockchain synchronization if its in progress.
**Kind**: instance method of [EthereumService
](#module_service.EthereumService)
+
+
+### service.FastEthereumService
+Ethereum service
+
+**Kind**: static class of [service
](#module_service)
+
+* [.FastEthereumService](#module_service.FastEthereumService)
+ * [new FastEthereumService(options)](#new_module_service.FastEthereumService_new)
+ * [.protocols](#module_service.FastEthereumService+protocols) : Array.<Protocol>
+ * [.handle(message, protocol, peer)](#module_service.FastEthereumService+handle) ⇒ Promise
+ * [.handleEth(message, peer)](#module_service.FastEthereumService+handleEth) ⇒ Promise
+ * [.handleLes(message, peer)](#module_service.FastEthereumService+handleLes) ⇒ Promise
+
+
+
+#### new FastEthereumService(options)
+Create new ETH service
+
+
+| Param | Type | Default | Description |
+| --- | --- | --- | --- |
+| options | Object
| | constructor parameters |
+| options.servers | Array.<Server>
| | servers to run service on |
+| [options.lightserv] | boolean
| false
| serve LES requests |
+| [options.chain] | Chain
| | blockchain |
+| [options.common] | Common
| | ethereum network name |
+| [options.minPeers] | number
| 3
| number of peers needed before syncing |
+| [options.maxPeers] | number
| 25
| maximum peers allowed |
+| [options.interval] | number
| | sync retry interval |
+| [options.logger] | Logger
| | logger instance |
+
+
+
+#### fastEthereumService.protocols : Array.<Protocol>
+Returns all protocols required by this service
+
+**Kind**: instance property of [FastEthereumService
](#module_service.FastEthereumService)
+
+
+#### fastEthereumService.handle(message, protocol, peer) ⇒ Promise
+Handles incoming message from connected peer
+
+**Kind**: instance method of [FastEthereumService
](#module_service.FastEthereumService)
+
+| Param | Type | Description |
+| --- | --- | --- |
+| message | Object
| message object |
+| protocol | string
| protocol name |
+| peer | Peer
| peer |
+
+
+
+#### fastEthereumService.handleEth(message, peer) ⇒ Promise
+Handles incoming ETH message from connected peer
+
+**Kind**: instance method of [FastEthereumService
](#module_service.FastEthereumService)
+
+| Param | Type | Description |
+| --- | --- | --- |
+| message | Object
| message object |
+| peer | Peer
| peer |
+
+
+
+#### fastEthereumService.handleLes(message, peer) ⇒ Promise
+Handles incoming LES message from connected peer
+
+**Kind**: instance method of [FastEthereumService
](#module_service.FastEthereumService)
+
+| Param | Type | Description |
+| --- | --- | --- |
+| message | Object
| message object |
+| peer | Peer
| peer |
+
+
+
+### service.LightEthereumService
+Ethereum service
+
+**Kind**: static class of [service
](#module_service)
+
+* [.LightEthereumService](#module_service.LightEthereumService)
+ * [new LightEthereumService(options)](#new_module_service.LightEthereumService_new)
+ * [.protocols](#module_service.LightEthereumService+protocols) : Array.<Protocol>
+ * [.handle(message, protocol, peer)](#module_service.LightEthereumService+handle) ⇒ Promise
+
+
+
+#### new LightEthereumService(options)
+Create new ETH service
+
+
+| Param | Type | Default | Description |
+| --- | --- | --- | --- |
+| options | Object
| | constructor parameters |
+| options.servers | Array.<Server>
| | servers to run service on |
+| [options.chain] | Chain
| | blockchain |
+| [options.common] | Common
| | ethereum network name |
+| [options.minPeers] | number
| 3
| number of peers needed before syncing |
+| [options.maxPeers] | number
| 25
| maximum peers allowed |
+| [options.interval] | number
| | sync retry interval |
+| [options.logger] | Logger
| | logger instance |
+
+
+
+#### lightEthereumService.protocols : Array.<Protocol>
+Returns all protocols required by this service
+
+**Kind**: instance property of [LightEthereumService
](#module_service.LightEthereumService)
+
+
+#### lightEthereumService.handle(message, protocol, peer) ⇒ Promise
+Handles incoming message from connected peer
+
+**Kind**: instance method of [LightEthereumService
](#module_service.LightEthereumService)
+
+| Param | Type | Description |
+| --- | --- | --- |
+| message | Object
| message object |
+| protocol | string
| protocol name |
+| peer | Peer
| peer |
+
### service.Service
@@ -1750,6 +1659,7 @@ Base class for all services
* [.close()](#module_service.Service+close) ⇒ Promise
* [.start()](#module_service.Service+start) ⇒ Promise
* [.stop()](#module_service.Service+stop) ⇒ Promise
+ * [.handle(message, protocol, peer)](#module_service.Service+handle) ⇒ Promise
@@ -1761,6 +1671,7 @@ Create new service and associated peer pool
| --- | --- | --- | --- |
| options | Object
| | constructor parameters |
| [options.servers] | Array.<Server>
| []
| servers to run service on |
+| [options.maxPeers] | number
| 25
| maximum peers allowed |
| [options.logger] | Logger
| | logger instance |
@@ -1779,7 +1690,7 @@ Returns all protocols required by this service
#### service.open() ⇒ Promise
-Open service. Must be called before service is started
+Open service. Must be called before service is running
**Kind**: instance method of [Service
](#module_service.Service)
@@ -1800,79 +1711,69 @@ Start service
Start service
**Kind**: instance method of [Service
](#module_service.Service)
-
+
-## sync
+#### service.handle(message, protocol, peer) ⇒ Promise
+Handles incoming request from connected peer
-* [sync](#module_sync)
- * [.BlockFetcher](#module_sync.BlockFetcher)
- * [new BlockFetcher(options)](#new_module_sync.BlockFetcher_new)
- * [.open()](#module_sync.BlockFetcher+open) ⇒ Promise
- * [.before(taskOne, taskTwo)](#module_sync.BlockFetcher+before) ⇒ boolean
- * [.fetch(task, peer)](#module_sync.BlockFetcher+fetch) ⇒ Promise
- * [.process(entry, reply)](#module_sync.BlockFetcher+process)
- * [.FastSynchronizer](#module_sync.FastSynchronizer)
- * [new FastSynchronizer(options)](#new_module_sync.FastSynchronizer_new)
- * [.type](#module_sync.FastSynchronizer+type) ⇒ string
- * [.fetchable()](#module_sync.FastSynchronizer+fetchable) ⇒ boolean
- * [.height(peer)](#module_sync.FastSynchronizer+height) ⇒ Promise
- * [.origin()](#module_sync.FastSynchronizer+origin) ⇒ Promise
- * [.fetch([last])](#module_sync.FastSynchronizer+fetch) ⇒ Promise
- * [.handle(message, peer)](#module_sync.FastSynchronizer+handle) ⇒ Promise
- * [.open()](#module_sync.FastSynchronizer+open) ⇒ Promise
- * [.stop()](#module_sync.FastSynchronizer+stop) ⇒ Promise
- * [.Fetcher](#module_sync.Fetcher)
- * [new Fetcher(options)](#new_module_sync.Fetcher_new)
- * [.add(task)](#module_sync.Fetcher+add)
- * [.next()](#module_sync.Fetcher+next)
- * [.handle(reply, peer)](#module_sync.Fetcher+handle)
- * [.error(error, task, peer)](#module_sync.Fetcher+error)
- * [.expire()](#module_sync.Fetcher+expire)
- * [.open()](#module_sync.Fetcher+open) ⇒ Promise
- * [.start()](#module_sync.Fetcher+start) ⇒ Promise
- * [.stop()](#module_sync.Fetcher+stop) ⇒ Promise
- * [.before(taskOne, taskTwo)](#module_sync.Fetcher+before) ⇒ boolean
- * [.fetchable(peer)](#module_sync.Fetcher+fetchable) ⇒ boolean
- * [.fetch(task, peer)](#module_sync.Fetcher+fetch) ⇒ Promise
- * [.process(entry, reply)](#module_sync.Fetcher+process)
- * [.HeaderFetcher](#module_sync.HeaderFetcher)
- * [new HeaderFetcher(options)](#new_module_sync.HeaderFetcher_new)
- * [.open()](#module_sync.HeaderFetcher+open) ⇒ Promise
- * [.before(taskOne, taskTwo)](#module_sync.HeaderFetcher+before) ⇒ boolean
- * [.fetch(task, peer)](#module_sync.HeaderFetcher+fetch) ⇒ Promise
- * [.process(entry, reply)](#module_sync.HeaderFetcher+process)
- * [.LightSynchronizer](#module_sync.LightSynchronizer)
- * [new LightSynchronizer(options)](#new_module_sync.LightSynchronizer_new)
- * [.type](#module_sync.LightSynchronizer+type) ⇒ string
- * [.fetchable()](#module_sync.LightSynchronizer+fetchable) ⇒ boolean
- * [.origin()](#module_sync.LightSynchronizer+origin) ⇒ Promise
- * [.fetch([last])](#module_sync.LightSynchronizer+fetch) ⇒ Promise
- * [.handle(message, peer)](#module_sync.LightSynchronizer+handle) ⇒ Promise
- * [.open()](#module_sync.LightSynchronizer+open) ⇒ Promise
- * [.stop()](#module_sync.LightSynchronizer+stop) ⇒ Promise
- * [.Synchronizer](#module_sync.Synchronizer)
- * [new Synchronizer(options)](#new_module_sync.Synchronizer_new)
- * [.type](#module_sync.Synchronizer+type) ⇒ string
- * [.open()](#module_sync.Synchronizer+open) ⇒ Promise
- * [.fetchable()](#module_sync.Synchronizer+fetchable) ⇒ boolean
- * [.sync([height])](#module_sync.Synchronizer+sync) ⇒ Promise
- * [.stop()](#module_sync.Synchronizer+stop) ⇒ Promise
+**Kind**: instance method of [Service
](#module_service.Service)
-
+| Param | Type | Description |
+| --- | --- | --- |
+| message | Object
| message object |
+| protocol | string
| protocol name |
+| peer | Peer
| peer |
-### sync.BlockFetcher
+
+
+## sync/fetcher
+
+* [sync/fetcher](#module_sync/fetcher)
+ * [.BlockFetcher](#module_sync/fetcher.BlockFetcher)
+ * [new BlockFetcher(options)](#new_module_sync/fetcher.BlockFetcher_new)
+ * [.tasks()](#module_sync/fetcher.BlockFetcher+tasks) ⇒ Array.<Object>
+ * [.request(job)](#module_sync/fetcher.BlockFetcher+request) ⇒ Promise
+ * [.process(job, result)](#module_sync/fetcher.BlockFetcher+process) ⇒ \*
+ * [.store(blocks)](#module_sync/fetcher.BlockFetcher+store) ⇒ Promise
+ * [.peer(job)](#module_sync/fetcher.BlockFetcher+peer) ⇒ Peer
+ * [.Fetcher](#module_sync/fetcher.Fetcher)
+ * [new Fetcher(options)](#new_module_sync/fetcher.Fetcher_new)
+ * [.tasks()](#module_sync/fetcher.Fetcher+tasks) ⇒ Array.<Object>
+ * [.enqueue(job)](#module_sync/fetcher.Fetcher+enqueue)
+ * [.dequeue()](#module_sync/fetcher.Fetcher+dequeue)
+ * [._read()](#module_sync/fetcher.Fetcher+_read)
+ * [.next()](#module_sync/fetcher.Fetcher+next)
+ * [.error(error, task, peer)](#module_sync/fetcher.Fetcher+error)
+ * [.write()](#module_sync/fetcher.Fetcher+write)
+ * [.fetch()](#module_sync/fetcher.Fetcher+fetch) ⇒ Promise
+ * [.peer(job)](#module_sync/fetcher.Fetcher+peer) ⇒ Peer
+ * [.request(job)](#module_sync/fetcher.Fetcher+request) ⇒ Promise
+ * [.process(job, peer, reply)](#module_sync/fetcher.Fetcher+process)
+ * [.expire()](#module_sync/fetcher.Fetcher+expire)
+ * [.store(result)](#module_sync/fetcher.Fetcher+store) ⇒ Promise
+ * [.HeaderFetcher](#module_sync/fetcher.HeaderFetcher)
+ * [new HeaderFetcher(options)](#new_module_sync/fetcher.HeaderFetcher_new)
+ * [.request(job)](#module_sync/fetcher.HeaderFetcher+request) ⇒ Promise
+ * [.process(job, result)](#module_sync/fetcher.HeaderFetcher+process) ⇒ \*
+ * [.store(headers)](#module_sync/fetcher.HeaderFetcher+store) ⇒ Promise
+ * [.peer(job)](#module_sync/fetcher.HeaderFetcher+peer) ⇒ Peer
+
+
+
+### sync/fetcher.BlockFetcher
Implements an eth/62 based block fetcher
-**Kind**: static class of [sync
](#module_sync)
+**Kind**: static class of [sync/fetcher
](#module_sync/fetcher)
-* [.BlockFetcher](#module_sync.BlockFetcher)
- * [new BlockFetcher(options)](#new_module_sync.BlockFetcher_new)
- * [.open()](#module_sync.BlockFetcher+open) ⇒ Promise
- * [.before(taskOne, taskTwo)](#module_sync.BlockFetcher+before) ⇒ boolean
- * [.fetch(task, peer)](#module_sync.BlockFetcher+fetch) ⇒ Promise
- * [.process(entry, reply)](#module_sync.BlockFetcher+process)
+* [.BlockFetcher](#module_sync/fetcher.BlockFetcher)
+ * [new BlockFetcher(options)](#new_module_sync/fetcher.BlockFetcher_new)
+ * [.tasks()](#module_sync/fetcher.BlockFetcher+tasks) ⇒ Array.<Object>
+ * [.request(job)](#module_sync/fetcher.BlockFetcher+request) ⇒ Promise
+ * [.process(job, result)](#module_sync/fetcher.BlockFetcher+process) ⇒ \*
+ * [.store(blocks)](#module_sync/fetcher.BlockFetcher+store) ⇒ Promise
+ * [.peer(job)](#module_sync/fetcher.BlockFetcher+peer) ⇒ Peer
-
+
#### new BlockFetcher(options)
Create new block fetcher
@@ -1883,396 +1784,429 @@ Create new block fetcher
| options | Object
| | constructor parameters |
| options.pool | PeerPool
| | peer pool |
| options.chain | Chain
| | blockchain |
+| options.first | BN
| | block number to start fetching from |
+| options.count | BN
| | how many blocks to fetch |
+| [options.timeout] | number
| | fetch task timeout |
+| [options.banTime] | number
| | how long to ban misbehaving peers |
+| [options.interval] | number
| | retry interval |
| [options.maxPerRequest] | number
| 128
| max items per request |
| [options.logger] | Logger
| | Logger instance |
-
+
-#### blockFetcher.open() ⇒ Promise
-Open block fetcher. Must be called before fetcher is started
+#### blockFetcher.tasks() ⇒ Array.<Object>
+Generate list of tasks to fetch
-**Kind**: instance method of [BlockFetcher
](#module_sync.BlockFetcher)
-
+**Kind**: instance method of [BlockFetcher
](#module_sync/fetcher.BlockFetcher)
+**Returns**: Array.<Object>
- tasks
+
-#### blockFetcher.before(taskOne, taskTwo) ⇒ boolean
-Prioritizes tasks based on first block number
+#### blockFetcher.request(job) ⇒ Promise
+Requests blocks associated with this job
-**Kind**: instance method of [BlockFetcher
](#module_sync.BlockFetcher)
-**Returns**: boolean
- true if taskOne has a lower first number than taskTwo
+**Kind**: instance method of [BlockFetcher
](#module_sync/fetcher.BlockFetcher)
| Param | Type |
| --- | --- |
-| taskOne | Object
|
-| taskTwo | Object
|
+| job | Object
|
-
+
-#### blockFetcher.fetch(task, peer) ⇒ Promise
-Fetches blocks for the given task
+#### blockFetcher.process(job, result) ⇒ \*
+Process fetch result
-**Kind**: instance method of [BlockFetcher
](#module_sync.BlockFetcher)
-**Returns**: Promise
- method must return
+**Kind**: instance method of [BlockFetcher
](#module_sync/fetcher.BlockFetcher)
+**Returns**: \*
- results of processing job or undefined if job not finished
-| Param | Type |
-| --- | --- |
-| task | Object
|
-| peer | Peer
|
+| Param | Type | Description |
+| --- | --- | --- |
+| job | Object
| fetch job |
+| result | Object
| fetch result |
-
+
-#### blockFetcher.process(entry, reply)
-Process fetch reply
+#### blockFetcher.store(blocks) ⇒ Promise
+Store fetch result. Resolves once store operation is complete.
-**Kind**: instance method of [BlockFetcher
](#module_sync.BlockFetcher)
-**Emits**: event:headers
+**Kind**: instance method of [BlockFetcher
](#module_sync/fetcher.BlockFetcher)
| Param | Type | Description |
| --- | --- | --- |
-| entry | Object
| entry object |
-| entry.task | Object
| fetch task |
-| entry.peer | Peer
| peer that handled task |
-| entry.time | number
| time task was generated |
-| reply | Object
| reply data |
+| blocks | Array.<Block>
| fetch result |
-
+
-### sync.FastSynchronizer
-Implements an ethereum fast sync synchronizer
+#### blockFetcher.peer(job) ⇒ Peer
+Returns a peer that can process the given job
-**Kind**: static class of [sync
](#module_sync)
+**Kind**: instance method of [BlockFetcher
](#module_sync/fetcher.BlockFetcher)
-* [.FastSynchronizer](#module_sync.FastSynchronizer)
- * [new FastSynchronizer(options)](#new_module_sync.FastSynchronizer_new)
- * [.type](#module_sync.FastSynchronizer+type) ⇒ string
- * [.fetchable()](#module_sync.FastSynchronizer+fetchable) ⇒ boolean
- * [.height(peer)](#module_sync.FastSynchronizer+height) ⇒ Promise
- * [.origin()](#module_sync.FastSynchronizer+origin) ⇒ Promise
- * [.fetch([last])](#module_sync.FastSynchronizer+fetch) ⇒ Promise
- * [.handle(message, peer)](#module_sync.FastSynchronizer+handle) ⇒ Promise
- * [.open()](#module_sync.FastSynchronizer+open) ⇒ Promise
- * [.stop()](#module_sync.FastSynchronizer+stop) ⇒ Promise
-
-
+| Param | Type | Description |
+| --- | --- | --- |
+| job | Object
| job |
-#### new FastSynchronizer(options)
-Create new node
+
+### sync/fetcher.Fetcher
+Base class for fetchers that retrieve various data from peers. Subclasses must
+request() and process() methods. Tasks can be arbitrary objects whose structure
+is defined by subclasses. A priority queue is used to ensure tasks are fetched
+inorder.
+
+**Kind**: static class of [sync/fetcher
](#module_sync/fetcher)
+
+* [.Fetcher](#module_sync/fetcher.Fetcher)
+ * [new Fetcher(options)](#new_module_sync/fetcher.Fetcher_new)
+ * [.tasks()](#module_sync/fetcher.Fetcher+tasks) ⇒ Array.<Object>
+ * [.enqueue(job)](#module_sync/fetcher.Fetcher+enqueue)
+ * [.dequeue()](#module_sync/fetcher.Fetcher+dequeue)
+ * [._read()](#module_sync/fetcher.Fetcher+_read)
+ * [.next()](#module_sync/fetcher.Fetcher+next)
+ * [.error(error, task, peer)](#module_sync/fetcher.Fetcher+error)
+ * [.write()](#module_sync/fetcher.Fetcher+write)
+ * [.fetch()](#module_sync/fetcher.Fetcher+fetch) ⇒ Promise
+ * [.peer(job)](#module_sync/fetcher.Fetcher+peer) ⇒ Peer
+ * [.request(job)](#module_sync/fetcher.Fetcher+request) ⇒ Promise
+ * [.process(job, peer, reply)](#module_sync/fetcher.Fetcher+process)
+ * [.expire()](#module_sync/fetcher.Fetcher+expire)
+ * [.store(result)](#module_sync/fetcher.Fetcher+store) ⇒ Promise
+
+
-| Param | Type | Description |
-| --- | --- | --- |
-| options | Object
| constructor parameters |
-| options.pool | PeerPool
| peer pool |
-| options.chain | Chain
| blockchain |
-| [options.interval] | number
| refresh interval |
-| [options.logger] | Logger
| Logger instance |
+#### new Fetcher(options)
+Create new fetcher
-
-#### fastSynchronizer.type ⇒ string
-Returns synchronizer type
+| Param | Type | Default | Description |
+| --- | --- | --- | --- |
+| options | Object
| | constructor parameters |
+| options.pool | PeerPool
| | peer pool |
+| [options.timeout] | number
| | fetch task timeout |
+| [options.banTime] | number
| | how long to ban misbehaving peers |
+| [options.maxQueue] | number
| | max write queue size |
+| [options.maxPerRequest] | number
| 128
| max items per request |
+| [options.interval] | number
| | retry interval |
+| [options.logger] | Logger
| | Logger instance |
-**Kind**: instance property of [FastSynchronizer
](#module_sync.FastSynchronizer)
-**Returns**: string
- type
-
+
-#### fastSynchronizer.fetchable() ⇒ boolean
-Returns true if peer can be used to fetch blocks
+#### fetcher.tasks() ⇒ Array.<Object>
+Generate list of tasks to fetch
-**Kind**: instance method of [FastSynchronizer
](#module_sync.FastSynchronizer)
-
+**Kind**: instance method of [Fetcher
](#module_sync/fetcher.Fetcher)
+**Returns**: Array.<Object>
- tasks
+
-#### fastSynchronizer.height(peer) ⇒ Promise
-Request canonical chain height from peer. Returns a promise that resolves
-to the peer's height once it responds with its latest block header.
+#### fetcher.enqueue(job)
+Enqueue job
-**Kind**: instance method of [FastSynchronizer
](#module_sync.FastSynchronizer)
+**Kind**: instance method of [Fetcher
](#module_sync/fetcher.Fetcher)
| Param | Type |
| --- | --- |
-| peer | Peer
|
+| job | Object
|
-
+
-#### fastSynchronizer.origin() ⇒ Promise
-Find an origin peer that contains the highest total difficulty. We will
-synchronize to this peer's blockchain. Returns a promise that resolves once
-an origin peer is found.
+#### fetcher.dequeue()
+Dequeue all done tasks that completed in order
-**Kind**: instance method of [FastSynchronizer
](#module_sync.FastSynchronizer)
-**Returns**: Promise
- [description]
-
+**Kind**: instance method of [Fetcher
](#module_sync/fetcher.Fetcher)
+
-#### fastSynchronizer.fetch([last]) ⇒ Promise
-Fetch all headers from current height up to specified number (last). Returns
-a promise that resolves once all headers are downloaded.
+#### fetcher._read()
+Implements Readable._read() by pushing completed tasks to the read queue
-**Kind**: instance method of [FastSynchronizer
](#module_sync.FastSynchronizer)
-**Returns**: Promise
- Resolves with count of number of headers fetched
+**Kind**: instance method of [Fetcher
](#module_sync/fetcher.Fetcher)
+
-| Param | Type | Description |
-| --- | --- | --- |
-| [last] | BN
| number of last block header to download. If last is not specified, the best height will be used from existing peers. |
+#### fetcher.next()
+Process next task
-
+**Kind**: instance method of [Fetcher
](#module_sync/fetcher.Fetcher)
+
-#### fastSynchronizer.handle(message, peer) ⇒ Promise
-Handler for incoming requests from connected peers
+#### fetcher.error(error, task, peer)
+Handle error
-**Kind**: instance method of [FastSynchronizer
](#module_sync.FastSynchronizer)
+**Kind**: instance method of [Fetcher
](#module_sync/fetcher.Fetcher)
| Param | Type | Description |
| --- | --- | --- |
-| message | Object
| message object |
+| error | Error
| error object |
+| task | Object
| task |
| peer | Peer
| peer |
-
-
-#### fastSynchronizer.open() ⇒ Promise
-Open synchronizer. Must be called before sync() is called
+
-**Kind**: instance method of [FastSynchronizer
](#module_sync.FastSynchronizer)
-
+#### fetcher.write()
+Setup writer pipe and start writing fetch results. A pipe is used in order
+to support backpressure from storing results.
-#### fastSynchronizer.stop() ⇒ Promise
-Stop synchronization. Returns a promise that resolves once its stopped.
+**Kind**: instance method of [Fetcher
](#module_sync/fetcher.Fetcher)
+
-**Kind**: instance method of [FastSynchronizer
](#module_sync.FastSynchronizer)
-
-
-### sync.Fetcher
-Base class for fetchers that retrieve various data from peers. Subclasses must
-override the before(), fetch() and process() methods. Tasks can be arbitrary
-objects whose structure is defined by subclasses. A priority queue is used to
-ensure most important tasks are processed first based on the before() function.
-
-**Kind**: static class of [sync
](#module_sync)
+#### fetcher.fetch() ⇒ Promise
+Run the fetcher. Returns a promise that resolves once all tasks are completed.
-* [.Fetcher](#module_sync.Fetcher)
- * [new Fetcher(options)](#new_module_sync.Fetcher_new)
- * [.add(task)](#module_sync.Fetcher+add)
- * [.next()](#module_sync.Fetcher+next)
- * [.handle(reply, peer)](#module_sync.Fetcher+handle)
- * [.error(error, task, peer)](#module_sync.Fetcher+error)
- * [.expire()](#module_sync.Fetcher+expire)
- * [.open()](#module_sync.Fetcher+open) ⇒ Promise
- * [.start()](#module_sync.Fetcher+start) ⇒ Promise
- * [.stop()](#module_sync.Fetcher+stop) ⇒ Promise
- * [.before(taskOne, taskTwo)](#module_sync.Fetcher+before) ⇒ boolean
- * [.fetchable(peer)](#module_sync.Fetcher+fetchable) ⇒ boolean
- * [.fetch(task, peer)](#module_sync.Fetcher+fetch) ⇒ Promise
- * [.process(entry, reply)](#module_sync.Fetcher+process)
-
-
+**Kind**: instance method of [Fetcher
](#module_sync/fetcher.Fetcher)
+
-#### new Fetcher(options)
-Create new fetcher
+#### fetcher.peer(job) ⇒ Peer
+Returns a peer that can process the given job
+**Kind**: instance method of [Fetcher
](#module_sync/fetcher.Fetcher)
| Param | Type | Description |
| --- | --- | --- |
-| options | Object
| constructor parameters |
-| options.pool | PeerPool
| peer pool |
-| [options.timeout] | number
| fetch task timeout |
-| [options.interval] | number
| retry interval |
-| [options.logger] | Logger
| Logger instance |
+| job | Object
| job |
-
+
-#### fetcher.add(task)
-Add new task to fetcher
+#### fetcher.request(job) ⇒ Promise
+Request results from peer for the given job. Resolves with the raw result.
-**Kind**: instance method of [Fetcher
](#module_sync.Fetcher)
+**Kind**: instance method of [Fetcher
](#module_sync/fetcher.Fetcher)
| Param | Type |
| --- | --- |
-| task | Object
|
+| job | Object
|
-
+
-#### fetcher.next()
-Process next task
+#### fetcher.process(job, peer, reply)
+Process the reply for the given job
-**Kind**: instance method of [Fetcher
](#module_sync.Fetcher)
-
+**Kind**: instance method of [Fetcher
](#module_sync/fetcher.Fetcher)
-#### fetcher.handle(reply, peer)
-Handler for responses from peers. Finds and processes the corresponding
-task using the process() method, and resets peer to an idle state.
+| Param | Type | Description |
+| --- | --- | --- |
+| job | Object
| fetch job |
+| peer | Peer
| peer that handled task |
+| reply | Object
| reply data |
-**Kind**: instance method of [Fetcher
](#module_sync.Fetcher)
+
-| Param | Type |
-| --- | --- |
-| reply | Object
|
-| peer | Peer
|
+#### fetcher.expire()
+Expire job that has timed out and ban associated peer. Timed out tasks will
+be re-inserted into the queue.
-
+**Kind**: instance method of [Fetcher
](#module_sync/fetcher.Fetcher)
+
-#### fetcher.error(error, task, peer)
-Handle error
+#### fetcher.store(result) ⇒ Promise
+Store fetch result. Resolves once store operation is complete.
-**Kind**: instance method of [Fetcher
](#module_sync.Fetcher)
+**Kind**: instance method of [Fetcher
](#module_sync/fetcher.Fetcher)
| Param | Type | Description |
| --- | --- | --- |
-| error | Error
| error object |
-| task | Object
| task |
-| peer | Peer
| peer |
+| result | Object
| fetch result |
-
+
-#### fetcher.expire()
-Expires all tasks that have timed out. Peers that take too long to respond
-will be banned for 5 minutes. Timeout out tasks will be re-inserted into the
-queue.
+### sync/fetcher.HeaderFetcher
+Implements an les/1 based header fetcher
-**Kind**: instance method of [Fetcher
](#module_sync.Fetcher)
-
+**Kind**: static class of [sync/fetcher
](#module_sync/fetcher)
-#### fetcher.open() ⇒ Promise
-Open fetcher. Must be called before fetcher is started
+* [.HeaderFetcher](#module_sync/fetcher.HeaderFetcher)
+ * [new HeaderFetcher(options)](#new_module_sync/fetcher.HeaderFetcher_new)
+ * [.request(job)](#module_sync/fetcher.HeaderFetcher+request) ⇒ Promise
+ * [.process(job, result)](#module_sync/fetcher.HeaderFetcher+process) ⇒ \*
+ * [.store(headers)](#module_sync/fetcher.HeaderFetcher+store) ⇒ Promise
+ * [.peer(job)](#module_sync/fetcher.HeaderFetcher+peer) ⇒ Peer
-**Kind**: instance method of [Fetcher
](#module_sync.Fetcher)
-
+
-#### fetcher.start() ⇒ Promise
-Run the fetcher. Returns a promise that resolves once all tasks are completed.
+#### new HeaderFetcher(options)
+Create new header fetcher
-**Kind**: instance method of [Fetcher
](#module_sync.Fetcher)
-
-#### fetcher.stop() ⇒ Promise
-Stop the fetcher. Returns a promise that resolves once it is stopped.
+| Param | Type | Default | Description |
+| --- | --- | --- | --- |
+| options | Object
| | constructor parameters |
+| options.pool | PeerPool
| | peer pool |
+| options.first | BN
| | header number to start fetching from |
+| options.count | BN
| | how many headers to fetch |
+| options.flow | FlowControl
| | flow control manager |
+| [options.timeout] | number
| | fetch task timeout |
+| [options.banTime] | number
| | how long to ban misbehaving peers |
+| [options.interval] | number
| | retry interval |
+| [options.maxPerRequest] | number
| 192
| max items per request |
+| [options.logger] | Logger
| | Logger instance |
-**Kind**: instance method of [Fetcher
](#module_sync.Fetcher)
-
+
-#### fetcher.before(taskOne, taskTwo) ⇒ boolean
-True if taskOne has a higher priority than taskTwo
+#### headerFetcher.request(job) ⇒ Promise
+Requests block headers for the given task
-**Kind**: instance method of [Fetcher
](#module_sync.Fetcher)
+**Kind**: instance method of [HeaderFetcher
](#module_sync/fetcher.HeaderFetcher)
| Param | Type |
| --- | --- |
-| taskOne | Object
|
-| taskTwo | Object
|
+| job | Object
|
-
+
-#### fetcher.fetchable(peer) ⇒ boolean
-True if peer can process fetch tasks
+#### headerFetcher.process(job, result) ⇒ \*
+Process fetch result
-**Kind**: instance method of [Fetcher
](#module_sync.Fetcher)
+**Kind**: instance method of [HeaderFetcher
](#module_sync/fetcher.HeaderFetcher)
+**Returns**: \*
- results of processing job or undefined if job not finished
| Param | Type | Description |
| --- | --- | --- |
-| peer | Peer
| candidate peer |
+| job | Object
| fetch job |
+| result | Object
| fetch result |
-
+
-#### fetcher.fetch(task, peer) ⇒ Promise
-Sends a protocol command to peer for the specified task. Must return a
-promise that resolves with the decoded response to the commad.
+#### headerFetcher.store(headers) ⇒ Promise
+Store fetch result. Resolves once store operation is complete.
-**Kind**: instance method of [Fetcher
](#module_sync.Fetcher)
+**Kind**: instance method of [HeaderFetcher
](#module_sync/fetcher.HeaderFetcher)
-| Param | Type |
-| --- | --- |
-| task | Object
|
-| peer | Peer
|
+| Param | Type | Description |
+| --- | --- | --- |
+| headers | Array.<Header>
| fetch result |
-
+
-#### fetcher.process(entry, reply)
-Process the reply for the given fetch queue entry
+#### headerFetcher.peer(job) ⇒ Peer
+Returns a peer that can process the given job
-**Kind**: instance method of [Fetcher
](#module_sync.Fetcher)
+**Kind**: instance method of [HeaderFetcher
](#module_sync/fetcher.HeaderFetcher)
| Param | Type | Description |
| --- | --- | --- |
-| entry | Object
| entry object |
-| entry.task | Object
| fetch task |
-| entry.peer | Peer
| peer that handled task |
-| entry.time | number
| time task was generated |
-| reply | Object
| reply data |
+| job | Object
| job |
-
+
-### sync.HeaderFetcher
-Implements an les/1 based header fetcher
+## sync
+
+* [sync](#module_sync)
+ * [.FastSynchronizer](#module_sync.FastSynchronizer)
+ * [.type](#module_sync.FastSynchronizer+type) ⇒ string
+ * [.syncable()](#module_sync.FastSynchronizer+syncable) ⇒ boolean
+ * [.best(min)](#module_sync.FastSynchronizer+best) ⇒ Peer
+ * [.latest()](#module_sync.FastSynchronizer+latest) ⇒ Promise
+ * [.syncWithPeer(peer)](#module_sync.FastSynchronizer+syncWithPeer) ⇒ Promise
+ * [.sync()](#module_sync.FastSynchronizer+sync) ⇒ Promise
+ * [.announced(announcements, peer)](#module_sync.FastSynchronizer+announced) ⇒ Promise
+ * [.open()](#module_sync.FastSynchronizer+open) ⇒ Promise
+ * [.stop()](#module_sync.FastSynchronizer+stop) ⇒ Promise
+ * [.LightSynchronizer](#module_sync.LightSynchronizer)
+ * [.type](#module_sync.LightSynchronizer+type) ⇒ string
+ * [.syncable()](#module_sync.LightSynchronizer+syncable) ⇒ boolean
+ * [.best()](#module_sync.LightSynchronizer+best) ⇒ Peer
+ * [.syncWithPeer(peer)](#module_sync.LightSynchronizer+syncWithPeer) ⇒ Promise
+ * [.sync()](#module_sync.LightSynchronizer+sync) ⇒ Promise
+ * [.open()](#module_sync.LightSynchronizer+open) ⇒ Promise
+ * [.stop()](#module_sync.LightSynchronizer+stop) ⇒ Promise
+ * [.Synchronizer](#module_sync.Synchronizer)
+ * [new Synchronizer(options)](#new_module_sync.Synchronizer_new)
+ * [.type](#module_sync.Synchronizer+type) ⇒ string
+ * [.open()](#module_sync.Synchronizer+open) ⇒ Promise
+ * [.syncable()](#module_sync.Synchronizer+syncable) ⇒ boolean
+ * [.start()](#module_sync.Synchronizer+start) ⇒ Promise
+ * [.stop()](#module_sync.Synchronizer+stop) ⇒ Promise
+
+
+
+### sync.FastSynchronizer
+Implements an ethereum fast sync synchronizer
**Kind**: static class of [sync
](#module_sync)
-* [.HeaderFetcher](#module_sync.HeaderFetcher)
- * [new HeaderFetcher(options)](#new_module_sync.HeaderFetcher_new)
- * [.open()](#module_sync.HeaderFetcher+open) ⇒ Promise
- * [.before(taskOne, taskTwo)](#module_sync.HeaderFetcher+before) ⇒ boolean
- * [.fetch(task, peer)](#module_sync.HeaderFetcher+fetch) ⇒ Promise
- * [.process(entry, reply)](#module_sync.HeaderFetcher+process)
+* [.FastSynchronizer](#module_sync.FastSynchronizer)
+ * [.type](#module_sync.FastSynchronizer+type) ⇒ string
+ * [.syncable()](#module_sync.FastSynchronizer+syncable) ⇒ boolean
+ * [.best(min)](#module_sync.FastSynchronizer+best) ⇒ Peer
+ * [.latest()](#module_sync.FastSynchronizer+latest) ⇒ Promise
+ * [.syncWithPeer(peer)](#module_sync.FastSynchronizer+syncWithPeer) ⇒ Promise
+ * [.sync()](#module_sync.FastSynchronizer+sync) ⇒ Promise
+ * [.announced(announcements, peer)](#module_sync.FastSynchronizer+announced) ⇒ Promise
+ * [.open()](#module_sync.FastSynchronizer+open) ⇒ Promise
+ * [.stop()](#module_sync.FastSynchronizer+stop) ⇒ Promise
-
+
-#### new HeaderFetcher(options)
-Create new header fetcher
+#### fastSynchronizer.type ⇒ string
+Returns synchronizer type
+**Kind**: instance property of [FastSynchronizer
](#module_sync.FastSynchronizer)
+**Returns**: string
- type
+
-| Param | Type | Default | Description |
-| --- | --- | --- | --- |
-| options | Object
| | constructor parameters |
-| options.pool | PeerPool
| | peer pool |
-| options.flow | FlowControl
| | flow control manager |
-| options.chain | Chain
| | blockchain |
-| [options.maxPerRequest] | number
| 192
| max items per request |
-| [options.logger] | Logger
| | Logger instance |
+#### fastSynchronizer.syncable() ⇒ boolean
+Returns true if peer can be used for syncing
-
+**Kind**: instance method of [FastSynchronizer
](#module_sync.FastSynchronizer)
+
-#### headerFetcher.open() ⇒ Promise
-Open header fetcher. Must be called before fetcher is started
+#### fastSynchronizer.best(min) ⇒ Peer
+Finds the best peer to sync with. We will synchronize to this peer's
+blockchain. Returns null if no valid peer is found
-**Kind**: instance method of [HeaderFetcher
](#module_sync.HeaderFetcher)
-
+**Kind**: instance method of [FastSynchronizer
](#module_sync.FastSynchronizer)
-#### headerFetcher.before(taskOne, taskTwo) ⇒ boolean
-Prioritizes tasks based on first block number
+| Param | Type | Description |
+| --- | --- | --- |
+| min | number
| minimum numbers of peers to search |
-**Kind**: instance method of [HeaderFetcher
](#module_sync.HeaderFetcher)
-**Returns**: boolean
- true if taskOne has a lower first number than taskTwo
+
-| Param | Type |
-| --- | --- |
-| taskOne | Object
|
-| taskTwo | Object
|
+#### fastSynchronizer.latest() ⇒ Promise
+Get latest header of peer
-
+**Kind**: instance method of [FastSynchronizer
](#module_sync.FastSynchronizer)
+**Returns**: Promise
- Resolves with header
+
-#### headerFetcher.fetch(task, peer) ⇒ Promise
-Fetches block headers for the given task
+#### fastSynchronizer.syncWithPeer(peer) ⇒ Promise
+Sync all blocks and state from peer starting from current height.
-**Kind**: instance method of [HeaderFetcher
](#module_sync.HeaderFetcher)
-**Returns**: Promise
- method must return
+**Kind**: instance method of [FastSynchronizer
](#module_sync.FastSynchronizer)
+**Returns**: Promise
- Resolves when sync completed
-| Param | Type |
-| --- | --- |
-| task | Object
|
-| peer | Peer
|
+| Param | Type | Description |
+| --- | --- | --- |
+| peer | Peer
| remote peer to sync with |
-
+
-#### headerFetcher.process(entry, reply)
-Process the getBlockHeaders reply
+#### fastSynchronizer.sync() ⇒ Promise
+Fetch all blocks from current height up to highest found amongst peers and
+fetch entire recent state trie
-**Kind**: instance method of [HeaderFetcher
](#module_sync.HeaderFetcher)
-**Emits**: event:headers
+**Kind**: instance method of [FastSynchronizer
](#module_sync.FastSynchronizer)
+**Returns**: Promise
- Resolves with true if sync successful
+
+
+#### fastSynchronizer.announced(announcements, peer) ⇒ Promise
+Chain was updated
+
+**Kind**: instance method of [FastSynchronizer
](#module_sync.FastSynchronizer)
| Param | Type | Description |
| --- | --- | --- |
-| entry | Object
| entry object |
-| entry.task | Object
| fetch task |
-| entry.peer | Peer
| peer that handled task |
-| entry.time | number
| time task was generated |
-| reply | Object
| reply data |
+| announcements | Array.<Object>
| new block hash announcements |
+| peer | Peer
| peer |
+
+
+
+#### fastSynchronizer.open() ⇒ Promise
+Open synchronizer. Must be called before sync() is called
+**Kind**: instance method of [FastSynchronizer
](#module_sync.FastSynchronizer)
+
+
+#### fastSynchronizer.stop() ⇒ Promise
+Stop synchronization. Returns a promise that resolves once its stopped.
+
+**Kind**: instance method of [FastSynchronizer
](#module_sync.FastSynchronizer)
### sync.LightSynchronizer
@@ -2281,30 +2215,14 @@ Implements an ethereum light sync synchronizer
**Kind**: static class of [sync
](#module_sync)
* [.LightSynchronizer](#module_sync.LightSynchronizer)
- * [new LightSynchronizer(options)](#new_module_sync.LightSynchronizer_new)
* [.type](#module_sync.LightSynchronizer+type) ⇒ string
- * [.fetchable()](#module_sync.LightSynchronizer+fetchable) ⇒ boolean
- * [.origin()](#module_sync.LightSynchronizer+origin) ⇒ Promise
- * [.fetch([last])](#module_sync.LightSynchronizer+fetch) ⇒ Promise
- * [.handle(message, peer)](#module_sync.LightSynchronizer+handle) ⇒ Promise
+ * [.syncable()](#module_sync.LightSynchronizer+syncable) ⇒ boolean
+ * [.best()](#module_sync.LightSynchronizer+best) ⇒ Peer
+ * [.syncWithPeer(peer)](#module_sync.LightSynchronizer+syncWithPeer) ⇒ Promise
+ * [.sync()](#module_sync.LightSynchronizer+sync) ⇒ Promise
* [.open()](#module_sync.LightSynchronizer+open) ⇒ Promise
* [.stop()](#module_sync.LightSynchronizer+stop) ⇒ Promise
-
-
-#### new LightSynchronizer(options)
-Create new node
-
-
-| Param | Type | Description |
-| --- | --- | --- |
-| options | Object
| constructor parameters |
-| options.pool | PeerPool
| peer pool |
-| options.chain | Chain
| blockchain |
-| options.flow | FlowControl
| flow control manager |
-| [options.interval] | number
| refresh interval |
-| [options.logger] | Logger
| Logger instance |
-
#### lightSynchronizer.type ⇒ string
@@ -2312,46 +2230,38 @@ Returns synchronizer type
**Kind**: instance property of [LightSynchronizer
](#module_sync.LightSynchronizer)
**Returns**: string
- type
-
+
-#### lightSynchronizer.fetchable() ⇒ boolean
-Returns true if peer can be used to fetch headers
+#### lightSynchronizer.syncable() ⇒ boolean
+Returns true if peer can be used for syncing
**Kind**: instance method of [LightSynchronizer
](#module_sync.LightSynchronizer)
-
+
-#### lightSynchronizer.origin() ⇒ Promise
-Find an origin peer that contains the highest total difficulty. We will
-synchronize to this peer's blockchain. Returns a promise that resolves once
-an origin peer is found.
+#### lightSynchronizer.best() ⇒ Peer
+Finds the best peer to sync with. We will synchronize to this peer's
+blockchain. Returns null if no valid peer is found
**Kind**: instance method of [LightSynchronizer
](#module_sync.LightSynchronizer)
-**Returns**: Promise
- Resolves with [ origin peer, height ]
-
+
-#### lightSynchronizer.fetch([last]) ⇒ Promise
-Fetch all headers from current height up to specified number (last). Returns
-a promise that resolves once all headers are downloaded.
+#### lightSynchronizer.syncWithPeer(peer) ⇒ Promise
+Sync all headers and state from peer starting from current height.
**Kind**: instance method of [LightSynchronizer
](#module_sync.LightSynchronizer)
-**Returns**: Promise
- Resolves with count of number of headers fetched
+**Returns**: Promise
- Resolves when sync completed
| Param | Type | Description |
| --- | --- | --- |
-| [last] | BN
| number of last block header to download. If last is not specified, the best height will be used from existing peers. |
+| peer | Peer
| remote peer to sync with |
-
+
-#### lightSynchronizer.handle(message, peer) ⇒ Promise
-Handler for incoming requests from connected peers
+#### lightSynchronizer.sync() ⇒ Promise
+Fetch all headers from current height up to highest found amongst peers
**Kind**: instance method of [LightSynchronizer
](#module_sync.LightSynchronizer)
-
-| Param | Type | Description |
-| --- | --- | --- |
-| message | Object
| message object |
-| peer | Peer
| peer |
-
+**Returns**: Promise
- Resolves with true if sync successful
#### lightSynchronizer.open() ⇒ Promise
@@ -2375,8 +2285,8 @@ Base class for blockchain synchronizers
* [new Synchronizer(options)](#new_module_sync.Synchronizer_new)
* [.type](#module_sync.Synchronizer+type) ⇒ string
* [.open()](#module_sync.Synchronizer+open) ⇒ Promise
- * [.fetchable()](#module_sync.Synchronizer+fetchable) ⇒ boolean
- * [.sync([height])](#module_sync.Synchronizer+sync) ⇒ Promise
+ * [.syncable()](#module_sync.Synchronizer+syncable) ⇒ boolean
+ * [.start()](#module_sync.Synchronizer+start) ⇒ Promise
* [.stop()](#module_sync.Synchronizer+stop) ⇒ Promise
@@ -2385,13 +2295,15 @@ Base class for blockchain synchronizers
Create new node
-| Param | Type | Description |
-| --- | --- | --- |
-| options | Object
| constructor parameters |
-| options.pool | PeerPool
| peer pool |
-| options.chain | Chain
| blockchain |
-| [options.interval] | number
| refresh interval |
-| [options.logger] | Logger
| Logger instance |
+| Param | Type | Default | Description |
+| --- | --- | --- | --- |
+| options | Object
| | constructor parameters |
+| options.pool | PeerPool
| | peer pool |
+| options.chain | Chain
| | blockchain |
+| options.flow | FlowControl
| | flow control manager |
+| [options.minPeers] | number
| 3
| number of peers needed before syncing |
+| [options.interval] | number
| | refresh interval |
+| [options.logger] | Logger
| | Logger instance |
@@ -2406,24 +2318,18 @@ Returns synchronizer type
Open synchronizer. Must be called before sync() is called
**Kind**: instance method of [Synchronizer
](#module_sync.Synchronizer)
-
+
-#### synchronizer.fetchable() ⇒ boolean
-Returns true if peer can be used to fetch data
+#### synchronizer.syncable() ⇒ boolean
+Returns true if peer can be used for syncing
**Kind**: instance method of [Synchronizer
](#module_sync.Synchronizer)
-
+
-#### synchronizer.sync([height]) ⇒ Promise
-Synchronize blockchain. Returns a promise that resolves once chain is
-synchronized
+#### synchronizer.start() ⇒ Promise
+Start synchronization
**Kind**: instance method of [Synchronizer
](#module_sync.Synchronizer)
-
-| Param | Type | Description |
-| --- | --- | --- |
-| [height] | BN
| number of last block to fetch. Will be discovered from peers if not specified. |
-
#### synchronizer.stop() ⇒ Promise
diff --git a/lib/blockchain/blockpool.js b/lib/blockchain/blockpool.js
deleted file mode 100644
index 2b91f70..0000000
--- a/lib/blockchain/blockpool.js
+++ /dev/null
@@ -1,94 +0,0 @@
-'use strict'
-
-const BN = require('ethereumjs-util').BN
-const { defaultLogger } = require('../logging')
-
-const defaultOptions = {
- logger: defaultLogger
-}
-
-/**
- * Pool of blockchain segments
- * @memberof module:blockchain
- */
-class BlockPool {
- /**
- * Create new block pool
- * @param {Object} options constructor parameters
- * @param {Chain} options.chain blockchain
- * @param {Logger} [options.logger] Logger instance
- */
- constructor (options) {
- options = {...defaultOptions, ...options}
-
- this.logger = options.logger
- this.chain = options.chain
- this.pool = new Map()
- this.init()
- }
-
- init () {
- this.opened = false
- }
-
- /**
- * Size of pool
- * @type {number}
- */
- get size () {
- return this.pool.size
- }
-
- /**
- * Open block pool and wait for blockchain to open
- * @return {Promise}
- */
- async open () {
- if (this.opened) {
- return false
- }
- await this.chain.open()
- this.opened = true
- }
-
- /**
- * Add a blockchain segment to the pool. Returns a promise that resolves once
- * the segment has been added to the pool. Segments are automatically inserted
- * into the blockchain once prior gaps are filled.
- * @param {Block[]} blocks list of sequential blocks
- * @return {Promise}
- */
- async add (blocks) {
- if (!this.opened) {
- return false
- }
-
- if (!Array.isArray(blocks)) {
- blocks = [ blocks ]
- }
-
- let latest = this.chain.blocks.height
- let first = new BN(blocks[0].header.number)
-
- if (first.gt(latest.addn(1))) {
- // if block segment arrived out of order, save it to the pool
- this.pool.set(first.toString(), blocks)
- return
- }
- while (blocks) {
- // otherwise save headers and keep saving headers from header pool in order
- let last = new BN(blocks[blocks.length - 1].header.number)
- let hash = blocks[0].hash().toString('hex').slice(0, 8) + '...'
- await this.chain.putBlocks(blocks)
- this.logger.info(`Imported blocks count=${blocks.length} number=${first.toString(10)} hash=${hash}`)
- latest = last
- blocks = this.pool.get(last.addn(1).toString())
- if (blocks) {
- this.pool.delete(last.addn(1).toString())
- first = new BN(blocks[0].header.number)
- }
- }
- }
-}
-
-module.exports = BlockPool
diff --git a/lib/blockchain/headerpool.js b/lib/blockchain/headerpool.js
deleted file mode 100644
index 906bd28..0000000
--- a/lib/blockchain/headerpool.js
+++ /dev/null
@@ -1,51 +0,0 @@
-'use strict'
-
-const BlockPool = require('./blockpool')
-const BN = require('ethereumjs-util').BN
-
-/**
- * Pool of headerchain segments
- * @memberof module:blockchain
- */
-class HeaderPool extends BlockPool {
- /**
- * Add a headerchain segment to the pool. Returns a promise that resolves once
- * the segment has been added to the pool. Segments are automatically inserted
- * into the blockchain once prior gaps are filled.
- * @param {Header[]} headers list of sequential headers
- * @return {Promise}
- */
- async add (headers) {
- if (!this.opened) {
- return false
- }
-
- if (!Array.isArray(headers)) {
- headers = [ headers ]
- }
-
- let latest = this.chain.headers.height
- let first = new BN(headers[0].number)
-
- if (first.gt(latest.addn(1))) {
- // if block segment arrived out of order, save it to the pool
- this.pool.set(first.toString(), headers)
- return
- }
- while (headers) {
- // otherwise save headers and keep saving headers from header pool in order
- let last = new BN(headers[headers.length - 1].number)
- let hash = headers[0].hash().toString('hex').slice(0, 8) + '...'
- await this.chain.putHeaders(headers)
- this.logger.info(`Imported headers count=${headers.length} number=${first.toString(10)} hash=${hash}`)
- latest = last
- headers = this.pool.get(last.addn(1).toString())
- if (headers) {
- this.pool.delete(last.addn(1).toString())
- first = new BN(headers[0].number)
- }
- }
- }
-}
-
-module.exports = HeaderPool
diff --git a/lib/blockchain/index.js b/lib/blockchain/index.js
index 519fa87..5b8f7bf 100644
--- a/lib/blockchain/index.js
+++ b/lib/blockchain/index.js
@@ -5,5 +5,3 @@
*/
exports.Chain = require('./chain')
-exports.BlockPool = require('./blockpool')
-exports.HeaderPool = require('./headerpool')
diff --git a/lib/handler/ethhandler.js b/lib/handler/ethhandler.js
deleted file mode 100644
index 963390c..0000000
--- a/lib/handler/ethhandler.js
+++ /dev/null
@@ -1,46 +0,0 @@
-'use strict'
-
-const Handler = require('./handler')
-
-/**
- * ETH protocol handler
- * @memberof module:handler
- */
-class EthHandler extends Handler {
- /**
- * Message event to listen for
- * @return {string} name of message event
- */
- get event () {
- return 'message:eth'
- }
-
- /**
- * Handles incoming ETH request from connected peer
- * @param {Object} message message object
- * @param {Peer} peer peer
- * @return {Promise}
- */
- async handle (message, peer) {
- try {
- if (!this.chain.opened) {
- await this.chain.open()
- }
-
- if (message.name === 'GetBlockHeaders') {
- const { block, max, skip, reverse } = message.data
- const headers = await this.chain.getHeaders(block, max, skip, reverse)
- peer.eth.send('BlockHeaders', headers)
- } else if (message.name === 'GetBlockBodies') {
- const hashes = message.data
- const blocks = await Promise.all(hashes.map(hash => this.chain.getBlock(hash)))
- const bodies = blocks.map(block => block.raw.slice(1))
- peer.eth.send('BlockBodies', bodies)
- }
- } catch (error) {
- this.emit('error', error)
- }
- }
-}
-
-module.exports = EthHandler
diff --git a/lib/handler/handler.js b/lib/handler/handler.js
deleted file mode 100644
index 8c8a023..0000000
--- a/lib/handler/handler.js
+++ /dev/null
@@ -1,69 +0,0 @@
-'use strict'
-
-const EventEmitter = require('events')
-const { defaultLogger } = require('../logging')
-
-const defaultOptions = {
- logger: defaultLogger
-}
-
-/**
- * Base class for protocol handlers
- * @memberof module:handler
- */
-class Handler extends EventEmitter {
- /**
- * Create new handler
- * @param {Object} options constructor parameters
- * @param {PeerPool} options.pool peer pool
- * @param {Chain} options.chain blockchain
- * @param {Logger} [options.logger] Logger instance
- */
- constructor (options) {
- super(options)
- options = {...defaultOptions, ...options}
-
- this.logger = options.logger
- this.pool = options.pool
- this.chain = options.chain
- this.running = false
- this.pool.on(this.event, (message, peer) => {
- if (this.running) {
- this.handle(message, peer)
- }
- })
- }
-
- /**
- * Message event to listen for
- * @return {string} name of message event
- */
- get event () {
- throw new Error('Unimplemented')
- }
-
- /**
- * Start handler
- */
- start () {
- this.running = true
- }
-
- /**
- * Stop handler
- */
- stop () {
- this.running = false
- }
-
- /**
- * Handles incoming request from connected peer
- * @param {Object} message message object
- * @param {Peer} peer peer
- * @return {Promise}
- */
- async handle (message, peer) {
- }
-}
-
-module.exports = Handler
diff --git a/lib/handler/index.js b/lib/handler/index.js
deleted file mode 100644
index 252eca4..0000000
--- a/lib/handler/index.js
+++ /dev/null
@@ -1,9 +0,0 @@
-'use strict'
-
-/**
- * @module handler
- */
-
-exports.Handler = require('./handler')
-exports.EthHandler = require('./ethhandler')
-exports.LesHandler = require('./leshandler')
diff --git a/lib/handler/leshandler.js b/lib/handler/leshandler.js
deleted file mode 100644
index 32f7a03..0000000
--- a/lib/handler/leshandler.js
+++ /dev/null
@@ -1,60 +0,0 @@
-'use strict'
-
-const Handler = require('./handler')
-
-/**
- * LES protocol handler
- * @memberof module:handler
- */
-class LesHandler extends Handler {
- /**
- * Create new handler
- * @param {Object} options constructor parameters
- * @param {PeerPool} options.pool peer pool
- * @param {Chain} options.chain blockchain
- * @param {FlowControl} options.flow flow control manager
- * @param {Logger} [options.logger] Logger instance
- */
- constructor (options) {
- super(options)
- this.flow = options.flow
- }
-
- /**
- * Message event to listen for
- * @return {string} name of message event
- */
- get event () {
- return 'message:les'
- }
-
- /**
- * Handles incoming LES requests from connected peer
- * @param {Object} message message object
- * @param {Peer} peer peer
- * @return {Promise}
- */
- async handle (message, peer) {
- try {
- if (!this.chain.opened) {
- await this.chain.open()
- }
-
- if (message.name === 'GetBlockHeaders') {
- const { reqId, block, max, skip, reverse } = message.data
- const bv = this.flow.handleRequest(peer, message.name, max)
- if (bv < 0) {
- this.pool.ban(peer, 300000)
- this.logger.debug(`Dropping peer for violating flow control ${peer}`)
- } else {
- const headers = await this.chain.getHeaders(block, max, skip, reverse)
- peer.les.send('BlockHeaders', { reqId, bv, headers })
- }
- }
- } catch (error) {
- this.emit('error', error)
- }
- }
-}
-
-module.exports = LesHandler
diff --git a/lib/index.js b/lib/index.js
index 9db7dd8..52625d9 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -22,8 +22,6 @@ exports.define = function define (name, path) {
// Blockchain
exports.define('blockchain', './blockchain')
exports.define('Chain', './blockchain/chain')
-exports.define('BlockPool', './blockchain/blockpool')
-exports.define('HeaderPool', './blockchain/headerpool')
// Handler
exports.define('handler', './handler')
diff --git a/lib/net/peerpool.js b/lib/net/peerpool.js
index e564254..2630267 100644
--- a/lib/net/peerpool.js
+++ b/lib/net/peerpool.js
@@ -6,7 +6,8 @@ const Peer = require('./peer/peer')
const defaultOptions = {
logger: defaultLogger,
- servers: []
+ servers: [],
+ maxPeers: 25
}
/**
@@ -30,6 +31,7 @@ class PeerPool extends EventEmitter {
* Create new peer pool
* @param {Object} options constructor parameters
* @param {Server[]} options.servers servers to aggregate peers from
+ * @param {number} [options.maxPeers=25] maximum peers allowed
* @param {Logger} [options.logger] logger instance
*/
constructor (options) {
@@ -39,6 +41,7 @@ class PeerPool extends EventEmitter {
this.servers = options.servers
this.logger = options.logger
+ this.maxPeers = options.maxPeers
this.pool = new Map()
this.init()
}
@@ -79,6 +82,14 @@ class PeerPool extends EventEmitter {
return Array.from(this.pool.values())
}
+ /**
+ * Number of peers in pool
+ * @type {number}
+ */
+ get size () {
+ return this.peers.length
+ }
+
/**
* Return true if pool contains the specified peer
* @param {Peer|string} peer object or peer id
@@ -108,6 +119,7 @@ class PeerPool extends EventEmitter {
* @param {Peer} peer
*/
connected (peer) {
+ if (this.size >= this.maxPeers) return
peer.on('message', (message, protocol) => {
if (this.pool.get(peer.id)) {
this.emit('message', message, protocol, peer)
diff --git a/lib/net/server/libp2pserver.js b/lib/net/server/libp2pserver.js
index 9fd5304..66928ac 100644
--- a/lib/net/server/libp2pserver.js
+++ b/lib/net/server/libp2pserver.js
@@ -7,7 +7,7 @@ const Libp2pNode = require('../peer/libp2pnode')
const Libp2pPeer = require('../peer/libp2ppeer')
const defaultOptions = {
- multiaddrs: [ '/ip4/127.0.0.1/tcp/50505', '/ip4/127.0.0.1/tcp/50580/ws' ],
+ multiaddrs: [ '/ip4/127.0.0.1/tcp/50580/ws' ],
key: null,
bootnodes: []
}
diff --git a/lib/net/server/rlpxserver.js b/lib/net/server/rlpxserver.js
index b6af051..ff096f1 100644
--- a/lib/net/server/rlpxserver.js
+++ b/lib/net/server/rlpxserver.js
@@ -23,7 +23,8 @@ const ignoredErrors = new RegExp([
'Handshake timed out',
'Invalid address buffer',
'Invalid MAC',
- 'Invalid timestamp buffer'
+ 'Invalid timestamp buffer',
+ 'Hash verification failed'
].join('|'))
/**
diff --git a/lib/node.js b/lib/node.js
index 5c076fd..ce9dab8 100644
--- a/lib/node.js
+++ b/lib/node.js
@@ -1,10 +1,12 @@
'use strict'
const EventEmitter = require('events')
-const { EthereumService } = require('./service')
+const { FastEthereumService, LightEthereumService } = require('./service')
const { defaultLogger } = require('./logging')
const defaultOptions = {
+ minPeers: 3,
+ maxPeers: 25,
logger: defaultLogger,
servers: []
}
@@ -25,6 +27,7 @@ class Node extends EventEmitter {
* @param {boolean} [options.lightserv=false] serve LES requests
* @param {Server[]} [options.servers=[]] list of servers to use
* @param {Object[]} [options.bootnodes] list of bootnodes to use for discovery
+ * @param {number} [options.minPeers=3] number of peers needed before syncing
* @param {number} [options.maxPeers=25] maximum peers allowed
* @param {string[]} [options.clientFilter] list of supported clients
* @param {number} [options.refreshInterval] how often to discover new peers
@@ -36,15 +39,26 @@ class Node extends EventEmitter {
this.logger = options.logger
this.common = options.common
this.servers = options.servers
+ this.syncmode = options.syncmode
this.services = [
- new EthereumService({
- servers: this.servers,
- logger: this.logger,
- syncmode: options.syncmode,
- lightserv: options.lightserv,
- common: options.common,
- db: options.db
- })
+ this.syncmode === 'fast'
+ ? new FastEthereumService({
+ servers: this.servers,
+ logger: this.logger,
+ lightserv: options.lightserv,
+ common: options.common,
+ minPeers: options.minPeers,
+ maxPeers: options.maxPeers,
+ db: options.db
+ })
+ : new LightEthereumService({
+ servers: this.servers,
+ logger: this.logger,
+ common: options.common,
+ minPeers: options.minPeers,
+ maxPeers: options.maxPeers,
+ db: options.db
+ })
]
this.opened = false
this.started = false
diff --git a/lib/service/ethereumservice.js b/lib/service/ethereumservice.js
index adda308..9d5e250 100644
--- a/lib/service/ethereumservice.js
+++ b/lib/service/ethereumservice.js
@@ -1,18 +1,16 @@
'use strict'
const Service = require('./service')
-const EthProtocol = require('../net/protocol/ethprotocol')
-const LesProtocol = require('../net/protocol/lesprotocol')
const FlowControl = require('../net/protocol/flowcontrol')
const { Chain } = require('../blockchain')
-const { FastSynchronizer, LightSynchronizer } = require('../sync')
-const { EthHandler, LesHandler } = require('../handler')
const Common = require('ethereumjs-common')
const defaultOptions = {
- syncmode: 'light',
lightserv: false,
- common: new Common('mainnet')
+ common: new Common('mainnet'),
+ minPeers: 3,
+ timeout: 5000,
+ interval: 1000
}
/**
@@ -24,56 +22,25 @@ class EthereumService extends Service {
* Create new ETH service
* @param {Object} options constructor parameters
* @param {Server[]} options.servers servers to run service on
- * @param {string} [options.syncmode=light] synchronization mode ('fast' or 'light')
- * @param {boolean} [options.lightserv=false] serve LES requests
* @param {Chain} [options.chain] blockchain
+ * @param {LevelDB} [options.db=null] blockchain database
* @param {Common} [options.common] ethereum network name
- * @param {number} [options.interval] sync interval
+ * @param {number} [options.minPeers=3] number of peers needed before syncing
+ * @param {number} [options.maxPeers=25] maximum peers allowed
+ * @param {number} [options.timeout] protocol timeout
+ * @param {number} [options.interval] sync retry interval
* @param {Logger} [options.logger] logger instance
*/
constructor (options) {
+ options = { ...defaultOptions, ...options }
super(options)
- options = { ...defaultOptions, ...options, ...{ logger: this.logger } }
- this.syncmode = options.syncmode
- this.lightserv = options.lightserv
this.flow = new FlowControl(options)
this.chain = options.chain || new Chain(options)
- this.handlers = []
-
- if (this.syncmode === 'light') {
- this.logger.info('Light sync mode')
- this.synchronizer = new LightSynchronizer({
- logger: this.logger,
- pool: this.pool,
- chain: this.chain,
- flow: this.flow,
- interval: this.interval
- })
- } else if (this.syncmode === 'fast') {
- this.logger.info('Fast sync mode')
- this.synchronizer = new FastSynchronizer({
- logger: this.logger,
- pool: this.pool,
- chain: this.chain,
- interval: this.interval
- })
- this.handlers.push(new EthHandler({
- logger: this.logger,
- chain: this.chain,
- pool: this.pool
- }))
- if (this.lightserv) {
- this.handlers.push(new LesHandler({
- logger: this.logger,
- chain: this.chain,
- pool: this.pool,
- flow: this.flow
- }))
- }
- } else {
- throw new Error(`Unsupported syncmode: ${this.syncmode}`)
- }
+ this.minPeers = options.minPeers
+ this.interval = options.interval
+ this.timeout = options.timeout
+ this.synchronizer = null
}
/**
@@ -85,23 +52,6 @@ class EthereumService extends Service {
return 'eth'
}
- /**
- * Returns all protocols required by this service
- * @type {Protocol[]} required protocols
- */
- get protocols () {
- const protocols = []
- if (this.syncmode === 'light') {
- protocols.push(new LesProtocol({ chain: this.chain }))
- } else if (this.syncmode === 'fast') {
- protocols.push(new EthProtocol({ chain: this.chain }))
- if (this.lightserv) {
- protocols.push(new LesProtocol({ chain: this.chain, flow: this.flow }))
- }
- }
- return protocols
- }
-
/**
* Open eth service. Must be called before service is started
* @return {Promise}
@@ -111,7 +61,7 @@ class EthereumService extends Service {
return false
}
super.open()
- this.synchronizer.on('synchronized', (stats) => this.emit('synchronized', stats))
+ this.synchronizer.on('synchronized', () => this.emit('synchronized'))
this.synchronizer.on('error', error => this.emit('error', error))
await this.chain.open()
await this.synchronizer.open()
@@ -123,12 +73,11 @@ class EthereumService extends Service {
* @return {Promise}
*/
async start () {
- if (this.started) {
+ if (this.running) {
return false
}
- this.handlers.forEach(h => h.start())
await super.start()
- this.synchronizer.sync()
+ this.synchronizer.start()
}
/**
@@ -136,11 +85,10 @@ class EthereumService extends Service {
* @return {Promise}
*/
async stop () {
- if (!this.started) {
+ if (!this.running) {
return false
}
await this.synchronizer.stop()
- this.handlers.forEach(h => h.stop())
await super.stop()
}
}
diff --git a/lib/service/fastethereumservice.js b/lib/service/fastethereumservice.js
new file mode 100644
index 0000000..0051cab
--- /dev/null
+++ b/lib/service/fastethereumservice.js
@@ -0,0 +1,123 @@
+'use strict'
+
+const EthereumService = require('./ethereumservice')
+const FastSynchronizer = require('../sync/fastsync')
+const EthProtocol = require('../net/protocol/ethprotocol')
+const LesProtocol = require('../net/protocol/lesprotocol')
+
+const defaultOptions = {
+ lightserv: false
+}
+
+/**
+ * Ethereum service
+ * @memberof module:service
+ */
+class FastEthereumService extends EthereumService {
+ /**
+ * Create new ETH service
+ * @param {Object} options constructor parameters
+ * @param {Server[]} options.servers servers to run service on
+ * @param {boolean} [options.lightserv=false] serve LES requests
+ * @param {Chain} [options.chain] blockchain
+ * @param {Common} [options.common] ethereum network name
+ * @param {number} [options.minPeers=3] number of peers needed before syncing
+ * @param {number} [options.maxPeers=25] maximum peers allowed
+ * @param {number} [options.interval] sync retry interval
+ * @param {Logger} [options.logger] logger instance
+ */
+ constructor (options) {
+ super(options)
+ options = { ...defaultOptions, ...options }
+ this.lightserv = options.lightserv
+ this.init()
+ }
+
+ init () {
+ this.logger.info('Fast sync mode')
+ this.synchronizer = new FastSynchronizer({
+ logger: this.logger,
+ pool: this.pool,
+ chain: this.chain,
+ minPeers: this.minPeers,
+ interval: this.interval
+ })
+ }
+
+ /**
+ * Returns all protocols required by this service
+ * @type {Protocol[]} required protocols
+ */
+ get protocols () {
+ const protocols = [ new EthProtocol({
+ chain: this.chain,
+ timeout: this.timeout
+ }) ]
+ if (this.lightserv) {
+ protocols.push(new LesProtocol({
+ chain: this.chain,
+ flow: this.flow,
+ timeout: this.timeout
+ }))
+ }
+ return protocols
+ }
+
+ /**
+ * Handles incoming message from connected peer
+ * @param {Object} message message object
+ * @param {string} protocol protocol name
+ * @param {Peer} peer peer
+ * @return {Promise}
+ */
+ async handle (message, protocol, peer) {
+ if (protocol === 'eth') {
+ return this.handleEth(message, peer)
+ } else {
+ return this.handleLes(message, peer)
+ }
+ }
+
+ /**
+ * Handles incoming ETH message from connected peer
+ * @param {Object} message message object
+ * @param {Peer} peer peer
+ * @return {Promise}
+ */
+ async handleEth (message, peer) {
+ if (message.name === 'GetBlockHeaders') {
+ const { block, max, skip, reverse } = message.data
+ const headers = await this.chain.getHeaders(block, max, skip, reverse)
+ peer.eth.send('BlockHeaders', headers)
+ } else if (message.name === 'GetBlockBodies') {
+ const hashes = message.data
+ const blocks = await Promise.all(hashes.map(hash => this.chain.getBlock(hash)))
+ const bodies = blocks.map(block => block.raw.slice(1))
+ peer.eth.send('BlockBodies', bodies)
+ } else if (message.name === 'NewBlockHashes') {
+ this.synchronizer.announced(message.data, peer)
+ }
+ }
+
+ /**
+ * Handles incoming LES message from connected peer
+ * @param {Object} message message object
+ * @param {Peer} peer peer
+ * @return {Promise}
+ */
+ async handleLes (message, peer) {
+ if (message.name === 'GetBlockHeaders' && this.lightserv) {
+ const { reqId, block, max, skip, reverse } = message.data
+ const bv = this.flow.handleRequest(peer, message.name, max)
+ if (bv < 0) {
+ this.pool.ban(peer, 300000)
+ this.logger.debug(`Dropping peer for violating flow control ${peer}`)
+ } else {
+ const headers = await this.chain.getHeaders(block, max, skip, reverse)
+ peer.les.send('BlockHeaders', { reqId, bv, headers })
+ }
+ }
+ }
+}
+
+module.exports = FastEthereumService
diff --git a/lib/service/index.js b/lib/service/index.js
index 4d20869..6ecd2e4 100644
--- a/lib/service/index.js
+++ b/lib/service/index.js
@@ -6,3 +6,5 @@
exports.Service = require('./service')
exports.EthereumService = require('./ethereumservice')
+exports.FastEthereumService = require('./fastethereumservice')
+exports.LightEthereumService = require('./lightethereumservice')
diff --git a/lib/service/lightethereumservice.js b/lib/service/lightethereumservice.js
new file mode 100644
index 0000000..9c3c6c6
--- /dev/null
+++ b/lib/service/lightethereumservice.js
@@ -0,0 +1,59 @@
+'use strict'
+
+const EthereumService = require('./ethereumservice')
+const LightSynchronizer = require('../sync/lightsync')
+const LesProtocol = require('../net/protocol/lesprotocol')
+
+/**
+ * Ethereum service
+ * @memberof module:service
+ */
+class LightEthereumService extends EthereumService {
+ /**
+ * Create new ETH service
+ * @param {Object} options constructor parameters
+ * @param {Server[]} options.servers servers to run service on
+ * @param {Chain} [options.chain] blockchain
+ * @param {Common} [options.common] ethereum network name
+ * @param {number} [options.minPeers=3] number of peers needed before syncing
+ * @param {number} [options.maxPeers=25] maximum peers allowed
+ * @param {number} [options.interval] sync retry interval
+ * @param {Logger} [options.logger] logger instance
+ */
+ constructor (options) {
+ super(options)
+ this.init()
+ }
+
+ init () {
+ this.logger.info('Light sync mode')
+ this.synchronizer = new LightSynchronizer({
+ logger: this.logger,
+ pool: this.pool,
+ chain: this.chain,
+ minPeers: this.minPeers,
+ flow: this.flow,
+ interval: this.interval
+ })
+ }
+
+ /**
+ * Returns all protocols required by this service
+ * @type {Protocol[]} required protocols
+ */
+ get protocols () {
+ return [ new LesProtocol({ chain: this.chain, timeout: this.timeout }) ]
+ }
+
+ /**
+ * Handles incoming message from connected peer
+ * @param {Object} message message object
+ * @param {string} protocol protocol name
+ * @param {Peer} peer peer
+ * @return {Promise}
+ */
+ async handle (message, protocol, peer) {
+ }
+}
+
+module.exports = LightEthereumService
diff --git a/lib/service/service.js b/lib/service/service.js
index 36c40a6..032dab5 100644
--- a/lib/service/service.js
+++ b/lib/service/service.js
@@ -5,6 +5,7 @@ const PeerPool = require('../net/peerpool')
const { defaultLogger } = require('../logging')
const defaultOptions = {
+ maxPeers: 25,
logger: defaultLogger,
servers: []
}
@@ -18,6 +19,7 @@ class Service extends EventEmitter {
* Create new service and associated peer pool
* @param {Object} options constructor parameters
* @param {Server[]} [options.servers=[]] servers to run service on
+ * @param {number} [options.maxPeers=25] maximum peers allowed
* @param {Logger} [options.logger] logger instance
*/
constructor (options) {
@@ -26,11 +28,21 @@ class Service extends EventEmitter {
this.logger = options.logger
this.opened = false
- this.started = false
+ this.running = false
this.servers = options.servers
this.pool = new PeerPool({
logger: this.logger,
- servers: this.servers
+ servers: this.servers,
+ maxPeers: options.maxPeers
+ })
+ this.pool.on('message', async (message, protocol, peer) => {
+ if (this.running) {
+ try {
+ await this.handle(message, protocol, peer)
+ } catch (error) {
+ this.logger.debug(`Error handling message (${protocol}:${message.name}): ${error.message}`)
+ }
+ }
})
}
@@ -52,7 +64,7 @@ class Service extends EventEmitter {
}
/**
- * Open service. Must be called before service is started
+ * Open service. Must be called before service is running
* @return {Promise}
*/
async open () {
@@ -88,11 +100,11 @@ class Service extends EventEmitter {
* @return {Promise}
*/
async start () {
- if (this.started) {
+ if (this.running) {
return false
}
await Promise.all(this.servers.map(s => s.start()))
- this.started = true
+ this.running = true
this.logger.info(`Started ${this.name} service.`)
}
@@ -101,9 +113,19 @@ class Service extends EventEmitter {
* @return {Promise}
*/
async stop () {
- this.started = false
+ this.running = false
this.logger.info(`Stopped ${this.name} service.`)
}
+
+ /**
+ * Handles incoming request from connected peer
+ * @param {Object} message message object
+ * @param {string} protocol protocol name
+ * @param {Peer} peer peer
+ * @return {Promise}
+ */
+ async handle (message, protocol, peer) {
+ }
}
module.exports = Service
diff --git a/lib/sync/blockfetcher.js b/lib/sync/blockfetcher.js
deleted file mode 100644
index ce472b0..0000000
--- a/lib/sync/blockfetcher.js
+++ /dev/null
@@ -1,116 +0,0 @@
-'use strict'
-
-const Fetcher = require('./fetcher')
-const Block = require('ethereumjs-block')
-const { BlockPool } = require('../blockchain')
-const BN = require('ethereumjs-util').BN
-
-const defaultOptions = {
- maxPerRequest: 128
-}
-
-/**
- * Implements an eth/62 based block fetcher
- * @memberof module:sync
- */
-class BlockFetcher extends Fetcher {
- /**
- * Create new block fetcher
- * @param {Object} options constructor parameters
- * @param {PeerPool} options.pool peer pool
- * @param {Chain} options.chain blockchain
- * @param {number} [options.maxPerRequest=128] max items per request
- * @param {Logger} [options.logger] Logger instance
- */
- constructor (options) {
- super(options)
- options = {...defaultOptions, ...options}
- this.chain = options.chain
- this.blockPool = new BlockPool({
- logger: this.logger,
- chain: this.chain
- })
-
- this.maxPerRequest = options.maxPerRequest
- }
-
- /**
- * Open block fetcher. Must be called before fetcher is started
- * @return {Promise}
- */
- async open () {
- if (this.opened) {
- return false
- }
- await this.blockPool.open()
- return super.open()
- }
-
- /**
- * Prioritizes tasks based on first block number
- * @param {Object} taskOne
- * @param {Object} taskTwo
- * @return {boolean} true if taskOne has a lower first number than taskTwo
- */
- before (taskOne, taskTwo) {
- return taskOne.first.lt(taskTwo.first)
- }
-
- /**
- * Fetches blocks for the given task
- * @param {Object} task
- * @param {Peer} peer
- * @return {Promise} method must return
- */
- async fetch (task, peer) {
- let count = task.last.sub(task.first).addn(1)
- if (count.gtn(this.maxPerRequest)) {
- count = this.maxPerRequest
- } else {
- count = count.toNumber()
- }
- const headers = await peer.eth.getBlockHeaders({ block: task.first, max: count })
- const bodies = await peer.eth.getBlockBodies(headers.map(h => h.hash()))
- const blocks = bodies.map((body, i) => new Block([headers[i]].concat(body)))
- return { blocks }
- }
-
- /**
- * Process fetch reply
- * @param {Object} entry entry object
- * @param {Object} entry.task fetch task
- * @param {Peer} entry.peer peer that handled task
- * @param {number} entry.time time task was generated
- * @param {Object} reply reply data
- * @emits headers
- */
- process (entry, reply) {
- if (!this.running) {
- return
- }
-
- const { blocks } = reply
- const { task } = entry
- if (!blocks || blocks.length === 0) {
- this.add(task)
- } else {
- const last = new BN(blocks[blocks.length - 1].header.number)
- if (last.lt(task.last)) {
- this.add({ first: last.addn(1), last: task.last })
- }
- this.blockPool.add(blocks).catch(error => {
- this.logger.error(`Block fetch error, trying again: ${error.stack}`)
- this.add({
- first: new BN(blocks[0].header.number),
- last: new BN(blocks[blocks.length - 1].header.number)
- })
- })
- }
- }
-
- fetchable (peer) {
- return peer.eth && !peer.inbound
- }
-}
-
-module.exports = BlockFetcher
diff --git a/lib/sync/fastsync.js b/lib/sync/fastsync.js
index 3e927fd..4c5346e 100644
--- a/lib/sync/fastsync.js
+++ b/lib/sync/fastsync.js
@@ -1,39 +1,15 @@
'use strict'
const Synchronizer = require('./sync')
-const BlockFetcher = require('./blockfetcher')
+const { BlockFetcher } = require('./fetcher')
const BN = require('ethereumjs-util').BN
+const { short } = require('../util')
/**
* Implements an ethereum fast sync synchronizer
* @memberof module:sync
*/
class FastSynchronizer extends Synchronizer {
- /**
- * Create new node
- * @param {Object} options constructor parameters
- * @param {PeerPool} options.pool peer pool
- * @param {Chain} options.chain blockchain
- * @param {number} [options.interval] refresh interval
- * @param {Logger} [options.logger] Logger instance
- */
- constructor (options) {
- super(options)
- this.blockFetcher = new BlockFetcher({
- pool: this.pool,
- chain: this.chain,
- logger: this.logger
- })
- this.init()
- }
-
- init () {
- this.pool.on('message:eth', (message, peer) => this.handle(message, peer))
- this.blockFetcher.on('error', (error, task, peer) => {
- this.logger.debug(`Error processing task ${JSON.stringify(task)} with peer ${peer}: ${error.stack}`)
- })
- }
-
/**
* Returns synchronizer type
* @return {string} type
@@ -43,107 +19,104 @@ class FastSynchronizer extends Synchronizer {
}
/**
- * Returns true if peer can be used to fetch blocks
+ * Returns true if peer can be used for syncing
* @return {boolean}
*/
- fetchable (peer) {
- return peer.eth && !peer.inbound
+ syncable (peer) {
+ return peer.eth
}
/**
- * Request canonical chain height from peer. Returns a promise that resolves
- * to the peer's height once it responds with its latest block header.
- * @param {Peer} peer
- * @return {Promise}
+ * Finds the best peer to sync with. We will synchronize to this peer's
+ * blockchain. Returns null if no valid peer is found
+ * @param {number} min minimum numbers of peers to search
+ * @return {Peer}
*/
- async height (peer) {
- const headers = await peer.eth.getBlockHeaders({block: peer.eth.status.bestHash, max: 1})
- return new BN(headers[0].number)
+ best () {
+ let best
+ const peers = this.pool.peers.filter(this.syncable.bind(this))
+ if (peers.length < this.minPeers && !this.forceSync) return
+ for (let peer of peers) {
+ const td = peer.eth.status.td
+ if ((!best && td.gte(this.chain.blocks.td)) ||
+ (best && best.eth.status.td.lt(td))) {
+ best = peer
+ }
+ }
+ return best
}
/**
- * Find an origin peer that contains the highest total difficulty. We will
- * synchronize to this peer's blockchain. Returns a promise that resolves once
- * an origin peer is found.
- * @return {Promise} [description]
+ * Get latest header of peer
+ * @return {Promise} Resolves with header
*/
- async origin () {
- let best
- let height
- while (!height && this.syncing) {
- await this.wait()
- const peers = this.pool.peers.filter(this.fetchable.bind(this))
- if (!peers.length) {
- continue
- }
- for (let peer of peers) {
- const td = peer.eth.status.td
- if ((!best && td.gte(this.chain.blocks.td)) ||
- (best && best.eth.status.td.lt(td))) {
- best = peer
- }
- }
- try {
- if (best) {
- height = await this.height(best)
- }
- } catch (error) {
- this.pool.ban(best)
- this.logger.debug(`Error getting peer height: ${best} ${error.stack}`)
- }
- }
- return [best, height]
+ async latest (peer) {
+ const headers = await peer.eth.getBlockHeaders({
+ block: peer.eth.status.bestHash, max: 1
+ })
+ return headers[0]
}
/**
- * Fetch all headers from current height up to specified number (last). Returns
- * a promise that resolves once all headers are downloaded.
- * @param {BN} [last] number of last block header to download. If last is not
- * specified, the best height will be used from existing peers.
- * @return {Promise} Resolves with count of number of headers fetched
+ * Sync all blocks and state from peer starting from current height.
+ * @param {Peer} peer remote peer to sync with
+ * @return {Promise} Resolves when sync completed
*/
- async fetch (last) {
- if (!last) {
- const [ origin, height ] = await this.origin()
- if (!origin || !height) {
- return 0
- }
- this.logger.info(`Using origin peer: ${origin.toString(true)} height=${height.toString(10)}`)
- last = height
- }
+ async syncWithPeer (peer) {
+ if (!peer) return false
+ const latest = await this.latest(peer)
+ const height = new BN(latest.number)
const first = this.chain.blocks.height.addn(1)
+ const count = height.sub(first).addn(1)
+ if (count.lten(0)) return false
- if (first.gt(last)) {
- return 0
- }
+ this.logger.debug(`Syncing with peer: ${peer.toString(true)} height=${height.toString(10)}`)
+
+ this.blockFetcher = new BlockFetcher({
+ pool: this.pool,
+ chain: this.chain,
+ logger: this.logger,
+ interval: this.interval,
+ first,
+ count
+ })
+ this.blockFetcher
+ .on('error', (error) => {
+ this.emit('error', error)
+ })
+ .on('fetched', blocks => {
+ const first = new BN(blocks[0].header.number)
+ const hash = short(blocks[0].hash())
+ this.logger.info(`Imported blocks count=${blocks.length} number=${first.toString(10)} hash=${hash} peers=${this.pool.size}`)
+ })
+ await this.blockFetcher.fetch()
+ delete this.blockFetcher
+ return true
- await this.blockFetcher.open()
- this.blockFetcher.add({ first, last })
- await this.blockFetcher.start()
- return last.sub(first).toNumber() + 1
+ // TO DO: Fetch state trie as well
}
/**
- * Handler for incoming requests from connected peers
- * @param {Object} message message object
- * @param {Peer} peer peer
- * @return {Promise}
+ * Fetch all blocks from current height up to highest found amongst peers and
+ * fetch entire recent state trie
+ * @return {Promise} Resolves with true if sync successful
*/
- async handle (message, peer) {
- try {
- if (!this.chain.opened) {
- await this.chain.open()
- }
+ async sync () {
+ const peer = this.best()
+ return this.syncWithPeer(peer)
+ }
- if (message.name === 'NewBlockHashes') {
- const pairs = message.data
- if (pairs.length) {
- const [, height] = pairs[pairs.length - 1]
- this.sync(height)
- }
- }
- } catch (error) {
- this.emit('error', error)
+ /**
+ * Chain was updated
+ * @param {Object[]} announcements new block hash announcements
+ * @param {Peer} peer peer
+ * @return {Promise}
+ */
+ async announced (announcements, peer) {
+ if (announcements.length) {
+ const [hash, height] = announcements[announcements.length - 1]
+ this.logger.debug(`New height: number=${height.toString(10)} hash=${short(hash)}`)
+ // TO DO: download new blocks
}
}
@@ -156,8 +129,8 @@ class FastSynchronizer extends Synchronizer {
await this.pool.open()
const number = this.chain.blocks.height.toString(10)
const td = this.chain.blocks.td.toString(10)
- const hash = this.chain.blocks.latest.hash().toString('hex').slice(0, 8) + '...'
- this.logger.info(`Latest local block: number=${number} td=${td} hash=${hash}`)
+ const hash = this.chain.blocks.latest.hash()
+ this.logger.info(`Latest local block: number=${number} td=${td} hash=${short(hash)}`)
}
/**
@@ -165,11 +138,14 @@ class FastSynchronizer extends Synchronizer {
* @return {Promise}
*/
async stop () {
- if (!this.syncing) {
+ if (!this.running) {
return false
}
- super.stop()
- await this.blockFetcher.stop()
+ if (this.blockFetcher) {
+ this.blockFetcher.destroy()
+ delete this.blockFetcher
+ }
+ await super.stop()
}
}
diff --git a/lib/sync/fetcher.js b/lib/sync/fetcher.js
deleted file mode 100644
index 53aafe5..0000000
--- a/lib/sync/fetcher.js
+++ /dev/null
@@ -1,262 +0,0 @@
-'use strict'
-
-const EventEmitter = require('events')
-const Heap = require('qheap')
-const { defaultLogger } = require('../logging')
-
-const defaultOptions = {
- logger: defaultLogger,
- timeout: 5000,
- interval: 1000
-}
-
-/**
- * Base class for fetchers that retrieve various data from peers. Subclasses must
- * override the before(), fetch() and process() methods. Tasks can be arbitrary
- * objects whose structure is defined by subclasses. A priority queue is used to
- * ensure most important tasks are processed first based on the before() function.
- * @memberof module:sync
- */
-class Fetcher extends EventEmitter {
- /**
- * Create new fetcher
- * @param {Object} options constructor parameters
- * @param {PeerPool} options.pool peer pool
- * @param {number} [options.timeout] fetch task timeout
- * @param {number} [options.interval] retry interval
- * @param {Logger} [options.logger] Logger instance
- */
- constructor (options) {
- super()
- options = {...defaultOptions, ...options}
-
- this.pool = options.pool
- this.logger = options.logger
- this.timeout = options.timeout
- this.interval = options.interval
- this.active = new Map()
- this.heap = new Heap({
- comparBefore: (a, b) => this.before(a, b)
- })
- this.pool.on('removed', peer => this.failure(peer.id))
- this.opened = false
- this.running = false
- }
-
- /**
- * Add new task to fetcher
- * @param {Object} task
- */
- add (task) {
- this.heap.insert(task)
- }
-
- /**
- * handle successful task completion
- * @private
- * @param {string} peerId peer id
- */
- success (peerId) {
- const entry = this.active.get(peerId)
- if (entry) {
- const { peer } = entry
- peer.idle = true
- this.active.delete(peer.id)
- this.next()
- }
- }
-
- /**
- * handle failed task completion
- * @private
- * @param {string} peerId peer id
- * @param {Error} [error] error
- */
- failure (peerId, error) {
- const entry = this.active.get(peerId)
- if (entry) {
- const { task, peer } = entry
- peer.idle = true
- this.active.delete(peerId)
- this.add(task)
- if (error) {
- this.error(error, task, peer)
- }
- this.next()
- }
- }
-
- /**
- * Process next task
- */
- next () {
- const task = this.heap.peek()
- if (!task) {
- return false
- }
- const peer = this.pool.idle(this.fetchable.bind(this))
- if (peer) {
- peer.idle = false
- this.heap.remove()
- this.active.set(peer.id, { time: Date.now(), task: task, peer: peer })
- this.fetch(task, peer)
- .then(reply => this.handle(reply, peer))
- .catch(error => this.failure(peer.id, error))
- return task
- } else {
- this.logger.debug(`No idle peers found. Waiting...`)
- }
- }
-
- /**
- * Handler for responses from peers. Finds and processes the corresponding
- * task using the process() method, and resets peer to an idle state.
- * @param {Object} reply
- * @param {Peer} peer
- */
- handle (reply, peer) {
- const entry = this.active.get(peer.id)
- if (entry) {
- if (reply) {
- try {
- this.process(entry, reply)
- this.success(peer.id)
- } catch (error) {
- this.failure(peer.id, error)
- }
- } else {
- // if fetch returns a falsy reply, then re-add task
- this.failure(peer.id)
- }
- } else {
- peer.idle = true
- this.logger.warn(`Task missing for peer ${peer}`)
- }
- }
-
- /**
- * Handle error
- * @param {Error} error error object
- * @param {Object} task task
- * @param {Peer} peer peer
- */
- error (error, task, peer) {
- this.emit('error', error, task, peer)
- }
-
- /**
- * Expires all tasks that have timed out. Peers that take too long to respond
- * will be banned for 5 minutes. Timeout out tasks will be re-inserted into the
- * queue.
- */
- expire () {
- const now = Date.now()
- for (let [id, entry] of this.active) {
- if (now - entry.time > this.timeout) {
- if (this.pool.contains(entry.peer)) {
- this.logger.debug(`Task timed out for peer (banning) ${JSON.stringify(entry.task)} ${entry.peer}`)
- this.pool.ban(entry.peer, 300000)
- } else {
- this.logger.debug(`Peer disconnected while performing task ${JSON.stringify(entry.task)} ${entry.peer}`)
- }
- this.active.delete(id)
- this.add(entry.task)
- }
- }
- }
-
- /**
- * Open fetcher. Must be called before fetcher is started
- * @return {Promise}
- */
- async open () {
- if (this.opened) {
- return false
- }
- this.opened = true
- }
-
- /**
- * Run the fetcher. Returns a promise that resolves once all tasks are completed.
- * @return {Promise}
- */
- async start () {
- if (this.running) {
- return false
- }
- this.running = true
- while (this.running) {
- this.expire()
- if (!this.next()) {
- if (this.heap.length === 0 && this.active.size === 0) {
- this.running = false
- } else {
- await this.wait()
- }
- }
- }
- }
-
- /**
- * Stop the fetcher. Returns a promise that resolves once it is stopped.
- * @return {Promise}
- */
- async stop () {
- if (!this.running) {
- return false
- }
- while (this.heap.remove()) {}
- this.active.clear()
- while (this.running) {
- await this.wait()
- }
- }
-
- /**
- * True if taskOne has a higher priority than taskTwo
- * @param {Object} taskOne
- * @param {Object} taskTwo
- * @return {boolean}
- */
- before (taskOne, taskTwo) {
- throw new Error('Unimplemented')
- }
-
- /**
- * True if peer can process fetch tasks
- * @param {Peer} peer candidate peer
- * @return {boolean}
- */
- fetchable (peer) {
- return true
- }
-
- /**
- * Sends a protocol command to peer for the specified task. Must return a
- * promise that resolves with the decoded response to the commad.
- * @param {Object} task
- * @param {Peer} peer
- * @return {Promise}
- */
- fetch (task, peer) {
- throw new Error('Unimplemented')
- }
-
- /**
- * Process the reply for the given fetch queue entry
- * @param {Object} entry entry object
- * @param {Object} entry.task fetch task
- * @param {Peer} entry.peer peer that handled task
- * @param {number} entry.time time task was generated
- * @param {Object} reply reply data
- */
- process (entry, reply) {
- throw new Error('Unimplemented')
- }
-
- async wait (delay) {
- await new Promise(resolve => setTimeout(resolve, delay || this.interval))
- }
-}
-
-module.exports = Fetcher
diff --git a/lib/sync/fetcher/blockfetcher.js b/lib/sync/fetcher/blockfetcher.js
new file mode 100644
index 0000000..eb0a91f
--- /dev/null
+++ b/lib/sync/fetcher/blockfetcher.js
@@ -0,0 +1,102 @@
+'use strict'
+
+const Fetcher = require('./fetcher')
+const Block = require('ethereumjs-block')
+const BN = require('bn.js')
+
+const defaultOptions = {
+ maxPerRequest: 128
+}
+
+/**
+ * Implements an eth/62 based block fetcher
+ * @memberof module:sync/fetcher
+ */
+class BlockFetcher extends Fetcher {
+ /**
+ * Create new block fetcher
+ * @param {Object} options constructor parameters
+ * @param {PeerPool} options.pool peer pool
+ * @param {Chain} options.chain blockchain
+ * @param {BN} options.first block number to start fetching from
+ * @param {BN} options.count how many blocks to fetch
+ * @param {number} [options.timeout] fetch task timeout
+ * @param {number} [options.banTime] how long to ban misbehaving peers
+ * @param {number} [options.interval] retry interval
+ * @param {number} [options.maxPerRequest=128] max items per request
+ * @param {Logger} [options.logger] Logger instance
+ */
+ constructor (options) {
+ super(options)
+ options = {...defaultOptions, ...options}
+ this.maxPerRequest = options.maxPerRequest
+ this.chain = options.chain
+ this.first = options.first
+ this.count = BN.isBN(options.count) ? options.count : new BN(options.count)
+ }
+
+ /**
+ * Generate list of tasks to fetch
+ * @return {Object[]} tasks
+ */
+ tasks () {
+ let { first, count } = this
+ const max = this.maxPerRequest
+ const tasks = []
+ while (count.gten(max)) {
+ tasks.push({ first: first.clone(), count: max })
+ first.iaddn(max)
+ count.isubn(max)
+ }
+ if (count.gtn(0)) {
+ tasks.push({ first: first.clone(), count: count.toNumber() })
+ }
+ return tasks
+ }
+
+ /**
+ * Requests blocks associated with this job
+ * @param {Object} job
+ * @return {Promise}
+ */
+ async request (job) {
+ const { task, peer } = job
+ let { first, count } = task
+ const headers = await peer.eth.getBlockHeaders({ block: first, max: count })
+ const bodies = await peer.eth.getBlockBodies(headers.map(h => h.hash()))
+ const blocks = bodies.map((body, i) => new Block([headers[i]].concat(body)))
+ return { blocks }
+ }
+
+ /**
+ * Process fetch result
+ * @param {Object} job fetch job
+ * @param {Object} result fetch result
+ * @return {*} results of processing job or undefined if job not finished
+ */
+ process (job, result) {
+ if (result.blocks && result.blocks.length === job.task.count) {
+ return result.blocks
+ }
+ }
+
+ /**
+ * Store fetch result. Resolves once store operation is complete.
+ * @param {Block[]} blocks fetch result
+ * @return {Promise}
+ */
+ async store (blocks) {
+ await this.chain.putBlocks(blocks)
+ }
+
+ /**
+ * Returns a peer that can process the given job
+ * @param {Object} job job
+ * @return {Peer}
+ */
+ peer (job) {
+ return this.pool.idle(p => p.eth)
+ }
+}
+
+module.exports = BlockFetcher
diff --git a/lib/sync/fetcher/fetcher.js b/lib/sync/fetcher/fetcher.js
new file mode 100644
index 0000000..b0cf2bc
--- /dev/null
+++ b/lib/sync/fetcher/fetcher.js
@@ -0,0 +1,307 @@
+'use strict'
+
+const { Readable, Writable } = require('stream')
+const Heap = require('qheap')
+const { defaultLogger } = require('../../logging')
+
+const defaultOptions = {
+ logger: defaultLogger,
+ timeout: 5000,
+ interval: 1000,
+ banTime: 60000,
+ maxQueue: 16,
+ maxPerRequest: 128
+}
+
+/**
+ * Base class for fetchers that retrieve various data from peers. Subclasses must
+ * request() and process() methods. Tasks can be arbitrary objects whose structure
+ * is defined by subclasses. A priority queue is used to ensure tasks are fetched
+ * inorder.
+ * @memberof module:sync/fetcher
+ */
+class Fetcher extends Readable {
+ /**
+ * Create new fetcher
+ * @param {Object} options constructor parameters
+ * @param {PeerPool} options.pool peer pool
+ * @param {number} [options.timeout] fetch task timeout
+ * @param {number} [options.banTime] how long to ban misbehaving peers
+ * @param {number} [options.maxQueue] max write queue size
+ * @param {number} [options.maxPerRequest=128] max items per request
+ * @param {number} [options.interval] retry interval
+ * @param {Logger} [options.logger] Logger instance
+ */
+ constructor (options) {
+ super({...options, objectMode: true})
+ options = {...defaultOptions, ...options}
+
+ this.pool = options.pool
+ this.logger = options.logger
+ this.timeout = options.timeout
+ this.interval = options.interval
+ this.banTime = options.banTime
+ this.maxQueue = options.maxQueue
+ this.maxPerRequest = options.maxPerRequest
+ this.in = new Heap({ comparBefore: (a, b) => a.index < b.index })
+ this.out = new Heap({ comparBefore: (a, b) => a.index < b.index })
+ this.total = 0
+ this.processed = 0
+ this.running = false
+ this.reading = false
+ }
+
+ /**
+ * Generate list of tasks to fetch
+ * @return {Object[]} tasks
+ */
+ tasks () {
+ return []
+ }
+
+ /**
+ * Enqueue job
+ * @param {Object} job
+ */
+ enqueue (job) {
+ if (this.running) {
+ this.in.insert({
+ ...job,
+ time: Date.now(),
+ state: 'idle',
+ result: null
+ })
+ }
+ }
+
+ /**
+ * Dequeue all done tasks that completed in order
+ */
+ dequeue () {
+ for (let f = this.out.peek(); f && f.index === this.processed;) {
+ this.processed++
+ const { result } = this.out.remove()
+ if (!this.push(result)) {
+ return
+ }
+ f = this.out.peek()
+ }
+ }
+
+ /**
+ * Implements Readable._read() by pushing completed tasks to the read queue
+ */
+ _read () {
+ this.dequeue()
+ }
+
+ /**
+ * handle successful job completion
+ * @private
+ * @param {Object} job successful job
+ * @param {Object} result job result
+ */
+ success (job, result) {
+ if (job.state !== 'active') return
+ if (result === undefined) {
+ this.enqueue(job)
+ this.wait().then(() => {
+ job.peer.idle = true
+ })
+ } else {
+ job.peer.idle = true
+ job.result = this.process(job, result)
+ if (job.result) {
+ this.out.insert(job)
+ this.dequeue()
+ } else {
+ this.enqueue(job)
+ }
+ }
+ this.next()
+ }
+
+ /**
+ * handle failed job completion
+ * @private
+ * @param {Object} job failed job
+ * @param {Error} [error] error
+ */
+ failure (job, error) {
+ if (job.state !== 'active') return
+ job.peer.idle = true
+ this.pool.ban(job.peer, this.banTime)
+ this.enqueue(job)
+ if (error) {
+ this.error(error, job)
+ }
+ this.next()
+ }
+
+ /**
+ * Process next task
+ */
+ next () {
+ const job = this.in.peek()
+ if (
+ !job ||
+ this._readableState.length > this.maxQueue ||
+ job.index > this.processed + this.maxQueue ||
+ this.processed === this.total
+ ) {
+ return false
+ }
+ const peer = this.peer()
+ if (peer) {
+ peer.idle = false
+ this.in.remove()
+ job.peer = peer
+ job.state = 'active'
+ const timeout = setTimeout(() => {
+ this.expire(job)
+ }, this.timeout)
+ this.request(job, peer)
+ .then(result => this.success(job, result))
+ .catch(error => this.failure(job, error))
+ .finally(() => clearTimeout(timeout))
+ return job
+ }
+ }
+
+ /**
+ * Handle error
+ * @param {Error} error error object
+ * @param {Object} task task
+ * @param {Peer} peer peer
+ */
+ error (error, job) {
+ if (this.running) {
+ this.emit('error', error, job && job.task, job && job.peer)
+ }
+ }
+
+ /**
+ * Setup writer pipe and start writing fetch results. A pipe is used in order
+ * to support backpressure from storing results.
+ */
+ write () {
+ const _write = async (result, encoding, cb) => {
+ try {
+ await this.store(result)
+ this.emit('fetched', result)
+ cb()
+ } catch (error) {
+ cb(error)
+ }
+ }
+ const writer = new Writable({
+ objectMode: true,
+ write: _write,
+ writev: (many, cb) => _write([].concat(...many.map(x => x.chunk)), null, cb)
+ })
+ this
+ .on('close', () => {
+ this.running = false
+ writer.destroy()
+ })
+ .pipe(writer)
+ .on('finish', () => {
+ this.running = false
+ })
+ .on('error', (error) => {
+ this.error(error)
+ this.running = false
+ writer.destroy()
+ })
+ }
+
+ /**
+ * Run the fetcher. Returns a promise that resolves once all tasks are completed.
+ * @return {Promise}
+ */
+ async fetch () {
+ if (this.running) {
+ return false
+ }
+ this.write()
+ this.tasks().forEach(task => {
+ const job = {
+ task,
+ time: Date.now(),
+ index: this.total++,
+ result: null,
+ state: 'idle',
+ peer: null
+ }
+ this.in.insert(job)
+ })
+ this.running = true
+ while (this.running) {
+ if (!this.next()) {
+ if (this.processed === this.total) {
+ this.push(null)
+ }
+ await this.wait()
+ }
+ }
+ this.destroy()
+ }
+
+ /**
+ * Returns a peer that can process the given job
+ * @param {Object} job job
+ * @return {Peer}
+ */
+ peer (job) {
+ return this.pool.idle()
+ }
+
+ /**
+ * Request results from peer for the given job. Resolves with the raw result.
+ * @param {Object} job
+ * @return {Promise}
+ */
+ request (job) {
+ throw new Error('Unimplemented')
+ }
+
+ /**
+ * Process the reply for the given job
+ * @param {Object} job fetch job
+ * @param {Peer} peer peer that handled task
+ * @param {Object} reply reply data
+ */
+ process (job, result) {
+ throw new Error('Unimplemented')
+ }
+
+ /**
+ * Expire job that has timed out and ban associated peer. Timed out tasks will
+ * be re-inserted into the queue.
+ */
+ expire (job) {
+ job.state = 'expired'
+ if (this.pool.contains(job.peer)) {
+ this.logger.debug(`Task timed out for peer (banning) ${JSON.stringify(job.task)} ${job.peer}`)
+ this.pool.ban(job.peer, 300000)
+ } else {
+ this.logger.debug(`Peer disconnected while performing task ${JSON.stringify(job.task)} ${job.peer}`)
+ }
+ this.enqueue(job)
+ }
+
+ /**
+ * Store fetch result. Resolves once store operation is complete.
+ * @param {Object} result fetch result
+ * @return {Promise}
+ */
+ async store (result) {
+ throw new Error('Unimplemented')
+ }
+
+ async wait (delay) {
+ await new Promise(resolve => setTimeout(resolve, delay || this.interval))
+ }
+}
+
+module.exports = Fetcher
diff --git a/lib/sync/fetcher/headerfetcher.js b/lib/sync/fetcher/headerfetcher.js
new file mode 100644
index 0000000..ca50719
--- /dev/null
+++ b/lib/sync/fetcher/headerfetcher.js
@@ -0,0 +1,79 @@
+'use strict'
+
+const BlockFetcher = require('./blockfetcher')
+
+const defaultOptions = {
+ maxPerRequest: 192
+}
+
+/**
+ * Implements an les/1 based header fetcher
+ * @memberof module:sync/fetcher
+ */
+class HeaderFetcher extends BlockFetcher {
+ /**
+ * Create new header fetcher
+ * @param {Object} options constructor parameters
+ * @param {PeerPool} options.pool peer pool
+ * @param {BN} options.first header number to start fetching from
+ * @param {BN} options.count how many headers to fetch
+ * @param {FlowControl} options.flow flow control manager
+ * @param {number} [options.timeout] fetch task timeout
+ * @param {number} [options.banTime] how long to ban misbehaving peers
+ * @param {number} [options.interval] retry interval
+ * @param {number} [options.maxPerRequest=192] max items per request
+ * @param {Logger} [options.logger] Logger instance
+ */
+ constructor (options) {
+ super(options)
+ options = {...defaultOptions, ...options}
+ this.flow = options.flow
+ }
+
+ /**
+ * Requests block headers for the given task
+ * @param {Object} job
+ * @return {Promise}
+ */
+ async request (job) {
+ const { task, peer } = job
+ if (this.flow.maxRequestCount(peer, 'GetBlockHeaders') < this.maxPerRequest) {
+ // we reached our request limit. try with a different peer.
+ return false
+ }
+ return peer.les.getBlockHeaders({ block: task.first, max: task.count })
+ }
+
+ /**
+ * Process fetch result
+ * @param {Object} job fetch job
+ * @param {Object} result fetch result
+ * @return {*} results of processing job or undefined if job not finished
+ */
+ process (job, result) {
+ this.flow.handleReply(job.peer, result.bv)
+ if (result.headers && result.headers.length === job.task.count) {
+ return result.headers
+ }
+ }
+
+ /**
+ * Store fetch result. Resolves once store operation is complete.
+ * @param {Header[]} headers fetch result
+ * @return {Promise}
+ */
+ async store (headers) {
+ await this.chain.putHeaders(headers)
+ }
+
+ /**
+ * Returns a peer that can process the given job
+ * @param {Object} job job
+ * @return {Peer}
+ */
+ peer (job) {
+ return this.pool.idle(p => p.les && p.les.status.serveHeaders)
+ }
+}
+
+module.exports = HeaderFetcher
diff --git a/lib/sync/fetcher/index.js b/lib/sync/fetcher/index.js
new file mode 100644
index 0000000..a68c37b
--- /dev/null
+++ b/lib/sync/fetcher/index.js
@@ -0,0 +1,9 @@
+'use strict'
+
+/**
+ * @module sync/fetcher
+ */
+
+exports.Fetcher = require('./fetcher')
+exports.BlockFetcher = require('./blockfetcher')
+exports.HeaderFetcher = require('./headerfetcher')
diff --git a/lib/sync/headerfetcher.js b/lib/sync/headerfetcher.js
deleted file mode 100644
index 578bd75..0000000
--- a/lib/sync/headerfetcher.js
+++ /dev/null
@@ -1,122 +0,0 @@
-'use strict'
-
-const Fetcher = require('./fetcher')
-const { HeaderPool } = require('../blockchain')
-const BN = require('ethereumjs-util').BN
-
-const defaultOptions = {
- maxPerRequest: 192
-}
-
-/**
- * Implements an les/1 based header fetcher
- * @memberof module:sync
- */
-class HeaderFetcher extends Fetcher {
- /**
- * Create new header fetcher
- * @param {Object} options constructor parameters
- * @param {PeerPool} options.pool peer pool
- * @param {FlowControl} options.flow flow control manager
- * @param {Chain} options.chain blockchain
- * @param {number} [options.maxPerRequest=192] max items per request
- * @param {Logger} [options.logger] Logger instance
- */
- constructor (options) {
- super(options)
- options = {...defaultOptions, ...options}
- this.flow = options.flow
- this.chain = options.chain
- this.headerPool = new HeaderPool({
- logger: this.logger,
- chain: this.chain
- })
- this.maxPerRequest = options.maxPerRequest
- }
-
- /**
- * Open header fetcher. Must be called before fetcher is started
- * @return {Promise}
- */
- async open () {
- if (this.opened) {
- return false
- }
- await this.headerPool.open()
- return super.open()
- }
-
- /**
- * Prioritizes tasks based on first block number
- * @param {Object} taskOne
- * @param {Object} taskTwo
- * @return {boolean} true if taskOne has a lower first number than taskTwo
- */
- before (taskOne, taskTwo) {
- return taskOne.first.lt(taskTwo.first)
- }
-
- /**
- * Fetches block headers for the given task
- * @param {Object} task
- * @param {Peer} peer
- * @return {Promise} method must return
- */
- async fetch (task, peer) {
- const maxCount = Math.min(
- this.flow.maxRequestCount(peer, 'GetBlockHeaders'), this.maxPerRequest
- )
- if (maxCount === 0) {
- // we reached our request limit
- await this.wait()
- return false
- }
- let count = task.last.sub(task.first).addn(1)
- if (count.gtn(maxCount)) {
- count = maxCount
- } else {
- count = count.toNumber()
- }
- return peer.les.getBlockHeaders({ block: task.first, max: count })
- }
-
- /**
- * Process the getBlockHeaders reply
- * @param {Object} entry entry object
- * @param {Object} entry.task fetch task
- * @param {Peer} entry.peer peer that handled task
- * @param {number} entry.time time task was generated
- * @param {Object} reply reply data
- * @emits headers
- */
- process (entry, reply) {
- if (!this.running) {
- return
- }
-
- const { bv, headers } = reply
- const { task, peer } = entry
- if (!headers || headers.length === 0) {
- this.add(task)
- } else {
- const last = new BN(headers[headers.length - 1].number)
- if (last.lt(task.last)) {
- this.add({ first: last.addn(1), last: task.last })
- }
- this.flow.handleReply(peer, bv)
- this.headerPool.add(headers).catch(error => {
- this.logger.error(`Header fetch error, trying again: ${error.stack}`)
- this.add({
- first: new BN(headers[0].number),
- last: new BN(headers[headers.length - 1].number)
- })
- })
- }
- }
-
- fetchable (peer) {
- return peer.les && peer.les.status.serveHeaders && !peer.inbound
- }
-}
-
-module.exports = HeaderFetcher
diff --git a/lib/sync/lightsync.js b/lib/sync/lightsync.js
index 3c1cc3c..b53d86c 100644
--- a/lib/sync/lightsync.js
+++ b/lib/sync/lightsync.js
@@ -1,7 +1,9 @@
'use strict'
const Synchronizer = require('./sync')
-const HeaderFetcher = require('./headerfetcher')
+const { HeaderFetcher } = require('./fetcher')
+const BN = require('ethereumjs-util').BN
+const { short } = require('../util')
/**
* Implements an ethereum light sync synchronizer
@@ -9,130 +11,84 @@ const HeaderFetcher = require('./headerfetcher')
*/
class LightSynchronizer extends Synchronizer {
/**
- * Create new node
- * @param {Object} options constructor parameters
- * @param {PeerPool} options.pool peer pool
- * @param {Chain} options.chain blockchain
- * @param {FlowControl} options.flow flow control manager
- * @param {number} [options.interval] refresh interval
- * @param {Logger} [options.logger] Logger instance
+ * Returns synchronizer type
+ * @return {string} type
*/
- constructor (options) {
- super(options)
- this.flow = options.flow
- this.headerFetcher = new HeaderFetcher({
- pool: this.pool,
- flow: this.flow,
- chain: this.chain,
- logger: this.logger
- })
- this.init()
- }
-
- init () {
- this.pool.on('message:les', (message, peer) => this.handle(message, peer))
- this.headerFetcher.on('error', (error, task, peer) => {
- this.logger.debug(`Error processing task ${JSON.stringify(task)} with peer ${peer}: ${error.stack}`)
- })
+ get type () {
+ return 'light'
}
/**
- * Returns true if peer can be used to fetch headers
+ * Returns true if peer can be used for syncing
* @return {boolean}
*/
- fetchable (peer) {
- return peer.les && peer.les.status.serveHeaders && !peer.inbound
+ syncable (peer) {
+ return peer.les && peer.les.status.serveHeaders
}
/**
- * Returns synchronizer type
- * @return {string} type
+ * Finds the best peer to sync with. We will synchronize to this peer's
+ * blockchain. Returns null if no valid peer is found
+ * @return {Peer}
*/
- get type () {
- return 'light'
- }
-
- /**
- * Find an origin peer that contains the highest total difficulty. We will
- * synchronize to this peer's blockchain. Returns a promise that resolves once
- * an origin peer is found.
- * @return {Promise} Resolves with [ origin peer, height ]
- */
- async origin () {
+ best () {
let best
- let height
- while (!height && this.syncing) {
- await this.wait()
- const peers = this.pool.peers.filter(this.fetchable.bind(this))
- if (!peers.length) {
- continue
- }
- for (let peer of this.pool.peers) {
- const td = peer.les.status.headTd
- if ((!best && td.gte(this.chain.headers.td)) ||
- (best && best.les.status.headTd.lt(td))) {
- best = peer
- }
- }
- if (best) {
- height = best.les.status.headNum
+ const peers = this.pool.peers.filter(this.syncable.bind(this))
+ if (peers.length < this.minPeers && !this.forceSync) return
+ for (let peer of peers) {
+ const td = peer.les.status.headTd
+ if ((!best && td.gte(this.chain.headers.td)) ||
+ (best && best.les.status.headTd.lt(td))) {
+ best = peer
}
}
- return [best, height]
+ return best
}
/**
- * Fetch all headers from current height up to specified number (last). Returns
- * a promise that resolves once all headers are downloaded.
- * @param {BN} [last] number of last block header to download. If last is not
- * specified, the best height will be used from existing peers.
- * @return {Promise} Resolves with count of number of headers fetched
+ * Sync all headers and state from peer starting from current height.
+ * @param {Peer} peer remote peer to sync with
+ * @return {Promise} Resolves when sync completed
*/
- async fetch (last) {
- if (!last) {
- const [ origin, height ] = await this.origin()
- if (!origin || !height) {
- return 0
- }
- this.logger.info(`Using origin peer: ${origin.toString(true)} height=${height.toString(10)}`)
- last = height
- }
-
+ async syncWithPeer (peer) {
+ if (!peer) return false
+ const height = new BN(peer.les.status.headNum)
const first = this.chain.headers.height.addn(1)
+ const count = height.sub(first).addn(1)
+ if (count.lten(0)) return false
- if (first.gt(last)) {
- return 0
- }
+ this.logger.debug(`Syncing with peer: ${peer.toString(true)} height=${height.toString(10)}`)
- await this.headerFetcher.open()
- this.headerFetcher.add({ first, last })
- await this.headerFetcher.start()
- return last.sub(first).toNumber() + 1
+ this.headerFetcher = new HeaderFetcher({
+ pool: this.pool,
+ chain: this.chain,
+ flow: this.flow,
+ logger: this.logger,
+ interval: this.interval,
+ first,
+ count
+ })
+ this.headerFetcher
+ .on('error', (error) => {
+ this.emit('error', error)
+ })
+ .on('fetched', headers => {
+ const first = new BN(headers[0].number)
+ const hash = short(headers[0].hash())
+ this.logger.info(`Imported headers count=${headers.length} number=${first.toString(10)} hash=${hash} peers=${this.pool.size}`)
+ })
+ await this.headerFetcher.fetch()
+ delete this.headerFetcher
+ return true
}
/**
- * Handler for incoming requests from connected peers
- * @param {Object} message message object
- * @param {Peer} peer peer
- * @return {Promise}
+ * Fetch all headers from current height up to highest found amongst peers
+ * @return {Promise} Resolves with true if sync successful
*/
- async handle (message, peer) {
- try {
- if (!this.chain.opened) {
- await this.chain.open()
- }
-
- if (message.name === 'Announce') {
- const { headNumber, reorgDepth } = message.data
- // TO DO: handle re-orgs
- if (reorgDepth) {
- return
- }
- this.sync(headNumber)
- }
- } catch (error) {
- this.emit('error', error)
- }
+ async sync () {
+ const peer = this.best()
+ return this.syncWithPeer(peer)
}
/**
@@ -144,8 +100,8 @@ class LightSynchronizer extends Synchronizer {
await this.pool.open()
const number = this.chain.headers.height.toString(10)
const td = this.chain.headers.td.toString(10)
- const hash = this.chain.headers.latest.hash().toString('hex').slice(0, 8) + '...'
- this.logger.info(`Latest local header: number=${number} td=${td} hash=${hash}`)
+ const hash = this.chain.blocks.latest.hash()
+ this.logger.info(`Latest local header: number=${number} td=${td} hash=${short(hash)}`)
}
/**
@@ -153,11 +109,14 @@ class LightSynchronizer extends Synchronizer {
* @return {Promise}
*/
async stop () {
- if (!this.syncing) {
+ if (!this.running) {
return false
}
- await this.headerFetcher.stop()
- return super.stop()
+ if (this.headerFetcher) {
+ this.headerFetcher.destroy()
+ delete this.headerFetcher
+ }
+ await super.stop()
}
}
diff --git a/lib/sync/sync.js b/lib/sync/sync.js
index 0a4a4eb..82c5a83 100644
--- a/lib/sync/sync.js
+++ b/lib/sync/sync.js
@@ -1,12 +1,12 @@
'use strict'
const EventEmitter = require('events')
-const AwaitLock = require('await-lock')
const { defaultLogger } = require('../logging')
const defaultOptions = {
logger: defaultLogger,
- interval: 1000
+ interval: 1000,
+ minPeers: 3
}
/**
@@ -16,11 +16,13 @@ const defaultOptions = {
class Synchronizer extends EventEmitter {
/**
* Create new node
- * @param {Object} options constructor parameters
- * @param {PeerPool} options.pool peer pool
- * @param {Chain} options.chain blockchain
- * @param {number} [options.interval] refresh interval
- * @param {Logger} [options.logger] Logger instance
+ * @param {Object} options constructor parameters
+ * @param {PeerPool} options.pool peer pool
+ * @param {Chain} options.chain blockchain
+ * @param {FlowControl} options.flow flow control manager
+ * @param {number} [options.minPeers=3] number of peers needed before syncing
+ * @param {number} [options.interval] refresh interval
+ * @param {Logger} [options.logger] Logger instance
*/
constructor (options) {
super()
@@ -29,13 +31,14 @@ class Synchronizer extends EventEmitter {
this.logger = options.logger
this.pool = options.pool
this.chain = options.chain
+ this.flow = options.flow
+ this.minPeers = options.minPeers
this.interval = options.interval
- this.lock = new AwaitLock()
- this.syncing = false
-
+ this.running = false
+ this.forceSync = false
this.pool.on('added', peer => {
- if (this.fetchable(peer)) {
- this.logger.info(`Found ${this.type} peer: ${peer}`)
+ if (this.syncable(peer)) {
+ this.logger.debug(`Found ${this.type} peer: ${peer}`)
}
})
}
@@ -55,37 +58,33 @@ class Synchronizer extends EventEmitter {
}
/**
- * Returns true if peer can be used to fetch data
+ * Returns true if peer can be used for syncing
* @return {boolean}
*/
- fetchable (peer) {
+ syncable (peer) {
return true
}
/**
- * Synchronize blockchain. Returns a promise that resolves once chain is
- * synchronized
- * @param {BN} [height] number of last block to fetch. Will be discovered from
- * peers if not specified.
+ * Start synchronization
* @return {Promise}
*/
- async sync (height) {
- if (this.syncing) {
+ async start () {
+ if (this.running) {
return false
}
- await this.lock.acquireAsync()
- this.syncing = true
- try {
- this.emit('synchronized', {
- count: await this.fetch(height),
- type: this.type
- })
- } catch (err) {
- this.emit('error', err)
- } finally {
- this.syncing = false
- this.lock.release()
+ this.running = true
+ const timeout = setTimeout(() => { this.forceSync = true }, this.interval * 30)
+ while (this.running) {
+ try {
+ if (await this.sync()) this.emit('synchronized')
+ } catch (error) {
+ if (this.running) this.emit('error', error)
+ }
+ await new Promise(resolve => setTimeout(resolve, this.interval))
}
+ this.running = false
+ clearTimeout(timeout)
}
/**
@@ -93,14 +92,11 @@ class Synchronizer extends EventEmitter {
* @return {Promise}
*/
async stop () {
- if (!this.syncing) {
+ if (!this.running) {
return false
}
- this.syncing = false
- }
-
- async wait (delay) {
- await new Promise(resolve => setTimeout(resolve, delay || this.interval))
+ await new Promise(resolve => setTimeout(resolve, this.interval))
+ this.running = false
}
}
diff --git a/lib/util/index.js b/lib/util/index.js
index 538f5c8..bb26c28 100644
--- a/lib/util/index.js
+++ b/lib/util/index.js
@@ -5,3 +5,4 @@
*/
exports.parse = require('./parse')
+exports.short = (buffer) => buffer.toString('hex').slice(0, 8) + '...'
diff --git a/package.json b/package.json
index 49ab48d..cc36121 100644
--- a/package.json
+++ b/package.json
@@ -47,11 +47,10 @@
},
"homepage": "https://github.com/ethereumjs/ethereumjs-client#readme",
"dependencies": {
- "await-lock": "^1.1.3",
"chalk": "^2.4.1",
"ethereumjs-account": "^2.0.5",
"ethereumjs-block": "^2.1.0",
- "ethereumjs-blockchain": "^3.3.1",
+ "ethereumjs-blockchain": "^3.3.2",
"ethereumjs-common": "^0.6.1",
"ethereumjs-devp2p": "^2.5.1",
"ethereumjs-util": "^6.0.0",
diff --git a/test/blockchain/blockpool.js b/test/blockchain/blockpool.js
deleted file mode 100644
index 8b6a356..0000000
--- a/test/blockchain/blockpool.js
+++ /dev/null
@@ -1,64 +0,0 @@
-const tape = require('tape')
-const Block = require('ethereumjs-block')
-const util = require('ethereumjs-util')
-const { Chain, BlockPool } = require('../../lib/blockchain')
-const { defaultLogger } = require('../../lib/logging')
-defaultLogger.silent = true
-
-tape('[BlockPool]', t => {
- t.test('should add block segment to chain', async (t) => {
- const chain = new Chain() // eslint-disable-line no-new
- const pool = new BlockPool({ chain })
- await pool.open()
-
- const block1 = new Block()
- block1.header.number = util.toBuffer(1)
- block1.header.difficulty = '0x11111111'
- block1.header.parentHash = chain.genesis.hash
-
- const block2 = new Block()
- block2.header.number = util.toBuffer(2)
- block2.header.difficulty = '0x22222222'
- block2.header.parentHash = block1.hash()
-
- // add blocks out of order to make sure they are inserted in order
- await pool.add(block2)
- await pool.add(block1)
- t.equal(chain.blocks.td.toString(16), '433333333', 'get chain.blocks.td')
- t.equal(chain.blocks.height.toString(10), '2', 'get chain.blocks.height')
- chain.close()
- t.end()
- })
-
- t.test('should get pool size', async (t) => {
- const chain = new Chain() // eslint-disable-line no-new
- const pool = new BlockPool({ chain })
- await pool.open()
-
- const block1 = new Block()
- block1.header.number = util.toBuffer(1)
- block1.header.difficulty = '0x11111111'
- block1.header.parentHash = chain.genesis.hash
-
- const block2 = new Block()
- block2.header.number = util.toBuffer(2)
- block2.header.difficulty = '0x22222222'
- block2.header.parentHash = block1.hash()
-
- await pool.add(block2)
- t.equal(pool.size, 1, 'pool contains out of order block')
- await pool.add(block1)
- t.equal(pool.size, 0, 'pool should be empty')
- chain.close()
- t.end()
- })
-
- t.test('should check opened state', async (t) => {
- const chain = new Chain() // eslint-disable-line no-new
- const pool = new BlockPool({ chain })
- t.equal(await pool.add([]), false, 'not opened')
- await pool.open()
- t.equal(await pool.open(), false, 'already opened')
- t.end()
- })
-})
diff --git a/test/blockchain/headerpool.js b/test/blockchain/headerpool.js
deleted file mode 100644
index cc10069..0000000
--- a/test/blockchain/headerpool.js
+++ /dev/null
@@ -1,48 +0,0 @@
-const tape = require('tape')
-const tmp = require('tmp')
-const Block = require('ethereumjs-block')
-const util = require('ethereumjs-util')
-const { Chain, HeaderPool } = require('../../lib/blockchain')
-const { defaultLogger } = require('../../lib/logging')
-defaultLogger.silent = true
-
-tape('[HeaderPool]', t => {
- const config = {}
-
- t.test('should add header segment to chain', async (t) => {
- const tmpdir = tmp.dirSync()
- config.dataDir = `${tmpdir.name}/chaindb`
-
- const chain = new Chain(config) // eslint-disable-line no-new
- const pool = new HeaderPool({ chain })
- await pool.open()
-
- const header1 = new Block.Header()
- header1.number = util.toBuffer(1)
- header1.difficulty = '0x11111111'
- header1.parentHash = chain.genesis.hash
-
- const header2 = new Block.Header()
- header2.number = util.toBuffer(2)
- header2.difficulty = '0x22222222'
- header2.parentHash = header1.hash()
-
- // add headers out of order to make sure they are inserted in order
- await pool.add(header2)
- await pool.add(header1)
- t.equal(chain.headers.td.toString(16), '433333333', 'get chain.headers.td')
- t.equal(chain.headers.height.toString(10), '2', 'get chain.headers.height')
- chain.close()
- t.end()
- })
-
- t.test('should check opened state', async (t) => {
- const tmpdir = tmp.dirSync()
- config.dataDir = `${tmpdir.name}/chaindb`
-
- const chain = new Chain(config) // eslint-disable-line no-new
- const pool = new HeaderPool({ chain })
- t.equal(await pool.add([]), false, 'not opened')
- t.end()
- })
-})
diff --git a/test/handler/ethhandler.js b/test/handler/ethhandler.js
deleted file mode 100644
index 9f304c6..0000000
--- a/test/handler/ethhandler.js
+++ /dev/null
@@ -1,50 +0,0 @@
-const tape = require('tape-catch')
-const td = require('testdouble')
-const EventEmitter = require('events')
-const Chain = td.constructor(require('../../lib/blockchain/chain'))
-const { EthHandler } = require('../../lib/handler')
-const { defaultLogger } = require('../../lib/logging')
-defaultLogger.silent = true
-
-tape('[EthHandler]', t => {
- const pool = new EventEmitter()
- const chain = new Chain()
- const handler = new EthHandler({ pool, chain })
- const peer = { eth: { send: td.func() } }
- handler.start()
-
- t.test('should handle GetBlockHeaders', t => {
- const message = {
- name: 'GetBlockHeaders',
- data: {
- block: 5,
- max: 100,
- skip: 10,
- reverse: 1
- }
- }
- const headers = ['header0', 'header1']
- td.when(chain.getHeaders(5, 100, 10, 1)).thenResolve(headers)
- pool.emit('message:eth', message, peer)
- setTimeout(() => {
- td.verify(peer.eth.send('BlockHeaders', headers))
- td.reset()
- t.pass('sent BlockHeaders')
- t.end()
- }, 100)
- })
-
- t.test('should handle errors', t => {
- handler.on('error', err => {
- t.ok(err, 'caught error')
- t.end()
- })
- pool.emit('message:eth', null, peer)
- })
-
- t.test('should stop handler', t => {
- handler.stop()
- t.notOk(handler.running, 'stopped handler')
- t.end()
- })
-})
diff --git a/test/handler/leshandler.js b/test/handler/leshandler.js
deleted file mode 100644
index 439a134..0000000
--- a/test/handler/leshandler.js
+++ /dev/null
@@ -1,67 +0,0 @@
-const tape = require('tape-catch')
-const td = require('testdouble')
-const EventEmitter = require('events')
-const Chain = td.constructor(require('../../lib/blockchain/chain'))
-const Flow = td.constructor(require('../../lib/net/protocol/flowcontrol'))
-const { LesHandler } = require('../../lib/handler')
-const { defaultLogger } = require('../../lib/logging')
-defaultLogger.silent = true
-
-tape('[LesHandler]', t => {
- const pool = new EventEmitter()
- const chain = new Chain()
- const flow = new Flow()
- const handler = new LesHandler({ pool, chain, flow })
- const peer = { les: { send: td.func() } }
- const message = {
- name: 'GetBlockHeaders',
- data: {
- reqId: 1,
- block: 5,
- max: 100,
- skip: 10,
- reverse: 1
- }
- }
- const headers = [ 'header0', 'header1' ]
- handler.start()
-
- t.test('should handle GetBlockHeaders', t => {
- td.when(flow.handleRequest(peer, message.name, 100)).thenReturn(11)
- td.when(chain.getHeaders(5, 100, 10, 1)).thenResolve(headers)
- pool.emit('message:les', message, peer)
- setTimeout(() => {
- td.verify(peer.les.send('BlockHeaders', { reqId: 1, bv: 11, headers }))
- td.reset()
- t.pass('sent BlockHeaders')
- t.end()
- }, 100)
- })
-
- t.test('should perform flow control', t => {
- td.when(flow.handleRequest(peer, message.name, 100)).thenReturn(-1)
- pool.ban = td.func()
- pool.emit('message:les', message, peer)
- setTimeout(() => {
- td.verify(pool.ban(peer, 300000))
- td.reset()
- t.pass('flow control')
- t.end()
- }, 100)
- pool.emit('message:les', message, peer)
- })
-
- t.test('should handle errors', t => {
- handler.on('error', err => {
- t.ok(err, 'caught error')
- t.end()
- })
- pool.emit('message:les', null, peer)
- })
-
- t.test('should stop handler', t => {
- handler.stop()
- t.notOk(handler.running, 'stopped handler')
- t.end()
- })
-})
diff --git a/test/integration/ethereumservice.js b/test/integration/fastethereumservice.js
similarity index 68%
rename from test/integration/ethereumservice.js
rename to test/integration/fastethereumservice.js
index 437b212..d8590c4 100644
--- a/test/integration/ethereumservice.js
+++ b/test/integration/fastethereumservice.js
@@ -1,19 +1,19 @@
'use strict'
const tape = require('tape')
-const { EthereumService } = require('../../lib/service')
+const { FastEthereumService } = require('../../lib/service')
const MockServer = require('./mocks/mockserver.js')
const MockChain = require('./mocks/mockchain.js')
+const BN = require('bn.js')
const { defaultLogger } = require('../../lib/logging')
defaultLogger.silent = true
-tape('[Integration:EthereumService]', async (t) => {
+tape('[Integration:FastEthereumService]', async (t) => {
async function setup () {
const server = new MockServer()
const chain = new MockChain()
- const service = new EthereumService({
+ const service = new FastEthereumService({
servers: [ server ],
- syncmode: 'fast',
lightserv: true,
chain
})
@@ -32,11 +32,12 @@ tape('[Integration:EthereumService]', async (t) => {
const [server, service] = await setup()
const peer = await server.accept('peer0')
const headers = await peer.eth.getBlockHeaders({block: 1, max: 2})
- t.equals(
- headers[1].hash().toString('hex'),
- 'a321d27cd2743617c1c1b0d7ecb607dd14febcdfca8f01b79c3f0249505ea069',
- 'handled GetBlockHeaders'
- )
+ const hash = Buffer.from('a321d27cd2743617c1c1b0d7ecb607dd14febcdfca8f01b79c3f0249505ea069', 'hex')
+ t.equals(headers[1].hash().toString('hex'), hash.toString('hex'), 'handled GetBlockHeaders')
+ const bodies = await peer.eth.getBlockBodies([hash])
+ t.deepEquals(bodies, [[[], []]], 'handled GetBlockBodies')
+ await peer.eth.send('NewBlockHashes', [[hash, new BN(2)]])
+ t.pass('handled NewBlockHashes')
await destroy(server, service)
t.end()
})
diff --git a/test/integration/fastsync.js b/test/integration/fastsync.js
index eed0e69..1d87982 100644
--- a/test/integration/fastsync.js
+++ b/test/integration/fastsync.js
@@ -1,20 +1,25 @@
'use strict'
const tape = require('tape')
-const { EthereumService } = require('../../lib/service')
+const { FastEthereumService } = require('../../lib/service')
const MockServer = require('./mocks/mockserver.js')
const MockChain = require('./mocks/mockchain.js')
const { defaultLogger } = require('../../lib/logging')
defaultLogger.silent = true
+async function wait (delay) {
+ await new Promise(resolve => setTimeout(resolve, delay))
+}
+
tape('[Integration:FastSync]', async (t) => {
async function setup (options = {}) {
const server = new MockServer({location: options.location})
const chain = new MockChain({height: options.height})
- const service = new EthereumService({
+ const service = new FastEthereumService({
servers: [ server ],
- syncmode: 'fast',
+ minPeers: 1,
interval: options.interval || 10,
+ timeout: 500,
chain
})
await service.open()
@@ -30,10 +35,10 @@ tape('[Integration:FastSync]', async (t) => {
}
t.test('should sync blocks', async (t) => {
- const [remoteServer, remoteService] = await setup({location: '127.0.0.2', height: 10})
+ const [remoteServer, remoteService] = await setup({location: '127.0.0.2', height: 200})
const [localServer, localService] = await setup({location: '127.0.0.1', height: 0})
- localService.on('synchronized', async (stats) => {
- t.equal(stats.count, 10, 'synced')
+ localService.on('synchronized', async () => {
+ t.equals(localService.chain.blocks.height.toNumber(), 200, 'synced')
await destroy(localServer, localService)
await destroy(remoteServer, remoteService)
t.end()
@@ -44,31 +49,31 @@ tape('[Integration:FastSync]', async (t) => {
t.test('should not sync with stale peers', async (t) => {
const [remoteServer, remoteService] = await setup({location: '127.0.0.2', height: 9})
const [localServer, localService] = await setup({location: '127.0.0.1', height: 10})
- localService.on('synchronized', async (stats) => {
- t.equal(stats.count, 0, 'nothing synced')
- await destroy(remoteServer, remoteService)
- t.end()
+ localService.on('synchronized', async () => {
+ t.fail('synced with a stale peer')
})
localServer.discover('remotePeer', '127.0.0.2')
- setTimeout(async () => {
- await destroy(localServer, localService)
- }, 100)
+ await wait(100)
+ await destroy(localServer, localService)
+ await destroy(remoteServer, remoteService)
+ t.pass('did not sync')
+ t.end()
})
- t.test('should find best origin peer', async (t) => {
+ t.test('should sync with best peer', async (t) => {
const [remoteServer1, remoteService1] = await setup({location: '127.0.0.2', height: 9})
const [remoteServer2, remoteService2] = await setup({location: '127.0.0.3', height: 10})
const [localServer, localService] = await setup({location: '127.0.0.1', height: 0})
await localService.synchronizer.stop()
await localServer.discover('remotePeer1', '127.0.0.2')
await localServer.discover('remotePeer2', '127.0.0.3')
- localService.on('synchronized', async (stats) => {
- t.equal(stats.count, 10, 'synced with best peer')
+ localService.on('synchronized', async () => {
+ t.equals(localService.chain.blocks.height.toNumber(), 10, 'synced with best peer')
await destroy(localServer, localService)
await destroy(remoteServer1, remoteService1)
await destroy(remoteServer2, remoteService2)
t.end()
})
- localService.synchronizer.sync()
+ localService.synchronizer.start()
})
})
diff --git a/test/integration/lightethereumservice.js b/test/integration/lightethereumservice.js
new file mode 100644
index 0000000..0a4ea41
--- /dev/null
+++ b/test/integration/lightethereumservice.js
@@ -0,0 +1,35 @@
+'use strict'
+
+const tape = require('tape')
+const { LightEthereumService } = require('../../lib/service')
+const MockServer = require('./mocks/mockserver.js')
+const MockChain = require('./mocks/mockchain.js')
+const { defaultLogger } = require('../../lib/logging')
+defaultLogger.silent = true
+
+tape('[Integration:LightEthereumService]', async (t) => {
+ async function setup () {
+ const server = new MockServer()
+ const chain = new MockChain()
+ const service = new LightEthereumService({
+ servers: [ server ],
+ chain
+ })
+ await service.open()
+ await server.start()
+ await service.start()
+ return [server, service]
+ }
+
+ async function destroy (server, service) {
+ await service.stop()
+ await server.stop()
+ }
+
+ t.test('should handle LES requests', async (t) => {
+ const [server, service] = await setup()
+ // TO DO: test handlers once they are implemented
+ await destroy(server, service)
+ t.end()
+ })
+})
diff --git a/test/integration/lightsync.js b/test/integration/lightsync.js
index bbbea0c..2a07317 100644
--- a/test/integration/lightsync.js
+++ b/test/integration/lightsync.js
@@ -1,23 +1,34 @@
'use strict'
const tape = require('tape')
-const { EthereumService } = require('../../lib/service')
+const { FastEthereumService, LightEthereumService } = require('../../lib/service')
const MockServer = require('./mocks/mockserver.js')
const MockChain = require('./mocks/mockchain.js')
const { defaultLogger } = require('../../lib/logging')
defaultLogger.silent = true
+async function wait (delay) {
+ await new Promise(resolve => setTimeout(resolve, delay))
+}
+
tape('[Integration:LightSync]', async (t) => {
async function setup (options = {}) {
const server = new MockServer({location: options.location})
const chain = new MockChain({height: options.height})
- const service = new EthereumService({
- servers: [ server ],
- syncmode: options.syncmode,
- lightserv: true,
- interval: options.interval || 10,
- chain
- })
+ const service = options.syncmode === 'fast'
+ ? new FastEthereumService({
+ servers: [ server ],
+ lightserv: true,
+ minPeers: 1,
+ interval: options.interval || 10,
+ chain
+ })
+ : new LightEthereumService({
+ servers: [ server ],
+ minPeers: 1,
+ interval: options.interval || 10,
+ chain
+ })
await service.open()
await server.start()
await service.start()
@@ -31,10 +42,10 @@ tape('[Integration:LightSync]', async (t) => {
}
t.test('should sync headers', async (t) => {
- const [remoteServer, remoteService] = await setup({location: '127.0.0.2', height: 10, syncmode: 'fast'})
+ const [remoteServer, remoteService] = await setup({location: '127.0.0.2', height: 200, syncmode: 'fast'})
const [localServer, localService] = await setup({location: '127.0.0.1', height: 0, syncmode: 'light'})
- localService.on('synchronized', async (stats) => {
- t.equal(stats.count, 10, 'synced')
+ localService.on('synchronized', async () => {
+ t.equals(localService.chain.headers.height.toNumber(), 200, 'synced')
await destroy(localServer, localService)
await destroy(remoteServer, remoteService)
t.end()
@@ -45,18 +56,18 @@ tape('[Integration:LightSync]', async (t) => {
t.test('should not sync with stale peers', async (t) => {
const [remoteServer, remoteService] = await setup({location: '127.0.0.2', height: 9, syncmode: 'fast'})
const [localServer, localService] = await setup({location: '127.0.0.1', height: 10, syncmode: 'light'})
- localService.on('synchronized', async (stats) => {
- t.equal(stats.count, 0, 'nothing synced')
- await destroy(remoteServer, remoteService)
- t.end()
+ localService.on('synchronized', async () => {
+ t.fail('synced with a stale peer')
})
localServer.discover('remotePeer', '127.0.0.2')
- setTimeout(async () => {
- await destroy(localServer, localService)
- }, 100)
+ await wait(100)
+ await destroy(localServer, localService)
+ await destroy(remoteServer, remoteService)
+ t.pass('did not sync')
+ t.end()
})
- t.test('should find best origin peer', async (t) => {
+ t.test('should sync with best peer', async (t) => {
const [remoteServer1, remoteService1] = await setup({location: '127.0.0.2', height: 9, syncmode: 'fast'})
const [remoteServer2, remoteService2] = await setup({location: '127.0.0.3', height: 10, syncmode: 'fast'})
const [localServer, localService] = await setup({location: '127.0.0.1', height: 0, syncmode: 'light'})
@@ -64,12 +75,12 @@ tape('[Integration:LightSync]', async (t) => {
await localServer.discover('remotePeer1', '127.0.0.2')
await localServer.discover('remotePeer2', '127.0.0.3')
localService.on('synchronized', async (stats) => {
- t.equal(stats.count, 10, 'synced with best peer')
+ t.equals(localService.chain.headers.height.toNumber(), 10, 'synced with best peer')
await destroy(localServer, localService)
await destroy(remoteServer1, remoteService1)
await destroy(remoteServer2, remoteService2)
t.end()
})
- localService.synchronizer.sync()
+ localService.synchronizer.start()
})
})
diff --git a/test/integration/node.js b/test/integration/node.js
index d7bb1f4..2358370 100644
--- a/test/integration/node.js
+++ b/test/integration/node.js
@@ -17,13 +17,11 @@ tape('[Integration:Node]', t => {
node.on('listening', details => {
t.deepEqual(details, { transport: 'mock', url: 'mock://127.0.0.1' }, 'server listening')
})
- node.on('synchronized', stats => {
- t.deepEqual(stats, { count: 0, type: 'fast' }, 'synchronized')
- })
await node.open()
node.service('eth').synchronizer.interval = 100
node.service('eth').emit('error', 'err0')
await node.start()
+ t.ok(node.service('eth').synchronizer.running, 'sync running')
await node.stop()
t.pass('node stopped')
})
diff --git a/test/service/ethereumservice.js b/test/service/fastethereumservice.js
similarity index 60%
rename from test/service/ethereumservice.js
rename to test/service/fastethereumservice.js
index a2cbffd..b603bc2 100644
--- a/test/service/ethereumservice.js
+++ b/test/service/fastethereumservice.js
@@ -4,7 +4,7 @@ const EventEmitter = require('events')
const { defaultLogger } = require('../../lib/logging')
defaultLogger.silent = true
-tape('[EthereumService]', t => {
+tape('[FastEthereumService]', t => {
class PeerPool extends EventEmitter { }
PeerPool.prototype.open = td.func()
td.replace('../../lib/net/peerpool', PeerPool)
@@ -12,38 +12,29 @@ tape('[EthereumService]', t => {
const Chain = td.constructor()
Chain.prototype.open = td.func()
td.replace('../../lib/blockchain', { Chain })
- const EthHandler = td.constructor()
- const LesHandler = td.constructor()
- td.replace('../../lib/handler', { EthHandler, LesHandler })
const EthProtocol = td.constructor()
const LesProtocol = td.constructor()
td.replace('../../lib/net/protocol/ethprotocol', EthProtocol)
td.replace('../../lib/net/protocol/lesprotocol', LesProtocol)
class FastSynchronizer extends EventEmitter { }
- class LightSynchronizer extends EventEmitter { }
- LightSynchronizer.prototype.sync = td.func()
- LightSynchronizer.prototype.stop = td.func()
- LightSynchronizer.prototype.open = td.func()
+ FastSynchronizer.prototype.start = td.func()
+ FastSynchronizer.prototype.stop = td.func()
+ FastSynchronizer.prototype.open = td.func()
td.replace('../../lib/sync/fastsync', FastSynchronizer)
- td.replace('../../lib/sync/lightsync', LightSynchronizer)
- const EthereumService = require('../../lib/service/ethereumservice')
+ const FastEthereumService = require('../../lib/service/fastethereumservice')
t.test('should initialize correctly', async (t) => {
- let service = new EthereumService({ syncmode: 'light' })
- t.ok(service.synchronizer instanceof LightSynchronizer, 'light mode')
- service = new EthereumService({ syncmode: 'fast', lightserv: true })
+ let service = new FastEthereumService()
t.ok(service.synchronizer instanceof FastSynchronizer, 'fast mode')
- t.ok(service.handlers[0] instanceof EthHandler, 'eth handler')
- t.ok(service.handlers[1] instanceof LesHandler, 'les handler')
- t.throws(() => new EthereumService({ syncmode: 'unknown' }), /Unsupported/, 'bad syncmode')
t.equals(service.name, 'eth', 'got name')
t.end()
})
t.test('should get protocols', async (t) => {
- let service = new EthereumService({ syncmode: 'light' })
- t.ok(service.protocols[0] instanceof LesProtocol, 'light protocols')
- service = new EthereumService({ syncmode: 'fast', lightserv: true })
+ let service = new FastEthereumService()
+ t.ok(service.protocols[0] instanceof EthProtocol, 'fast protocols')
+ t.notOk(service.protocols[1], 'no light protocol')
+ service = new FastEthereumService({ lightserv: true })
t.ok(service.protocols[0] instanceof EthProtocol, 'fast protocols')
t.ok(service.protocols[1] instanceof LesProtocol, 'lightserv protocols')
t.end()
@@ -52,16 +43,14 @@ tape('[EthereumService]', t => {
t.test('should open', async (t) => {
t.plan(3)
const server = td.object()
- let service = new EthereumService({
- servers: [server]
- })
+ let service = new FastEthereumService({ servers: [server] })
await service.open()
td.verify(service.chain.open())
td.verify(service.synchronizer.open())
td.verify(server.addProtocols(td.matchers.anything()))
- service.on('synchronized', stats => t.equals(stats, 'stats0', 'got stats'))
+ service.on('synchronized', () => t.pass('synchronized'))
service.once('error', err => t.equals(err, 'error0', 'got error 1'))
- service.synchronizer.emit('synchronized', 'stats0')
+ service.synchronizer.emit('synchronized')
service.synchronizer.emit('error', 'error0')
service.once('error', err => t.equals(err, 'error1', 'got error 2'))
service.pool.emit('banned', 'peer0')
@@ -72,9 +61,9 @@ tape('[EthereumService]', t => {
t.test('should start/stop', async (t) => {
const server = td.object()
- let service = new EthereumService({ servers: [server] })
+ let service = new FastEthereumService({ servers: [server] })
await service.start()
- td.verify(service.synchronizer.sync())
+ td.verify(service.synchronizer.start())
t.notOk(await service.start(), 'already started')
await service.stop()
td.verify(service.synchronizer.stop())
diff --git a/test/service/lightethereumservice.js b/test/service/lightethereumservice.js
new file mode 100644
index 0000000..95be682
--- /dev/null
+++ b/test/service/lightethereumservice.js
@@ -0,0 +1,75 @@
+const tape = require('tape-catch')
+const td = require('testdouble')
+const EventEmitter = require('events')
+const { defaultLogger } = require('../../lib/logging')
+defaultLogger.silent = true
+
+tape('[LightEthereumService]', t => {
+ class PeerPool extends EventEmitter { }
+ PeerPool.prototype.open = td.func()
+ td.replace('../../lib/net/peerpool', PeerPool)
+ td.replace('../../lib/net/protocol/flowcontrol')
+ const Chain = td.constructor()
+ Chain.prototype.open = td.func()
+ td.replace('../../lib/blockchain', { Chain })
+
+ const LesProtocol = td.constructor()
+ td.replace('../../lib/net/protocol/lesprotocol', LesProtocol)
+
+ class LightSynchronizer extends EventEmitter { }
+ LightSynchronizer.prototype.start = td.func()
+ LightSynchronizer.prototype.stop = td.func()
+ LightSynchronizer.prototype.open = td.func()
+ td.replace('../../lib/sync/lightsync', LightSynchronizer)
+ const LightEthereumService = require('../../lib/service/lightethereumservice')
+
+ t.test('should initialize correctly', async (t) => {
+ let service = new LightEthereumService()
+ t.ok(service.synchronizer instanceof LightSynchronizer, 'light sync')
+ t.equals(service.name, 'eth', 'got name')
+ t.end()
+ })
+
+ t.test('should get protocols', async (t) => {
+ let service = new LightEthereumService()
+ t.ok(service.protocols[0] instanceof LesProtocol, 'light protocols')
+ t.end()
+ })
+
+ t.test('should open', async (t) => {
+ t.plan(3)
+ const server = td.object()
+ let service = new LightEthereumService({ servers: [server] })
+ await service.open()
+ td.verify(service.chain.open())
+ td.verify(service.synchronizer.open())
+ td.verify(server.addProtocols(td.matchers.anything()))
+ service.on('synchronized', () => t.pass('synchronized'))
+ service.once('error', err => t.equals(err, 'error0', 'got error 1'))
+ service.synchronizer.emit('synchronized')
+ service.synchronizer.emit('error', 'error0')
+ service.once('error', err => t.equals(err, 'error1', 'got error 2'))
+ service.pool.emit('banned', 'peer0')
+ service.pool.emit('added', 'peer0')
+ service.pool.emit('removed', 'peer0')
+ service.pool.emit('error', 'error1')
+ })
+
+ t.test('should start/stop', async (t) => {
+ const server = td.object()
+ let service = new LightEthereumService({ servers: [server] })
+ await service.start()
+ td.verify(service.synchronizer.start())
+ t.notOk(await service.start(), 'already started')
+ await service.stop()
+ td.verify(service.synchronizer.stop())
+ td.verify(server.start())
+ t.notOk(await service.stop(), 'already stopped')
+ t.end()
+ })
+
+ t.test('should reset td', t => {
+ td.reset()
+ t.end()
+ })
+})
diff --git a/test/sync/blockfetcher.js b/test/sync/blockfetcher.js
deleted file mode 100644
index 904e369..0000000
--- a/test/sync/blockfetcher.js
+++ /dev/null
@@ -1,73 +0,0 @@
-const tape = require('tape-catch')
-const td = require('testdouble')
-const BN = require('bn.js')
-const EventEmitter = require('events')
-const { defaultLogger } = require('../../lib/logging')
-defaultLogger.silent = true
-
-tape('[BlockFetcher]', t => {
- class PeerPool extends EventEmitter {}
- td.replace('../../lib/net/peerpool', PeerPool)
- const BlockPool = td.constructor()
- BlockPool.prototype.open = td.func()
- BlockPool.prototype.add = td.func()
- td.when(BlockPool.prototype.open()).thenResolve()
- const blocks = [{header: {number: 1}}, {header: {number: 2}}]
- td.when(BlockPool.prototype.add(blocks)).thenReject(new Error('err0'))
- td.replace('../../lib/blockchain', { BlockPool })
- const BlockFetcher = require('../../lib/sync/blockfetcher')
- const ONE = new BN(1)
- const TWO = new BN(2)
-
- t.test('should order correctly', t => {
- const fetcher = new BlockFetcher({pool: new PeerPool()})
- t.ok(fetcher.before({first: ONE}, {first: TWO}), 'ordered')
- t.notOk(fetcher.before({first: TWO}, {first: ONE}), 'not ordered')
- t.end()
- })
-
- t.test('should open', async (t) => {
- const fetcher = new BlockFetcher({pool: new PeerPool()})
- t.notOk(fetcher.opened, 'not open')
- await fetcher.open()
- t.ok(fetcher.opened, 'open')
- t.end()
- })
-
- t.test('should fetch', async (t) => {
- const fetcher = new BlockFetcher({pool: new PeerPool()})
- const peer = {eth: td.object()}
- td.when(peer.eth.getBlockHeaders({block: ONE, max: 2})).thenResolve([])
- td.when(peer.eth.getBlockHeaders({block: ONE, max: 128})).thenResolve([])
- td.when(peer.eth.getBlockBodies(td.matchers.anything())).thenResolve([])
- const blocks = await fetcher.fetch({first: ONE, last: TWO}, peer)
- t.deepEquals(blocks, {blocks: []}, 'got blocks')
- await fetcher.fetch({first: ONE, last: new BN(1000)}, peer)
- t.end()
- })
-
- t.test('should process', t => {
- const fetcher = new BlockFetcher({pool: new PeerPool()})
- fetcher.running = true
- fetcher.add = td.func()
- fetcher.process({task: 'task'}, {blocks: []})
- td.verify(fetcher.add('task'))
- fetcher.process({task: {last: TWO}}, {blocks})
- setTimeout(() => {
- td.verify(fetcher.add({first: ONE, last: TWO}))
- t.pass('processed tasks')
- t.end()
- }, 10)
- })
-
- t.test('should check if peer fetchable', async (t) => {
- const fetcher = new BlockFetcher({pool: new PeerPool()})
- t.ok(fetcher.fetchable({eth: {}}), 'fetchable')
- t.end()
- })
-
- t.test('should reset td', t => {
- td.reset()
- t.end()
- })
-})
diff --git a/test/sync/fastsync.js b/test/sync/fastsync.js
index 2f30302..fd28925 100644
--- a/test/sync/fastsync.js
+++ b/test/sync/fastsync.js
@@ -8,36 +8,15 @@ defaultLogger.silent = true
tape('[FastSynchronizer]', t => {
class PeerPool extends EventEmitter {}
td.replace('../../lib/net/peerpool', PeerPool)
- const BlockPool = td.constructor()
- BlockPool.prototype.open = td.func()
- BlockPool.prototype.add = td.func()
- td.when(BlockPool.prototype.open()).thenResolve()
- td.when(BlockPool.prototype.add('blocks')).thenResolve()
- const blocks = [{header: {number: 1}}, {header: {number: 2}}]
- td.when(BlockPool.prototype.add(blocks)).thenReject(new Error('err0'))
- td.replace('../../lib/blockchain', { BlockPool })
class BlockFetcher extends EventEmitter {}
- BlockFetcher.prototype.add = td.func()
- BlockFetcher.prototype.open = td.func()
- td.when(BlockFetcher.prototype.open()).thenResolve()
- BlockFetcher.prototype.start = function () {
- this.running = true
- this.emit('blocks', 'blocks')
- this.emit('blocks', blocks)
- this.emit('error', new Error('err1'), 'task', 'peer')
- setTimeout(() => this.emit('blocks', blocks), 20)
- }
- BlockFetcher.prototype.stop = async function () { this.running = false }
- td.replace('../../lib/sync/blockfetcher', BlockFetcher)
+ BlockFetcher.prototype.fetch = td.func()
+ td.replace('../../lib/sync/fetcher', { BlockFetcher })
const FastSynchronizer = require('../../lib/sync/fastsync')
t.test('should initialize correctly', async (t) => {
const pool = new PeerPool()
const sync = new FastSynchronizer({pool})
- sync.handle = td.func()
- pool.emit('added', { eth: true, inbound: false })
- pool.emit('message:eth', 'msg0', 'peer0')
- td.verify(sync.handle('msg0', 'peer0'))
+ pool.emit('added', { eth: true })
t.equals(sync.type, 'fast', 'fast type')
t.end()
})
@@ -66,70 +45,46 @@ tape('[FastSynchronizer]', t => {
const peer = {eth: {getBlockHeaders: td.func(), status: {bestHash: 'hash'}}}
const headers = [{number: 5}]
td.when(peer.eth.getBlockHeaders({block: 'hash', max: 1})).thenResolve(headers)
- t.equals((await sync.height(peer)).toNumber(), 5, 'got height')
+ const latest = await sync.latest(peer)
+ t.equals(new BN(latest.number).toNumber(), 5, 'got height')
t.end()
})
- t.test('should find origin', async (t) => {
- t.plan(3)
+ t.test('should find best', async (t) => {
const sync = new FastSynchronizer({interval: 1, pool: new PeerPool()})
- sync.syncing = true
+ sync.running = true
sync.height = td.func()
sync.chain = {blocks: {td: new BN(1)}}
- sync.pool = {peers: []}
const peers = [
{eth: {status: {td: new BN(1)}}, inbound: false},
{eth: {status: {td: new BN(2)}}, inbound: false}
]
+ sync.pool = {peers}
+ sync.forceSync = true
td.when(sync.height(peers[0])).thenDo(peer => Promise.resolve(peer.eth.status.td))
td.when(sync.height(peers[1])).thenDo(peer => Promise.resolve(peer.eth.status.td))
- sync.origin().then(([best, height]) => {
- t.equals(best, peers[1], 'found best')
- t.equals(height.toNumber(), 2, 'correct height')
- })
- setTimeout(() => { sync.pool.peers = peers }, 2)
- setTimeout(async () => {
- peers.push({eth: {status: {td: new BN(3)}}, inbound: false})
- sync.pool.ban = td.func()
- td.when(sync.pool.ban(peers[2])).thenThrow('err0')
- td.when(sync.height(peers[2])).thenReject(new Error('err0'))
- try {
- await sync.origin()
- } catch (err) {
- t.equals(err, 'err0', 'threw error')
- }
- }, 100)
+ t.equals(sync.best(), peers[1], 'found best')
+ t.end()
})
t.test('should sync', async (t) => {
t.plan(3)
const sync = new FastSynchronizer({interval: 1, pool: new PeerPool()})
- sync.origin = td.func()
- td.when(sync.origin()).thenResolve(['origin', new BN(2)])
+ sync.best = td.func()
+ sync.latest = td.func()
+ td.when(sync.best()).thenReturn('peer')
+ td.when(sync.latest('peer')).thenResolve({number: 2})
+ td.when(BlockFetcher.prototype.fetch(), {delay: 20}).thenResolve()
sync.chain = {blocks: {height: new BN(3)}}
- sync.once('synchronized', info => t.equals(info.count, 0, 'first > last'))
- await sync.sync()
+ t.notOk(await sync.sync(), 'local height > remote height')
sync.chain = {blocks: {height: new BN(0)}}
- sync.once('synchronized', info => t.equals(info.count, 2, 'synched 2 blocks'))
- setTimeout(() => sync.stop(), 10)
- sync.sync()
- process.nextTick(async () => {
- t.notOk(await sync.sync(), 'already syncing')
- })
- })
-
- t.test('should handle messages', async (t) => {
- const sync = new FastSynchronizer({interval: 1, pool: new PeerPool()})
- sync.chain = {open: td.func()}
- td.when(sync.chain.open()).thenResolve()
- sync.sync = td.func()
- await sync.handle({name: 'NewBlockHashes', data: [[0, 2]]})
- td.verify(sync.sync(2))
- await sync.handle({name: 'Unknown'})
- sync.on('error', err => t.equals(err, 'err0', 'got error'))
- td.when(sync.chain.open()).thenReject('err0')
- await sync.handle()
- t.end()
+ t.ok(await sync.sync(), 'local height < remote height')
+ td.when(BlockFetcher.prototype.fetch()).thenReject('err0')
+ try {
+ await sync.sync()
+ } catch (err) {
+ t.equals(err, 'err0', 'got error')
+ }
})
t.test('should reset td', t => {
diff --git a/test/sync/fetcher.js b/test/sync/fetcher.js
deleted file mode 100644
index 094dd3e..0000000
--- a/test/sync/fetcher.js
+++ /dev/null
@@ -1,169 +0,0 @@
-const tape = require('tape-catch')
-const td = require('testdouble')
-const EventEmitter = require('events')
-const timers = require('testdouble-timers').default
-const { defaultLogger } = require('../../lib/logging')
-defaultLogger.silent = true
-
-timers.use(td)
-
-tape('[Fetcher]', t => {
- class PeerPool extends EventEmitter {}
- td.replace('../../lib/net/peerpool', PeerPool)
- const Fetcher = require('../../lib/sync/fetcher')
-
- t.test('should initialize correctly', t => {
- const fetcher = new Fetcher({pool: new PeerPool()})
- fetcher.failure = td.func()
- fetcher.pool.emit('removed', {id: 'peer0'})
- process.nextTick(() => {
- td.verify(fetcher.failure('peer0'))
- t.end()
- })
- t.notOk(fetcher.running, 'not running')
- })
-
- t.test('should add task', t => {
- const fetcher = new Fetcher({pool: new PeerPool()})
- fetcher.add('task0')
- t.equals(fetcher.heap.peek(), 'task0', 'added')
- t.end()
- })
-
- t.test('should handle success', t => {
- const fetcher = new Fetcher({pool: new PeerPool()})
- const peer = {id: 'id0'}
- fetcher.next = td.func()
- fetcher.active.set('id0', {peer})
- fetcher.success('id0')
- t.ok(peer.idle, 'idle set')
- t.notOk(fetcher.active.get('id0'), 'set to inactive')
- td.verify(fetcher.next())
- t.end()
- })
-
- t.test('should handle failure', t => {
- t.plan(3)
- const fetcher = new Fetcher({pool: new PeerPool()})
- const peer = {id: 'id0'}
- fetcher.next = td.func()
- fetcher.add = td.func()
- fetcher.active.set('id0', {task: 'task0', peer})
- fetcher.on('error', (err, task, p) => {
- t.ok(err === 'err0' && task === 'task0' && p === peer, 'got error')
- })
- fetcher.failure('id0', 'err0')
- t.ok(peer.idle, 'idle set')
- t.notOk(fetcher.active.get('id0'), 'set to inactive')
- td.verify(fetcher.add('task0'))
- td.verify(fetcher.next())
- })
-
- t.test('should do next', t => {
- const fetcher = new Fetcher({pool: new PeerPool()})
- const peer = {id: 'id0', idle: true}
- t.equals(fetcher.next(), false, 'no remaining tasks')
- fetcher.pool.idle = td.func()
- fetcher.fetch = td.func()
- fetcher.handle = td.func()
- fetcher.failure = td.func()
- td.when(fetcher.pool.idle(td.matchers.anything())).thenReturn(peer)
- td.when(fetcher.fetch('task0', peer)).thenResolve('reply0')
- td.when(fetcher.fetch('task1', peer)).thenResolve('reply1')
- td.when(fetcher.handle('reply1', peer)).thenThrow('err0')
- fetcher.add('task0')
- t.equals(fetcher.next(), 'task0', 'next task')
- t.notOk(peer.idle, 'peer not idle')
- t.notOk(fetcher.heap.peek(), 'no tasks')
- const active = fetcher.active.get('id0')
- t.ok(active.task === 'task0' && active.peer === peer, 'active set')
- fetcher.add('task1')
- fetcher.next()
- setTimeout(() => {
- td.verify(fetcher.handle('reply0', peer))
- td.verify(fetcher.failure('id0', 'err0'))
- t.end()
- }, 10)
- })
-
- t.test('should handle reply', t => {
- const fetcher = new Fetcher({pool: new PeerPool()})
- const peer = {id: 'id0'}
- fetcher.process = td.func()
- fetcher.success = td.func()
- fetcher.failure = td.func()
- fetcher.handle(null, peer)
- t.ok(peer.idle, 'peer is idle')
- fetcher.active.set('id0', 'entry0')
- fetcher.handle(null, peer)
- td.verify(fetcher.failure('id0'))
- fetcher.handle('reply', peer)
- td.verify(fetcher.process('entry0', 'reply'))
- td.verify(fetcher.success('id0'))
- td.when(fetcher.process('entry0', 'reply')).thenThrow('err0')
- fetcher.handle('reply', peer)
- td.verify(fetcher.failure('id0', 'err0'))
- t.end()
- })
-
- t.test('should expire', t => {
- const clock = td.timers()
- const fetcher = new Fetcher({pool: new PeerPool(), timeout: 1})
- fetcher.pool = td.object()
- fetcher.add = td.func()
- fetcher.active.set(1, {time: 1, peer: 'peer1', task: 'task1'})
- fetcher.active.set(2, {time: 2, peer: 'peer2', task: 'task2'})
- fetcher.active.set(3, {time: 3, peer: 'peer3', task: 'task3'})
- td.when(fetcher.pool.contains('peer1')).thenReturn(true)
- clock.tick(4)
- fetcher.expire()
- td.verify(fetcher.pool.ban('peer1', td.matchers.isA(Number)))
- td.verify(fetcher.add('task1'))
- td.verify(fetcher.add('task2'))
- t.deepEquals(Array.from(fetcher.active), [[3, {time: 3, peer: 'peer3', task: 'task3'}]], 'one left')
- clock.restore()
- t.end()
- })
-
- t.test('should start', async (t) => {
- t.plan(3)
- const clock = td.timers()
- const fetcher = new Fetcher({pool: new PeerPool(), interval: 10})
- fetcher.expire = td.func()
- fetcher.next = td.func()
- td.when(fetcher.next()).thenReturn(true, true, false)
- fetcher.active.set(1)
- fetcher.start().then(() => {
- t.notOk(fetcher.running, 'stopped')
- })
- setTimeout(() => {
- t.ok(fetcher.running, 'started')
- }, 1)
- t.notOk(await fetcher.start(), 'already started')
- clock.tick(1)
- fetcher.active.delete(1)
- clock.tick(20)
- clock.restore()
- })
-
- t.test('should stop', async (t) => {
- const fetcher = new Fetcher({pool: new PeerPool(), interval: 1})
- fetcher.heap.remove = td.func()
- td.when(fetcher.heap.remove()).thenReturn(false)
- fetcher.active.set(0)
- fetcher.running = true
- fetcher.stop().then(() => {
- t.notOk(fetcher.active.get(0), 'empty active')
- t.end()
- })
- setTimeout(async () => {
- fetcher.running = false
- t.equals(await fetcher.stop(), false, 'already stopped')
- }, 100)
- })
-
- t.test('should reset td', t => {
- td.reset()
- t.end()
- })
-})
diff --git a/test/sync/fetcher/blockfetcher.js b/test/sync/fetcher/blockfetcher.js
new file mode 100644
index 0000000..989e8ec
--- /dev/null
+++ b/test/sync/fetcher/blockfetcher.js
@@ -0,0 +1,59 @@
+const tape = require('tape-catch')
+const td = require('testdouble')
+const BN = require('bn.js')
+const EventEmitter = require('events')
+const { defaultLogger } = require('../../../lib/logging')
+defaultLogger.silent = true
+
+async function wait (delay) {
+ await new Promise(resolve => setTimeout(resolve, delay || 10))
+}
+
+tape('[BlockFetcher]', t => {
+ class PeerPool extends EventEmitter {}
+ PeerPool.prototype.idle = td.func()
+ PeerPool.prototype.ban = td.func()
+ td.replace('../../../lib/net/peerpool', PeerPool)
+ const BlockFetcher = require('../../../lib/sync/fetcher/blockfetcher')
+
+ t.test('should start/stop', async (t) => {
+ const fetcher = new BlockFetcher({
+ pool: new PeerPool(),
+ first: new BN(1),
+ count: 10,
+ maxPerRequest: 5,
+ timeout: 5
+ })
+ fetcher.next = () => false
+ t.notOk(fetcher.running, 'not started')
+ fetcher.fetch()
+ t.equals(fetcher.in.size(), 2, 'added 2 tasks')
+ await wait()
+ t.ok(fetcher.running, 'started')
+ fetcher.destroy()
+ await wait()
+ t.notOk(fetcher.running, 'stopped')
+ t.end()
+ })
+
+ t.test('should process', t => {
+ const fetcher = new BlockFetcher({pool: new PeerPool()})
+ const blocks = [{header: {number: 1}}, {header: {number: 2}}]
+ t.deepEquals(fetcher.process({task: {count: 2}}, {blocks}), blocks, 'got results')
+ t.notOk(fetcher.process({task: {count: 2}}, {blocks: []}), 'bad results')
+ t.end()
+ })
+
+ t.test('should find a fetchable peer', async (t) => {
+ const pool = new PeerPool()
+ const fetcher = new BlockFetcher({pool})
+ td.when(fetcher.pool.idle(td.matchers.anything())).thenReturn('peer0')
+ t.equals(fetcher.peer(), 'peer0', 'found peer')
+ t.end()
+ })
+
+ t.test('should reset td', t => {
+ td.reset()
+ t.end()
+ })
+})
diff --git a/test/sync/fetcher/fetcher.js b/test/sync/fetcher/fetcher.js
new file mode 100644
index 0000000..ccbfb03
--- /dev/null
+++ b/test/sync/fetcher/fetcher.js
@@ -0,0 +1,58 @@
+const tape = require('tape-catch')
+const td = require('testdouble')
+const { defaultLogger } = require('../../../lib/logging')
+defaultLogger.silent = true
+
+tape('[Fetcher]', t => {
+ const Fetcher = require('../../../lib/sync/fetcher/fetcher')
+
+ t.test('should handle bad result', t => {
+ t.plan(2)
+ const fetcher = new Fetcher({pool: td.object()})
+ const job = {peer: {}, state: 'active'}
+ fetcher.running = true
+ fetcher.next = td.func()
+ fetcher.wait = td.func()
+ td.when(fetcher.wait()).thenResolve()
+ fetcher.success(job, undefined)
+ t.equals(fetcher.in.size(), 1, 'enqueued job')
+ setTimeout(() => t.ok(job.peer.idle, 'peer idled'), 10)
+ })
+
+ t.test('should handle failure', t => {
+ t.plan(2)
+ const fetcher = new Fetcher({pool: td.object()})
+ const job = {peer: {}, state: 'active'}
+ fetcher.running = true
+ fetcher.next = td.func()
+ fetcher.on('error', (err) => t.equals(err, 'err0', 'got error'))
+ fetcher.failure(job, 'err0')
+ t.equals(fetcher.in.size(), 1, 'enqueued job')
+ })
+
+ t.test('should handle expiration', t => {
+ t.plan(2)
+ const fetcher = new Fetcher({pool: td.object(), timeout: 5})
+ const job = {index: 0}
+ const peer = {idle: true}
+ fetcher.peer = td.func()
+ fetcher.request = td.func()
+ td.when(fetcher.peer()).thenReturn(peer)
+ td.when(fetcher.request(td.matchers.anything(), {idle: false}), {delay: 10}).thenReject('err0')
+ td.when(fetcher.pool.contains({idle: false})).thenReturn(true)
+ fetcher.in.insert(job)
+ fetcher._readableState = []
+ fetcher.running = true
+ fetcher.total = 10
+ fetcher.next()
+ setTimeout(() => {
+ t.deepEquals(job, {index: 0, peer: {idle: false}, state: 'expired'}, 'expired job')
+ t.equals(fetcher.in.size(), 1, 'enqueued job')
+ }, 20)
+ })
+
+ t.test('should reset td', t => {
+ td.reset()
+ t.end()
+ })
+})
diff --git a/test/sync/fetcher/headerfetcher.js b/test/sync/fetcher/headerfetcher.js
new file mode 100644
index 0000000..e410963
--- /dev/null
+++ b/test/sync/fetcher/headerfetcher.js
@@ -0,0 +1,36 @@
+const tape = require('tape-catch')
+const td = require('testdouble')
+// const BN = require('bn.js')
+const EventEmitter = require('events')
+const { defaultLogger } = require('../../../lib/logging')
+defaultLogger.silent = true
+
+tape('[HeaderFetcher]', t => {
+ class PeerPool extends EventEmitter {}
+ PeerPool.prototype.idle = td.func()
+ PeerPool.prototype.ban = td.func()
+ td.replace('../../../lib/net/peerpool', PeerPool)
+ const HeaderFetcher = require('../../../lib/sync/fetcher/headerfetcher')
+
+ t.test('should process', t => {
+ const fetcher = new HeaderFetcher({pool: new PeerPool(), flow: td.object()})
+ const headers = [{number: 1}, {number: 2}]
+ t.deepEquals(fetcher.process({task: {count: 2}, peer: 'peer0'}, {headers, bv: 1}), headers, 'got results')
+ t.notOk(fetcher.process({task: {count: 2}}, {headers: []}), 'bad results')
+ td.verify(fetcher.flow.handleReply('peer0', 1))
+ t.end()
+ })
+
+ t.test('should find a fetchable peer', async (t) => {
+ const pool = new PeerPool()
+ const fetcher = new HeaderFetcher({pool})
+ td.when(fetcher.pool.idle(td.matchers.anything())).thenReturn('peer0')
+ t.equals(fetcher.peer(), 'peer0', 'found peer')
+ t.end()
+ })
+
+ t.test('should reset td', t => {
+ td.reset()
+ t.end()
+ })
+})
diff --git a/test/sync/headerfetcher.js b/test/sync/headerfetcher.js
deleted file mode 100644
index 3cdb02c..0000000
--- a/test/sync/headerfetcher.js
+++ /dev/null
@@ -1,79 +0,0 @@
-const tape = require('tape-catch')
-const td = require('testdouble')
-const BN = require('bn.js')
-const EventEmitter = require('events')
-const { defaultLogger } = require('../../lib/logging')
-defaultLogger.silent = true
-
-tape('[HeaderFetcher]', t => {
- class PeerPool extends EventEmitter {}
- td.replace('../../lib/net/peerpool', PeerPool)
- const HeaderPool = td.constructor()
- HeaderPool.prototype.open = td.func()
- HeaderPool.prototype.add = td.func()
- td.when(HeaderPool.prototype.open()).thenResolve()
- const headers = [{number: 1}, {number: 2}]
- td.when(HeaderPool.prototype.add(headers)).thenReject(new Error('err0'))
- td.replace('../../lib/blockchain', { HeaderPool })
- const HeaderFetcher = require('../../lib/sync/headerfetcher')
- const ONE = new BN(1)
- const TWO = new BN(2)
-
- t.test('should order correctly', t => {
- const fetcher = new HeaderFetcher({pool: new PeerPool()})
- t.ok(fetcher.before({first: ONE}, {first: TWO}), 'ordered')
- t.notOk(fetcher.before({first: TWO}, {first: ONE}), 'not ordered')
- t.end()
- })
-
- t.test('should open', async (t) => {
- const fetcher = new HeaderFetcher({pool: new PeerPool()})
- t.notOk(fetcher.opened, 'not open')
- await fetcher.open()
- t.ok(fetcher.opened, 'open')
- t.end()
- })
-
- t.test('should fetch', async (t) => {
- const fetcher = new HeaderFetcher({
- interval: 1,
- pool: new PeerPool(),
- flow: td.object(),
- maxPerRequest: 128
- })
- const peer = {les: td.object()}
- td.when(fetcher.flow.maxRequestCount(peer, 'GetBlockHeaders')).thenReturn(0)
- t.equals(await fetcher.fetch({}, peer), false, 'reached request limit')
- td.when(fetcher.flow.maxRequestCount(peer, 'GetBlockHeaders')).thenReturn(1000)
- td.when(peer.les.getBlockHeaders({block: ONE, max: 2})).thenResolve([])
- td.when(peer.les.getBlockHeaders({block: ONE, max: 128})).thenResolve([])
- t.deepEquals(await fetcher.fetch({first: ONE, last: TWO}, peer), [], 'got headers')
- t.deepEquals(await fetcher.fetch({first: ONE, last: new BN(1000)}, peer), [], 'got max headers')
- t.end()
- })
-
- t.test('should process', async (t) => {
- const fetcher = new HeaderFetcher({pool: new PeerPool(), flow: td.object()})
- fetcher.running = true
- fetcher.add = td.func()
- fetcher.process({task: 'task'}, {headers: []})
- td.verify(fetcher.add('task'))
- fetcher.process({task: {last: TWO}}, {headers})
- setTimeout(() => {
- td.verify(fetcher.add({first: ONE, last: TWO}))
- t.pass('processed tasks')
- t.end()
- }, 10)
- })
-
- t.test('should check if peer fetchable', async (t) => {
- const fetcher = new HeaderFetcher({pool: new PeerPool(), sync: td.object()})
- t.ok(fetcher.fetchable({les: {status: {serveHeaders: 1}}}), 'fetchable')
- t.end()
- })
-
- t.test('should reset td', t => {
- td.reset()
- t.end()
- })
-})
diff --git a/test/sync/lightsync.js b/test/sync/lightsync.js
index 96eda72..1888595 100644
--- a/test/sync/lightsync.js
+++ b/test/sync/lightsync.js
@@ -8,107 +8,50 @@ defaultLogger.silent = true
tape('[LightSynchronizer]', t => {
class PeerPool extends EventEmitter {}
td.replace('../../lib/net/peerpool', PeerPool)
- const HeaderPool = td.constructor()
- HeaderPool.prototype.open = td.func()
- HeaderPool.prototype.add = td.func()
- td.when(HeaderPool.prototype.open()).thenResolve()
- td.when(HeaderPool.prototype.add('headers')).thenResolve()
- const headers = [{number: 1}, {number: 2}]
- td.when(HeaderPool.prototype.add(headers)).thenReject(new Error('err0'))
- td.replace('../../lib/blockchain', { HeaderPool })
class HeaderFetcher extends EventEmitter {}
- HeaderFetcher.prototype.add = td.func()
- HeaderFetcher.prototype.open = td.func()
- td.when(HeaderFetcher.prototype.open()).thenResolve()
- HeaderFetcher.prototype.start = function () {
- this.running = true
- this.emit('headers', 'headers')
- this.emit('headers', headers)
- this.emit('error', new Error('err1'), 'task', 'peer')
- setTimeout(() => this.emit('headers', headers), 20)
- }
- HeaderFetcher.prototype.stop = async function () { this.running = false }
- td.replace('../../lib/sync/headerfetcher', HeaderFetcher)
+ HeaderFetcher.prototype.fetch = td.func()
+ td.replace('../../lib/sync/fetcher', { HeaderFetcher })
const LightSynchronizer = require('../../lib/sync/lightsync')
t.test('should initialize correctly', async (t) => {
const pool = new PeerPool()
const sync = new LightSynchronizer({pool})
- sync.handle = td.func()
- pool.emit('added', { eth: true, inbound: false })
- pool.emit('message:les', 'msg0', 'peer0')
- td.verify(sync.handle('msg0', 'peer0'))
+ pool.emit('added', {les: {status: {serveHeaders: true}}})
t.equals(sync.type, 'light', 'light type')
t.end()
})
- t.test('should open', async (t) => {
- const sync = new LightSynchronizer({pool: new PeerPool()})
- sync.chain = {
- open: td.func(),
- headers: {
- height: '1',
- td: '10',
- latest: {hash: () => '1234567890'}
- }
- }
- sync.pool.open = td.func()
- td.when(sync.chain.open()).thenResolve()
- td.when(sync.pool.open()).thenResolve()
- await sync.open()
- t.pass('opened')
- t.end()
- })
-
- t.test('should find origin', async (t) => {
- t.plan(2)
+ t.test('should find best', async (t) => {
const sync = new LightSynchronizer({interval: 1, pool: new PeerPool()})
- sync.syncing = true
+ sync.running = true
sync.chain = {headers: {td: new BN(1)}}
- sync.pool = {peers: []}
const peers = [
{les: {status: {headTd: new BN(1), headNum: new BN(1), serveHeaders: 1}}, inbound: false},
{les: {status: {headTd: new BN(2), headNum: new BN(2), serveHeaders: 1}}, inbound: false}
]
- sync.origin().then(([best, height]) => {
- t.equals(best, peers[1], 'found best')
- t.equals(height.toNumber(), 2, 'correct height')
- })
- setTimeout(() => { sync.pool.peers = peers }, 2)
- setTimeout(async () => {
- peers.push({les: {status: {headTd: new BN(3), headNum: new BN(3), serveHeaders: 1}}, inbound: false})
- }, 100)
+ sync.pool = {peers}
+ sync.forceSync = true
+ t.equals(sync.best(), peers[1], 'found best')
+ t.end()
})
t.test('should sync', async (t) => {
t.plan(3)
const sync = new LightSynchronizer({interval: 1, pool: new PeerPool()})
- sync.origin = td.func()
- td.when(sync.origin()).thenResolve(['origin', new BN(2)])
+ sync.best = td.func()
+ sync.latest = td.func()
+ td.when(sync.best()).thenReturn({les: {status: {headNum: new BN(2)}}})
+ td.when(HeaderFetcher.prototype.fetch(), {delay: 20}).thenResolve()
sync.chain = {headers: {height: new BN(3)}}
- sync.once('synchronized', info => t.equals(info.count, 0, 'first > last'))
- await sync.sync()
+ t.notOk(await sync.sync(), 'local height > remote height')
sync.chain = {headers: {height: new BN(0)}}
- sync.once('synchronized', info => t.equals(info.count, 2, 'synched 2 headers'))
- setTimeout(() => sync.stop(), 10)
- sync.sync()
- process.nextTick(async () => {
- t.notOk(await sync.sync(), 'already syncing')
- })
- })
-
- t.test('should handle messages', async (t) => {
- const sync = new LightSynchronizer({interval: 1, pool: new PeerPool()})
- sync.chain = {open: td.func()}
- td.when(sync.chain.open()).thenResolve()
- sync.sync = td.func()
- await sync.handle({name: 'Announce', data: {headNumber: 2}})
- td.verify(sync.sync(2))
- await sync.handle({name: 'Unknown'})
- sync.on('error', err => t.equals(err, 'err0', 'got error'))
- td.when(sync.chain.open()).thenReject('err0')
- await sync.handle()
- t.end()
+ t.ok(await sync.sync(), 'local height < remote height')
+ td.when(HeaderFetcher.prototype.fetch()).thenReject('err0')
+ try {
+ await sync.sync()
+ } catch (err) {
+ t.equals(err, 'err0', 'got error')
+ }
})
t.test('should reset td', t => {
diff --git a/test/sync/sync.js b/test/sync/sync.js
index 793c361..dbb414f 100644
--- a/test/sync/sync.js
+++ b/test/sync/sync.js
@@ -7,37 +7,37 @@ defaultLogger.silent = true
tape('[Synchronizer]', t => {
class PeerPool extends EventEmitter {}
td.replace('../../lib/net/peerpool', PeerPool)
- const Synchronizer = require('../../lib/sync/sync')
-
- t.test('should sync', async (t) => {
- const pool = new PeerPool()
- const sync = new Synchronizer({pool})
- sync.fetch = td.func()
- td.when(sync.fetch(2)).thenResolve(2)
- sync.on('synchronized', info => {
- t.equals(info.count, 2, 'synchronized')
- t.end()
- })
- sync.sync(2)
- })
-
- t.test('should stop', async (t) => {
- const pool = new PeerPool()
- const sync = new Synchronizer({pool})
- sync.fetch = () => {
- return new Promise(resolve => {
- setTimeout(() => {
- resolve(sync.syncing ? 2 : 1)
- }, 100)
- })
- }
- sync.on('synchronized', info => {
- t.equals(info.count, 1, 'synchronized')
- t.end()
- })
- sync.sync(2)
- setTimeout(() => sync.stop(), 50)
- })
+ // const Synchronizer = require('../../lib/sync/sync')
+ //
+ // t.test('should sync', async (t) => {
+ // const pool = new PeerPool()
+ // const sync = new Synchronizer({pool})
+ // sync.fetch = td.func()
+ // td.when(sync.fetch(2)).thenResolve(2)
+ // sync.on('synchronized', info => {
+ // t.equals(info.count, 2, 'synchronized')
+ // t.end()
+ // })
+ // sync.sync(2)
+ // })
+ //
+ // t.test('should stop', async (t) => {
+ // const pool = new PeerPool()
+ // const sync = new Synchronizer({pool})
+ // sync.fetch = () => {
+ // return new Promise(resolve => {
+ // setTimeout(() => {
+ // resolve(sync.syncing ? 2 : 1)
+ // }, 100)
+ // })
+ // }
+ // sync.on('synchronized', info => {
+ // t.equals(info.count, 1, 'synchronized')
+ // t.end()
+ // })
+ // sync.sync(2)
+ // setTimeout(() => sync.stop(), 50)
+ // })
t.test('should reset td', t => {
td.reset()