From a86cd0430f127691176969756856891d6a1e3ef7 Mon Sep 17 00:00:00 2001
From: Manav Desai
Date: Wed, 2 Aug 2023 00:44:40 +0530
Subject: [PATCH] fix: headers-sync, refactor and test bip157

---
 bin/bcoin-cli                |  20 -----
 lib/blockchain/chain.js      |  31 ++-----
 lib/blockchain/chaindb.js    |  63 +-------------
 lib/blockchain/layout.js     |   1 -
 lib/client/node.js           |   5 --
 lib/indexer/filterindexer.js |  26 +-----
 lib/indexer/indexer.js       |   6 ++
 lib/net/peer.js              |   5 ++
 lib/net/pool.js              | 122 ++++++++++++++-------------
 lib/node/http.js             |  18 ----
 lib/node/neutrino.js         |   4 +-
 lib/node/node.js             |  22 -----
 lib/node/rpc.js              |  45 ----------
 lib/protocol/networks.js     |   2 +-
 test/neutrino-test.js        |  40 +++++++--
 test/node-rpc-test.js        |  10 ---
 test/p2p-bip157-test.js      | 100 ----------------------
 test/p2p-test.js             | 158 +++++++++++++++++++++++++----------
 18 files changed, 236 insertions(+), 442 deletions(-)
 delete mode 100644 test/p2p-bip157-test.js

diff --git a/bin/bcoin-cli b/bin/bcoin-cli
index 9ca39cdfe..b9136ab43 100755
--- a/bin/bcoin-cli
+++ b/bin/bcoin-cli
@@ -129,22 +129,6 @@ class CLI {
     this.log(filter);
   }
 
-  async getFilterHeader() {
-    let hash = this.config.str(0, '');
-
-    if (hash.length !== 64)
-      hash = parseInt(hash, 10);
-
-    const filterHeader = await this.client.getFilterHeader(hash);
-
-    if (!filterHeader) {
-      this.log('Filter header not found.');
-      return;
-    }
-
-    this.log(filterHeader);
-  }
-
   async estimateFee() {
     const blocks = this.config.uint(0, 1);
 
@@ -262,9 +246,6 @@ class CLI {
       case 'filter':
        await this.getFilter();
        break;
-      case 'filterheader':
-       await this.getFilterHeader();
-       break;
      case 'fee':
        await this.estimateFee();
        break;
@@ -282,7 +263,6 @@ class CLI {
    this.log(' $ coin [hash+index/address]: View coins.');
    this.log(' $ fee [target]: Estimate smart fee.');
    this.log(' $ filter [hash/height]: View filter.');
-    this.log(' $ filterheader [hash/height]: View filter header.');
    this.log(' $ header [hash/height]: View block header.');
    this.log(' $ info: Get server info.');
    this.log(' $ mempool: Get mempool snapshot.');
diff --git a/lib/blockchain/chain.js b/lib/blockchain/chain.js
index e0c3c5335..9cd0a312f 100644
--- a/lib/blockchain/chain.js
+++ b/lib/blockchain/chain.js
@@ -1791,24 +1791,6 @@ class Chain extends AsyncEmitter {
     return this.hasEntry(hash);
   }
 
-  async getCFHeaderHeight() {
-    return await this.db.getCFHeaderHeight();
-  }
-
-  async saveCFHeaderHeight(height) {
-    this.db.neutrinoState.headerHeight = height;
-    await this.db.saveNeutrinoState();
-  }
-
-  async getCFilterHeight() {
-    return await this.db.getCFilterHeight();
-  }
-
-  async saveCFilterHeight(height) {
-    this.db.neutrinoState.filterHeight = height;
-    await this.db.saveNeutrinoState();
-  }
-
   /**
    * Find the corresponding block entry by hash or height.
   * @param {Hash|Number} hash/height
@@ -2021,12 +2003,17 @@ class Chain extends AsyncEmitter {
     if (this.synced)
       return;
 
-    if (this.options.checkpoints)
+    if (this.options.checkpoints) {
       if (this.height < this.network.lastCheckpoint)
         return;
+    }
+
+    if (this.tip.time < util.now() - this.network.block.maxTipAge)
+      return;
 
     if (!this.hasChainwork())
       return;
+
     this.synced = true;
     this.emit('full');
   }
@@ -2629,7 +2616,6 @@ class ChainOptions {
     this.compression = true;
 
     this.spv = false;
-    this.neutrino = false;
     this.bip91 = false;
     this.bip148 = false;
     this.prune = false;
@@ -2676,11 +2662,6 @@ class ChainOptions {
       this.spv = options.spv;
     }
 
-    if (options.neutrino != null) {
-      assert(typeof options.neutrino === 'boolean');
-      this.neutrino = options.neutrino;
-    }
-
     if (options.prefix != null) {
       assert(typeof options.prefix === 'string');
       this.prefix = options.prefix;
diff --git a/lib/blockchain/chaindb.js b/lib/blockchain/chaindb.js
index c4d2e0671..cb91accaa 100644
--- a/lib/blockchain/chaindb.js
+++ b/lib/blockchain/chaindb.js
@@ -46,7 +46,6 @@ class ChainDB {
     this.state = new ChainState();
     this.pending = null;
     this.current = null;
-    this.neutrinoState = null;
 
     this.cacheHash = new LRU(this.options.entryCache, null, BufferMap);
     this.cacheHeight = new LRU(this.options.entryCache);
@@ -91,11 +90,6 @@ class ChainDB {
       this.logger.info('ChainDB successfully initialized.');
     }
 
-    if (this.options.neutrino) {
-      if (!this.neutrinoState)
-        this.neutrinoState = await this.getNeutrinoState();
-    }
-
     this.logger.info(
       'Chain State: hash=%h tx=%d coin=%d value=%s.',
       this.state.tip,
@@ -1007,7 +1001,7 @@ class ChainDB {
    */
 
   async getRawBlock(block) {
-    if (this.options.spv && !this.options.neutrino)
+    if (this.options.spv)
       return null;
 
     const hash = await this.getHash(block);
@@ -1676,39 +1670,6 @@ class ChainDB {
     b.put(layout.O.encode(), flags.toRaw());
     return b.write();
   }
-
-  /**
-   * Get Neutrino State
-   * @returns {Promise} - Returns neutrino state
-   */
-
-  async getNeutrinoState() {
-    const data = await this.db.get(layout.N.encode());
-    if (!data)
-      return new NeutrinoState();
-    return NeutrinoState.fromRaw(data);
-  }
-
-  async getCFHeaderHeight() {
-    const state = await this.getNeutrinoState();
-    return state.headerHeight;
-  }
-
-  async getCFilterHeight() {
-    const state = await this.getNeutrinoState();
-    return state.filterHeight;
-  }
-
-  /**
-   * Save Neutrino State
-   * @returns {void}
-   */
-  async saveNeutrinoState() {
-    const state = this.neutrinoState.toRaw();
-    const b = this.db.batch();
-    b.put(layout.N.encode(), state);
-    return b.write();
-  }
 }
 
 /**
@@ -1991,28 +1952,6 @@ function fromU32(num) {
   return data;
 }
 
-class NeutrinoState {
-  constructor() { // TODO: do we add support for multiple filters?
-    this.headerHeight = 0;
-    this.filterHeight = 0;
-  }
-
-  toRaw() {
-    const bw = bio.write(8);
-    bw.writeU32(this.headerHeight);
-    bw.writeU32(this.filterHeight);
-    return bw.render();
-  }
-
-  static fromRaw(data) {
-    const state = new NeutrinoState();
-    const br = bio.read(data);
-    state.headerHeight = br.readU32();
-    state.filterHeight = br.readU32();
-    return state;
-  }
-}
-
 /*
  * Expose
  */
diff --git a/lib/blockchain/layout.js b/lib/blockchain/layout.js
index 532ccb050..a3e96561b 100644
--- a/lib/blockchain/layout.js
+++ b/lib/blockchain/layout.js
@@ -34,7 +34,6 @@ const layout = {
   O: bdb.key('O'),
   R: bdb.key('R'),
   D: bdb.key('D'),
-  N: bdb.key('N'),
   F: bdb.key('H', ['hash256']),
   e: bdb.key('e', ['hash256']),
   h: bdb.key('h', ['hash256']),
diff --git a/lib/client/node.js b/lib/client/node.js
index 5df661dfa..dd717e4f2 100644
--- a/lib/client/node.js
+++ b/lib/client/node.js
@@ -169,11 +169,6 @@ class NodeClient extends Client {
     return this.get(`/filter/${block}`);
   }
 
-  getFilterHeader(block) {
-    assert(typeof block === 'string' || typeof block === 'number');
-    return this.get(`/filterheader/${block}`);
-  }
-
   getBlockPeer(hash) {
     return this.call('get block peer', hash);
   }
diff --git a/lib/indexer/filterindexer.js b/lib/indexer/filterindexer.js
index ae88af139..809ac1e33 100644
--- a/lib/indexer/filterindexer.js
+++ b/lib/indexer/filterindexer.js
@@ -85,27 +85,6 @@ class FilterIndexer extends Indexer {
     this.put(layout.f.encode(hash), gcsFilter.hash());
   }
 
-  /**
-   * save filter header
-   * @param {Hash} blockHash
-   * @param {Hash} filterHeader
-   * @param {Hash} filterHash
-   * @returns {Promise}
-   */
-
-  async saveFilterHeader(blockHash, filterHeader, filterHash) {
-    assert(blockHash);
-    assert(filterHeader);
-    assert(filterHash);
-
-    const filter = new Filter();
-    filter.header = filterHeader;
-
-    await this.blocks.writeFilter(blockHash, filter.toRaw(), this.filterType);
-    // console.log(layout.f.encode(blockHash));
-    this.put(layout.f.encode(blockHash), filterHash);
-  }
-
   /**
    * Save filter
    * @param {Hash} blockHash
@@ -114,8 +93,9 @@ class FilterIndexer extends Indexer {
    * @returns {Promise}
    */
 
-  async saveFilter(blockHash, basicFilter, filterHeader) {
+  async saveFilter(blockHash, blockHeight, basicFilter, filterHeader) {
     assert(blockHash);
+    assert(blockHeight);
     assert(basicFilter);
     assert(filterHeader);
 
@@ -124,8 +104,8 @@ class FilterIndexer extends Indexer {
     filter.header = filterHeader;
 
     await this.blocks.writeFilter(blockHash, filter.toRaw(), this.filterType);
-    // console.log(layout.f.encode(blockHash));
     this.put(layout.f.encode(blockHash), basicFilter.hash());
+    await super.syncHeight(blockHash, blockHeight);
   }
 
   /**
diff --git a/lib/indexer/indexer.js b/lib/indexer/indexer.js
index b052d6a97..98610fbdc 100644
--- a/lib/indexer/indexer.js
+++ b/lib/indexer/indexer.js
@@ -198,6 +198,12 @@ class Indexer extends EventEmitter {
     this.height = 0;
   }
 
+  async syncHeight(hash, height) {
+    const meta = new BlockMeta(hash, height);
+    await this._setTip(meta);
+    this.height = height;
+  }
+
   /**
    * Bind to chain events and save listeners for removal on close
    * @private
diff --git a/lib/net/peer.js b/lib/net/peer.js
index 154c83e52..49cc0ff2f 100644
--- a/lib/net/peer.js
+++ b/lib/net/peer.js
@@ -1680,6 +1680,11 @@ class Peer extends EventEmitter {
     this.compactWitness = packet.version === 2;
   }
 
+  sendSendHeaders() {
+    const packet = new packets.SendHeadersPacket();
+    this.send(packet);
+  }
+
   /**
    * Send `getheaders` to peer. Note that unlike
    * `getblocks`, `getheaders` can have a null locator.
diff --git a/lib/net/pool.js b/lib/net/pool.js
index 439f5d561..381e73d34 100644
--- a/lib/net/pool.js
+++ b/lib/net/pool.js
@@ -69,6 +69,7 @@ class Pool extends EventEmitter {
     this.connected = false;
     this.disconnecting = false;
     this.syncing = false;
+    this.filterSyncing = false;
     this.discovering = false;
     this.spvFilter = null;
     this.txFilter = null;
@@ -204,14 +205,7 @@ class Pool extends EventEmitter {
     return this.disconnect();
   }
 
-  /**
-   * Reset header chain.
-   */
-
-  resetChain() {
-    if (!this.options.checkpoints && !this.options.neutrino)
-      return;
-
+  resetHeadersChain() {
     if (!this.options.neutrino)
       this.checkpoints = false;
     this.headerTip = null;
@@ -219,12 +213,12 @@ class Pool extends EventEmitter {
     this.headerNext = null;
 
     const tip = this.chain.tip;
+
     if (this.options.neutrino) {
       this.headerChain.push(new HeaderEntry(tip.hash, tip.height));
-      this.cfHeaderChain = new List();
-      this.cfHeaderChain.push(new CFHeaderEntry(consensus.ZERO_HASH, 0));
       return;
     }
+
     if (tip.height < this.network.lastCheckpoint) {
       this.checkpoints = true;
       this.headerTip = this.getNextTip(tip.height);
@@ -235,6 +229,26 @@ class Pool extends EventEmitter {
     }
   }
 
+  resetCFHeadersChain() {
+    if (!this.options.neutrino)
+      return;
+
+    this.cfHeaderChain = new List();
+    this.cfHeaderChain.push(new CFHeaderEntry(consensus.ZERO_HASH, null, 0));
+  }
+
+  /**
+   * Reset header chain.
+   */
+
+  resetChain() {
+    if (!this.options.checkpoints && !this.options.neutrino)
+      return;
+
+    this.resetHeadersChain();
+    this.resetCFHeadersChain();
+  }
+
   /**
    * Connect to the network.
    * @method
@@ -661,10 +675,7 @@ class Pool extends EventEmitter {
       return;
 
     this.syncing = true;
-    if (this.options.neutrino) {
-      this.startHeadersSync();
-    } else
-      this.resync(false);
+    this.resync(false);
   }
 
   /**
@@ -723,12 +734,13 @@ class Pool extends EventEmitter {
    */
 
   async startFilterHeadersSync() {
+    this.filterSyncing = true;
    this.logger.info('Starting filter headers sync (%s).',
      this.chain.options.network);
 
    if (!this.opened || !this.connected)
      return;
 
-    const cFHeaderHeight = await this.chain.getCFHeaderHeight();
+    const cFHeaderHeight = this.cfHeaderChain.tail.height;
    const startHeight = cFHeaderHeight ? cFHeaderHeight + 1 : 1;
    const chainHeight = await this.chain.height;
@@ -753,7 +765,9 @@ class Pool extends EventEmitter {
    if (!this.opened || !this.connected)
      return;
 
-    const cFilterHeight = await this.chain.getCFilterHeight();
+    const indexer = this.getFilterIndexer(filtersByVal[common.FILTERS.BASIC]);
+
+    const cFilterHeight = indexer.height;
    const startHeight = cFilterHeight ? cFilterHeight + 1 : 1;
    const chainHeight = await this.chain.height;
@@ -907,8 +921,6 @@ class Pool extends EventEmitter {
    peer.blockTime = Date.now();
    if (this.options.neutrino) {
      peer.sendGetHeaders(locator);
-      if (!this.syncing)
-        this.startFilterHeadersSync();
      return true;
    }
    if (this.checkpoints) {
@@ -956,8 +968,8 @@ class Pool extends EventEmitter {
      if (items.length === common.MAX_INV)
        break;
    }
-
-    this.getBlock(peer, items);
+    if (!this.options.neutrino)
+      this.getBlock(peer, items);
  }
 
  /**
@@ -1392,6 +1404,10 @@ class Pool extends EventEmitter {
      peer.send(new packets.AddrPacket([addr]));
    }
 
+    if (this.options.neutrino) {
+      peer.sendSendHeaders();
+    }
+
    // We want compact blocks!
    if (this.options.compact)
      peer.sendCompact(this.options.blockMode);
 
@@ -1736,13 +1752,6 @@ class Pool extends EventEmitter {
    if (this.options.hasWitness() && !peer.hasWitness())
      return;
 
-    if (this.options.neutrino) {
-      const filterHeight = await this.chain.getCFilterHeight();
-      if (filterHeight === this.chain.height)
-        this.startSync();
-      return;
-    }
-
    // Request headers instead.
    if (this.checkpoints)
      return;
@@ -2181,17 +2190,13 @@ class Pool extends EventEmitter {
      basicFilter._hash = filterHash;
      const filterHeader = basicFilter.header(previousFilterHeader);
      const lastFilterHeader = this.cfHeaderChain.tail;
-      const cfHeaderEntry = new CFHeaderEntry(
-        filterHash, lastFilterHeader.height + 1);
-      this.cfHeaderChain.push(cfHeaderEntry);
      const blockHash = await this.chain.getHash(blockHeight);
-      const indexer = this.getFilterIndexer(filtersByVal[filterType]);
-      await indexer.saveFilterHeader(blockHash, filterHeader, filterHash);
+      const cfHeaderEntry = new CFHeaderEntry(blockHash,
+        filterHeader, lastFilterHeader.height + 1);
+      this.cfHeaderChain.push(cfHeaderEntry);
      previousFilterHeader = filterHeader;
-      await this.chain.saveCFHeaderHeight(blockHeight);
      blockHeight++;
-      const cFHeaderHeight = await this.chain.getCFHeaderHeight();
-      this.logger.info('CFHeaderHeight: %d', cFHeaderHeight);
+      this.logger.info('CFHeader height: %d', this.cfHeaderChain.tail.height);
    }
    if (this.headerChain.tail.height <= stopHeight)
      this.emit('cfheaders');
@@ -2213,6 +2218,7 @@ class Pool extends EventEmitter {
    }
 
    const blockHash = packet.blockHash;
+    this.cfHeaderChain.shift();
    const filterType = packet.filterType;
    const filter = packet.filterBytes;
 
@@ -2237,11 +2243,10 @@ class Pool extends EventEmitter {
    const gcsFilter = basicFilter.fromNBytes(filter);
 
    const indexer = this.getFilterIndexer(filtersByVal[filterType]);
-    const filterHeader = await indexer.getFilterHeader(blockHash);
-    await indexer.saveFilter(blockHash, gcsFilter, filterHeader);
+    const filterHeader = this.cfHeaderChain.head.header;
+    await indexer.saveFilter(blockHash, blockHeight, gcsFilter, filterHeader);
+    const cFilterHeight = await indexer.height;
 
-    await this.chain.saveCFilterHeight(blockHeight);
-    const cFilterHeight = await this.chain.getCFilterHeight();
    this.logger.info('CFilter height: %d', cFilterHeight);
    this.emit('cfilter', blockHash, gcsFilter);
    const startHeight = stopHeight + 1;
@@ -2271,6 +2276,7 @@ class Pool extends EventEmitter {
        return;
      }
    } else if (cFilterHeight === this.chain.height) {
+      this.filterSyncing = false;
      this.emit('cfilters');
    }
  }
@@ -2406,10 +2412,9 @@ class Pool extends EventEmitter {
    const headers = packet.items;
 
    if (!this.checkpoints && !this.options.neutrino)
-      // todo add support for checkpoints
      return;
 
-    if (!this.syncing)
+    if (!this.syncing || this.filterSyncing)
      return;
 
    if (!peer.loader)
@@ -2427,10 +2432,17 @@ class Pool extends EventEmitter {
 
    let checkpoint = false;
    let node = null;
+    let hash = null;
 
    for (const header of headers) {
+      hash = header.hash();
+
+      if (this.options.neutrino) {
+        await this._addBlock(peer, header, chainCommon.flags.VERIFY_POW);
+        continue;
+      }
+
      const last = this.headerChain.tail;
-      const hash = header.hash();
      const height = last.height + 1;
 
      if (!header.verify()) {
@@ -2452,8 +2464,7 @@ class Pool extends EventEmitter {
 
      node = new HeaderEntry(hash, height);
 
-      if (!this.options.neutrino && node.height === this.headerTip.height) {
-        // todo add support for checkpoints
+      if (node.height === this.headerTip.height) {
        if (!node.hash.equals(this.headerTip.hash)) {
          this.logger.warning(
            'Peer sent an invalid checkpoint (%s).',
@@ -2468,8 +2479,6 @@ class Pool extends EventEmitter {
      this.headerNext = node;
 
      this.headerChain.push(node);
-      if (this.options.neutrino)
-        await this._addBlock(peer, header, chainCommon.flags.VERIFY_POW);
    }
 
    this.logger.debug(
      'Received %d headers from peer (%s).',
      headers.length,
      peer.hostname());
 
+    this.emit('headers');
+
    // If we received a valid header
    // chain, consider this a "block".
    peer.blockTime = Date.now();
 
    // Request the blocks we just added.
-    if (checkpoint && !this.options.neutrino) {
+    if (checkpoint) {
      this.headerChain.shift();
      this.resolveHeaders(peer);
      return;
    }
 
    // Request more headers.
-    if (this.chain.synced)
-      return;
-    if (this.options.neutrino)
-      peer.sendGetHeaders([node.hash]);
-    else
-      peer.sendGetHeaders([node.hash], this.headerTip.hash);
+    if (this.checkpoints)
+      peer.sendGetHeaders([hash], this.headerTip.hash);
  }
 
  /**
@@ -2624,8 +2631,8 @@ class Pool extends EventEmitter {
    }
 
    this.logStatus(block);
-
-    await this.resolveChain(peer, hash);
+    if (!this.options.neutrino)
+      await this.resolveChain(peer, hash);
  }
 
  /**
@@ -4787,8 +4794,9 @@ class CFHeaderEntry {
   * @constructor
   */
 
-  constructor(hash, height) {
-    this.hash = hash;
+  constructor(blockHash, header, height) {
+    this.blockHash = blockHash;
+    this.header = header;
    this.height = height;
    this.prev = null;
    this.next = null;
diff --git a/lib/node/http.js b/lib/node/http.js
index 36ea3eb92..90b30134b 100644
--- a/lib/node/http.js
+++ b/lib/node/http.js
@@ -302,24 +302,6 @@ class HTTP extends Server {
      res.json(200, filter.toJSON());
    });
 
-    this.get('/filterheader/:block', async (req, res) => {
-      const valid = Validator.fromRequest(req);
-      const hash = valid.uintbrhash('block');
-
-      enforce(hash != null, 'Hash or height required.');
-
-      const filterName = valid.str(1, 'BASIC').toUpperCase();
-      const filterHeader = await this.node.
-        getBlockFilterHeader(hash, filterName);
-
-      if (!filterHeader) {
-        res.json(404);
-        return;
-      }
-
-      res.json(200, filterHeader.toJSON());
-    });
-
    // Mempool snapshot
    this.get('/mempool', async (req, res) => {
      enforce(this.mempool, 'No mempool available.');
diff --git a/lib/node/neutrino.js b/lib/node/neutrino.js
index 7e1c4b3ed..e7cb9c102 100644
--- a/lib/node/neutrino.js
+++ b/lib/node/neutrino.js
@@ -161,7 +161,7 @@ class Neutrino extends Node {
      this.emit('reset', tip);
    });
 
-    this.chain.on('full', async () => {
+    this.pool.on('headers', async () => {
      if (this.chain.height === 0)
        return;
      this.logger.info('Block Headers are fully synced');
@@ -177,7 +177,7 @@ class Neutrino extends Node {
 
    this.pool.on('cfilters', async () => {
      this.logger.info('Compact Filters are fully synced');
-      this.startSync();
+      this.pool.forceSync();
    });
 
    this.loadPlugins();
diff --git a/lib/node/node.js b/lib/node/node.js
index e2e20f453..39c30623f 100644
--- a/lib/node/node.js
+++ b/lib/node/node.js
@@ -435,28 +435,6 @@ class Node extends EventEmitter {
 
    return Indexer.getFilter(hash);
  }
-
-  /**
-   * Retrieve compact filter header by hash/height.
-   * @param {Hash | Number} hash
-   * @param {Number} type
-   * @returns {Promise} - Returns {@link Buffer}.
- */ - - async getBlockFilterHeader(hash, filterType) { - const Indexer = this.filterIndexers.get(filterType); - - if (!Indexer) - return null; - - if (typeof hash === 'number') - hash = await this.chain.getHash(hash); - - if (!hash) - return null; - - return Indexer.getFilterHeader(hash); - } } /* diff --git a/lib/node/rpc.js b/lib/node/rpc.js index bc1e93d44..ab67affb9 100644 --- a/lib/node/rpc.js +++ b/lib/node/rpc.js @@ -155,14 +155,11 @@ class RPC extends RPCBase { this.add('getblockchaininfo', this.getBlockchainInfo); this.add('getbestblockhash', this.getBestBlockHash); this.add('getblockcount', this.getBlockCount); - this.add('getfiltercount', this.getFilterCount); - this.add('getfilterheadercount', this.getFilterHeaderCount); this.add('getblock', this.getBlock); this.add('getblockbyheight', this.getBlockByHeight); this.add('getblockhash', this.getBlockHash); this.add('getblockheader', this.getBlockHeader); this.add('getblockfilter', this.getBlockFilter); - this.add('getblockfilterheader', this.getBlockFilterHeader); this.add('getchaintips', this.getChainTips); this.add('getdifficulty', this.getDifficulty); this.add('getmempoolancestors', this.getMempoolAncestors); @@ -632,22 +629,6 @@ class RPC extends RPCBase { return this.chain.tip.height; } - async getFilterCount(args, help) { - if (help || args.length !== 0) - throw new RPCError(errs.MISC_ERROR, 'getfiltercount'); - - const height = await this.chain.getCFilterHeight(); - return height; - } - - async getFilterHeaderCount(args, help) { - if (help || args.length !== 0) - throw new RPCError(errs.MISC_ERROR, 'getfilterheadercount'); - - const height = await this.chain.getCFHeaderHeight(); - return height; - } - async getBlock(args, help) { if (help || args.length < 1 || args.length > 3) throw new RPCError(errs.MISC_ERROR, 'getblock "hash" ( verbose )'); @@ -788,32 +769,6 @@ class RPC extends RPCBase { return filter.toJSON(); } - async getBlockFilterHeader(args, help) { - if (help || args.length < 1 || args.length > 2) { - throw new RPCError(errs.MISC_ERROR, - 'getblockfilterheader "hash" ( "type" )'); - } - - const valid = new Validator(args); - const hash = valid.brhash(0); - const filterName = valid.str(1, 'BASIC').toUpperCase(); - - const filterType = filters[filterName]; - - if (!hash) - throw new RPCError(errs.MISC_ERROR, 'Invalid block hash.'); - - if (!filterType) - throw new RPCError(errs.MISC_ERROR, 'Filter type not supported'); - - const filterHeader = await this.node.getBlockFilterHeader(hash, filterName); - - if (!filterHeader) - throw new RPCError(errs.MISC_ERROR, 'Block filter header not found.'); - - return filterHeader; - } - async getChainTips(args, help) { if (help || args.length !== 0) throw new RPCError(errs.MISC_ERROR, 'getchaintips'); diff --git a/lib/protocol/networks.js b/lib/protocol/networks.js index 8c2db9e8e..16e6bedf7 100644 --- a/lib/protocol/networks.js +++ b/lib/protocol/networks.js @@ -792,7 +792,7 @@ regtest.block = { bip66hash: null, pruneAfterHeight: 1000, keepBlocks: 10000, - maxTipAge: 24 * 60 * 60, + maxTipAge: 0xffffffff, slowHeight: 0 }; diff --git a/test/neutrino-test.js b/test/neutrino-test.js index 8053fd77b..f12d2b618 100644 --- a/test/neutrino-test.js +++ b/test/neutrino-test.js @@ -43,6 +43,8 @@ describe('Neutrino', function () { memory: true, port: 10000, httpPort: 20000, + logConsole: true, + logLevel: 'debug', neutrino: true, only: '127.0.0.1' }); @@ -65,19 +67,20 @@ describe('Neutrino', function () { it('should get new blocks headers-only', async () => { await mineBlocks(10); - 
+      await forValue(neutrinoNode.chain, 'height', fullNode.chain.height);
      assert.equal(neutrinoNode.chain.height, fullNode.chain.height);
    });
 
    it('should getcfheaders', async () => {
-      await new Promise(resolve => setTimeout(resolve, 400));
-      const headerHeight = await neutrinoNode.chain.getCFHeaderHeight();
+      await forValue(neutrinoNode.pool.cfHeaderChain.tail, 'height', neutrinoNode.chain.height);
+      const headerHeight = await neutrinoNode.pool.cfHeaderChain.tail.height;
      assert.equal(headerHeight, neutrinoNode.chain.height);
    });
 
    it('should getcfilters', async () => {
-      await new Promise(resolve => setTimeout(resolve, 400));
-      const filterHeight = await neutrinoNode.chain.getCFilterHeight();
+      const filterIndexer = neutrinoNode.filterIndexers.get('BASIC');
+      await forValue(filterIndexer, 'height', neutrinoNode.chain.height);
+      const filterHeight = filterIndexer.height;
      assert.equal(filterHeight, neutrinoNode.chain.height);
    });
 
@@ -85,11 +88,8 @@ describe('Neutrino', function () {
      const filterIndexer = neutrinoNode.filterIndexers.get('BASIC');
      for (let i = 0; i < neutrinoNode.chain.height; i++) {
        const hash = await neutrinoNode.chain.getHash(i);
-        const filterHeader = await filterIndexer.getFilterHeader(hash);
-        assert(filterHeader);
        const filter = await filterIndexer.getFilter(hash);
        assert(filter);
-        assert(filterHeader.equals(filter.header));
      }
    });
  });
@@ -141,8 +141,30 @@ describe('Neutrino', function () {
 
    it('should get new blocks headers-only', async () => {
      await mineBlocks(10);
-      await new Promise(resolve => setTimeout(resolve, 400));
+      await forValue(neutrinoNode.chain, 'height', fullNode.chain.height);
      assert.equal(neutrinoNode.chain.height, fullNode.chain.height);
    });
+
+    it('should getcfheaders', async () => {
+      await forValue(neutrinoNode.pool.cfHeaderChain.tail, 'height', neutrinoNode.chain.height);
+      const headerHeight = await neutrinoNode.pool.cfHeaderChain.tail.height;
+      assert.equal(headerHeight, neutrinoNode.chain.height);
+    });
+
+    it('should getcfilters', async () => {
+      const filterIndexer = neutrinoNode.filterIndexers.get('BASIC');
+      await forValue(filterIndexer, 'height', neutrinoNode.chain.height);
+      const filterHeight = filterIndexer.height;
+      assert.equal(filterHeight, neutrinoNode.chain.height);
+    });
+
+    it('should save filters correctly', async () => {
+      const filterIndexer = neutrinoNode.filterIndexers.get('BASIC');
+      for (let i = 0; i < neutrinoNode.chain.height; i++) {
+        const hash = await neutrinoNode.chain.getHash(i);
+        const filter = await filterIndexer.getFilter(hash);
+        assert(filter);
+      }
+    });
  });
});
diff --git a/test/node-rpc-test.js b/test/node-rpc-test.js
index b44fa0d27..cdef5b4dd 100644
--- a/test/node-rpc-test.js
+++ b/test/node-rpc-test.js
@@ -189,16 +189,6 @@ describe('RPC', function() {
    assert.strictEqual(expected.filter, info.filter);
  });
 
-  it('should rpc getblockfilterheader', async () => {
-    const hash = await nclient.execute('getblockhash', [node.chain.tip.height]);
-    const info = await nclient.execute('getblockfilterheader', [hash, 'BASIC']);
-    const indexer = node.filterIndexers.get('BASIC');
-    const filterHeader = await indexer.getFilterHeader(node.chain.tip.hash);
-    const expected = filterHeader.toJSON();
-
-    assert.deepStrictEqual(expected, info);
-  });
-
  describe('Blockchain', function () {
    it('should rpc getchaintips', async () => {
      const info = await nclient.execute('getchaintips', []);
diff --git a/test/p2p-bip157-test.js b/test/p2p-bip157-test.js
deleted file mode 100644
index b72c7175a..000000000
--- a/test/p2p-bip157-test.js
+++ /dev/null
@@ -1,100 +0,0 @@
-/* eslint-env mocha */
-/* eslint prefer-arrow-callback: "off" */
-
-'use strict';
-
-const assert = require('bsert');
-const FullNode = require('../lib/node/fullnode');
-const NeutrinoNode = require('../lib/node/neutrino');
-const {forValue} = require('./util/common');
-const {MAX_CFILTERS} = require('../lib/net/common');
-const packets = require('../lib/net/packets');
-
-describe('P2P', function () {
-  this.timeout(50000);
-
-  const node1 = new NeutrinoNode({
-    network: 'regtest',
-    memory: true,
-    port: 10000,
-    httpPort: 20000,
-    only: '127.0.0.1',
-    neutrino: true
-  });
-
-  const node2 = new FullNode({
-    network: 'regtest',
-    memory: true,
-    listen: true,
-    indexFilter: true,
-    bip157: true
-  });
-
-  let peer;
-  const nodePackets = {};
-
-  node1.pool.on('packet', (packet) => {
-    if (!nodePackets[packet.cmd])
-      nodePackets[packet.cmd] = [packet];
-    else
-      nodePackets[packet.cmd].push(packet);
-  });
-
-  async function mineBlocks(n) {
-    while (n) {
-      const block = await node2.miner.mineBlock();
-      await node2.chain.add(block);
-      await new Promise(resolve => setTimeout(resolve, 20));
-      n--;
-    }
-    await forValue(node1.chain, 'height', node2.chain.height);
-  }
-
-  before(async () => {
-    const waitForConnection = new Promise((resolve, reject) => {
-      node1.pool.once('peer open', async (peer) => {
-        resolve(peer);
-      });
-    });
-
-    await node1.open();
-    await node2.open();
-    await node1.connect();
-    await node2.connect();
-    node1.startSync();
-    node2.startSync();
-
-    // `peer` is node2, from node1's perspective.
-    // So peer.send() sends a packet from node1 to node2,
-    // and `nodePackets` catches the response packets that
-    // node2 sends back to node1.
-    peer = await waitForConnection;
-  });
-
-  after(async () => {
-    await node1.close();
-    await node2.close();
-  });
-
-  describe('BIP157', function () {
-    before(async () => {
-      // Do not exceed limit, including genesis block
-      await mineBlocks(MAX_CFILTERS - node1.chain.height - 1);
-    });
-
-    it('CFCheckpt', async () => {
-      nodePackets.cfcheckpt = [];
-
-      await mineBlocks(2);
-
-      const pkt = new packets.GetCFCheckptPacket(
-        0,
-        node1.chain.tip.hash
-      );
-
-      peer.send(pkt);
-      await forValue(nodePackets.cfcheckpt, 'length', 1);
-      assert.strictEqual(nodePackets.cfcheckpt[0].filterHeaders.length, 1);
-    });
-  });
-});
diff --git a/test/p2p-test.js b/test/p2p-test.js
index b9502550d..c9c67f35f 100644
--- a/test/p2p-test.js
+++ b/test/p2p-test.js
@@ -5,79 +5,153 @@
 const assert = require('bsert');
 const FullNode = require('../lib/node/fullnode');
+const NeutrinoNode = require('../lib/node/neutrino');
 const {forValue} = require('./util/common');
+const {MAX_CFILTERS} = require('../lib/net/common');
+const packets = require('../lib/net/packets');
 
 describe('P2P', function () {
-  this.timeout(5000);
-
-  const node1 = new FullNode({
-    network: 'regtest',
-    memory: true,
-    port: 10000,
-    httpPort: 20000,
-    only: '127.0.0.1'
-  });
+  this.timeout(50000);
 
  const node2 = new FullNode({
    network: 'regtest',
    memory: true,
    listen: true,
+    logConsole: true,
+    logLevel: 'debug',
    indexFilter: true,
    bip157: true
  });
 
  let peer;
-  const nodePackets = {};
 
-  node1.pool.on('packet', (packet) => {
-    if (!nodePackets[packet.cmd])
-      nodePackets[packet.cmd] = [packet];
-    else
-      nodePackets[packet.cmd].push(packet);
-  });
-
-  async function mineBlocks(n) {
+  async function mineBlocks(node, n) {
    while (n) {
      const block = await node2.miner.mineBlock();
      await node2.chain.add(block);
+      await new Promise(resolve => setTimeout(resolve, 20));
      n--;
    }
-    await forValue(node1.chain, 'height', node2.chain.height);
+    await forValue(node.chain, 'height', node2.chain.height);
  }
 
-  before(async () => {
-    const waitForConnection = new Promise((resolve, reject) => {
-      node1.pool.once('peer open', async (peer) => {
-        resolve(peer);
+  describe('BIP157', function () {
+    const node1 = new NeutrinoNode({
+      network: 'regtest',
+      memory: true,
+      port: 10000,
+      logConsole: true,
+      logLevel: 'debug',
+      httpPort: 20000,
+      only: '127.0.0.1',
+      neutrino: true
+    });
+
+    const nodePackets = {};
+
+    node1.pool.on('packet', (packet) => {
+      if (!nodePackets[packet.cmd])
+        nodePackets[packet.cmd] = [packet];
+      else
+        nodePackets[packet.cmd].push(packet);
+    });
+
+    before(async () => {
+      const waitForConnection = new Promise((resolve, reject) => {
+        node1.pool.once('peer open', async (peer) => {
+          resolve(peer);
+        });
      });
+
+      await node1.open();
+      await node2.open();
+      await node1.connect();
+      await node2.connect();
+      node1.startSync();
+      node2.startSync();
+
+      // `peer` is node2, from node1's perspective.
+      // So peer.send() sends a packet from node1 to node2,
+      // and `nodePackets` catches the response packets that
+      // node2 sends back to node1.
+      peer = await waitForConnection;
+      // Do not exceed limit, including genesis block
+      await mineBlocks(node1, MAX_CFILTERS - node1.chain.height - 1);
    });
 
-    await node1.open();
-    await node2.open();
-    await node1.connect();
-    await node2.connect();
-    node1.startSync();
-    node2.startSync();
-    await mineBlocks(1);
-
-    // `peer` is node2, from node1's perspective.
-    // So peer.send() sends a packet from node1 to node2,
-    // and `nodePackets` catches the response packets that
-    // node2 sends back to node1.
-    peer = await waitForConnection;
-  });
+    after(async () => {
+      await node1.close();
+      await node2.close();
+    });
 
-  after(async () => {
-    await node1.close();
-    await node2.close();
+    it('CFCheckpt', async () => {
+      nodePackets.cfcheckpt = [];
+
+      await mineBlocks(node1, 2);
+
+      const pkt = new packets.GetCFCheckptPacket(
+        0,
+        node1.chain.tip.hash
+      );
+
+      peer.send(pkt);
+      await forValue(nodePackets.cfcheckpt, 'length', 1);
+      assert.strictEqual(nodePackets.cfcheckpt[0].filterHeaders.length, 1);
+    });
  });
 
  describe('Compact Blocks', function () {
+    const node1 = new FullNode({
+      network: 'regtest',
+      memory: true,
+      port: 10000,
+      logConsole: true,
+      logLevel: 'debug',
+      httpPort: 20000,
+      only: '127.0.0.1'
+    });
+
+    const nodePackets = {};
+
+    node1.pool.on('packet', (packet) => {
+      if (!nodePackets[packet.cmd])
+        nodePackets[packet.cmd] = [packet];
+      else
+        nodePackets[packet.cmd].push(packet);
+    });
+
+    before(async () => {
+      const waitForConnection = new Promise((resolve, reject) => {
+        node1.pool.once('peer open', async (peer) => {
+          resolve(peer);
+        });
+      });
+
+      await node1.open();
+      await node2.open();
+      await node1.connect();
+      await node2.connect();
+      node1.startSync();
+      node2.startSync();
+      await mineBlocks(node1, 1);
+
+      // `peer` is node2, from node1's perspective.
+      // So peer.send() sends a packet from node1 to node2,
+      // and `nodePackets` catches the response packets that
+      // node2 sends back to node1.
+      peer = await waitForConnection;
+    });
+
+    after(async () => {
+      await node1.close();
+      await node2.close();
+    });
+
    it('should get compact block in low bandwidth mode', async () => {
      nodePackets.inv = [];
      nodePackets.cmpctblock = [];
 
-      await mineBlocks(1);
+      await mineBlocks(node1, 1);
 
      assert.strictEqual(nodePackets.inv.length, 1);
      assert.strictEqual(nodePackets.cmpctblock.length, 1);
@@ -90,7 +164,7 @@ describe('P2P', function () {
      peer.sendCompact(1);
      node1.pool.options.blockMode = 1;
 
-      await mineBlocks(1);
+      await mineBlocks(node1, 1);
 
      assert.strictEqual(nodePackets.inv.length, 0);
      assert.strictEqual(nodePackets.cmpctblock.length, 1);