diff --git a/bin/bcoin b/bin/bcoin index 0df74c54f..56842efa5 100755 --- a/bin/bcoin +++ b/bin/bcoin @@ -43,8 +43,11 @@ for arg in "$@"; do --daemon) daemon=1 ;; - --spv) - cmd='spvnode' + --neutrino) + cmd='neutrino' + ;; + --spv) + cmd='spvnode' ;; esac done diff --git a/bin/neutrino b/bin/neutrino new file mode 100755 index 000000000..d42ec71c0 --- /dev/null +++ b/bin/neutrino @@ -0,0 +1,48 @@ +#!/usr/bin/env node + +'use strict'; + +console.log('Starting bcoin'); +process.title = 'bcoin'; +const Neutrino = require('../lib/node/neutrino'); + +// Doubt in db +const node = new Neutrino({ + file: true, + argv: true, + env: true, + logFile: true, + logConsole: true, + logLevel: 'debug', + db: 'leveldb', + memory: false, + workers: true, + loader: require +}); + +if (!node.config.bool('no-wallet') && !node.has('walletdb')) { + const plugin = require('../lib/wallet/plugin'); + node.use(plugin); +} + +(async () => { + await node.ensure(); + await node.open(); + await node.connect(); + node.startSync(); + + node.on("full", () => { + console.log("Full node"); + }); +})().catch((err) => { + console.error(err.stack); + process.exit(1); +}); + +process.on('unhandledRejection', (err, promise) => { + throw err; +}); + +process.on('SIGINT', async () => { + await node.close(); +}); diff --git a/lib/bcoin-browser.js b/lib/bcoin-browser.js index 1f7254be8..8b2d46cb5 100644 --- a/lib/bcoin-browser.js +++ b/lib/bcoin-browser.js @@ -89,6 +89,7 @@ bcoin.node = require('./node'); bcoin.Node = require('./node/node'); bcoin.FullNode = require('./node/fullnode'); bcoin.SPVNode = require('./node/spvnode'); +bcoin.Neutrino = require('./node/neutrino'); // Primitives bcoin.primitives = require('./primitives'); diff --git a/lib/bcoin.js b/lib/bcoin.js index 3e795f6f6..3e4adb7b5 100644 --- a/lib/bcoin.js +++ b/lib/bcoin.js @@ -108,6 +108,7 @@ bcoin.define('node', './node'); bcoin.define('Node', './node/node'); bcoin.define('FullNode', './node/fullnode'); bcoin.define('SPVNode', './node/spvnode'); +bcoin.define('Neutrino', './node/neutrino'); // Primitives bcoin.define('primitives', './primitives'); diff --git a/lib/blockchain/chain.js b/lib/blockchain/chain.js index 9cd0a312f..562ab375f 100644 --- a/lib/blockchain/chain.js +++ b/lib/blockchain/chain.js @@ -64,6 +64,8 @@ class Chain extends AsyncEmitter { this.orphanMap = new BufferMap(); this.orphanPrev = new BufferMap(); + + this.getPrunedMap = new BufferMap(); } /** @@ -583,7 +585,7 @@ class Chain extends AsyncEmitter { // UASF is now enforced (bip148) (mainnet-only). if (this.options.bip148 && this.network === Network.main) { if (witness !== thresholdStates.LOCKED_IN - && witness !== thresholdStates.ACTIVE) { + && witness !== thresholdStates.ACTIVE) { // The BIP148 MTP check is nonsensical in // that it includes the _current_ entry's // timestamp. This requires some hackery, @@ -1368,7 +1370,17 @@ class Chain extends AsyncEmitter { } // Do we already have this block? 
- if (await this.hasEntry(hash)) { + const existingEntry = await this.getEntry(hash); + + if (existingEntry && this.getPrunedMap.has(hash)) { + block = block.toBlock(); + await this.db.updateNeutrinoSave(); + await this.db.save(existingEntry, block, new CoinView()); + await this.db.updateNeutrinoSave(); + return existingEntry; + } + + if (existingEntry) { this.logger.debug('Already have block: %h.', block.hash()); throw new VerifyError(block, 'duplicate', 'duplicate', 0); } @@ -1791,6 +1803,24 @@ class Chain extends AsyncEmitter { return this.hasEntry(hash); } + async getCFHeaderHeight() { + return await this.db.getCFHeaderHeight(); + } + + async saveCFHeaderHeight(height) { + this.db.neutrinoState.headerHeight = height; + await this.db.saveNeutrinoState(); + } + + async getCFilterHeight() { + return await this.db.getCFilterHeight(); + } + + async saveCFilterHeight(height) { + this.db.neutrinoState.filterHeight = height; + await this.db.saveNeutrinoState(); + } + /** * Find the corresponding block entry by hash or height. * @param {Hash|Number} hash/height @@ -1925,6 +1955,33 @@ class Chain extends AsyncEmitter { return this.db.getBlock(hash); } + async getBlockPeer(hash, filter) { + let block = await this.db.getBlock(hash); + if (block) { + const entry = await this.getEntry(hash); + assert(entry.hash.equals(hash)); + return block; + } else { + this.logger.warning('Block not found, attempting to download'); + + // Ensure hash not height + hash = await this.db.getHash(hash); + + const wait = new Promise((resolve, reject) => { + this.getPrunedMap.set(hash, resolve); + }); + + await this.emitAsync('getprunedblock', hash); + await wait; + block = await this.db.getBlock(hash); + const entry = await this.getEntry(hash); + assert(entry.hash.equals(hash)); + + this.emit('getblockpeer', entry, block); + return block; + } + } + /** * Retrieve a block from the database (not filled with coins). 
* @param {Hash} block @@ -2007,15 +2064,20 @@ class Chain extends AsyncEmitter { if (this.height < this.network.lastCheckpoint) return; } - - if (this.tip.time < util.now() - this.network.block.maxTipAge) + if (this.options.neutrino && this.tip.time < 1686851917) + // TODO change this later + return; + else if (!this.options.neutrino && + this.tip.time < util.now() - this.network.block.maxTipAge) return; if (!this.hasChainwork()) return; - this.synced = true; - this.emit('full'); + if (this.options.neutrino) + this.emit('headersFull'); + else + this.emit('full'); } /** @@ -2144,7 +2206,7 @@ class Chain extends AsyncEmitter { assert(hash); - for (;;) { + for (; ;) { const orphan = this.orphanMap.get(hash); if (!orphan) @@ -2221,8 +2283,8 @@ class Chain extends AsyncEmitter { return pow.bits; while (prev.height !== 0 - && prev.height % pow.retargetInterval !== 0 - && prev.bits === pow.bits) { + && prev.height % pow.retargetInterval !== 0 + && prev.bits === pow.bits) { const cache = this.getPrevCache(prev); if (cache) @@ -2455,7 +2517,7 @@ class Chain extends AsyncEmitter { const state = await this.getState(prev, deployment); if (state === thresholdStates.LOCKED_IN - || state === thresholdStates.STARTED) { + || state === thresholdStates.STARTED) { version |= 1 << deployment.bit; } } @@ -2616,6 +2678,7 @@ class ChainOptions { this.compression = true; this.spv = false; + this.neutrino = false; this.bip91 = false; this.bip148 = false; this.prune = false; @@ -2639,7 +2702,7 @@ class ChainOptions { fromOptions(options) { if (!options.spv) { assert(options.blocks && typeof options.blocks === 'object', - 'Chain requires a blockstore.'); + 'Chain requires a blockstore.'); } this.blocks = options.blocks; @@ -2662,6 +2725,11 @@ class ChainOptions { this.spv = options.spv; } + if (options.neutrino != null) { + assert(typeof options.neutrino === 'boolean'); + this.neutrino = options.neutrino; + } + if (options.prefix != null) { assert(typeof options.prefix === 'string'); this.prefix = options.prefix; diff --git a/lib/blockchain/chaindb.js b/lib/blockchain/chaindb.js index cb91accaa..d85d4e3b3 100644 --- a/lib/blockchain/chaindb.js +++ b/lib/blockchain/chaindb.js @@ -46,6 +46,8 @@ class ChainDB { this.state = new ChainState(); this.pending = null; this.current = null; + this.neutrinoState = null; + this.neutrinoSave = false; this.cacheHash = new LRU(this.options.entryCache, null, BufferMap); this.cacheHeight = new LRU(this.options.entryCache); @@ -90,6 +92,11 @@ class ChainDB { this.logger.info('ChainDB successfully initialized.'); } + if (this.options.neutrino) { + if (!this.neutrinoState) + this.neutrinoState = await this.getNeutrinoState(); + } + this.logger.info( 'Chain State: hash=%h tx=%d coin=%d value=%s.', this.state.tip, @@ -663,10 +670,10 @@ class ChainDB { const deployment = this.network.byBit(bit); if (deployment - && start === deployment.startTime - && timeout === deployment.timeout - && threshold === deployment.threshold - && window === deployment.window) { + && start === deployment.startTime + && timeout === deployment.timeout + && threshold === deployment.threshold + && window === deployment.window) { continue; } @@ -1001,7 +1008,7 @@ class ChainDB { */ async getRawBlock(block) { - if (this.options.spv) + if (this.options.spv && !this.options.neutrino) return null; const hash = await this.getHash(block); @@ -1150,6 +1157,14 @@ class ChainDB { * @returns {Promise} */ + async updateNeutrinoSave () { + if (this.neutrinoSave) { + this.neutrinoSave = false; + } else { + this.neutrinoSave = 
true; + } + } + async save(entry, block, view) { this.start(); try { @@ -1361,7 +1376,7 @@ class ChainDB { this.logger.debug('Resetting main chain to: %h', entry.hash); - for (;;) { + for (; ;) { this.start(); // Stop once we hit our target tip. @@ -1446,7 +1461,7 @@ class ChainDB { this.logger.debug('Removing alternate chain: %h.', tip.hash); - for (;;) { + for (; ;) { if (await this.isMainChain(tip)) break; @@ -1478,7 +1493,7 @@ class ChainDB { async saveBlock(entry, block, view) { const hash = block.hash(); - if (this.options.spv) + if (this.options.spv && !this.neutrinoSave) return; // Write actual block data. @@ -1670,6 +1685,39 @@ class ChainDB { b.put(layout.O.encode(), flags.toRaw()); return b.write(); } + + /** + * Get Neutrino State + * @returns {Promise} - Returns neutrino state + */ + + async getNeutrinoState() { + const data = await this.db.get(layout.N.encode()); + if (!data) + return new NeutrinoState(); + return NeutrinoState.fromRaw(data); + } + + async getCFHeaderHeight() { + const state = await this.getNeutrinoState(); + return state.headerHeight; + } + + async getCFilterHeight() { + const state = await this.getNeutrinoState(); + return state.filterHeight; + } + + /** + * Save Neutrino State + * @returns {void} + */ + async saveNeutrinoState() { + const state = this.neutrinoState.toRaw(); + const b = this.db.batch(); + b.put(layout.N.encode(), state); + return b.write(); + } } /** @@ -1952,6 +2000,28 @@ function fromU32(num) { return data; } +class NeutrinoState { + constructor() { // TODO: do we add support for multiple filters? + this.headerHeight = 0; + this.filterHeight = 0; + } + + toRaw() { + const bw = bio.write(8); + bw.writeU32(this.headerHeight); + bw.writeU32(this.filterHeight); + return bw.render(); + } + + static fromRaw(data) { + const state = new NeutrinoState(); + const br = bio.read(data); + state.headerHeight = br.readU32(); + state.filterHeight = br.readU32(); + return state; + } +} + /* * Expose */ diff --git a/lib/blockchain/layout.js b/lib/blockchain/layout.js index 337f95900..532ccb050 100644 --- a/lib/blockchain/layout.js +++ b/lib/blockchain/layout.js @@ -14,6 +14,7 @@ const bdb = require('bdb'); * O -> chain options * R -> tip hash * D -> versionbits deployments + * N -> neutrino state * e[hash] -> entry * h[hash] -> height * H[height] -> hash @@ -33,6 +34,8 @@ const layout = { O: bdb.key('O'), R: bdb.key('R'), D: bdb.key('D'), + N: bdb.key('N'), + F: bdb.key('H', ['hash256']), e: bdb.key('e', ['hash256']), h: bdb.key('h', ['hash256']), H: bdb.key('H', ['uint32']), diff --git a/lib/client/node.js b/lib/client/node.js index 50800cac1..96b5ac81f 100644 --- a/lib/client/node.js +++ b/lib/client/node.js @@ -169,6 +169,10 @@ class NodeClient extends Client { return this.get(`/filter/${filter}`); } + getBlockPeer(hash, filter) { + return this.call('get block peer', hash, filter); + } + /** * Add a transaction to the mempool and broadcast it. 
* @param {TX} tx diff --git a/lib/indexer/filterindexer.js b/lib/indexer/filterindexer.js index 97265253b..ae88af139 100644 --- a/lib/indexer/filterindexer.js +++ b/lib/indexer/filterindexer.js @@ -85,6 +85,49 @@ class FilterIndexer extends Indexer { this.put(layout.f.encode(hash), gcsFilter.hash()); } + /** + * save filter header + * @param {Hash} blockHash + * @param {Hash} filterHeader + * @param {Hash} filterHash + * @returns {Promise} + */ + + async saveFilterHeader(blockHash, filterHeader, filterHash) { + assert(blockHash); + assert(filterHeader); + assert(filterHash); + + const filter = new Filter(); + filter.header = filterHeader; + + await this.blocks.writeFilter(blockHash, filter.toRaw(), this.filterType); + // console.log(layout.f.encode(blockHash)); + this.put(layout.f.encode(blockHash), filterHash); + } + + /** + * Save filter + * @param {Hash} blockHash + * @param {BasicFilter} basicFilter + * @param {Hash} filterHeader + * @returns {Promise} + */ + + async saveFilter(blockHash, basicFilter, filterHeader) { + assert(blockHash); + assert(basicFilter); + assert(filterHeader); + + const filter = new Filter(); + filter.filter = basicFilter.toRaw(); + filter.header = filterHeader; + + await this.blocks.writeFilter(blockHash, filter.toRaw(), this.filterType); + // console.log(layout.f.encode(blockHash)); + this.put(layout.f.encode(blockHash), basicFilter.hash()); + } + /** * Prune compact filters. * @private diff --git a/lib/indexer/indexer.js b/lib/indexer/indexer.js index 97d85f76b..b052d6a97 100644 --- a/lib/indexer/indexer.js +++ b/lib/indexer/indexer.js @@ -50,6 +50,8 @@ class Indexer extends EventEmitter { this.blocks = this.options.blocks; this.chain = this.options.chain; + this.neutrino = this.options.neutrino; + this.closing = false; this.db = null; this.batch = null; @@ -292,6 +294,11 @@ class Indexer extends EventEmitter { */ async _syncBlock(meta, block, view) { + if (this.neutrino) { + if (!this.batch) + this.start(); + return true; + } // In the case that the next block is being // connected or the current block disconnected // use the block and view being passed directly, @@ -636,6 +643,8 @@ class IndexOptions { this.cacheSize = 16 << 20; this.compression = true; + this.neutrino = false; + if (options) this.fromOptions(options); } @@ -697,6 +706,11 @@ class IndexOptions { this.compression = options.compression; } + if (options.neutrino != null) { + assert(typeof options.neutrino === 'boolean'); + this.neutrino = options.neutrino; + } + return this; } diff --git a/lib/net/peer.js b/lib/net/peer.js index 2271e7896..b9f1e4090 100644 --- a/lib/net/peer.js +++ b/lib/net/peer.js @@ -1009,6 +1009,12 @@ class Peer extends EventEmitter { case packetTypes.GETHEADERS: this.request(packetTypes.HEADERS, timeout * 2); break; + case packetTypes.GETCFHEADERS: + this.request(packetTypes.CFHEADERS, timeout); + break; + case packetTypes.GETCFILTERS: + this.request(packetTypes.CFILTER, timeout); + break; case packetTypes.GETDATA: this.request(packetTypes.DATA, timeout * 2); break; @@ -1449,12 +1455,18 @@ class Peer extends EventEmitter { if (!(this.services & services.NETWORK)) throw new Error('Peer does not support network services.'); + if (this.options.neutrino) { + if (!(this.services & services.NODE_COMPACT_FILTERS)) { + throw new Error('Peer does not support Compact Filters.'); + } + } + if (this.options.headers) { if (this.version < common.HEADERS_VERSION) throw new Error('Peer does not support getheaders.'); } - if (this.options.spv) { + if (this.options.spv && 
!this.options.neutrino) { if (!(this.services & services.BLOOM)) throw new Error('Peer does not support BIP37.'); @@ -1745,6 +1757,26 @@ class Peer extends EventEmitter { this.send(packet); } + /** + * @param {Number} filterType - `0` = basic + * @param {Number} startHeight - Height to start at. + * @param {Hash} stopHash - Hash to stop at. + * @returns {void} + * @description Send `getcfilters` to peer. + */ + sendGetCFilters(filterType, startHeight, stopHash) { + const packet = new packets.GetCFiltersPacket( + filterType, + startHeight, + stopHash); + + this.logger.debug( + 'Sending getcfilters (type=%d, startHeight=%d, stopHash=%h).', + filterType, startHeight, stopHash); + + this.send(packet); + } + /** * Send `cfheaders` to peer. * @param {Number} filterType @@ -1767,6 +1799,27 @@ class Peer extends EventEmitter { this.send(packet); } + /** + * @param {Number} filterType + * @param {Number} startHeight + * @param {Hash} stopHash + * @returns {void} + * @description Send `getcfheaders` to peer. + */ + + sendGetCFHeaders(filterType, startHeight, stopHash) { + const packet = new packets.GetCFHeadersPacket( + filterType, + startHeight, + stopHash); + + this.logger.debug( + 'Sending getcfheaders (type=%d, start=%h, stop=%h).', + filterType, startHeight, stopHash); + + this.send(packet); + } + /** * send `cfcheckpt` to peer. * @param {Number} filterType @@ -1787,6 +1840,25 @@ class Peer extends EventEmitter { this.send(packet); } + /** + * Send `getcfcheckpt` to peer. + * @param {Number} filterType + * @param {Hash} stopHash + * @returns {void} + */ + + sendGetCFCheckpt(filterType, stopHash) { + const packet = new packets.GetCFCheckptPacket( + filterType, + stopHash); + + this.logger.debug( + 'Sending getcfcheckpt (type=%d, stop=%h).', + filterType, stopHash); + + this.send(packet); + } + /** * Send `mempool` to peer. 
*/ @@ -2080,6 +2152,7 @@ class PeerOptions { this.agent = common.USER_AGENT; this.noRelay = false; this.spv = false; + this.neutrino = false; this.compact = false; this.headers = false; this.banScore = common.BAN_SCORE; @@ -2143,6 +2216,11 @@ class PeerOptions { this.spv = options.spv; } + if (options.neutrino != null) { + assert(typeof options.neutrino === 'boolean'); + this.neutrino = options.neutrino; + } + if (options.compact != null) { assert(typeof options.compact === 'boolean'); this.compact = options.compact; diff --git a/lib/net/pool.js b/lib/net/pool.js index 234b23bc2..a400c8933 100644 --- a/lib/net/pool.js +++ b/lib/net/pool.js @@ -35,6 +35,7 @@ const packetTypes = packets.types; const scores = HostList.scores; const {inspectSymbol} = require('../utils'); const {consensus} = require('../protocol'); +const BasicFilter = require('../golomb/basicFilter'); /** * Pool @@ -68,6 +69,7 @@ class Pool extends EventEmitter { this.connected = false; this.disconnecting = false; this.syncing = false; + this.filterSyncing = false; this.discovering = false; this.spvFilter = null; this.txFilter = null; @@ -79,6 +81,7 @@ class Pool extends EventEmitter { this.pendingRefill = null; this.checkpoints = false; + this.neutrino = this.options.neutrino; this.headerChain = new List(); this.headerNext = null; this.headerTip = null; @@ -87,6 +90,12 @@ class Pool extends EventEmitter { this.hosts = new HostList(this.options); this.id = 0; + this.getcfheadersFilterType = null; + this.getcfheadersStopHash = null; + this.getcfiltersFilterType = null; + this.getcfiltersStartHeight = null; + this.getcfiltersStopHash = null; + if (this.options.spv) { this.spvFilter = BloomFilter.fromRate( 20000, 0.001, BloomFilter.flags.ALL); @@ -141,6 +150,16 @@ class Pool extends EventEmitter { this.handleBadOrphan('block', err, id); }); + this.chain.on('getprunedblock', async (hash) => { + // Find the first peer with a completed handshake + for (let peer = this.peers.head(); peer; peer = peer.next) { + if (!peer.handshake) + continue; + + await this.getBlock(peer, [hash]); + } + }); + if (this.mempool) { this.mempool.on('tx', (tx) => { this.emit('tx', tx); @@ -204,16 +223,22 @@ class Pool extends EventEmitter { */ resetChain() { - if (!this.options.checkpoints) + if (!this.options.checkpoints && !this.options.neutrino) return; - this.checkpoints = false; + if (!this.options.neutrino) + this.checkpoints = false; this.headerTip = null; this.headerChain.reset(); this.headerNext = null; const tip = this.chain.tip; - + if (this.options.neutrino) { + this.headerChain.push(new HeaderEntry(tip.hash, tip.height)); + this.cfHeaderChain = new List(); + this.cfHeaderChain.push(new CFHeaderEntry(consensus.ZERO_HASH, 0)); + return; + } if (tip.height < this.network.lastCheckpoint) { this.checkpoints = true; this.headerTip = this.getNextTip(tip.height); @@ -635,7 +660,6 @@ class Pool extends EventEmitter { peer.loader = true; this.peers.load = peer; - this.sendSync(peer); this.emit('loader', peer); @@ -650,7 +674,10 @@ class Pool extends EventEmitter { return; this.syncing = true; - this.resync(false); + if (this.options.neutrino) { + this.startHeadersSync(); + } else + this.resync(false); } /** @@ -704,6 +731,85 @@ class Pool extends EventEmitter { this.compactBlocks.clear(); } + /** + * Start the filters headers sync. 
+ */ + + async startFilterHeadersSync() { + this.logger.info('Starting filter headers sync (%s).', + this.chain.options.network); + if (!this.opened || !this.connected) + return; + + this.filterSyncing = true; + const cFHeaderHeight = await this.chain.getCFHeaderHeight(); + const startHeight = cFHeaderHeight + ? cFHeaderHeight + 1 : 1; + const chainHeight = await this.chain.height; + const stopHeight = chainHeight - startHeight + 1 > 2000 + ? 2000 : chainHeight; + const stopHash = await this.chain.getHash(stopHeight); + this.getcfheadersFilterType = common.FILTERS.BASIC; + this.getcfheadersStopHash = stopHash; + await this.peers.load.sendGetCFHeaders( + common.FILTERS.BASIC, + startHeight, + stopHash); + } + + /** + * Start the filters sync. + */ + + async startFilterSync() { + this.logger.info('Starting filter sync (%s).', + this.chain.options.network); + if (!this.opened || !this.connected) + return; + + this.filterSyncing = true; + const cFilterHeight = await this.chain.getCFilterHeight(); + const startHeight = cFilterHeight + ? cFilterHeight + 1 : 1; + const chainHeight = await this.chain.height; + const stopHeight = chainHeight - startHeight + 1 > 1000 + ? 1000 : chainHeight; + const stopHash = await this.chain.getHash(stopHeight); + this.getcfiltersFilterType = common.FILTERS.BASIC; + this.getcfiltersStartHeight = startHeight; + this.getcfiltersStopHash = stopHash; + await this.peers.load.sendGetCFilters( + common.FILTERS.BASIC, + startHeight, + stopHash); + } + + /** + * Start the headers sync using getHeaders messages. + * @private + * @return {Promise} + */ + + async startHeadersSync() { + if (!this.syncing) + return; + let locator; + try { + locator = await this.chain.getLocator(); + } catch (e) { + this.emit('error', e); + return; + } + + const peer = this.peers.load; + if (!peer) { + this.logger.info('No loader peer.'); + return; + } + this.chain.synced = false; + peer.sendGetHeaders(locator); + } + /** * Send a sync to each peer. * @private @@ -794,6 +900,17 @@ class Pool extends EventEmitter { return this.sendLocator(locator, peer); } + // /** + // * Sync the filter headers from peer. + // * @method + // * @param {Peer} peer + // * @returns {void} + // */ + // + // syncCompactFiltersCheckPt(peer) { + // peer.sendGetCFCheckpt(common.FILTERS.BASIC, this.chain.tip.hash); + // } + /** * Send a chain locator and start syncing from peer. * @method @@ -814,7 +931,12 @@ class Pool extends EventEmitter { peer.syncing = true; peer.blockTime = Date.now(); - + if (this.options.neutrino) { + peer.sendGetHeaders(locator); + if (!this.syncing) + this.startFilterHeadersSync(); + return true; + } if (this.checkpoints) { peer.sendGetHeaders(locator, this.headerTip.hash); return true; @@ -1194,6 +1316,12 @@ class Pool extends EventEmitter { case packetTypes.GETCFCHECKPT: await this.handleGetCFCheckpt(peer, packet); break; + case packetTypes.CFCHECKPT: + await this.handleCFCheckpt(peer, packet); + break; + case packetTypes.CFHEADERS: + await this.handleCFHeaders(peer, packet); + break; case packetTypes.GETBLOCKS: await this.handleGetBlocks(peer, packet); break; @@ -1246,8 +1374,8 @@ class Pool extends EventEmitter { await this.handleBlockTxn(peer, packet); break; case packetTypes.CFILTER: - case packetTypes.CFHEADERS: - case packetTypes.CFCHECKPT: + await this.handleCFilters(peer, packet); + break; case packetTypes.UNKNOWN: await this.handleUnknown(peer, packet); break; @@ -1291,7 +1419,7 @@ class Pool extends EventEmitter { } // We want compact blocks! 
- if (this.options.compact) + if (this.options.compact && !this.options.neutrino) peer.sendCompact(this.options.blockMode); // Find some more peers. @@ -1634,6 +1762,11 @@ class Pool extends EventEmitter { if (this.options.hasWitness() && !peer.hasWitness()) return; + if (this.neutrino) { + this.startSync(); + return; + } + // Request headers instead. if (this.checkpoints) return; @@ -1916,7 +2049,7 @@ class Pool extends EventEmitter { if (!stopHeight) return; - if (stopHeight - packet.startHeight >= common.MAX_CFILTERS) + if (stopHeight - packet.startHeight > common.MAX_CFILTERS) return; const indexer = this.getFilterIndexer(filtersByVal[packet.filterType]); @@ -2012,6 +2145,147 @@ class Pool extends EventEmitter { peer.sendCFCheckpt(packet.filterType, packet.stopHash, filterHeaders); } + /** + * Handle peer `CFCheckpt` packet. + * @method + * @private + * @param {Peer} peer + * @param {CFCheckptPacket} packet + */ + + async handleCFCheckpt(peer, packet) { + if (!this.options.neutrino) { + peer.ban(); + peer.destroy(); + } + } + + /** + * Handle peer `CFHeaders` packet. + * @method + * @private + * @param {Peer} peer - Sender. + * @param {CFHeadersPacket} packet - Packet to handle. + * @returns {void} + */ + async handleCFHeaders(peer, packet) { + this.logger.info('Received CFHeaders packet from %s', peer.hostname()); + if (!this.options.neutrino) { + peer.ban(); + peer.destroy(); + return; + } + + const filterType = packet.filterType; + + if (filterType !== this.getcfheadersFilterType) { + peer.ban(); + peer.destroy(); + return; + } + + const stopHash = packet.stopHash; + assert(stopHash.equals(this.getcfheadersStopHash)); + let previousFilterHeader = packet.previousFilterHeader; + const filterHashes = packet.filterHashes; + let blockHeight = await this.chain.getHeight(stopHash) + - filterHashes.length + 1; + const stopHeight = await this.chain.getHeight(stopHash); + for (const filterHash of filterHashes) { + assert(blockHeight <= stopHeight); + const basicFilter = new BasicFilter(); + basicFilter._hash = filterHash; + const filterHeader = basicFilter.header(previousFilterHeader); + const lastFilterHeader = this.cfHeaderChain.tail; + const cfHeaderEntry = new CFHeaderEntry( + filterHash, lastFilterHeader.height + 1); + this.cfHeaderChain.push(cfHeaderEntry); + const blockHash = await this.chain.getHash(blockHeight); + const indexer = this.getFilterIndexer(filtersByVal[filterType]); + await indexer.saveFilterHeader(blockHash, filterHeader, filterHash); + previousFilterHeader = filterHeader; + await this.chain.saveCFHeaderHeight(blockHeight); + blockHeight++; + const cFHeaderHeight = await this.chain.getCFHeaderHeight(); + this.logger.info('CFHeaderHeight: %d', cFHeaderHeight); + } + if (this.headerChain.tail.height <= stopHeight) + this.emit('cfheaders'); + else { + const nextStopHeight = stopHeight + 2000 < this.chain.height + ? 
stopHeight + 2000 : this.chain.height; + const nextStopHash = await this.chain.getHash(nextStopHeight); + this.getcfheadersStopHash = nextStopHash; + peer.sendGetCFHeaders(filterType, stopHeight + 1, nextStopHash); + } + } + + async handleCFilters(peer, packet) { + this.logger.info('Received CFilter packet from %s', peer.hostname()); + if (!this.options.neutrino) { + peer.ban(); + peer.destroy(); + return; + } + + const blockHash = packet.blockHash; + const filterType = packet.filterType; + const filter = packet.filterBytes; + + if (filterType !== this.getcfheadersFilterType) { + peer.ban(); + peer.destroy(); + return; + } + + const blockHeight = await this.chain.getHeight(blockHash); + const stopHeight = await this.chain.getHeight(this.getcfiltersStopHash); + + assert(blockHeight >= this.getcfiltersStartHeight + && blockHeight <= stopHeight); + + const basicFilter = new BasicFilter(); + const gcsFilter = basicFilter.fromNBytes(filter); + + const indexer = this.getFilterIndexer(filtersByVal[filterType]); + const filterHeader = await indexer.getFilterHeader(blockHash); + await indexer.saveFilter(blockHash, gcsFilter, filterHeader); + + await this.chain.saveCFilterHeight(blockHeight); + const cFilterHeight = await this.chain.getCFilterHeight(); + this.logger.info('CFilter height: %d', cFilterHeight); + this.emit('cfilter', blockHash, gcsFilter); + const startHeight = stopHeight + 1; + let nextStopHeight; + if (cFilterHeight === stopHeight + && stopHeight < this.chain.height) { + if (startHeight + 1000 < this.chain.height) { + nextStopHeight = stopHeight + 1000; + const stopHash = await this.chain.getHash(nextStopHeight); + this.getcfiltersStartHeight = startHeight; + this.getcfiltersStopHash = stopHash; + this.peers.load.sendGetCFilters( + common.FILTERS.BASIC, + startHeight, + stopHash + ); + } else { + nextStopHeight = this.chain.height; + const stopHash = await this.chain.getHash(nextStopHeight); + this.getcfiltersStartHeight = startHeight; + this.getcfiltersStopHash = stopHash; + this.peers.load.sendGetCFilters( + common.FILTERS.BASIC, + startHeight, + stopHash + ); + return; + } + } else if (cFilterHeight === this.chain.height) { + this.logger.info('CFilters sync complete'); + } + } + /** * Handle `getblocks` packet. * @method @@ -2027,6 +2301,9 @@ class Pool extends EventEmitter { if (this.options.selfish) return; + if (this.options.neutrino) + return; + if (this.chain.options.spv) return; @@ -2139,7 +2416,8 @@ class Pool extends EventEmitter { async _handleHeaders(peer, packet) { const headers = packet.items; - if (!this.checkpoints) + if (!this.checkpoints && !this.options.neutrino) + // todo add support for checkpoints return; if (!this.syncing) @@ -2179,13 +2457,14 @@ class Pool extends EventEmitter { this.logger.warning( 'Peer sent a bad header chain (%s).', peer.hostname()); - peer.destroy(); + peer.increaseBan(10); return; } node = new HeaderEntry(hash, height); - if (node.height === this.headerTip.height) { + if (!this.options.neutrino && node.height === this.headerTip.height) { + // todo add support for checkpoints if (!node.hash.equals(this.headerTip.hash)) { this.logger.warning( 'Peer sent an invalid checkpoint (%s).', @@ -2200,6 +2479,8 @@ class Pool extends EventEmitter { this.headerNext = node; this.headerChain.push(node); + if (this.options.neutrino) + await this._addBlock(peer, header, chainCommon.flags.VERIFY_POW); } this.logger.debug( @@ -2212,14 +2493,19 @@ class Pool extends EventEmitter { peer.blockTime = Date.now(); // Request the blocks we just added. 
- if (checkpoint) { + if (checkpoint && !this.options.neutrino) { this.headerChain.shift(); this.resolveHeaders(peer); return; } // Request more headers. - peer.sendGetHeaders([node.hash], this.headerTip.hash); + if (this.chain.synced) + return; + if (this.options.neutrino) + peer.sendGetHeaders([node.hash]); + else + peer.sendGetHeaders([node.hash], this.headerTip.hash); } /** @@ -2257,6 +2543,11 @@ class Pool extends EventEmitter { await this.addBlock(peer, packet.block, flags); } + async handleFilter(peer, packet) { + const flags = chainCommon.flags.DEFAULT_FLAGS; + await this.addFilter(peer, packet.filter, flags); + } + /** * Attempt to add block to chain. * @method @@ -2293,7 +2584,7 @@ class Pool extends EventEmitter { const hash = block.hash(); - if (!this.resolveBlock(peer, hash)) { + if (!this.options.neutrino && !this.resolveBlock(peer, hash)) { this.logger.warning( 'Received unrequested block: %h (%s).', block.hash(), peer.hostname()); @@ -2316,6 +2607,14 @@ class Pool extends EventEmitter { } // Block was orphaned. + + const resolve = this.chain.getPrunedMap.get(hash); + if (resolve) { + this.logger.warning('Received pruned block by special request'); + this.chain.getPrunedMap.delete(hash); + resolve(); + } + if (!entry) { if (this.checkpoints) { this.logger.warning( @@ -2353,6 +2652,20 @@ class Pool extends EventEmitter { await this.resolveChain(peer, hash); } + async addFilter(peer, filter, flags) { + const hash = filter.hash(); + const unlock = await this.locker.lock(hash); + try { + return await this._addFilter(peer, filter, flags); + } finally { + unlock(); + } + } + + async _addFilter(peer, filter, flags) { + + } + /** * Resolve header chain. * @method @@ -3690,6 +4003,7 @@ class PoolOptions { this.prefix = null; this.checkpoints = true; this.spv = false; + this.neutrino = false; this.bip37 = false; this.bip157 = false; this.listen = false; @@ -3772,12 +4086,17 @@ class PoolOptions { if (options.spv != null) { assert(typeof options.spv === 'boolean'); - assert(options.spv === this.chain.options.spv); + // assert(options.spv === this.chain.options.spv); this.spv = options.spv; } else { this.spv = this.chain.options.spv; } + if (options.neutrino != null) { + assert(typeof options.neutrino === 'boolean'); + this.neutrino = options.neutrino; + } + if (options.bip37 != null) { assert(typeof options.bip37 === 'boolean'); this.bip37 = options.bip37; @@ -3953,6 +4272,12 @@ class PoolOptions { this.listen = false; } + if (this.neutrino) { + this.requiredServices |= common.services.NODE_COMPACT_FILTERS; + this.checkpoints = true; + this.compact = false; + } + if (this.selfish) { this.services &= ~common.services.NETWORK; this.bip37 = false; @@ -4494,6 +4819,20 @@ class HeaderEntry { } } +class CFHeaderEntry { + /** + * Create cfheader entry. + * @constructor + */ + + constructor(hash, height) { + this.hash = hash; + this.height = height; + this.prev = null; + this.next = null; + } +} + /* * Expose */ diff --git a/lib/node/fullnode.js b/lib/node/fullnode.js index cd373d3b9..035d62dea 100644 --- a/lib/node/fullnode.js +++ b/lib/node/fullnode.js @@ -647,7 +647,7 @@ class FullNode extends Node { } /** - * Retrieve compact filter by hash. + * Retrieve compact filter by hash/height. * @param {Hash | Number} hash * @param {Number} type * @returns {Promise} - Returns {@link Buffer}. @@ -667,6 +667,28 @@ class FullNode extends Node { return Indexer.getFilter(hash); } + + /** + * Retrieve compact filter by hash/height. 
+ * @param {Hash | Number} hash + * @param {Number} type + * @returns {Promise} - Returns {@link Buffer}. + */ + + async getBlockFilterHeader(hash, filterType) { + const Indexer = this.filterIndexers.get(filterType); + + if (!Indexer) + return null; + + if (typeof hash === 'number') + hash = await this.chain.getHash(hash); + + if (!hash) + return null; + + return Indexer.getFilterHeader(hash); + } } /* diff --git a/lib/node/http.js b/lib/node/http.js index 8448ec015..bed31bf20 100644 --- a/lib/node/http.js +++ b/lib/node/http.js @@ -498,6 +498,13 @@ class HTTP extends Server { return null; }); + socket.hook('get block peer', (...args) => { + const valid = new Validator(args); + const hash = valid.hash(0); + const filter = valid.buf(1); + return this.pool.getBlockPeer(hash, filter); + }); + socket.hook('estimate fee', (...args) => { const valid = new Validator(args); const blocks = valid.u32(0); diff --git a/lib/node/neutrino.js b/lib/node/neutrino.js new file mode 100644 index 000000000..b3570f69f --- /dev/null +++ b/lib/node/neutrino.js @@ -0,0 +1,324 @@ +/*! + * neutrino.js - spv node for bcoin + * Copyright (c) 2014-2015, Fedor Indutny (MIT License) + * Copyright (c) 2014-2017, Christopher Jeffrey (MIT License). + * https://github.com/bcoin-org/bcoin + */ + +'use strict'; + +const assert = require('bsert'); +const Chain = require('../blockchain/chain'); +const Pool = require('../net/pool'); +const Node = require('./node'); +const HTTP = require('./http'); +const RPC = require('./rpc'); +const blockstore = require('../blockstore'); +const FilterIndexer = require('../indexer/filterindexer'); + +/** + * Neutrino Node + * Create a neutrino node which only maintains + * a chain, a pool, and an http server. + * @alias module:node.Neutrino + * @extends Node + */ + +class Neutrino extends Node { + /** + * Create Neutrino node. + * @constructor + * @param {Object?} options + * @param {Buffer?} options.sslKey + * @param {Buffer?} options.sslCert + * @param {Number?} options.httpPort + * @param {String?} options.httpHost + */ + + constructor(options) { + super('bcoin', 'bcoin.conf', 'debug.log', options); + + this.opened = false; + + // SPV flag. + this.spv = false; + this.neutrino = true; + + // Instantiate block storage. 
+ this.blocks = blockstore.create({ + network: this.network, + logger: this.logger, + prefix: this.config.prefix, + cacheSize: this.config.mb('block-cache-size'), + memory: this.memory, + spv: this.spv, + neutrino: this.neutrino + }); + + this.chain = new Chain({ + blocks: this.blocks, + network: this.network, + logger: this.logger, + prefix: this.config.prefix, + memory: this.memory, + maxFiles: this.config.uint('max-files'), + cacheSize: this.config.mb('cache-size'), + entryCache: this.config.uint('entry-cache'), + forceFlags: this.config.bool('force-flags'), + checkpoints: this.config.bool('checkpoints'), + bip91: this.config.bool('bip91'), + bip148: this.config.bool('bip148'), + spv: true, + neutrino: this.neutrino + }); + + this.filterIndexers.set( + 'BASIC', + new FilterIndexer({ + network: this.network, + logger: this.logger, + blocks: this.blocks, + chain: this.chain, + memory: this.config.bool('memory'), + prefix: this.config.str('index-prefix', this.config.prefix), + filterType: 'BASIC', + neutrino: true + }) + ); + + this.pool = new Pool({ + network: this.network, + logger: this.logger, + chain: this.chain, + prefix: this.config.prefix, + checkpoints: true, + filterIndexers: this.filterIndexers, + proxy: this.config.str('proxy'), + onion: this.config.bool('onion'), + upnp: this.config.bool('upnp'), + seeds: this.config.array('seeds'), + nodes: this.config.array('nodes'), + only: this.config.array('only'), + maxOutbound: this.config.uint('max-outbound'), + createSocket: this.config.func('create-socket'), + memory: this.memory, + selfish: true, + listen: false, + neutrino: this.neutrino, + spv: this.spv + }); + + this.rpc = new RPC(this); + + this.http = new HTTP({ + network: this.network, + logger: this.logger, + node: this, + prefix: this.config.prefix, + ssl: this.config.bool('ssl'), + keyFile: this.config.path('ssl-key'), + certFile: this.config.path('ssl-cert'), + host: this.config.str('http-host'), + port: this.config.uint('http-port'), + apiKey: this.config.str('api-key'), + noAuth: this.config.bool('no-auth'), + cors: this.config.bool('cors') + }); + + this.init(); + } + + /** + * Initialize the node. + * @private + */ + + init() { + console.log('Initializing Neutrino Node.'); + // Bind to errors + this.chain.on('error', err => this.error(err)); + this.pool.on('error', err => this.error(err)); + + if (this.http) + this.http.on('error', err => this.error(err)); + + this.pool.on('tx', (tx) => { + this.emit('tx', tx); + }); + + this.chain.on('block', (block) => { + this.emit('block', block); + }); + + this.chain.on('connect', async (entry, block) => { + this.emit('connect', entry, block); + }); + + this.chain.on('disconnect', (entry, block) => { + this.emit('disconnect', entry, block); + }); + + this.chain.on('reorganize', (tip, competitor) => { + this.emit('reorganize', tip, competitor); + }); + + this.chain.on('reset', (tip) => { + this.emit('reset', tip); + }); + + this.chain.on('headersFull', async () => { + if (this.chain.height === 0) + return; + this.logger.info('Block Headers are fully synced'); + await this.pool.startFilterHeadersSync(); + }); + + this.pool.on('cfheaders', async () => { + if (this.chain.height === 0) + return; + this.logger.info('Filter Headers are fully synced'); + await this.pool.startFilterSync(); + }); + + this.loadPlugins(); + } + + /** + * Open the node and all its child objects, + * wait for the database to load. 
+ * @returns {Promise} + */ + + async open() { + assert(!this.opened, 'Neutrino Node is already open.'); + this.opened = true; + + await this.handlePreopen(); + await this.blocks.open(); + await this.chain.open(); + await this.pool.open(); + + await this.openPlugins(); + + await this.http.open(); + await this.handleOpen(); + + for (const filterindex of this.filterIndexers.values()) { + await filterindex.open(); + } + + this.logger.info('Node is loaded.'); + } + + /** + * Close the node, wait for the database to close. + * @returns {Promise} + */ + + async close() { + assert(this.opened, 'Neutrino Node is not open.'); + this.opened = false; + + await this.handlePreclose(); + await this.http.close(); + + await this.closePlugins(); + + await this.pool.close(); + await this.chain.close(); + await this.handleClose(); + + for (const filterindex of this.filterIndexers.values()) { + await filterindex.close(); + } + } + + /** + * Scan for any missed transactions. + * Note that this will replay the blockchain sync. + * @param {Number|Hash} start - Start block. + * @returns {Promise} + */ + + async scan(start) { + throw new Error('Not implemented.'); + } + + /** + * Broadcast a transaction (note that this will _not_ be verified + * by the mempool - use with care, lest you get banned from + * bitcoind nodes). + * @param {TX|Block} item + * @returns {Promise} + */ + + async broadcast(item) { + try { + await this.pool.broadcast(item); + } catch (e) { + this.emit('error', e); + } + } + + /** + * Broadcast a transaction (note that this will _not_ be verified + * by the mempool - use with care, lest you get banned from + * bitcoind nodes). + * @param {TX} tx + * @returns {Promise} + */ + + sendTX(tx) { + return this.broadcast(tx); + } + + /** + * Broadcast a transaction. Silence errors. + * @param {TX} tx + * @returns {Promise} + */ + + relay(tx) { + return this.broadcast(tx); + } + + /** + * Connect to the network. + * @returns {Promise} + */ + + connect() { + return this.pool.connect(); + } + + /** + * Disconnect from the network. + * @returns {Promise} + */ + + disconnect() { + return this.pool.disconnect(); + } + + /** + * Start the blockchain sync. + */ + + startSync() { + return this.pool.startSync(); + } + + /** + * Stop syncing the blockchain. 
+ */ + + stopSync() { + return this.pool.stopSync(); + } +} + +/* + * Expose + */ + +module.exports = Neutrino; diff --git a/lib/node/rpc.js b/lib/node/rpc.js index f200f216b..1929d93bf 100644 --- a/lib/node/rpc.js +++ b/lib/node/rpc.js @@ -152,11 +152,14 @@ class RPC extends RPCBase { this.add('getblockchaininfo', this.getBlockchainInfo); this.add('getbestblockhash', this.getBestBlockHash); this.add('getblockcount', this.getBlockCount); + this.add('getfiltercount', this.getFilterCount); + this.add('getfilterheadercount', this.getFilterHeaderCount); this.add('getblock', this.getBlock); this.add('getblockbyheight', this.getBlockByHeight); this.add('getblockhash', this.getBlockHash); this.add('getblockheader', this.getBlockHeader); this.add('getblockfilter', this.getBlockFilter); + this.add('getblockfilterheader', this.getBlockFilterHeader); this.add('getchaintips', this.getChainTips); this.add('getdifficulty', this.getDifficulty); this.add('getmempoolancestors', this.getMempoolAncestors); @@ -625,6 +628,22 @@ class RPC extends RPCBase { return this.chain.tip.height; } + async getFilterCount(args, help) { + if (help || args.length !== 0) + throw new RPCError(errs.MISC_ERROR, 'getfiltercount'); + + const height = await this.chain.getCFilterHeight(); + return height; + } + + async getFilterHeaderCount(args, help) { + if (help || args.length !== 0) + throw new RPCError(errs.MISC_ERROR, 'getfilterheadercount'); + + const height = await this.chain.getCFHeaderHeight(); + return height; + } + async getBlock(args, help) { if (help || args.length < 1 || args.length > 3) throw new RPCError(errs.MISC_ERROR, 'getblock "hash" ( verbose )'); @@ -765,6 +784,32 @@ class RPC extends RPCBase { return filter.toJSON(); } + async getBlockFilterHeader(args, help) { + if (help || args.length < 1 || args.length > 2) { + throw new RPCError(errs.MISC_ERROR, + 'getblockfilterheader "hash" ( "type" )'); + } + + const valid = new Validator(args); + const hash = valid.brhash(0); + const filterName = valid.str(1, 'BASIC').toUpperCase(); + + const filterType = filters[filterName]; + + if (!hash) + throw new RPCError(errs.MISC_ERROR, 'Invalid block hash.'); + + if (!filterType) + throw new RPCError(errs.MISC_ERROR, 'Filter type not supported'); + + const filterHeader = await this.node.getBlockFilterHeader(hash, filterName); + + if (!filterHeader) + throw new RPCError(errs.MISC_ERROR, 'Block filter header not found.'); + + return filterHeader; + } + async getChainTips(args, help) { if (help || args.length !== 0) throw new RPCError(errs.MISC_ERROR, 'getchaintips'); diff --git a/lib/protocol/networks.js b/lib/protocol/networks.js index 16e6bedf7..8c2db9e8e 100644 --- a/lib/protocol/networks.js +++ b/lib/protocol/networks.js @@ -792,7 +792,7 @@ regtest.block = { bip66hash: null, pruneAfterHeight: 1000, keepBlocks: 10000, - maxTipAge: 0xffffffff, + maxTipAge: 24 * 60 * 60, slowHeight: 0 }; diff --git a/lib/wallet/client.js b/lib/wallet/client.js index 768f38e50..6a9a12a77 100644 --- a/lib/wallet/client.js +++ b/lib/wallet/client.js @@ -71,6 +71,17 @@ class WalletClient extends NodeClient { return super.setFilter(filter.toRaw()); } + /** + * Check filter against wallet key ring + * @param {WalletKey} ring + * @param {Filter} filter + * @returns {Promise} + */ + + async getBlockFromNode(hash, filter) { + return super.getBlockPeer(hash, filter); + } + async rescan(start) { if (Buffer.isBuffer(start)) start = util.revHex(start); diff --git a/lib/wallet/nodeclient.js b/lib/wallet/nodeclient.js index 9f6c43600..48b8c6e33 100644 --- 
a/lib/wallet/nodeclient.js +++ b/lib/wallet/nodeclient.js @@ -37,6 +37,13 @@ class NodeClient extends AsyncEmitter { init() { this.node.chain.on('connect', async (entry, block) => { + if (!this.opened || this.node.neutrino) + return; + + await this.emitAsync('block connect', entry, block.txs); + }); + + this.node.chain.on('getblockpeer', async (entry, block) => { if (!this.opened) return; @@ -50,6 +57,13 @@ class NodeClient extends AsyncEmitter { await this.emitAsync('block disconnect', entry); }); + this.node.pool.on('cfilter', async (blockHeight, filter) => { + if (!this.opened) + return; + + await this.emitAsync('cfilter', blockHeight, filter); + }); + this.node.on('tx', (tx) => { if (!this.opened) return; @@ -134,6 +148,10 @@ class NodeClient extends AsyncEmitter { return entry; } + async getBlockFromNode(hash, filter) { + await this.node.chain.getBlockPeer(hash, filter); + } + /** * Send a transaction. Do not wait for promise. * @param {TX} tx @@ -174,6 +192,13 @@ class NodeClient extends AsyncEmitter { this.node.pool.queueFilterLoad(); } + /** + * Check filter against wallet key ring + * @param {WalletKey} ring + * @param {Filter} filter + * @returns {Promise} + */ + /** * Estimate smart fee. * @param {Number?} blocks diff --git a/lib/wallet/nullclient.js b/lib/wallet/nullclient.js index 744629d4b..a6387fc30 100644 --- a/lib/wallet/nullclient.js +++ b/lib/wallet/nullclient.js @@ -130,6 +130,17 @@ class NullClient extends EventEmitter { this.wdb.emit('reset filter'); } + /** + * Check filter against wallet key ring + * @param {WalletKey} ring + * @param {Filter} filter + * @returns {Promise} + */ + + async getBlockFromNode(hash, filter) { + ; + } + /** * Esimate smart fee. * @param {Number?} blocks diff --git a/lib/wallet/wallet.js b/lib/wallet/wallet.js index 3703deaf9..32bdb4b38 100644 --- a/lib/wallet/wallet.js +++ b/lib/wallet/wallet.js @@ -59,6 +59,8 @@ class Wallet extends EventEmitter { this.writeLock = new Lock(); this.fundLock = new Lock(); + this.neutrino = false; + this.wid = 0; this.id = null; this.watchOnly = false; diff --git a/lib/wallet/walletdb.js b/lib/wallet/walletdb.js index b1f57ebf0..b1b2c7c84 100644 --- a/lib/wallet/walletdb.js +++ b/lib/wallet/walletdb.js @@ -26,6 +26,8 @@ const Outpoint = require('../primitives/outpoint'); const layouts = require('./layout'); const records = require('./records'); const NullClient = require('./nullclient'); +const Script = require('../script/script'); +const Address = require('../primitives/address'); const layout = layouts.wdb; const tlayout = layouts.txdb; @@ -65,10 +67,12 @@ class WalletDB extends EventEmitter { this.state = new ChainState(); this.confirming = false; this.height = 0; + this.filterHeight = 0; this.wallets = new Map(); this.depth = 0; this.rescanning = false; this.filterSent = false; + this.isWitness = false; // Wallet read lock. 
this.readLock = new MapLock(); @@ -169,6 +173,14 @@ class WalletDB extends EventEmitter { this.emit('error', e); } }); + + this.client.bind('cfilter', async (blockHeight, filter) => { + try { + await this.checkFilter(blockHeight, filter); + } catch (e) { + this.emit('error', e); + } + }); } /** @@ -201,6 +213,9 @@ class WalletDB extends EventEmitter { id: 'primary' }); + const account = await wallet.getAccount(wallet.wid); + this.isWitness = account.witness; + const addr = await wallet.receiveAddress(); this.logger.info( @@ -568,6 +583,35 @@ class WalletDB extends EventEmitter { return this.client.resetFilter(); } + async checkFilter (blockHash, filter) { + this.filterHeight = this.filterHeight + 1; + const gcsKey = blockHash.slice(0, 16); + + const piter = this.db.iterator({ + gte: layout.p.min(), + lte: layout.p.max() + }); + + await piter.each(async (key) => { + const [data] = layout.p.decode(key); + let address = null; + if (data.length === 20) { + if (this.isWitness) + address = Address.fromWitnessPubkeyhash(data); + else + address = Address.fromPubkeyhash(data); + } else if (data.length === 32) { + address = Address.fromWitnessScripthash(data); + } + const script = Script.fromAddress(address); + const match = filter.match(gcsKey, script.toRaw()); + if (match) { + await this.client.getBlockFromNode(blockHash, filter); + return; + } + }); + } + /** * Backup the wallet db. * @param {String} path diff --git a/test/neutrino-test.js b/test/neutrino-test.js new file mode 100644 index 000000000..842fe2782 --- /dev/null +++ b/test/neutrino-test.js @@ -0,0 +1,95 @@ +'use strict'; + +const FullNode = require('../lib/node/fullnode'); +const NeutrinoNode = require('../lib/node/neutrino'); +const {forValue} = require('./util/common'); +const assert = require('bsert'); +describe('neutrino', function () { + this.timeout(100000); + + const node1 = new NeutrinoNode({ + network: 'regtest', + memory: true, + port: 10000, + httpPort: 20000, + neutrino: true, + only: '127.0.0.1' + }); + + const node2 = new FullNode({ + network: 'regtest', + memory: true, + listen: true, + indexFilter: true, + bip157: true + }); + + async function mineBlocks(n) { + while (n) { + const block = await node2.miner.mineBlock(); + await node2.chain.add(block); + await new Promise(resolve => setTimeout(resolve, 20)); + n--; + } + await forValue(node1.chain, 'height', node2.chain.height); + } + + before(async function () { + const waitForConnection = new Promise((resolve, reject) => { + node1.pool.once('peer open', async (peer) => { + resolve(peer); + }); + }); + + await node1.open(); + await node2.open(); + await node1.connect(); + await node2.connect(); + node1.startSync(); + node2.startSync(); + await waitForConnection; + await mineBlocks(1000); + }); + + after(async () => { + await node1.close(); + await node2.close(); + }); + + describe('getheaders', () => { + it('should getheaders', async () => { + await mineBlocks(10); + assert.equal(node1.chain.height, node2.chain.height); + }); + }); + + describe('getcfheaders', () => { + it('should getcfheaders', async () => { + await new Promise(resolve => setTimeout(resolve, 400)); + const headerHeight = await node1.chain.getCFHeaderHeight(); + assert.equal(headerHeight, node1.chain.height); + }); + }); + + describe('getcfilters', () => { + it('should getcfilters', async () => { + await new Promise(resolve => setTimeout(resolve, 400)); + const filterHeight = await node1.chain.getCFilterHeight(); + assert.equal(filterHeight, node1.chain.height); + }); + }); + + describe('save filters', 
() => { + it('should save filters correctly', async () => { + const filterIndexer = node1.filterIndexers.get('BASIC'); + for (let i = 0; i < node1.chain.height; i++) { + const hash = await node1.chain.getHash(i); + const filterHeader = await filterIndexer.getFilterHeader(hash); + assert(filterHeader); + const filter = await filterIndexer.getFilter(hash); + assert(filter); + assert(filterHeader.equals(filter.header)); + } + }); + }); +}); diff --git a/test/p2p-bip157-test.js b/test/p2p-bip157-test.js new file mode 100644 index 000000000..b72c7175a --- /dev/null +++ b/test/p2p-bip157-test.js @@ -0,0 +1,100 @@ +/* eslint-env mocha */ +/* eslint prefer-arrow-callback: "off" */ + +'use strict'; + +const assert = require('bsert'); +const FullNode = require('../lib/node/fullnode'); +const NeutrinoNode = require('../lib/node/neutrino'); +const {forValue} = require('./util/common'); +const {MAX_CFILTERS} = require('../lib/net/common'); +const packets = require('../lib/net/packets'); + +describe('P2P', function () { + this.timeout(50000); + + const node1 = new NeutrinoNode({ + network: 'regtest', + memory: true, + port: 10000, + httpPort: 20000, + only: '127.0.0.1', + neutrino: true + }); + + const node2 = new FullNode({ + network: 'regtest', + memory: true, + listen: true, + indexFilter: true, + bip157: true + }); + + let peer; + const nodePackets = {}; + + node1.pool.on('packet', (packet) => { + if (!nodePackets[packet.cmd]) + nodePackets[packet.cmd] = [packet]; + else + nodePackets[packet.cmd].push(packet); + }); + + async function mineBlocks(n) { + while (n) { + const block = await node2.miner.mineBlock(); + await node2.chain.add(block); + await new Promise(resolve => setTimeout(resolve, 20)); + n--; + } + await forValue(node1.chain, 'height', node2.chain.height); + } + + before(async () => { + const waitForConnection = new Promise((resolve, reject) => { + node1.pool.once('peer open', async (peer) => { + resolve(peer); + }); + }); + + await node1.open(); + await node2.open(); + await node1.connect(); + await node2.connect(); + node1.startSync(); + node2.startSync(); + + // `peer` is node2, from node1's perspective. + // So peer.send() sends a packet from node1 to node2, + // and `nodePackets` catches the response packets that + // node2 sends back to node1. + peer = await waitForConnection; + }); + + after(async () => { + await node1.close(); + await node2.close(); + }); + + describe('BIP157', function () { + before(async () => { + // Do not exceed limit, including genesis block + await mineBlocks(MAX_CFILTERS - node1.chain.height - 1); + }); + + it('CFCheckpt', async () => { + nodePackets.cfcheckpt = []; + + await mineBlocks(2); + + const pkt = new packets.GetCFCheckptPacket( + 0, + node1.chain.tip.hash + ); + + peer.send(pkt); + await forValue(nodePackets.cfcheckpt, 'length', 1); + assert.strictEqual(nodePackets.cfcheckpt[0].filterHeaders.length, 1); + }); + }); +}); diff --git a/test/p2p-test.js b/test/p2p-test.js index 85eae0849..b9502550d 100644 --- a/test/p2p-test.js +++ b/test/p2p-test.js @@ -6,8 +6,6 @@ const assert = require('bsert'); const FullNode = require('../lib/node/fullnode'); const {forValue} = require('./util/common'); -const {MAX_CFILTERS} = require('../lib/net/common'); -const packets = require('../lib/net/packets'); describe('P2P', function () { this.timeout(5000); @@ -60,6 +58,7 @@ describe('P2P', function () { await node2.connect(); node1.startSync(); node2.startSync(); + await mineBlocks(1); // `peer` is node2, from node1's perspective. 
// So peer.send() sends a packet from node1 to node2, @@ -73,58 +72,6 @@ describe('P2P', function () { await node2.close(); }); - describe('BIP157', function () { - before(async () => { - // Do not exceed limit, including genesis block - await mineBlocks(MAX_CFILTERS - node1.chain.height - 1); - }); - - it('CFilters', async () => { - nodePackets.cfilter = []; - - const pkt = new packets.GetCFiltersPacket( - 0, - 0, - node1.chain.tip.hash - ); - - peer.send(pkt); - await forValue(nodePackets.cfilter, 'length', MAX_CFILTERS); - }); - - it('CFHeaders', async () => { - nodePackets.cfheaders = []; - - const pkt = new packets.GetCFHeadersPacket( - 0, - 0, - node1.chain.tip.hash - ); - - peer.send(pkt); - await forValue(nodePackets.cfheaders, 'length', 1); - assert.strictEqual( - nodePackets.cfheaders[0].filterHashes.length, - node1.chain.height + 1 - ); - }); - - it('CFCheckpt', async () => { - nodePackets.cfcheckpt = []; - - await mineBlocks(2); - - const pkt = new packets.GetCFCheckptPacket( - 0, - node1.chain.tip.hash - ); - - peer.send(pkt); - await forValue(nodePackets.cfcheckpt, 'length', 1); - assert.strictEqual(nodePackets.cfcheckpt[0].filterHeaders.length, 1); - }); - }); - describe('Compact Blocks', function () { it('should get compact block in low bandwidth mode', async () => { nodePackets.inv = []; diff --git a/test/wallet-neutrino-test.js b/test/wallet-neutrino-test.js new file mode 100644 index 000000000..2ab783c62 --- /dev/null +++ b/test/wallet-neutrino-test.js @@ -0,0 +1,173 @@ +'use strict'; + +const FullNode = require('../lib/node/fullnode'); +const Neutrino = require('../lib/node/neutrino'); +const assert = require('bsert'); +const { forValue } = require('./util/common'); +const BasicFilter = require('../lib/golomb/basicFilter'); +const Script = require('../lib/script/script'); +const Address = require('../lib/primitives/address'); + +const node1 = new FullNode({ + network: 'regtest', + memory: true, + listen: true, + indexFilter: true, + plugins: [require('../lib/wallet/plugin')], + bip157: true +}); + +const node2 = new Neutrino({ + network: 'regtest', + memory: true, + port: 10000, + httpPort: 20000, + neutrino: true, + only: '127.0.0.1', + plugins: [require('../lib/wallet/plugin')], + env: { + 'BCOIN_WALLET_HTTP_PORT': '12221' + } +}); + +const chain = node1.chain; +const miner = node1.miner; +const wdb1 = node1.require('walletdb').wdb; +const wdb2 = node2.require('walletdb').wdb; + +let wallet1 = null; +let wallet2 = null; +const fwAddresses = []; +const nwAddresses = []; + +async function mineBlocks(n, address) { + for (let i = 0; i < n; i++) { + const block = await miner.mineBlock(null, address); + const entry = await chain.add(block); + assert(entry); + } +} + +function parseAddress(raw, network) { + return Address.fromString(raw, network); +} + +describe('wallet-neutrino', function() { + it('should open chain and miner', async () => { + miner.mempool = null; + await node1.open(); + await node2.open(); + }); + + it('should open walletdb', async () => { + wallet1 = await wdb1.create(); + wallet2 = await wdb2.create(); + }); + + it('should create accounts', async () => { + await wallet1.createAccount('fw'); + await wallet2.createAccount('nw'); + }); + + it('should generate addresses', async () => { + miner.addresses.length = 0; + for (let i = 0; i < 10; i++) { + const key = await wallet1.createReceive(0); + const address = key.getAddress().toString(node1.network.type); + fwAddresses.push(address); + } + miner.addAddress(fwAddresses[0]); + for (let i = 0; i < 10; i++) { 
+ const key = await wallet2.createReceive(0); + const address = key.getAddress().toString(node2.network.type); + nwAddresses.push(address); + } + }); + + it('should mine 40 blocks', async () => { + for (const address of fwAddresses) { + const add = parseAddress(address, node1.network); + await mineBlocks(2, add); + } + for (const address of nwAddresses) { + const add = parseAddress(address, node2.network); + await mineBlocks(2, add); + } + }); + + it('should connect nodes', async () => { + await node1.connect(); + await node2.connect(); + }); + + it('should start sync chain', async () => { + node1.startSync(); + node2.startSync(); + await forValue(node2.chain, 'height', node1.chain.height); + }); + + it('should getheaders', async () => { + assert.equal(node1.chain.height, node2.chain.height); + }); + + it('should getcfheaders', async () => { + await new Promise(resolve => setTimeout(resolve, 400)); + const headerHeight = await node2.chain.getCFHeaderHeight(); + assert.equal(headerHeight, node2.chain.height); + }); + + it('should getcfilters', async () => { + await new Promise(resolve => setTimeout(resolve, 400)); + const filterHeight = await node2.chain.getCFilterHeight(); + assert.equal(filterHeight, node2.chain.height); + }); + + it('should send filters to wallet', async () => { + assert.equal(wdb2.filterHeight, node2.chain.height); + }); + + it('should match the filters', async () => { + let j = 0; + for (let i = 1;i <= 20; i++) { + const filterIndexer = node2.filterIndexers.get('BASIC'); + const hash = await node2.chain.getHash(i); + const filter = await filterIndexer.getFilter(hash); + const basicFilter = new BasicFilter(); + const gcs = basicFilter.fromNBytes(filter.filter); + const key = hash.slice(0, 16); + const address = Address.fromString(fwAddresses[j], node1.network.type); + if (i % 2 === 0) + j++; + const script = Script.fromAddress(address); + assert(gcs.match(key, script.raw)); + } + + j = 0; + for (let i = 21;i <= node2.chain.height; i++) { + const filterIndexer = node2.filterIndexers.get('BASIC'); + const hash = await node2.chain.getHash(i); + const filter = await filterIndexer.getFilter(hash); + const basicFilter = new BasicFilter(); + const gcs = basicFilter.fromNBytes(filter.filter); + const key = hash.slice(0, 16); + const address = Address.fromString(nwAddresses[j], node2.network.type); + if (i % 2 === 0) + j++; + const script = Script.fromAddress(address); + assert(gcs.match(key, script.raw)); + } + }); + + it('should getblockfrompeer', async () => { + for (let i = 21; i <= node2.chain.height; i++) { + const hash = await node2.chain.getHash(i); + const block = await node2.chain.getBlock(hash); + assert(block); + } + }); + + it('should cleanup', async () => { + await node1.close(); + await node2.close(); + }); +});
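
A minimal usage sketch (not part of the patch itself), based only on what this diff adds: the `bcoin.Neutrino` export in lib/bcoin.js, the node lifecycle in lib/node/neutrino.js, and the `headersFull` / `cfilter` events wired up in lib/blockchain/chain.js and lib/net/pool.js. Option names mirror bin/neutrino and test/neutrino-test.js; the logLevel value and the logging inside the event handlers are illustrative, not prescribed by the patch.

'use strict';

// Sketch only: mirrors the flow in bin/neutrino from this diff.
const bcoin = require('bcoin');
const Neutrino = bcoin.Neutrino; // registered via bcoin.define('Neutrino', './node/neutrino')

const node = new Neutrino({
  network: 'regtest',   // as used in test/neutrino-test.js
  memory: true,
  neutrino: true,
  logLevel: 'info'      // illustrative; bin/neutrino uses 'debug'
});

(async () => {
  await node.ensure();
  await node.open();     // opens blocks, chain, pool and the BASIC filter indexer
  await node.connect();  // requires peers advertising NODE_COMPACT_FILTERS
  node.startSync();      // headers -> cfheaders -> cfilters

  // Emitted by Chain.maybeSync() when options.neutrino is set.
  node.chain.on('headersFull', () => {
    console.log('Block headers synced; filter header sync starts next.');
  });

  // Emitted by Pool.handleCFilters() for each stored filter.
  node.pool.on('cfilter', (blockHash, filter) => {
    console.log('Got filter for block %s.', blockHash.toString('hex'));
  });
})().catch((err) => {
  console.error(err.stack);
  process.exit(1);
});

The same flow is reachable from the command line through the new `--neutrino` flag added to bin/bcoin, which dispatches to the bin/neutrino launcher introduced in this patch.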