Merge pull request #758 from braydonf/indexer

Indexer fixes and improvements
This commit is contained in:
Braydon Fuller 2019-05-16 11:20:05 -07:00
commit 7e49be2a07
No known key found for this signature in database
GPG Key ID: F24F232D108B3AD4
30 changed files with 3989 additions and 893 deletions

View File

@ -5,13 +5,28 @@
### How to upgrade
The way that block data is stored has changed for greater performance,
efficiency, reliability and portability. To upgrade to the new disk layout
it's necessary to move block data from LevelDB (e.g. `~/.bcoin/chain`) to
a new file-based block storage (e.g. `~/.bcoin/blocks`).
efficiency, reliability and portability.
- Block and undo block data has been moved from LevelDB into flat files.
- The transaction and address indexes have been moved into separate
LevelDB databases.
- Transaction data has been de-duplicated, reducing disk usage by
half for those running with `txindex` enabled.
- The `txindex` and `addrindex` can now be enabled after the initial
block download.
- The `addrindex` has been sorted to support querying for large sets
of results, and will no longer cause CPU and memory exhaustion issues.
- The `addrindex` will correctly distinguish between `p2pkh` and
`p2wpkh` addresses.
To upgrade to the new disk layout it's necessary to move block data
from LevelDB (e.g. `~/.bcoin/chain`) to a new file-based block
storage (e.g. `~/.bcoin/blocks`), and to remove `txindex` and
`addrindex` data from the chain database, for those who have those
features enabled.
To do this you can run:
```
node ./migrate/chaindb4to5.js /path/to/bcoin/chain
node ./migrate/chaindb4to6.js /path/to/bcoin/chain
```
The migration will take 1-3 hours, depending on hardware. The block data
@ -22,6 +37,12 @@ Alternatively, you can also sync the chain again, however the above
migration will be faster as additional network bandwidth won't be used
for downloading the blocks again.
For those with `txindex` and `addrindex` enabled, the indexes will be
regenerated by rescanning the chain on the next startup. This process
can take multiple hours (e.g. 8 hours) depending on hardware and the
index. Please take the potential re-indexing downtime into account
before upgrading.
### Wallet API changes
#### HTTP
@ -50,6 +71,31 @@ for downloading the blocks again.
`iswitness`, `witness_version` and `witness_program`.
(a28ffa272a3c4d90d0273d9aa223a23becc08e0e)
### Node API changes
#### HTTP
Several CPU and memory exhaustion issues have been resolved by adding
arguments for querying results in multiple sets for addresses that
have many transactions.
- `GET /tx/address/:address` has several new arguments: `after`, `reverse`
and `limit`. The `after` argument is a txid for querying additional results
after a previous result. The `reverse` argument changes the order in which
results are returned; the default order is oldest to latest. The `limit`
argument can be used to return results in smaller sets if necessary
(see the example after this list).
- `POST /tx/address` has been deprecated. Instead, query each address
individually with `GET /tx/address/:address`, with the expectation that
there could be _many_ results that would need to be fetched in
subsequent queries using the `after` argument to request the next set.
- `POST /coin/address` and `GET /coin/address/:address` are deprecated, as
coins can be derived from the results of `/tx/address/:address` by querying
only the range of latest transactions to stay synchronized. Coins could
otherwise be removed from results at any point, and thus the entire set of
results would need to be queried every time to discover which coins have
been spent and which are currently available.
- `GET /` has new fields `.indexes.{addr,tx}` for the status of indexers.
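As a rough sketch of how a client might page through a large address
history with the new arguments (the endpoint and argument names come from
this changelog; the port, the use of query-string parameters, the
availability of `fetch` and the response shape are assumptions):
```
// Hypothetical paging loop over GET /tx/address/:address. The base URL,
// query-string parameter passing and the `hash` field of each result
// are assumptions for this sketch.
const base = 'http://localhost:8332';

async function allTXByAddress(address) {
  const txs = [];
  let after = null;

  for (;;) {
    let url = `${base}/tx/address/${address}?limit=100`;
    if (after)
      url += `&after=${after}`;

    const page = await (await fetch(url)).json();

    if (page.length === 0)
      break;

    txs.push(...page);

    // Continue after the last txid of this page.
    after = page[page.length - 1].hash;
  }

  return txs;
}
```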
### Network changes
- Regtest params have been updated to correspond with other bitcoin
@ -76,6 +122,9 @@ for downloading the blocks again.
- The `coin-cache` option has been removed; this setting was causing
out-of-memory errors during sync and was making performance worse
instead of better.
- The database location for indexes can be configured via the
`--index-prefix` option. Default locations are `prefix` + `/index`
(e.g. `~/.bcoin/testnet/index/tx` and `~/.bcoin/testnet/index/addr`).
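For example, a hypothetical startup command that enables both indexes
and keeps their data on a separate disk (paths are placeholders; index
data would then land in `/mnt/ssd/bcoin/index/tx` and
`/mnt/ssd/bcoin/index/addr` per the `prefix` + `/index` layout above):
```
bcoin --network=testnet --index-tx --index-address \
  --index-prefix=/mnt/ssd/bcoin
```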
### Script changes
@ -97,6 +146,32 @@ for downloading the blocks again.
- The config file `wallet.conf` won't be read during test runs; it was
causing issues with some testing environments.
### Chain changes
- The transaction index methods are now implemented at `node.txindex`:
- `getMeta(hash)`
- `getTX(hash)`
- `hasTX(hash)`
- `getSpentView(tx)`
- The address index method `getHashesByAddress` is now implemented
at `node.addrindex`:
- `getHashesByAddress(addr)` now accepts `Address` instances
rather than `Address|String`, and the results are now sorted in
order of appearance in the blockchain.
- `getHashesByAddress(addr, options)` A new options argument has
been added with the fields:
- `after` - A transaction hash for results to begin after.
- `limit` - The total number of results to return at maximum.
- `reverse` - Will give results in order of latest to oldest.
- The following methods use `node.addrindex.getHashesByAddress`
in conjunction with `node.txindex.getTX` and `node.txindex.getMeta`
respectively, and now include a new options argument as described
above for `getHashesByAddress` (see the sketch after this list):
- `node.getMetaByAddress(addr, options)`
- `node.getTXByAddress(addr, options)`
- The following method has been deprecated:
- `getCoinsByAddress(addr)`
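A minimal sketch of the new access pattern (assuming `node` is a running
full node with `index-tx` and `index-address` enabled, `addr` is an
`Address` instance, and the code runs in an async context):
```
// Latest 50 transaction hashes for the address, newest first.
const hashes = await node.addrindex.getHashesByAddress(addr, {
  limit: 50,
  reverse: true
});

// Resolve each hash through the transaction index.
for (const hash of hashes) {
  const meta = await node.txindex.getMeta(hash);
  console.log(meta.tx.txid(), meta.height);
}

// Or, including unconfirmed mempool transactions, at the node level:
const metas = await node.getMetaByAddress(addr, {limit: 50, reverse: true});
```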
### Other changes
- A new module for storing block data in files.

View File

@ -55,6 +55,12 @@ bcoin.HDPrivateKey = require('./hd/private');
bcoin.HDPublicKey = require('./hd/public');
bcoin.Mnemonic = require('./hd/mnemonic');
// Index
bcoin.indexer = require('./indexer');
bcoin.Indexer = require('./indexer/indexer');
bcoin.TXIndexer = require('./indexer/txindexer');
bcoin.AddrIndexer = require('./indexer/addrindexer');
// Mempool
bcoin.mempool = require('./mempool');
bcoin.Fees = require('./mempool/fees');

View File

@ -76,6 +76,12 @@ bcoin.define('HDPrivateKey', './hd/private');
bcoin.define('HDPublicKey', './hd/public');
bcoin.define('Mnemonic', './hd/mnemonic');
// Index
bcoin.define('indexer', './indexer');
bcoin.define('Indexer', './indexer/indexer');
bcoin.define('TXIndexer', './indexer/txindexer');
bcoin.define('AddrIndexer', './indexer/addrindexer');
// Mempool
bcoin.define('mempool', './mempool');
bcoin.define('Fees', './mempool/fees');

View File

@ -1948,75 +1948,6 @@ class Chain extends AsyncEmitter {
return this.db.getBlockView(block);
}
/**
* Get a transaction with metadata.
* @param {Hash} hash
* @returns {Promise} - Returns {@link TXMeta}.
*/
getMeta(hash) {
return this.db.getMeta(hash);
}
/**
* Retrieve a transaction.
* @param {Hash} hash
* @returns {Promise} - Returns {@link TX}.
*/
getTX(hash) {
return this.db.getTX(hash);
}
/**
* @param {Hash} hash
* @returns {Promise} - Returns Boolean.
*/
hasTX(hash) {
return this.db.hasTX(hash);
}
/**
* Get all coins pertinent to an address.
* @param {Address[]} addrs
* @returns {Promise} - Returns {@link Coin}[].
*/
getCoinsByAddress(addrs) {
return this.db.getCoinsByAddress(addrs);
}
/**
* Get all transaction hashes to an address.
* @param {Address[]} addrs
* @returns {Promise} - Returns {@link Hash}[].
*/
getHashesByAddress(addrs) {
return this.db.getHashesByAddress(addrs);
}
/**
* Get all transactions pertinent to an address.
* @param {Address[]} addrs
* @returns {Promise} - Returns {@link TX}[].
*/
getTXByAddress(addrs) {
return this.db.getTXByAddress(addrs);
}
/**
* Get all transactions pertinent to an address.
* @param {Address[]} addrs
* @returns {Promise} - Returns {@link TXMeta}[].
*/
getMetaByAddress(addrs) {
return this.db.getMetaByAddress(addrs);
}
/**
* Get an orphan block.
* @param {Hash} hash
@ -2057,21 +1988,6 @@ class Chain extends AsyncEmitter {
return this.db.getCoinView(tx);
}
/**
* Get coin viewpoint (spent).
* @param {TX} tx
* @returns {Promise} - Returns {@link CoinView}.
*/
async getSpentView(tx) {
const unlock = await this.locker.lock();
try {
return await this.db.getSpentView(tx);
} finally {
unlock();
}
}
/**
* Test the chain to see if it is synced.
* @returns {Boolean}
@ -2687,8 +2603,6 @@ class ChainOptions {
this.bip91 = false;
this.bip148 = false;
this.prune = false;
this.indexTX = false;
this.indexAddress = false;
this.forceFlags = false;
this.entryCache = 5000;
@ -2771,16 +2685,6 @@ class ChainOptions {
this.prune = options.prune;
}
if (options.indexTX != null) {
assert(typeof options.indexTX === 'boolean');
this.indexTX = options.indexTX;
}
if (options.indexAddress != null) {
assert(typeof options.indexAddress === 'boolean');
this.indexAddress = options.indexAddress;
}
if (options.forceFlags != null) {
assert(typeof options.forceFlags === 'boolean');
this.forceFlags = options.forceFlags;

View File

@ -11,7 +11,7 @@ const assert = require('bsert');
const bdb = require('bdb');
const bio = require('bufio');
const LRU = require('blru');
const {BufferMap, BufferSet} = require('buffer-map');
const {BufferMap} = require('buffer-map');
const Amount = require('../btc/amount');
const Network = require('../protocol/network');
const CoinView = require('../coins/coinview');
@ -20,9 +20,7 @@ const layout = require('./layout');
const consensus = require('../protocol/consensus');
const Block = require('../primitives/block');
const Outpoint = require('../primitives/outpoint');
const Address = require('../primitives/address');
const ChainEntry = require('./chainentry');
const TXMeta = require('../primitives/txmeta');
const CoinEntry = require('../coins/coinentry');
/**
@ -62,7 +60,7 @@ class ChainDB {
this.logger.info('Opening ChainDB...');
await this.db.open();
await this.db.verify(layout.V.encode(), 'chain', 5);
await this.db.verify(layout.V.encode(), 'chain', 6);
const state = await this.getState();
@ -573,18 +571,6 @@ class ChainDB {
if (!options.prune && flags.prune)
throw new Error('Cannot retroactively unprune.');
if (options.indexTX && !flags.indexTX)
throw new Error('Cannot retroactively enable TX indexing.');
if (!options.indexTX && flags.indexTX)
throw new Error('Cannot retroactively disable TX indexing.');
if (options.indexAddress && !flags.indexAddress)
throw new Error('Cannot retroactively enable address indexing.');
if (!options.indexAddress && flags.indexAddress)
throw new Error('Cannot retroactively disable address indexing.');
if (needsSave) {
await this.logger.info('Rewriting chain flags.');
await this.saveFlags();
@ -976,34 +962,6 @@ class ChainDB {
return view;
}
/**
* Get coin viewpoint (historical).
* @param {TX} tx
* @returns {Promise} - Returns {@link CoinView}.
*/
async getSpentView(tx) {
const view = await this.getCoinView(tx);
for (const {prevout} of tx.inputs) {
if (view.hasEntry(prevout))
continue;
const {hash, index} = prevout;
const meta = await this.getMeta(hash);
if (!meta)
continue;
const {tx, height} = meta;
if (index < tx.outputs.length)
view.addIndex(tx, index, height);
}
return view;
}
/**
* Get coins necessary to be resurrected during a reorg.
* @param {Hash} hash
@ -1080,157 +1038,6 @@ class ChainDB {
return view;
}
/**
* Get a transaction with metadata.
* @param {Hash} hash
* @returns {Promise} - Returns {@link TXMeta}.
*/
async getMeta(hash) {
if (!this.options.indexTX)
return null;
const data = await this.db.get(layout.t.encode(hash));
if (!data)
return null;
return TXMeta.fromRaw(data);
}
/**
* Retrieve a transaction.
* @param {Hash} hash
* @returns {Promise} - Returns {@link TX}.
*/
async getTX(hash) {
const meta = await this.getMeta(hash);
if (!meta)
return null;
return meta.tx;
}
/**
* @param {Hash} hash
* @returns {Promise} - Returns Boolean.
*/
async hasTX(hash) {
if (!this.options.indexTX)
return false;
return this.db.has(layout.t.encode(hash));
}
/**
* Get all coins pertinent to an address.
* @param {Address[]} addrs
* @returns {Promise} - Returns {@link Coin}[].
*/
async getCoinsByAddress(addrs) {
if (!this.options.indexAddress)
return [];
if (!Array.isArray(addrs))
addrs = [addrs];
const coins = [];
for (const addr of addrs) {
const hash = Address.getHash(addr);
const keys = await this.db.keys({
gte: layout.C.min(hash),
lte: layout.C.max(hash),
parse: (key) => {
const [, txid, index] = layout.C.decode(key);
return [txid, index];
}
});
for (const [hash, index] of keys) {
const coin = await this.getCoin(hash, index);
assert(coin);
coins.push(coin);
}
}
return coins;
}
/**
* Get all transaction hashes to an address.
* @param {Address[]} addrs
* @returns {Promise} - Returns {@link Hash}[].
*/
async getHashesByAddress(addrs) {
if (!this.options.indexTX || !this.options.indexAddress)
return [];
const set = new BufferSet();
for (const addr of addrs) {
const hash = Address.getHash(addr);
await this.db.keys({
gte: layout.T.min(hash),
lte: layout.T.max(hash),
parse: (key) => {
const [, txid] = layout.T.decode(key);
set.add(txid);
}
});
}
return set.toArray();
}
/**
* Get all transactions pertinent to an address.
* @param {Address[]} addrs
* @returns {Promise} - Returns {@link TX}[].
*/
async getTXByAddress(addrs) {
const mtxs = await this.getMetaByAddress(addrs);
const out = [];
for (const mtx of mtxs)
out.push(mtx.tx);
return out;
}
/**
* Get all transactions pertinent to an address.
* @param {Address[]} addrs
* @returns {Promise} - Returns {@link TXMeta}[].
*/
async getMetaByAddress(addrs) {
if (!this.options.indexTX || !this.options.indexAddress)
return [];
if (!Array.isArray(addrs))
addrs = [addrs];
const hashes = await this.getHashesByAddress(addrs);
const mtxs = [];
for (const hash of hashes) {
const mtx = await this.getMeta(hash);
assert(mtx);
mtxs.push(mtx);
}
return mtxs;
}
/**
* Scan the blockchain for transactions containing specified address hashes.
* @param {Hash} start - Block hash to start at.
@ -1461,10 +1268,6 @@ class ChainDB {
await this.commit();
// Remove undo data _after_ successful commit.
if (this.blocks)
await this.blocks.pruneUndo(entry.hash);
return view;
}
@ -1581,12 +1384,6 @@ class ChainDB {
await this.commit();
// Remove block and undo data _after_ successful commit.
if (this.blocks) {
await this.blocks.pruneUndo(tip.hash);
await this.blocks.prune(tip.hash);
}
// Update caches _after_ successful commit.
this.cacheHeight.remove(tip.height);
this.cacheHash.remove(tip.hash);
@ -1610,23 +1407,15 @@ class ChainDB {
// one giant atomic write!
this.start();
let hashes = [];
try {
for (const tip of tips)
hashes = hashes.concat(await this._removeChain(tip));
await this._removeChain(tip);
} catch (e) {
this.drop();
throw e;
}
await this.commit();
// SPV doesn't store blocks.
if (this.blocks) {
for (const hash of hashes)
await this.blocks.prune(hash);
}
}
/**
@ -1644,8 +1433,6 @@ class ChainDB {
this.logger.debug('Removing alternate chain: %h.', tip.hash);
const hashes = [];
for (;;) {
if (await this.isMainChain(tip))
break;
@ -1657,10 +1444,6 @@ class ChainDB {
this.del(layout.h.encode(tip.hash));
this.del(layout.e.encode(tip.hash));
// Queue block to be pruned on
// successful write.
hashes.push(tip.hash);
// Queue up hash to be removed
// on successful write.
this.cacheHash.unpush(tip.hash);
@ -1668,8 +1451,6 @@ class ChainDB {
tip = await this.getPrevious(tip);
assert(tip);
}
return hashes;
}
/**
@ -1771,9 +1552,6 @@ class ChainDB {
this.pending.add(output);
}
// Index the transaction if enabled.
this.indexTX(tx, view, entry, i);
}
// Commit new coin state.
@ -1828,9 +1606,6 @@ class ChainDB {
this.pending.spend(output);
}
// Remove from transaction index.
this.unindexTX(tx, view);
}
// Undo coins should be empty.
@ -1882,105 +1657,6 @@ class ChainDB {
b.put(layout.O.encode(), flags.toRaw());
return b.write();
}
/**
* Index a transaction by txid and address.
* @private
* @param {TX} tx
* @param {CoinView} view
* @param {ChainEntry} entry
* @param {Number} index
*/
indexTX(tx, view, entry, index) {
const hash = tx.hash();
if (this.options.indexTX) {
const meta = TXMeta.fromTX(tx, entry, index);
this.put(layout.t.encode(hash), meta.toRaw());
if (this.options.indexAddress) {
for (const addr of tx.getHashes(view))
this.put(layout.T.encode(addr, hash), null);
}
}
if (!this.options.indexAddress)
return;
if (!tx.isCoinbase()) {
for (const {prevout} of tx.inputs) {
const {hash, index} = prevout;
const coin = view.getOutput(prevout);
assert(coin);
const addr = coin.getHash();
if (!addr)
continue;
this.del(layout.C.encode(addr, hash, index));
}
}
for (let i = 0; i < tx.outputs.length; i++) {
const output = tx.outputs[i];
const addr = output.getHash();
if (!addr)
continue;
this.put(layout.C.encode(addr, hash, i), null);
}
}
/**
* Remove transaction from index.
* @private
* @param {TX} tx
* @param {CoinView} view
*/
unindexTX(tx, view) {
const hash = tx.hash();
if (this.options.indexTX) {
this.del(layout.t.encode(hash));
if (this.options.indexAddress) {
for (const addr of tx.getHashes(view))
this.del(layout.T.encode(addr, hash));
}
}
if (!this.options.indexAddress)
return;
if (!tx.isCoinbase()) {
for (const {prevout} of tx.inputs) {
const {hash, index} = prevout;
const coin = view.getOutput(prevout);
assert(coin);
const addr = coin.getHash();
if (!addr)
continue;
this.put(layout.C.encode(addr, hash, index), null);
}
}
for (let i = 0; i < tx.outputs.length; i++) {
const output = tx.outputs[i];
const addr = output.getHash();
if (!addr)
continue;
this.del(layout.C.encode(addr, hash, i));
}
}
}
/**
@ -2001,8 +1677,6 @@ class ChainFlags {
this.bip91 = false;
this.bip148 = false;
this.prune = false;
this.indexTX = false;
this.indexAddress = false;
if (options)
this.fromOptions(options);
@ -2031,16 +1705,6 @@ class ChainFlags {
this.prune = options.prune;
}
if (options.indexTX != null) {
assert(typeof options.indexTX === 'boolean');
this.indexTX = options.indexTX;
}
if (options.indexAddress != null) {
assert(typeof options.indexAddress === 'boolean');
this.indexAddress = options.indexAddress;
}
return this;
}
@ -2062,12 +1726,6 @@ class ChainFlags {
if (this.prune)
flags |= 1 << 2;
if (this.indexTX)
flags |= 1 << 3;
if (this.indexAddress)
flags |= 1 << 4;
if (this.bip91)
flags |= 1 << 5;
@ -2091,8 +1749,6 @@ class ChainFlags {
this.spv = (flags & 1) !== 0;
this.witness = (flags & 2) !== 0;
this.prune = (flags & 4) !== 0;
this.indexTX = (flags & 8) !== 0;
this.indexAddress = (flags & 16) !== 0;
this.bip91 = (flags & 32) !== 0;
this.bip148 = (flags & 64) !== 0;

View File

@ -20,12 +20,12 @@ const bdb = require('bdb');
* n[hash] -> next hash
* p[hash] -> tip index
* b[hash] -> block (deprecated)
* t[hash] -> extended tx
* t[hash] -> extended tx (deprecated)
* c[hash] -> coins
* u[hash] -> undo coins (deprecated)
* v[bit][hash] -> versionbits state
* T[addr-hash][hash] -> dummy (tx by address)
* C[addr-hash][hash][index] -> dummy (coin by address)
* T[addr-hash][hash] -> dummy (tx by address) (deprecated)
* C[addr-hash][hash][index] -> dummy (coin by address) (deprecated)
*/
const layout = {

279
lib/indexer/addrindexer.js Normal file
View File

@ -0,0 +1,279 @@
/*!
* addrindexer.js - address indexer for bcoin
* Copyright (c) 2018, the bcoin developers (MIT License).
* https://github.com/bcoin-org/bcoin
*/
'use strict';
const assert = require('assert');
const bdb = require('bdb');
const bio = require('bufio');
const layout = require('./layout');
const Address = require('../primitives/address');
const Indexer = require('./indexer');
/*
* AddrIndexer Database Layout:
* A[addr-prefix][addr-hash][height][index] -> dummy (tx by address)
* C[height][index] -> hash (tx hash by height and index)
* c[hash] -> height + index (tx height and index by hash)
*
* The database layout is organized so that transactions are
* sorted in the same order as the blocks using the block height
* and transaction index. This provides the ability to query for
* sets of transactions within that order. For a wallet that would
* like to synchronize or rescan, this could be a query for all of
* the latest transactions, but not for earlier transactions that
* are already known.
*
* To be able to query for all transactions in multiple sets without
* reference to height and index, there is a mapping from tx hash to
* the height and index as an entry point.
*
* A mapping of height and index is kept for each transaction
* hash so that the tx hash is not repeated for every address within
* a transaction.
*/
Object.assign(layout, {
A: bdb.key('A', ['uint8', 'hash', 'uint32', 'uint32']),
C: bdb.key('C', ['uint32', 'uint32']),
c: bdb.key('c', ['hash256'])
});
/**
* Count
*/
class Count {
/**
* Create count record.
* @constructor
* @param {Number} height
* @param {Number} index
*/
constructor(height, index) {
this.height = height || 0;
this.index = index || 0;
assert((this.height >>> 0) === this.height);
assert((this.index >>> 0) === this.index);
}
/**
* Serialize.
* @returns {Buffer}
*/
toRaw() {
const bw = bio.write(8);
bw.writeU32(this.height);
bw.writeU32(this.index);
return bw.render();
}
/**
* Deserialize.
* @private
* @param {Buffer} data
*/
fromRaw(data) {
const br = bio.read(data);
this.height = br.readU32();
this.index = br.readU32();
return this;
}
/**
* Instantiate a count from a buffer.
* @param {Buffer} data
* @returns {Count}
*/
static fromRaw(data) {
return new this().fromRaw(data);
}
}
/**
* AddrIndexer
* @alias module:indexer.AddrIndexer
* @extends Indexer
*/
class AddrIndexer extends Indexer {
/**
* Create an indexer
* @constructor
* @param {Object} options
*/
constructor(options) {
super('addr', options);
this.db = bdb.create(this.options);
this.maxTxs = options.maxTxs || 100;
}
/**
* Index transactions by address.
* @private
* @param {BlockMeta} meta
* @param {Block} block
* @param {CoinView} view
*/
async indexBlock(meta, block, view) {
const height = meta.height;
for (let i = 0; i < block.txs.length; i++) {
const tx = block.txs[i];
const hash = tx.hash();
const count = new Count(height, i);
let hasAddress = false;
for (const addr of tx.getAddresses(view)) {
const prefix = addr.getPrefix();
if (prefix < 0)
continue;
const addrHash = addr.getHash();
this.put(layout.A.encode(prefix, addrHash, height, i), null);
hasAddress = true;
}
if (hasAddress) {
this.put(layout.C.encode(height, i), hash);
this.put(layout.c.encode(hash), count.toRaw());
}
}
}
/**
* Remove addresses from index.
* @private
* @param {BlockMeta} meta
* @param {Block} block
* @param {CoinView} view
*/
async unindexBlock(meta, block, view) {
const height = meta.height;
for (let i = 0; i < block.txs.length; i++) {
const tx = block.txs[i];
const hash = tx.hash();
let hasAddress = false;
for (const addr of tx.getAddresses(view)) {
const prefix = addr.getPrefix();
if (prefix < 0)
continue;
const addrHash = addr.getHash();
this.del(layout.A.encode(prefix, addrHash, height, i));
hasAddress = true;
}
if (hasAddress) {
this.del(layout.C.encode(height, i));
this.del(layout.c.encode(hash));
}
}
}
/**
* Get transaction hashes to an address in ascending or descending
* order. If the `after` argument is supplied, results will be given
* _after_ that transaction hash. The default order is ascending from
* oldest to latest.
* @param {Address} addr
* @param {Object} options
* @param {Buffer} options.after - A transaction hash
* @param {Number} options.limit
* @param {Boolean} options.reverse
* @returns {Promise} - Returns {@link Hash}[].
*/
async getHashesByAddress(addr, options = {}) {
const txs = [];
const {after, reverse} = options;
let {limit} = options;
if (!limit)
limit = this.maxTxs;
if (limit > this.maxTxs)
throw new Error(`Limit above max of ${this.maxTxs}.`);
const hash = Address.getHash(addr);
const prefix = addr.getPrefix();
const opts = {
limit,
reverse,
parse: (key) => {
const [,, height, index] = layout.A.decode(key);
txs.push([height, index]);
}
};
// Determine if the hash -> height + index mapping exists.
const hasAfter = (after && await this.db.has(layout.c.encode(after)));
// Check to see if results should be skipped because
// the after hash is expected to be within a following
// mempool query.
const skip = (after && !hasAfter && !reverse);
if (skip)
return [];
if (after && hasAfter) {
// Give results starting from after
// the tx hash for the address.
const raw = await this.db.get(layout.c.encode(after));
const count = Count.fromRaw(raw);
const {height, index} = count;
if (!reverse) {
opts.gt = layout.A.min(prefix, hash, height, index);
opts.lte = layout.A.max(prefix, hash);
} else {
opts.gte = layout.A.min(prefix, hash);
opts.lt = layout.A.max(prefix, hash, height, index);
}
} else {
// Give earliest or latest results
// for the address.
opts.gte = layout.A.min(prefix, hash);
opts.lte = layout.A.max(prefix, hash);
}
await this.db.keys(opts);
const hashes = [];
for (const [height, index] of txs)
hashes.push(await this.db.get(layout.C.encode(height, index)));
return hashes;
}
}
module.exports = AddrIndexer;

15
lib/indexer/index.js Normal file
View File

@ -0,0 +1,15 @@
/*!
* index.js - indexer for bcoin
* Copyright (c) 2018, the bcoin developers (MIT License).
* https://github.com/bcoin-org/bcoin
*/
'use strict';
/**
* @module indexer
*/
exports.Indexer = require('./indexer');
exports.TXIndexer = require('./txindexer');
exports.AddrIndexer = require('./addrindexer');

690
lib/indexer/indexer.js Normal file
View File

@ -0,0 +1,690 @@
/*!
* indexer.js - abstract interface for bcoin indexers
* Copyright (c) 2018, the bcoin developers (MIT License).
* https://github.com/bcoin-org/bcoin
*/
'use strict';
const assert = require('assert');
const path = require('path');
const fs = require('bfile');
const bio = require('bufio');
const EventEmitter = require('events');
const Logger = require('blgr');
const Network = require('../protocol/network');
const util = require('../utils/util');
const layout = require('./layout');
const CoinView = require('../coins/coinview');
const Block = require('../primitives/block');
const {ZERO_HASH} = require('../protocol/consensus');
/**
* Indexer
* The abstract class that indexers inherit from. Subclasses implement
* the `indexBlock` and `unindexBlock` methods, while this class handles
* database and storage initialization for indexing blocks.
* @alias module:indexer.Indexer
* @extends EventEmitter
* @abstract
*/
class Indexer extends EventEmitter {
/**
* Create an indexer.
* @constructor
* @param {String} module
* @param {Object} options
*/
constructor(module, options) {
super();
assert(typeof module === 'string');
assert(module.length > 0);
this.options = new IndexOptions(module, options);
this.network = this.options.network;
this.logger = this.options.logger.context(`${module}indexer`);
this.blocks = this.options.blocks;
this.chain = this.options.chain;
this.db = null;
this.batch = null;
this.syncing = false;
this.height = 0;
}
/**
* Start a new batch write.
* @returns {Batch}
*/
start() {
assert(this.batch === null, 'Already started.');
this.batch = this.db.batch();
return this.batch;
}
/**
* Put key and value to the current batch.
* @param {String} key
* @param {Buffer} value
*/
put(key, value) {
this.batch.put(key, value);
}
/**
* Delete key from the current batch.
* @param {String} key
*/
del(key) {
this.batch.del(key);
}
/**
* Commit the current batch.
* @returns {Promise}
*/
async commit() {
await this.batch.write();
this.batch = null;
}
/**
* Open the indexer, open the database,
* initialize height, and bind to events.
* @returns {Promise}
*/
async open() {
this.logger.info('Indexer is loading.');
await this.ensure();
await this.db.open();
await this.db.verify(layout.V.encode(), 'index', 0);
await this.verifyNetwork();
// Initialize the indexed height.
const data = await this.db.get(layout.R.encode());
if (data)
this.height = bio.readU32(data, 0);
else
await this.saveGenesis();
// Bind to chain events.
this.bind();
}
/**
* Close the indexer, wait for the database to close.
* @returns {Promise}
*/
async close() {
return this.db.close();
}
/**
* Ensure prefix directory (prefix/index).
* @returns {Promise}
*/
async ensure() {
if (fs.unsupported)
return;
if (this.options.memory)
return;
await fs.mkdirp(this.options.prefix);
}
/**
* Verify network of index.
* @returns {Promise}
*/
async verifyNetwork() {
let raw = await this.db.get(layout.O.encode());
if (!raw) {
raw = bio.write(4).writeU32(this.network.magic).render();
await this.db.put(layout.O.encode(), raw);
return;
}
const magic = bio.readU32(raw, 0);
if (magic !== this.network.magic)
throw new Error('Indexer: Network mismatch.');
}
/**
* A special case for indexing the genesis block. The genesis
* block coins are not spendable; however, indexers can still index
* the block for historical and informational purposes.
* @private
* @returns {Promise}
*/
async saveGenesis() {
this.start();
const block = Block.fromRaw(Buffer.from(this.network.genesisBlock, 'hex'));
const meta = new BlockMeta(block.hash(), 0);
await this.indexBlock(meta, block, new CoinView());
await this._setTip(meta);
await this.commit();
this.height = 0;
}
/**
* Bind to chain events.
* @private
*/
bind() {
const listener = async (entry, block, view) => {
const meta = new BlockMeta(entry.hash, entry.height);
try {
await this.sync(meta, block, view);
} catch (e) {
this.emit('error', e);
}
};
this.chain.on('connect', listener);
this.chain.on('disconnect', listener);
this.chain.on('reset', listener);
}
/**
* Get a chain entry for the main chain only.
* @private
* @returns {Promise}
*/
async getEntry(hash) {
const entry = await this.chain.getEntry(hash);
if (!entry)
return null;
if (!await this.chain.isMainChain(entry))
return null;
return entry;
}
/**
* Get an index block meta by height.
* @param {Number} height
* @returns {Promise}
*/
async getBlockMeta(height) {
const data = await this.db.get(layout.h.encode(height));
if (!data)
return null;
return new BlockMeta(data, height);
}
/**
* Sync with the chain.
* @param {BlockMeta} meta
* @param {Block} block
* @param {CoinView} view
* @returns {Promise}
*/
async sync(meta, block, view) {
if (this.syncing)
return;
this.syncing = true;
const connected = await this._syncBlock(meta, block, view);
if (connected) {
this.syncing = false;
} else {
(async () => {
try {
await this._syncChain();
} catch (e) {
this.emit('error', e);
} finally {
this.syncing = false;
}
})();
}
}
/**
* Sync with the chain with a block.
* @private
* @param {BlockMeta} meta
* @param {Block} block
* @param {CoinView} view
* @returns {Promise}
*/
async _syncBlock(meta, block, view) {
// In the case that the next block is being
// connected or the current block disconnected
// use the block and view being passed directly,
// instead of reading that information again.
if (meta && block && view) {
if (meta.height === this.height + 1) {
// Make sure that the block is connected to
// the indexer chain.
const prev = await this.getBlockMeta(this.height);
if (prev.hash.compare(block.prevBlock) !== 0)
return false;
await this._addBlock(meta, block, view);
return true;
} else if (meta.height === this.height) {
// Make sure that this is the current block.
const current = await this.getBlockMeta(this.height);
if (current.hash.compare(block.hash()) !== 0)
return false;
await this._removeBlock(meta, block, view);
return true;
}
}
return false;
}
/**
* Sync with the chain.
* @private
* @returns {Promise}
*/
async _syncChain() {
let height = this.height;
// In the case that the indexer has never
// started, sync to the best height.
if (!height) {
await this._rollforward();
return;
}
// Check for a re-org that might
// leave chain in a different state.
// Scan chain backwards until we
// find a common height.
while (height > 0) {
const meta = await this.getBlockMeta(height);
assert(meta);
if (await this.getEntry(meta.hash))
break;
height -= 1;
}
if (height < this.height) {
await this._rollback(height);
await this._rollforward();
} else {
await this._rollforward();
}
}
/**
* Scan blockchain to the best chain height.
* @private
* @returns {Promise}
*/
async _rollforward() {
this.logger.info('Indexing to best height from height (%d).', this.height);
for (let height = this.height + 1; ; height++) {
const entry = await this.getEntry(height);
if (!entry)
break;
const meta = new BlockMeta(entry.hash, height);
const block = await this.chain.getBlock(entry.hash);
assert(block);
const view = await this.chain.getBlockView(block);
assert(view);
await this._addBlock(meta, block, view);
}
}
/**
* Rollback to a given chain height.
* @param {Number} height
* @returns {Promise}
*/
async _rollback(height) {
if (height > this.height) {
this.logger.warning(
'Ignoring rollback to future height (%d).',
height);
return;
}
this.logger.info('Rolling back to height %d.', height);
while (this.height > height && this.height > 1) {
const meta = await this.getBlockMeta(this.height);
assert(meta);
const block = await this.chain.getBlock(meta.hash);
assert(block);
const view = await this.chain.getBlockView(block);
assert(view);
await this._removeBlock(meta, block, view);
}
}
/**
* Add a block's transactions without a lock.
* @private
* @param {BlockMeta} meta
* @param {Block} block
* @param {CoinView} view
* @returns {Promise}
*/
async _addBlock(meta, block, view) {
assert(block.hasRaw(), 'Expected raw data for block.');
const start = util.bench();
if (meta.height !== this.height + 1)
throw new Error('Indexer: Can not add block.');
// Start the batch write.
this.start();
// Call the implemented indexer to add to
// the batch write.
await this.indexBlock(meta, block, view);
// Sync the height to the new tip.
const height = await this._setTip(meta);
// Commit the write batch to disk.
await this.commit();
// Update height _after_ successful commit.
this.height = height;
// Log the current indexer status.
this.logStatus(start, block, meta);
}
/**
* Process block indexing
* Indexers will implement this method to process the block for indexing
* @param {BlockMeta} meta
* @param {Block} block
* @param {CoinView} view
* @returns {Promise}
*/
async indexBlock(meta, block, view) {
;
}
/**
* Undo block indexing
* Indexers will implement this method to undo indexing for the block
* @param {BlockMeta} meta
* @param {Block} block
* @param {CoinView} view
* @returns {Promise}
*/
async unindexBlock(meta, block, view) {
;
}
/**
* Unconfirm a block's transactions.
* @private
* @param {BlockMeta} meta
* @param {Block} block
* @param {CoinView} view
* @returns {Promise}
*/
async _removeBlock(meta, block, view) {
const start = util.bench();
if (meta.height !== this.height)
throw new Error('Indexer: Can not remove block.');
// Start the batch write.
this.start();
// Call the implemented indexer to add to
// the batch write.
await this.unindexBlock(meta, block, view);
const prev = await this.getBlockMeta(meta.height - 1);
assert(prev);
// Sync the height to the previous tip.
const height = await this._setTip(prev);
// Commit the write batch to disk.
await this.commit();
// Update height _after_ successful commit.
this.height = height;
// Log the current indexer status.
this.logStatus(start, block, meta, true);
}
/**
* Update the current height to tip.
* @param {BlockMeta} tip
* @returns {Promise}
*/
async _setTip(meta) {
if (meta.height < this.height) {
assert(meta.height === this.height - 1);
this.del(layout.h.encode(this.height));
} else if (meta.height > this.height) {
assert(meta.height === this.height + 1);
}
// Add to batch write to save tip and height.
this.put(layout.h.encode(meta.height), meta.hash);
const raw = bio.write(4).writeU32(meta.height).render();
this.put(layout.R.encode(), raw);
return meta.height;
}
/**
* Test whether the indexer has reached its slow height.
* @private
* @returns {Boolean}
*/
isSlow() {
if (this.height === 1 || this.height % 20 === 0)
return true;
if (this.height >= this.network.block.slowHeight)
return true;
return false;
}
/**
* Log the current indexer status.
* @private
* @param {Array} start
* @param {Block} block
* @param {BlockMeta} meta
* @param {Boolean} reverse
*/
logStatus(start, block, meta, reverse) {
if (!this.isSlow())
return;
const elapsed = util.bench(start);
const msg = reverse ? 'removed from' : 'added to';
this.logger.info(
'Block (%d) %s indexer (txs=%d time=%d).',
meta.height,
msg,
block.txs.length,
elapsed);
}
}
/**
* Block Meta
*/
class BlockMeta {
constructor(hash, height) {
this.hash = hash || ZERO_HASH;
this.height = height || 0;
assert(Buffer.isBuffer(this.hash) && this.hash.length === 32);
assert(Number.isInteger(this.height));
}
}
/**
* Index Options
*/
class IndexOptions {
/**
* Create index options.
* @constructor
* @param {String} module
* @param {Object} options
*/
constructor(module, options) {
this.module = module;
this.network = Network.primary;
this.logger = Logger.global;
this.blocks = null;
this.chain = null;
this.prefix = null;
this.location = null;
this.memory = true;
this.maxFiles = 64;
this.cacheSize = 16 << 20;
this.compression = true;
if (options)
this.fromOptions(options);
}
/**
* Inject properties from object.
* @private
* @param {Object} options
* @returns {IndexOptions}
*/
fromOptions(options) {
assert(options.blocks && typeof options.blocks === 'object',
'Indexer requires a blockstore.');
assert(options.chain && typeof options.chain === 'object',
'Indexer requires chain.');
assert(!options.prune, 'Can not index while pruned.');
this.blocks = options.blocks;
this.chain = options.chain;
if (options.network != null)
this.network = Network.get(options.network);
if (options.logger != null) {
assert(typeof options.logger === 'object');
this.logger = options.logger;
}
if (options.prefix != null) {
assert(typeof options.prefix === 'string');
this.prefix = options.prefix;
this.prefix = path.join(this.prefix, 'index');
this.location = path.join(this.prefix, this.module);
}
if (options.location != null) {
assert(typeof options.location === 'string');
this.location = options.location;
}
if (options.memory != null) {
assert(typeof options.memory === 'boolean');
this.memory = options.memory;
}
if (options.maxFiles != null) {
assert((options.maxFiles >>> 0) === options.maxFiles);
this.maxFiles = options.maxFiles;
}
if (options.cacheSize != null) {
assert(Number.isSafeInteger(options.cacheSize) && options.cacheSize >= 0);
this.cacheSize = options.cacheSize;
}
if (options.compression != null) {
assert(typeof options.compression === 'boolean');
this.compression = options.compression;
}
return this;
}
/**
* Instantiate indexer options from object.
* @param {Object} options
* @returns {IndexOptions}
*/
static fromOptions(options) {
return new this().fromOptions(options);
}
}
/*
* Expose
*/
module.exports = Indexer;

32
lib/indexer/layout.js Normal file
View File

@ -0,0 +1,32 @@
/*!
* layout.js - indexer layout for bcoin
* Copyright (c) 2018, the bcoin developers (MIT License).
* https://github.com/bcoin-org/bcoin
*/
'use strict';
const bdb = require('bdb');
/*
* Index database layout:
* To be extended by indexer implementations.
*
* V -> db version
* O -> flags
* h[height] -> block hash
* R -> index sync height
*/
const layout = {
V: bdb.key('V'),
O: bdb.key('O'),
h: bdb.key('h', ['uint32']),
R: bdb.key('R')
};
/*
* Expose
*/
module.exports = layout;

318
lib/indexer/txindexer.js Normal file
View File

@ -0,0 +1,318 @@
/*!
* txindexer.js - transaction indexer for bcoin
* Copyright (c) 2018, the bcoin developers (MIT License).
* https://github.com/bcoin-org/bcoin
*/
'use strict';
const assert = require('bsert');
const bdb = require('bdb');
const bio = require('bufio');
const layout = require('./layout');
const consensus = require('../protocol/consensus');
const TX = require('../primitives/tx');
const TXMeta = require('../primitives/txmeta');
const Indexer = require('./indexer');
/*
* TXIndexer Database Layout:
* t[hash] -> tx record
* b[height] -> block record
*
* The transaction index maps a transaction to a block
* and an index, offset, and length within that block. The
* block hash is stored in a separate record by height so that
* the 32 byte hash is not repeated for every transaction
* within a block.
*/
Object.assign(layout, {
t: bdb.key('t', ['hash256']),
b: bdb.key('b', ['uint32'])
});
/**
* Block Record
*/
class BlockRecord {
/**
* Create a block record.
* @constructor
*/
constructor(options = {}) {
this.block = options.block || consensus.ZERO_HASH;
this.time = options.time || 0;
assert(this.block.length === 32);
assert((this.time >>> 0) === this.time);
}
/**
* Inject properties from serialized data.
* @private
* @param {Buffer} data
*/
fromRaw(data) {
const br = bio.read(data);
this.block = br.readHash();
this.time = br.readU32();
return this;
}
/**
* Instantiate block record from serialized data.
* @param {Buffer} data
* @returns {BlockRecord}
*/
static fromRaw(data) {
return new this().fromRaw(data);
}
/**
* Serialize the block record.
* @returns {Buffer}
*/
toRaw() {
const bw = bio.write(36);
bw.writeHash(this.block);
bw.writeU32(this.time);
return bw.render();
}
}
/**
* Transaction Record
*/
class TxRecord {
/**
* Create a transaction record.
* @constructor
*/
constructor(options = {}) {
this.height = options.height || 0;
this.index = options.index || 0;
this.offset = options.offset || 0;
this.length = options.length || 0;
assert((this.height >>> 0) === this.height);
assert((this.index >>> 0) === this.index);
assert((this.offset >>> 0) === this.offset);
assert((this.length >>> 0) === this.length);
}
/**
* Inject properties from serialized data.
* @private
* @param {Buffer} data
*/
fromRaw(data) {
const br = bio.read(data);
this.height = br.readU32();
this.index = br.readU32();
this.offset = br.readU32();
this.length = br.readU32();
return this;
}
/**
* Instantiate transaction record from serialized data.
* @param {Buffer} data
* @returns {TxRecord}
*/
static fromRaw(data) {
return new this().fromRaw(data);
}
/**
* Serialize the transaction record.
* @returns {Buffer}
*/
toRaw() {
const bw = bio.write(16);
bw.writeU32(this.height);
bw.writeU32(this.index);
bw.writeU32(this.offset);
bw.writeU32(this.length);
return bw.render();
}
}
/**
* TXIndexer
* @alias module:indexer.TXIndexer
* @extends Indexer
*/
class TXIndexer extends Indexer {
/**
* Create an indexer
* @constructor
* @param {Object} options
*/
constructor(options) {
super('tx', options);
this.db = bdb.create(this.options);
}
/**
* Index transactions by txid.
* @private
* @param {BlockMeta} meta
* @param {Block} block
* @param {CoinView} view
*/
async indexBlock(meta, block, view) {
const brecord = new BlockRecord({
block: meta.hash,
time: block.time
});
this.put(layout.b.encode(meta.height), brecord.toRaw());
for (let i = 0; i < block.txs.length; i++) {
const tx = block.txs[i];
const hash = tx.hash();
const {offset, size} = tx.getPosition();
const txrecord = new TxRecord({
height: meta.height,
index: i,
offset: offset,
length: size
});
this.put(layout.t.encode(hash), txrecord.toRaw());
}
}
/**
* Remove transactions from index.
* @private
* @param {BlockMeta} meta
* @param {Block} block
* @param {CoinView} view
*/
async unindexBlock(meta, block, view) {
this.del(layout.b.encode(meta.height));
for (let i = 0; i < block.txs.length; i++) {
const tx = block.txs[i];
const hash = tx.hash();
this.del(layout.t.encode(hash));
}
}
/**
* Get a transaction with metadata.
* @param {Hash} hash
* @returns {Promise} - Returns {@link TXMeta}.
*/
async getMeta(hash) {
const raw = await this.db.get(layout.t.encode(hash));
if (!raw)
return null;
const record = TxRecord.fromRaw(raw);
const {height, index, offset, length} = record;
const braw = await this.db.get(layout.b.encode(height));
if (!braw)
return null;
const brecord = BlockRecord.fromRaw(braw);
const {block, time} = brecord;
const data = await this.blocks.read(block, offset, length);
const tx = TX.fromRaw(data);
const meta = TXMeta.fromTX(tx);
meta.height = height;
meta.block = block;
meta.time = time;
meta.index = index;
return meta;
}
/**
* Retrieve a transaction.
* @param {Hash} hash
* @returns {Promise} - Returns {@link TX}.
*/
async getTX(hash) {
const meta = await this.getMeta(hash);
if (!meta)
return null;
return meta.tx;
}
/**
* @param {Hash} hash
* @returns {Promise} - Returns Boolean.
*/
async hasTX(hash) {
return this.db.has(layout.t.encode(hash));
}
/**
* Get coin viewpoint (historical).
* @param {TX} tx
* @returns {Promise} - Returns {@link CoinView}.
*/
async getSpentView(tx) {
const view = await this.chain.getCoinView(tx);
for (const {prevout} of tx.inputs) {
if (view.hasEntry(prevout))
continue;
const {hash, index} = prevout;
const meta = await this.getMeta(hash);
if (!meta)
continue;
const {tx, height} = meta;
if (index < tx.outputs.length)
view.addIndex(tx, index, height);
}
return view;
}
}
module.exports = TXIndexer;

235
lib/mempool/addrindexer.js Normal file
View File

@ -0,0 +1,235 @@
/*!
* addrindexer.js - mempool address indexer for bcoin
* Copyright (c) 2018-2019, the bcoin developers (MIT License).
* Copyright (c) 2014-2017, Christopher Jeffrey (MIT License).
* https://github.com/bcoin-org/bcoin
*/
'use strict';
const assert = require('bsert');
const {BufferMap} = require('buffer-map');
const TXMeta = require('../primitives/txmeta');
/**
* Address Indexer
* @ignore
*/
class AddrIndexer {
/**
* Create TX address index.
* @constructor
*/
constructor() {
// Map of addr->entries.
this.index = new BufferMap();
// Map of txid->addrs.
this.map = new BufferMap();
}
reset() {
this.index.clear();
this.map.clear();
}
getKey(addr) {
const prefix = addr.getPrefix();
if (prefix < 0)
return null;
const raw = Buffer.allocUnsafe(1);
raw.writeUInt8(prefix);
return Buffer.concat([raw, addr.getHash()]);
}
/**
* Get transactions by address.
* @param {Address} addr
* @param {Object} options
* @param {Number} options.limit
* @param {Number} options.reverse
* @param {Buffer} options.after
*/
get(addr, options = {}) {
const values = this.getEntries(addr, options);
const out = [];
for (const entry of values)
out.push(entry.tx);
return out;
}
/**
* Get transaction meta by address.
* @param {Address} addr
* @param {Object} options
* @param {Number} options.limit
* @param {Number} options.reverse
* @param {Buffer} options.after
*/
getMeta(addr, options = {}) {
const values = this.getEntries(addr, options);
const out = [];
for (const entry of values) {
const meta = TXMeta.fromTX(entry.tx);
meta.mtime = entry.time;
out.push(meta);
}
return out;
}
/**
* Get entries by address.
* @param {Address} addr
* @param {Object} options
* @param {Number} options.limit
* @param {Number} options.reverse
* @param {Buffer} options.after
*/
getEntries(addr, options = {}) {
const {limit, reverse, after} = options;
const key = this.getKey(addr);
if (!key)
return [];
const items = this.index.get(key);
if (!items)
return [];
let values = [];
// Check to see if results should be skipped because
// the after hash is expected to be within a following
// confirmed query.
const skip = (after && !items.has(after) && reverse);
if (skip)
return values;
if (after && items.has(after)) {
// Give results starting from after
// the tx hash for the address.
let index = 0;
for (const k of items.keys()) {
if (k.compare(after) === 0)
break;
index += 1;
}
values = Array.from(items.values());
let start = index + 1;
let end = values.length;
if (end - start > limit)
end = start + limit;
if (reverse) {
start = 0;
end = index;
if (end > limit)
start = end - limit;
}
values = values.slice(start, end);
} else {
// Give earliest or latest results
// for the address.
values = Array.from(items.values());
if (values.length > limit) {
let start = 0;
let end = limit;
if (reverse) {
start = values.length - limit;
end = values.length;
}
values = values.slice(start, end);
}
}
if (reverse)
values.reverse();
return values;
}
insert(entry, view) {
const tx = entry.tx;
const hash = tx.hash();
const addrs = tx.getAddresses(view);
if (addrs.length === 0)
return;
for (const addr of addrs) {
const key = this.getKey(addr);
if (!key)
continue;
let items = this.index.get(key);
if (!items) {
items = new BufferMap();
this.index.set(key, items);
}
assert(!items.has(hash));
items.set(hash, entry);
}
this.map.set(hash, addrs);
}
remove(hash) {
const addrs = this.map.get(hash);
if (!addrs)
return;
for (const addr of addrs) {
const key = this.getKey(addr);
if (!key)
continue;
const items = this.index.get(key);
assert(items);
assert(items.has(hash));
items.delete(hash);
if (items.size === 0)
this.index.delete(key);
}
this.map.delete(hash);
}
}
/*
* Expose
*/
module.exports = AddrIndexer;

View File

@ -19,7 +19,6 @@ const policy = require('../protocol/policy');
const util = require('../utils/util');
const random = require('bcrypto/lib/random');
const {VerifyError} = require('../protocol/errors');
const Address = require('../primitives/address');
const Script = require('../script/script');
const Outpoint = require('../primitives/outpoint');
const TX = require('../primitives/tx');
@ -28,6 +27,7 @@ const TXMeta = require('../primitives/txmeta');
const MempoolEntry = require('./mempoolentry');
const Network = require('../protocol/network');
const layout = require('./layout');
const AddrIndexer = require('./addrindexer');
const Fees = require('./fees');
const CoinView = require('../coins/coinview');
@ -73,8 +73,7 @@ class Mempool extends EventEmitter {
this.spents = new BufferMap();
this.rejects = new RollingFilter(120000, 0.000001);
this.coinIndex = new CoinIndex();
this.txIndex = new TXIndex();
this.addrindex = new AddrIndexer();
}
/**
@ -364,8 +363,7 @@ class Mempool extends EventEmitter {
this.orphans.clear();
this.map.clear();
this.spents.clear();
this.coinIndex.reset();
this.txIndex.reset();
this.addrindex.reset();
this.freeCount = 0;
this.lastTime = 0;
@ -568,73 +566,32 @@ class Mempool extends EventEmitter {
return entry.tx;
}
/**
* Find all coins pertaining to a certain address.
* @param {Address[]} addrs
* @returns {Coin[]}
*/
getCoinsByAddress(addrs) {
if (!Array.isArray(addrs))
addrs = [addrs];
const out = [];
for (const addr of addrs) {
const hash = Address.getHash(addr);
const coins = this.coinIndex.get(hash);
for (const coin of coins)
out.push(coin);
}
return out;
}
/**
* Find all transactions pertaining to a certain address.
* @param {Address[]} addrs
* @param {Address} addr
* @param {Object} options
* @param {Number} options.limit
* @param {Number} options.reverse
* @param {Buffer} options.after
* @returns {TX[]}
*/
getTXByAddress(addrs) {
if (!Array.isArray(addrs))
addrs = [addrs];
const out = [];
for (const addr of addrs) {
const hash = Address.getHash(addr);
const txs = this.txIndex.get(hash);
for (const tx of txs)
out.push(tx);
}
return out;
getTXByAddress(addr, options) {
return this.addrindex.get(addr, options);
}
/**
* Find all transactions pertaining to a certain address.
* @param {Address[]} addrs
* @param {Address} addr
* @param {Object} options
* @param {Number} options.limit
* @param {Number} options.reverse
* @param {Buffer} options.after
* @returns {TXMeta[]}
*/
getMetaByAddress(addrs) {
if (!Array.isArray(addrs))
addrs = [addrs];
const out = [];
for (const addr of addrs) {
const hash = Address.getHash(addr);
const txs = this.txIndex.getMeta(hash);
for (const tx of txs)
out.push(tx);
}
return out;
getMetaByAddress(addr, options) {
return this.addrindex.getMeta(addr, options);
}
/**
@ -1883,17 +1840,7 @@ class Mempool extends EventEmitter {
*/
indexEntry(entry, view) {
const tx = entry.tx;
this.txIndex.insert(entry, view);
for (const {prevout} of tx.inputs) {
const {hash, index} = prevout;
this.coinIndex.remove(hash, index);
}
for (let i = 0; i < tx.outputs.length; i++)
this.coinIndex.insert(tx, i);
this.addrindex.insert(entry, view);
}
/**
@ -1903,23 +1850,8 @@ class Mempool extends EventEmitter {
*/
unindexEntry(entry) {
const tx = entry.tx;
const hash = tx.hash();
this.txIndex.remove(hash);
for (const {prevout} of tx.inputs) {
const {hash, index} = prevout;
const prev = this.getTX(hash);
if (!prev)
continue;
this.coinIndex.insert(prev, index);
}
for (let i = 0; i < tx.outputs.length; i++)
this.coinIndex.remove(hash, i);
const hash = entry.tx.hash();
this.addrindex.remove(hash);
}
/**
@ -2187,210 +2119,6 @@ class MempoolOptions {
}
}
/**
* TX Address Index
* @ignore
*/
class TXIndex {
/**
* Create TX address index.
* @constructor
*/
constructor() {
// Map of addr->entries.
this.index = new BufferMap();
// Map of txid->addrs.
this.map = new BufferMap();
}
reset() {
this.index.clear();
this.map.clear();
}
get(addr) {
const items = this.index.get(addr);
if (!items)
return [];
const out = [];
for (const entry of items.values())
out.push(entry.tx);
return out;
}
getMeta(addr) {
const items = this.index.get(addr);
if (!items)
return [];
const out = [];
for (const entry of items.values()) {
const meta = TXMeta.fromTX(entry.tx);
meta.mtime = entry.time;
out.push(meta);
}
return out;
}
insert(entry, view) {
const tx = entry.tx;
const hash = tx.hash();
const addrs = tx.getHashes(view);
if (addrs.length === 0)
return;
for (const addr of addrs) {
let items = this.index.get(addr);
if (!items) {
items = new BufferMap();
this.index.set(addr, items);
}
assert(!items.has(hash));
items.set(hash, entry);
}
this.map.set(hash, addrs);
}
remove(hash) {
const addrs = this.map.get(hash);
if (!addrs)
return;
for (const addr of addrs) {
const items = this.index.get(addr);
assert(items);
assert(items.has(hash));
items.delete(hash);
if (items.size === 0)
this.index.delete(addr);
}
this.map.delete(hash);
}
}
/**
* Coin Address Index
* @ignore
*/
class CoinIndex {
/**
* Create coin address index.
* @constructor
*/
constructor() {
// Map of addr->coins.
this.index = new BufferMap();
// Map of outpoint->addr.
this.map = new BufferMap();
}
reset() {
this.index.clear();
this.map.clear();
}
get(addr) {
const items = this.index.get(addr);
if (!items)
return [];
const out = [];
for (const coin of items.values())
out.push(coin.toCoin());
return out;
}
insert(tx, index) {
const output = tx.outputs[index];
const hash = tx.hash();
const addr = output.getHash();
if (!addr)
return;
let items = this.index.get(addr);
if (!items) {
items = new BufferMap();
this.index.set(addr, items);
}
const key = Outpoint.toKey(hash, index);
assert(!items.has(key));
items.set(key, new IndexedCoin(tx, index));
this.map.set(key, addr);
}
remove(hash, index) {
const key = Outpoint.toKey(hash, index);
const addr = this.map.get(key);
if (!addr)
return;
const items = this.index.get(addr);
assert(items);
assert(items.has(key));
items.delete(key);
if (items.size === 0)
this.index.delete(addr);
this.map.delete(key);
}
}
/**
* Indexed Coin
* @ignore
*/
class IndexedCoin {
/**
* Create an indexed coin.
* @constructor
* @param {TX} tx
* @param {Number} index
*/
constructor(tx, index) {
this.tx = tx;
this.index = index;
}
toCoin() {
return Coin.fromTX(this.tx, this.index, -1);
}
}
/**
* Orphan
* @ignore

View File

@ -643,11 +643,9 @@ class Pool extends EventEmitter {
*/
startSync() {
if (!this.opened)
if (!this.opened || !this.connected)
return;
assert(this.connected, 'Pool is not connected!');
this.syncing = true;
this.resync(false);
}
@ -657,11 +655,9 @@ class Pool extends EventEmitter {
*/
forceSync() {
if (!this.opened)
if (!this.opened || !this.connected)
return;
assert(this.connected, 'Pool is not connected!');
this.resync(true);
}

View File

@ -17,6 +17,8 @@ const Node = require('./node');
const HTTP = require('./http');
const RPC = require('./rpc');
const blockstore = require('../blockstore');
const TXIndexer = require('../indexer/txindexer');
const AddrIndexer = require('../indexer/addrindexer');
/**
* Full Node
@ -151,9 +153,36 @@ class FullNode extends Node {
port: this.config.uint('http-port'),
apiKey: this.config.str('api-key'),
noAuth: this.config.bool('no-auth'),
cors: this.config.bool('cors')
cors: this.config.bool('cors'),
maxTxs: this.config.uint('max-txs')
});
// Indexers
if (this.config.bool('index-tx')) {
this.txindex = new TXIndexer({
network: this.network,
logger: this.logger,
blocks: this.blocks,
chain: this.chain,
prune: this.config.bool('prune'),
memory: this.config.bool('memory'),
prefix: this.config.str('index-prefix', this.config.prefix)
});
}
if (this.config.bool('index-address')) {
this.addrindex = new AddrIndexer({
network: this.network,
logger: this.logger,
blocks: this.blocks,
chain: this.chain,
prune: this.config.bool('prune'),
memory: this.config.bool('memory'),
prefix: this.config.str('index-prefix', this.config.prefix),
maxTxs: this.config.uint('max-txs')
});
}
this.init();
}
@ -169,6 +198,12 @@ class FullNode extends Node {
this.pool.on('error', err => this.error(err));
this.miner.on('error', err => this.error(err));
if (this.txindex)
this.txindex.on('error', err => this.error(err));
if (this.addrindex)
this.addrindex.on('error', err => this.error(err));
if (this.http)
this.http.on('error', err => this.error(err));
@ -235,6 +270,12 @@ class FullNode extends Node {
await this.miner.open();
await this.pool.open();
if (this.txindex)
await this.txindex.open();
if (this.addrindex)
await this.addrindex.open();
await this.openPlugins();
await this.http.open();
@ -256,6 +297,12 @@ class FullNode extends Node {
await this.handlePreclose();
await this.http.close();
if (this.txindex)
await this.txindex.close();
if (this.addrindex)
await this.addrindex.close();
await this.closePlugins();
await this.pool.close();
@ -367,6 +414,12 @@ class FullNode extends Node {
*/
startSync() {
if (this.txindex)
this.txindex.sync();
if (this.addrindex)
this.addrindex.sync();
return this.pool.startSync();
}
@ -408,44 +461,61 @@ class FullNode extends Node {
return this.chain.getCoin(hash, index);
}
/**
* Get coins that pertain to an address from the mempool or chain database.
* Takes into account spent coins in the mempool.
* @param {Address} addrs
* @returns {Promise} - Returns {@link Coin}[].
*/
async getCoinsByAddress(addrs) {
const mempool = this.mempool.getCoinsByAddress(addrs);
const chain = await this.chain.getCoinsByAddress(addrs);
const out = [];
for (const coin of chain) {
const spent = this.mempool.isSpent(coin.hash, coin.index);
if (spent)
continue;
out.push(coin);
}
for (const coin of mempool)
out.push(coin);
return out;
}
/**
* Retrieve transactions pertaining to an
* address from the mempool or chain database.
* @param {Address} addrs
* @param {Address} addr
* @param {Object} options
* @param {Number} options.limit
* @param {Boolean} options.reverse
* @param {Buffer} options.after
* @returns {Promise} - Returns {@link TXMeta}[].
*/
async getMetaByAddress(addrs) {
const mempool = this.mempool.getMetaByAddress(addrs);
const chain = await this.chain.getMetaByAddress(addrs);
return chain.concat(mempool);
async getMetaByAddress(addr, options = {}) {
if (!this.txindex || !this.addrindex)
return [];
const {reverse, after} = options;
let {limit} = options;
let metas = [];
const confirmed = async () => {
const hashes = await this.addrindex.getHashesByAddress(
addr, {limit, reverse, after});
for (const hash of hashes) {
const mtx = await this.txindex.getMeta(hash);
assert(mtx);
metas.push(mtx);
}
};
const unconfirmed = () => {
const mempool = this.mempool.getMetaByAddress(
addr, {limit, reverse, after});
metas = metas.concat(mempool);
};
if (reverse)
unconfirmed();
else
await confirmed();
if (metas.length > 0)
limit -= metas.length;
if (limit <= 0)
return metas;
if (reverse)
await confirmed();
else
unconfirmed();
return metas;
}
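A rough sketch of consuming the paging above (the helper is illustrative only, assumes both indexes are enabled, and does not special-case unconfirmed entries at a page boundary):

```
// Illustrative only: collect an address's history in pages of 100,
// using the last txid of each page as the `after` cursor.
async function collectMetaByAddress(node, addr) {
  const out = [];
  let after = null;

  for (;;) {
    const metas = await node.getMetaByAddress(addr, {limit: 100, after});
    out.push(...metas);

    if (metas.length < 100)
      break;

    after = metas[metas.length - 1].tx.hash();
  }

  return out;
}
```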
/**
@ -460,7 +530,10 @@ class FullNode extends Node {
if (meta)
return meta;
return this.chain.getMeta(hash);
if (this.txindex)
return this.txindex.getMeta(hash);
return null;
}
/**
@ -472,18 +545,26 @@ class FullNode extends Node {
async getMetaView(meta) {
if (meta.height === -1)
return this.mempool.getSpentView(meta.tx);
return this.chain.getSpentView(meta.tx);
if (this.txindex)
return this.txindex.getSpentView(meta.tx);
return null;
}
/**
* Retrieve transactions pertaining to an
* address from the mempool or chain database.
* @param {Address} addrs
* @param {Address} addr
* @param {Object} options
* @param {Number} options.limit
* @param {Boolean} options.reverse
* @param {Buffer} options.after
* @returns {Promise} - Returns {@link TX}[].
*/
async getTXByAddress(addrs) {
const mtxs = await this.getMetaByAddress(addrs);
async getTXByAddress(addr, options = {}) {
const mtxs = await this.getMetaByAddress(addr, options);
const out = [];
for (const mtx of mtxs)
@ -517,7 +598,10 @@ class FullNode extends Node {
if (this.mempool.hasEntry(hash))
return true;
return this.chain.hasTX(hash);
if (this.txindex)
return this.txindex.hasTX(hash);
return false;
}
}

View File

@ -127,6 +127,16 @@ class HTTP extends Server {
tip: this.chain.tip.rhash(),
progress: this.chain.getProgress()
},
indexes: {
addr: {
enabled: Boolean(this.node.addrindex),
height: this.node.addrindex ? this.node.addrindex.height : 0
},
tx: {
enabled: Boolean(this.node.txindex),
height: this.node.txindex ? this.node.txindex.height : 0
}
},
pool: {
host: addr.host,
port: addr.port,
@ -149,24 +159,6 @@ class HTTP extends Server {
});
});
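For illustration, the `indexes` section added to the `GET /` response has roughly this shape (the heights below are placeholder values):

```
// Sketch of the new `indexes` section in node info (placeholder heights).
const indexes = {
  addr: {enabled: true, height: 571000},
  tx: {enabled: true, height: 571000}
};
```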
// UTXO by address
this.get('/coin/address/:address', async (req, res) => {
const valid = Validator.fromRequest(req);
const address = valid.str('address');
enforce(address, 'Address is required.');
enforce(!this.chain.options.spv, 'Cannot get coins in SPV mode.');
const addr = Address.fromString(address, this.network);
const coins = await this.node.getCoinsByAddress(addr);
const result = [];
for (const coin of coins)
result.push(coin.getJSON(this.network));
res.json(200, result);
});
// UTXO by id
this.get('/coin/:hash/:index', async (req, res) => {
const valid = Validator.fromRequest(req);
@ -187,23 +179,6 @@ class HTTP extends Server {
res.json(200, coin.getJSON(this.network));
});
// Bulk read UTXOs
this.post('/coin/address', async (req, res) => {
const valid = Validator.fromRequest(req);
const address = valid.array('addresses');
enforce(address, 'Address is required.');
enforce(!this.chain.options.spv, 'Cannot get coins in SPV mode.');
const coins = await this.node.getCoinsByAddress(address);
const result = [];
for (const coin of coins)
result.push(coin.getJSON(this.network));
res.json(200, result);
});
// TX by hash
this.get('/tx/:hash', async (req, res) => {
const valid = Validator.fromRequest(req);
@ -228,31 +203,20 @@ class HTTP extends Server {
this.get('/tx/address/:address', async (req, res) => {
const valid = Validator.fromRequest(req);
const address = valid.str('address');
const limit = valid.uint('limit', this.options.maxTxs);
const reverse = valid.bool('reverse', false);
const after = valid.brhash('after', null);
enforce(address, 'Address is required.');
enforce(!this.chain.options.spv, 'Cannot get TX in SPV mode.');
enforce(limit <= this.options.maxTxs,
`Limit above max of ${this.options.maxTxs}.`);
const addr = Address.fromString(address, this.network);
const metas = await this.node.getMetaByAddress(addr);
const result = [];
for (const meta of metas) {
const view = await this.node.getMetaView(meta);
result.push(meta.getJSON(this.network, view, this.chain.height));
}
const metas = await this.node.getMetaByAddress(
addr, {limit, reverse, after});
res.json(200, result);
});
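A minimal client-side sketch of the new query parameters for this endpoint (the host, port, `address` and `lastTxid` values are assumptions):

```
// Illustrative request URL: up to 25 transactions for an address,
// newest first, continuing after a previously seen txid.
const {URL} = require('url');

const url = new URL(`/tx/address/${address}`, 'http://127.0.0.1:8332');
url.searchParams.set('limit', '25');
url.searchParams.set('reverse', 'true');
url.searchParams.set('after', lastTxid); // txid (hex) from the previous page
```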
// Bulk read TXs
this.post('/tx/address', async (req, res) => {
const valid = Validator.fromRequest(req);
const address = valid.array('addresses');
enforce(address, 'Address is required.');
enforce(!this.chain.options.spv, 'Cannot get TX in SPV mode.');
const metas = await this.node.getMetaByAddress(address);
const result = [];
for (const meta of metas) {
@ -689,6 +653,7 @@ class HTTPOptions {
this.apiHash = sha256.digest(Buffer.from(this.apiKey, 'ascii'));
this.noAuth = false;
this.cors = false;
this.maxTxs = 100;
this.prefix = null;
this.host = '127.0.0.1';
@ -775,6 +740,11 @@ class HTTPOptions {
this.certFile = options.certFile;
}
if (options.maxTxs != null) {
assert(Number.isSafeInteger(options.maxTxs));
this.maxTxs = options.maxTxs;
}
// Allow no-auth implicitly
// if we're listening locally.
if (!options.apiKey) {

View File

@ -64,6 +64,8 @@ class Node extends EventEmitter {
this.pool = null;
this.miner = null;
this.http = null;
this.txindex = null;
this.addrindex = null;
this._init(file);
}

View File

@ -963,8 +963,8 @@ class RPC extends RPCBase {
if (hash) {
block = await this.chain.getBlock(hash);
} else if (this.chain.options.indexTX) {
const tx = await this.chain.getMeta(last);
} else if (await this.node.hasTX(last)) {
const tx = await this.node.getMeta(last);
if (tx)
block = await this.chain.getBlock(tx.block);
} else {

View File

@ -109,6 +109,15 @@ class Block extends AbstractBlock {
return this.frame().data;
}
/**
* Check if block has been serialized.
* @returns {Boolean}
*/
hasRaw() {
return Boolean(this._raw);
}
/**
* Serialize the block, do not include witnesses.
* @returns {Buffer}
@ -645,7 +654,7 @@ class Block extends AbstractBlock {
let witness = 0;
for (let i = 0; i < count; i++) {
const tx = TX.fromReader(br);
const tx = TX.fromReader(br, true);
witness += tx._witness;
this.txs.push(tx);
}
@ -738,7 +747,7 @@ class Block extends AbstractBlock {
bw.writeVarint(this.txs.length);
for (const tx of this.txs)
tx.toWriter(bw);
tx.toWriter(bw, true);
return bw;
}

View File

@ -57,6 +57,8 @@ class TX {
this._whash = null;
this._raw = null;
this._offset = -1;
this._block = false;
this._size = -1;
this._witness = -1;
this._sigops = -1;
@ -157,6 +159,8 @@ class TX {
this._raw = null;
this._size = -1;
this._offset = -1;
this._block = false;
this._witness = -1;
this._sigops = -1;
@ -245,15 +249,21 @@ class TX {
/**
* Write the transaction to a buffer writer.
* @param {BufferWriter} bw
* @param {Boolean} block
*/
toWriter(bw) {
toWriter(bw, block) {
if (this.mutable) {
if (this.hasWitness())
return this.writeWitness(bw);
return this.writeNormal(bw);
}
if (block) {
this._offset = bw.offset;
this._block = true;
}
bw.writeBytes(this.toRaw());
return bw;
@ -311,6 +321,21 @@ class TX {
return raw;
}
/**
* Return the offset and size of the transaction. Useful
* when the transaction is deserialized within a block.
* @returns {Object} Contains `size` and `offset`.
*/
getPosition() {
assert(this._block && this._offset > 80, 'Position not available.');
return {
offset: this._offset,
size: this._size
};
}
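A short sketch of how the recorded position can be used once a block has been deserialized (`Block` and `raw` are assumed to be in scope; the offsets depend entirely on the block):

```
// Illustrative only: after parsing a raw block, slice a single
// transaction's bytes back out of the raw buffer by its position.
const block = Block.fromRaw(raw);
const {offset, size} = block.txs[1].getPosition();
const rawTX = raw.slice(offset, offset + size);
```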
/**
* Calculate total size and size of the witness bytes.
* @returns {Object} Contains `size` and `witness`.
@ -2226,11 +2251,12 @@ class TX {
/**
* Instantiate a transaction from a buffer reader.
* @param {BufferReader} br
* @param {Boolean} block
* @returns {TX}
*/
static fromReader(br) {
return new this().fromReader(br);
static fromReader(br, block) {
return new this().fromReader(br, block);
}
/**
@ -2247,13 +2273,14 @@ class TX {
* Inject properties from buffer reader.
* @private
* @param {BufferReader} br
* @param {Boolean} block
*/
fromReader(br) {
fromReader(br, block) {
if (hasWitnessBytes(br))
return this.fromWitnessReader(br);
return this.fromWitnessReader(br, block);
br.start();
const start = br.start();
this.version = br.readU32();
@ -2269,6 +2296,11 @@ class TX {
this.locktime = br.readU32();
if (block) {
this._offset = start;
this._block = true;
}
if (!this.mutable) {
this._raw = br.endData();
this._size = this._raw.length;
@ -2285,10 +2317,11 @@ class TX {
* buffer reader (witness serialization).
* @private
* @param {BufferReader} br
* @param {Boolean} block
*/
fromWitnessReader(br) {
br.start();
fromWitnessReader(br, block) {
const start = br.start();
this.version = br.readU32();
@ -2336,6 +2369,11 @@ class TX {
this.locktime = br.readU32();
if (block) {
this._offset = start;
this._block = true;
}
if (!this.mutable && hasWitness) {
this._raw = br.endData();
this._size = this._raw.length;

View File

@ -8,8 +8,9 @@ const {resolve} = require('path');
assert(process.argv.length > 2, 'Please pass in a database path.');
// migration -
// chaindb: leveldb to flat files
// Changes:
// 1. Moves blocks and undo blocks from leveldb to flat files.
// 2. Removes tx and addr indexes from chaindb.
const db = bdb.create({
location: process.argv[2],
@ -25,29 +26,33 @@ const blockStore = new FileBlockStore({
location: location
});
async function updateVersion() {
const ver = await checkVersion();
async function getVersion() {
const data = await db.get(layout.V.encode());
assert(data, 'No version.');
console.log('Updating version to %d.', ver + 1);
return data.readUInt32LE(5, true);
}
async function updateVersion(version) {
await checkVersion(version - 1);
console.log('Updating version to %d.', version);
const buf = Buffer.allocUnsafe(5 + 4);
buf.write('chain', 0, 'ascii');
buf.writeUInt32LE(5, 5, true);
buf.writeUInt32LE(version, 5, true);
const parent = db.batch();
parent.put(layout.V.encode(), buf);
await parent.write();
}
async function checkVersion() {
async function checkVersion(version) {
console.log('Checking version.');
const data = await db.get(layout.V.encode());
assert(data, 'No version.');
const ver = await getVersion();
const ver = data.readUInt32LE(5, true);
if (ver !== 4)
if (ver !== version)
throw Error(`DB is version ${ver}.`);
return ver;
@ -113,26 +118,85 @@ async function migrateBlocks() {
await parent.write();
}
async function removeKey(name, key) {
const iter = db.iterator({
gte: key.min(),
lte: key.max(),
reverse: true,
keys: true
});
let batch = db.batch();
let total = 0;
while (await iter.next()) {
const {key} = iter;
batch.del(key);
if (++total % 10000 === 0) {
console.log('Cleaned up %d %s index records.', total, name);
await batch.write();
batch = db.batch();
}
}
await batch.write();
console.log('Cleaned up %d %s index records.', total, name);
}
async function migrateIndexes() {
const t = bdb.key('t', ['hash256']);
const T = bdb.key('T', ['hash', 'hash256']);
const C = bdb.key('C', ['hash', 'hash256', 'uint32']);
await removeKey('hash -> tx', t);
await removeKey('addr -> tx', T);
await removeKey('addr -> coin', C);
}
/*
* Execute
*/
(async () => {
await db.open();
await blockStore.ensure();
await blockStore.open();
console.log('Opened %s.', process.argv[2]);
await checkVersion();
await migrateBlocks();
await migrateUndoBlocks();
await updateVersion();
const version = await getVersion();
let compact = false;
switch (version) {
case 4:
// Upgrade from version 4 to 5.
await checkVersion(4);
await blockStore.ensure();
await blockStore.open();
await migrateBlocks();
await migrateUndoBlocks();
await updateVersion(5);
await blockStore.close();
compact = true;
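// Note: intentionally falls through so a version 4 database also
// receives the version 5 to 6 migration below.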
case 5:
// Upgrade from version 5 to 6.
await checkVersion(5);
await migrateIndexes();
await updateVersion(6);
compact = true;
break;
case 6:
console.log('Already upgraded.');
break;
default:
console.log(`DB version is ${version}.`);
}
if (compact) {
console.log('Compacting database');
await db.compactRange();
}
console.log('Compacting database');
await db.compactRange();
await db.close();
await blockStore.close();
})().then(() => {
console.log('Migration complete.');
process.exit(0);

View File

@ -1,9 +1,10 @@
#!/usr/bin/env node
const assert = require('assert');
const fs = require('fs');
'use strict';
const cp = require('child_process');
const res = require('path').resolve;
const fs = require('bfile');
const {resolve} = require('path');
const {argv} = process;
if (argv.length < 3) {
@ -13,14 +14,6 @@ if (argv.length < 3) {
return;
}
function mv(from, to) {
try {
fs.renameSync(from, to);
} catch (e) {
console.error(e.message);
}
}
function exec(file, ...args) {
try {
const result = cp.spawnSync(file, args, {
@ -39,11 +32,16 @@ function exec(file, ...args) {
const node = argv[0];
const prefix = argv[2];
mv(res(prefix, 'chain.ldb'), res(prefix, 'chain'));
mv(res(prefix, 'spvchain.ldb'), res(prefix, 'spvchain'));
mv(res(prefix, 'mempool.ldb'), res(prefix, 'mempool'));
mv(res(prefix, 'walletdb.ldb'), res(prefix, 'wallet'));
const chain = resolve(prefix, 'chain');
const spvchain = resolve(prefix, 'spvchain');
exec(node, res(__dirname, 'chaindb3to4.js'), res(prefix, 'chain'));
exec(node, res(__dirname, 'chaindb3to4.js'), res(prefix, 'spvchain'));
exec(node, res(__dirname, 'walletdb6to7.js'), res(prefix, 'wallet'));
(async () => {
if (await fs.exists(chain))
exec(node, resolve(__dirname, 'chaindb4to6.js'), chain);
if (await fs.exists(spvchain))
exec(node, resolve(__dirname, 'chaindb4to6.js'), spvchain);
})().catch((err) => {
console.error(err.stack);
process.exit(1);
});

View File

@ -375,4 +375,91 @@ describe('Block', function() {
});
}
}
it('should deserialize with offset positions for txs (witness)', () => {
const [block] = block482683.getBlock();
const expected = [
{offset: 81, size: 217},
{offset: 298, size: 815},
{offset: 1113, size: 192},
{offset: 1305, size: 259},
{offset: 1564, size: 223},
{offset: 1787, size: 1223},
{offset: 3010, size: 486},
{offset: 3496, size: 665},
{offset: 4161, size: 3176},
{offset: 7337, size: 225},
{offset: 7562, size: 1223},
{offset: 8785, size: 503}
];
assert.equal(expected.length, block.txs.length);
assert.equal(block.getSize(), expected.reduce((a, b) => a + b.size, 81));
for (let i = 0; i < block.txs.length; i++) {
const {offset, size} = block.txs[i].getPosition();
assert.strictEqual(offset, expected[i].offset);
assert.strictEqual(size, expected[i].size);
}
});
it('should serialize with offset positions for txs (witness)', () => {
const [block] = block482683.getBlock();
const expected = [
{offset: 81, size: 217},
{offset: 298, size: 815},
{offset: 1113, size: 192},
{offset: 1305, size: 259},
{offset: 1564, size: 223},
{offset: 1787, size: 1223},
{offset: 3010, size: 486},
{offset: 3496, size: 665},
{offset: 4161, size: 3176},
{offset: 7337, size: 225},
{offset: 7562, size: 1223},
{offset: 8785, size: 503}
];
assert.equal(expected.length, block.txs.length);
assert.equal(block.getSize(), expected.reduce((a, b) => a + b.size, 81));
// Reset the offset for all transactions, and clear
// any cached values for the block.
block.refresh(true);
for (let i = 0; i < block.txs.length; i++)
assert.equal(block.txs[i]._offset, -1);
// Serialize the block, as done before saving to disk.
const raw = block.toRaw();
assert(raw);
for (let i = 0; i < block.txs.length; i++) {
const {offset, size} = block.txs[i].getPosition();
assert.strictEqual(offset, expected[i].offset);
assert.strictEqual(size, expected[i].size);
}
});
it('should deserialize with offset positions for txs', () => {
const [block] = block300025.getBlock();
assert.equal(block.txs.length, 461);
let expect = 83;
let total = 83;
for (let i = 0; i < block.txs.length; i++) {
const {offset, size} = block.txs[i].getPosition();
assert.strictEqual(offset, expect);
expect += size;
total += size;
}
assert.equal(total, 284231);
});
});

View File

@ -89,6 +89,13 @@ describe('HTTP', function() {
assert.strictEqual(info.pool.agent, node.pool.options.agent);
assert.typeOf(info.chain, 'object');
assert.strictEqual(info.chain.height, 0);
assert.typeOf(info.indexes, 'object');
assert.typeOf(info.indexes.addr, 'object');
assert.equal(info.indexes.addr.enabled, false);
assert.equal(info.indexes.addr.height, 0);
assert.typeOf(info.indexes.tx, 'object');
assert.equal(info.indexes.tx.enabled, false);
assert.equal(info.indexes.tx.height, 0);
});
it('should get wallet info', async () => {

1204
test/indexer-test.js Normal file

File diff suppressed because it is too large.

View File

@ -5,8 +5,11 @@
const assert = require('./util/assert');
const random = require('bcrypto/lib/random');
const common = require('../lib/blockchain/common');
const Block = require('../lib/primitives/block');
const MempoolEntry = require('../lib/mempool/mempoolentry');
const Mempool = require('../lib/mempool/mempool');
const AddrIndexer = require('../lib/mempool/addrindexer');
const WorkerPool = require('../lib/workers/workerpool');
const Chain = require('../lib/blockchain/chain');
const MTX = require('../lib/primitives/mtx');
@ -14,12 +17,17 @@ const Coin = require('../lib/primitives/coin');
const KeyRing = require('../lib/primitives/keyring');
const Address = require('../lib/primitives/address');
const Outpoint = require('../lib/primitives/outpoint');
const Input = require('../lib/primitives/input');
const Script = require('../lib/script/script');
const Opcode = require('../lib/script/opcode');
const opcodes = Script.opcodes;
const Witness = require('../lib/script/witness');
const MemWallet = require('./util/memwallet');
const BlockStore = require('../lib/blockstore/level');
const {BufferSet} = require('buffer-map');
const ALL = Script.hashType.ALL;
const VERIFY_NONE = common.flags.VERIFY_NONE;
const ONE_HASH = Buffer.alloc(32, 0x00);
ONE_HASH[0] = 0x01;
@ -69,6 +77,28 @@ function dummyInput(script, hash) {
return Coin.fromTX(fund, 0, -1);
}
async function getMockBlock(chain, txs = [], cb = true) {
if (cb) {
const raddr = KeyRing.generate().getAddress();
const mtx = new MTX();
mtx.addInput(new Input());
mtx.addOutput(raddr, 0);
txs = [mtx.toTX(), ...txs];
}
const now = Math.floor(Date.now() / 1000);
const time = chain.tip.time <= now ? chain.tip.time + 1 : now;
const block = new Block();
block.txs = txs;
block.prevBlock = chain.tip.hash;
block.time = time;
block.bits = await chain.getTarget(block.time, chain.tip);
return block;
}
describe('Mempool', function() {
this.timeout(5000);
@ -463,4 +493,561 @@ describe('Mempool', function() {
await blocks.close();
await workers.close();
});
describe('Index', function () {
const workers = new WorkerPool({
enabled: true
});
const blocks = new BlockStore({
memory: true
});
const chain = new Chain({
memory: true,
workers,
blocks
});
const mempool = new Mempool({
chain,
workers,
memory: true,
indexAddress: true
});
before(async () => {
await blocks.open();
await mempool.open();
await chain.open();
await workers.open();
});
after(async () => {
await workers.close();
await chain.close();
await mempool.close();
await blocks.close();
});
// Number of coins available in
// chaincoins (100k satoshi per coin).
const N = 100;
const chaincoins = new MemWallet();
const wallet = new MemWallet();
it('should create coins in chain', async () => {
const mtx = new MTX();
mtx.addInput(new Input());
for (let i = 0; i < N; i++) {
const addr = chaincoins.createReceive().getAddress();
mtx.addOutput(addr, 100000);
}
const cb = mtx.toTX();
const block = await getMockBlock(chain, [cb], false);
const entry = await chain.add(block, VERIFY_NONE);
await mempool._addBlock(entry, block.txs);
// Add 100 blocks so we don't get
// premature spend of coinbase.
for (let i = 0; i < 100; i++) {
const block = await getMockBlock(chain);
const entry = await chain.add(block, VERIFY_NONE);
await mempool._addBlock(entry, block.txs);
}
chaincoins.addTX(cb);
});
it('should spend txs and coins in the mempool', async () => {
// Verify coins are removed from the coin index.
const coin = chaincoins.getCoins()[0];
const addr = wallet.createReceive().getAddress();
const mtx1 = new MTX();
mtx1.addCoin(coin);
mtx1.addOutput(addr, 90000);
chaincoins.sign(mtx1);
const tx1 = mtx1.toTX();
chaincoins.addTX(tx1, -1);
wallet.addTX(tx1, -1);
{
const missing = await mempool.addTX(tx1);
assert.strictEqual(missing, null);
}
assert(mempool.hasCoin(tx1.hash(), 0));
{
const txs = mempool.getTXByAddress(addr);
const metas = mempool.getMetaByAddress(addr);
assert.strictEqual(txs.length, 1);
assert.strictEqual(metas.length, 1);
assert.bufferEqual(txs[0].hash(), tx1.hash());
}
const mtx2 = new MTX();
mtx2.addTX(tx1, 0, -1);
mtx2.addOutput(addr, 80000);
wallet.sign(mtx2);
const tx2 = mtx2.toTX();
{
const missing = await mempool.addTX(tx2);
assert.strictEqual(missing, null);
}
wallet.addTX(tx2, -1);
assert(!mempool.hasCoin(tx1.hash(), 0));
assert(mempool.hasCoin(tx2.hash(), 0));
{
const txs = mempool.getTXByAddress(addr);
assert.strictEqual(txs.length, 2);
}
});
it('should spend resolved orphans', async () => {
const coin = chaincoins.getCoins()[0];
const addr = wallet.createReceive().getAddress();
const pmtx = new MTX();
pmtx.addOutput(addr, 90000);
pmtx.addCoin(coin);
chaincoins.sign(pmtx);
const parentTX = pmtx.toTX();
const cmtx = new MTX();
cmtx.addTX(pmtx.toTX(), 0, -1);
cmtx.addOutput(addr, 80000);
wallet.sign(cmtx);
const childTX = cmtx.toTX();
{
// Create orphan tx.
const missing = await mempool.addTX(childTX);
// We only have one input missing.
assert.strictEqual(missing.length, 1);
}
{
const txs = mempool.getTXByAddress(addr);
assert.strictEqual(txs.length, 0);
}
{
// Orphans are not coins.
const childCoin = mempool.getCoin(childTX.hash(), 0);
assert.strictEqual(childCoin, null);
}
{
// Orphans should be resolved.
const missing = await mempool.addTX(parentTX);
assert.strictEqual(missing, null);
// Coins should be available once they are resolved.
const parentCoin = mempool.getCoin(parentTX.hash(), 0);
// We spent this.
assert.strictEqual(parentCoin, null);
const childCoin = mempool.getCoin(childTX.hash(), 0);
assert(childCoin);
}
{
const txs = mempool.getTXByAddress(addr);
assert.strictEqual(txs.length, 2);
}
// Update coins in wallets.
for (const tx of [parentTX, childTX]) {
chaincoins.addTX(tx);
wallet.addTX(tx);
}
});
it('should remove double spend tx from mempool', async () => {
const coin = chaincoins.getCoins()[0];
const addr = wallet.createReceive().getAddress();
const randomAddress = KeyRing.generate().getAddress();
// We check double spending of our mempool tx.
const mtx1 = new MTX();
mtx1.addCoin(coin);
mtx1.addOutput(addr, 90000);
chaincoins.sign(mtx1);
// This will double spend in a block.
const mtx2 = new MTX();
mtx2.addCoin(coin);
mtx2.addOutput(randomAddress, 90000);
chaincoins.sign(mtx2);
const tx1 = mtx1.toTX();
const tx2 = mtx2.toTX();
{
const missing = await mempool.addTX(tx1);
assert.strictEqual(missing, null);
}
{
const txs = mempool.getTXByAddress(addr);
assert.strictEqual(txs.length, 1);
}
assert(mempool.hasCoin(tx1.hash(), 0));
const block = await getMockBlock(chain, [tx2]);
const entry = await chain.add(block, VERIFY_NONE);
await mempool._addBlock(entry, block.txs);
{
const txs = mempool.getTXByAddress(addr);
assert.strictEqual(txs.length, 0);
}
assert(!mempool.hasCoin(tx1.hash(), 0));
chaincoins.addTX(tx2);
});
it('should remove confirmed txs from mempool', async () => {
const coin = chaincoins.getCoins()[0];
const addr = wallet.createReceive().getAddress();
const mtx = new MTX();
mtx.addCoin(coin);
mtx.addOutput(addr, 90000);
chaincoins.sign(mtx);
const tx = mtx.toTX();
await mempool.addTX(tx);
assert(mempool.hasCoin(tx.hash(), 0));
{
const txs = mempool.getTXByAddress(addr);
assert.strictEqual(txs.length, 1);
}
const block = await getMockBlock(chain, [tx]);
const entry = await chain.add(block, VERIFY_NONE);
await mempool._addBlock(entry, block.txs);
{
const txs = mempool.getTXByAddress(addr);
assert.strictEqual(txs.length, 0);
}
assert(!mempool.hasCoin(tx.hash(), 0));
chaincoins.addTX(tx);
wallet.addTX(tx);
});
});
describe('AddrIndexer', function () {
it('will not get key for witness program v1', function() {
const addrindex = new AddrIndexer();
// Create a witness program version 1 with
// 40 byte data push.
const script = new Script();
script.push(Opcode.fromSmall(1));
script.push(Opcode.fromData(Buffer.alloc(40)));
script.compile();
const addr = Address.fromScript(script);
const key = addrindex.getKey(addr);
assert.strictEqual(key, null);
});
it('will get key for witness program v0', function() {
const addrindex = new AddrIndexer();
// Create a witness program version 0 with
// 32 byte data push.
const script = new Script();
script.push(Opcode.fromSmall(0));
script.push(Opcode.fromData(Buffer.alloc(32)));
script.compile();
const addr = Address.fromScript(script);
const key = addrindex.getKey(addr);
assert.bufferEqual(key, Buffer.from('0a' + '00'.repeat(32), 'hex'));
});
});
describe('Mempool persistent cache', function () {
const workers = new WorkerPool({
enabled: true
});
const blocks = new BlockStore({
memory: true
});
const chain = new Chain({
memory: true,
workers,
blocks
});
const mempool = new Mempool({
chain,
workers,
memory: true,
indexAddress: true,
persistent: true
});
before(async () => {
await blocks.open();
await mempool.open();
await chain.open();
await workers.open();
});
after(async () => {
await workers.close();
await chain.close();
await mempool.close();
await blocks.close();
});
// Number of coins available in
// chaincoins (100k satoshi per coin).
const N = 100;
const chaincoins = new MemWallet();
const wallet = new MemWallet();
it('should create txs in chain', async () => {
const mtx = new MTX();
mtx.addInput(new Input());
for (let i = 0; i < N; i++) {
const addr = chaincoins.createReceive().getAddress();
mtx.addOutput(addr, 100000);
}
const cb = mtx.toTX();
const block = await getMockBlock(chain, [cb], false);
const entry = await chain.add(block, VERIFY_NONE);
await mempool._addBlock(entry, block.txs);
// Add 100 blocks so we don't get premature
// spend of coinbase.
for (let i = 0; i < 100; i++) {
const block = await getMockBlock(chain);
const entry = await chain.add(block, VERIFY_NONE);
await mempool._addBlock(entry, block.txs);
}
chaincoins.addTX(cb);
});
it('should restore txs in the mempool', async () => {
const coins = chaincoins.getCoins();
assert.strictEqual(coins.length, N);
const addrs = [];
const txs = 20;
const spend = 5;
for (let i = 0; i < txs; i++)
addrs.push(wallet.createReceive().getAddress());
const mempoolTXs = new BufferSet();
const mempoolCoins = new BufferSet();
// Send 15 txs to the wallet.
for (let i = 0; i < txs - spend; i++) {
const mtx = new MTX();
mtx.addCoin(coins[i]);
mtx.addOutput(addrs[i], 90000);
chaincoins.sign(mtx);
const tx = mtx.toTX();
const missing = await mempool.addTX(tx);
assert.strictEqual(missing, null);
assert(mempool.hasCoin(tx.hash(), 0));
// Indexer checks.
{
const txs = mempool.getTXByAddress(addrs[i]);
assert.strictEqual(txs.length, 1);
assert.bufferEqual(txs[0].hash(), tx.hash());
}
wallet.addTX(tx);
mempoolTXs.add(tx.hash());
mempoolCoins.add(Outpoint.fromTX(tx, 0).toKey());
}
// Spend first 5 coins from the mempool.
for (let i = 0; i < spend; i++) {
const coin = wallet.getCoins()[0];
const addr = addrs[txs - spend + i];
const mtx = new MTX();
mtx.addCoin(coin);
mtx.addOutput(addr, 80000);
wallet.sign(mtx);
const tx = mtx.toTX();
const missing = await mempool.addTX(tx);
assert.strictEqual(missing, null);
assert(!mempool.hasCoin(coin.hash, 0));
assert(mempool.hasCoin(tx.hash(), 0));
{
const txs = mempool.getTXByAddress(addr);
assert.strictEqual(txs.length, 1);
}
{
const txs = mempool.getTXByAddress(addrs[i]);
assert.strictEqual(txs.length, 2);
}
mempoolTXs.add(tx.hash());
mempoolCoins.delete(coin.toKey());
mempoolCoins.add(Outpoint.fromTX(tx, 0).toKey());
wallet.addTX(tx);
}
const verifyMempoolState = (mempool) => {
// Verify general state of the mempool.
assert.strictEqual(mempool.map.size, txs);
assert.strictEqual(mempool.spents.size, txs);
assert.strictEqual(mempool.addrindex.map.size, txs);
// Verify txs are same.
for (const val of mempoolTXs.values())
assert(mempool.getTX(val));
for (const opkey of mempoolCoins.values()) {
const outpoint = Outpoint.fromRaw(opkey);
assert(mempool.hasCoin(outpoint.hash, outpoint.index));
}
// Coins in these txs are spent.
for (let i = 0; i < spend; i++) {
const addr = addrs[i];
const txs = mempool.getTXByAddress(addr);
assert.strictEqual(txs.length, 2);
}
// These txs are untouched.
for (let i = spend; i < txs - spend; i++) {
const addr = addrs[i];
const txs = mempool.getTXByAddress(addr);
assert.strictEqual(txs.length, 1);
}
// These are txs spending mempool txs.
for (let i = txs - spend; i < txs; i++) {
const addr = addrs[i];
const txs = mempool.getTXByAddress(addr);
assert.strictEqual(txs.length, 1);
}
};
verifyMempoolState(mempool);
// Hack: reuse the in-memory cache in a new mempool instance.
const cache = mempool.cache;
// We need to sync manually because there were no mempool
// txs when the first block was mined.
await cache.sync(chain.tip.hash);
// Apply batch to the memdb.
await cache.flush();
await mempool.close();
let err;
{
const mempool = new Mempool({
chain,
workers,
memory: true,
indexAddress: true,
persistent: true
});
mempool.cache = cache;
await mempool.open();
try {
verifyMempoolState(mempool);
} catch (e) {
err = e;
} finally {
await cache.wipe();
await mempool.close();
}
}
// Reopen so the after() hook can clean up.
await mempool.open();
if (err)
throw err;
});
});
});

View File

@ -28,6 +28,8 @@ const node = new FullNode({
network: 'regtest',
workers: true,
plugins: [require('../lib/wallet/plugin')],
indexTX: true,
indexAddress: true,
port: ports.p2p,
httpPort: ports.node,
env: {
@ -756,6 +758,23 @@ describe('Node', function() {
assert.strictEqual(tx1.txid(), tx2.txid());
});
it('should get tx by hash', async () => {
const block = await mineBlock();
await chain.add(block);
const tx = block.txs[0];
const hash = tx.hash();
const hasTX = await node.hasTX(hash);
assert.strictEqual(hasTX, true);
const tx2 = await node.getTX(hash);
assert.strictEqual(tx.txid(), tx2.txid());
const meta = await node.getMeta(hash);
assert.strictEqual(meta.tx.txid(), tx2.txid());
});
it('should cleanup', async () => {
consensus.COINBASE_MATURITY = 100;
await node.close();

View File

@ -115,8 +115,11 @@ assert.asyncThrows = async function asyncThrows(func, expectedError) {
} catch (e) {
err = e;
}
const re = new RegExp('^' + expectedError);
assert(re.test(err.message));
assert(err, 'Expected error.');
if (expectedError) {
const re = new RegExp('^' + expectedError);
assert(re.test(err.message), err.message);
}
};
function _isString(value, message, stackStartFunction) {

View File

@ -102,6 +102,27 @@ common.rimraf = async function(p) {
return await fs.rimraf(p);
};
common.forValue = async function(obj, key, val, timeout = 30000) {
assert(typeof obj === 'object');
assert(typeof key === 'string');
const ms = 10;
let interval = null;
let count = 0;
return new Promise((resolve, reject) => {
interval = setInterval(() => {
if (obj[key] === val) {
clearInterval(interval);
resolve();
} else if (count * ms >= timeout) {
clearInterval(interval);
reject(new Error('Timeout waiting for value.'));
}
count += 1;
}, ms);
});
};
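A usage sketch for the polling helper above, as it might appear in an async test (`txindexer` and `chain` are assumed from the test's setup):

```
// Illustrative only: block until the indexer reports the chain height,
// or fail after the default 30 second timeout.
it('should index to the chain tip', async () => {
  await common.forValue(txindexer, 'height', chain.height);
});
```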
function parseUndo(data) {
const br = bio.read(data);
const items = [];

63
test/util/reorg.js Normal file
View File

@ -0,0 +1,63 @@
'use strict';
const assert = require('./assert');
const Chain = require('../../lib/blockchain/chain');
const CPUMiner = require('../../lib/mining/cpuminer');
/**
* Build two competing forks of the given length and reorganize
* the chain to the longer one using the CPU miner.
* @param {Chain} chain
* @param {CPUMiner} cpu
* @param {Number} height - number of blocks to mine on each fork
* @returns {Promise}
*/
async function reorg(chain, cpu, height) {
assert(chain instanceof Chain);
assert(cpu instanceof CPUMiner);
assert(typeof height === 'number');
let tip1 = null;
let tip2 = null;
for (let i = 0; i < height; i++) {
const job1 = await cpu.createJob(tip1);
const job2 = await cpu.createJob(tip2);
const blk1 = await job1.mineAsync();
const blk2 = await job2.mineAsync();
const hash1 = blk1.hash();
const hash2 = blk2.hash();
assert(await chain.add(blk1));
assert(await chain.add(blk2));
assert.bufferEqual(chain.tip.hash, hash1);
tip1 = await chain.getEntry(hash1);
tip2 = await chain.getEntry(hash2);
assert(tip1);
assert(tip2);
assert(!await chain.isMainChain(tip2));
}
const entry = await chain.getEntry(tip2.hash);
assert(entry);
assert.strictEqual(chain.height, entry.height);
const block = await cpu.mineBlock(entry);
assert(block);
let forked = false;
chain.once('reorganize', () => {
forked = true;
});
assert(await chain.add(block));
assert(forked);
assert.bufferEqual(chain.tip.hash, block.hash());
assert(chain.tip.chainwork.gt(tip1.chainwork));
}
module.exports = reorg;
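A usage sketch for the helper (`chain` and `cpu` come from the calling test's setup):

```
// Illustrative only: build two competing 10-block forks and trigger a
// reorganization to the longer one.
const reorg = require('./util/reorg');

it('should follow a reorg', async () => {
  await reorg(chain, cpu, 10);
  // The chain tip is now on the second fork, one block longer than
  // the previously best chain.
});
```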