This commit is contained in:
Chris Kleeschulte 2017-07-21 07:22:13 -04:00
parent 759087bfc2
commit 3044a4dcfe
6 changed files with 104 additions and 135 deletions

View File

@ -13,7 +13,6 @@ function Logger(options) {
options = {};
}
this.formatting = _.isUndefined(options.formatting) ? Logger.DEFAULT_FORMATTING : options.formatting;
this.level = options.logLevel || process.env.LOG_LEVEL || 0; // lower numbers means less logs
}
Logger.DEFAULT_FORMATTING = true;
@ -39,31 +38,11 @@ Logger.prototype.error = function() {
* #debug
*/
Logger.prototype.debug = function() {
  // Level 1+ only; everything below stays quiet.
  if (this.level < 1) {
    return;
  }
  var args = Array.prototype.slice.call(arguments);
  this._log.apply(this, ['magenta', 'debug'].concat(args));
};
/**
* Prints a debug2 message
* #debug2
*/
Logger.prototype.debug2 = function() {
  // Log only when verbose debugging is enabled: level >= 2 AND the
  // BITCORE_ENV environment variable is set to 'debug'.
  // (As rendered, the original nested ifs were missing a closing brace;
  // combining the guards restores balanced braces without changing intent.)
  if (this.level >= 2 && process.env.BITCORE_ENV === 'debug') {
    this._log.apply(this, ['green', 'debug'].concat(Array.prototype.slice.call(arguments)));
  }
};
/**
* Prints a debug3 message
* #debug3
*/
Logger.prototype.debug3 = function() {
  // Level 3+ only; most verbose debug channel.
  if (this.level < 3) {
    return;
  }
  var args = Array.prototype.slice.call(arguments);
  this._log.apply(this, ['cyan', 'debug'].concat(args));
};
/**
* Prints a warn message
* #warn
@ -86,7 +65,7 @@ Logger.prototype._log = function(color) {
var typeString = colors[color].italic(level + ':');
args[0] = '[' + date.toISOString() + ']' + ' ' + typeString + ' ' + args[0];
}
var fn = console[level] || console.log;
var fn = console.log;
fn.apply(console, args);
};

View File

@ -1,6 +1,6 @@
'use strict';
var Block = require('bitcore-lib').Block;
var Block = require('bcoin').block;
// stores -- block header as key, block itself as value (optionally)
function Encoding(servicePrefix) {

View File

@ -48,7 +48,7 @@ inherits(BlockService, BaseService);
BlockService.dependencies = [ 'p2p', 'db', 'header' ];
BlockService.MAX_BLOCKS = 500;
BlockService.MAX_BLOCKS = 250;
// --- public prototype functions
BlockService.prototype.getAPIMethods = function() {
@ -86,18 +86,18 @@ BlockService.prototype.getBlockOverview = function(hash, callback) {
return callback(err);
}
var header = block.toHeaders();
var header = block.toHeaders().toJSON();
var blockOverview = {
hash: block.hash,
version: header.version,
confirmations: null,
height: null,
chainWork: null,
height: header.height,
chainWork: header.chainwork,
prevHash: header.prevBlock,
nextHash: null,
merkleRoot: block.merkleroot,
time: null,
time: header.timestamp,
medianTime: null,
nonce: block.nonce,
bits: block.bits,
@ -134,7 +134,7 @@ BlockService.prototype.getRawBlock = function(hash, callback) {
if(err) {
return callback(err);
}
callback(null, block.toString());
callback(null, block.toRaw().toString('hex'));
});
};
@ -181,7 +181,7 @@ BlockService.prototype._primeBlockQueue = function(callback) {
async.timesSeries(50, function(index, next) {
self._db.get(self._encoding.encodeBlockKey(hash), function(err, block) {
self._db.get(self._encoding.encodeBlockKey(hash), function(err, data) {
if(err) {
return next(err);
@ -191,8 +191,9 @@ BlockService.prototype._primeBlockQueue = function(callback) {
return next();
}
hash = block.toHeaders().prevBlock;
self._blockQueue.set(block.hash, block);
var block = self._encoding.decodeBlockValue(data);
hash = block.toHeaders().toJSON().prevBlock;
self._blockQueue.set(block.rhash(), block);
next();
});
@ -254,7 +255,7 @@ BlockService.prototype.unsubscribe = function(name, emitter) {
BlockService.prototype._blockAlreadyProcessed = function(block) {
return this._blockQueue.get(block.hash) ? true : false;
return this._blockQueue.get(block.rhash()) ? true : false;
};
@ -274,17 +275,7 @@ BlockService.prototype._cacheBlock = function(block) {
// 2. store the block in the database
var operations = this._getBlockOperations(block);
this._db.batch(operations, function(err) {
if(err) {
log.error('There was an error attempting to save block for hash: ' + block.hash);
this._db.emit('error', err);
return;
}
log.debug('Success saving block hash ' + block.hash);
});
this._db.batch(operations);
};
BlockService.prototype._determineBlockState = function(block) {
@ -325,13 +316,14 @@ BlockService.prototype._findCommonAncestor = function(hash) {
function(next) {
// old tip has to be in database
self._db.get(self._encoding.encodeBlockKey(_oldTip), function(err, block) {
self._db.get(self._encoding.encodeBlockKey(_oldTip), function(err, data) {
if (err || !block) {
return next(err || new Error('missing block'));
}
_oldTip = block.toHeaders().prevBlock;
var block = self._encoding.decodeBlockValue(data);
_oldTip = block.toHeaders().toJSON().prevBlock;
var header = headers.get(_newTip);
if (!header) {
@ -352,11 +344,29 @@ BlockService.prototype._findCommonAncestor = function(hash) {
};
/**
 * Looks up a block by hash: first in the in-memory block queue, then in the
 * database (decoding the stored value back into a block object).
 * Calls back with (err), (null, block), or () when the block is not found.
 */
BlockService.prototype._getBlock = function(hash, callback) {
  var self = this;
  // Fix: the queue is a map-like cache; it must be read with .get(), not
  // invoked as a function.
  var block = this._blockQueue.get(hash);
  if (block) {
    return callback(null, block);
  }
  this._db.get(this._encoding.encodeBlockKey(hash), function(err, data) {
    if (err) {
      return callback(err);
    }
    // Fix: the not-found check must test the raw db value ('data'); the
    // original tested 'block' before it was declared.
    if (!data) {
      return callback();
    }
    callback(null, self._encoding.decodeBlockValue(data));
  });
};
BlockService.prototype._getBlockOperations = function(block) {
@ -392,7 +402,7 @@ BlockService.prototype._getDelta = function(tip) {
while (_tip !== this._tip.hash) {
var blk = this._blockQueue.get(_tip);
_tip = blk.toHeaders().prevBlock;
_tip = blk.toHeaders().toJSON().prevBlock;
blocks.push(blk);
}
@ -415,7 +425,7 @@ BlockService.prototype._getIncompleteChainIndexes = function(block) {
var ret = [];
for(var i = 0; i < this._incompleteChains.length; i++) {
var chain = this._incompleteChains[i];
var lastEntry = chain[chain.length - 1].toHeaders();
var lastEntry = chain[chain.length - 1].toHeaders().toJSON();
if (lastEntry.prevHash === block.rhash()) {
ret.push(i);
}
@ -511,7 +521,7 @@ BlockService.prototype._onReorg = function(oldBlockList, commonAncestorHeader) {
BlockService.prototype._isChainReorganizing = function(block) {
// if we aren't an out of order block, then is this block's prev hash our tip?
var prevHash = block.toHeaders().prevBlock;
var prevHash = block.toHeaders().toJSON().prevBlock;
return prevHash !== this._tip.hash;
@ -520,7 +530,7 @@ BlockService.prototype._isChainReorganizing = function(block) {
BlockService.prototype._isOutOfOrder = function(block) {
// is this block the direct child of one of our chain tips? If so, not an out of order block
var prevHash = block.toHeaders().prevBlock;
var prevHash = block.toHeaders().toJSON().prevBlock;
for(var i = 0; i < this._chainTips.length; i++) {
var chainTip = this._chainTips[i];
@ -547,7 +557,7 @@ BlockService.prototype._onBlock = function(block) {
}
// 2. log the reception
log.info('New block received: ' + block.rhash());
log.debug('New block received: ' + block.rhash());
// 3. store the block for safe keeping
this._cacheBlock(block);
@ -572,6 +582,7 @@ BlockService.prototype._onBlock = function(block) {
break;
default:
// send all unsent blocks now that we have a complete chain
this._sendDelta();
break;
}
@ -581,12 +592,19 @@ BlockService.prototype._selectActiveChain = function() {
var chainTip;
var mostChainWork = new BN(0);
var headers = this._header.getAllHeaders();
var self = this;
if (this._chainTips.length === 1) {
return this._chainTips[0];
}
for(var i = 0; i < this._chainTips.length; i++) {
var work = self._getChainwork(this._chainTips[i]);
var header = headers.get(this._chainTips[i]);
assert(header, 'we must have a header for chain tip.');
var work = new BN(new Buffer(header.chainwork, 'hex'));
if (work.gt(mostChainWork)) {
mostChainWork = work;
@ -611,51 +629,28 @@ BlockService.prototype._sendDelta = function() {
this._broadcast(this._subscriptions.block, 'block/block', blocks[i]);
}
var headers = this._header.getAllHeaders();
var len = headers.length - 1;
this._setTip({ height: len, hash: headers[len].hash });
var newTipHeight = this._tip.height + i;
var newTipHash = blocks[i - 1].rhash();
this._setTip({ height: newTipHeight, hash: newTipHash });
var tipOps = utils.encodeTip(this._tip, this.name);
var tipOps = [{
type: 'put',
key: tipOps.key,
value: tipOps.value
}];
this._db.batch(tipOps);
if (++this._blockCount >= BlockService.MAX_BLOCKS) {
this._latestBlockHash = this._tip.hash;
this._blockCount = 0;
this._continueSync();
log.debug('Block Service: calling sync again for more blocks.');
this._sync();
}
};
BlockService.prototype._continueSync = function() {
/*
Sync involves three distinct tasks:
1. receiving network data
2. light processing of that data (data manipulation)
3. writing that data to disk

Goal: sync as quickly as the least performant task allows while using a
modest amount of memory. The worst case is a fast network, a slow disk, and
little memory: we must defer asking the p2p service for more blocks until the
disk has had time to process all the queued events.

We do this by checking the resident set size and deferring the next call to
sync if we are in danger of a 'JS Object Allocation Failed - Out of Memory'
fault.

The "maxMemory" config setting for this service limits the amount of memory
considered; the default is 1.5GB. If we are above 95% of max memory usage,
the sync is deferred (re-checked every second) until usage is 95% or less.
NOTE(review): RSS bytes are divided by 1E6 before comparing against _maxMem,
so _maxMem is presumably expressed in megabytes — confirm against the
service's option parsing.
*/
var self = this;
var totalRss = process.memoryUsage().rss;
if ((totalRss / 1E6) / self._maxMem > 0.95) {
self._deferTimeout = setTimeout(function() {
self._continueSync();
}, 1000);
return;
}
self._sync();
};
BlockService.prototype._setListeners = function() {
this._header.once('headers', this._onAllHeaders.bind(this));
@ -677,8 +672,6 @@ BlockService.prototype._startSync = function() {
log.info('Gathering: ' + this._numNeeded + ' ' + 'block(s) from the peer-to-peer network.');
this._p2pBlockCallsNeeded = Math.ceil(this._numNeeded / 500);
this._latestBlockHash = this._tip.hash || this.GENESIS_HASH;
this._sync();
};
@ -699,20 +692,25 @@ BlockService.prototype._startSubscriptions = function() {
BlockService.prototype._sync = function() {
if (--this._p2pBlockCallsNeeded > 0) {
var headers = this._header.getAllHeaders();
var size = headers.size - 1;
if (this._tip.height < size) {
log.info('Blocks download progress: ' + this._tip.height + '/' +
this._numNeeded + ' (' + (this._tip.height / this._numNeeded*100).toFixed(2) + '%)');
this._p2p.getBlocks({ startHash: this._latestBlockHash });
return;
var endHash = Array.from(headers)[ Math.min(this._tip.height + BlockService.MAX_BLOCKS, size) ][0];
this._p2p.getBlocks({ startHash: this._tip.hash, endHash: endHash });
return;
}
};
BlockService.prototype._updateChainInfo = function(block, state) {
var prevHash = block.toHeaders().prevBlock;
var prevHash = block.toHeaders().toJSON().prevBlock;
if (state === 'normal') {
this._updateNormalStateChainInfo(block, prevHash);
@ -740,13 +738,13 @@ BlockService.prototype._updateNormalStateChainInfo = function(block, prevHash) {
var incompleteChainIndexes = this._getIncompleteChainIndexes(block);
//retrieving more than one incomplete chain for a given block means there is a future reorg in the incomplete chain
if (incompleteChainIndexes.length < 1) {
this._chainTips.push(block.hash);
this._chainTips.push(block.rhash());
} else {
incompleteChainIndexes.sort().reverse();
for(var i = 0; i < incompleteChainIndexes.length; i++) {
var incompleteIndex = incompleteChainIndexes[i];
this._chainTips.push(this._incompleteChains[incompleteIndex][0].hash);
this._chainTips.push(this._incompleteChains[incompleteIndex][0].rhash());
this._incompleteChains.splice(incompleteIndex, 1);
}
@ -771,7 +769,7 @@ BlockService.prototype._updateOutOfOrderStateChainInfo = function(block) {
*/
var prevHash = block.toHeaders().prevBlock;
var prevHash = block.toHeaders().toJSON().prevBlock;
var possibleJoins = { tip: [], genesis: [] };
var newChains = [];
var joinedChains = false;
@ -788,7 +786,7 @@ BlockService.prototype._updateOutOfOrderStateChainInfo = function(block) {
var chains;
// criteria 1
var lastEntryPrevHash = lastEntry.toHeaders().prevBlock;
var lastEntryPrevHash = lastEntry.toHeaders().toJSON().prevBlock;
if (lastEntryPrevHash === block.hash) {
joinedChains = true;

View File

@ -108,22 +108,15 @@ HeaderService.prototype._startSubscriptions = function() {
/**
 * Handles a newly received block: we only want its header, to keep a running
 * list. Normalizes the bcoin header JSON field names to the ones this service
 * stores, then forwards it to _onHeaders (with convert=1, meaning the header
 * is already a plain object and needs no toObject() conversion).
 * (As rendered, the span contained interleaved old and new lines — duplicate
 * log calls and two declarations of 'header'; this is the coherent new form.)
 */
HeaderService.prototype._onBlock = function(block) {
  log.debug('Header Service: new block: ' + block.rhash());
  var header = block.toHeaders().toJSON();
  // NOTE(review): confirm bcoin's headers toJSON exposes 'ts' — elsewhere in
  // this codebase 'timestamp' is read directly off the JSON.
  header.timestamp = header.ts;
  header.prevHash = header.prevBlock;
  this._onHeaders([header], 1);
};
HeaderService.prototype._onHeaders = function(headers) {
HeaderService.prototype._onHeaders = function(headers, convert) {
if (!headers || headers.length < 1) {
return;
@ -131,10 +124,18 @@ HeaderService.prototype._onHeaders = function(headers) {
log.debug('Header Service: Received: ' + headers.length + ' header(s).');
var operations = this._getHeaderOperations(headers);
var newHeaders = headers;
if (!convert) {
newHeaders = headers.map(function(header) {
header = header.toObject();
return header;
});
}
this._tip.hash = headers[headers.length - 1].hash;
this._tip.height = this._tip.height + headers.length;
var operations = this._getHeaderOperations(newHeaders);
this._tip.hash = newHeaders[newHeaders.length - 1].hash;
this._tip.height = this._tip.height + newHeaders.length;
this._db.batch(operations);
@ -154,7 +155,7 @@ HeaderService.prototype._getHeaderOperations = function(headers) {
header.chainwork = self._getChainwork(header, prevHeader).toString(16, 32);
prevHeader = header;
// set the header in the in-memory map
self._headers.set(header.hash, utils.convertLEHeaderStrings(header));
self._headers.set(header.hash, header);
return {
type: 'put',
key: self._encoding.encodeHeaderKey(header.height, header.hash),
@ -191,7 +192,7 @@ HeaderService.prototype._sync = function() {
if (this._tip.height < this._bestHeight) {
log.info('Header Service: download progress: ' + this._tip.height + '/' +
log.debug('Header Service: download progress: ' + this._tip.height + '/' +
this._numNeeded + ' (' + (this._tip.height / this._numNeeded*100).toFixed(2) + '%)');

View File

@ -172,7 +172,7 @@ P2P.prototype._connect = function() {
};
P2P.prototype._getBestHeight = function() {
if (this._peers < this._minPeers) {
if (this._peers === 0) {
return 0;
}
@ -184,18 +184,12 @@ P2P.prototype._getBestHeight = function() {
}
}
return maxHeight;
};
// we should only choose from a list of peers that have finished syncing
P2P.prototype._getPeer = function() {
  // Single-peer mode: always return the one connected peer.
  // (Removed the commented-out round-robin selection over this._peers;
  // dead code should live in version control history, not the source.)
  return this._peer;
};
P2P.prototype._hasPeers = function() {
@ -211,7 +205,7 @@ P2P.prototype._initCache = function() {
P2P.prototype._initP2P = function() {
this._maxPeers = this._options.maxPeers || 60;
this._minPeers = this._options.minPeers || 1;
this._minPeers = this._options.minPeers || 0;
this._configPeers = this._options.peers;
this.messages = new p2p.Messages({ network: this.node.network });
this._peerHeights = [];
@ -226,7 +220,7 @@ P2P.prototype._initPool = function() {
opts.addrs = this._configPeers;
}
// TODO: bcoin stuff goes here
opts.dnsSeed = false; // do not set to true unless you want to be pwned
opts.dnsSeed = false;
opts.maxPeers = this._maxPeers;
opts.network = this.node.getNetworkName();
this._pool = new p2p.Pool(opts);

View File

@ -56,6 +56,9 @@ utils.diffTime = function(time) {
};
/**
 * Byte-reverses a buffer (or hex string) and returns the result as a hex
 * string. Used to flip little-endian hashes to display (big-endian) order.
 * @param {Buffer|string} buf - buffer, or hex-encoded string
 * @returns {string} hex string of the reversed bytes
 */
utils.reverseBufferToString = function(buf) {
  if (_.isString(buf)) {
    // Buffer.from instead of the deprecated, unsafe `new Buffer(...)`.
    buf = Buffer.from(buf, 'hex');
  }
  return BufferUtil.reverse(buf).toString('hex');
};
@ -213,11 +216,5 @@ utils.isHeight = function(arg) {
return _.isNumber(blockArg) || (blockArg.length < 40 && /^[0-9]+$/.test(blockArg))
};
/**
 * Converts the little-endian `prevHash` and `merkleRoot` hex fields of a
 * header object to big-endian hex, mutating (and returning) the header.
 * @param {Object} header - header with hex-string prevHash and merkleRoot
 * @returns {Object} the same header object with the two fields reversed
 */
utils.convertLEHeaderStrings = function(header) {
  return Object.assign(header, {
    // Buffer.from instead of the deprecated, unsafe `new Buffer(...)`.
    prevHash: utils.reverseBufferToString(Buffer.from(header.prevHash, 'hex')),
    merkleRoot: utils.reverseBufferToString(Buffer.from(header.merkleRoot, 'hex'))
  });
};
module.exports = utils;