Added "emitInvalidBlockHashes" option for those in MPOS mode that require it

This commit is contained in:
Matt 2014-04-02 13:01:45 -06:00
parent 78f6a8c2e6
commit 68f9fb566c
3 changed files with 22 additions and 24 deletions

View File

@@ -150,6 +150,9 @@ var pool = Stratum.createPool({
detects those and disconnects them. */
"connectionTimeout": 600, //Remove workers that haven't been in contact for this many seconds
/* Sometimes you want the block hashes even for shares that aren't block candidates. */
"emitInvalidBlockHashes": false,
/* If a worker is submitting a good deal of invalid shares we can temporarily ban them to
reduce system/network load. Also useful to fight against flooding attacks. */
"banning": {
@@ -248,11 +251,14 @@ Listen to pool events
height: 443795, //block height
networkDifficulty: 3349 //network difficulty for this block
//solution is set if block was found
solution: '110c0447171ad819dd181216d5d80f41e9218e25d833a2789cb8ba289a52eee4',
//AKA the block solution - set if block was found
blockHash: '110c0447171ad819dd181216d5d80f41e9218e25d833a2789cb8ba289a52eee4',
//tx is the coinbase transaction hash from the block
tx: '41bb22d6cc409f9c0bae2c39cecd2b3e3e1be213754f23d12c5d6d2003d59b1d',
//Exists if "emitInvalidBlockHashes" is set to true
blockHashInvalid: '110c0447171ad819dd181216d5d80f41e9218e25d833a2789cb8ba289a52eee4'
//txHash is the coinbase transaction hash from the block
txHash: '41bb22d6cc409f9c0bae2c39cecd2b3e3e1be213754f23d12c5d6d2003d59b1d',
error: 'low share difficulty' //set if share is rejected for some reason
*/
@@ -262,8 +268,8 @@ pool.on('share', function(isValidShare, isValidBlock, data){
console.log('Block found');
else if (isValidShare)
console.log('Valid share submitted');
else if (data.solution)
console.log('We thought a block solution was found but it was rejected by the daemon');
else if (data.blockHash)
console.log('We thought a block was found but it was rejected by the daemon');
else
console.log('Invalid share submitted')
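With "emitInvalidBlockHashes" enabled, the listener above can also report the hash of shares that were not block candidates. A minimal sketch of such a handler, using only the fields documented in this diff (the log messages are placeholders):

pool.on('share', function(isValidShare, isValidBlock, data){
    if (isValidBlock){
        console.log('Block ' + data.blockHash + ' accepted, coinbase tx ' + data.txHash);
    }
    else if (data.blockHash){
        console.log('Block candidate ' + data.blockHash + ' was rejected by the daemon');
    }
    else if (isValidShare){
        // data.blockHashInvalid is only present when "emitInvalidBlockHashes" is true
        if (data.blockHashInvalid)
            console.log('Valid share submitted, non-candidate hash ' + data.blockHashInvalid);
        else
            console.log('Valid share submitted');
    }
    else{
        console.log('Invalid share submitted: ' + data.error);
    }
});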

View File

@@ -178,6 +178,7 @@ var JobManager = module.exports = function JobManager(maxDifficulty, hashDigest,
var headerHash = hashDigest(headerBuffer, nTimeInt);
var headerBigNum = bignum.fromBuffer(headerHash, {endian: 'little', size: 32});
var blockHashInvalid;
var blockHash;
var blockHex;
@@ -188,6 +189,9 @@ var JobManager = module.exports = function JobManager(maxDifficulty, hashDigest,
blockHash = util.reverseBuffer(util.sha256d(headerBuffer)).toString('hex');
}
else {
if (options.emitInvalidBlockHashes)
blockHashInvalid = util.reverseBuffer(util.sha256d(headerBuffer)).toString('hex');
var targetUser = maxDifficulty.div(difficulty);
if (headerBigNum.gt(targetUser)){
//Check if share matched a previous difficulty from before vardiff retarget
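For orientation, this else branch only runs for shares that are not block candidates: the header hash, read as a little-endian 256-bit number, did not meet the network target, so it is checked against the worker's personal target instead (maxDifficulty divided by the assigned difficulty). A rough standalone sketch of that comparison using the bignum package; the diff1 constant and helper name are assumptions, not code from this repository:

var bignum = require('bignum');

// Assumed diff1 target for sha256d-style coins; the pool passes its own maxDifficulty in.
var maxDifficulty = bignum('00000000ffff0000000000000000000000000000000000000000000000000000', 16);

// Classify a 32-byte header hash against the network target and the worker's target.
function classifyShare(headerHash, workerDifficulty, networkTarget){
    var headerBigNum = bignum.fromBuffer(headerHash, {endian: 'little', size: 32});
    if (networkTarget.ge(headerBigNum))
        return 'block candidate';            // meets the network target
    var targetUser = maxDifficulty.div(workerDifficulty);
    if (headerBigNum.gt(targetUser))
        return 'rejected - low difficulty';  // beats neither target
    return 'valid share';                    // beats only the worker target
}

Note that the new blockHashInvalid computation sits at the top of this branch, so it runs for non-candidate shares before the worker-target check, and only when the option is enabled.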
@@ -202,20 +206,6 @@ var JobManager = module.exports = function JobManager(maxDifficulty, hashDigest,
}
}
if (!!blockHex) {
_this.emit('debugBlockShare',
{
'extraNonce1': extraNonce1,
'extraNonce2': extraNonce2,
'nTime': nTime,
'nonce': nonce,
'headerBuffer': headerBuffer.toString('hex'),
'headerHash': headerHash.toString('hex'),
'blockHex': blockHex,
'blockHash': blockHash
}
);
}
_this.emit('share', {
job: jobId,
@@ -225,10 +215,12 @@ var JobManager = module.exports = function JobManager(maxDifficulty, hashDigest,
height: job.rpcData.height,
reward: job.rpcData.coinbasevalue,
networkDifficulty : job.difficulty.toString(),
solution: blockHash
blockHash: blockHash,
blockHashInvalid: blockHashInvalid
}, blockHex);
return {result: true, error: null, solution: blockHash};
return {result: true, error: null, blockHash: blockHash};
};
};
JobManager.prototype.__proto__ = events.EventEmitter.prototype;
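util.sha256d and util.reverseBuffer above are the pool's own helpers. For readers without the source handy, a standalone sketch of the same block-hash computation using only Node's crypto module (illustrative, not this library's code):

var crypto = require('crypto');

// Double SHA-256, the hash used for Bitcoin-style block headers.
function sha256d(buffer){
    var first = crypto.createHash('sha256').update(buffer).digest();
    return crypto.createHash('sha256').update(first).digest();
}

// Daemons and block explorers report the hash with its bytes reversed,
// so flip the 32-byte digest before hex-encoding it.
function reverseBuffer(buff){
    var reversed = Buffer.alloc(buff.length);
    for (var i = 0; i < buff.length; i++)
        reversed[i] = buff[buff.length - 1 - i];
    return reversed;
}

function headerToBlockHash(headerBuffer){
    return reverseBuffer(sha256d(headerBuffer)).toString('hex');
}

Because the extra hash is wrapped in the options.emitInvalidBlockHashes check, pools that leave the option off pay no additional double-SHA256 per non-candidate share.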

View File

@@ -289,9 +289,9 @@ var pool = module.exports = function pool(options, authorizeFn){
emitShare();
else{
SubmitBlock(blockHex, function(){
CheckBlockAccepted(shareData.solution, function(isAccepted, tx){
CheckBlockAccepted(shareData.blockHash, function(isAccepted, tx){
isValidBlock = isAccepted;
shareData.tx = tx;
shareData.txHash = tx;
emitShare();
});
});
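For context on the renamed fields in this last hunk: after the block hex is submitted, the pool asks the coin daemon whether the hash is actually in its chain before marking the share as a valid block, and the coinbase txid it gets back becomes shareData.txHash. A rough sketch of such a check; the daemon.cmd wrapper and its callback shape are assumptions here, not this repository's exact interface:

// Ask the daemon for the block; if it knows the hash, the block was accepted and
// tx[0] of the getblock result is the coinbase transaction id.
function checkBlockAccepted(daemon, blockHash, callback){
    daemon.cmd('getblock', [blockHash], function(results){
        var result = results[0]; // assuming one result per configured daemon
        if (result.error || !result.response)
            callback(false, undefined);   // rejected or immediately orphaned
        else
            callback(true, result.response.tx[0]);
    });
}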