Updated readme overscrolling text

Matt 2014-04-16 17:59:13 -06:00
parent 7be455ebd2
commit ca27990228
5 changed files with 153 additions and 86 deletions

README.md View File

@@ -26,6 +26,7 @@ Features
* Daemon RPC interface
* Stratum TCP socket server
* Block template / job manager
* P2P to receive block notifications as a peer node
* Optimized generation transaction building
* Connecting to multiple daemons for redundancy
* Process share submissions
@@ -42,11 +43,10 @@ Features
* ✓ __Scrypt-Jane__ (YaCoin, CopperBars, Pennies, Tickets, etc.)
* ✓ __Scrypt-N__ (Vertcoin [VTC])
* ✓ __Quark__ (Quarkcoin [QRK])
* ✓ __X11__ (Darkcoin [DRK])
* ✓ __X11__ (Darkcoin [DRK], Hirocoin, Limecoin)
* ✓ __Keccak__ (Maxcoin [MAX], HelixCoin, CryptoMeth, Galleon, 365coin, Slothcoin, BitcointalkCoin, eCoin, CopperLark)
Under development:
* ✗ *Keccak* (Maxcoin [MAX], HelixCoin, CryptoMeth, eCoin, CopperLark)
* ✗ *Skein* (Skeincoin [SKC])
* ✗ *Groestl* (MyriadCoin [MYR])
* ✗ *Qubit* (QubitCoin [Q2C], MyriadCoin [MYR])
@@ -123,6 +123,24 @@ var myCoin = {
};
```
If you are using the `keccak` algorithm there are additional configuration options *(the rare `normalHashing` keccak coins such as Copperlark and eCoin don't appear to work yet, but popular ones like Maxcoin are working)*:
```javascript
var myCoin = {
"name": "eCoin",
"symbol": "ECN",
"algorithm": "keccak",
/* This is not required and set to false by default. Some coins such as Copperlark and eCoin
require it to be set to true. Maxcoin and most others are false. */
"normalHashing": true,
/* The rare normalHashing coins also require diff to be manually set here. Do not use for
typical keccak coins like Maxcoin. */
"diffShift": 32
};
```
Create and start a new pool with configuration options and an authentication function:
```javascript
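// Hedged sketch, not part of this hunk: the diff context ends at the opening fence above.
// It shows one way to create and start a pool, assuming the module's
// Stratum.createPool(options, authorizeFn) and pool.start() API; the exact shape of the
// authorization callback below is an assumption.
var Stratum = require('stratum-pool');

var pool = Stratum.createPool({
    coin: myCoin
    /* ... remaining pool options (address, ports, daemons, etc.) ... */
}, function(ip, workerName, password, callback){ // stratum authorization function
    callback({
        error: null,
        authorized: true,
        disconnect: false
    });
});

pool.start();
```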

lib/algoProperties.js View File

@@ -99,9 +99,16 @@ var algos = module.exports = global.algos = {
keccak: {
shift: 24,
multiplier: Math.pow(2, 24),
hash: function(){
return function(data){
return multiHashing.keccak(data);
hash: function(coinConfig){
if (coinConfig.normalHashing === true) {
return function (data, nTimeInt) {
return multiHashing.keccak(multiHashing.keccak(Buffer.concat([data, new Buffer(nTimeInt.toString(16), 'hex')])));
};
}
else {
return function (data) {
return multiHashing.keccak(data);
}
}
}
},
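For context, a small usage sketch of the reworked `hash` factory (not from this commit; the require path, `headerBuffer`, and `nTimeInt` below are illustrative, and the native `multi-hashing` module must be built for this to run):
```javascript
var algos = require('./lib/algoProperties.js'); // illustrative path

var headerBuffer = new Buffer(80); // placeholder 80-byte block header
headerBuffer.fill(0);
var nTimeInt = Math.floor(Date.now() / 1000);

// Standard keccak coins (e.g. Maxcoin): the returned digest only needs the header data.
var plainDigest = algos.keccak.hash({});
var h1 = plainDigest(headerBuffer);

// normalHashing coins (e.g. eCoin, Copperlark): the digest also takes nTime as an integer
// and double-keccaks the data concatenated with the hex-encoded nTime.
var normalDigest = algos.keccak.hash({ normalHashing: true });
var h2 = normalDigest(headerBuffer, nTimeInt);
```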
@@ -139,80 +146,11 @@ var algos = module.exports = global.algos = {
}
};
//Creates a non-truncated max difficulty (diff1) by bitwise right-shifting the max value of a uint256
function ShiftMax256Right(shiftRight){
//Max value uint256 (an array of ones representing 256 enabled bits)
var arr256 = Array.apply(null, new Array(256)).map(Number.prototype.valueOf, 1);
//An array of zero bits for how far the max uint256 is shifted right
var arrLeft = Array.apply(null, new Array(shiftRight)).map(Number.prototype.valueOf, 0);
//Add zero bits to uint256 and remove the bits shifted out
arr256 = arrLeft.concat(arr256).slice(0, 256);
//An array of bytes to convert the bits to, 8 bits in a byte so length will be 32
var octets = [];
for (var i = 0; i < 32; i++){
octets[i] = 0;
//The 8 bits for this byte
var bits = arr256.slice(i * 8, i * 8 + 8);
//Bit math to add the bits into a byte
for (var f = 0; f < bits.length; f++){
var multiplier = Math.pow(2, f);
octets[i] += bits[f] * multiplier;
}
}
return new Buffer(octets);
}
function BufferToCompact(startingBuff){
var bigNum = bignum.fromBuffer(startingBuff);
var buff = bigNum.toBuffer();
buff = buff.readUInt8(0) > 0x7f ? Buffer.concat([new Buffer([0x00]), buff]) : buff;
buff = Buffer.concat([new Buffer([buff.length]), buff]);
var compact = buff.slice(0, 4);
return compact;
}
function ConvertBitsToBuff(bitsBuff){
var numBytes = bitsBuff.readUInt8(0);
var bigBits = bignum.fromBuffer(bitsBuff.slice(1));
var target = bigBits.mul(
bignum(2).pow(
bignum(8).mul(
numBytes - 3
)
)
);
var resultBuff = target.toBuffer();
var buff256 = new Buffer(32);
buff256.fill(0);
resultBuff.copy(buff256, buff256.length - resultBuff.length);
return buff256;
}
for (var algo in algos){
if (!algos[algo].diff) {
var nonTruncatedDiff = ShiftMax256Right(algos[algo].shift);
var compactBits = BufferToCompact(nonTruncatedDiff);
var truncatedDiff = ConvertBitsToBuff(compactBits);
algos[algo].bits = compactBits;
algos[algo].diff = truncatedDiff;
algos[algo].nonTruncatedDiff = nonTruncatedDiff;
algos[algo].diff = nonTruncatedDiff;
algos[algo].nonTruncatedDiff = util.shiftMax256Right(algos[algo].shift);
algos[algo].bits = util.bufferToCompactBits(algos[algo].nonTruncatedDiff);
algos[algo].diff = util.convertBitsToBuff(algos[algo].bits);
}
}

lib/jobManager.js View File

@@ -45,7 +45,7 @@ var JobCounter = function(){
* - newBlock(blockTemplate) - When a new block (previously unknown to the JobManager) is added, use this event to broadcast new jobs
* - share(shareData, blockHex) - When a worker submits a share. It will have blockHex if a block was found
**/
var JobManager = module.exports = function JobManager(maxDifficulty, hashDigest, options){
var JobManager = module.exports = function JobManager(maxDifficulty, options){
//private members
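For orientation, a hedged sketch (not part of the diff) of how the slimmed-down constructor and the events documented above are consumed; `maxDifficulty` and `options` stand in for the values pool.js builds, and the require path is illustrative:
```javascript
var JobManager = require('./lib/jobManager.js'); // illustrative path

// hashDigest is no longer passed in; it is now derived internally from options.coin.
var jobManager = new JobManager(maxDifficulty, options);

jobManager.on('newBlock', function(blockTemplate){
    // broadcast fresh jobs for the previously unknown block
});

jobManager.on('share', function(shareData, blockHex){
    // record the share; if blockHex is set, a block candidate was found and can be submitted
});
```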
@@ -65,7 +65,33 @@ var JobManager = module.exports = function JobManager(maxDifficulty, hashDigest,
var lastTransactionUpdateCheck = Date.now();
var coinbaseHasher = options.coin.algorithm === 'keccak' ? util.sha256 : util.sha256d;
var hashDigest = algos[options.coin.algorithm].hash(options.coin);
var coinbaseHasher = (function(){
switch(options.coin.algorithm){
case 'keccak':
case 'blake':
if (options.coin.normalHashing !== true)
return util.sha256;
default:
return util.sha256d;
}
})();
var blockHasher = (function(){
switch(options.coin.algorithm){
case 'keccak':
case 'blake':
//if (options.coin.normalHashing !== true)
return function(d, nTime){
return util.reverseBuffer(util.sha256d(Buffer.concat([d, new Buffer(nTime, 'hex')])));
};
default:
return function(d){
return util.reverseBuffer(util.sha256d(d));
};
}
})();
//returns true if processed a new block
this.processTemplate = function(rpcData, publicKey){
@@ -187,7 +213,7 @@ var JobManager = module.exports = function JobManager(maxDifficulty, hashDigest,
//Check if share is a block candidate (matched network difficulty)
if (job.target.ge(headerBigNum)){
blockHex = job.serializeBlock(headerBuffer, coinbaseBuffer).toString('hex');
blockHash = util.reverseBuffer(util.sha256d(headerBuffer)).toString('hex');
blockHash = blockHasher(headerBuffer, nTime).toString('hex');
}
else {
if (options.emitInvalidBlockHashes)

lib/pool.js View File

@@ -44,10 +44,12 @@ var pool = module.exports = function pool(options, authorizeFn){
throw new Error();
}
//Which number to use as dividend when converting difficulty to target
var maxDifficulty = bignum.fromBuffer(algos[options.coin.algorithm].diff);
var diff1 = options.coin.diffShift ?
util.getTruncatedDiff(options.coin.diffShift) :
algos[options.coin.algorithm].diff;
var hashDigest = algos[options.coin.algorithm].hash(options.coin);
//Which number to use as dividend when converting difficulty to target
var maxDifficulty = bignum.fromBuffer(diff1);
this.start = function(){
@@ -115,7 +117,7 @@ var pool = module.exports = function pool(options, authorizeFn){
'Detected Reward Type:\t' + options.coin.reward,
'Current Block Height:\t' + _this.jobManager.currentJob.rpcData.height,
'Current Connect Peers:\t' + options.initStats.connections,
'Network Hash Rate:\t' + (options.initStats.networkHashRate / 1e3).toFixed(6) + ' KH/s',
'Network Hash Rate:\t' + util.getReadableHashRateString(options.initStats.networkHashRate),
'Network Difficulty:\t' + _this.jobManager.currentJob.difficulty.toString(),
'Listening Port(s):\t' + _this.options.initStats.stratumPorts.join(', ')
];
@@ -262,7 +264,7 @@ var pool = module.exports = function pool(options, authorizeFn){
function SetupJobManager(){
_this.jobManager = new jobManager(maxDifficulty, hashDigest, options);
_this.jobManager = new jobManager(maxDifficulty, options);
_this.jobManager.on('newBlock', function(blockTemplate){
//Check if stratumServer has been initialized yet
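As the dividend comments above note, `diff1` is the number a worker difficulty is divided into to obtain a share target. A rough sketch of that relationship, using the `bignum` package already required by pool.js (the `diff1` and `workerDifficulty` names are illustrative):
```javascript
var bignum = require('bignum');

// diff1 comes from the coin's diffShift override or from the algorithm default (see above).
var maxDifficulty = bignum.fromBuffer(diff1);

// A worker difficulty of N corresponds to a target of diff1 / N, so raising the
// difficulty shrinks the value a submitted hash must stay under.
var workerDifficulty = 16;
var workerTarget = maxDifficulty.div(workerDifficulty);
```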

lib/util.js View File

@@ -275,4 +275,87 @@ exports.addressToScript = function(addr){
var pubkey = decoded.slice(1,-4);
return Buffer.concat([new Buffer([0x76, 0xa9, 0x14]), pubkey, new Buffer([0x88, 0xac])]);
};
exports.getReadableHashRateString = function(hashrate){
var i = -1;
var byteUnits = [ ' KH', ' MH', ' GH', ' TH', ' PH' ];
do {
hashrate = hashrate / 1024;
i++;
} while (hashrate > 1024);
return hashrate.toFixed(2) + byteUnits[i];
};
//Creates a non-truncated max difficulty (diff1) by bitwise right-shifting the max value of a uint256
exports.shiftMax256Right = function(shiftRight){
//Max value uint256 (an array of ones representing 256 enabled bits)
var arr256 = Array.apply(null, new Array(256)).map(Number.prototype.valueOf, 1);
//An array of zero bits for how far the max uint256 is shifted right
var arrLeft = Array.apply(null, new Array(shiftRight)).map(Number.prototype.valueOf, 0);
//Add zero bits to uint256 and remove the bits shifted out
arr256 = arrLeft.concat(arr256).slice(0, 256);
//An array of bytes to convert the bits to, 8 bits in a byte so length will be 32
var octets = [];
for (var i = 0; i < 32; i++){
octets[i] = 0;
//The 8 bits for this byte
var bits = arr256.slice(i * 8, i * 8 + 8);
//Bit math to add the bits into a byte
for (var f = 0; f < bits.length; f++){
var multiplier = Math.pow(2, f);
octets[i] += bits[f] * multiplier;
}
}
return new Buffer(octets);
};
exports.bufferToCompactBits = function(startingBuff){
var bigNum = bignum.fromBuffer(startingBuff);
var buff = bigNum.toBuffer();
buff = buff.readUInt8(0) > 0x7f ? Buffer.concat([new Buffer([0x00]), buff]) : buff;
buff = Buffer.concat([new Buffer([buff.length]), buff]);
var compact = buff.slice(0, 4);
return compact;
};
exports.convertBitsToBuff = function(bitsBuff){
var numBytes = bitsBuff.readUInt8(0);
var bigBits = bignum.fromBuffer(bitsBuff.slice(1));
var target = bigBits.mul(
bignum(2).pow(
bignum(8).mul(
numBytes - 3
)
)
);
var resultBuff = target.toBuffer();
var buff256 = new Buffer(32);
buff256.fill(0);
resultBuff.copy(buff256, buff256.length - resultBuff.length);
return buff256;
};
exports.getTruncatedDiff = function(shift){
return exports.convertBitsToBuff(exports.bufferToCompactBits(exports.shiftMax256Right(shift)));
};
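Taken together, these three helpers are what a coin's `diffShift` setting feeds through; a quick illustrative walk-through (the require path is assumed):
```javascript
var util = require('./lib/util.js'); // illustrative path

// Max uint256 shifted right 32 bits: 4 zero bytes followed by 28 0xff bytes.
var nonTruncated = util.shiftMax256Right(32);

// Encode to the 4-byte compact "bits" form (1 size byte + a 3-byte mantissa).
var bits = util.bufferToCompactBits(nonTruncated); // <Buffer 1d 00 ff ff>

// Expand back to a full 32-byte target; the result is the truncated diff1
// (0x00000000ffff0000...00, the familiar Bitcoin-style diff1 target).
var diff1 = util.convertBitsToBuff(bits);

// getTruncatedDiff composes the three steps in one call.
var sameDiff1 = util.getTruncatedDiff(32);
```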