From b19af3e9ace7cc215f199c0584efd4b6af83d7ef Mon Sep 17 00:00:00 2001 From: Ahmed Bodiwala Date: Thu, 21 Nov 2013 17:03:31 +0000 Subject: [PATCH 01/20] POW and POS support via a config change --- lib/bitcoin_rpc_manager.py | 1 - lib/block_template.py | 10 +- lib/coinbaser.py | 86 +++++----- lib/coinbasetx.py | 53 ++++++- lib/halfnode.py | 68 ++++++-- lib/template_registry.py | 3 +- lib/template_registry.py.save | 288 ++++++++++++++++++++++++++++++++++ lib/util.py | 22 ++- mining/basic_share_limiter.py | 2 +- 9 files changed, 463 insertions(+), 70 deletions(-) create mode 100644 lib/template_registry.py.save diff --git a/lib/bitcoin_rpc_manager.py b/lib/bitcoin_rpc_manager.py index 06b6c8e..9977e87 100644 --- a/lib/bitcoin_rpc_manager.py +++ b/lib/bitcoin_rpc_manager.py @@ -124,7 +124,6 @@ class BitcoinRPCManager(object): except: self.next_connection() - def getdifficulty(self): while True: try: diff --git a/lib/block_template.py b/lib/block_template.py index e04ee53..e30bcf9 100644 --- a/lib/block_template.py +++ b/lib/block_template.py @@ -19,6 +19,7 @@ class BlockTemplate(halfnode.CBlock): coinbase_transaction_class = CoinbaseTransaction def __init__(self, timestamper, coinbaser, job_id): + print("Hit Block_template.py") super(BlockTemplate, self).__init__() self.job_id = job_id @@ -46,10 +47,13 @@ class BlockTemplate(halfnode.CBlock): #txhashes = [None] + [ binascii.unhexlify(t['hash']) for t in data['transactions'] ] txhashes = [None] + [ util.ser_uint256(int(t['hash'], 16)) for t in data['transactions'] ] mt = merkletree.MerkleTree(txhashes) + if settings.COINDAEMON_Reward == 'POW': + coinbase = self.coinbase_transaction_class(self.timestamper, self.coinbaser, data['coinbasevalue'], data['coinbaseaux']['flags'], data['height'], + settings.COINBASE_EXTRAS) + else: + coinbase = self.coinbase_transaction_class(self.timestamper, self.coinbaser, data['coinbasevalue'], data['coinbaseaux']['flags'], data['height'], + settings.COINBASE_EXTRAS, data['curtime']) - coinbase = self.coinbase_transaction_class(self.timestamper, self.coinbaser, data['coinbasevalue'], - data['coinbaseaux']['flags'], data['height'], settings.COINBASE_EXTRAS) - self.height = data['height'] self.nVersion = data['version'] self.hashPrevBlock = int(data['previousblockhash'], 16) diff --git a/lib/coinbaser.py b/lib/coinbaser.py index 97f3c88..932bdf8 100644 --- a/lib/coinbaser.py +++ b/lib/coinbaser.py @@ -9,59 +9,71 @@ log = lib.logger.get_logger('coinbaser') # TODO: Add on_* hooks in the app class SimpleCoinbaser(object): - '''This very simple coinbaser uses a constant coin address + '''This very simple coinbaser uses constant bitcoin address for all generated blocks.''' def __init__(self, bitcoin_rpc, address): - # Fire callback when coinbaser is ready - self.on_load = defer.Deferred() - + print("hit the coinbaser") + # Fire Callback when the coinbaser is ready + self.on_load = defer.Deferred() + self.address = address - self.is_valid = False # We need to check if pool can use this address - - self.bitcoin_rpc = bitcoin_rpc - self._validate() + self.is_valid = False + + self.bitcoin_rpc = bitcoin_rpc + self._validate() def _validate(self): - d = self.bitcoin_rpc.validateaddress(self.address) - d.addCallback(self._address_check) - d.addErrback(self._failure) - - def _address_check(self, result): - if result['isvalid'] and result['ismine']: - self.is_valid = True - log.info("Coinbase address '%s' is valid" % self.address) - - if not self.on_load.called: - self.on_load.callback(True) - - elif result['isvalid'] and 
settings.ALLOW_NONLOCAL_WALLET == True : - self.is_valid = True - log.warning("!!! Coinbase address '%s' is valid BUT it is not local" % self.address) - - if not self.on_load.called: - self.on_load.callback(True) + d = self.bitcoin_rpc.validateaddress(self.address) + if settings.COINDAEMON_Reward == 'POW': + d.addCallback(self._POW_address_check) + else: d.addCallback(self._POS_address_check) + d.addErrback(self._failure) + + def _POW_address_check(self, result): + if result['isvalid'] and result['ismine']: + self.is_valid = True + log.info("Coinbase address '%s' is valid" % self.address) + + if not self.on_load.called: + self.on_load.callback(True) + + elif result['isvalid'] and settings.ALLOW_NONLOCAL_WALLET == True : + self.is_valid = True + log.warning("!!! Coinbase address '%s' is valid BUT it is not local" % self.address) + + if not self.on_load.called: + self.on_load.callback(True) - else: - self.is_valid = False - log.error("Coinbase address '%s' is NOT valid!" % self.address) + else: + self.is_valid = False + log.error("Coinbase address '%s' is NOT valid!" % self.address) - def _failure(self, failure): - log.error("Cannot validate Bitcoin address '%s'" % self.address) - raise + def _POS_address_check(self, result): + print(result) + print(result['pubkey']) + self.pubkey = result['pubkey'] + print("You're PUBKEY is : ", self.pubkey) + # Fire callback when coinbaser is ready + self.on_load.callback(True) #def on_new_block(self): # pass #def on_new_template(self): # pass + def _failure(self, failure): + log.error("Cannot validate Bitcoin address '%s'" % self.address) + raise def get_script_pubkey(self): - if not self.is_valid: - # Try again, maybe the coind was down? - self._validate() - raise Exception("Coinbase address is not validated!") - return util.script_to_address(self.address) + if settings.COINDAEMON_Reward == 'POW': + if not self.is_valid: + self._validate() + raise Exception("Wallet Address is Wrong") + return util.script_to_address(self.address) + else: + return util.script_to_pubkey(self.pubkey) def get_coinbase_data(self): return '' diff --git a/lib/coinbasetx.py b/lib/coinbasetx.py index 11c7e36..e2d5743 100644 --- a/lib/coinbasetx.py +++ b/lib/coinbasetx.py @@ -2,8 +2,9 @@ import binascii import halfnode import struct import util - -class CoinbaseTransaction(halfnode.CTransaction): +import settings +if settings.COINDAEMON_Reward == 'POW': + class CoinbaseTransaction(halfnode.CTransaction): '''Construct special transaction used for coinbase tx. It also implements quick serialization using pre-cached scriptSig template.''' @@ -46,4 +47,50 @@ class CoinbaseTransaction(halfnode.CTransaction): raise Exception("Incorrect extranonce size") (part1, part2) = self.vin[0]._scriptSig_template - self.vin[0].scriptSig = part1 + extranonce + part2 \ No newline at end of file + self.vin[0].scriptSig = part1 + extranonce + part2 +else: + class CoinbaseTransaction(halfnode.CTransaction): + '''Construct special transaction used for coinbase tx. 
+ It also implements quick serialization using pre-cached + scriptSig template.''' + + extranonce_type = '>Q' + extranonce_placeholder = struct.pack(extranonce_type, int('f000000ff111111f', 16)) + extranonce_size = struct.calcsize(extranonce_type) + + def __init__(self, timestamper, coinbaser, value, flags, height, data, ntime): + super(CoinbaseTransaction, self).__init__() + + #self.extranonce = 0 + + if len(self.extranonce_placeholder) != self.extranonce_size: + raise Exception("Extranonce placeholder don't match expected length!") + + tx_in = halfnode.CTxIn() + tx_in.prevout.hash = 0L + tx_in.prevout.n = 2**32-1 + tx_in._scriptSig_template = ( + util.ser_number(height) + binascii.unhexlify(flags) + util.ser_number(int(timestamper.time())) + \ + chr(self.extranonce_size), + util.ser_string(coinbaser.get_coinbase_data() + data) + ) + + tx_in.scriptSig = tx_in._scriptSig_template[0] + self.extranonce_placeholder + tx_in._scriptSig_template[1] + + tx_out = halfnode.CTxOut() + tx_out.nValue = value + tx_out.scriptPubKey = coinbaser.get_script_pubkey() + + self.nTime = ntime + self.vin.append(tx_in) + self.vout.append(tx_out) + + # Two parts of serialized coinbase, just put part1 + extranonce + part2 to have final serialized tx + self._serialized = super(CoinbaseTransaction, self).serialize().split(self.extranonce_placeholder) + + def set_extranonce(self, extranonce): + if len(extranonce) != self.extranonce_size: + raise Exception("Incorrect extranonce size") + + (part1, part2) = self.vin[0]._scriptSig_template + self.vin[0].scriptSig = part1 + extranonce + part2 diff --git a/lib/halfnode.py b/lib/halfnode.py index 576ea8b..b03a6f5 100644 --- a/lib/halfnode.py +++ b/lib/halfnode.py @@ -138,25 +138,49 @@ class CTxOut(object): class CTransaction(object): def __init__(self): - self.nVersion = 1 - self.vin = [] - self.vout = [] - self.nLockTime = 0 - self.sha256 = None + if settings.COINDAEMON_Reward == 'POW': + self.nVersion = 1 + self.vin = [] + self.vout = [] + self.nLockTime = 0 + self.sha256 = None + else: + self.nVersion = 1 + self.nTime = 0 + self.vin = [] + self.vout = [] + self.nLockTime = 0 + self.sha256 = None def deserialize(self, f): - self.nVersion = struct.unpack(" target_user and \ + ( 'prev_jobid' not in session or session['prev_jobid'] < job_id \ + or 'prev_diff' not in session or hash_int > self.diff_to_target(session['prev_diff']) ): + raise SubmitException("Share is above target") + + # Mostly for debugging purposes + target_info = self.diff_to_target(100000) + if hash_int <= target_info: + log.info("Yay, share with diff above 100000") + + # Algebra tells us the diff_to_target is the same as hash_to_diff + share_diff = int(self.diff_to_target(hash_int)) + + + # 5. Compare hash with target of the network + if hash_int <= job.target: + # Yay! It is block candidate! + log.info("We found a block candidate! %s" % scrypt_hash_hex) + + # Reverse the header and get the potential block hash (for scrypt only) + block_hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ])) + block_hash_hex = block_hash_bin[::-1].encode('hex_codec') + + # 6. Finalize and serialize block object + job.finalize(merkle_root_int, extranonce1_bin, extranonce2_bin, int(ntime, 16), int(nonce, 16)) + + if not job.is_valid(): + # Should not happen + log.error("Final job validation failed!") + + # 7. 
Submit block to the network + serialized = binascii.hexlify(job.serialize()) + on_submit = self.bitcoin_rpc.submitblock(serialized, block_hash_hex) + if on_submit: + self.update_block() + + if settings.SOLUTION_BLOCK_HASH: + return (header_hex, block_hash_hex, share_diff, on_submit) + else: + return (header_hex, scrypt_hash_hex, share_diff, on_submit) + + if settings.SOLUTION_BLOCK_HASH: + # Reverse the header and get the potential block hash (for scrypt only) only do this if we want to send in the block hash to the shares table + block_hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ])) + block_hash_hex = block_hash_bin[::-1].encode('hex_codec') + return (header_hex, block_hash_hex, share_diff, None) + else: + return (header_hex, scrypt_hash_hex, share_diff, None) diff --git a/lib/util.py b/lib/util.py index 050876c..1b41659 100644 --- a/lib/util.py +++ b/lib/util.py @@ -3,6 +3,8 @@ import struct import StringIO import binascii +import settings +import bitcoin_rpc from hashlib import sha256 def deser_string(f): @@ -171,7 +173,7 @@ def address_to_pubkeyhash(addr): addr = b58decode(addr, 25) except: return None - + if addr is None: return None @@ -209,9 +211,15 @@ def ser_number(n): s.append(n) return bytes(s) -def script_to_address(addr): - d = address_to_pubkeyhash(addr) - if not d: - raise ValueError('invalid address') - (ver, pubkeyhash) = d - return b'\x76\xa9\x14' + pubkeyhash + b'\x88\xac' +if settings.COINDAEMON_Reward == 'POW': + def script_to_address(addr): + d = address_to_pubkeyhash(addr) + if not d: + raise ValueError('invalid address') + (ver, pubkeyhash) = d + return b'\x76\xa9\x14' + pubkeyhash + b'\x88\xac' +else: + def script_to_pubkey(key): + if len(key) == 66: key = binascii.unhexlify(key) + if len(key) != 33: raise Exception('invalid pubkey passed to script_to_pubkey') + return b'\x21' + key + b'\xac' diff --git a/mining/basic_share_limiter.py b/mining/basic_share_limiter.py index adc04c2..73450c2 100644 --- a/mining/basic_share_limiter.py +++ b/mining/basic_share_limiter.py @@ -125,7 +125,7 @@ class BasicShareLimiter(object): ddiff = 1 # Don't go above LITECOIN or VDIFF_MAX_TARGET self.update_litecoin_difficulty() - if settings.USE_LITECOIN_DIFF: + if settings.USE_COINDAEMON_DIFF: diff_max = min([settings.VDIFF_MAX_TARGET, self.litecoin_diff]) else: diff_max = settings.VDIFF_MAX_TARGET From fb6ce0187af18e2bd5853c5905064a07c6ed266b Mon Sep 17 00:00:00 2001 From: Ahmed Bodiwala Date: Thu, 21 Nov 2013 17:56:01 +0000 Subject: [PATCH 02/20] MultiAlgo Support TX comments OSC, B0C, TEK --- conf/config_sample.py | 6 +++++- lib/coinbasetx.py | 47 +++++++++++++++++++++++++++++++++++++++++++ lib/config_default.py | 8 +++++++- lib/halfnode.py | 27 ++++++++++++++++++++++++- 4 files changed, 85 insertions(+), 3 deletions(-) diff --git a/conf/config_sample.py b/conf/config_sample.py index a4633cb..91f6cb6 100644 --- a/conf/config_sample.py +++ b/conf/config_sample.py @@ -20,8 +20,12 @@ COINDAEMON_TRUSTED_PASSWORD = 'somepassword' # This currently only works with POW SHA256 and Scrypt Coins # The available options are scrypt and sha256d. # If the option does not meet either of these criteria stratum defaults to scrypt +# Until AutoReward Selecting Code has been implemented the below options are used to select the type of coin +# For Reward type there is POW and POS. please ensure you choose the currect type. 
+# For SHA256 PoS Coins which support TX Messages please enter yes in the TX selection COINDAEMON_ALGO = 'scrypt' - +COINDAEMON_Reward = 'POW' +COINDAEMON_SHA256_TX = 'no' # ******************** BASIC SETTINGS *************** # Backup Coin Daemon address's (consider having at least 1 backup) # You can have up to 99 diff --git a/lib/coinbasetx.py b/lib/coinbasetx.py index e2d5743..3387260 100644 --- a/lib/coinbasetx.py +++ b/lib/coinbasetx.py @@ -42,6 +42,53 @@ if settings.COINDAEMON_Reward == 'POW': # Two parts of serialized coinbase, just put part1 + extranonce + part2 to have final serialized tx self._serialized = super(CoinbaseTransaction, self).serialize().split(self.extranonce_placeholder) + def set_extranonce(self, extranonce): + if len(extranonce) != self.extranonce_size: + raise Exception("Incorrect extranonce size") + + (part1, part2) = self.vin[0]._scriptSig_template + self.vin[0].scriptSig = part1 + extranonce + part2 +elif settings.COINDAEMON_Reward == 'POS' and settings.COINDAEMON_SHA256_TX == 'yes': + class CoinbaseTransaction(halfnode.CTransaction): + '''Construct special transaction used for coinbase tx. + It also implements quick serialization using pre-cached + scriptSig template.''' + + extranonce_type = '>Q' + extranonce_placeholder = struct.pack(extranonce_type, int('f000000ff111111f', 16)) + extranonce_size = struct.calcsize(extranonce_type) + + def __init__(self, timestamper, coinbaser, value, flags, height, data, ntime): + super(CoinbaseTransaction, self).__init__() + + #self.extranonce = 0 + + if len(self.extranonce_placeholder) != self.extranonce_size: + raise Exception("Extranonce placeholder don't match expected length!") + + tx_in = halfnode.CTxIn() + tx_in.prevout.hash = 0L + tx_in.prevout.n = 2**32-1 + tx_in._scriptSig_template = ( + util.ser_number(height) + binascii.unhexlify(flags) + util.ser_number(int(timestamper.time())) + \ + chr(self.extranonce_size), + util.ser_string(coinbaser.get_coinbase_data() + data) + ) + + tx_in.scriptSig = tx_in._scriptSig_template[0] + self.extranonce_placeholder + tx_in._scriptSig_template[1] + + tx_out = halfnode.CTxOut() + tx_out.nValue = value + tx_out.scriptPubKey = coinbaser.get_script_pubkey() + + self.nTime = ntime + self.strTxComment = "Mined By AhmedBodi's CryptoExpert Pools" + self.vin.append(tx_in) + self.vout.append(tx_out) + + # Two parts of serialized coinbase, just put part1 + extranonce + part2 to have final serialized tx + self._serialized = super(CoinbaseTransaction, self).serialize().split(self.extranonce_placeholder) + def set_extranonce(self, extranonce): if len(extranonce) != self.extranonce_size: raise Exception("Incorrect extranonce size") diff --git a/lib/config_default.py b/lib/config_default.py index 5f96e58..3693932 100755 --- a/lib/config_default.py +++ b/lib/config_default.py @@ -111,11 +111,17 @@ COINDAEMON_TRUSTED_PORT = 8332 # RPC port COINDAEMON_TRUSTED_USER = 'stratum' COINDAEMON_TRUSTED_PASSWORD = '***somepassword***' + # Coin Algorithm is the option used to determine the algortithm used by stratum # This currently only works with POW SHA256 and Scrypt Coins # The available options are scrypt and sha256d. -# If the option does not meet either of these criteria stratum defaults to scrypt +# If the option does not meet either of these criteria stratum defaults to scry$ +# Until AutoReward Selecting Code has been implemented the below options are us$ +# For Reward type there is POW and POS. 
please ensure you choose the currect ty$ +# For SHA256 PoS Coins which support TX Messages please enter yes in the TX sel$ COINDAEMON_ALGO = 'scrypt' +COINDAEMON_Reward = 'POW' +COINDAEMON_SHA256_TX = 'yes' # ******************** OTHER CORE SETTINGS ********************* # Use "echo -n '' | sha256sum | cut -f1 -d' ' " diff --git a/lib/halfnode.py b/lib/halfnode.py index b03a6f5..491b1cc 100644 --- a/lib/halfnode.py +++ b/lib/halfnode.py @@ -144,6 +144,14 @@ class CTransaction(object): self.vout = [] self.nLockTime = 0 self.sha256 = None + elif settings.COINDAEMON_Reward == 'POS' and settings.COINDAEMON_SHA256_TX == 'yes': + self.nVersion = 1 + self.nTime = 0 + self.vin = [] + self.vout = [] + self.nLockTime = 0 + self.sha256 = None + self.strTxComment = "" else: self.nVersion = 1 self.nTime = 0 @@ -158,7 +166,15 @@ class CTransaction(object): self.vout = deser_vector(f, CTxOut) self.nLockTime = struct.unpack(" Date: Thu, 21 Nov 2013 18:01:10 +0000 Subject: [PATCH 03/20] Added Notification Code --- lib/halfnode.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/lib/halfnode.py b/lib/halfnode.py index 491b1cc..b5b0e64 100644 --- a/lib/halfnode.py +++ b/lib/halfnode.py @@ -23,6 +23,20 @@ else: print("########################################### NOT Loading LTC Scrypt Module ######################################################") pass +if settings.COINDAEMON_Reward == 'POS': + print("########################################### Loading POS Support #########################################################") + import ltc_scrypt +else: + print("########################################### NOT Loading POS Support ######################################################") + pass + +if settings.COINDAEMON_Reward == 'POS' and settings.COINDAEMON_SH256_TX = 'yes': + print("########################################### Loading SHA256 Transaction Message Support #########################################################") + import ltc_scrypt +else: + print("########################################### NOT Loading SHA256 Transaction Message Support ######################################################") + pass + import lib.logger log = lib.logger.get_logger('halfnode') From 621eec24c150e812e237533b4b6d3adaa13d8847 Mon Sep 17 00:00:00 2001 From: Ahmed Bodiwala Date: Thu, 21 Nov 2013 22:51:29 +0000 Subject: [PATCH 04/20] Force DB Loader on New Block --- lib/halfnode.py | 2 +- mining/service.py | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/lib/halfnode.py b/lib/halfnode.py index b5b0e64..6a9e211 100644 --- a/lib/halfnode.py +++ b/lib/halfnode.py @@ -30,7 +30,7 @@ else: print("########################################### NOT Loading POS Support ######################################################") pass -if settings.COINDAEMON_Reward == 'POS' and settings.COINDAEMON_SH256_TX = 'yes': +if settings.COINDAEMON_Reward == 'POS' and settings.COINDAEMON_SH256_TX == 'yes': print("########################################### Loading SHA256 Transaction Message Support #########################################################") import ltc_scrypt else: diff --git a/mining/service.py b/mining/service.py index ef8be29..e56b06c 100644 --- a/mining/service.py +++ b/mining/service.py @@ -7,6 +7,9 @@ from stratum.pubsub import Pubsub from interfaces import Interfaces from subscription import MiningSubscription from lib.exceptions import SubmitException +import DBInterface +dbi = DBInterface.DBInterface() +dbi.init_main() import lib.logger log = lib.logger.get_logger('mining') @@ -29,7 
+32,8 @@ class MiningService(GenericService): See blocknotify.sh in /scripts/ for more info.''' log.info("New block notification received") - Interfaces.template_registry.update_block() + dbi.do_import(self, dbi, True) + Interfaces.template_registry.update_block() return True @admin From ba43ec37c5c992d6398d74ac851ec2f83a688844 Mon Sep 17 00:00:00 2001 From: ahmedbodi Date: Fri, 22 Nov 2013 10:21:40 +0000 Subject: [PATCH 05/20] Fixed an unneeded call --- lib/halfnode.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/halfnode.py b/lib/halfnode.py index 6a9e211..646db4c 100644 --- a/lib/halfnode.py +++ b/lib/halfnode.py @@ -25,14 +25,14 @@ else: if settings.COINDAEMON_Reward == 'POS': print("########################################### Loading POS Support #########################################################") - import ltc_scrypt + pass else: print("########################################### NOT Loading POS Support ######################################################") pass if settings.COINDAEMON_Reward == 'POS' and settings.COINDAEMON_SH256_TX == 'yes': print("########################################### Loading SHA256 Transaction Message Support #########################################################") - import ltc_scrypt + pass else: print("########################################### NOT Loading SHA256 Transaction Message Support ######################################################") pass From 523df53ed41e472e2d0b00af838962b0e475bef8 Mon Sep 17 00:00:00 2001 From: ahmedbodi Date: Thu, 28 Nov 2013 09:35:01 +0000 Subject: [PATCH 06/20] Dead Coin Daemon Email Notifications --- lib/notify_email.py | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 lib/notify_email.py diff --git a/lib/notify_email.py b/lib/notify_email.py new file mode 100644 index 0000000..dcabd3b --- /dev/null +++ b/lib/notify_email.py @@ -0,0 +1,43 @@ +import smtplib +from email.mime.text import MIMEText + +from stratum import settings + +import stratum.logger +log = stratum.logger.get_logger('Notify_Email') + +class NOTIFY_EMAIL(): + + def notify_start(self): + if settings.NOTIFY_EMAIL_TO != '': + self.send_email(settings.NOTIFY_EMAIL_TO,'Stratum Server Started','Stratum server has started!') + + def notify_found_block(self,worker_name): + if settings.NOTIFY_EMAIL_TO != '': + text = '%s on Stratum server found a block!' 
% worker_name + self.send_email(settings.NOTIFY_EMAIL_TO,'Stratum Server Found Block',text) + + def notify_dead_coindaemon(self,worker_name): + if settings.NOTIFY_EMAIL_TO != '': + text = 'Coin Daemon Has Crashed Please Report' % worker_name + self.send_email(settings.NOTIFY_EMAIL_TO,'Coin Daemon Crashed!',text) + + def send_email(self,to,subject,message): + msg = MIMEText(message) + msg['Subject'] = subject + msg['From'] = settings.NOTIFY_EMAIL_FROM + msg['To'] = to + try: + s = smtplib.SMTP(settings.NOTIFY_EMAIL_SERVER) + if settings.NOTIFY_EMAIL_USERNAME != '': + if settings.NOTIFY_EMAIL_USETLS: + s.ehlo() + s.starttls() + s.ehlo() + s.login(settings.NOTIFY_EMAIL_USERNAME, settings.NOTIFY_EMAIL_PASSWORD) + s.sendmail(settings.NOTIFY_EMAIL_FROM,to,msg.as_string()) + s.quit() + except smtplib.SMTPAuthenticationError as e: + log.error('Error sending Email: %s' % e[1]) + except Exception as e: + log.error('Error sending Email: %s' % e[0]) From c94ec3f4d44fd34bf72d4d75d00a1231b0bbba70 Mon Sep 17 00:00:00 2001 From: ahmedbodi Date: Thu, 28 Nov 2013 09:37:38 +0000 Subject: [PATCH 07/20] Add Email Notofications --- conf/config_sample.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/conf/config_sample.py b/conf/config_sample.py index 91f6cb6..0754600 100644 --- a/conf/config_sample.py +++ b/conf/config_sample.py @@ -162,3 +162,12 @@ GW_ENABLE = True # Enable the Proxy GW_PORT = 3333 # Getwork Proxy Port GW_DISABLE_MIDSTATE = False # Disable midstate's (Faster but breaks some clients) GW_SEND_REAL_TARGET = True # Propigate >1 difficulty to Clients (breaks some clients) + +# ******************** E-Mail Notification Settings ********************* +NOTIFY_EMAIL_TO = '' # Where to send Start/Found block notifications +NOTIFY_EMAIL_TO_DEADMINER = '' # Where to send dead miner notifications +NOTIFY_EMAIL_FROM = 'root@localhost' # Sender address +NOTIFY_EMAIL_SERVER = 'localhost' # E-Mail Sender +NOTIFY_EMAIL_USERNAME = '' # E-Mail server SMTP Logon +NOTIFY_EMAIL_PASSWORD = '' +NOTIFY_EMAIL_USETLS = True From beda42123574a298e1de93d2c6773aa86bb9586b Mon Sep 17 00:00:00 2001 From: Ahmed Bodiwala Date: Sat, 7 Dec 2013 21:33:57 +0000 Subject: [PATCH 08/20] Fixed Typo --- lib/halfnode.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/halfnode.py b/lib/halfnode.py index 646db4c..e503a5d 100644 --- a/lib/halfnode.py +++ b/lib/halfnode.py @@ -30,7 +30,7 @@ else: print("########################################### NOT Loading POS Support ######################################################") pass -if settings.COINDAEMON_Reward == 'POS' and settings.COINDAEMON_SH256_TX == 'yes': +if settings.COINDAEMON_Reward == 'POS' and settings.COINDAEMON_SHA256_TX == 'yes': print("########################################### Loading SHA256 Transaction Message Support #########################################################") pass else: From 1286b20c2a20d190f3c1988c4c0d2daa96187022 Mon Sep 17 00:00:00 2001 From: Ahmed Bodiwala Date: Sat, 7 Dec 2013 22:08:15 +0000 Subject: [PATCH 09/20] Added YAC Support, Vardiff WorkAround (Power of 2), Worker Banning, Custom Difficulties --- conf/config_sample.py | 39 +++++++++++++------- lib/template_registry.py | 12 +++++-- mining/basic_share_limiter.py | 61 +++++++++++++++++++++---------- mining/interfaces.py | 9 +++++ mining/service.py | 67 ++++++++++++++++++++++++++++------- 5 files changed, 142 insertions(+), 46 deletions(-) diff --git a/conf/config_sample.py b/conf/config_sample.py index 0754600..ee68d39 100644 --- a/conf/config_sample.py +++ 
b/conf/config_sample.py @@ -128,25 +128,32 @@ MERKLE_REFRESH_INTERVAL = 60 # How often check memorypool INSTANCE_ID = 31 # Used for extranonce and needs to be 0-31 # ******************** Pool Difficulty Settings ********************* -# Again, Don't change unless you know what this is for. +VDIFF_X2_TYPE = True # powers of 2 e.g. 2,4,8,16,32,64,128,256,512,1024 +VDIFF_FLOAT = False # Use float difficulty # Pool Target (Base Difficulty) -# In order to match the Pool Target with a frontend like MPOS the following formula is used: (stratum diff) ~= 2^((target bits in pushpool) - 16) -# E.G. a Pool Target of 16 would = a MPOS and PushPool Target bit's of 20 -POOL_TARGET = 16 # Pool-wide difficulty target int >= 1 +POOL_TARGET = 32 # Pool-wide difficulty target int >= 1 # Variable Difficulty Enable -VARIABLE_DIFF = True # Master variable difficulty enable +VARIABLE_DIFF = True # Master variable difficulty enable # Variable diff tuning variables -#VARDIFF will start at the POOL_TARGET. It can go as low as the VDIFF_MIN and as high as min(VDIFF_MAX or the coin daemon's difficulty) -USE_COINDAEMON_DIFF = False # Set the maximum difficulty to the coin daemon's difficulty. -DIFF_UPDATE_FREQUENCY = 86400 # Update the COINDAEMON difficulty once a day for the VARDIFF maximum -VDIFF_MIN_TARGET = 15 # Minimum Target difficulty -VDIFF_MAX_TARGET = 1000 # Maximum Target difficulty -VDIFF_TARGET_TIME = 30 # Target time per share (i.e. try to get 1 share per this many seconds) -VDIFF_RETARGET_TIME = 120 # Check to see if we should retarget this often -VDIFF_VARIANCE_PERCENT = 20 # Allow average time to very this % from target without retarget +#VARDIFF will start at the POOL_TARGET. It can go as low as the VDIFF_MIN and as high as min(VDIFF_MAX or Liteconin's difficulty) +USE_LITECOIN_DIFF = False # Set the maximum difficulty to the litecoin difficulty. +DIFF_UPDATE_FREQUENCY = 86400 # Update the litecoin difficulty once a day for the VARDIFF maximum +VDIFF_MIN_TARGET = 16 # Minimum Target difficulty +VDIFF_MAX_TARGET = 1024 # Maximum Target difficulty +VDIFF_TARGET_TIME = 15 # Target time per share (i.e. 
try to get 1 share per this many seconds) +VDIFF_RETARGET_TIME = 120 # Check to see if we should retarget this often +VDIFF_VARIANCE_PERCENT = 30 # Allow average time to very this % from target without retarget +VDIFF_RETARGET_DELAY = 25 # Wait this many seconds before applying new variable difficulty target +VDIFF_RETARGET_REJECT_TIME = 60 # Wait this many seconds before rejecting old difficulty shares + +# Allow external setting of worker difficulty, checks pool_worker table datarow[6] position for target difficulty +# if present or else defaults to pool target, over rides all other difficulty settings, no checks are made +#for min or max limits this sould be done by your front end software +ALLOW_EXTERNAL_DIFFICULTY = False + #### Advanced Option ##### # For backwards compatibility, we send the scrypt hash to the solutions column in the shares table # For block confirmation, we have an option to send the block hash in @@ -163,6 +170,12 @@ GW_PORT = 3333 # Getwork Proxy Port GW_DISABLE_MIDSTATE = False # Disable midstate's (Faster but breaks some clients) GW_SEND_REAL_TARGET = True # Propigate >1 difficulty to Clients (breaks some clients) +# ******************** Worker Ban Options ********************* +ENABLE_WORKER_BANNING = True # enable/disable temporary worker banning +WORKER_CACHE_TIME = 600 # How long the worker stats cache is good before we check and refresh +WORKER_BAN_TIME = 300 # How long we temporarily ban worker +INVALID_SHARES_PERCENT = 50 # Allow average invalid shares vary this % before we ban + # ******************** E-Mail Notification Settings ********************* NOTIFY_EMAIL_TO = '' # Where to send Start/Found block notifications NOTIFY_EMAIL_TO_DEADMINER = '' # Where to send dead miner notifications diff --git a/lib/template_registry.py b/lib/template_registry.py index 3d00660..75b3636 100644 --- a/lib/template_registry.py +++ b/lib/template_registry.py @@ -5,6 +5,8 @@ import StringIO import settings if settings.COINDAEMON_ALGO == 'scrypt': import ltc_scrypt +elif settings.MAIN_COIN_ALGORITHM == 'scrypt-jane': + import yac_scrypt else: pass from twisted.internet import defer from lib.exceptions import SubmitException @@ -170,7 +172,7 @@ class TemplateRegistry(object): return j def submit_share(self, job_id, worker_name, session, extranonce1_bin, extranonce2, ntime, nonce, - difficulty): + difficulty, submit_time): '''Check parameters and finalize block template. If it leads to valid block candidate, asynchronously submits the block back to the bitcoin network. @@ -229,11 +231,13 @@ class TemplateRegistry(object): # 4. 
Reverse header and compare it with target of the user if settings.COINDAEMON_ALGO == 'scrypt': hash_bin = ltc_scrypt.getPoWHash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ])) + elif settings.MAIN_COIN_ALGORITHM == 'scrypt-jane': + hash_bin = yac_scrypt.getPoWHash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]), int(ntime, 16)) else: hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ])) hash_int = util.uint256_from_str(hash_bin) scrypt_hash_hex = "%064x" % hash_int header_hex = binascii.hexlify(header_bin) - if settings.COINDAEMON_ALGO == 'scrypt': + if not settings.COINDAEMON_ALGO == 'sha256d': header_hex = header_hex+"000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000" else: pass @@ -243,6 +247,10 @@ class TemplateRegistry(object): or 'prev_diff' not in session or hash_int > self.diff_to_target(session['prev_diff']) ): raise SubmitException("Share is above target") + if hash_int > target_user and 'prev_ts' in session \ + and (submit_time - session['prev_ts']) > settings.VDIFF_RETARGET_REJECT_TIME: + raise SubmitException("Stale-share above target") + # Mostly for debugging purposes target_info = self.diff_to_target(100000) if hash_int <= target_info: diff --git a/mining/basic_share_limiter.py b/mining/basic_share_limiter.py index 73450c2..06b2fb6 100644 --- a/mining/basic_share_limiter.py +++ b/mining/basic_share_limiter.py @@ -87,7 +87,7 @@ class BasicShareLimiter(object): ts = int(timestamp) # Init the stats for this worker if it isn't set. - if worker_name not in self.worker_stats : + if worker_name not in self.worker_stats or self.worker_stats[worker_name]['last_ts'] < ts - settings.DB_USERCACHE_TIME : self.worker_stats[worker_name] = {'last_rtc': (ts - self.retarget / 2), 'last_ts': ts, 'buffer': SpeedBuffer(self.buffersize) } dbi.update_worker_diff(worker_name, settings.POOL_TARGET) return @@ -111,33 +111,58 @@ class BasicShareLimiter(object): avg = 1 # Figure out our Delta-Diff - ddiff = float((float(current_difficulty) * (float(self.target) / float(avg))) - current_difficulty) + if settings.VDIFF_FLOAT: + ddiff = float((float(current_difficulty) * (float(self.target) / float(avg))) - current_difficulty) + else: + ddiff = int((float(current_difficulty) * (float(self.target) / float(avg))) - current_difficulty) + if (avg > self.tmax and current_difficulty > settings.VDIFF_MIN_TARGET): # For fractional -0.1 ddiff's just drop by 1 - if ddiff > -1: - ddiff = -1 - # Don't drop below POOL_TARGET - if (ddiff + current_difficulty) < settings.VDIFF_MIN_TARGET: - ddiff = settings.VDIFF_MIN_TARGET - current_difficulty + if settings.VDIFF_X2_TYPE: + ddiff = 0.5 + # Don't drop below POOL_TARGET + if (ddiff * current_difficulty) < settings.VDIFF_MIN_TARGET: + ddiff = settings.VDIFF_MIN_TARGET / current_difficulty + else: + if ddiff > -1: + ddiff = -1 + # Don't drop below POOL_TARGET + if (ddiff + current_difficulty) < settings.POOL_TARGET: + ddiff = settings.VDIFF_MIN_TARGET - current_difficulty elif avg < self.tmin: # For fractional 0.1 ddiff's just up by 1 - if ddiff < 1: - ddiff = 1 - # Don't go above LITECOIN or VDIFF_MAX_TARGET - self.update_litecoin_difficulty() - if settings.USE_COINDAEMON_DIFF: - diff_max = min([settings.VDIFF_MAX_TARGET, self.litecoin_diff]) - else: - diff_max = settings.VDIFF_MAX_TARGET + if settings.VDIFF_X2_TYPE: + ddiff = 2 + # Don't go above LITECOIN or VDIFF_MAX_TARGET + if settings.USE_LITECOIN_DIFF: + self.update_litecoin_difficulty() + diff_max = 
min([settings.VDIFF_MAX_TARGET, self.litecoin_diff]) + else: + diff_max = settings.VDIFF_MAX_TARGET - if (ddiff + current_difficulty) > diff_max: - ddiff = diff_max - current_difficulty + if (ddiff * current_difficulty) > diff_max: + ddiff = diff_max / current_difficulty + else: + if ddiff < 1: + ddiff = 1 + # Don't go above LITECOIN or VDIFF_MAX_TARGET + if settings.USE_LITECOIN_DIFF: + self.update_litecoin_difficulty() + diff_max = min([settings.VDIFF_MAX_TARGET, self.litecoin_diff]) + else: + diff_max = settings.VDIFF_MAX_TARGET + + if (ddiff + current_difficulty) > diff_max: + ddiff = diff_max - current_difficulty else: # If we are here, then we should not be retargeting. return # At this point we are retargeting this worker - new_diff = current_difficulty + ddiff + if settings.VDIFF_X2_TYPE: + new_diff = current_difficulty * ddiff + else: + new_diff = current_difficulty + ddiff log.info("Retarget for %s %i old: %i new: %i" % (worker_name, ddiff, current_difficulty, new_diff)) self.worker_stats[worker_name]['buffer'].clear() diff --git a/mining/interfaces.py b/mining/interfaces.py index 707dfd9..0370d06 100644 --- a/mining/interfaces.py +++ b/mining/interfaces.py @@ -17,12 +17,21 @@ dbi.init_main() class WorkerManagerInterface(object): def __init__(self): + self.worker_log = {} + self.worker_log.setdefault('authorized', {}) return def authorize(self, worker_name, worker_password): # Important NOTE: This is called on EVERY submitted share. So you'll need caching!!! return dbi.check_password(worker_name, worker_password) + def get_user_difficulty(self, worker_name): + wd = dbi.get_user(worker_name) + if len(wd) > 6: + #dbi.update_worker_diff(worker_name, wd[6]) + return (True, wd[6]) + else: + return (False, settings.POOL_TARGET) class ShareLimiterInterface(object): '''Implement difficulty adjustments here''' diff --git a/mining/service.py b/mining/service.py index e56b06c..e1a11f6 100644 --- a/mining/service.py +++ b/mining/service.py @@ -7,9 +7,6 @@ from stratum.pubsub import Pubsub from interfaces import Interfaces from subscription import MiningSubscription from lib.exceptions import SubmitException -import DBInterface -dbi = DBInterface.DBInterface() -dbi.init_main() import lib.logger log = lib.logger.get_logger('mining') @@ -32,8 +29,7 @@ class MiningService(GenericService): See blocknotify.sh in /scripts/ for more info.''' log.info("New block notification received") - dbi.do_import(self, dbi, True) - Interfaces.template_registry.update_block() + Interfaces.template_registry.update_block() return True @admin @@ -55,10 +51,20 @@ class MiningService(GenericService): if Interfaces.worker_manager.authorize(worker_name, worker_password): session['authorized'][worker_name] = worker_password + is_ext_diff = False + if settings.ALLOW_EXTERNAL_DIFFICULTY: + (is_ext_diff, session['difficulty']) = Interfaces.worker_manager.get_user_difficulty(worker_name) + self.connection_ref().rpc('mining.set_difficulty', [session['difficulty'], ], is_notification=True) + else: + session['difficulty'] = settings.POOL_TARGET + # worker_log = (valid, invalid, is_banned, diff, is_ext_diff, timestamp) + Interfaces.worker_manager.worker_log['authorized'][worker_name] = (0, 0, False, session['difficulty'], is_ext_diff, Interfaces.timestamper.time()) return True else: if worker_name in session['authorized']: del session['authorized'][worker_name] + if worker_name in Interfaces.worker_manager.worker_log['authorized']: + del Interfaces.worker_manager.worker_log['authorized'][worker_name] return False def 
subscribe(self, *args): @@ -72,7 +78,6 @@ class MiningService(GenericService): session = self.connection_ref().get_session() session['extranonce1'] = extranonce1 session['difficulty'] = settings.POOL_TARGET # Following protocol specs, default diff is 1 - return Pubsub.subscribe(self.connection_ref(), MiningSubscription()) + (extranonce1_hex, extranonce2_size) def submit(self, worker_name, job_id, extranonce2, ntime, nonce): @@ -92,26 +97,62 @@ class MiningService(GenericService): raise SubmitException("Connection is not subscribed for mining") difficulty = session['difficulty'] + s_difficulty = difficulty submit_time = Interfaces.timestamper.time() ip = self.connection_ref()._get_ip() - - Interfaces.share_limiter.submit(self.connection_ref, job_id, difficulty, submit_time, worker_name) + (valid, invalid, is_banned, diff, is_ext_diff, last_ts) = Interfaces.worker_manager.worker_log['authorized'][worker_name] + percent = float(float(invalid) / (float(valid) if valid else 1) * 100) + + if is_banned and submit_time - last_ts > settings.WORKER_BAN_TIME: + if percent > settings.INVALID_SHARES_PERCENT: + log.debug("Worker invalid percent: %0.2f %s STILL BANNED!" % (percent, worker_name)) + else: + is_banned = False + log.debug("Clearing ban for worker: %s UNBANNED" % worker_name) + (valid, invalid, is_banned, last_ts) = (0, 0, is_banned, Interfaces.timestamper.time()) + + if submit_time - last_ts > settings.WORKER_CACHE_TIME and not is_banned: + if percent > settings.INVALID_SHARES_PERCENT and settings.ENABLE_WORKER_BANNING: + is_banned = True + log.debug("Worker invalid percent: %0.2f %s BANNED!" % (percent, worker_name)) + else: + log.debug("Clearing worker stats for: %s" % worker_name) + (valid, invalid, is_banned, last_ts) = (0, 0, is_banned, Interfaces.timestamper.time()) + + if 'prev_ts' in session and (submit_time - session['prev_ts']) < settings.VDIFF_RETARGET_DELAY \ + and not is_ext_diff: + difficulty = session['prev_diff'] or session['difficulty'] or settings.POOL_TARGET + diff = difficulty + log.debug("%s (%d, %d, %s, %s, %d) %0.2f%% diff(%f)" % (worker_name, valid, invalid, is_banned, is_ext_diff, last_ts, percent, diff)) + if not is_ext_diff: + Interfaces.share_limiter.submit(self.connection_ref, job_id, difficulty, submit_time, worker_name) # This checks if submitted share meet all requirements # and it is valid proof of work. try: (block_header, block_hash, share_diff, on_submit) = Interfaces.template_registry.submit_share(job_id, - worker_name, session, extranonce1_bin, extranonce2, ntime, nonce, difficulty) + worker_name, session, extranonce1_bin, extranonce2, ntime, nonce, s_difficulty, submit_time) except SubmitException as e: # block_header and block_hash are None when submitted data are corrupted + invalid += 1 + Interfaces.worker_manager.worker_log['authorized'][worker_name] = (valid, invalid, is_banned, diff, is_ext_diff, last_ts) + + if is_banned: + raise SubmitException("Worker is temporarily banned") + Interfaces.share_manager.on_submit_share(worker_name, False, False, difficulty, - submit_time, False, ip, e[0], 0) + submit_time, False, ip, e[0], 0) raise - - + + valid += 1 + Interfaces.worker_manager.worker_log['authorized'][worker_name] = (valid, invalid, is_banned, diff, is_ext_diff, last_ts) + + if is_banned: + raise SubmitException("Worker is temporarily banned") + Interfaces.share_manager.on_submit_share(worker_name, block_header, block_hash, difficulty, submit_time, True, ip, '', share_diff) - + if on_submit != None: # Pool performs submitblock() to litecoind. 
Let's hook # to result and report it to share manager From 9a8515207d2a5901e94cf0b08919cbf744ecc068 Mon Sep 17 00:00:00 2001 From: Ahmed Bodiwala Date: Sat, 7 Dec 2013 22:52:31 +0000 Subject: [PATCH 10/20] Fixed Typo --- lib/template_registry.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/template_registry.py b/lib/template_registry.py index 75b3636..1589998 100644 --- a/lib/template_registry.py +++ b/lib/template_registry.py @@ -5,7 +5,7 @@ import StringIO import settings if settings.COINDAEMON_ALGO == 'scrypt': import ltc_scrypt -elif settings.MAIN_COIN_ALGORITHM == 'scrypt-jane': +elif settings.COINDAEMON_ALGO == 'scrypt-jane': import yac_scrypt else: pass from twisted.internet import defer From 932926a0788a3e196bf7c1867f6d5593ba3b1422 Mon Sep 17 00:00:00 2001 From: Ahmed Bodiwala Date: Sun, 8 Dec 2013 02:10:21 +0000 Subject: [PATCH 11/20] Added QRK Hash Algo --- .gitmodules | 3 +++ externals/quarkcoin-hash | 1 + 2 files changed, 4 insertions(+) create mode 160000 externals/quarkcoin-hash diff --git a/.gitmodules b/.gitmodules index e569f67..c28fe5b 100644 --- a/.gitmodules +++ b/.gitmodules @@ -7,3 +7,6 @@ [submodule "externals/stratum"] path = externals/stratum url = https://github.com/slush0/stratum.git +[submodule "externals/quarkcoin-hash"] + path = externals/quarkcoin-hash + url = https://github.com/Neisklar/quarkcoin-hash-python diff --git a/externals/quarkcoin-hash b/externals/quarkcoin-hash new file mode 160000 index 0000000..8855c58 --- /dev/null +++ b/externals/quarkcoin-hash @@ -0,0 +1 @@ +Subproject commit 8855c585d06f510134a8c66b686705f494bda0d9 From db169ed82cec5fa1641715342993df79135d5e8d Mon Sep 17 00:00:00 2001 From: Ahmed Bodiwala Date: Sun, 8 Dec 2013 02:20:47 +0000 Subject: [PATCH 12/20] Added Quark Support --- lib/halfnode.py | 22 ++++++++++++++++++++++ lib/template_registry.py | 16 ++++++++++++---- 2 files changed, 34 insertions(+), 4 deletions(-) diff --git a/lib/halfnode.py b/lib/halfnode.py index e503a5d..d0a633a 100644 --- a/lib/halfnode.py +++ b/lib/halfnode.py @@ -19,6 +19,9 @@ import settings if settings.COINDAEMON_ALGO == 'scrypt': print("########################################### Loading LTC Scrypt Module #########################################################") import ltc_scrypt +elif settings.COINDAEMON_ALGO == 'quark': + print("########################################### Loading Quark Module #########################################################") + import quark_hash else: print("########################################### NOT Loading LTC Scrypt Module ######################################################") pass @@ -246,6 +249,8 @@ class CBlock(object): self.sha256 = None if settings.COINDAEMON_ALGO == 'scrypt': self.scrypt = None + elif settings.COINDAEMON_ALGO == 'quark': + self.quark = None else: pass if settings.COINDAEMON_Reward == 'POS': self.signature = b"" @@ -288,6 +293,18 @@ class CBlock(object): r.append(struct.pack(" target: return false + elif settings.COINDAEMON_ALGO == 'quark': + if self.quark > target: + return false else: if self.sha256 > target: return False diff --git a/lib/template_registry.py b/lib/template_registry.py index 1589998..7aa4370 100644 --- a/lib/template_registry.py +++ b/lib/template_registry.py @@ -7,6 +7,8 @@ if settings.COINDAEMON_ALGO == 'scrypt': import ltc_scrypt elif settings.COINDAEMON_ALGO == 'scrypt-jane': import yac_scrypt +elif settings.COINDAEMON_ALGO == 'quark': + import quark_hash else: pass from twisted.internet import defer from lib.exceptions import SubmitException @@ 
-147,6 +149,8 @@ class TemplateRegistry(object): '''Converts difficulty to target''' if settings.COINDAEMON_ALGO == 'scrypt': diff1 = 0x0000ffff00000000000000000000000000000000000000000000000000000000 + elif settings.COINDAEMON_ALGO == 'quark': + diff1 = 0x000000ffff000000000000000000000000000000000000000000000000000000 else: diff1 = 0x00000000ffff0000000000000000000000000000000000000000000000000000 return diff1 / difficulty @@ -231,15 +235,19 @@ class TemplateRegistry(object): # 4. Reverse header and compare it with target of the user if settings.COINDAEMON_ALGO == 'scrypt': hash_bin = ltc_scrypt.getPoWHash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ])) - elif settings.MAIN_COIN_ALGORITHM == 'scrypt-jane': + elif settings.COINDAEMON_ALGO == 'scrypt-jane': hash_bin = yac_scrypt.getPoWHash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]), int(ntime, 16)) - else: hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ])) + elif settings.COINDAEMON_ALGO == 'quark': + hash_bin = quark_hash.getPoWHash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ])) + else: hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ])) hash_int = util.uint256_from_str(hash_bin) scrypt_hash_hex = "%064x" % hash_int header_hex = binascii.hexlify(header_bin) - if not settings.COINDAEMON_ALGO == 'sha256d': + if settings.COINDAEMON_ALGO == 'scrypt' or settings.COINDAEMON_ALGO == 'scrypt-jane': header_hex = header_hex+"000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000" - else: pass + elif settings.COINDAEMON_ALGO == 'quark': + header_hex = header_hex+"000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000" + else: pass target_user = self.diff_to_target(difficulty) if hash_int > target_user and \ From 60edae568130409ed1d751b8260e3f0dd93e8809 Mon Sep 17 00:00:00 2001 From: Ahmed Bodiwala Date: Sun, 8 Dec 2013 02:23:48 +0000 Subject: [PATCH 13/20] Updated Config with NEW Algo Support --- conf/config_sample.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/conf/config_sample.py b/conf/config_sample.py index ee68d39..471eafb 100644 --- a/conf/config_sample.py +++ b/conf/config_sample.py @@ -17,8 +17,9 @@ COINDAEMON_TRUSTED_USER = 'user' COINDAEMON_TRUSTED_PASSWORD = 'somepassword' # Coin Algorithm is the option used to determine the algortithm used by stratum -# This currently only works with POW SHA256 and Scrypt Coins -# The available options are scrypt and sha256d. +# This currently works with POW and POS coins +# The available options are: +# scrypt, sha256d, scrypt-jane and quark # If the option does not meet either of these criteria stratum defaults to scrypt # Until AutoReward Selecting Code has been implemented the below options are used to select the type of coin # For Reward type there is POW and POS. please ensure you choose the currect type. 
From ec9aa5be5c1e3f91207164982e0bfd335c4aba05 Mon Sep 17 00:00:00 2001 From: Ahmed Bodiwala Date: Sun, 8 Dec 2013 02:44:19 +0000 Subject: [PATCH 14/20] Added Support for Other DB Types --- conf/config_sample.py | 12 ++ mining/DBInterface.py | 26 ++- mining/DB_None.py | 54 ++++++ mining/DB_Postgresql.py | 394 ++++++++++++++++++++++++++++++++++++++++ mining/DB_Sqlite.py | 299 ++++++++++++++++++++++++++++++ 5 files changed, 783 insertions(+), 2 deletions(-) create mode 100644 mining/DB_None.py create mode 100644 mining/DB_Postgresql.py create mode 100644 mining/DB_Sqlite.py diff --git a/conf/config_sample.py b/conf/config_sample.py index 471eafb..344f23a 100644 --- a/conf/config_sample.py +++ b/conf/config_sample.py @@ -88,12 +88,24 @@ PASSWORD_SALT = 'some_crazy_string' # ******************** Database ********************* +DATABASE_DRIVER = 'sqlite' # Options: none, sqlite, postgresql or mysql +DATABASE_EXTEND = True # False = pushpool db layout, True = pushpool + extra columns + +# SQLite +DB_SQLITE_FILE = 'pooldb.sqlite' +# Postgresql +DB_PGSQL_HOST = 'localhost' +DB_PGSQL_DBNAME = 'pooldb' +DB_PGSQL_USER = 'pooldb' +DB_PGSQL_PASS = '**empty**' +DB_PGSQL_SCHEMA = 'public' # MySQL DB_MYSQL_HOST = 'localhost' DB_MYSQL_DBNAME = 'pooldb' DB_MYSQL_USER = 'pooldb' DB_MYSQL_PASS = '**empty**' + # ******************** Adv. DB Settings ********************* # Don't change these unless you know what you are doing diff --git a/mining/DBInterface.py b/mining/DBInterface.py index 76a535a..a5539bc 100644 --- a/mining/DBInterface.py +++ b/mining/DBInterface.py @@ -43,10 +43,30 @@ class DBInterface(): log.debug("DB_Mysql_Vardiff INIT") import DB_Mysql_Vardiff return DB_Mysql_Vardiff.DB_Mysql_Vardiff() - else: + elif settings.DATABASE_DRIVER == "sqlite": + log.debug('DB_Sqlite INIT') + import DB_Sqlite + return DB_Sqlite.DB_Sqlite() + elif settings.DATABASE_DRIVER == "mysql": log.debug('DB_Mysql INIT') import DB_Mysql return DB_Mysql.DB_Mysql() + elif settings.DATABASE_DRIVER == "postgresql": + log.debug('DB_Postgresql INIT') + import DB_Postgresql + return DB_Postgresql.DB_Postgresql() + elif settings.DATABASE_DRIVER == "none": + log.debug('DB_None INIT') + import DB_None + return DB_None.DB_None() + else: + log.error('Invalid DATABASE_DRIVER -- using NONE') + log.debug('DB_None INIT') + import DB_None + return DB_None.DB_None() + log.debug('DB_Mysql INIT') + import DB_Mysql + return DB_Mysql.DB_Mysql() def clearusercache(self): log.debug("DBInterface.clearusercache called") @@ -55,7 +75,9 @@ class DBInterface(): def scheduleImport(self): # This schedule's the Import - use_thread = True + if settings.DATABASE_DRIVER == "sqlite": + use_thread = False + else: use_thread = True if use_thread: self.queueclock = reactor.callLater(settings.DB_LOADER_CHECKTIME , self.run_import_thread) diff --git a/mining/DB_None.py b/mining/DB_None.py new file mode 100644 index 0000000..d4e8df5 --- /dev/null +++ b/mining/DB_None.py @@ -0,0 +1,54 @@ +import stratum.logger +log = stratum.logger.get_logger('None') + +class DB_None(): + def __init__(self): + log.debug("Connecting to DB") + + def updateStats(self,averageOverTime): + log.debug("Updating Stats") + + def import_shares(self,data): + log.debug("Importing Shares") + + def found_block(self,data): + log.debug("Found Block") + + def get_user(self, id_or_username): + log.debug("Get User") + + def list_users(self): + log.debug("List Users") + + def delete_user(self,username): + log.debug("Deleting Username") + + def insert_user(self,username,password): + log.debug("Adding 
Username/Password") + + def update_user(self,username,password): + log.debug("Updating Username/Password") + + def check_password(self,username,password): + log.debug("Checking Username/Password") + return True + + def update_pool_info(self,pi): + log.debug("Update Pool Info") + + def get_pool_stats(self): + log.debug("Get Pool Stats") + ret = {} + return ret + + def get_workers_stats(self): + log.debug("Get Workers Stats") + ret = {} + return ret + + def check_tables(self): + log.debug("Checking Tables") + + def close(self): + log.debug("Close Connection") + diff --git a/mining/DB_Postgresql.py b/mining/DB_Postgresql.py new file mode 100644 index 0000000..6a3fda9 --- /dev/null +++ b/mining/DB_Postgresql.py @@ -0,0 +1,394 @@ +import time +import hashlib +from stratum import settings +import stratum.logger +log = stratum.logger.get_logger('DB_Postgresql') + +import psycopg2 +from psycopg2 import extras + +class DB_Postgresql(): + def __init__(self): + log.debug("Connecting to DB") + self.dbh = psycopg2.connect("host='"+settings.DB_PGSQL_HOST+"' dbname='"+settings.DB_PGSQL_DBNAME+"' user='"+settings.DB_PGSQL_USER+\ + "' password='"+settings.DB_PGSQL_PASS+"'") + # TODO -- set the schema + self.dbc = self.dbh.cursor() + + if hasattr(settings, 'PASSWORD_SALT'): + self.salt = settings.PASSWORD_SALT + else: + raise ValueError("PASSWORD_SALT isn't set, please set in config.py") + + def updateStats(self,averageOverTime): + log.debug("Updating Stats") + # Note: we are using transactions... so we can set the speed = 0 and it doesn't take affect until we are commited. + self.dbc.execute("update pool_worker set speed = 0, alive = 'f'"); + stime = '%.2f' % ( time.time() - averageOverTime ); + self.dbc.execute("select username,SUM(difficulty) from shares where time > to_timestamp(%s) group by username", [stime]) + total_speed = 0 + for name,shares in self.dbc.fetchall(): + speed = int(int(shares) * pow(2,32)) / ( int(averageOverTime) * 1000 * 1000) + total_speed += speed + self.dbc.execute("update pool_worker set speed = %s, alive = 't' where username = %s", (speed,name)) + self.dbc.execute("update pool set value = %s where parameter = 'pool_speed'",[total_speed]) + self.dbh.commit() + + def archive_check(self): + # Check for found shares to archive + self.dbc.execute("select time from shares where upstream_result = true order by time limit 1") + data = self.dbc.fetchone() + if data is None or (data[0] + settings.ARCHIVE_DELAY) > time.time() : + return False + return data[0] + + def archive_found(self,found_time): + self.dbc.execute("insert into shares_archive_found select * from shares where upstream_result = true and time <= to_timestamp(%s)", [found_time]) + self.dbh.commit() + + def archive_to_db(self,found_time): + self.dbc.execute("insert into shares_archive select * from shares where time <= to_timestamp(%s)",[found_time]) + self.dbh.commit() + + def archive_cleanup(self,found_time): + self.dbc.execute("delete from shares where time <= to_timestamp(%s)",[found_time]) + self.dbh.commit() + + def archive_get_shares(self,found_time): + self.dbc.execute("select * from shares where time <= to_timestamp(%s)",[found_time]) + return self.dbc + + def import_shares(self,data): + log.debug("Importing Shares") +# 0 1 2 3 4 5 6 7 8 9 10 +# data: [worker_name,block_header,block_hash,difficulty,timestamp,is_valid,ip,block_height,prev_hash,invalid_reason,best_diff] + checkin_times = {} + total_shares = 0 + best_diff = 0 + for k,v in enumerate(data): + if settings.DATABASE_EXTEND : + total_shares += v[3] + if v[0] 
in checkin_times: + if v[4] > checkin_times[v[0]] : + checkin_times[v[0]]["time"] = v[4] + else: + checkin_times[v[0]] = {"time": v[4], "shares": 0, "rejects": 0 } + + if v[5] == True : + checkin_times[v[0]]["shares"] += v[3] + else : + checkin_times[v[0]]["rejects"] += v[3] + + if v[10] > best_diff: + best_diff = v[10] + + self.dbc.execute("insert into shares " +\ + "(time,rem_host,username,our_result,upstream_result,reason,solution,block_num,prev_block_hash,useragent,difficulty) " +\ + "VALUES (to_timestamp(%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)", + (v[4],v[6],v[0],bool(v[5]),False,v[9],'',v[7],v[8],'',v[3]) ) + else : + self.dbc.execute("insert into shares (time,rem_host,username,our_result,upstream_result,reason,solution) VALUES " +\ + "(to_timestamp(%s),%s,%s,%s,%s,%s,%s)", + (v[4],v[6],v[0],bool(v[5]),False,v[9],'') ) + + if settings.DATABASE_EXTEND : + self.dbc.execute("select value from pool where parameter = 'round_shares'") + round_shares = int(self.dbc.fetchone()[0]) + total_shares + self.dbc.execute("update pool set value = %s where parameter = 'round_shares'",[round_shares]) + + self.dbc.execute("select value from pool where parameter = 'round_best_share'") + round_best_share = int(self.dbc.fetchone()[0]) + if best_diff > round_best_share: + self.dbc.execute("update pool set value = %s where parameter = 'round_best_share'",[best_diff]) + + self.dbc.execute("select value from pool where parameter = 'bitcoin_difficulty'") + difficulty = float(self.dbc.fetchone()[0]) + + if difficulty == 0: + progress = 0 + else: + progress = (round_shares/difficulty)*100 + self.dbc.execute("update pool set value = %s where parameter = 'round_progress'",[progress]) + + for k,v in checkin_times.items(): + self.dbc.execute("update pool_worker set last_checkin = to_timestamp(%s), total_shares = total_shares + %s, total_rejects = total_rejects + %s where username = %s", + (v["time"],v["shares"],v["rejects"],k)) + + self.dbh.commit() + + + def found_block(self,data): + # Note: difficulty = -1 here + self.dbc.execute("update shares set upstream_result = %s, solution = %s where id in (select id from shares where time = to_timestamp(%s) and username = %s limit 1)", + (bool(data[5]),data[2],data[4],data[0])) + if settings.DATABASE_EXTEND and data[5] == True : + self.dbc.execute("update pool_worker set total_found = total_found + 1 where username = %s",(data[0],)) + self.dbc.execute("select value from pool where parameter = 'pool_total_found'") + total_found = int(self.dbc.fetchone()[0]) + 1 + self.dbc.executemany("update pool set value = %s where parameter = %s",[(0,'round_shares'), + (0,'round_progress'), + (0,'round_best_share'), + (time.time(),'round_start'), + (total_found,'pool_total_found') + ]) + self.dbh.commit() + + def get_user(self, id_or_username): + log.debug("Finding user with id or username of %s", id_or_username) + cursor = self.dbh.cursor(cursor_factory=extras.DictCursor) + + cursor.execute( + """ + SELECT * + FROM pool_worker + WHERE id = %(id)s + OR username = %(uname)s + """, + { + "id": id_or_username if id_or_username.isdigit() else -1, + "uname": id_or_username + } + ) + + user = cursor.fetchone() + cursor.close() + return user + + def list_users(self): + cursor = self.dbh.cursor(cursor_factory=extras.DictCursor) + cursor.execute( + """ + SELECT * + FROM pool_worker + WHERE id > 0 + """ + ) + + while True: + results = cursor.fetchmany() + if not results: + break + + for result in results: + yield result + + def delete_user(self, id_or_username): + log.debug("Deleting Username") + 
self.dbc.execute( + """ + delete from pool_worker where id = %(id)s or username = %(uname)s + """, + { + "id": id_or_username if id_or_username.isdigit() else -1, + "uname": id_or_username + } + ) + self.dbh.commit() + + def insert_user(self,username,password): + log.debug("Adding Username/Password") + m = hashlib.sha1() + m.update(password) + m.update(self.salt) + self.dbc.execute("insert into pool_worker (username,password) VALUES (%s,%s)", + (username, m.hexdigest() )) + self.dbh.commit() + + return str(username) + + def update_user(self, id_or_username, password): + log.debug("Updating Username/Password") + m = hashlib.sha1() + m.update(password) + m.update(self.salt) + self.dbc.execute( + """ + update pool_worker set password = %(pass)s where id = %(id)s or username = %(uname)s + """, + { + "id": id_or_username if id_or_username.isdigit() else -1, + "uname": id_or_username, + "pass": m.hexdigest() + } + ) + self.dbh.commit() + + def update_worker_diff(self,username,diff): + self.dbc.execute("update pool_worker set difficulty = %s where username = %s",(diff,username)) + self.dbh.commit() + + def clear_worker_diff(self): + if settings.DATABASE_EXTEND == True : + self.dbc.execute("update pool_worker set difficulty = 0") + self.dbh.commit() + + def check_password(self,username,password): + log.debug("Checking Username/Password") + m = hashlib.sha1() + m.update(password) + m.update(self.salt) + self.dbc.execute("select COUNT(*) from pool_worker where username = %s and password = %s", + (username, m.hexdigest() )) + data = self.dbc.fetchone() + if data[0] > 0 : + return True + return False + + def update_pool_info(self,pi): + self.dbc.executemany("update pool set value = %s where parameter = %s",[(pi['blocks'],"bitcoin_blocks"), + (pi['balance'],"bitcoin_balance"), + (pi['connections'],"bitcoin_connections"), + (pi['difficulty'],"bitcoin_difficulty"), + (time.time(),"bitcoin_infotime") + ]) + self.dbh.commit() + + def get_pool_stats(self): + self.dbc.execute("select * from pool") + ret = {} + for data in self.dbc.fetchall(): + ret[data[0]] = data[1] + return ret + + def get_workers_stats(self): + self.dbc.execute("select username,speed,last_checkin,total_shares,total_rejects,total_found,alive,difficulty from pool_worker") + ret = {} + for data in self.dbc.fetchall(): + ret[data[0]] = { "username" : data[0], + "speed" : data[1], + "last_checkin" : time.mktime(data[2].timetuple()), + "total_shares" : data[3], + "total_rejects" : data[4], + "total_found" : data[5], + "alive" : data[6], + "difficulty" : data[7] } + return ret + + def close(self): + self.dbh.close() + + def check_tables(self): + log.debug("Checking Tables") + + shares_exist = False + self.dbc.execute("select COUNT(*) from pg_catalog.pg_tables where schemaname = %(schema)s and tablename = 'shares'", + {"schema": settings.DB_PGSQL_SCHEMA }) + data = self.dbc.fetchone() + if data[0] <= 0 : + self.update_version_1() + + if settings.DATABASE_EXTEND == True : + self.update_tables() + + + def update_tables(self): + version = 0 + current_version = 7 + while version < current_version : + self.dbc.execute("select value from pool where parameter = 'DB Version'") + data = self.dbc.fetchone() + version = int(data[0]) + if version < current_version : + log.info("Updating Database from %i to %i" % (version, version +1)) + getattr(self, 'update_version_' + str(version) )() + + def update_version_1(self): + if settings.DATABASE_EXTEND == True : + self.dbc.execute("create table shares" +\ + "(id serial primary key,time timestamp,rem_host TEXT, 
username TEXT, our_result BOOLEAN, upstream_result BOOLEAN, reason TEXT, solution TEXT, " +\ + "block_num INTEGER, prev_block_hash TEXT, useragent TEXT, difficulty INTEGER)") + self.dbc.execute("create index shares_username ON shares(username)") + self.dbc.execute("create table pool_worker" +\ + "(id serial primary key,username TEXT, password TEXT, speed INTEGER, last_checkin timestamp)") + self.dbc.execute("create index pool_worker_username ON pool_worker(username)") + self.dbc.execute("alter table pool_worker add total_shares INTEGER default 0") + self.dbc.execute("alter table pool_worker add total_rejects INTEGER default 0") + self.dbc.execute("alter table pool_worker add total_found INTEGER default 0") + self.dbc.execute("create table pool(parameter TEXT, value TEXT)") + self.dbc.execute("insert into pool (parameter,value) VALUES ('DB Version',2)") + else : + self.dbc.execute("create table shares" + \ + "(id serial,time timestamp,rem_host TEXT, username TEXT, our_result BOOLEAN, upstream_result BOOLEAN, reason TEXT, solution TEXT)") + self.dbc.execute("create index shares_username ON shares(username)") + self.dbc.execute("create table pool_worker(id serial,username TEXT, password TEXT)") + self.dbc.execute("create index pool_worker_username ON pool_worker(username)") + self.dbh.commit() + + def update_version_2(self): + log.info("running update 2") + self.dbc.executemany("insert into pool (parameter,value) VALUES (%s,%s)",[('bitcoin_blocks',0), + ('bitcoin_balance',0), + ('bitcoin_connections',0), + ('bitcoin_difficulty',0), + ('pool_speed',0), + ('pool_total_found',0), + ('round_shares',0), + ('round_progress',0), + ('round_start',time.time()) + ]) + self.dbc.execute("update pool set value = 3 where parameter = 'DB Version'") + self.dbh.commit() + + def update_version_3(self): + log.info("running update 3") + self.dbc.executemany("insert into pool (parameter,value) VALUES (%s,%s)",[ + ('round_best_share',0), + ('bitcoin_infotime',0) + ]) + self.dbc.execute("alter table pool_worker add alive BOOLEAN") + self.dbc.execute("update pool set value = 4 where parameter = 'DB Version'") + self.dbh.commit() + + def update_version_4(self): + log.info("running update 4") + self.dbc.execute("alter table pool_worker add difficulty INTEGER default 0") + self.dbc.execute("create table shares_archive" +\ + "(id serial primary key,time timestamp,rem_host TEXT, username TEXT, our_result BOOLEAN, upstream_result BOOLEAN, reason TEXT, solution TEXT, " +\ + "block_num INTEGER, prev_block_hash TEXT, useragent TEXT, difficulty INTEGER)") + self.dbc.execute("create table shares_archive_found" +\ + "(id serial primary key,time timestamp,rem_host TEXT, username TEXT, our_result BOOLEAN, upstream_result BOOLEAN, reason TEXT, solution TEXT, " +\ + "block_num INTEGER, prev_block_hash TEXT, useragent TEXT, difficulty INTEGER)") + self.dbc.execute("update pool set value = 5 where parameter = 'DB Version'") + self.dbh.commit() + + def update_version_5(self): + log.info("running update 5") + # Adding Primary key to table: pool + self.dbc.execute("alter table pool add primary key (parameter)") + self.dbh.commit() + # Adjusting indicies on table: shares + self.dbc.execute("DROP INDEX shares_username") + self.dbc.execute("CREATE INDEX shares_time_username ON shares(time,username)") + self.dbc.execute("CREATE INDEX shares_upstreamresult ON shares(upstream_result)") + self.dbh.commit() + + self.dbc.execute("update pool set value = 6 where parameter = 'DB Version'") + self.dbh.commit() + + def update_version_6(self): + 
log.info("running update 6") + + try: + self.dbc.execute("CREATE EXTENSION pgcrypto") + except psycopg2.ProgrammingError: + log.info("pgcrypto already added to database") + except psycopg2.OperationalError: + raise Exception("Could not add pgcrypto extension to database. Have you got it installed? Ubuntu is postgresql-contrib") + self.dbh.commit() + + # Optimising table layout + self.dbc.execute("ALTER TABLE pool " +\ + "ALTER COLUMN parameter TYPE character varying(128), ALTER COLUMN value TYPE character varying(512);") + self.dbh.commit() + + self.dbc.execute("UPDATE pool_worker SET password = encode(digest(concat(password, %s), 'sha1'), 'hex') WHERE id > 0", [self.salt]) + self.dbh.commit() + + self.dbc.execute("ALTER TABLE pool_worker " +\ + "ALTER COLUMN username TYPE character varying(512), ALTER COLUMN password TYPE character(40), " +\ + "ADD CONSTRAINT username UNIQUE (username)") + self.dbh.commit() + + self.dbc.execute("update pool set value = 7 where parameter = 'DB Version'") + self.dbh.commit() + diff --git a/mining/DB_Sqlite.py b/mining/DB_Sqlite.py new file mode 100644 index 0000000..5218439 --- /dev/null +++ b/mining/DB_Sqlite.py @@ -0,0 +1,299 @@ +import time +from stratum import settings +import stratum.logger +log = stratum.logger.get_logger('DB_Sqlite') + +import sqlite3 + +class DB_Sqlite(): + def __init__(self): + log.debug("Connecting to DB") + self.dbh = sqlite3.connect(settings.DB_SQLITE_FILE) + self.dbc = self.dbh.cursor() + + def updateStats(self,averageOverTime): + log.debug("Updating Stats") + # Note: we are using transactions... so we can set the speed = 0 and it doesn't take affect until we are commited. + self.dbc.execute("update pool_worker set speed = 0, alive = 0"); + stime = '%.2f' % ( time.time() - averageOverTime ); + self.dbc.execute("select username,SUM(difficulty) from shares where time > :time group by username", {'time':stime}) + total_speed = 0 + sqldata = [] + for name,shares in self.dbc.fetchall(): + speed = int(int(shares) * pow(2,32)) / ( int(averageOverTime) * 1000 * 1000) + total_speed += speed + sqldata.append({'speed':speed,'user':name}) + self.dbc.executemany("update pool_worker set speed = :speed, alive = 1 where username = :user",sqldata) + self.dbc.execute("update pool set value = :val where parameter = 'pool_speed'",{'val':total_speed}) + self.dbh.commit() + + def archive_check(self): + # Check for found shares to archive + self.dbc.execute("select time from shares where upstream_result = 1 order by time limit 1") + data = self.dbc.fetchone() + if data is None or (data[0] + settings.ARCHIVE_DELAY) > time.time() : + return False + return data[0] + + def archive_found(self,found_time): + self.dbc.execute("insert into shares_archive_found select * from shares where upstream_result = 1 and time <= :time",{'time':found_time}) + self.dbh.commit() + + def archive_to_db(self,found_time): + self.dbc.execute("insert into shares_archive select * from shares where time <= :time",{'time':found_time}) + self.dbh.commit() + + def archive_cleanup(self,found_time): + self.dbc.execute("delete from shares where time <= :time",{'time':found_time}) + self.dbc.execute("vacuum") + self.dbh.commit() + + def archive_get_shares(self,found_time): + self.dbc.execute("select * from shares where time <= :time",{'time':found_time}) + return self.dbc + + def import_shares(self,data): + log.debug("Importing Shares") +# 0 1 2 3 4 5 6 7 8 9 10 +# data: 
[worker_name,block_header,block_hash,difficulty,timestamp,is_valid,ip,block_height,prev_hash,invalid_reason,share_diff] + checkin_times = {} + total_shares = 0 + best_diff = 0 + sqldata = [] + for k,v in enumerate(data): + if settings.DATABASE_EXTEND : + total_shares += v[3] + if v[0] in checkin_times: + if v[4] > checkin_times[v[0]] : + checkin_times[v[0]]["time"] = v[4] + else: + checkin_times[v[0]] = {"time": v[4], "shares": 0, "rejects": 0 } + + if v[5] == True : + checkin_times[v[0]]["shares"] += v[3] + else : + checkin_times[v[0]]["rejects"] += v[3] + + if v[10] > best_diff: + best_diff = v[10] + + sqldata.append({'time':v[4],'rem_host':v[6],'username':v[0],'our_result':v[5],'upstream_result':0,'reason':v[9],'solution':'', + 'block_num':v[7],'prev_block_hash':v[8],'ua':'','diff':v[3]} ) + else : + sqldata.append({'time':v[4],'rem_host':v[6],'username':v[0],'our_result':v[5],'upstream_result':0,'reason':v[9],'solution':''} ) + + if settings.DATABASE_EXTEND : + self.dbc.executemany("insert into shares " +\ + "(time,rem_host,username,our_result,upstream_result,reason,solution,block_num,prev_block_hash,useragent,difficulty) " +\ + "VALUES (:time,:rem_host,:username,:our_result,:upstream_result,:reason,:solution,:block_num,:prev_block_hash,:ua,:diff)",sqldata) + + + self.dbc.execute("select value from pool where parameter = 'round_shares'") + round_shares = int(self.dbc.fetchone()[0]) + total_shares + self.dbc.execute("update pool set value = :val where parameter = 'round_shares'",{'val':round_shares}) + + self.dbc.execute("select value from pool where parameter = 'round_best_share'") + round_best_share = int(self.dbc.fetchone()[0]) + if best_diff > round_best_share: + self.dbc.execute("update pool set value = :val where parameter = 'round_best_share'",{'val':best_diff}) + + self.dbc.execute("select value from pool where parameter = 'bitcoin_difficulty'") + difficulty = float(self.dbc.fetchone()[0]) + + if difficulty == 0: + progress = 0 + else: + progress = (round_shares/difficulty)*100 + self.dbc.execute("update pool set value = :val where parameter = 'round_progress'",{'val':progress}) + + sqldata = [] + for k,v in checkin_times.items(): + sqldata.append({'last_checkin':v["time"],'addshares':v["shares"],'addrejects':v["rejects"],'user':k}) + self.dbc.executemany("update pool_worker set last_checkin = :last_checkin, total_shares = total_shares + :addshares, " +\ + "total_rejects = total_rejects + :addrejects where username = :user",sqldata) + else: + self.dbc.executemany("insert into shares (time,rem_host,username,our_result,upstream_result,reason,solution) " +\ + "VALUES (:time,:rem_host,:username,:our_result,:upstream_result,:reason,:solution)",sqldata) + + self.dbh.commit() + + def found_block(self,data): + # Note: difficulty = -1 here + self.dbc.execute("update shares set upstream_result = :usr, solution = :sol where time = :time and username = :user", + {'usr':data[5],'sol':data[2],'time':data[4],'user':data[0]}) + if settings.DATABASE_EXTEND and data[5] == True : + self.dbc.execute("update pool_worker set total_found = total_found + 1 where username = :user",{'user':data[0]}) + self.dbc.execute("select value from pool where parameter = 'pool_total_found'") + total_found = int(self.dbc.fetchone()[0]) + 1 + self.dbc.executemany("update pool set value = :val where parameter = :parm", [{'val':0,'parm':'round_shares'}, + {'val':0,'parm':'round_progress'}, + {'val':0,'parm':'round_best_share'}, + {'val':time.time(),'parm':'round_start'}, + {'val':total_found,'parm':'pool_total_found'} + 
]) + self.dbh.commit() + + def get_user(self, id_or_username): + raise NotImplementedError('Not implemented for SQLite') + + def list_users(self): + raise NotImplementedError('Not implemented for SQLite') + + def delete_user(self,id_or_username): + raise NotImplementedError('Not implemented for SQLite') + + def insert_user(self,username,password): + log.debug("Adding Username/Password") + self.dbc.execute("insert into pool_worker (username,password) VALUES (:user,:pass)", {'user':username,'pass':password}) + self.dbh.commit() + + def update_user(self,username,password): + raise NotImplementedError('Not implemented for SQLite') + + def check_password(self,username,password): + log.debug("Checking Username/Password") + self.dbc.execute("select COUNT(*) from pool_worker where username = :user and password = :pass", {'user':username,'pass':password}) + data = self.dbc.fetchone() + if data[0] > 0 : + return True + return False + + def update_worker_diff(self,username,diff): + self.dbc.execute("update pool_worker set difficulty = :diff where username = :user",{'diff':diff,'user':username}) + self.dbh.commit() + + def clear_worker_diff(self): + if settings.DATABASE_EXTEND == True : + self.dbc.execute("update pool_worker set difficulty = 0") + self.dbh.commit() + + def update_pool_info(self,pi): + self.dbc.executemany("update pool set value = :val where parameter = :parm",[{'val':pi['blocks'],'parm':"bitcoin_blocks"}, + {'val':pi['balance'],'parm':"bitcoin_balance"}, + {'val':pi['connections'],'parm':"bitcoin_connections"}, + {'val':pi['difficulty'],'parm':"bitcoin_difficulty"}, + {'val':time.time(),'parm':"bitcoin_infotime"} + ]) + self.dbh.commit() + + def get_pool_stats(self): + self.dbc.execute("select * from pool") + ret = {} + for data in self.dbc.fetchall(): + ret[data[0]] = data[1] + return ret + + def get_workers_stats(self): + self.dbc.execute("select username,speed,last_checkin,total_shares,total_rejects,total_found,alive,difficulty from pool_worker") + ret = {} + for data in self.dbc.fetchall(): + ret[data[0]] = { "username" : data[0], + "speed" : data[1], + "last_checkin" : data[2], + "total_shares" : data[3], + "total_rejects" : data[4], + "total_found" : data[5], + "alive" : data[6], + "difficulty" : data[7] } + return ret + + def close(self): + self.dbh.close() + + def check_tables(self): + log.debug("Checking Tables") + if settings.DATABASE_EXTEND == True : + self.dbc.execute("create table if not exists shares" +\ + "(time DATETIME,rem_host TEXT, username TEXT, our_result INTEGER, upstream_result INTEGER, reason TEXT, solution TEXT, " +\ + "block_num INTEGER, prev_block_hash TEXT, useragent TEXT, difficulty INTEGER)") + self.dbc.execute("create table if not exists pool_worker" +\ + "(username TEXT, password TEXT, speed INTEGER, last_checkin DATETIME)") + self.dbc.execute("create table if not exists pool(parameter TEXT, value TEXT)") + + self.dbc.execute("select COUNT(*) from pool where parameter = 'DB Version'") + data = self.dbc.fetchone() + if data[0] <= 0: + self.dbc.execute("alter table pool_worker add total_shares INTEGER default 0") + self.dbc.execute("alter table pool_worker add total_rejects INTEGER default 0") + self.dbc.execute("alter table pool_worker add total_found INTEGER default 0") + self.dbc.execute("insert into pool (parameter,value) VALUES ('DB Version',2)") + self.update_tables() + else : + self.dbc.execute("create table if not exists shares" + \ + "(time DATETIME,rem_host TEXT, username TEXT, our_result INTEGER, upstream_result INTEGER, reason TEXT, solution 
TEXT)") + self.dbc.execute("create table if not exists pool_worker(username TEXT, password TEXT)") + self.dbc.execute("create index if not exists pool_worker_username ON pool_worker(username)") + + def update_tables(self): + version = 0 + current_version = 6 + while version < current_version : + self.dbc.execute("select value from pool where parameter = 'DB Version'") + data = self.dbc.fetchone() + version = int(data[0]) + if version < current_version : + log.info("Updating Database from %i to %i" % (version, version +1)) + getattr(self, 'update_version_' + str(version) )() + + + def update_version_2(self): + log.info("running update 2") + self.dbc.executemany("insert into pool (parameter,value) VALUES (?,?)",[('bitcoin_blocks',0), + ('bitcoin_balance',0), + ('bitcoin_connections',0), + ('bitcoin_difficulty',0), + ('pool_speed',0), + ('pool_total_found',0), + ('round_shares',0), + ('round_progress',0), + ('round_start',time.time()) + ]) + self.dbc.execute("create index if not exists shares_username ON shares(username)") + self.dbc.execute("create index if not exists pool_worker_username ON pool_worker(username)") + self.dbc.execute("update pool set value = 3 where parameter = 'DB Version'") + self.dbh.commit() + + def update_version_3(self): + log.info("running update 3") + self.dbc.executemany("insert into pool (parameter,value) VALUES (?,?)",[ + ('round_best_share',0), + ('bitcoin_infotime',0), + ]) + self.dbc.execute("alter table pool_worker add alive INTEGER default 0") + self.dbc.execute("update pool set value = 4 where parameter = 'DB Version'") + self.dbh.commit() + + def update_version_4(self): + log.info("running update 4") + self.dbc.execute("alter table pool_worker add difficulty INTEGER default 0") + self.dbc.execute("create table if not exists shares_archive" +\ + "(time DATETIME,rem_host TEXT, username TEXT, our_result INTEGER, upstream_result INTEGER, reason TEXT, solution TEXT, " +\ + "block_num INTEGER, prev_block_hash TEXT, useragent TEXT, difficulty INTEGER)") + self.dbc.execute("create table if not exists shares_archive_found" +\ + "(time DATETIME,rem_host TEXT, username TEXT, our_result INTEGER, upstream_result INTEGER, reason TEXT, solution TEXT, " +\ + "block_num INTEGER, prev_block_hash TEXT, useragent TEXT, difficulty INTEGER)") + self.dbc.execute("update pool set value = 5 where parameter = 'DB Version'") + self.dbh.commit() + + def update_version_5(self): + log.info("running update 5") + # Adding Primary key to table: pool + self.dbc.execute("alter table pool rename to pool_old") + self.dbc.execute("create table if not exists pool(parameter TEXT, value TEXT, primary key(parameter))") + self.dbc.execute("insert into pool select * from pool_old") + self.dbc.execute("drop table pool_old") + self.dbh.commit() + # Adding Primary key to table: pool_worker + self.dbc.execute("alter table pool_worker rename to pool_worker_old") + self.dbc.execute("CREATE TABLE pool_worker(username TEXT, password TEXT, speed INTEGER, last_checkin DATETIME, total_shares INTEGER default 0, total_rejects INTEGER default 0, total_found INTEGER default 0, alive INTEGER default 0, difficulty INTEGER default 0, primary key(username))") + self.dbc.execute("insert into pool_worker select * from pool_worker_old") + self.dbc.execute("drop table pool_worker_old") + self.dbh.commit() + # Adjusting indicies on table: shares + self.dbc.execute("DROP INDEX shares_username") + self.dbc.execute("CREATE INDEX shares_time_username ON shares(time,username)") + self.dbc.execute("CREATE INDEX 
shares_upstreamresult ON shares(upstream_result)") + self.dbh.commit() + + self.dbc.execute("update pool set value = 6 where parameter = 'DB Version'") + self.dbh.commit() From 0d57d4e725f8544782a6d4b76a64b12a032c78fa Mon Sep 17 00:00:00 2001 From: Ahmed Bodiwala Date: Sun, 8 Dec 2013 02:46:19 +0000 Subject: [PATCH 15/20] Removed Uneeded Extend Option --- conf/config_sample.py | 1 - 1 file changed, 1 deletion(-) diff --git a/conf/config_sample.py b/conf/config_sample.py index 344f23a..fe40bf0 100644 --- a/conf/config_sample.py +++ b/conf/config_sample.py @@ -89,7 +89,6 @@ PASSWORD_SALT = 'some_crazy_string' # ******************** Database ********************* DATABASE_DRIVER = 'sqlite' # Options: none, sqlite, postgresql or mysql -DATABASE_EXTEND = True # False = pushpool db layout, True = pushpool + extra columns # SQLite DB_SQLITE_FILE = 'pooldb.sqlite' From 75961601cd9091fe85d88b7aece76bec63f2ced2 Mon Sep 17 00:00:00 2001 From: ahmedbodi Date: Mon, 9 Dec 2013 15:49:03 +0000 Subject: [PATCH 16/20] Update DBInterface.py --- mining/DBInterface.py | 41 +++++++++++++++++++++-------------------- 1 file changed, 21 insertions(+), 20 deletions(-) diff --git a/mining/DBInterface.py b/mining/DBInterface.py index a5539bc..645b9b4 100644 --- a/mining/DBInterface.py +++ b/mining/DBInterface.py @@ -39,34 +39,35 @@ class DBInterface(): self.bitcoinrpc = bitcoinrpc def connectDB(self): - if settings.VARIABLE_DIFF: + if settings.VARIABLE_DIFF and settings.DATABASE_DRIVER == "mysql": log.debug("DB_Mysql_Vardiff INIT") import DB_Mysql_Vardiff return DB_Mysql_Vardiff.DB_Mysql_Vardiff() + else log.debug('DB_Mysql INIT') + import DB_Mysql + return DB_Mysql.DB_Mysql() elif settings.DATABASE_DRIVER == "sqlite": - log.debug('DB_Sqlite INIT') - import DB_Sqlite - return DB_Sqlite.DB_Sqlite() + log.debug('DB_Sqlite INIT') + import DB_Sqlite + return DB_Sqlite.DB_Sqlite() elif settings.DATABASE_DRIVER == "mysql": - log.debug('DB_Mysql INIT') - import DB_Mysql - return DB_Mysql.DB_Mysql() + log.debug('DB_Mysql INIT') + import DB_Mysql + return DB_Mysql.DB_Mysql() elif settings.DATABASE_DRIVER == "postgresql": - log.debug('DB_Postgresql INIT') - import DB_Postgresql - return DB_Postgresql.DB_Postgresql() + log.debug('DB_Postgresql INIT') + import DB_Postgresql + return DB_Postgresql.DB_Postgresql() elif settings.DATABASE_DRIVER == "none": - log.debug('DB_None INIT') - import DB_None - return DB_None.DB_None() + log.debug('DB_None INIT') + import DB_None + return DB_None.DB_None() else: - log.error('Invalid DATABASE_DRIVER -- using NONE') - log.debug('DB_None INIT') - import DB_None - return DB_None.DB_None() - log.debug('DB_Mysql INIT') - import DB_Mysql - return DB_Mysql.DB_Mysql() + log.error('Invalid DATABASE_DRIVER -- using NONE') + log.debug('DB_None INIT') + import DB_None + return DB_None.DB_None() + def clearusercache(self): log.debug("DBInterface.clearusercache called") From 709643ae561c44ac6ba8e9e0a9d1f7230ebea350 Mon Sep 17 00:00:00 2001 From: ahmedbodi Date: Wed, 11 Dec 2013 15:15:45 +0000 Subject: [PATCH 18/20] Update interfaces.py --- mining/interfaces.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mining/interfaces.py b/mining/interfaces.py index 0370d06..cf13cd6 100644 --- a/mining/interfaces.py +++ b/mining/interfaces.py @@ -17,7 +17,7 @@ dbi.init_main() class WorkerManagerInterface(object): def __init__(self): - self.worker_log = {} + self.worker_log = {} self.worker_log.setdefault('authorized', {}) return From 6f0d5f016902d2600122c6ab93f22cf2a4ae22d9 Mon Sep 17 00:00:00 2001 
From: ahmedbodi Date: Thu, 12 Dec 2013 13:34:04 +0000 Subject: [PATCH 19/20] Fixed DBI error --- mining/DBInterface.py | 46 +++++++++++++++++++++---------------------- 1 file changed, 22 insertions(+), 24 deletions(-) diff --git a/mining/DBInterface.py b/mining/DBInterface.py index 645b9b4..6dafd26 100644 --- a/mining/DBInterface.py +++ b/mining/DBInterface.py @@ -39,34 +39,32 @@ class DBInterface(): self.bitcoinrpc = bitcoinrpc def connectDB(self): - if settings.VARIABLE_DIFF and settings.DATABASE_DRIVER == "mysql": - log.debug("DB_Mysql_Vardiff INIT") - import DB_Mysql_Vardiff - return DB_Mysql_Vardiff.DB_Mysql_Vardiff() - else log.debug('DB_Mysql INIT') - import DB_Mysql - return DB_Mysql.DB_Mysql() - elif settings.DATABASE_DRIVER == "sqlite": - log.debug('DB_Sqlite INIT') - import DB_Sqlite - return DB_Sqlite.DB_Sqlite() + if settings.DATABASE_DRIVER == "sqlite": + log.debug('DB_Sqlite INIT') + import DB_Sqlite + return DB_Sqlite.DB_Sqlite() elif settings.DATABASE_DRIVER == "mysql": - log.debug('DB_Mysql INIT') - import DB_Mysql - return DB_Mysql.DB_Mysql() + if settings.VARIABLE_DIFF: + log.debug("DB_Mysql_Vardiff INIT") + import DB_Mysql_Vardiff + return DB_Mysql_Vardiff.DB_Mysql_Vardiff() + else: + log.debug('DB_Mysql INIT') + import DB_Mysql + return DB_Mysql.DB_Mysql() elif settings.DATABASE_DRIVER == "postgresql": - log.debug('DB_Postgresql INIT') - import DB_Postgresql - return DB_Postgresql.DB_Postgresql() + log.debug('DB_Postgresql INIT') + import DB_Postgresql + return DB_Postgresql.DB_Postgresql() elif settings.DATABASE_DRIVER == "none": - log.debug('DB_None INIT') - import DB_None - return DB_None.DB_None() + log.debug('DB_None INIT') + import DB_None + return DB_None.DB_None() else: - log.error('Invalid DATABASE_DRIVER -- using NONE') - log.debug('DB_None INIT') - import DB_None - return DB_None.DB_None() + log.error('Invalid DATABASE_DRIVER -- using NONE') + log.debug('DB_None INIT') + import DB_None + return DB_None.DB_None() def clearusercache(self): From 9ed61bd5f968d4a0816ebe27e1a594f9772ac133 Mon Sep 17 00:00:00 2001 From: ahmedbodi Date: Thu, 12 Dec 2013 13:41:00 +0000 Subject: [PATCH 20/20] Fixed DBI error --- mining/DBInterface.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mining/DBInterface.py b/mining/DBInterface.py index 6dafd26..2749166 100644 --- a/mining/DBInterface.py +++ b/mining/DBInterface.py @@ -48,7 +48,7 @@ class DBInterface(): log.debug("DB_Mysql_Vardiff INIT") import DB_Mysql_Vardiff return DB_Mysql_Vardiff.DB_Mysql_Vardiff() - else: + else: log.debug('DB_Mysql INIT') import DB_Mysql return DB_Mysql.DB_Mysql()
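
For reference, two conventions recur across the new database backends above: DB_Postgresql stores worker passwords as a hex-encoded SHA-1 of the password concatenated with PASSWORD_SALT (insert_user, check_password, and the pgcrypto migration in update_version_6 all follow it), and both DB_Postgresql.updateStats and DB_Sqlite.updateStats estimate a worker's hashrate by treating each unit of share difficulty as roughly 2**32 hashes spread over the averaging window. Below is a minimal standalone sketch of both conventions, assuming a placeholder salt value and a hypothetical 300-second averaging window rather than the values the pool reads from config.py; it is illustrative only, not part of the patch series.

    import hashlib

    PASSWORD_SALT = 'some_crazy_string'  # placeholder; the pool reads this from config.py

    def hash_password(password, salt=PASSWORD_SALT):
        # Same scheme as DB_Postgresql.insert_user()/check_password():
        # hex-encoded SHA-1 over the password followed by the salt.
        m = hashlib.sha1()
        m.update(password.encode('utf-8'))
        m.update(salt.encode('utf-8'))
        return m.hexdigest()

    def estimate_speed_mhs(share_difficulty_sum, average_over_time):
        # Same arithmetic as updateStats(): one difficulty-1 share stands for
        # about 2**32 hashes, so the difficulty summed over the averaging
        # window, divided by the window length, gives hashes/s, scaled to MH/s.
        return int(share_difficulty_sum) * pow(2, 32) / (int(average_over_time) * 1000 * 1000)

    # Hypothetical usage: 120 difficulty-1 shares in a 300 s window
    # works out to on the order of 1.7 GH/s for that worker.
    print(hash_password('workerpass'))
    print(estimate_speed_mhs(120, 300))

The update_version_6 migration produces the same digest on the server side, encode(digest(concat(password, salt), 'sha1'), 'hex'), so hashes written by insert_user and hashes rewritten by the migration remain interchangeable for check_password.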