even more whitespace fixes

This commit is contained in:
parent a6938e1b36
commit a5a6d29b07
@@ -9,7 +9,7 @@ You NEED to set the parameters in BASIC SETTINGS
# ******************** BASIC SETTINGS ***************
# These are the MUST BE SET parameters!

CENTRAL_WALLET = 'set_valid_addresss_in_config!' # local coin address where money goes

COINDAEMON_TRUSTED_HOST = 'localhost'
COINDAEMON_TRUSTED_PORT = 8332
@@ -50,7 +50,7 @@ DEBUG = False
LOGDIR = 'log/'

# Main application log file.
LOGFILE = None # eg. 'stratum.log'
LOGLEVEL = 'DEBUG'
# Logging Rotation can be enabled with the following settings
# It if not enabled here, you can set up logrotate to rotate the files.
@@ -104,39 +104,39 @@ DB_MYSQL_HOST = 'localhost'
DB_MYSQL_DBNAME = 'pooldb'
DB_MYSQL_USER = 'pooldb'
DB_MYSQL_PASS = '**empty**'
DB_MYSQL_PORT = 3306 # Default port for MySQL

# ******************** Adv. DB Settings *********************
# Don't change these unless you know what you are doing

DB_LOADER_CHECKTIME = 15 # How often we check to see if we should run the loader
DB_LOADER_REC_MIN = 10 # Min Records before the bulk loader fires
DB_LOADER_REC_MAX = 50 # Max Records the bulk loader will commit at a time
DB_LOADER_FORCE_TIME = 300 # How often the cache should be flushed into the DB regardless of size.
DB_STATS_AVG_TIME = 300 # When using the DATABASE_EXTEND option, average speed over X sec
# Note: this is also how often it updates
DB_USERCACHE_TIME = 600 # How long the usercache is good for before we refresh

# ******************** Pool Settings *********************

# User Auth Options
USERS_AUTOADD = False # Automatically add users to db when they connect.
# This basically disables User Auth for the pool.
USERS_CHECK_PASSWORD = False # Check the workers password? (Many pools don't)

# Transaction Settings
COINBASE_EXTRAS = '/stratumPool/' # Extra Descriptive String to incorporate in solved blocks
ALLOW_NONLOCAL_WALLET = False # Allow valid, but NON-Local wallet's

# Coin Daemon communication polling settings (In Seconds)
PREVHASH_REFRESH_INTERVAL = 5 # How often to check for new Blocks
# If using the blocknotify script (recommended) set = to MERKLE_REFRESH_INTERVAL
# (No reason to poll if we're getting pushed notifications)
MERKLE_REFRESH_INTERVAL = 60 # How often check memorypool
# This effectively resets the template and incorporates new transactions.
# This should be "slow"

INSTANCE_ID = 31 # Used for extranonce and needs to be 0-31

# ******************** Pool Difficulty Settings *********************
VDIFF_X2_TYPE = True # powers of 2 e.g. 2,4,8,16,32,64,128,256,512,1024

@@ -14,14 +14,14 @@ log = lib.logger.get_logger('bitcoin_rpc')
class BitcoinRPC(object):

    def __init__(self, host, port, username, password):
        log.debug("Got to Bitcoin RPC")
        self.bitcoin_url = 'http://%s:%d' % (host, port)
        self.credentials = base64.b64encode("%s:%s" % (username, password))
        self.headers = {
            'Content-Type': 'text/json',
            'Authorization': 'Basic %s' % self.credentials,
        }
        client.HTTPClientFactory.noisy = False

    def _call_raw(self, data):
        client.Headers
@@ -44,17 +44,17 @@ class BitcoinRPC(object):
    def submitblock(self, block_hex, hash_hex):
        # Try submitblock if that fails, go to getblocktemplate
        try:
            log.debug("Submitting Block with Submit Block ")
            log.debug([block_hex,])
            resp = (yield self._call('submitblock', [block_hex,]))
        except Exception:
            try:
                log.exception("Submit Block Failed, does the coind have submitblock?")
                log.exception("Trying GetBlockTemplate")
                resp = (yield self._call('getblocktemplate', [{'mode': 'submit', 'data': block_hex}]))
            except Exception as e:
                log.exception("Both SubmitBlock and GetBlockTemplate failed. Problem Submitting block %s" % str(e))
                log.exception("Try Enabling TX Messages in config.py!")
                raise

        if json.loads(resp)['result'] == None:
@@ -85,8 +85,8 @@ class BitcoinRPC(object):
    @defer.inlineCallbacks
    def validateaddress(self, address):
        resp = (yield self._call('validateaddress', [address,]))
        defer.returnValue(json.loads(resp)['result'])

    @defer.inlineCallbacks
    def getdifficulty(self):
        resp = (yield self._call('getdifficulty', []))
@@ -101,4 +101,3 @@ class BitcoinRPC(object):
        else:
            log.info("Cannot find block for %s" % hash_hex)
            defer.returnValue(False)

@@ -19,115 +19,116 @@ from lib.bitcoin_rpc import BitcoinRPC
class BitcoinRPCManager(object):

    def __init__(self):
        log.debug("Got to Bitcoin RPC Manager")
        self.conns = {}
        self.conns[0] = BitcoinRPC(settings.COINDAEMON_TRUSTED_HOST,
                                   settings.COINDAEMON_TRUSTED_PORT,
                                   settings.COINDAEMON_TRUSTED_USER,
                                   settings.COINDAEMON_TRUSTED_PASSWORD)
        self.curr_conn = 0
        for x in range (1, 99):
            if hasattr(settings, 'COINDAEMON_TRUSTED_HOST_' + str(x)) and hasattr(settings, 'COINDAEMON_TRUSTED_PORT_' + str(x)) and hasattr(settings, 'COINDAEMON_TRUSTED_USER_' + str(x)) and hasattr(settings, 'COINDAEMON_TRUSTED_PASSWORD_' + str(x)):
                self.conns[len(self.conns)] = BitcoinRPC(settings.__dict__['COINDAEMON_TRUSTED_HOST_' + str(x)],
                                                         settings.__dict__['COINDAEMON_TRUSTED_PORT_' + str(x)],
                                                         settings.__dict__['COINDAEMON_TRUSTED_USER_' + str(x)],
                                                         settings.__dict__['COINDAEMON_TRUSTED_PASSWORD_' + str(x)])

    def add_connection(self, host, port, user, password):
        # TODO: Some string sanity checks
        self.conns[len(self.conns)] = BitcoinRPC(host, port, user, password)

    def next_connection(self):
        time.sleep(1)
        if len(self.conns) <= 1:
            log.error("Problem with Pool 0 -- NO ALTERNATE POOLS!!!")
            time.sleep(4)
            return
        log.error("Problem with Pool %i Switching to Next!" % (self.curr_conn) )
        self.curr_conn = self.curr_conn + 1
        if self.curr_conn >= len(self.conns):
            self.curr_conn = 0

    @defer.inlineCallbacks
    def check_height(self):
        while True:
            try:
                resp = (yield self.conns[self.curr_conn]._call('getinfo', []))
                break
            except:
                log.error("Check Height -- Pool %i Down!" % (self.curr_conn) )
                self.next_connection()
        curr_height = json.loads(resp)['result']['blocks']
        log.debug("Check Height -- Current Pool %i : %i" % (self.curr_conn,curr_height) )
        for i in self.conns:
            if i == self.curr_conn:
                continue

            try:
                resp = (yield self.conns[i]._call('getinfo', []))
            except:
                log.error("Check Height -- Pool %i Down!" % (i,) )
                continue

            height = json.loads(resp)['result']['blocks']
            log.debug("Check Height -- Pool %i : %i" % (i,height) )
            if height > curr_height:
                self.curr_conn = i

        defer.returnValue(True)

    def _call_raw(self, data):
        while True:
            try:
                return self.conns[self.curr_conn]._call_raw(data)
            except:
                self.next_connection()

    def _call(self, method, params):
        while True:
            try:
                return self.conns[self.curr_conn]._call(method,params)
            except:
                self.next_connection()

    def submitblock(self, block_hex, hash_hex):
        while True:
            try:
                return self.conns[self.curr_conn].submitblock(block_hex, hash_hex)
            except:
                self.next_connection()

    def getinfo(self):
        while True:
            try:
                return self.conns[self.curr_conn].getinfo()
            except:
                self.next_connection()

    def getblocktemplate(self):
        while True:
            try:
                return self.conns[self.curr_conn].getblocktemplate()
            except:
                self.next_connection()

    def prevhash(self):
        self.check_height()
        while True:
            try:
                return self.conns[self.curr_conn].prevhash()
            except:
                self.next_connection()

    def validateaddress(self, address):
        while True:
            try:
                return self.conns[self.curr_conn].validateaddress(address)
            except:
                self.next_connection()

    def getdifficulty(self):
        while True:
            try:
                return self.conns[self.curr_conn].getdifficulty()
            except:
                self.next_connection()

@@ -27,8 +27,8 @@ class BlockTemplate(halfnode.CBlock):
    coinbase_transaction_class = CoinbaseTransaction

    def __init__(self, timestamper, coinbaser, job_id):
        log.debug("Got To Block_template.py")
        log.debug("Got To Block_template.py")
        super(BlockTemplate, self).__init__()

        self.job_id = job_id
@@ -56,12 +56,14 @@ class BlockTemplate(halfnode.CBlock):
        #txhashes = [None] + [ binascii.unhexlify(t['hash']) for t in data['transactions'] ]
        txhashes = [None] + [ util.ser_uint256(int(t['hash'], 16)) for t in data['transactions'] ]
        mt = merkletree.MerkleTree(txhashes)
        if settings.COINDAEMON_Reward == 'POW':
            coinbase = CoinbaseTransactionPOW(self.timestamper, self.coinbaser, data['coinbasevalue'],
                                              data['coinbaseaux']['flags'], data['height'],
                                              settings.COINBASE_EXTRAS)
        else:
            coinbase = CoinbaseTransactionPOS(self.timestamper, self.coinbaser, data['coinbasevalue'],
                                              data['coinbaseaux']['flags'], data['height'],
                                              settings.COINBASE_EXTRAS, data['curtime'])

        self.height = data['height']
        self.nVersion = data['version']

@@ -18,7 +18,7 @@ class BlockUpdater(object):

    def __init__(self, registry, bitcoin_rpc):
        log.debug("Got To Block Updater")
        self.bitcoin_rpc = bitcoin_rpc
        self.registry = registry
        self.clock = None
        self.schedule()
@@ -46,7 +46,7 @@ class BlockUpdater(object):
        current_prevhash = None

        log.info("Checking for new block.")
        prevhash = util.reverse_hash((yield self.bitcoin_rpc.prevhash()))
        if prevhash and prevhash != current_prevhash:
            log.info("New block! Prevhash: %s" % prevhash)
            update = True

105 lib/coinbaser.py
@@ -13,68 +13,70 @@ class SimpleCoinbaser(object):
    for all generated blocks.'''

    def __init__(self, bitcoin_rpc, address):
        log.debug("Got to coinbaser")
        # Fire Callback when the coinbaser is ready
        self.on_load = defer.Deferred()

        self.address = address
        self.is_valid = False

        self.bitcoin_rpc = bitcoin_rpc
        self._validate()

    def _validate(self):
        d = self.bitcoin_rpc.validateaddress(self.address)
        if settings.COINDAEMON_Reward == 'POW':
            d.addCallback(self._POW_address_check)
        else:
            d.addCallback(self._POS_address_check)

        d.addErrback(self._failure)

    def _POW_address_check(self, result):
        if result['isvalid'] and result['ismine']:
            self.is_valid = True
            log.info("Coinbase address '%s' is valid" % self.address)
            if result['isvalid'] == True:
                log.debug("Is Valid = %s" % result['isvalid'])
                log.debug("Address = %s " % result['address'])
                log.debug("PubKey = %s " % result['pubkey'])
                log.debug("Is Compressed = %s " % result['iscompressed'])
                log.debug("Account = %s " % result['account'])
            self.address = result['address']
            if not self.on_load.called:
                self.on_load.callback(True)

        elif result['isvalid'] and settings.ALLOW_NONLOCAL_WALLET == True :
            self.is_valid = True
            log.warning("!!! Coinbase address '%s' is valid BUT it is not local" % self.address)
            if not self.on_load.called:
                self.on_load.callback(True)

        else:
            self.is_valid = False
            log.error("Coinbase address '%s' is NOT valid!" % self.address)

    def _POS_address_check(self, result):
        if result['isvalid'] and result['ismine']:
            self.is_valid = True
            log.info("Coinbase address '%s' is valid" % self.address)
            if result['isvalid'] == True:
                log.debug("Is Valid = %s" % result['isvalid'])
                log.debug("Address = %s " % result['address'])
                log.debug("PubKey = %s " % result['pubkey'])
                log.debug("Is Compressed = %s " % result['iscompressed'])
                log.debug("Account = %s " % result['account'])
            self.pubkey = result['pubkey']
            if not self.on_load.called:
                self.on_load.callback(True)

        elif result['isvalid'] and settings.ALLOW_NONLOCAL_WALLET == True :
            self.is_valid = True
            log.warning("!!! Coinbase address '%s' is valid BUT it is not local" % self.address)
            self.pubkey = result['pubkey']
            if not self.on_load.called:
                self.on_load.callback(True)

        else:
            self.is_valid = False
            log.error("Coinbase address '%s' is NOT valid!" % self.address)

@@ -88,10 +90,11 @@ class SimpleCoinbaser(object):
        raise

    def get_script_pubkey(self):
        if settings.COINDAEMON_Reward == 'POW':
            self._validate()
            return util.script_to_address(self.address)
        else:
            return util.script_to_pubkey(self.pubkey)

    def get_coinbase_data(self):
        return ''

@@ -235,14 +235,14 @@ class CBlock(object):
        self.nNonce = 0
        self.vtx = []
        self.sha256 = None
        if settings.COINDAEMON_ALGO == 'scrypt':
            self.scrypt = None
        elif settings.COINDAEMON_ALGO == 'quark':
            self.quark = None
        else: pass
        if settings.COINDAEMON_Reward == 'POS':
            self.signature = b""
        else: pass

    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
@@ -252,9 +252,10 @@ class CBlock(object):
        self.nBits = struct.unpack("<I", f.read(4))[0]
        self.nNonce = struct.unpack("<I", f.read(4))[0]
        self.vtx = deser_vector(f, CTransaction)
        if settings.COINDAEMON_Reward == 'POS':
            self.signature = deser_string(f)
        else: pass

    def serialize(self):
        r = []
        r.append(struct.pack("<i", self.nVersion))
@@ -264,9 +265,9 @@ class CBlock(object):
        r.append(struct.pack("<I", self.nBits))
        r.append(struct.pack("<I", self.nNonce))
        r.append(ser_vector(self.vtx))
        if settings.COINDAEMON_Reward == 'POS':
            r.append(ser_string(self.signature))
        else: pass
        return ''.join(r)

    if settings.COINDAEMON_ALGO == 'scrypt':
@@ -308,22 +309,25 @@ class CBlock(object):

    def is_valid(self):
        if settings.COINDAEMON_ALGO == 'scrypt':
            self.calc_scrypt()
        elif settings.COINDAEMON_ALGO == 'quark':
            self.calc_quark()
        else:
            self.calc_sha256()

        target = uint256_from_compact(self.nBits)

        if settings.COINDAEMON_ALGO == 'scrypt':
            if self.scrypt > target:
                return False
        elif settings.COINDAEMON_ALGO == 'quark':
            if self.quark > target:
                return False
        else:
            if self.sha256 > target:
                return False

        hashes = []
        for tx in self.vtx:
            tx.sha256 = None

@@ -9,35 +9,35 @@ log = stratum.logger.get_logger('Notify_Email')
class NOTIFY_EMAIL():

    def notify_start(self):
        if settings.NOTIFY_EMAIL_TO != '':
            self.send_email(settings.NOTIFY_EMAIL_TO,'Stratum Server Started','Stratum server has started!')

    def notify_found_block(self,worker_name):
        if settings.NOTIFY_EMAIL_TO != '':
            text = '%s on Stratum server found a block!' % worker_name
            self.send_email(settings.NOTIFY_EMAIL_TO,'Stratum Server Found Block',text)

    def notify_dead_coindaemon(self,worker_name):
        if settings.NOTIFY_EMAIL_TO != '':
            text = 'Coin Daemon Has Crashed Please Report' % worker_name
            self.send_email(settings.NOTIFY_EMAIL_TO,'Coin Daemon Crashed!',text)

    def send_email(self,to,subject,message):
        msg = MIMEText(message)
        msg['Subject'] = subject
        msg['From'] = settings.NOTIFY_EMAIL_FROM
        msg['To'] = to
        try:
            s = smtplib.SMTP(settings.NOTIFY_EMAIL_SERVER)
            if settings.NOTIFY_EMAIL_USERNAME != '':
                if settings.NOTIFY_EMAIL_USETLS:
                    s.ehlo()
                    s.starttls()
                    s.ehlo()
                s.login(settings.NOTIFY_EMAIL_USERNAME, settings.NOTIFY_EMAIL_PASSWORD)
            s.sendmail(settings.NOTIFY_EMAIL_FROM,to,msg.as_string())
            s.quit()
        except smtplib.SMTPAuthenticationError as e:
            log.error('Error sending Email: %s' % e[1])
        except Exception as e:
            log.error('Error sending Email: %s' % e[0])

@@ -4,11 +4,11 @@ import util
import StringIO
import settings
if settings.COINDAEMON_ALGO == 'scrypt':
    import ltc_scrypt
elif settings.COINDAEMON_ALGO == 'scrypt-jane':
    import yac_scrypt
elif settings.COINDAEMON_ALGO == 'quark':
    import quark_hash
else: pass
from twisted.internet import defer
from lib.exceptions import SubmitException
@@ -63,13 +63,13 @@ class TemplateRegistry(object):
    def get_new_extranonce1(self):
        '''Generates unique extranonce1 (e.g. for newly
        subscribed connection.'''
        log.debug("Getting Unique Extronance")
        return self.extranonce_counter.get_new_bin()

    def get_last_broadcast_args(self):
        '''Returns arguments for mining.notify
        from last known template.'''
        log.debug("Getting Laat Template")
        return self.last_block.broadcast_args

    def add_template(self, block,block_height):
@@ -137,7 +137,7 @@ class TemplateRegistry(object):
        start = Interfaces.timestamper.time()

        template = self.block_template_class(Interfaces.timestamper, self.coinbaser, JobIdGenerator.get_new_id())
        log.info(template.fill_from_rpc(data))
        self.add_template(template,data['height'])

        log.info("Update finished, %.03f sec, %d txes" % \
@@ -148,12 +148,14 @@ class TemplateRegistry(object):

    def diff_to_target(self, difficulty):
        '''Converts difficulty to target'''
        if settings.COINDAEMON_ALGO == 'scrypt' or 'scrypt-jane':
            diff1 = 0x0000ffff00000000000000000000000000000000000000000000000000000000
        elif settings.COINDAEMON_ALGO == 'quark':
            diff1 = 0x000000ffff000000000000000000000000000000000000000000000000000000
        else:
            diff1 = 0x00000000ffff0000000000000000000000000000000000000000000000000000

        return diff1 / difficulty

    def get_job(self, job_id):
        '''For given job_id returns BlockTemplate instance or None'''
@@ -234,21 +236,23 @@ class TemplateRegistry(object):
        header_bin = job.serialize_header(merkle_root_int, ntime_bin, nonce_bin)

        # 4. Reverse header and compare it with target of the user
        if settings.COINDAEMON_ALGO == 'scrypt':
            hash_bin = ltc_scrypt.getPoWHash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
        elif settings.COINDAEMON_ALGO == 'scrypt-jane':
            hash_bin = yac_scrypt.getPoWHash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]), int(ntime, 16))
        elif settings.COINDAEMON_ALGO == 'quark':
            hash_bin = quark_hash.getPoWHash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
        else:
            hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))

        hash_int = util.uint256_from_str(hash_bin)
        scrypt_hash_hex = "%064x" % hash_int
        header_hex = binascii.hexlify(header_bin)
        if settings.COINDAEMON_ALGO == 'scrypt' or settings.COINDAEMON_ALGO == 'scrypt-jane':
            header_hex = header_hex+"000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000"
        elif settings.COINDAEMON_ALGO == 'quark':
            header_hex = header_hex+"000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000"
        else: pass

        target_user = self.diff_to_target(difficulty)
        if hash_int > target_user:
@@ -262,28 +266,28 @@ class TemplateRegistry(object):
        # Algebra tells us the diff_to_target is the same as hash_to_diff
        share_diff = int(self.diff_to_target(hash_int))

        # 5. Compare hash with target of the network
        if hash_int <= job.target:
            # Yay! It is block candidate!
            log.info("We found a block candidate! %s" % scrypt_hash_hex)

            # Reverse the header and get the potential block hash (for scrypt only)
            #if settings.COINDAEMON_ALGO == 'scrypt' or settings.COINDAEMON_ALGO == 'sha256d':
            # if settings.COINDAEMON_Reward == 'POW':
            block_hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
            block_hash_hex = block_hash_bin[::-1].encode('hex_codec')
            #else: block_hash_hex = hash_bin[::-1].encode('hex_codec')
            # 6. Finalize and serialize block object
            job.finalize(merkle_root_int, extranonce1_bin, extranonce2_bin, int(ntime, 16), int(nonce, 16))

            if not job.is_valid():
                # Should not happen
                log.exception("FINAL JOB VALIDATION FAILED!(Try enabling/disabling tx messages)")

            # 7. Submit block to the network
            serialized = binascii.hexlify(job.serialize())
            if settings.BLOCK_CHECK_SCRYPT_HASH:
                on_submit = self.bitcoin_rpc.submitblock(serialized, scrypt_hash_hex)
            else:
                on_submit = self.bitcoin_rpc.submitblock(serialized, block_hash_hex)

@@ -173,7 +173,7 @@ def address_to_pubkeyhash(addr):
        addr = b58decode(addr, 25)
    except:
        return None

    if addr is None:
        return None

@@ -66,13 +66,13 @@ class DBInterface():
            log.debug('DB_None INIT')
            import DB_None
            return DB_None.DB_None()

    def scheduleImport(self):
        # This schedule's the Import
        if settings.DATABASE_DRIVER == "sqlite":
            use_thread = False
        else:
            use_thread = True

        if use_thread:
            self.queueclock = reactor.callLater(settings.DB_LOADER_CHECKTIME , self.run_import_thread)
@@ -174,11 +174,11 @@ class DBInterface():
            self.cache.set(username, password)
            return True
        elif settings.USERS_AUTOADD == True:
            if self.dbi.get_uid(username) != False:
                uid = self.dbi.get_uid(username)
                self.dbi.insert_worker(uid, username, password)
                self.cache.set(username, password)
                return True

        log.info("Authentication for %s failed" % username)
        return False
@@ -190,8 +190,8 @@ class DBInterface():
        return self.dbi.get_user(id)

    def user_exists(self, username):
        if self.cache.get(username) is not None:
            return True
        user = self.dbi.get_user(username)
        return user is not None

@@ -199,7 +199,7 @@ class DBInterface():
        return self.dbi.insert_user(username, password)

    def delete_user(self, username):
        self.mc.delete(username)
        self.usercache = {}
        return self.dbi.delete_user(username)

@@ -155,13 +155,13 @@ class DB_Mysql():
                %(lres)s, %(result)s, %(reason)s, %(solution)s)
                """,
                {
                    "time": v[4],
                    "host": v[6],
                    "uname": v[0],
                    "lres": v[5],
                    "result": v[5],
                    "reason": v[9],
                    "solution": v[2]
                    "time": data[4],
                    "host": data[6],
                    "uname": data[0],
                    "lres": data[5],
                    "result": data[5],
                    "reason": data[9],
                    "solution": data[2]
                }
            )
@@ -204,7 +204,7 @@ class DB_Mysql():

        user = self.dbc.fetchone()
        return user

    def get_uid(self, id_or_username):
        log.debug("Finding user id of %s", id_or_username)
        uname = id_or_username.split(".", 1)[0]

@@ -3,16 +3,16 @@ log = stratum.logger.get_logger('None')

class DB_None():
    def __init__(self):
        log.debug("Connecting to DB")

    def updateStats(self,averageOverTime):
        log.debug("Updating Stats")

    def import_shares(self,data):
        log.debug("Importing Shares")

    def found_block(self,data):
        log.debug("Found Block")

    def get_user(self, id_or_username):
        log.debug("Get User")
@@ -21,37 +21,36 @@ class DB_None():
        log.debug("List Users")

    def delete_user(self,username):
        log.debug("Deleting Username")

    def insert_user(self,username,password):
        log.debug("Adding Username/Password")

    def update_user(self,username,password):
        log.debug("Updating Username/Password")

    def check_password(self,username,password):
        log.debug("Checking Username/Password")
        return True

    def update_pool_info(self,pi):
        log.debug("Update Pool Info")

    def clear_worker_diff(self):
        log.debug("Clear Worker Diff")

    def get_pool_stats(self):
        log.debug("Get Pool Stats")
        ret = {}
        return ret

    def get_workers_stats(self):
        log.debug("Get Workers Stats")
        ret = {}
        return ret

    def check_tables(self):
        log.debug("Checking Tables")

    def close(self):
        log.debug("Close Connection")

@@ -7,131 +7,132 @@ import sqlite3

class DB_Sqlite():
    def __init__(self):
        log.debug("Connecting to DB")
        self.dbh = sqlite3.connect(settings.DB_SQLITE_FILE)
        self.dbc = self.dbh.cursor()

    def updateStats(self,averageOverTime):
        log.debug("Updating Stats")
        # Note: we are using transactions... so we can set the speed = 0 and it doesn't take affect until we are commited.
        self.dbc.execute("update pool_worker set speed = 0, alive = 0");
        stime = '%.2f' % ( time.time() - averageOverTime );
        self.dbc.execute("select username,SUM(difficulty) from shares where time > :time group by username", {'time':stime})
        total_speed = 0
        sqldata = []
        for name,shares in self.dbc.fetchall():
            speed = int(int(shares) * pow(2,32)) / ( int(averageOverTime) * 1000 * 1000)
            total_speed += speed
            sqldata.append({'speed':speed,'user':name})
        self.dbc.executemany("update pool_worker set speed = :speed, alive = 1 where username = :user",sqldata)
        self.dbc.execute("update pool set value = :val where parameter = 'pool_speed'",{'val':total_speed})
        self.dbh.commit()

    def archive_check(self):
        # Check for found shares to archive
        self.dbc.execute("select time from shares where upstream_result = 1 order by time limit 1")
        data = self.dbc.fetchone()
        if data is None or (data[0] + settings.ARCHIVE_DELAY) > time.time() :
            return False
        return data[0]

    def archive_found(self,found_time):
        self.dbc.execute("insert into shares_archive_found select * from shares where upstream_result = 1 and time <= :time",{'time':found_time})
        self.dbh.commit()

    def archive_to_db(self,found_time):
        self.dbc.execute("insert into shares_archive select * from shares where time <= :time",{'time':found_time})
        self.dbh.commit()

    def archive_cleanup(self,found_time):
        self.dbc.execute("delete from shares where time <= :time",{'time':found_time})
        self.dbc.execute("vacuum")
        self.dbh.commit()

    def archive_get_shares(self,found_time):
        self.dbc.execute("select * from shares where time <= :time",{'time':found_time})
        return self.dbc

    def import_shares(self,data):
        log.debug("Importing Shares")
        # 0 1 2 3 4 5 6 7 8 9 10
        # data: [worker_name,block_header,block_hash,difficulty,timestamp,is_valid,ip,block_height,prev_hash,invalid_reason,share_diff]
        checkin_times = {}
        total_shares = 0
        best_diff = 0
        sqldata = []
        for k,v in enumerate(data):
            if settings.DATABASE_EXTEND :
                total_shares += v[3]
                if v[0] in checkin_times:
                    if v[4] > checkin_times[v[0]] :
                        checkin_times[v[0]]["time"] = v[4]
                else:
                    checkin_times[v[0]] = {"time": v[4], "shares": 0, "rejects": 0 }

                if v[5] == True :
                    checkin_times[v[0]]["shares"] += v[3]
                else :
                    checkin_times[v[0]]["rejects"] += v[3]

                if v[10] > best_diff:
                    best_diff = v[10]

                sqldata.append({'time':v[4],'rem_host':v[6],'username':v[0],'our_result':v[5],'upstream_result':0,'reason':v[9],'solution':'',
                    'block_num':v[7],'prev_block_hash':v[8],'ua':'','diff':v[3]} )
            else :
                sqldata.append({'time':v[4],'rem_host':v[6],'username':v[0],'our_result':v[5],'upstream_result':0,'reason':v[9],'solution':''} )

        if settings.DATABASE_EXTEND :
            self.dbc.executemany("insert into shares " +\
                "(time,rem_host,username,our_result,upstream_result,reason,solution,block_num,prev_block_hash,useragent,difficulty) " +\
                "VALUES (:time,:rem_host,:username,:our_result,:upstream_result,:reason,:solution,:block_num,:prev_block_hash,:ua,:diff)",sqldata)

            self.dbc.execute("select value from pool where parameter = 'round_shares'")
            round_shares = int(self.dbc.fetchone()[0]) + total_shares
            self.dbc.execute("update pool set value = :val where parameter = 'round_shares'",{'val':round_shares})

            self.dbc.execute("select value from pool where parameter = 'round_best_share'")
            round_best_share = int(self.dbc.fetchone()[0])
            if best_diff > round_best_share:
                self.dbc.execute("update pool set value = :val where parameter = 'round_best_share'",{'val':best_diff})

            self.dbc.execute("select value from pool where parameter = 'bitcoin_difficulty'")
            difficulty = float(self.dbc.fetchone()[0])

            if difficulty == 0:
                progress = 0
            else:
                progress = (round_shares/difficulty)*100
            self.dbc.execute("update pool set value = :val where parameter = 'round_progress'",{'val':progress})

            sqldata = []
            for k,v in checkin_times.items():
                sqldata.append({'last_checkin':v["time"],'addshares':v["shares"],'addrejects':v["rejects"],'user':k})

            self.dbc.executemany("update pool_worker set last_checkin = :last_checkin, total_shares = total_shares + :addshares, " +\
                "total_rejects = total_rejects + :addrejects where username = :user",sqldata)
        else:
            self.dbc.executemany("insert into shares (time,rem_host,username,our_result,upstream_result,reason,solution) " +\
                "VALUES (:time,:rem_host,:username,:our_result,:upstream_result,:reason,:solution)",sqldata)

        self.dbh.commit()

    def found_block(self,data):
        # Note: difficulty = -1 here
        self.dbc.execute("update shares set upstream_result = :usr, solution = :sol where time = :time and username = :user",
            {'usr':data[5],'sol':data[2],'time':data[4],'user':data[0]})
        if settings.DATABASE_EXTEND and data[5] == True :
            self.dbc.execute("update pool_worker set total_found = total_found + 1 where username = :user",{'user':data[0]})
            self.dbc.execute("select value from pool where parameter = 'pool_total_found'")
            total_found = int(self.dbc.fetchone()[0]) + 1
            self.dbc.executemany("update pool set value = :val where parameter = :parm", [{'val':0,'parm':'round_shares'},
                {'val':0,'parm':'round_progress'},
                {'val':0,'parm':'round_best_share'},
                {'val':time.time(),'parm':'round_start'},
                {'val':total_found,'parm':'pool_total_found'}
                ])
        self.dbh.commit()

    def get_user(self, id_or_username):
        raise NotImplementedError('Not implemented for SQLite')
@@ -143,157 +144,157 @@ class DB_Sqlite():
        raise NotImplementedError('Not implemented for SQLite')

    def insert_user(self,username,password):
        log.debug("Adding Username/Password")
        self.dbc.execute("insert into pool_worker (username,password) VALUES (:user,:pass)", {'user':username,'pass':password})
        self.dbh.commit()

    def update_user(self,username,password):
        raise NotImplementedError('Not implemented for SQLite')

    def check_password(self,username,password):
        log.debug("Checking Username/Password")
        self.dbc.execute("select COUNT(*) from pool_worker where username = :user and password = :pass", {'user':username,'pass':password})
        data = self.dbc.fetchone()
        if data[0] > 0 :
            return True
        return False

    def update_worker_diff(self,username,diff):
        self.dbc.execute("update pool_worker set difficulty = :diff where username = :user",{'diff':diff,'user':username})
        self.dbh.commit()

    def clear_worker_diff(self):
        if settings.DATABASE_EXTEND == True :
            self.dbc.execute("update pool_worker set difficulty = 0")
            self.dbh.commit()

    def update_pool_info(self,pi):
        self.dbc.executemany("update pool set value = :val where parameter = :parm",[{'val':pi['blocks'],'parm':"bitcoin_blocks"},
            {'val':pi['balance'],'parm':"bitcoin_balance"},
            {'val':pi['connections'],'parm':"bitcoin_connections"},
            {'val':pi['difficulty'],'parm':"bitcoin_difficulty"},
            {'val':time.time(),'parm':"bitcoin_infotime"}
            ])
        self.dbh.commit()

    def get_pool_stats(self):
        self.dbc.execute("select * from pool")
        ret = {}
        for data in self.dbc.fetchall():
            ret[data[0]] = data[1]
        return ret

    def get_workers_stats(self):
        self.dbc.execute("select username,speed,last_checkin,total_shares,total_rejects,total_found,alive,difficulty from pool_worker")
        ret = {}
        for data in self.dbc.fetchall():
            ret[data[0]] = { "username" : data[0],
                "speed" : data[1],
                "last_checkin" : data[2],
                "total_shares" : data[3],
                "total_rejects" : data[4],
                "total_found" : data[5],
                "alive" : data[6],
                "difficulty" : data[7] }
        return ret

    def close(self):
        self.dbh.close()


    def check_tables(self):
        log.debug("Checking Tables")
        if settings.DATABASE_EXTEND == True:
            self.dbc.execute("create table if not exists shares"
                    "(time DATETIME,rem_host TEXT, username TEXT, our_result INTEGER, upstream_result INTEGER, reason TEXT, solution TEXT, "
                    "block_num INTEGER, prev_block_hash TEXT, useragent TEXT, difficulty INTEGER)")
            self.dbc.execute("create table if not exists pool_worker"
                    "(username TEXT, password TEXT, speed INTEGER, last_checkin DATETIME)")
            self.dbc.execute("create table if not exists pool(parameter TEXT, value TEXT)")
            self.dbc.execute("select COUNT(*) from pool where parameter = 'DB Version'")
            data = self.dbc.fetchone()
            if data[0] <= 0:
                self.dbc.execute("alter table pool_worker add total_shares INTEGER default 0")
                self.dbc.execute("alter table pool_worker add total_rejects INTEGER default 0")
                self.dbc.execute("alter table pool_worker add total_found INTEGER default 0")
                self.dbc.execute("insert into pool (parameter,value) VALUES ('DB Version',2)")
            self.update_tables()
        else:
            self.dbc.execute("create table if not exists shares"
                    "(time DATETIME,rem_host TEXT, username TEXT, our_result INTEGER, upstream_result INTEGER, reason TEXT, solution TEXT)")
            self.dbc.execute("create table if not exists pool_worker(username TEXT, password TEXT)")
            self.dbc.execute("create index if not exists pool_worker_username ON pool_worker(username)")

    def update_tables(self):
        version = 0
        current_version = 6
        while version < current_version:
            self.dbc.execute("select value from pool where parameter = 'DB Version'")
            data = self.dbc.fetchone()
            version = int(data[0])
            if version < current_version:
                log.info("Updating Database from %i to %i" % (version, version + 1))
                getattr(self, 'update_version_' + str(version))()
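
The loop above dispatches each schema bump to a method named after the version it upgrades from; a toy, self-contained illustration of that getattr pattern:

    class Migrator(object):
        def __init__(self):
            self.version, self.current_version = 2, 4   # illustrative numbers only

        def run(self):
            while self.version < self.current_version:
                getattr(self, 'update_version_' + str(self.version))()

        def update_version_2(self):
            print("2 -> 3")
            self.version = 3

        def update_version_3(self):
            print("3 -> 4")
            self.version = 4

    Migrator().run()   # prints "2 -> 3" then "3 -> 4"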

    def update_version_2(self):
        log.info("running update 2")
        self.dbc.executemany("insert into pool (parameter,value) VALUES (?,?)",
                [('bitcoin_blocks', 0),
                 ('bitcoin_balance', 0),
                 ('bitcoin_connections', 0),
                 ('bitcoin_difficulty', 0),
                 ('pool_speed', 0),
                 ('pool_total_found', 0),
                 ('round_shares', 0),
                 ('round_progress', 0),
                 ('round_start', time.time())])
        self.dbc.execute("create index if not exists shares_username ON shares(username)")
        self.dbc.execute("create index if not exists pool_worker_username ON pool_worker(username)")
        self.dbc.execute("update pool set value = 3 where parameter = 'DB Version'")
        self.dbh.commit()

    def update_version_3(self):
        log.info("running update 3")
        self.dbc.executemany("insert into pool (parameter,value) VALUES (?,?)",
                [('round_best_share', 0),
                 ('bitcoin_infotime', 0)])
        self.dbc.execute("alter table pool_worker add alive INTEGER default 0")
        self.dbc.execute("update pool set value = 4 where parameter = 'DB Version'")
        self.dbh.commit()

    def update_version_4(self):
        log.info("running update 4")
        self.dbc.execute("alter table pool_worker add difficulty INTEGER default 0")
        self.dbc.execute("create table if not exists shares_archive"
                "(time DATETIME,rem_host TEXT, username TEXT, our_result INTEGER, upstream_result INTEGER, reason TEXT, solution TEXT, "
                "block_num INTEGER, prev_block_hash TEXT, useragent TEXT, difficulty INTEGER)")
        self.dbc.execute("create table if not exists shares_archive_found"
                "(time DATETIME,rem_host TEXT, username TEXT, our_result INTEGER, upstream_result INTEGER, reason TEXT, solution TEXT, "
                "block_num INTEGER, prev_block_hash TEXT, useragent TEXT, difficulty INTEGER)")
        self.dbc.execute("update pool set value = 5 where parameter = 'DB Version'")
        self.dbh.commit()

    def update_version_5(self):
        log.info("running update 5")
        # Adding primary key to table: pool
        self.dbc.execute("alter table pool rename to pool_old")
        self.dbc.execute("create table if not exists pool(parameter TEXT, value TEXT, primary key(parameter))")
        self.dbc.execute("insert into pool select * from pool_old")
        self.dbc.execute("drop table pool_old")
        self.dbh.commit()
        # Adding primary key to table: pool_worker
        self.dbc.execute("alter table pool_worker rename to pool_worker_old")
        self.dbc.execute("CREATE TABLE pool_worker(username TEXT, password TEXT, speed INTEGER, last_checkin DATETIME, total_shares INTEGER default 0, total_rejects INTEGER default 0, total_found INTEGER default 0, alive INTEGER default 0, difficulty INTEGER default 0, primary key(username))")
        self.dbc.execute("insert into pool_worker select * from pool_worker_old")
        self.dbc.execute("drop table pool_worker_old")
        self.dbh.commit()
        # Adjusting indices on table: shares
        self.dbc.execute("DROP INDEX shares_username")
        self.dbc.execute("CREATE INDEX shares_time_username ON shares(time,username)")
        self.dbc.execute("CREATE INDEX shares_upstreamresult ON shares(upstream_result)")
        self.dbh.commit()

        self.dbc.execute("update pool set value = 6 where parameter = 'DB Version'")
        self.dbh.commit()
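
SQLite cannot add a primary key to an existing table in place, hence the rename/copy/drop dance above; a self-contained demonstration of the same pattern:

    import sqlite3

    dbh = sqlite3.connect(":memory:")
    dbc = dbh.cursor()
    dbc.execute("create table pool(parameter TEXT, value TEXT)")
    dbc.execute("insert into pool values ('DB Version', 5)")

    # Recreate the table with the constraint, then copy the rows over.
    dbc.execute("alter table pool rename to pool_old")
    dbc.execute("create table pool(parameter TEXT, value TEXT, primary key(parameter))")
    dbc.execute("insert into pool select * from pool_old")
    dbc.execute("drop table pool_old")
    dbh.commit()
    print(dbc.execute("select * from pool").fetchall())   # [('DB Version', '5')]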

@ -126,9 +126,9 @@ class BasicShareLimiter(object):
            else:
                if ddiff > -1:
                    ddiff = -1
            # Don't drop below POOL_TARGET
            if (ddiff + current_difficulty) < settings.POOL_TARGET:
                ddiff = settings.VDIFF_MIN_TARGET - current_difficulty
        elif avg < self.tmin:
            # For fractional 0.1 ddiff's just up by 1
            if settings.VDIFF_X2_TYPE:
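
A worked example of the clamp above, with assumed settings values (note that the floor actually applied is VDIFF_MIN_TARGET, as written in the code):

    # Assumed values for illustration only.
    POOL_TARGET = 32
    VDIFF_MIN_TARGET = 16
    current_difficulty = 40
    ddiff = -16                                       # proposed drop
    if (ddiff + current_difficulty) < POOL_TARGET:    # 24 < 32, so clamp
        ddiff = VDIFF_MIN_TARGET - current_difficulty
    print(current_difficulty + ddiff)                 # -> 16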

@ -174,8 +174,8 @@ class BasicShareLimiter(object):

        session['difficulty'] = new_diff
        connection_ref().rpc('mining.set_difficulty', [new_diff, ], is_notification=True)
        log.debug("Notified of New Difficulty")
        connection_ref().rpc('mining.notify', [work_id, prevhash, coinb1, coinb2, merkle_branch, version, nbits, ntime, False, ], is_notification=True)
        log.debug("Sent new work")
        dbi.update_worker_diff(worker_name, new_diff)
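
For reference, a sketch of the two Stratum notifications issued above, rendered as raw JSON-RPC lines (all field values are placeholders, not taken from this diff):

    import json

    new_diff = 16
    # mining.notify params: job_id, prevhash, coinb1, coinb2, merkle_branch, version, nbits, ntime, clean_jobs
    job = ["1", "00" * 32, "01" * 20, "02" * 20, [], "00000002", "1c2ac4af", "504e86b9", False]

    print(json.dumps({"id": None, "method": "mining.set_difficulty", "params": [new_diff]}))
    print(json.dumps({"id": None, "method": "mining.notify", "params": job}))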

@ -36,10 +36,10 @@ class WorkerManagerInterface(object):
            return (False, settings.POOL_TARGET)

    def register_work(self, worker_name, job_id, difficulty):
        now = Interfaces.timestamper.time()
        work_id = WorkIdGenerator.get_new_id()
        self.job_log.setdefault(worker_name, {})[work_id] = (job_id, difficulty, now)
        return work_id

class WorkIdGenerator(object):
    counter = 1000
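
A standalone sketch of the job_log bookkeeping that register_work performs (names and values are illustrative):

    import time

    job_log = {}   # worker_name -> {work_id: (job_id, difficulty, timestamp)}
    work_id, job_id, difficulty = 1000, "1", 16
    job_log.setdefault("worker.1", {})[work_id] = (job_id, difficulty, time.time())
    print(job_log["worker.1"][work_id][1])   # -> 16, the difficulty this job was handed out at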

@ -4,20 +4,20 @@ import lib.logger
log = lib.logger.get_logger('work_log_pruner')

def _WorkLogPruner_I(wl):
    now = time()
    pruned = 0
    for username in wl:
        userwork = wl[username]
        for wli in tuple(userwork.keys()):
            if now > userwork[wli][2] + 120:
                del userwork[wli]
                pruned += 1
    log.info('Pruned %d jobs' % (pruned,))

def WorkLogPruner(wl):
    while True:
        try:
            sleep(60)
            _WorkLogPruner_I(wl)
        except:
            log.debug(traceback.format_exc())
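
How the pruner gets started is outside this diff; a minimal sketch, assuming the shared work-log dict is handed to a daemon thread:

    import threading

    work_log = {}   # the same dict register_work() writes into (assumed wiring)
    t = threading.Thread(target=WorkLogPruner, args=(work_log,))
    t.daemon = True   # don't keep the pool alive on shutdown just for pruning
    t.start()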