Compare commits

...

69 Commits

Author SHA1 Message Date
sairajzero
878e5f23fd Update flo-data text when for mined tx 2023-07-23 20:07:29 +05:30
Sai Raj
76077630aa
Replace getwork call with getblocktemplate 2021-06-06 03:04:46 +05:30
Sai Raj
a19ae3444a
Update launcher.tac 2021-06-06 03:01:23 +05:30
Sai Raj
83d92719bb
Rename base_structure to base_structure.sql 2021-06-06 02:29:52 +05:30
Sai Raj
8df9fc4c58
Create base_structure 2021-06-06 02:29:29 +05:30
Sai Raj
03dc1e3201
Update README.md 2021-06-06 01:45:31 +05:30
ahmedbodi
33f65ae2a8 Update .travis.yml 2014-04-20 15:07:17 +01:00
ahmedbodi
62fa7d504c Update requirements.txt 2014-04-20 14:57:38 +01:00
ahmedbodi
42d3d8a398 Update .travis.yml 2014-04-20 14:56:39 +01:00
ahmedbodi
45b20392af Add Stratum 2014-04-20 14:50:46 +01:00
Ahmed Bodiwala
a2dce6593a Add Travis 2014-04-20 13:28:03 +00:00
Ahmed Bodiwala
d6b5625321 Fix Requirements 2014-04-20 13:23:56 +00:00
ahmedbodi
7e3019259c Merge pull request #5 from youjying/patch-1
user nonexistance error
2014-03-21 14:29:09 +00:00
youjying
a8a0b09be2 user nonexistance error
some servers like to allow people to connect with no signup. this avoids the issue of not having a user created. perhaps missing a uid generator?
2014-03-20 14:03:30 -06:00
ahmedbodi
652dd310f2 Update README.md 2014-03-18 09:52:16 +00:00
ahmedbodi
836b9a3432 Merge pull request #4 from gatra/master
Riecoin Support
2014-03-18 09:49:09 +00:00
gatra
574a9b1e4d submit block bug fixes 2014-03-18 01:14:37 -03:00
gatra
60e4eb6149 readme updates 2014-03-13 23:32:37 -03:00
gatra
d6729c10e1 riecoin bug fixes 2014-03-13 23:29:04 -03:00
ahmedbodi
8c1f3d255c Merge pull request #3 from mrbr1ghtside/patch-1
Update template_registry.py
2014-03-07 19:24:18 +00:00
mrbr1ghtside
1b6f5a1127 Update template_registry.py
Typo ;)
2014-03-07 20:16:03 +01:00
gatra
77e05277e2 some fixes 2014-03-06 18:35:41 -03:00
gatra
94276a8132 riecoin support fixes 2014-03-06 00:14:53 -03:00
gatra
05d3f1789a riecoin support 2014-03-05 12:42:52 -03:00
ahmedbodi
88a1499685 Status Update 2014-02-28 23:38:35 +00:00
ahmedbodi
ab3a04a011 Merge pull request #227 from daygle/patch-1
Update config_sample.py
2014-02-19 15:28:33 +00:00
ahmedbodi
6285aad778 Merge pull request #225 from slaveofmoney/patch-1
Removed double function declaration (get_uid)
2014-02-12 15:42:09 +00:00
ahmedbodi
52db8210d8 Fix Get User Call 2014-02-10 12:56:59 +00:00
Glen
60492a2ae4 Update config_sample.py
Clean-up config template.
2014-02-03 22:47:51 +11:00
slaveofmoney
11843fd9e1 Removed double function declaration (get_uid) 2014-02-03 13:25:57 +06:00
ahmedbodi
9fdd3ddc08 Merge pull request #220 from Crypto-Expert/issue-92
add skein-sha256 algo support
2014-02-02 05:14:29 -08:00
Alan Penner
052dc3ae58 fix indent 2014-01-31 09:15:43 -08:00
ahmedbodi
781aa0285a Update interfaces.py 2014-01-31 00:13:47 +00:00
root
ce26572267 add skein-sha256 algo support 2014-01-31 00:08:33 +00:00
ahmedbodi
7a6fd9ac30 Update README.md 2014-01-31 00:05:33 +00:00
ahmedbodi
1e8a69d60d Merge pull request #206 from ahmedbodi/issue-92
Issue 92
2014-01-30 15:04:55 -08:00
Ahmed Bodiwala
41f11d9ac4 Multiple Fixed 2014-01-30 22:40:36 +00:00
Ahmed Bodiwala
d1094c3715 Fixes 2014-01-30 22:12:25 +00:00
ahmedbodi
df94a0967d Attempt at force loading of shares
Force load of shares when a block is submitted to the coind but before the db is updated
2014-01-30 11:35:28 +00:00
ahmedbodi
b9821911d1 Delete template_registry.py.save 2014-01-30 11:28:20 +00:00
ahmedbodi
52072d0a7b Update README.md 2014-01-30 10:53:10 +00:00
ahmedbodi
58c12f18e2 Update bitcoin_rpc_manager.py 2014-01-30 09:40:45 +00:00
ahmedbodi
b44277b43c Update bitcoin_rpc.py 2014-01-30 09:37:14 +00:00
Ahmed Bodiwala
0473513525 Fixing error 2014-01-29 03:32:21 +00:00
Alan Penner
041c5a0608 add extra argument to bitcoin_rpc_manager submitblock 2014-01-29 03:24:06 +00:00
Alan Penner
604e22749c change submitblock to try both (correct) hashes on block creation 2014-01-29 03:22:04 +00:00
Alan Penner
40a7e5dd92 try both hash_hex and block_hex on checking for block submission 2014-01-29 03:19:33 +00:00
Alan Penner
35cfaaf2d1 try both hash_hex and block_hex on checking for block submission 2014-01-29 03:16:56 +00:00
Alan Penner
a25489296b debugging submitblock output 2014-01-29 03:14:04 +00:00
Alan Penner
cc04405826 debugging submitblock output 2014-01-29 03:13:24 +00:00
Alan Penner
b80dae572c debugging submitblock output 2014-01-29 03:07:00 +00:00
Alan Penner
811ab8f5c4 fix attempt -> attempts 2014-01-29 03:07:00 +00:00
Alan Penner
1a1866664e update exception 2014-01-29 03:07:00 +00:00
Alan Penner
d64c08f54c make 5 attempts at block submission using detected value of submitblock or getblocktemplate 2014-01-29 03:06:22 +00:00
Alan Penner
009519e295 add detection of submitblock on init 2014-01-29 03:00:45 +00:00
Alan Penner
47d827ec98 remove some semicolons :) 2014-01-29 02:51:10 +00:00
ahmedbodi
688489f649 Merge pull request #196 from ahmedbodi/issue-194
Fix POW PubKey Error
2014-01-28 03:18:26 -08:00
ahmedbodi
5fb1931fd3 Fix POW PubKey Error 2014-01-28 11:12:23 +00:00
ahmedbodi
d5bee92932 Merge pull request #188 from penner42/ppcoin-fix
[FIX] Coind error: Invalid mode for ppcoin, possibly others
2014-01-27 13:49:00 -08:00
ahmedbodi
560d9d0ce7 Merge pull request #189 from penner42/invalidworkerIP
Log IP addresses of unauthorized workers.
2014-01-27 13:48:42 -08:00
Alan Penner
06e2d03a9c add log of IP in authorize 2014-01-26 20:33:08 -08:00
Alan Penner
62befe0168 log ip instead of throw it in exception 2014-01-26 20:29:06 -08:00
Alan Penner
558cf7a17f fix error messages 2014-01-26 20:25:20 -08:00
Alan Penner
8fc232a820 print IP address of invalid workers 2014-01-26 20:17:21 -08:00
Alan Penner
2ed44ed6d9 pass exception on if not 500 internal server error 2014-01-26 16:16:21 -08:00
Alan Penner
63cce44bee fix getblocktemplate for ppcoin. if error passing {} try passing nothing. 2014-01-26 16:10:50 -08:00
ahmedbodi
116e9df4e9 Merge pull request #180 from erasmospunk/master
fix whitespace errors
2014-01-24 14:03:06 -08:00
Giannis Dzegoutanis
a5a6d29b07 even more whitespace fixes 2014-01-24 22:05:12 +01:00
Giannis Dzegoutanis
a6938e1b36 fix mixed tab indentation 2014-01-24 19:52:23 +01:00
33 changed files with 1778 additions and 1055 deletions

1
.gitignore vendored
View File

@ -2,3 +2,4 @@
conf/config.py conf/config.py
*.log *.log
LOG LOG
*.bak

9
.travis.yml Normal file
View File

@ -0,0 +1,9 @@
language: python
python:
- "2.6"
- "2.7"
- "3.2"
- "3.3"
# command to install dependencies
install:
- "pip install -r requirements.txt"

View File

@ -1,20 +1,18 @@
[ ![Codeship Status for ahmedbodi/php-mpos](https://www.codeship.io/projects/b3003a70-61a3-0131-231e-26f75a0c690d/status?branch=master)](https://www.codeship.io/projects/12274) # Description
#Description
Stratum-mining is a pooled mining protocol. It is a replacement for *getwork* based pooling servers by allowing clients to generate work. The stratum protocol is described [here](http://mining.bitcoin.cz/stratum-mining) in full detail. Stratum-mining is a pooled mining protocol. It is a replacement for *getwork* based pooling servers by allowing clients to generate work. The stratum protocol is described [here](http://mining.bitcoin.cz/stratum-mining) in full detail.
This is a implementation of stratum-mining for scrypt based coins. It is compatible with *MPOS* as it complies with the standards of *pushpool*. The end goal is to build on these standards to come up with a more stable solution. This is a implementation of stratum-mining for scrypt based coins. It is compatible with *MPOS* as it complies with the standards of *pushpool*. The end goal is to build on these standards to come up with a more stable solution.
The goal is to make a reliable stratum mining server for scrypt based coins. Over time I will develop this to be more feature rich and very stable. If you would like to see a feature please file a feature request. The goal is to make a reliable stratum mining server for a wide range of coins unlike other forks where the code is limited to specific algorithm's. Over time I will develop this to be more feature rich and very stable. If you would like to see a feature please file a feature request.
**NOTE:** This fork is still in development. Many features may be broken. Please report any broken features or issues. **NOTE:** This fork is still in development. Many features may be broken. Please report any broken features or issues.
#Features # Features
* Stratum Mining Pool * Stratum Mining Pool
* Solved Block Confirmation * Solved Block Confirmation
* Job Based Vardiff support * Job Based Vardiff support
* Solution Block Hash Support * Solution Block Hash Support
* *NEW* SHA256 and Scrypt Algo Support
* Log Rotation * Log Rotation
* Initial low difficulty share confirmation * Initial low difficulty share confirmation
* Multiple *coind* wallets * Multiple *coind* wallets
@ -25,25 +23,14 @@ The goal is to make a reliable stratum mining server for scrypt based coins. Ove
* Proof Of Work and Proof of Stake Coin Support * Proof Of Work and Proof of Stake Coin Support
* Transaction Messaging Support * Transaction Messaging Support
#Donations
* BTC: 18Xg4qP6RUvpeajanKPt5PDvvcqvU2pP6d
* BTE: 8UJLskr8eDYATvYzmaCBw3vbRmeNweT3rW
* DGC: DSBb5KmGWYKMJjxk3rETtvpk9sPqgCCYAw
* LTC: Lg4kXMqPsmMHrGr81LLe8oHpbsMiWiuMSB
* WDC: WeVFgZQsKSKXGak7NJPp9SrcUexghzTPGJ
* Doge: DLtBRYtNCzfiZfcpUeEr8KPvy5k1aR7jca
* SRC: sMP2wHN5H2ik7FQDPjhSzFZUWux75BYZGe
* ARG: AQvXPWVqGzcpH2j2XSRG7X5R9nA3y9D9aQ
* CryptsyTradeKey: ec13d183e304326ebd41258d6ae7188e303866fe
# Requirements
#Requirements
*stratum-mining* is built in python. I have been testing it with 2.7.3, but it should work with other versions. The requirements for running the software are below. *stratum-mining* is built in python. I have been testing it with 2.7.3, but it should work with other versions. The requirements for running the software are below.
* Python 2.7+ * Python 2.7+
* python-twisted * python-twisted
* stratum * stratum
* MySQL Server * MySQL Server
* SHA256 or Scrypt CoinDaemon * CoinD's
Other coins have been known to work with this implementation. I have tested with the following coins, but there may be many others that work. Other coins have been known to work with this implementation. I have tested with the following coins, but there may be many others that work.
@ -63,25 +50,17 @@ Other coins have been known to work with this implementation. I have tested with
* Quark * Quark
* Securecoin * Securecoin
#Installation # Installation
The installation of this *stratum-mining* can be found in the Repo Wiki. The installation of this *stratum-mining* can be found in the Repo Wiki.
#Contact # Credits
I am available in the #MPOS, #crypto-expert, #digitalcoin, and #worldcoin channels on freenode.
Although i am willing to provide support through IRC please file issues on the repo.
Issues as a direct result of stratum will be helped with as much as possible
However issues related to a coin daemon's setup and other non stratum issues,
Please research and attempt to debug first.
#Credits * Original version by Slush0 and ArtForz (original stratum code)
* More Features added by GeneralFault, Wadee Womersley, Viperaus, TheSeven and Moopless
* Multi Algo, Vardiff, DB and MPOS support done by Ahmed_Bodi, penner42 and Obigal
* Riecoin support implemented by gatra
* Original version by Slush0 (original stratum code) # License
* More Features added by GeneralFault, Wadee Womersley and Moopless This software is provided AS-IS without any warranties of any kind. Please use at your own risk.
* Scrypt conversion from work done by viperaus
* PoS conversion done by TheSeven
* Multi Algo, Vardiff, DB and MPOS support done by Ahmed_Bodi and Obigal
#License
This software is provides AS-IS without any warranties of any kind. Please use at your own risk.

View File

@ -9,32 +9,33 @@ You NEED to set the parameters in BASIC SETTINGS
# ******************** BASIC SETTINGS *************** # ******************** BASIC SETTINGS ***************
# These are the MUST BE SET parameters! # These are the MUST BE SET parameters!
CENTRAL_WALLET = 'set_valid_addresss_in_config!' # local coin address where money goes CENTRAL_WALLET = 'set_valid_addresss_in_config!' # Local coin address where money goes
COINDAEMON_TRUSTED_HOST = 'localhost' COINDAEMON_TRUSTED_HOST = 'localhost'
COINDAEMON_TRUSTED_PORT = 8332 COINDAEMON_TRUSTED_PORT = 28332
COINDAEMON_TRUSTED_USER = 'user' COINDAEMON_TRUSTED_USER = 'user'
COINDAEMON_TRUSTED_PASSWORD = 'somepassword' COINDAEMON_TRUSTED_PASSWORD = 'somepassword'
# Coin Algorithm is the option used to determine the algortithm used by stratum # Coin algorithm is the option used to determine the algorithm used by stratum
# This currently works with POW and POS coins # This currently works with POW and POS coins
# The available options are: # The available options are:
# scrypt, sha256d, scrypt-jane and quark # scrypt, sha256d, scrypt-jane, skeinhash, quark and riecoin
# If the option does not meet either of these criteria stratum defaults to scrypt # If the option does not meet either of these criteria stratum defaults to scrypt
# For Coins which support TX Messages please enter yes in the TX selection # For Coins which support TX Messages please enter yes in the TX selection
COINDAEMON_ALGO = 'scrypt' COINDAEMON_ALGO = 'riecoin'
COINDAEMON_TX = 'no' COINDAEMON_TX = 'no'
# ******************** BASIC SETTINGS *************** # ******************** BASIC SETTINGS ***************
# Backup Coin Daemon address's (consider having at least 1 backup) # Backup Coin Daemon address's (consider having at least 1 backup)
# You can have up to 99 # You can have up to 99
#COINDAEMON_TRUSTED_HOST_1 = 'localhost' #COINDAEMON_TRUSTED_HOST_1 = 'localhost'
#COINDAEMON_TRUSTED_PORT_1 = 8332 #COINDAEMON_TRUSTED_PORT_1 = 28332
#COINDAEMON_TRUSTED_USER_1 = 'user' #COINDAEMON_TRUSTED_USER_1 = 'user'
#COINDAEMON_TRUSTED_PASSWORD_1 = 'somepassword' #COINDAEMON_TRUSTED_PASSWORD_1 = 'somepassword'
#COINDAEMON_TRUSTED_HOST_2 = 'localhost' #COINDAEMON_TRUSTED_HOST_2 = 'localhost'
#COINDAEMON_TRUSTED_PORT_2 = 8332 #COINDAEMON_TRUSTED_PORT_2 = 28332
#COINDAEMON_TRUSTED_USER_2 = 'user' #COINDAEMON_TRUSTED_USER_2 = 'user'
#COINDAEMON_TRUSTED_PASSWORD_2 = 'somepassword' #COINDAEMON_TRUSTED_PASSWORD_2 = 'somepassword'
@ -50,11 +51,11 @@ DEBUG = False
LOGDIR = 'log/' LOGDIR = 'log/'
# Main application log file. # Main application log file.
LOGFILE = None # eg. 'stratum.log' LOGFILE = None # eg. 'stratum.log'
LOGLEVEL = 'DEBUG' LOGLEVEL = 'DEBUG'
# Logging Rotation can be enabled with the following settings # Logging Rotation can be enabled with the following settings
# It if not enabled here, you can set up logrotate to rotate the files. # It if not enabled here, you can set up logrotate to rotate the files.
# For built in log rotation set LOG_ROTATION = True and configrue the variables # For built in log rotation set LOG_ROTATION = True and configure the variables
LOG_ROTATION = True LOG_ROTATION = True
LOG_SIZE = 10485760 # Rotate every 10M LOG_SIZE = 10485760 # Rotate every 10M
LOG_RETENTION = 10 # Keep 10 Logs LOG_RETENTION = 10 # Keep 10 Logs
@ -65,7 +66,6 @@ LOG_RETENTION = 10 # Keep 10 Logs
THREAD_POOL_SIZE = 300 THREAD_POOL_SIZE = 300
# ******************** TRANSPORTS ********************* # ******************** TRANSPORTS *********************
# Hostname or external IP to expose # Hostname or external IP to expose
HOSTNAME = 'localhost' HOSTNAME = 'localhost'
@ -87,9 +87,8 @@ LISTEN_WSS_TRANSPORT = None
PASSWORD_SALT = 'some_crazy_string' PASSWORD_SALT = 'some_crazy_string'
# ******************** Database ********************* # ******************** Database *********************
DATABASE_DRIVER = 'mysql' # Options: none, sqlite, postgresql or mysql
DATABASE_DRIVER = 'mysql' # Options: none, sqlite, postgresql or mysql DATABASE_EXTEND = False # SQLite and PGSQL Only!
DATABASE_EXTEND = False # SQLite and PGSQL Only!
# SQLite # SQLite
DB_SQLITE_FILE = 'pooldb.sqlite' DB_SQLITE_FILE = 'pooldb.sqlite'
@ -104,95 +103,94 @@ DB_MYSQL_HOST = 'localhost'
DB_MYSQL_DBNAME = 'pooldb' DB_MYSQL_DBNAME = 'pooldb'
DB_MYSQL_USER = 'pooldb' DB_MYSQL_USER = 'pooldb'
DB_MYSQL_PASS = '**empty**' DB_MYSQL_PASS = '**empty**'
DB_MYSQL_PORT = 3306 # Default port for MySQL DB_MYSQL_PORT = 3306 # Default port for MySQL
# ******************** Adv. DB Settings ********************* # ******************** Adv. DB Settings *********************
# Don't change these unless you know what you are doing # Don't change these unless you know what you are doing
DB_LOADER_CHECKTIME = 15 # How often we check to see if we should run the loader DB_LOADER_CHECKTIME = 15 # How often we check to see if we should run the loader
DB_LOADER_REC_MIN = 10 # Min Records before the bulk loader fires DB_LOADER_REC_MIN = 10 # Min Records before the bulk loader fires
DB_LOADER_REC_MAX = 50 # Max Records the bulk loader will commit at a time DB_LOADER_REC_MAX = 50 # Max Records the bulk loader will commit at a time
DB_LOADER_FORCE_TIME = 300 # How often the cache should be flushed into the DB regardless of size. DB_LOADER_FORCE_TIME = 300 # How often the cache should be flushed into the DB regardless of size.
DB_STATS_AVG_TIME = 300 # When using the DATABASE_EXTEND option, average speed over X sec DB_STATS_AVG_TIME = 300 # When using the DATABASE_EXTEND option, average speed over X sec
# Note: this is also how often it updates # Note: this is also how often it updates
DB_USERCACHE_TIME = 600 # How long the usercache is good for before we refresh DB_USERCACHE_TIME = 600 # How long the usercache is good for before we refresh
# ******************** Pool Settings ********************* # ******************** Pool Settings *********************
# User Auth Options # User Auth Options
USERS_AUTOADD = False # Automatically add users to db when they connect. USERS_AUTOADD = False # Automatically add users to database when they connect.
# This basically disables User Auth for the pool. # This basically disables User Auth for the pool.
USERS_CHECK_PASSWORD = False # Check the workers password? (Many pools don't) USERS_CHECK_PASSWORD = False # Check the workers password? (Many pools don't)
# Transaction Settings # Transaction Settings
COINBASE_EXTRAS = '/stratumPool/' # Extra Descriptive String to incorporate in solved blocks COINBASE_EXTRAS = '/stratumPool/' # Extra Descriptive String to incorporate in solved blocks
ALLOW_NONLOCAL_WALLET = False # Allow valid, but NON-Local wallet's ALLOW_NONLOCAL_WALLET = False # Allow valid, but NON-Local wallet's
# Coin Daemon communication polling settings (In Seconds) # Coin Daemon communication polling settings (In Seconds)
PREVHASH_REFRESH_INTERVAL = 5 # How often to check for new Blocks PREVHASH_REFRESH_INTERVAL = 5 # How often to check for new Blocks
# If using the blocknotify script (recommended) set = to MERKLE_REFRESH_INTERVAL # If using the blocknotify script (recommended) set = to MERKLE_REFRESH_INTERVAL
# (No reason to poll if we're getting pushed notifications) # (No reason to poll if we're getting pushed notifications)
MERKLE_REFRESH_INTERVAL = 60 # How often check memorypool MERKLE_REFRESH_INTERVAL = 60 # How often check memorypool
# This effectively resets the template and incorporates new transactions. # This effectively resets the template and incorporates new transactions.
# This should be "slow" # This should be "slow"
INSTANCE_ID = 31 # Used for extranonce and needs to be 0-31 INSTANCE_ID = 31 # Used for extranonce and needs to be 0-31
# ******************** Pool Difficulty Settings ********************* # ******************** Pool Difficulty Settings *********************
VDIFF_X2_TYPE = True # powers of 2 e.g. 2,4,8,16,32,64,128,256,512,1024 VDIFF_X2_TYPE = True # Powers of 2 e.g. 2,4,8,16,32,64,128,256,512,1024
VDIFF_FLOAT = False # Use float difficulty VDIFF_FLOAT = False # Use float difficulty
# Pool Target (Base Difficulty) # Pool Target (Base Difficulty)
POOL_TARGET = 32 # Pool-wide difficulty target int >= 1 POOL_TARGET = 4 # Pool-wide difficulty target int >= 1
# Variable Difficulty Enable # Variable Difficulty Enable
VARIABLE_DIFF = True # Master variable difficulty enable VARIABLE_DIFF = True # Master variable difficulty enable
# Variable diff tuning variables # Variable diff tuning variables
#VARDIFF will start at the POOL_TARGET. It can go as low as the VDIFF_MIN and as high as min(VDIFF_MAX or Liteconin's difficulty) #VARDIFF will start at the POOL_TARGET. It can go as low as the VDIFF_MIN and as high as min(VDIFF_MAX or coindaemons difficulty)
USE_COINDAEMON_DIFF = False # Set the maximum difficulty to the litecoin difficulty. USE_COINDAEMON_DIFF = False # Set the maximum difficulty to the coindaemon difficulty.
DIFF_UPDATE_FREQUENCY = 86400 # Update the litecoin difficulty once a day for the VARDIFF maximum DIFF_UPDATE_FREQUENCY = 86400 # Update the coindaemon difficulty once a day for the VARDIFF maximum
VDIFF_MIN_TARGET = 16 # Minimum Target difficulty VDIFF_MIN_TARGET = 16 # Minimum target difficulty
VDIFF_MAX_TARGET = 1024 # Maximum Target difficulty VDIFF_MAX_TARGET = 1024 # Maximum target difficulty
VDIFF_TARGET_TIME = 15 # Target time per share (i.e. try to get 1 share per this many seconds) VDIFF_TARGET_TIME = 15 # Target time per share (i.e. try to get 1 share per this many seconds)
VDIFF_RETARGET_TIME = 120 # Check to see if we should retarget this often VDIFF_RETARGET_TIME = 120 # Check to see if we should retarget this often
VDIFF_VARIANCE_PERCENT = 30 # Allow average time to very this % from target without retarget VDIFF_VARIANCE_PERCENT = 30 # Allow average time to very this % from target without retarget
# Allow external setting of worker difficulty, checks pool_worker table datarow[6] position for target difficulty # Allow external setting of worker difficulty, checks pool_worker table datarow[6] position for target difficulty
# if present or else defaults to pool target, over rides all other difficulty settings, no checks are made # if present or else defaults to pool target, over rides all other difficulty settings, no checks are made
#for min or max limits this sould be done by your front end software # for min or max limits this should be done by your front end software
ALLOW_EXTERNAL_DIFFICULTY = False ALLOW_EXTERNAL_DIFFICULTY = False
#### Advanced Option ##### #### Advanced Option #####
# For backwards compatibility, we send the scrypt hash to the solutions column in the shares table # For backwards compatibility, we send the scrypt hash to the solutions column in the shares table
# For block confirmation, we have an option to send the block hash in # For block confirmation, we have an option to send the block hash in
# Please make sure your front end is compatible with the block hash in the solutions table. # Please make sure your front end is compatible with the block hash in the solutions table.
# For People using the MPOS frontend enabling this is recommended. It allows the frontend to compare the block hash to the coin daemon reducing the liklihood of missing share error's for blocks # For People using the MPOS frontend enabling this is recommended. It allows the frontend to compare the block hash to the coin daemon reducing the likelihood of missing share error's for blocks
SOLUTION_BLOCK_HASH = True # If enabled, enter the block hash. If false enter the scrypt/sha hash into the shares table SOLUTION_BLOCK_HASH = True # If enabled, enter the block hash. If false enter the scrypt/sha hash into the shares table
#Pass scrypt hash to submit block check. #Pass scrypt hash to submit block check.
#Use if submit block is returning errors and marking submitted blocks invaild upstream, but the submitted blocks are being a accepted by the coin daemon into the block chain. #Use if submit block is returning errors and marking submitted blocks invalid upstream, but the submitted blocks are being a accepted by the coin daemon into the block chain.
BLOCK_CHECK_SCRYPT_HASH = False BLOCK_CHECK_SCRYPT_HASH = False
# ******************** Worker Ban Options ********************* # ******************** Worker Ban Options *********************
ENABLE_WORKER_BANNING = True # enable/disable temporary worker banning ENABLE_WORKER_BANNING = True # Enable/disable temporary worker banning
WORKER_CACHE_TIME = 600 # How long the worker stats cache is good before we check and refresh WORKER_CACHE_TIME = 600 # How long the worker stats cache is good before we check and refresh
WORKER_BAN_TIME = 300 # How long we temporarily ban worker WORKER_BAN_TIME = 300 # How long we temporarily ban worker
INVALID_SHARES_PERCENT = 50 # Allow average invalid shares vary this % before we ban INVALID_SHARES_PERCENT = 50 # Allow average invalid shares vary this % before we ban
# ******************** E-Mail Notification Settings ********************* # ******************** E-Mail Notification Settings *********************
NOTIFY_EMAIL_TO = '' # Where to send Start/Found block notifications NOTIFY_EMAIL_TO = '' # Where to send Start/Found block notifications
NOTIFY_EMAIL_TO_DEADMINER = '' # Where to send dead miner notifications NOTIFY_EMAIL_TO_DEADMINER = '' # Where to send dead miner notifications
NOTIFY_EMAIL_FROM = 'root@localhost' # Sender address NOTIFY_EMAIL_FROM = 'root@localhost' # Sender address
NOTIFY_EMAIL_SERVER = 'localhost' # E-Mail Sender NOTIFY_EMAIL_SERVER = 'localhost' # E-Mail sender
NOTIFY_EMAIL_USERNAME = '' # E-Mail server SMTP Logon NOTIFY_EMAIL_USERNAME = '' # E-Mail server SMTP logon
NOTIFY_EMAIL_PASSWORD = '' NOTIFY_EMAIL_PASSWORD = ''
NOTIFY_EMAIL_USETLS = True NOTIFY_EMAIL_USETLS = True
#### Memcache #### # ******************** Memcache Settings *********************
# Memcahce is a requirement. Enter the settings below # Memcahce is a requirement. Enter the settings below
MEMCACHE_HOST = "localhost" # hostname or IP that runs memcached MEMCACHE_HOST = "localhost" # Hostname or IP that runs memcached
MEMCACHE_PORT = 11211 # Port MEMCACHE_PORT = 11211 # Port
MEMCACHE_TIMEOUT = 900 # Key timeout MEMCACHE_TIMEOUT = 900 # Key timeout
MEMCACHE_PREFIX = "stratum_" # Prefix for keys MEMCACHE_PREFIX = "stratum_" # Prefix for keys

View File

@ -3,7 +3,7 @@
# Add conf directory to python path. # Add conf directory to python path.
# Configuration file is standard python module. # Configuration file is standard python module.
import os, sys import os, sys
sys.path = [os.path.join(os.getcwd(), 'conf'),os.path.join(os.getcwd(), 'externals', 'stratum-mining-proxy'),] + sys.path sys.path = [os.path.join(os.getcwd(), 'conf'),os.path.join(os.getcwd(), '.'),os.path.join(os.getcwd(), 'externals', 'stratum-mining-proxy'),] + sys.path
from twisted.internet import defer from twisted.internet import defer
from twisted.application.service import Application, IProcess from twisted.application.service import Application, IProcess

View File

@ -14,14 +14,15 @@ log = lib.logger.get_logger('bitcoin_rpc')
class BitcoinRPC(object): class BitcoinRPC(object):
def __init__(self, host, port, username, password): def __init__(self, host, port, username, password):
log.debug("Got to Bitcoin RPC") log.debug("Got to Bitcoin RPC")
self.bitcoin_url = 'http://%s:%d' % (host, port) self.bitcoin_url = 'http://%s:%d' % (host, port)
self.credentials = base64.b64encode("%s:%s" % (username, password)) self.credentials = base64.b64encode("%s:%s" % (username, password))
self.headers = { self.headers = {
'Content-Type': 'text/json', 'Content-Type': 'text/json',
'Authorization': 'Basic %s' % self.credentials, 'Authorization': 'Basic %s' % self.credentials,
} }
client.HTTPClientFactory.noisy = False client.HTTPClientFactory.noisy = False
self.has_submitblock = False
def _call_raw(self, data): def _call_raw(self, data):
client.Headers client.Headers
@ -41,25 +42,82 @@ class BitcoinRPC(object):
})) }))
@defer.inlineCallbacks @defer.inlineCallbacks
def submitblock(self, block_hex, hash_hex): def check_submitblock(self):
# Try submitblock if that fails, go to getblocktemplate
try: try:
log.debug("Submitting Block with Submit Block ") log.info("Checking for submitblock")
log.debug([block_hex,]) resp = (yield self._call('submitblock', []))
resp = (yield self._call('submitblock', [block_hex,])) self.has_submitblock = True
except Exception: except Exception as e:
try: if (str(e) == "404 Not Found"):
log.exception("Submit Block Failed, does the coind have submitblock?") log.debug("No submitblock detected.")
log.exception("Trying GetBlockTemplate") self.has_submitblock = False
resp = (yield self._call('getblocktemplate', [{'mode': 'submit', 'data': block_hex}])) elif (str(e) == "500 Internal Server Error"):
except Exception as e: log.debug("submitblock detected.")
log.exception("Both SubmitBlock and GetBlockTemplate failed. Problem Submitting block %s" % str(e)) self.has_submitblock = True
log.exception("Try Enabling TX Messages in config.py!") else:
raise log.debug("unknown submitblock check result.")
self.has_submitblock = True
finally:
defer.returnValue(self.has_submitblock)
@defer.inlineCallbacks
def submitblock(self, block_hex, hash_hex, scrypt_hex):
#try 5 times? 500 Internal Server Error could mean random error or that TX messages setting is wrong
attempts = 0
while True:
attempts += 1
if self.has_submitblock == True:
try:
log.debug("Submitting Block with submitblock: attempt #"+str(attempts))
log.debug([block_hex,])
resp = (yield self._call('submitblock', [block_hex,]))
log.debug("SUBMITBLOCK RESULT: %s", resp)
break
except Exception as e:
if attempts > 4:
log.exception("submitblock failed. Problem Submitting block %s" % str(e))
log.exception("Try Enabling TX Messages in config.py!")
raise
else:
continue
elif self.has_submitblock == False:
try:
log.debug("Submitting Block with getblocktemplate submit: attempt #"+str(attempts))
log.debug([block_hex,])
resp = (yield self._call('getblocktemplate', [{'mode': 'submit', 'data': block_hex}]))
break
except Exception as e:
if attempts > 4:
log.exception("getblocktemplate submit failed. Problem Submitting block %s" % str(e))
log.exception("Try Enabling TX Messages in config.py!")
raise
else:
continue
else: # self.has_submitblock = None; unable to detect submitblock, try both
try:
log.debug("Submitting Block with submitblock")
log.debug([block_hex,])
resp = (yield self._call('submitblock', [block_hex,]))
break
except Exception as e:
try:
log.exception("submitblock Failed, does the coind have submitblock?")
log.exception("Trying GetBlockTemplate")
resp = (yield self._call('getblocktemplate', [{'mode': 'submit', 'data': block_hex}]))
break
except Exception as e:
if attempts > 4:
log.exception("submitblock failed. Problem Submitting block %s" % str(e))
log.exception("Try Enabling TX Messages in config.py!")
raise
else:
continue
if json.loads(resp)['result'] == None: if json.loads(resp)['result'] == None:
# make sure the block was created. # make sure the block was created.
defer.returnValue((yield self.blockexists(hash_hex))) log.info("CHECKING FOR BLOCK AFTER SUBMITBLOCK")
defer.returnValue((yield self.blockexists(hash_hex, scrypt_hex)))
else: else:
defer.returnValue(False) defer.returnValue(False)
@ -70,22 +128,34 @@ class BitcoinRPC(object):
@defer.inlineCallbacks @defer.inlineCallbacks
def getblocktemplate(self): def getblocktemplate(self):
resp = (yield self._call('getblocktemplate', [{}])) try:
defer.returnValue(json.loads(resp)['result']) resp = (yield self._call('getblocktemplate', [{}]))
defer.returnValue(json.loads(resp)['result'])
# if internal server error try getblocktemplate without empty {} # ppcoin
except Exception as e:
if (str(e) == "500 Internal Server Error"):
resp = (yield self._call('getblocktemplate', []))
defer.returnValue(json.loads(resp)['result'])
else:
raise
@defer.inlineCallbacks @defer.inlineCallbacks
def prevhash(self): def prevhash(self):
resp = (yield self._call('getwork', []))
try: try:
defer.returnValue(json.loads(resp)['result']['data'][8:72]) resp = (yield self._call('getblocktemplate', [{}]))
defer.returnValue(json.loads(resp)['result']['previousblockhash'])
except Exception as e: except Exception as e:
log.exception("Cannot decode prevhash %s" % str(e)) if (str(e) == "500 Internal Server Error"):
raise resp = (yield self._call('getblocktemplate', []))
defer.returnValue(json.loads(resp)['result']['previousblockhash'])
else:
log.exception("Cannot decode prevhash %s" % str(e))
raise
@defer.inlineCallbacks @defer.inlineCallbacks
def validateaddress(self, address): def validateaddress(self, address):
resp = (yield self._call('validateaddress', [address,])) resp = (yield self._call('validateaddress', [address,]))
defer.returnValue(json.loads(resp)['result']) defer.returnValue(json.loads(resp)['result'])
@defer.inlineCallbacks @defer.inlineCallbacks
def getdifficulty(self): def getdifficulty(self):
@ -93,12 +163,59 @@ class BitcoinRPC(object):
defer.returnValue(json.loads(resp)['result']) defer.returnValue(json.loads(resp)['result'])
@defer.inlineCallbacks @defer.inlineCallbacks
def blockexists(self, hash_hex): def blockexists(self, hash_hex, scrypt_hex):
resp = (yield self._call('getblock', [hash_hex,])) valid_hash = None
if "hash" in json.loads(resp)['result'] and json.loads(resp)['result']['hash'] == hash_hex: blockheight = None
log.debug("Block Confirmed: %s" % hash_hex) # try both hash_hex and scrypt_hex to find block
try:
resp = (yield self._call('getblock', [hash_hex,]))
result = json.loads(resp)['result']
if "hash" in result and result['hash'] == hash_hex:
log.debug("Block found: %s" % hash_hex)
valid_hash = hash_hex
if "height" in result:
blockheight = result['height']
else:
defer.returnValue(True)
else:
log.info("Cannot find block for %s" % hash_hex)
defer.returnValue(False)
except Exception as e:
try:
resp = (yield self._call('getblock', [scrypt_hex,]))
result = json.loads(resp)['result']
if "hash" in result and result['hash'] == scrypt_hex:
valid_hash = scrypt_hex
log.debug("Block found: %s" % scrypt_hex)
if "height" in result:
blockheight = result['height']
else:
defer.returnValue(True)
else:
log.info("Cannot find block for %s" % scrypt_hex)
defer.returnValue(False)
except Exception as e:
log.info("Cannot find block for hash_hex %s or scrypt_hex %s" % hash_hex, scrypt_hex)
defer.returnValue(False)
#after we've found the block, check the block with that height in the blockchain to see if hashes match
try:
log.debug("checking block hash against hash of block height: %s", blockheight)
resp = (yield self._call('getblockhash', [blockheight,]))
hash = json.loads(resp)['result']
log.debug("hash of block of height %s: %s", blockheight, hash)
if hash == valid_hash:
log.debug("Block confirmed: hash of block matches hash of blockheight")
defer.returnValue(True)
else:
log.debug("Block invisible: hash of block does not match hash of blockheight")
defer.returnValue(False)
except Exception as e:
# cannot get blockhash from height; block was created, so return true
defer.returnValue(True) defer.returnValue(True)
else: else:
log.info("Cannot find block for %s" % hash_hex) log.info("Cannot find block for %s" % hash_hex)
defer.returnValue(False) defer.returnValue(False)

View File

@ -19,115 +19,123 @@ from lib.bitcoin_rpc import BitcoinRPC
class BitcoinRPCManager(object): class BitcoinRPCManager(object):
def __init__(self): def __init__(self):
log.debug("Got to Bitcoin RPC Manager") log.debug("Got to Bitcoin RPC Manager")
self.conns = {} self.conns = {}
self.conns[0] = BitcoinRPC(settings.COINDAEMON_TRUSTED_HOST, self.conns[0] = BitcoinRPC(settings.COINDAEMON_TRUSTED_HOST,
settings.COINDAEMON_TRUSTED_PORT, settings.COINDAEMON_TRUSTED_PORT,
settings.COINDAEMON_TRUSTED_USER, settings.COINDAEMON_TRUSTED_USER,
settings.COINDAEMON_TRUSTED_PASSWORD) settings.COINDAEMON_TRUSTED_PASSWORD)
self.curr_conn = 0 self.curr_conn = 0
for x in range (1, 99): for x in range (1, 99):
if hasattr(settings, 'COINDAEMON_TRUSTED_HOST_' + str(x)) and hasattr(settings, 'COINDAEMON_TRUSTED_PORT_' + str(x)) and hasattr(settings, 'COINDAEMON_TRUSTED_USER_' + str(x)) and hasattr(settings, 'COINDAEMON_TRUSTED_PASSWORD_' + str(x)): if hasattr(settings, 'COINDAEMON_TRUSTED_HOST_' + str(x)) and hasattr(settings, 'COINDAEMON_TRUSTED_PORT_' + str(x)) and hasattr(settings, 'COINDAEMON_TRUSTED_USER_' + str(x)) and hasattr(settings, 'COINDAEMON_TRUSTED_PASSWORD_' + str(x)):
self.conns[len(self.conns)] = BitcoinRPC(settings.__dict__['COINDAEMON_TRUSTED_HOST_' + str(x)], self.conns[len(self.conns)] = BitcoinRPC(settings.__dict__['COINDAEMON_TRUSTED_HOST_' + str(x)],
settings.__dict__['COINDAEMON_TRUSTED_PORT_' + str(x)], settings.__dict__['COINDAEMON_TRUSTED_PORT_' + str(x)],
settings.__dict__['COINDAEMON_TRUSTED_USER_' + str(x)], settings.__dict__['COINDAEMON_TRUSTED_USER_' + str(x)],
settings.__dict__['COINDAEMON_TRUSTED_PASSWORD_' + str(x)]) settings.__dict__['COINDAEMON_TRUSTED_PASSWORD_' + str(x)])
def add_connection(self, host, port, user, password): def add_connection(self, host, port, user, password):
# TODO: Some string sanity checks # TODO: Some string sanity checks
self.conns[len(self.conns)] = BitcoinRPC(host, port, user, password) self.conns[len(self.conns)] = BitcoinRPC(host, port, user, password)
def next_connection(self): def next_connection(self):
time.sleep(1) time.sleep(1)
if len(self.conns) <= 1: if len(self.conns) <= 1:
log.error("Problem with Pool 0 -- NO ALTERNATE POOLS!!!") log.error("Problem with Pool 0 -- NO ALTERNATE POOLS!!!")
time.sleep(4) time.sleep(4)
return
log.error("Problem with Pool %i Switching to Next!" % (self.curr_conn) )
self.curr_conn = self.curr_conn + 1
if self.curr_conn >= len(self.conns):
self.curr_conn = 0 self.curr_conn = 0
return
log.error("Problem with Pool %i Switching to Next!" % (self.curr_conn) )
self.curr_conn = self.curr_conn + 1
if self.curr_conn >= len(self.conns):
self.curr_conn = 0
@defer.inlineCallbacks @defer.inlineCallbacks
def check_height(self): def check_height(self):
while True: while True:
try: try:
resp = (yield self.conns[self.curr_conn]._call('getinfo', [])) resp = (yield self.conns[self.curr_conn]._call('getinfo', []))
break break
except: except:
log.error("Check Height -- Pool %i Down!" % (self.curr_conn) ) log.error("Check Height -- Pool %i Down!" % (self.curr_conn) )
self.next_connection() self.next_connection()
curr_height = json.loads(resp)['result']['blocks'] curr_height = json.loads(resp)['result']['blocks']
log.debug("Check Height -- Current Pool %i : %i" % (self.curr_conn,curr_height) ) log.debug("Check Height -- Current Pool %i : %i" % (self.curr_conn,curr_height) )
for i in self.conns: for i in self.conns:
if i == self.curr_conn: if i == self.curr_conn:
continue continue
try: try:
resp = (yield self.conns[i]._call('getinfo', [])) resp = (yield self.conns[i]._call('getinfo', []))
except: except:
log.error("Check Height -- Pool %i Down!" % (i,) ) log.error("Check Height -- Pool %i Down!" % (i,) )
continue continue
height = json.loads(resp)['result']['blocks'] height = json.loads(resp)['result']['blocks']
log.debug("Check Height -- Pool %i : %i" % (i,height) ) log.debug("Check Height -- Pool %i : %i" % (i,height) )
if height > curr_height: if height > curr_height:
self.curr_conn = i self.curr_conn = i
defer.returnValue(True)
defer.returnValue(True)
def _call_raw(self, data): def _call_raw(self, data):
while True: while True:
try: try:
return self.conns[self.curr_conn]._call_raw(data) return self.conns[self.curr_conn]._call_raw(data)
except: except:
self.next_connection() self.next_connection()
def _call(self, method, params): def _call(self, method, params):
while True: while True:
try: try:
return self.conns[self.curr_conn]._call(method,params) return self.conns[self.curr_conn]._call(method,params)
except: except:
self.next_connection() self.next_connection()
def check_submitblock(self):
while True:
try:
return self.conns[self.curr_conn].check_submitblock()
except:
self.next_connection()
def submitblock(self, block_hex, hash_hex): def submitblock(self, block_hex, hash_hex, scrypt_hex):
while True: while True:
try: try:
return self.conns[self.curr_conn].submitblock(block_hex, hash_hex) return self.conns[self.curr_conn].submitblock(block_hex, hash_hex, scrypt_hex)
except: except:
self.next_connection() self.next_connection()
def getinfo(self): def getinfo(self):
while True: while True:
try: try:
return self.conns[self.curr_conn].getinfo() return self.conns[self.curr_conn].getinfo()
except: except:
self.next_connection() self.next_connection()
def getblocktemplate(self): def getblocktemplate(self):
while True: while True:
try: try:
return self.conns[self.curr_conn].getblocktemplate() return self.conns[self.curr_conn].getblocktemplate()
except: except:
self.next_connection() self.next_connection()
def prevhash(self): def prevhash(self):
self.check_height() self.check_height()
while True: while True:
try: try:
return self.conns[self.curr_conn].prevhash() return self.conns[self.curr_conn].prevhash()
except: except:
self.next_connection() self.next_connection()
def validateaddress(self, address): def validateaddress(self, address):
while True: while True:
try: try:
return self.conns[self.curr_conn].validateaddress(address) return self.conns[self.curr_conn].validateaddress(address)
except: except:
self.next_connection() self.next_connection()
def getdifficulty(self): def getdifficulty(self):
while True: while True:
try: try:
return self.conns[self.curr_conn].getdifficulty() return self.conns[self.curr_conn].getdifficulty()
except: except:
self.next_connection() self.next_connection()

View File

@ -27,8 +27,8 @@ class BlockTemplate(halfnode.CBlock):
coinbase_transaction_class = CoinbaseTransaction coinbase_transaction_class = CoinbaseTransaction
def __init__(self, timestamper, coinbaser, job_id): def __init__(self, timestamper, coinbaser, job_id):
log.debug("Got To Block_template.py") log.debug("Got To Block_template.py")
log.debug("Got To Block_template.py") log.debug("Got To Block_template.py")
super(BlockTemplate, self).__init__() super(BlockTemplate, self).__init__()
self.job_id = job_id self.job_id = job_id
@ -56,17 +56,20 @@ class BlockTemplate(halfnode.CBlock):
#txhashes = [None] + [ binascii.unhexlify(t['hash']) for t in data['transactions'] ] #txhashes = [None] + [ binascii.unhexlify(t['hash']) for t in data['transactions'] ]
txhashes = [None] + [ util.ser_uint256(int(t['hash'], 16)) for t in data['transactions'] ] txhashes = [None] + [ util.ser_uint256(int(t['hash'], 16)) for t in data['transactions'] ]
mt = merkletree.MerkleTree(txhashes) mt = merkletree.MerkleTree(txhashes)
if settings.COINDAEMON_Reward == 'POW': if settings.COINDAEMON_Reward == 'POW':
coinbase = CoinbaseTransactionPOW(self.timestamper, self.coinbaser, data['coinbasevalue'], data['coinbaseaux']['flags'], data['height'], coinbase = CoinbaseTransactionPOW(self.timestamper, self.coinbaser, data['coinbasevalue'],
settings.COINBASE_EXTRAS) data['coinbaseaux']['flags'], data['height'],
else: settings.COINBASE_EXTRAS)
coinbase = CoinbaseTransactionPOS(self.timestamper, self.coinbaser, data['coinbasevalue'], data['coinbaseaux']['flags'], data['height'], else:
settings.COINBASE_EXTRAS, data['curtime']) coinbase = CoinbaseTransactionPOS(self.timestamper, self.coinbaser, data['coinbasevalue'],
data['coinbaseaux']['flags'], data['height'],
settings.COINBASE_EXTRAS, data['curtime'])
self.height = data['height'] self.height = data['height']
self.nVersion = data['version'] self.nVersion = data['version']
self.hashPrevBlock = int(data['previousblockhash'], 16) self.hashPrevBlock = int(data['previousblockhash'], 16)
self.nBits = int(data['bits'], 16) self.nBits = int(data['bits'], 16)
self.hashMerkleRoot = 0 self.hashMerkleRoot = 0
self.nTime = 0 self.nTime = 0
self.nNonce = 0 self.nNonce = 0
@ -137,8 +140,12 @@ class BlockTemplate(halfnode.CBlock):
r = struct.pack(">i", self.nVersion) r = struct.pack(">i", self.nVersion)
r += self.prevhash_bin r += self.prevhash_bin
r += util.ser_uint256_be(merkle_root_int) r += util.ser_uint256_be(merkle_root_int)
r += ntime_bin if settings.COINDAEMON_ALGO == 'riecoin':
r += struct.pack(">I", self.nBits) r += struct.pack(">I", self.nBits)
r += ntime_bin
else:
r += ntime_bin
r += struct.pack(">I", self.nBits)
r += nonce_bin r += nonce_bin
return r return r

View File

@ -18,7 +18,7 @@ class BlockUpdater(object):
def __init__(self, registry, bitcoin_rpc): def __init__(self, registry, bitcoin_rpc):
log.debug("Got To Block Updater") log.debug("Got To Block Updater")
self.bitcoin_rpc = bitcoin_rpc self.bitcoin_rpc = bitcoin_rpc
self.registry = registry self.registry = registry
self.clock = None self.clock = None
self.schedule() self.schedule()
@ -46,7 +46,7 @@ class BlockUpdater(object):
current_prevhash = None current_prevhash = None
log.info("Checking for new block.") log.info("Checking for new block.")
prevhash = util.reverse_hash((yield self.bitcoin_rpc.prevhash())) prevhash = util.reverse_hash((yield self.bitcoin_rpc.prevhash()))
if prevhash and prevhash != current_prevhash: if prevhash and prevhash != current_prevhash:
log.info("New block! Prevhash: %s" % prevhash) log.info("New block! Prevhash: %s" % prevhash)
update = True update = True

View File

@ -13,72 +13,55 @@ class SimpleCoinbaser(object):
for all generated blocks.''' for all generated blocks.'''
def __init__(self, bitcoin_rpc, address): def __init__(self, bitcoin_rpc, address):
log.debug("Got to coinbaser") log.debug("Got to coinbaser")
# Fire Callback when the coinbaser is ready # Fire Callback when the coinbaser is ready
self.on_load = defer.Deferred() self.on_load = defer.Deferred()
self.address = address self.address = address
self.is_valid = False self.is_valid = False
self.bitcoin_rpc = bitcoin_rpc self.bitcoin_rpc = bitcoin_rpc
self._validate() self._validate()
def _validate(self): def _validate(self):
d = self.bitcoin_rpc.validateaddress(self.address) d = self.bitcoin_rpc.validateaddress(self.address)
if settings.COINDAEMON_Reward == 'POW': d.addCallback(self.address_check)
d.addCallback(self._POW_address_check) d.addErrback(self._failure)
else: d.addCallback(self._POS_address_check)
d.addErrback(self._failure)
def _POW_address_check(self, result): def address_check(self, result):
if result['isvalid'] and result['ismine']:
self.is_valid = True
log.info("Coinbase address '%s' is valid" % self.address)
if result['isvalid'] == True:
log.debug("Is Valid = %s" % result['isvalid'])
log.debug("Address = %s " % result['address'])
log.debug("PubKey = %s " % result['pubkey'])
log.debug("Is Compressed = %s " % result['iscompressed'])
log.debug("Account = %s " % result['account'])
self.address = result['address']
if not self.on_load.called:
self.on_load.callback(True)
elif result['isvalid'] and settings.ALLOW_NONLOCAL_WALLET == True :
self.is_valid = True
log.warning("!!! Coinbase address '%s' is valid BUT it is not local" % self.address)
if not self.on_load.called:
self.on_load.callback(True)
else:
self.is_valid = False
log.error("Coinbase address '%s' is NOT valid!" % self.address)
def _POS_address_check(self, result):
if result['isvalid'] and result['ismine']: if result['isvalid'] and result['ismine']:
self.is_valid = True self.is_valid = True
log.info("Coinbase address '%s' is valid" % self.address) log.info("Coinbase address '%s' is valid" % self.address)
if result['isvalid'] == True: if 'address' in result:
log.debug("Is Valid = %s" % result['isvalid']) log.debug("Address = %s " % result['address'])
log.debug("Address = %s " % result['address']) self.address = result['address']
log.debug("PubKey = %s " % result['pubkey']) if 'pubkey' in result:
log.debug("Is Compressed = %s " % result['iscompressed']) log.debug("PubKey = %s " % result['pubkey'])
log.debug("Account = %s " % result['account']) self.pubkey = result['pubkey']
self.pubkey = result['pubkey'] if 'iscompressed' in result:
if not self.on_load.called: log.debug("Is Compressed = %s " % result['iscompressed'])
self.on_load.callback(True) if 'account' in result:
log.debug("Account = %s " % result['account'])
if not self.on_load.called:
self.address = result['address']
self.on_load.callback(True)
elif result['isvalid'] and settings.ALLOW_NONLOCAL_WALLET == True : elif result['isvalid'] and settings.ALLOW_NONLOCAL_WALLET == True :
self.is_valid = True self.is_valid = True
log.warning("!!! Coinbase address '%s' is valid BUT it is not local" % self.address) log.warning("!!! Coinbase address '%s' is valid BUT it is not local" % self.address)
self.pubkey = result['pubkey'] if 'pubkey' in result:
log.debug("PubKey = %s " % result['pubkey'])
self.pubkey = result['pubkey']
if 'account' in result:
log.debug("Account = %s " % result['account'])
if not self.on_load.called: if not self.on_load.called:
self.on_load.callback(True) self.on_load.callback(True)
else: else:
self.is_valid = False self.is_valid = False
log.error("Coinbase address '%s' is NOT valid!" % self.address)
#def on_new_block(self): #def on_new_block(self):
# pass # pass
#def on_new_template(self): #def on_new_template(self):
@ -88,10 +71,11 @@ class SimpleCoinbaser(object):
raise raise
def get_script_pubkey(self): def get_script_pubkey(self):
if settings.COINDAEMON_Reward == 'POW': if settings.COINDAEMON_Reward == 'POW':
self._validate() self._validate()
return util.script_to_address(self.address) return util.script_to_address(self.address)
else: return util.script_to_pubkey(self.pubkey) else:
return util.script_to_pubkey(self.pubkey)
def get_coinbase_data(self): def get_coinbase_data(self):
return '' return ''

View File

@ -40,7 +40,7 @@ class CoinbaseTransactionPOW(halfnode.CTransaction):
tx_out.scriptPubKey = coinbaser.get_script_pubkey() tx_out.scriptPubKey = coinbaser.get_script_pubkey()
if settings.COINDAEMON_TX == 'yes': if settings.COINDAEMON_TX == 'yes':
self.strTxComment = "http://github.com/ahmedbodi/stratum-mining" self.strTxComment = "RanchiMall mining"
self.vin.append(tx_in) self.vin.append(tx_in)
self.vout.append(tx_out) self.vout.append(tx_out)
@ -88,7 +88,7 @@ class CoinbaseTransactionPOS(halfnode.CTransaction):
self.nTime = ntime self.nTime = ntime
if settings.COINDAEMON_SHA256_TX == 'yes': if settings.COINDAEMON_SHA256_TX == 'yes':
self.strTxComment = "http://github.com/ahmedbodi/stratum-mining" self.strTxComment = "RanchiMall mining"
self.vin.append(tx_in) self.vin.append(tx_in)
self.vout.append(tx_out) self.vout.append(tx_out)

View File

@ -109,7 +109,7 @@ COINDAEMON_TRUSTED_PASSWORD = '***somepassword***'
# Until AutoReward Selecting Code has been implemented the below options are us$ # Until AutoReward Selecting Code has been implemented the below options are us$
# For Reward type there is POW and POS. please ensure you choose the currect ty$ # For Reward type there is POW and POS. please ensure you choose the currect ty$
# For SHA256 PoS Coins which support TX Messages please enter yes in the TX sel$ # For SHA256 PoS Coins which support TX Messages please enter yes in the TX sel$
COINDAEMON_ALGO = 'scrypt' COINDAEMON_ALGO = 'riecoin'
COINDAEMON_Reward = 'POW' COINDAEMON_Reward = 'POW'
COINDAEMON_SHA256_TX = 'yes' COINDAEMON_SHA256_TX = 'yes'

View File

@ -235,38 +235,51 @@ class CBlock(object):
self.nNonce = 0 self.nNonce = 0
self.vtx = [] self.vtx = []
self.sha256 = None self.sha256 = None
if settings.COINDAEMON_ALGO == 'scrypt': if settings.COINDAEMON_ALGO == 'scrypt':
self.scrypt = None self.scrypt = None
elif settings.COINDAEMON_ALGO == 'quark': elif settings.COINDAEMON_ALGO == 'quark':
self.quark = None self.quark = None
else: pass elif settings.COINDAEMON_ALGO == 'riecoin':
if settings.COINDAEMON_Reward == 'POS': self.riecoin = None
self.signature = b"" else: pass
else: pass if settings.COINDAEMON_Reward == 'POS':
self.signature = b""
else: pass
def deserialize(self, f): def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0] self.nVersion = struct.unpack("<i", f.read(4))[0]
self.hashPrevBlock = deser_uint256(f) self.hashPrevBlock = deser_uint256(f)
self.hashMerkleRoot = deser_uint256(f) self.hashMerkleRoot = deser_uint256(f)
self.nTime = struct.unpack("<I", f.read(4))[0] if settings.COINDAEMON_ALGO == 'riecoin':
self.nBits = struct.unpack("<I", f.read(4))[0] self.nBits = struct.unpack("<I", f.read(4))[0]
self.nNonce = struct.unpack("<I", f.read(4))[0] self.nTime = struct.unpack("<II", f.read(8))[0]
self.nNonce = struct.unpack("<IIIIIIII", f.read(32))[0]
else:
self.nTime = struct.unpack("<I", f.read(4))[0]
self.nBits = struct.unpack("<I", f.read(4))[0]
self.nNonce = struct.unpack("<I", f.read(4))[0]
self.vtx = deser_vector(f, CTransaction) self.vtx = deser_vector(f, CTransaction)
if settings.COINDAEMON_Reward == 'POS': if settings.COINDAEMON_Reward == 'POS':
self.signature = deser_string(f) self.signature = deser_string(f)
else: pass else: pass
def serialize(self): def serialize(self):
r = [] r = []
r.append(struct.pack("<i", self.nVersion)) r.append(struct.pack("<i", self.nVersion))
r.append(ser_uint256(self.hashPrevBlock)) r.append(ser_uint256(self.hashPrevBlock))
r.append(ser_uint256(self.hashMerkleRoot)) r.append(ser_uint256(self.hashMerkleRoot))
r.append(struct.pack("<I", self.nTime)) if settings.COINDAEMON_ALGO == 'riecoin':
r.append(struct.pack("<I", self.nBits)) r.append(struct.pack("<I", self.nBits))
r.append(struct.pack("<I", self.nNonce)) r.append(struct.pack("<Q", self.nTime))
r.append(ser_uint256(self.nNonce))
else:
r.append(struct.pack("<I", self.nTime))
r.append(struct.pack("<I", self.nBits))
r.append(struct.pack("<I", self.nNonce))
r.append(ser_vector(self.vtx)) r.append(ser_vector(self.vtx))
if settings.COINDAEMON_Reward == 'POS': if settings.COINDAEMON_Reward == 'POS':
r.append(ser_string(self.signature)) r.append(ser_string(self.signature))
else: pass else: pass
return ''.join(r) return ''.join(r)
if settings.COINDAEMON_ALGO == 'scrypt': if settings.COINDAEMON_ALGO == 'scrypt':
@ -293,6 +306,18 @@ class CBlock(object):
r.append(struct.pack("<I", self.nNonce)) r.append(struct.pack("<I", self.nNonce))
self.quark = uint256_from_str(quark_hash.getPoWHash(''.join(r))) self.quark = uint256_from_str(quark_hash.getPoWHash(''.join(r)))
return self.quark return self.quark
elif settings.COINDAEMON_ALGO == 'riecoin':
def calc_riecoin(self):
if self.riecoin is None:
r = []
r.append(struct.pack("<i", self.nVersion))
r.append(ser_uint256(self.hashPrevBlock))
r.append(ser_uint256(self.hashMerkleRoot))
r.append(struct.pack("<I", self.nBits))
r.append(struct.pack("<Q", self.nTime))
sha256 = uint256_from_str(SHA256.new(SHA256.new(''.join(r)).digest()).digest())
self.riecoin = riecoinPoW( sha256, uint256_from_compact(self.nBits), self.nNonce )
return self.riecoin
else: else:
def calc_sha256(self): def calc_sha256(self):
if self.sha256 is None: if self.sha256 is None:
@ -308,22 +333,33 @@ class CBlock(object):
def is_valid(self): def is_valid(self):
if settings.COINDAEMON_ALGO == 'scrypt': if settings.COINDAEMON_ALGO == 'riecoin':
self.calc_scrypt() self.calc_riecoin()
elif settings.COINDAEMON_ALGO == 'quark': elif settings.COINDAEMON_ALGO == 'scrypt':
self.calc_quark() self.calc_scrypt()
else:
self.calc_sha256()
target = uint256_from_compact(self.nBits)
if settings.COINDAEMON_ALGO == 'scrypt':
if self.scrypt > target:
return false
elif settings.COINDAEMON_ALGO == 'quark': elif settings.COINDAEMON_ALGO == 'quark':
if self.quark > target: self.calc_quark()
return false else:
else: self.calc_sha256()
if self.sha256 > target:
return False if settings.COINDAEMON_ALGO == 'riecoin':
target = settings.POOL_TARGET
else:
target = uint256_from_compact(self.nBits)
if settings.COINDAEMON_ALGO == 'riecoin':
if self.riecoin < target:
return False
if settings.COINDAEMON_ALGO == 'scrypt':
if self.scrypt > target:
return False
elif settings.COINDAEMON_ALGO == 'quark':
if self.quark > target:
return False
else:
if self.sha256 > target:
return False
hashes = [] hashes = []
for tx in self.vtx: for tx in self.vtx:
tx.sha256 = None tx.sha256 = None

44
lib/ircbot.py.save Normal file
View File

@ -0,0 +1,44 @@
if args.irc_announce:
from twisted.words.protocols import irc
class IRCClient(irc.IRCClient):
nickname = 'xpool%02i' % (random.randrange(100),)
channel = net.ANNOUNCE_CHANNEL
def lineReceived(self, line):
if p2pool.DEBUG:
print repr(line)
irc.IRCClient.lineReceived(self, line)
def signedOn(self):
self.in_channel = False
irc.IRCClient.signedOn(self)
self.factory.resetDelay()
self.join(self.channel)
@defer.inlineCallbacks
def new_share(share):
if not self.in_channel:
return
if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
yield deferral.sleep(random.expovariate(1/60))
message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages):
self.say(self.channel, message)
self._remember_message(message)
self.watch_id = node.tracker.verified.added.watch(new_share)
self.recent_messages = []
def joined(self, channel):
self.in_channel = True
def left(self, channel):
self.in_channel = False
def _remember_message(self, message):
self.recent_messages.append(message)
while len(self.recent_messages) > 100:
self.recent_messages.pop(0)
def privmsg(self, user, channel, message):
if channel == self.channel:
self._remember_message(message)
def connectionLost(self, reason):
node.tracker.verified.added.unwatch(self.watch_id)
print 'IRC connection lost:', reason.getErrorMessage()
class IRCClientFactory(protocol.ReconnectingClientFactory):
protocol = IRCClient
reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())

View File

@ -9,35 +9,35 @@ log = stratum.logger.get_logger('Notify_Email')
class NOTIFY_EMAIL(): class NOTIFY_EMAIL():
def notify_start(self): def notify_start(self):
if settings.NOTIFY_EMAIL_TO != '': if settings.NOTIFY_EMAIL_TO != '':
self.send_email(settings.NOTIFY_EMAIL_TO,'Stratum Server Started','Stratum server has started!') self.send_email(settings.NOTIFY_EMAIL_TO,'Stratum Server Started','Stratum server has started!')
def notify_found_block(self,worker_name): def notify_found_block(self,worker_name):
if settings.NOTIFY_EMAIL_TO != '': if settings.NOTIFY_EMAIL_TO != '':
text = '%s on Stratum server found a block!' % worker_name text = '%s on Stratum server found a block!' % worker_name
self.send_email(settings.NOTIFY_EMAIL_TO,'Stratum Server Found Block',text) self.send_email(settings.NOTIFY_EMAIL_TO,'Stratum Server Found Block',text)
def notify_dead_coindaemon(self,worker_name): def notify_dead_coindaemon(self,worker_name):
if settings.NOTIFY_EMAIL_TO != '': if settings.NOTIFY_EMAIL_TO != '':
text = 'Coin Daemon Has Crashed Please Report' % worker_name text = 'Coin Daemon Has Crashed Please Report' % worker_name
self.send_email(settings.NOTIFY_EMAIL_TO,'Coin Daemon Crashed!',text) self.send_email(settings.NOTIFY_EMAIL_TO,'Coin Daemon Crashed!',text)
def send_email(self,to,subject,message): def send_email(self,to,subject,message):
msg = MIMEText(message) msg = MIMEText(message)
msg['Subject'] = subject msg['Subject'] = subject
msg['From'] = settings.NOTIFY_EMAIL_FROM msg['From'] = settings.NOTIFY_EMAIL_FROM
msg['To'] = to msg['To'] = to
try: try:
s = smtplib.SMTP(settings.NOTIFY_EMAIL_SERVER) s = smtplib.SMTP(settings.NOTIFY_EMAIL_SERVER)
if settings.NOTIFY_EMAIL_USERNAME != '': if settings.NOTIFY_EMAIL_USERNAME != '':
if settings.NOTIFY_EMAIL_USETLS: if settings.NOTIFY_EMAIL_USETLS:
s.ehlo() s.ehlo()
s.starttls() s.starttls()
s.ehlo() s.ehlo()
s.login(settings.NOTIFY_EMAIL_USERNAME, settings.NOTIFY_EMAIL_PASSWORD) s.login(settings.NOTIFY_EMAIL_USERNAME, settings.NOTIFY_EMAIL_PASSWORD)
s.sendmail(settings.NOTIFY_EMAIL_FROM,to,msg.as_string()) s.sendmail(settings.NOTIFY_EMAIL_FROM,to,msg.as_string())
s.quit() s.quit()
except smtplib.SMTPAuthenticationError as e: except smtplib.SMTPAuthenticationError as e:
log.error('Error sending Email: %s' % e[1]) log.error('Error sending Email: %s' % e[1])
except Exception as e: except Exception as e:
log.error('Error sending Email: %s' % e[0]) log.error('Error sending Email: %s' % e[0])

206
lib/skein.py Normal file
View File

@ -0,0 +1,206 @@
# /usr/bin/env python
# coding=utf-8
# Copyright 2010 Jonathan Bowman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Pure Python implementation of the Skein 512-bit hashing algorithm"""
import array
import binascii
import os
import struct
from threefish import (add64, bigint, bytes2words, Threefish512, words,
words2bytes, words_format, xrange,
zero_bytes, zero_words)
# An empty bytestring that behaves itself whether in Python 2 or 3
empty_bytes = array.array('B').tostring()
class Skein512(object):
"""Skein 512-bit hashing algorithm
The message to be hashed may be set as `msg` when initialized, or
passed in later using the ``update`` method.
Use `key` (a bytestring with arbitrary length) for MAC
functionality.
`block_type` will typically be "msg", but may also be one of:
"key", "nonce", "cfg_final", or "out_final". These will affect the
tweak value passed to the underlying Threefish block cipher. Again,
if you don't know which one to choose, "msg" is probably what you
want.
Example:
>>> Skein512("Hello, world!").hexdigest()
'8449f597f1764274f8bf4a03ead22e0404ea2dc63c8737629e6e282303aebfd5dd96f07e21ae2e7a8b2bdfadd445bd1d71dfdd9745c95b0eb05dc01f289ad765'
"""
block_size = 64
block_bits = 512
block_type = {'key': 0,
'nonce': 0x5400000000000000,
'msg': 0x7000000000000000,
'cfg_final': 0xc400000000000000,
'out_final': 0xff00000000000000}
def __init__(self, msg='', digest_bits=512, key=None,
block_type='msg'):
self.tf = Threefish512()
if key:
self.digest_bits = 512
self._start_new_type('key')
self.update(key)
self.tf.key = bytes2words(self.final(False))
self.digest_bits = digest_bits
self.digest_size = (digest_bits + 7) >> 3
self._start_new_type('cfg_final')
b = words2bytes((0x133414853,digest_bits,0,0,0,0,0,0))
self._process_block(b,32)
self._start_new_type(block_type)
if msg:
self.update(msg)
def _start_new_type(self, block_type):
"""Setup new tweak values and internal buffer.
Primarily for internal use.
"""
self.buf = empty_bytes
self.tf.tweak = words([0, self.block_type[block_type]])
def _process_block(self, block, byte_count_add):
"""Encrypt internal state using Threefish.
Primarily for internal use.
"""
block_len = len(block)
for i in xrange(0,block_len,64):
w = bytes2words(block[i:i+64])
self.tf.tweak[0] = add64(self.tf.tweak[0], byte_count_add)
self.tf.prepare_tweak()
self.tf.prepare_key()
self.tf.key = self.tf.encrypt_block(w)
self.tf._feed_forward(self.tf.key, w)
# set second tweak value to ~SKEIN_T1_FLAG_FIRST:
self.tf.tweak[1] &= bigint(0xbfffffffffffffff)
def update(self, msg):
"""Update internal state with new data to be hashed.
`msg` is a bytestring, and should be a bytes object in Python 3
and up, or simply a string in Python 2.5 and 2.6.
"""
self.buf += msg
buflen = len(self.buf)
if buflen > 64:
end = -(buflen % 64) or (buflen-64)
data = self.buf[0:end]
self.buf = self.buf[end:]
try:
self._process_block(data, 64)
except:
print(len(data))
print(binascii.b2a_hex(data))
def final(self, output=True):
    """Return hashed data as bytestring.
    `output` is primarily for internal use. It should only be False
    if you have a clear reason for doing so.
    This function can be called as either ``final`` or ``digest``.
    """
    self.tf.tweak[1] |= bigint(0x8000000000000000) # SKEIN_T1_FLAG_FINAL
    buflen = len(self.buf)
    # Zero-pad the remaining partial block before the last UBI call;
    # the true byte count is passed separately.
    self.buf += zero_bytes[:64-buflen]
    self._process_block(self.buf, buflen)
    if not output:
        # Internal use: return the raw 512-bit chaining value.
        hash_val = words2bytes(self.tf.key)
    else:
        # Output transform: counter-mode UBI over the final chaining
        # value, producing digest_size bytes 64 at a time.
        hash_val = empty_bytes
        self.buf = zero_bytes[:]
        key = self.tf.key[:] # temporary copy
        i=0
        while i*64 < self.digest_size:
            # Block index i goes in the first 8 bytes of the counter block.
            self.buf = words_format[1].pack(i) + self.buf[8:]
            # NOTE(review): plain list here, unlike the words(...) wrapper
            # used elsewhere for tweaks — appears to rely on
            # prepare_tweak()'s fallback path; confirm before changing.
            self.tf.tweak = [0, self.block_type['out_final']]
            self._process_block(self.buf, 8)
            n = self.digest_size - i*64
            if n >= 64:
                n = 64
            hash_val += words2bytes(self.tf.key)[0:n]
            self.tf.key = key
            i+=1
    return hash_val
# hashlib-style alias for final().
digest = final
def hexdigest(self):
    """Return a hexadecimal representation of the hashed data."""
    raw_digest = self.digest()
    return binascii.b2a_hex(raw_digest)
class Skein512Random(Skein512):
    """A Skein-based pseudo-random bytestring generator.
    If `seed` is unspecified, ``os.urandom`` will be used to provide the
    seed.
    In case you are using this as an iterator, rather than generating
    new data at each iteration, a pool of length `queue_size` is
    generated periodically.
    """
    def __init__(self, seed=None, queue_size=512):
        # 'nonce' marks this UBI chain as PRNG output rather than a
        # message hash.
        Skein512.__init__(self, block_type='nonce')
        self.queue = []               # byte pool consumed by next()
        self.queue_size = queue_size  # pool refill size in bytes
        self.tf.key = zero_words[:]   # start chaining state from all zeroes
        if not seed:
            seed = os.urandom(100)
        self.reseed(seed)
    def reseed(self, seed):
        """(Re)seed the generator.

        Hashes the current chaining state concatenated with `seed` and
        installs the 64-byte digest as the new chaining state.
        """
        self.digest_size = 64
        self.update(words2bytes(self.tf.key) + seed)
        self.tf.key = bytes2words(self.final())
    def getbytes(self, request_bytes):
        """Return random bytestring of length `request_bytes`.

        Generates 64 state bytes plus the requested output in one hash:
        the first 64 bytes become the next chaining state, the remainder
        is returned to the caller.
        """
        self.digest_size = 64 + request_bytes
        self.update(words2bytes(self.tf.key))
        output = self.final()
        self.tf.key = bytes2words(output[0:64])
        return output[64:]
    def __iter__(self):
        # Iterating yields one byte value (int) per step.
        return self
    def next(self):
        # Python 2 iterator protocol: lazily refill the pool, then pop
        # a single byte from it.
        if not self.queue:
            self.queue = array.array('B', self.getbytes(self.queue_size))
        return self.queue.pop()
if __name__ == '__main__':
    # Quick manual smoke check: print the Skein-512 digest of "123".
    print(Skein512('123').hexdigest())

20
lib/skeinhash.py Normal file
View File

@ -0,0 +1,20 @@
import hashlib
import struct
import skein
def skeinhash(msg):
    """Proof-of-work hash: SHA-256 of the Skein-512 digest of the
    first 80 bytes (the block header) of `msg`."""
    header = msg[:80]
    inner_digest = skein.Skein512(header).digest()
    return hashlib.sha256(inner_digest).digest()
def skeinhashmid(msg):
    """Return the Skein-512 midstate after the first 64 header bytes.

    The appended extra byte pushes the buffer past 64 bytes so that
    Skein512.update() flushes the first full block (update() otherwise
    holds a complete block back for finalisation); the chaining value
    in ``tf.key`` is then the midstate, packed little-endian.
    """
    s = skein.Skein512(msg[:64] + '\x00') # hack to force Skein512.update()
    return struct.pack('<8Q', *s.tf.key.tolist())
if __name__ == '__main__':
    # Smoke test with a fixed 80-byte header (Python 2 only:
    # print statements and str.encode('hex')).
    mesg = "dissociative1234dissociative4567dissociative1234dissociative4567dissociative1234"
    h = skeinhashmid(mesg)
    print h.encode('hex')
    # Expected midstate for the header above; prints True on success.
    print 'ad0d423b18b47f57724e519c42c9d5623308feac3df37aca964f2aa869f170bdf23e97f644e81511df49c59c5962887d17e277e7e8513345137638334c8e59a4' == h.encode('hex')
    h = skeinhash(mesg)
    print h.encode('hex')
    # Expected sha256(skein512(header)) digest; prints True on success.
    print '764da2e768811e91c6c0c649b052b7109a9bc786bce136a59c8d5a0547cddc54' == h.encode('hex')

View File

@ -4,11 +4,13 @@ import util
import StringIO import StringIO
import settings import settings
if settings.COINDAEMON_ALGO == 'scrypt': if settings.COINDAEMON_ALGO == 'scrypt':
import ltc_scrypt import ltc_scrypt
elif settings.COINDAEMON_ALGO == 'scrypt-jane': elif settings.COINDAEMON_ALGO == 'scrypt-jane':
import yac_scrypt import yac_scrypt
elif settings.COINDAEMON_ALGO == 'quark': elif settings.COINDAEMON_ALGO == 'quark':
import quark_hash import quark_hash
elif settings.COINDAEMON_ALGO == 'skeinhash':
import skeinhash
else: pass else: pass
from twisted.internet import defer from twisted.internet import defer
from lib.exceptions import SubmitException from lib.exceptions import SubmitException
@ -63,13 +65,13 @@ class TemplateRegistry(object):
def get_new_extranonce1(self): def get_new_extranonce1(self):
'''Generates unique extranonce1 (e.g. for newly '''Generates unique extranonce1 (e.g. for newly
subscribed connection.''' subscribed connection.'''
log.debug("Getting Unique Extronance") log.debug("Getting Unique Extranonce")
return self.extranonce_counter.get_new_bin() return self.extranonce_counter.get_new_bin()
def get_last_broadcast_args(self): def get_last_broadcast_args(self):
'''Returns arguments for mining.notify '''Returns arguments for mining.notify
from last known template.''' from last known template.'''
log.debug("Getting Last Template") log.debug("Getting Last Template")
return self.last_block.broadcast_args return self.last_block.broadcast_args
def add_template(self, block,block_height): def add_template(self, block,block_height):
@ -137,7 +139,7 @@ class TemplateRegistry(object):
start = Interfaces.timestamper.time() start = Interfaces.timestamper.time()
template = self.block_template_class(Interfaces.timestamper, self.coinbaser, JobIdGenerator.get_new_id()) template = self.block_template_class(Interfaces.timestamper, self.coinbaser, JobIdGenerator.get_new_id())
log.info(template.fill_from_rpc(data)) log.info(template.fill_from_rpc(data))
self.add_template(template,data['height']) self.add_template(template,data['height'])
log.info("Update finished, %.03f sec, %d txes" % \ log.info("Update finished, %.03f sec, %d txes" % \
@ -148,12 +150,18 @@ class TemplateRegistry(object):
def diff_to_target(self, difficulty): def diff_to_target(self, difficulty):
'''Converts difficulty to target''' '''Converts difficulty to target'''
if settings.COINDAEMON_ALGO == 'scrypt' or 'scrypt-jane': if settings.COINDAEMON_ALGO == 'scrypt':
diff1 = 0x0000ffff00000000000000000000000000000000000000000000000000000000 diff1 = 0x0000ffff00000000000000000000000000000000000000000000000000000000
elif settings.COINDAEMON_ALGO == 'scrypt-jane':
diff1 = 0x0000ffff00000000000000000000000000000000000000000000000000000000
elif settings.COINDAEMON_ALGO == 'quark': elif settings.COINDAEMON_ALGO == 'quark':
diff1 = 0x000000ffff000000000000000000000000000000000000000000000000000000 diff1 = 0x000000ffff000000000000000000000000000000000000000000000000000000
else: diff1 = 0x00000000ffff0000000000000000000000000000000000000000000000000000 elif settings.COINDAEMON_ALGO == 'riecoin':
return diff1 / difficulty return difficulty
else:
diff1 = 0x00000000ffff0000000000000000000000000000000000000000000000000000
return diff1 / difficulty
def get_job(self, job_id): def get_job(self, job_id):
'''For given job_id returns BlockTemplate instance or None''' '''For given job_id returns BlockTemplate instance or None'''
@ -198,15 +206,23 @@ class TemplateRegistry(object):
raise SubmitException("Job '%s' not found" % job_id) raise SubmitException("Job '%s' not found" % job_id)
# Check if ntime looks correct # Check if ntime looks correct
if len(ntime) != 8: if settings.COINDAEMON_ALGO == 'riecoin':
raise SubmitException("Incorrect size of ntime. Expected 8 chars") if len(ntime) != 16:
raise SubmitException("Incorrect size of ntime. Expected 16 chars")
else:
if len(ntime) != 8:
raise SubmitException("Incorrect size of ntime. Expected 8 chars")
if not job.check_ntime(int(ntime, 16)): if not job.check_ntime(int(ntime, 16)):
raise SubmitException("Ntime out of range") raise SubmitException("Ntime out of range")
# Check nonce # Check nonce
if len(nonce) != 8: if settings.COINDAEMON_ALGO == 'riecoin':
raise SubmitException("Incorrect size of nonce. Expected 8 chars") if len(nonce) != 64:
raise SubmitException("Incorrect size of nonce. Expected 64 chars")
else:
if len(nonce) != 8:
raise SubmitException("Incorrect size of nonce. Expected 8 chars")
# Check for duplicated submit # Check for duplicated submit
if not job.register_submit(extranonce1_bin, extranonce2, ntime, nonce): if not job.register_submit(extranonce1_bin, extranonce2, ntime, nonce):
@ -221,6 +237,9 @@ class TemplateRegistry(object):
extranonce2_bin = binascii.unhexlify(extranonce2) extranonce2_bin = binascii.unhexlify(extranonce2)
ntime_bin = binascii.unhexlify(ntime) ntime_bin = binascii.unhexlify(ntime)
nonce_bin = binascii.unhexlify(nonce) nonce_bin = binascii.unhexlify(nonce)
if settings.COINDAEMON_ALGO == 'riecoin':
ntime_bin = (''.join([ ntime_bin[(1-i)*4:(1-i)*4+4] for i in range(0, 2) ]))
nonce_bin = (''.join([ nonce_bin[(7-i)*4:(7-i)*4+4] for i in range(0, 8) ]))
# 1. Build coinbase # 1. Build coinbase
coinbase_bin = job.serialize_coinbase(extranonce1_bin, extranonce2_bin) coinbase_bin = job.serialize_coinbase(extranonce1_bin, extranonce2_bin)
@ -234,59 +253,78 @@ class TemplateRegistry(object):
header_bin = job.serialize_header(merkle_root_int, ntime_bin, nonce_bin) header_bin = job.serialize_header(merkle_root_int, ntime_bin, nonce_bin)
# 4. Reverse header and compare it with target of the user # 4. Reverse header and compare it with target of the user
if settings.COINDAEMON_ALGO == 'scrypt': if settings.COINDAEMON_ALGO == 'scrypt':
hash_bin = ltc_scrypt.getPoWHash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ])) hash_bin = ltc_scrypt.getPoWHash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
elif settings.COINDAEMON_ALGO == 'scrypt-jane': elif settings.COINDAEMON_ALGO == 'scrypt-jane':
hash_bin = yac_scrypt.getPoWHash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]), int(ntime, 16)) hash_bin = yac_scrypt.getPoWHash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]), int(ntime, 16))
elif settings.COINDAEMON_ALGO == 'quark': elif settings.COINDAEMON_ALGO == 'quark':
hash_bin = quark_hash.getPoWHash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ])) hash_bin = quark_hash.getPoWHash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
else: hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ])) elif settings.COINDAEMON_ALGO == 'skeinhash':
hash_bin = skeinhash.skeinhash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
else:
hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
hash_int = util.uint256_from_str(hash_bin) hash_int = util.uint256_from_str(hash_bin)
scrypt_hash_hex = "%064x" % hash_int scrypt_hash_hex = "%064x" % hash_int
if settings.COINDAEMON_ALGO == 'riecoin':
# this is kind of an ugly hack: we use hash_int to store the number of primes
hash_int = util.riecoinPoW( hash_int, job.target, int(nonce, 16) )
header_hex = binascii.hexlify(header_bin) header_hex = binascii.hexlify(header_bin)
if settings.COINDAEMON_ALGO == 'scrypt' or settings.COINDAEMON_ALGO == 'scrypt-jane': if settings.COINDAEMON_ALGO == 'scrypt' or settings.COINDAEMON_ALGO == 'scrypt-jane':
header_hex = header_hex+"000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000" header_hex = header_hex+"000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000"
elif settings.COINDAEMON_ALGO == 'quark': elif settings.COINDAEMON_ALGO == 'quark':
header_hex = header_hex+"000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000" header_hex = header_hex+"000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000"
else: pass elif settings.COINDAEMON_ALGO == 'riecoin':
header_hex = header_hex+"00000080000000000000000080030000"
else: pass
target_user = self.diff_to_target(difficulty) target_user = self.diff_to_target(difficulty)
if hash_int > target_user: if settings.COINDAEMON_ALGO == 'riecoin':
raise SubmitException("Share is above target") if hash_int < target_user:
raise SubmitException("Share does not meet target")
# Mostly for debugging purposes else:
target_info = self.diff_to_target(100000) if hash_int > target_user:
if hash_int <= target_info: raise SubmitException("Share is above target")
log.info("Yay, share with diff above 100000") # Mostly for debugging purposes
target_info = self.diff_to_target(100000)
if hash_int <= target_info:
log.info("Yay, share with diff above 100000")
# Algebra tells us the diff_to_target is the same as hash_to_diff # Algebra tells us the diff_to_target is the same as hash_to_diff
share_diff = int(self.diff_to_target(hash_int)) share_diff = int(self.diff_to_target(hash_int))
# 5. Compare hash with target of the network # 5. Compare hash with target of the network
if hash_int <= job.target: isBlockCandidate = False
if settings.COINDAEMON_ALGO == 'riecoin':
if hash_int == 6:
isBlockCandidate = True
else:
if hash_int <= job.target:
isBlockCandidate = True
if isBlockCandidate == True:
# Yay! It is block candidate! # Yay! It is block candidate!
log.info("We found a block candidate! %s" % scrypt_hash_hex) log.info("We found a block candidate! %s" % scrypt_hash_hex)
# Reverse the header and get the potential block hash (for scrypt only) # Reverse the header and get the potential block hash (for scrypt only)
#if settings.COINDAEMON_ALGO == 'scrypt' or settings.COINDAEMON_ALGO == 'sha256d': if settings.COINDAEMON_ALGO == 'riecoin':
# if settings.COINDAEMON_Reward == 'POW': block_hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 28) ]))
block_hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ])) else:
block_hash_hex = block_hash_bin[::-1].encode('hex_codec') block_hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
#else: block_hash_hex = hash_bin[::-1].encode('hex_codec') block_hash_hex = block_hash_bin[::-1].encode('hex_codec')
#else: block_hash_hex = hash_bin[::-1].encode('hex_codec')
# 6. Finalize and serialize block object # 6. Finalize and serialize block object
job.finalize(merkle_root_int, extranonce1_bin, extranonce2_bin, int(ntime, 16), int(nonce, 16)) job.finalize(merkle_root_int, extranonce1_bin, extranonce2_bin, int(ntime, 16), int(nonce, 16))
if not job.is_valid(): if not job.is_valid():
# Should not happen # Should not happen
log.exception("FINAL JOB VALIDATION FAILED!(Try enabling/disabling tx messages)") log.exception("FINAL JOB VALIDATION FAILED!(Try enabling/disabling tx messages)")
# 7. Submit block to the network # 7. Submit block to the network
serialized = binascii.hexlify(job.serialize()) serialized = binascii.hexlify(job.serialize())
if settings.BLOCK_CHECK_SCRYPT_HASH: on_submit = self.bitcoin_rpc.submitblock(serialized, block_hash_hex, scrypt_hash_hex)
on_submit = self.bitcoin_rpc.submitblock(serialized, scrypt_hash_hex)
else:
on_submit = self.bitcoin_rpc.submitblock(serialized, block_hash_hex)
if on_submit: if on_submit:
self.update_block() self.update_block()
@ -297,7 +335,10 @@ class TemplateRegistry(object):
if settings.SOLUTION_BLOCK_HASH: if settings.SOLUTION_BLOCK_HASH:
# Reverse the header and get the potential block hash (for scrypt only) only do this if we want to send in the block hash to the shares table # Reverse the header and get the potential block hash (for scrypt only) only do this if we want to send in the block hash to the shares table
block_hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ])) if settings.COINDAEMON_ALGO == 'riecoin':
block_hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 28) ]))
else:
block_hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
block_hash_hex = block_hash_bin[::-1].encode('hex_codec') block_hash_hex = block_hash_bin[::-1].encode('hex_codec')
return (header_hex, block_hash_hex, share_diff, None) return (header_hex, block_hash_hex, share_diff, None)
else: else:

View File

@ -1,288 +0,0 @@
import weakref
import binascii
import util
import StringIO
import settings
if settings.COINDAEMON_ALGO == 'scrypt':
import ltc_scrypt
else: pass
from twisted.internet import defer
from lib.exceptions import SubmitException
import lib.logger
log = lib.logger.get_logger('template_registry')
from mining.interfaces import Interfaces
from extranonce_counter import ExtranonceCounter
import lib.settings as settings
class JobIdGenerator(object):
    """Produce short pseudo-unique job ids as lowercase hex strings.

    Ids only need to be unique between "clean_jobs" broadcasts (clients
    drop all previous jobs on that flag), so a wrapping class-level
    counter is sufficient.
    """
    counter = 0
    @classmethod
    def get_new_id(cls):
        """Advance the shared counter and return it formatted as hex."""
        next_value = cls.counter + 1
        # Wrap around before 0x10000 ids; numbering restarts at 1.
        if next_value % 0xffff == 0:
            next_value = 1
        cls.counter = next_value
        return format(next_value, 'x')
class TemplateRegistry(object):
'''Implements the main logic of the pool. Keep track
on valid block templates, provide internal interface for stratum
service and implements block validation and submits.'''
def __init__(self, block_template_class, coinbaser, bitcoin_rpc, instance_id,
on_template_callback, on_block_callback):
self.prevhashes = {}
self.jobs = weakref.WeakValueDictionary()
self.extranonce_counter = ExtranonceCounter(instance_id)
self.extranonce2_size = block_template_class.coinbase_transaction_class.extranonce_size \
- self.extranonce_counter.get_size()
self.coinbaser = coinbaser
self.block_template_class = block_template_class
self.bitcoin_rpc = bitcoin_rpc
self.on_block_callback = on_block_callback
self.on_template_callback = on_template_callback
self.last_block = None
self.update_in_progress = False
self.last_update = None
# Create first block template on startup
self.update_block()
def get_new_extranonce1(self):
'''Generates unique extranonce1 (e.g. for newly
subscribed connection.'''
return self.extranonce_counter.get_new_bin()
def get_last_broadcast_args(self):
'''Returns arguments for mining.notify
from last known template.'''
return self.last_block.broadcast_args
def add_template(self, block,block_height):
'''Adds new template to the registry.
It also clean up templates which should
not be used anymore.'''
prevhash = block.prevhash_hex
if prevhash in self.prevhashes.keys():
new_block = False
else:
new_block = True
self.prevhashes[prevhash] = []
# Blocks sorted by prevhash, so it's easy to drop
# them on blockchain update
self.prevhashes[prevhash].append(block)
# Weak reference for fast lookup using job_id
self.jobs[block.job_id] = block
# Use this template for every new request
self.last_block = block
# Drop templates of obsolete blocks
for ph in self.prevhashes.keys():
if ph != prevhash:
del self.prevhashes[ph]
log.info("New template for %s" % prevhash)
if new_block:
# Tell the system about new block
# It is mostly important for share manager
self.on_block_callback(prevhash, block_height)
# Everything is ready, let's broadcast jobs!
self.on_template_callback(new_block)
#from twisted.internet import reactor
#reactor.callLater(10, self.on_block_callback, new_block)
def update_block(self):
'''Registry calls the getblocktemplate() RPC
and build new block template.'''
if self.update_in_progress:
# Block has been already detected
return
self.update_in_progress = True
self.last_update = Interfaces.timestamper.time()
d = self.bitcoin_rpc.getblocktemplate()
d.addCallback(self._update_block)
d.addErrback(self._update_block_failed)
def _update_block_failed(self, failure):
log.error(str(failure))
self.update_in_progress = False
def _update_block(self, data):
start = Interfaces.timestamper.time()
template = self.block_template_class(Interfaces.timestamper, self.coinbaser, JobIdGenerator.get_new_id())
print("hit template registry")
template.fill_from_rpc(data) template.fill_from_rpc(data)
self.add_template(template,data['height'])
log.info("Update finished, %.03f sec, %d txes" % \
(Interfaces.timestamper.time() - start, len(template.vtx)))
self.update_in_progress = False
return data
def diff_to_target(self, difficulty):
'''Converts difficulty to target'''
if settings.COINDAEMON_ALGO == 'scrypt':
diff1 = 0x0000ffff00000000000000000000000000000000000000000000000000000000
else: diff1 = 0x00000000ffff0000000000000000000000000000000000000000000000000000
return diff1 / difficulty
def get_job(self, job_id):
'''For given job_id returns BlockTemplate instance or None'''
try:
j = self.jobs[job_id]
except:
log.info("Job id '%s' not found" % job_id)
return None
# Now we have to check if job is still valid.
# Unfortunately weak references are not bulletproof and
# old reference can be found until next run of garbage collector.
if j.prevhash_hex not in self.prevhashes:
log.info("Prevhash of job '%s' is unknown" % job_id)
return None
if j not in self.prevhashes[j.prevhash_hex]:
log.info("Job %s is unknown" % job_id)
return None
return j
def submit_share(self, job_id, worker_name, session, extranonce1_bin, extranonce2, ntime, nonce,
difficulty):
'''Check parameters and finalize block template. If it leads
to valid block candidate, asynchronously submits the block
back to the bitcoin network.
- extranonce1_bin is binary. No checks performed, it should be from session data
- job_id, extranonce2, ntime, nonce - in hex form sent by the client
- difficulty - decimal number from session, again no checks performed
- submitblock_callback - reference to method which receive result of submitblock()
'''
# Check if extranonce2 looks correctly. extranonce2 is in hex form...
if len(extranonce2) != self.extranonce2_size * 2:
raise SubmitException("Incorrect size of extranonce2. Expected %d chars" % (self.extranonce2_size*2))
# Check for job
job = self.get_job(job_id)
if job == None:
raise SubmitException("Job '%s' not found" % job_id)
# Check if ntime looks correct
if len(ntime) != 8:
raise SubmitException("Incorrect size of ntime. Expected 8 chars")
if not job.check_ntime(int(ntime, 16)):
raise SubmitException("Ntime out of range")
# Check nonce
if len(nonce) != 8:
raise SubmitException("Incorrect size of nonce. Expected 8 chars")
# Check for duplicated submit
if not job.register_submit(extranonce1_bin, extranonce2, ntime, nonce):
log.info("Duplicate from %s, (%s %s %s %s)" % \
(worker_name, binascii.hexlify(extranonce1_bin), extranonce2, ntime, nonce))
raise SubmitException("Duplicate share")
# Now let's do the hard work!
# ---------------------------
# 0. Some sugar
extranonce2_bin = binascii.unhexlify(extranonce2)
ntime_bin = binascii.unhexlify(ntime)
nonce_bin = binascii.unhexlify(nonce)
# 1. Build coinbase
coinbase_bin = job.serialize_coinbase(extranonce1_bin, extranonce2_bin)
coinbase_hash = util.doublesha(coinbase_bin)
# 2. Calculate merkle root
merkle_root_bin = job.merkletree.withFirst(coinbase_hash)
merkle_root_int = util.uint256_from_str(merkle_root_bin)
# 3. Serialize header with given merkle, ntime and nonce
header_bin = job.serialize_header(merkle_root_int, ntime_bin, nonce_bin)
# 4. Reverse header and compare it with target of the user
if settings.COINDAEMON_ALGO == 'scrypt':
hash_bin = ltc_scrypt.getPoWHash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
else: hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
hash_int = util.uint256_from_str(hash_bin)
scrypt_hash_hex = "%064x" % hash_int
header_hex = binascii.hexlify(header_bin)
if settings.COINDAEMON_ALGO == 'scrypt':
header_hex = header_hex+"000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000"
else: pass
target_user = self.diff_to_target(difficulty)
if hash_int > target_user and \
( 'prev_jobid' not in session or session['prev_jobid'] < job_id \
or 'prev_diff' not in session or hash_int > self.diff_to_target(session['prev_diff']) ):
raise SubmitException("Share is above target")
# Mostly for debugging purposes
target_info = self.diff_to_target(100000)
if hash_int <= target_info:
log.info("Yay, share with diff above 100000")
# Algebra tells us the diff_to_target is the same as hash_to_diff
share_diff = int(self.diff_to_target(hash_int))
# 5. Compare hash with target of the network
if hash_int <= job.target:
# Yay! It is block candidate!
log.info("We found a block candidate! %s" % scrypt_hash_hex)
# Reverse the header and get the potential block hash (for scrypt only)
block_hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
block_hash_hex = block_hash_bin[::-1].encode('hex_codec')
# 6. Finalize and serialize block object
job.finalize(merkle_root_int, extranonce1_bin, extranonce2_bin, int(ntime, 16), int(nonce, 16))
if not job.is_valid():
# Should not happen
log.error("Final job validation failed!")
# 7. Submit block to the network
serialized = binascii.hexlify(job.serialize())
on_submit = self.bitcoin_rpc.submitblock(serialized, block_hash_hex)
if on_submit:
self.update_block()
if settings.SOLUTION_BLOCK_HASH:
return (header_hex, block_hash_hex, share_diff, on_submit)
else:
return (header_hex, scrypt_hash_hex, share_diff, on_submit)
if settings.SOLUTION_BLOCK_HASH:
# Reverse the header and get the potential block hash (for scrypt only) only do this if we want to send in the block hash to the shares table
block_hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
block_hash_hex = block_hash_bin[::-1].encode('hex_codec')
return (header_hex, block_hash_hex, share_diff, None)
else:
return (header_hex, scrypt_hash_hex, share_diff, None)

147
lib/threefish.py Normal file
View File

@ -0,0 +1,147 @@
#!/usr/bin/env python
# coding=utf-8
# Copyright 2010 Jonathan Bowman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Pure Python implementation of the Threefish block cipher
The core of the Skein 512-bit hashing algorithm
"""
from util_numpy import add64, bigint, bytelist, bytes2words, imap, izip, sub64, \
SKEIN_KS_PARITY, words, words2bytes, words_format, xrange, zero_bytes, zero_words, RotL_64, RotR_64, xor
from itertools import cycle
# Rotation constants for the Threefish-512 MIX function (Skein spec
# v1.3 values).  Indexed as ROT[i+s] in encrypt/decrypt, where s
# alternates between 0 and 16 to select one of the two 16-entry halves
# (each half covers four consecutive rounds of four MIXes).
ROT = bytelist((46, 36, 19, 37,
                33, 27, 14, 42,
                17, 49, 36, 39,
                44, 9, 54, 56,
                39, 30, 34, 24,
                13, 50, 10, 17,
                25, 29, 39, 43,
                8, 35, 56, 22))
# Word-pair schedule for the MIX/permute steps: each (m, n) pair is one
# MIX input; rows of four pairs correspond to successive rounds, folding
# the Threefish word permutation into the indexing.
PERM = bytelist(((0,1),(2,3),(4,5),(6,7),
                 (2,1),(4,7),(6,5),(0,3),
                 (4,1),(6,3),(0,5),(2,7),
                 (6,1),(0,7),(2,5),(4,3)))
class Threefish512(object):
    """The Threefish 512-bit block cipher.
    The key and tweak may be set when initialized (as
    bytestrings) or after initialization using the ``tweak`` or
    ``key`` properties. When choosing the latter, be sure to call
    the ``prepare_key`` and ``prepare_tweak`` methods.
    """
    def __init__(self, key=None, tweak=None):
        """Set key and tweak.
        The key and the tweak will be lists of 8 64-bit words
        converted from `key` and `tweak` bytestrings, or all
        zeroes if not specified.
        """
        if key:
            self.key = bytes2words(key)
            self.prepare_key()
        else:
            # NOTE(review): ``zero_words[:] + [0]`` reads like list
            # concatenation to 9 words, but with a numpy array ``+`` is
            # element-wise addition, so this stays 8 zero words;
            # prepare_key() compensates via its IndexError fallback.
            # Confirm before changing.
            self.key = words(zero_words[:] + [0])
        if tweak:
            self.tweak = bytes2words(tweak, 2)
            self.prepare_tweak()
        else:
            self.tweak = zero_words[:3]
    def prepare_key(self):
        """Compute key (extends the 8 key words with the key-schedule
        parity word: XOR of all key words with SKEIN_KS_PARITY)."""
        final = reduce(xor, self.key[:8]) ^ SKEIN_KS_PARITY
        try:
            self.key[8] = final
        except IndexError:
            # Fixed-size numpy array cannot grow in place; rebuild it
            # with the parity word appended.
            #self.key.append(final)
            self.key = words(list(self.key) + [final])
    def prepare_tweak(self):
        """Compute tweak (extends the 2 tweak words with their XOR,
        the third word of the Threefish tweak schedule)."""
        final = self.tweak[0] ^ self.tweak[1]
        try:
            self.tweak[2] = final
        except IndexError:
            # Same fixed-size fallback as prepare_key().
            #self.tweak.append(final)
            self.tweak = words(list(self.tweak) + [final])
    def encrypt_block(self, plaintext):
        """Return 8-word ciphertext, encrypted from plaintext.
        `plaintext` must be a list of 8 64-bit words.
        """
        key = self.key
        tweak = self.tweak
        # Initial key injection; tweak words 0/1 fold into words 5/6.
        state = words(list(imap(add64, plaintext, key[:8])))
        state[5] = add64(state[5], tweak[0])
        state[6] = add64(state[6], tweak[1])
        for r,s in izip(xrange(1,19),cycle((0,16))):
            # Four MIX/permute rounds; s selects the ROT table half.
            for i in xrange(16):
                m,n = PERM[i]
                state[m] = add64(state[m], state[n])
                state[n] = RotL_64(state[n], ROT[i+s])
                state[n] = state[n] ^ state[m]
            # Key-schedule injection for subkey r (rotating key words,
            # tweak words, and the round counter per the spec).
            for y in xrange(8):
                state[y] = add64(state[y], key[(r+y) % 9])
            state[5] = add64(state[5], tweak[r % 3])
            state[6] = add64(state[6], tweak[(r+1) % 3])
            state[7] = add64(state[7], r)
        return state
    def _feed_forward(self, state, plaintext):
        """Compute additional step required when hashing (XOR the
        plaintext back into the ciphertext, in place).
        Primarily for internal use.
        """
        state[:] = list(imap(xor, state, plaintext))
    def decrypt_block(self, ciphertext):
        """Return 8-word plaintext, decrypted from ciphertext.
        `ciphertext` must be a list of 8 64-bit words.
        """
        key = self.key
        tweak = self.tweak
        state = ciphertext[:]
        # Undo encrypt_block: subkeys in reverse order, then inverse
        # MIX/permute rounds with the ROT halves swapped.
        for r,s in izip(xrange(18,0,-1),cycle((16,0))):
            for y in xrange(8):
                state[y] = sub64(state[y], key[(r+y) % 9])
            state[5] = sub64(state[5], tweak[r % 3])
            state[6] = sub64(state[6], tweak[(r+1) % 3])
            state[7] = sub64(state[7], r)
            for i in xrange(15,-1,-1):
                m,n = PERM[i]
                state[n] = RotR_64(state[m] ^ state[n], ROT[i+s])
                state[m] = sub64(state[m], state[n])
        # Undo the initial key/tweak injection.
        result = list(imap(sub64, state, key))
        result[5] = sub64(result[5], tweak[0])
        result[6] = sub64(result[6], tweak[1])
        return result

View File

@ -56,7 +56,10 @@ def uint256_from_str_be(s):
def uint256_from_compact(c): def uint256_from_compact(c):
nbytes = (c >> 24) & 0xFF nbytes = (c >> 24) & 0xFF
v = (c & 0xFFFFFFL) << (8 * (nbytes - 3)) if nbytes <= 3:
v = (c & 0xFFFFFFL) >> (8 * (3 - nbytes))
else:
v = (c & 0xFFFFFFL) << (8 * (nbytes - 3))
return v return v
def deser_vector(f, c): def deser_vector(f, c):
@ -211,6 +214,54 @@ def ser_number(n):
s.append(n) s.append(n)
return bytes(s) return bytes(s)
def isPrime( n ):
    """Base-2 Fermat probable-prime test.

    True when 2**(n-1) == 1 (mod n).  This is a heuristic, not a
    proof: composite Fermat pseudoprimes such as 341 pass, and 2
    itself is reported non-prime (2**1 mod 2 == 0).
    """
    return pow( 2, n - 1, n ) == 1
def riecoinPoW( hash_int, diff, nNonce ):
    """Verify a Riecoin proof of work and return the prime count.

    Builds the candidate integer from a leading 1-bit byte, the 256
    hash bits (consumed LSB-first), `diff` - 265 trailing zero bits,
    plus the nonce; requires residue 97 mod 210, then counts primes
    at offsets 0, 4, 6, 10, 12, 16 (a prime sextuplet pattern) using
    the Fermat test in isPrime().  Returns 0 on any structural
    failure, otherwise the number of primes found (6 == full solve).
    """
    candidate = 1 << 8
    for _ in range(256):
        candidate = (candidate << 1) | (hash_int & 1)
        hash_int = hash_int >> 1
    trailing_zeros = diff - 1 - 8 - 256
    # Sanity bounds on the difficulty-derived shift.
    if trailing_zeros < 16 or trailing_zeros > 20000:
        return 0
    candidate = (candidate << trailing_zeros) + nNonce
    # Sextuplet bases are always congruent to 97 modulo 210.
    if (candidate % 210) != 97:
        return 0
    if not isPrime( candidate ):
        return 0
    primes = 1
    # Remaining members of the sextuplet at gaps +4, +2, +4, +2, +4.
    for gap in (4, 2, 4, 2, 4):
        candidate += gap
        if isPrime( candidate ):
            primes += 1
    return primes
#if settings.COINDAEMON_Reward == 'POW': #if settings.COINDAEMON_Reward == 'POW':
def script_to_address(addr): def script_to_address(addr):
d = address_to_pubkeyhash(addr) d = address_to_pubkeyhash(addr)

100
lib/util_numpy.py Normal file
View File

@ -0,0 +1,100 @@
#!/usr/bin/env python
# coding=utf-8
# Copyright 2010 Jonathan Bowman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Various helper functions for handling arrays, etc. using numpy"""
import struct
# NOTE(review): the add64/sub64 aliases imported here are shadowed by the
# numpy-based add64/sub64 defined at the bottom of this module; only
# ``xor`` is used as imported.
from operator import xor, add as add64, sub as sub64
import numpy as np
# Type aliases used throughout the cipher code.
words = np.uint64     # 64-bit word array constructor
bytelist = np.uint8   # byte-table constructor
bigint = np.uint64    # 64-bit constant constructor
# working out some differences between Python 2 and 3
try:
    from itertools import imap, izip
except ImportError:
    imap = map
    izip = zip
try:
    xrange = xrange
except:
    xrange = range
# Key-schedule parity constant from the Skein specification.
SKEIN_KS_PARITY = np.uint64(0x1BD11BDAA9FC1A22)
# zeroed out byte string and list for convenience and performance
zero_bytes = struct.pack('64B', *[0] * 64)
zero_words = np.zeros(8, dtype=np.uint64)
# Build structs for conversion appropriate to this system, favoring
# native formats if possible for slight performance benefit
words_format_tpl = "%dQ"
if struct.pack('2B', 0, 1) == struct.pack('=H', 1): # big endian?
    words_format_tpl = "<" + words_format_tpl # force little endian
else:
    try: # is 64-bit integer native?
        struct.unpack(words_format_tpl % 2, zero_bytes[:16])
    except(struct.error): # Use standard instead of native
        words_format_tpl = "=" + words_format_tpl
# build structs for one-, two- and eight-word sequences
words_format = dict(
    (i,struct.Struct(words_format_tpl % i)) for i in (1,2,8))
def bytes2words(data, length=8):
    """Return a writable numpy array of 64-bit words read from `data`.

    `data` must consist of `length` * 8 bytes; `length` is kept for
    interface compatibility (the whole buffer is always converted,
    exactly as before).

    ``np.fromstring`` (used previously) was deprecated in NumPy 1.14
    and removed in NumPy 2.0; ``np.frombuffer`` is the supported
    replacement.  ``frombuffer`` returns a read-only view, so copy it:
    callers (e.g. Threefish512.prepare_key) assign into the result.
    """
    return(np.frombuffer(data, dtype=np.uint64).copy())
def words2bytes(data, length=8):
    """Return a `length` * 8 byte string from `data`.

    `data` must be a numpy array or a sequence of `length` 64-bit words.
    `length` must be 1, 2, or 8 (not enforced; output size follows the
    input).
    """
    try:
        # ndarray.tobytes replaces tostring(), which was deprecated and
        # has been removed in modern NumPy.
        return data.tobytes()
    except AttributeError:
        # Plain Python sequences: convert to a uint64 array first.
        return np.uint64(data).tobytes()
def RotL_64(x, N):
    """Return the 64-bit word `x` rotated left by `N` bits."""
    # Masking with 63 keeps both shift counts in the valid 0..63 range,
    # so N == 0 (and multiples of 64) degenerate to the identity.
    upper = np.left_shift(x, N & 63, dtype=np.uint64)
    lower = np.right_shift(x, (64 - N) & 63, dtype=np.uint64)
    return upper | lower
def RotR_64(x, N):
    """Return the 64-bit word `x` rotated right by `N` bits."""
    # Mirror image of RotL_64: both shift counts are masked into 0..63.
    lower = np.right_shift(x, N & 63, dtype=np.uint64)
    upper = np.left_shift(x, (64 - N) & 63, dtype=np.uint64)
    return lower | upper
def add64(a, b):
    """Return the 64-bit integer sum of `a` and `b` (wraps mod 2**64)."""
    total = np.add(a, b, dtype=np.uint64)
    return total
def sub64(a, b):
    """Return the 64-bit integer difference of `a` and `b` (wraps mod 2**64)."""
    diff = np.subtract(a, b, dtype=np.uint64)
    return diff

View File

@ -67,12 +67,12 @@ class DBInterface():
import DB_None import DB_None
return DB_None.DB_None() return DB_None.DB_None()
def scheduleImport(self): def scheduleImport(self):
# This schedule's the Import # This schedule's the Import
if settings.DATABASE_DRIVER == "sqlite": if settings.DATABASE_DRIVER == "sqlite":
use_thread = False use_thread = False
else: use_thread = True else:
use_thread = True
if use_thread: if use_thread:
self.queueclock = reactor.callLater(settings.DB_LOADER_CHECKTIME , self.run_import_thread) self.queueclock = reactor.callLater(settings.DB_LOADER_CHECKTIME , self.run_import_thread)
@ -174,11 +174,18 @@ class DBInterface():
self.cache.set(username, password) self.cache.set(username, password)
return True return True
elif settings.USERS_AUTOADD == True: elif settings.USERS_AUTOADD == True:
if self.dbi.get_uid(username) != False: if self.dbi.get_uid(username) != False:
uid = self.dbi.get_uid(username) uid = self.dbi.get_uid(username)
self.dbi.insert_worker(uid, username, password) self.dbi.insert_worker(uid, username, password)
self.cache.set(username, password) self.cache.set(username, password)
return True return True
else:
self.dbi.insert_user(username, password)
if self.dbi.get_uid(username) != False:
uid = self.dbi.get_uid(username)
self.dbi.insert_worker(uid, username, password)
self.cache.set(username, password)
return True
log.info("Authentication for %s failed" % username) log.info("Authentication for %s failed" % username)
return False return False
@ -187,11 +194,14 @@ class DBInterface():
return self.dbi.list_users() return self.dbi.list_users()
def get_user(self, id): def get_user(self, id):
return self.dbi.get_user(id) if self.cache.get(id) is None:
self.cache.set(id,self.dbi.get_user(id))
return self.cache.get(id)
def user_exists(self, username): def user_exists(self, username):
if self.cache.get(username) is not None: if self.cache.get(username) is not None:
return True return True
user = self.dbi.get_user(username) user = self.dbi.get_user(username)
return user is not None return user is not None
@ -199,7 +209,7 @@ class DBInterface():
return self.dbi.insert_user(username, password) return self.dbi.insert_user(username, password)
def delete_user(self, username): def delete_user(self, username):
self.mc.delete(username) self.mc.delete(username)
self.usercache = {} self.usercache = {}
return self.dbi.delete_user(username) return self.dbi.delete_user(username)

View File

@ -155,13 +155,13 @@ class DB_Mysql():
%(lres)s, %(result)s, %(reason)s, %(solution)s) %(lres)s, %(result)s, %(reason)s, %(solution)s)
""", """,
{ {
"time": v[4], "time": data[4],
"host": v[6], "host": data[6],
"uname": v[0], "uname": data[0],
"lres": v[5], "lres": data[5],
"result": v[5], "result": data[5],
"reason": v[9], "reason": data[9],
"solution": v[2] "solution": data[2]
} }
) )
@ -342,19 +342,6 @@ class DB_Mysql():
} }
return ret return ret
def get_uid(self, id_or_username):
log.debug("Finding user id of %s", id_or_username)
uname = id_or_username.split(".", 1)[0]
self.execute("SELECT `id` FROM `accounts` where username = %s", (uname))
row = self.dbc.fetchone()
if row is None:
return False
else:
uid = row[0]
return uid
def insert_worker(self, account_id, username, password): def insert_worker(self, account_id, username, password):
log.debug("Adding new worker %s", username) log.debug("Adding new worker %s", username)
@ -363,8 +350,6 @@ class DB_Mysql():
self.dbh.commit() self.dbh.commit()
return str(username) return str(username)
def close(self): def close(self):
self.dbh.close() self.dbh.close()

View File

@ -3,16 +3,16 @@ log = stratum.logger.get_logger('None')
class DB_None(): class DB_None():
def __init__(self): def __init__(self):
log.debug("Connecting to DB") log.debug("Connecting to DB")
def updateStats(self,averageOverTime): def updateStats(self,averageOverTime):
log.debug("Updating Stats") log.debug("Updating Stats")
def import_shares(self,data): def import_shares(self,data):
log.debug("Importing Shares") log.debug("Importing Shares")
def found_block(self,data): def found_block(self,data):
log.debug("Found Block") log.debug("Found Block")
def get_user(self, id_or_username): def get_user(self, id_or_username):
log.debug("Get User") log.debug("Get User")
@ -21,37 +21,36 @@ class DB_None():
log.debug("List Users") log.debug("List Users")
def delete_user(self,username): def delete_user(self,username):
log.debug("Deleting Username") log.debug("Deleting Username")
def insert_user(self,username,password): def insert_user(self,username,password):
log.debug("Adding Username/Password") log.debug("Adding Username/Password")
def update_user(self,username,password): def update_user(self,username,password):
log.debug("Updating Username/Password") log.debug("Updating Username/Password")
def check_password(self,username,password): def check_password(self,username,password):
log.debug("Checking Username/Password") log.debug("Checking Username/Password")
return True return True
def update_pool_info(self,pi): def update_pool_info(self,pi):
log.debug("Update Pool Info") log.debug("Update Pool Info")
def clear_worker_diff(self): def clear_worker_diff(self):
log.debug("Clear Worker Diff") log.debug("Clear Worker Diff")
def get_pool_stats(self): def get_pool_stats(self):
log.debug("Get Pool Stats") log.debug("Get Pool Stats")
ret = {} ret = {}
return ret return ret
def get_workers_stats(self): def get_workers_stats(self):
log.debug("Get Workers Stats") log.debug("Get Workers Stats")
ret = {} ret = {}
return ret return ret
def check_tables(self): def check_tables(self):
log.debug("Checking Tables") log.debug("Checking Tables")
def close(self): def close(self):
log.debug("Close Connection") log.debug("Close Connection")

View File

@ -7,131 +7,132 @@ import sqlite3
class DB_Sqlite(): class DB_Sqlite():
def __init__(self): def __init__(self):
log.debug("Connecting to DB") log.debug("Connecting to DB")
self.dbh = sqlite3.connect(settings.DB_SQLITE_FILE) self.dbh = sqlite3.connect(settings.DB_SQLITE_FILE)
self.dbc = self.dbh.cursor() self.dbc = self.dbh.cursor()
def updateStats(self,averageOverTime): def updateStats(self,averageOverTime):
log.debug("Updating Stats") log.debug("Updating Stats")
# Note: we are using transactions... so we can set the speed = 0 and it doesn't take affect until we are commited. # Note: we are using transactions... so we can set the speed = 0 and it doesn't take affect until we are commited.
self.dbc.execute("update pool_worker set speed = 0, alive = 0"); self.dbc.execute("update pool_worker set speed = 0, alive = 0");
stime = '%.2f' % ( time.time() - averageOverTime ); stime = '%.2f' % ( time.time() - averageOverTime );
self.dbc.execute("select username,SUM(difficulty) from shares where time > :time group by username", {'time':stime}) self.dbc.execute("select username,SUM(difficulty) from shares where time > :time group by username", {'time':stime})
total_speed = 0 total_speed = 0
sqldata = [] sqldata = []
for name,shares in self.dbc.fetchall(): for name,shares in self.dbc.fetchall():
speed = int(int(shares) * pow(2,32)) / ( int(averageOverTime) * 1000 * 1000) speed = int(int(shares) * pow(2,32)) / ( int(averageOverTime) * 1000 * 1000)
total_speed += speed total_speed += speed
sqldata.append({'speed':speed,'user':name}) sqldata.append({'speed':speed,'user':name})
self.dbc.executemany("update pool_worker set speed = :speed, alive = 1 where username = :user",sqldata) self.dbc.executemany("update pool_worker set speed = :speed, alive = 1 where username = :user",sqldata)
self.dbc.execute("update pool set value = :val where parameter = 'pool_speed'",{'val':total_speed}) self.dbc.execute("update pool set value = :val where parameter = 'pool_speed'",{'val':total_speed})
self.dbh.commit() self.dbh.commit()
def archive_check(self): def archive_check(self):
# Check for found shares to archive # Check for found shares to archive
self.dbc.execute("select time from shares where upstream_result = 1 order by time limit 1") self.dbc.execute("select time from shares where upstream_result = 1 order by time limit 1")
data = self.dbc.fetchone() data = self.dbc.fetchone()
if data is None or (data[0] + settings.ARCHIVE_DELAY) > time.time() : if data is None or (data[0] + settings.ARCHIVE_DELAY) > time.time() :
return False return False
return data[0] return data[0]
def archive_found(self,found_time): def archive_found(self,found_time):
self.dbc.execute("insert into shares_archive_found select * from shares where upstream_result = 1 and time <= :time",{'time':found_time}) self.dbc.execute("insert into shares_archive_found select * from shares where upstream_result = 1 and time <= :time",{'time':found_time})
self.dbh.commit() self.dbh.commit()
def archive_to_db(self,found_time): def archive_to_db(self,found_time):
self.dbc.execute("insert into shares_archive select * from shares where time <= :time",{'time':found_time}) self.dbc.execute("insert into shares_archive select * from shares where time <= :time",{'time':found_time})
self.dbh.commit() self.dbh.commit()
def archive_cleanup(self,found_time): def archive_cleanup(self,found_time):
self.dbc.execute("delete from shares where time <= :time",{'time':found_time}) self.dbc.execute("delete from shares where time <= :time",{'time':found_time})
self.dbc.execute("vacuum") self.dbc.execute("vacuum")
self.dbh.commit() self.dbh.commit()
def archive_get_shares(self,found_time): def archive_get_shares(self,found_time):
self.dbc.execute("select * from shares where time <= :time",{'time':found_time}) self.dbc.execute("select * from shares where time <= :time",{'time':found_time})
return self.dbc return self.dbc
def import_shares(self,data): def import_shares(self,data):
log.debug("Importing Shares") log.debug("Importing Shares")
# 0 1 2 3 4 5 6 7 8 9 10 # 0 1 2 3 4 5 6 7 8 9 10
# data: [worker_name,block_header,block_hash,difficulty,timestamp,is_valid,ip,block_height,prev_hash,invalid_reason,share_diff] # data: [worker_name,block_header,block_hash,difficulty,timestamp,is_valid,ip,block_height,prev_hash,invalid_reason,share_diff]
checkin_times = {} checkin_times = {}
total_shares = 0 total_shares = 0
best_diff = 0 best_diff = 0
sqldata = [] sqldata = []
for k,v in enumerate(data): for k,v in enumerate(data):
if settings.DATABASE_EXTEND : if settings.DATABASE_EXTEND :
total_shares += v[3] total_shares += v[3]
if v[0] in checkin_times: if v[0] in checkin_times:
if v[4] > checkin_times[v[0]] : if v[4] > checkin_times[v[0]] :
checkin_times[v[0]]["time"] = v[4] checkin_times[v[0]]["time"] = v[4]
else: else:
checkin_times[v[0]] = {"time": v[4], "shares": 0, "rejects": 0 } checkin_times[v[0]] = {"time": v[4], "shares": 0, "rejects": 0 }
if v[5] == True : if v[5] == True :
checkin_times[v[0]]["shares"] += v[3] checkin_times[v[0]]["shares"] += v[3]
else : else :
checkin_times[v[0]]["rejects"] += v[3] checkin_times[v[0]]["rejects"] += v[3]
if v[10] > best_diff: if v[10] > best_diff:
best_diff = v[10] best_diff = v[10]
sqldata.append({'time':v[4],'rem_host':v[6],'username':v[0],'our_result':v[5],'upstream_result':0,'reason':v[9],'solution':'', sqldata.append({'time':v[4],'rem_host':v[6],'username':v[0],'our_result':v[5],'upstream_result':0,'reason':v[9],'solution':'',
'block_num':v[7],'prev_block_hash':v[8],'ua':'','diff':v[3]} ) 'block_num':v[7],'prev_block_hash':v[8],'ua':'','diff':v[3]} )
else : else :
sqldata.append({'time':v[4],'rem_host':v[6],'username':v[0],'our_result':v[5],'upstream_result':0,'reason':v[9],'solution':''} ) sqldata.append({'time':v[4],'rem_host':v[6],'username':v[0],'our_result':v[5],'upstream_result':0,'reason':v[9],'solution':''} )
if settings.DATABASE_EXTEND : if settings.DATABASE_EXTEND :
self.dbc.executemany("insert into shares " +\ self.dbc.executemany("insert into shares " +\
"(time,rem_host,username,our_result,upstream_result,reason,solution,block_num,prev_block_hash,useragent,difficulty) " +\ "(time,rem_host,username,our_result,upstream_result,reason,solution,block_num,prev_block_hash,useragent,difficulty) " +\
"VALUES (:time,:rem_host,:username,:our_result,:upstream_result,:reason,:solution,:block_num,:prev_block_hash,:ua,:diff)",sqldata) "VALUES (:time,:rem_host,:username,:our_result,:upstream_result,:reason,:solution,:block_num,:prev_block_hash,:ua,:diff)",sqldata)
self.dbc.execute("select value from pool where parameter = 'round_shares'") self.dbc.execute("select value from pool where parameter = 'round_shares'")
round_shares = int(self.dbc.fetchone()[0]) + total_shares round_shares = int(self.dbc.fetchone()[0]) + total_shares
self.dbc.execute("update pool set value = :val where parameter = 'round_shares'",{'val':round_shares}) self.dbc.execute("update pool set value = :val where parameter = 'round_shares'",{'val':round_shares})
self.dbc.execute("select value from pool where parameter = 'round_best_share'") self.dbc.execute("select value from pool where parameter = 'round_best_share'")
round_best_share = int(self.dbc.fetchone()[0]) round_best_share = int(self.dbc.fetchone()[0])
if best_diff > round_best_share: if best_diff > round_best_share:
self.dbc.execute("update pool set value = :val where parameter = 'round_best_share'",{'val':best_diff}) self.dbc.execute("update pool set value = :val where parameter = 'round_best_share'",{'val':best_diff})
self.dbc.execute("select value from pool where parameter = 'bitcoin_difficulty'") self.dbc.execute("select value from pool where parameter = 'bitcoin_difficulty'")
difficulty = float(self.dbc.fetchone()[0]) difficulty = float(self.dbc.fetchone()[0])
if difficulty == 0: if difficulty == 0:
progress = 0 progress = 0
else: else:
progress = (round_shares/difficulty)*100 progress = (round_shares/difficulty)*100
self.dbc.execute("update pool set value = :val where parameter = 'round_progress'",{'val':progress}) self.dbc.execute("update pool set value = :val where parameter = 'round_progress'",{'val':progress})
sqldata = [] sqldata = []
for k,v in checkin_times.items(): for k,v in checkin_times.items():
sqldata.append({'last_checkin':v["time"],'addshares':v["shares"],'addrejects':v["rejects"],'user':k}) sqldata.append({'last_checkin':v["time"],'addshares':v["shares"],'addrejects':v["rejects"],'user':k})
self.dbc.executemany("update pool_worker set last_checkin = :last_checkin, total_shares = total_shares + :addshares, " +\
"total_rejects = total_rejects + :addrejects where username = :user",sqldata)
else:
self.dbc.executemany("insert into shares (time,rem_host,username,our_result,upstream_result,reason,solution) " +\
"VALUES (:time,:rem_host,:username,:our_result,:upstream_result,:reason,:solution)",sqldata)
self.dbh.commit() self.dbc.executemany("update pool_worker set last_checkin = :last_checkin, total_shares = total_shares + :addshares, " +\
"total_rejects = total_rejects + :addrejects where username = :user",sqldata)
else:
self.dbc.executemany("insert into shares (time,rem_host,username,our_result,upstream_result,reason,solution) " +\
"VALUES (:time,:rem_host,:username,:our_result,:upstream_result,:reason,:solution)",sqldata)
self.dbh.commit()
def found_block(self,data): def found_block(self,data):
# Note: difficulty = -1 here # Note: difficulty = -1 here
self.dbc.execute("update shares set upstream_result = :usr, solution = :sol where time = :time and username = :user", self.dbc.execute("update shares set upstream_result = :usr, solution = :sol where time = :time and username = :user",
{'usr':data[5],'sol':data[2],'time':data[4],'user':data[0]}) {'usr':data[5],'sol':data[2],'time':data[4],'user':data[0]})
if settings.DATABASE_EXTEND and data[5] == True : if settings.DATABASE_EXTEND and data[5] == True :
self.dbc.execute("update pool_worker set total_found = total_found + 1 where username = :user",{'user':data[0]}) self.dbc.execute("update pool_worker set total_found = total_found + 1 where username = :user",{'user':data[0]})
self.dbc.execute("select value from pool where parameter = 'pool_total_found'") self.dbc.execute("select value from pool where parameter = 'pool_total_found'")
total_found = int(self.dbc.fetchone()[0]) + 1 total_found = int(self.dbc.fetchone()[0]) + 1
self.dbc.executemany("update pool set value = :val where parameter = :parm", [{'val':0,'parm':'round_shares'}, self.dbc.executemany("update pool set value = :val where parameter = :parm", [{'val':0,'parm':'round_shares'},
{'val':0,'parm':'round_progress'}, {'val':0,'parm':'round_progress'},
{'val':0,'parm':'round_best_share'}, {'val':0,'parm':'round_best_share'},
{'val':time.time(),'parm':'round_start'}, {'val':time.time(),'parm':'round_start'},
{'val':total_found,'parm':'pool_total_found'} {'val':total_found,'parm':'pool_total_found'}
]) ])
self.dbh.commit() self.dbh.commit()
def get_user(self, id_or_username): def get_user(self, id_or_username):
raise NotImplementedError('Not implemented for SQLite') raise NotImplementedError('Not implemented for SQLite')
@ -143,157 +144,157 @@ class DB_Sqlite():
raise NotImplementedError('Not implemented for SQLite') raise NotImplementedError('Not implemented for SQLite')
def insert_user(self,username,password): def insert_user(self,username,password):
log.debug("Adding Username/Password") log.debug("Adding Username/Password")
self.dbc.execute("insert into pool_worker (username,password) VALUES (:user,:pass)", {'user':username,'pass':password}) self.dbc.execute("insert into pool_worker (username,password) VALUES (:user,:pass)", {'user':username,'pass':password})
self.dbh.commit() self.dbh.commit()
def update_user(self,username,password): def update_user(self,username,password):
raise NotImplementedError('Not implemented for SQLite') raise NotImplementedError('Not implemented for SQLite')
def check_password(self,username,password): def check_password(self,username,password):
log.debug("Checking Username/Password") log.debug("Checking Username/Password")
self.dbc.execute("select COUNT(*) from pool_worker where username = :user and password = :pass", {'user':username,'pass':password}) self.dbc.execute("select COUNT(*) from pool_worker where username = :user and password = :pass", {'user':username,'pass':password})
data = self.dbc.fetchone() data = self.dbc.fetchone()
if data[0] > 0 : if data[0] > 0 :
return True return True
return False return False
def update_worker_diff(self,username,diff): def update_worker_diff(self,username,diff):
self.dbc.execute("update pool_worker set difficulty = :diff where username = :user",{'diff':diff,'user':username}) self.dbc.execute("update pool_worker set difficulty = :diff where username = :user",{'diff':diff,'user':username})
self.dbh.commit() self.dbh.commit()
def clear_worker_diff(self): def clear_worker_diff(self):
if settings.DATABASE_EXTEND == True : if settings.DATABASE_EXTEND == True :
self.dbc.execute("update pool_worker set difficulty = 0") self.dbc.execute("update pool_worker set difficulty = 0")
self.dbh.commit() self.dbh.commit()
def update_pool_info(self,pi): def update_pool_info(self,pi):
self.dbc.executemany("update pool set value = :val where parameter = :parm",[{'val':pi['blocks'],'parm':"bitcoin_blocks"}, self.dbc.executemany("update pool set value = :val where parameter = :parm",[{'val':pi['blocks'],'parm':"bitcoin_blocks"},
{'val':pi['balance'],'parm':"bitcoin_balance"}, {'val':pi['balance'],'parm':"bitcoin_balance"},
{'val':pi['connections'],'parm':"bitcoin_connections"}, {'val':pi['connections'],'parm':"bitcoin_connections"},
{'val':pi['difficulty'],'parm':"bitcoin_difficulty"}, {'val':pi['difficulty'],'parm':"bitcoin_difficulty"},
{'val':time.time(),'parm':"bitcoin_infotime"} {'val':time.time(),'parm':"bitcoin_infotime"}
]) ])
self.dbh.commit() self.dbh.commit()
def get_pool_stats(self): def get_pool_stats(self):
self.dbc.execute("select * from pool") self.dbc.execute("select * from pool")
ret = {} ret = {}
for data in self.dbc.fetchall(): for data in self.dbc.fetchall():
ret[data[0]] = data[1] ret[data[0]] = data[1]
return ret return ret
def get_workers_stats(self): def get_workers_stats(self):
self.dbc.execute("select username,speed,last_checkin,total_shares,total_rejects,total_found,alive,difficulty from pool_worker") self.dbc.execute("select username,speed,last_checkin,total_shares,total_rejects,total_found,alive,difficulty from pool_worker")
ret = {} ret = {}
for data in self.dbc.fetchall(): for data in self.dbc.fetchall():
ret[data[0]] = { "username" : data[0], ret[data[0]] = { "username" : data[0],
"speed" : data[1], "speed" : data[1],
"last_checkin" : data[2], "last_checkin" : data[2],
"total_shares" : data[3], "total_shares" : data[3],
"total_rejects" : data[4], "total_rejects" : data[4],
"total_found" : data[5], "total_found" : data[5],
"alive" : data[6], "alive" : data[6],
"difficulty" : data[7] } "difficulty" : data[7] }
return ret return ret
def close(self): def close(self):
self.dbh.close() self.dbh.close()
def check_tables(self): def check_tables(self):
log.debug("Checking Tables") log.debug("Checking Tables")
if settings.DATABASE_EXTEND == True : if settings.DATABASE_EXTEND == True :
self.dbc.execute("create table if not exists shares" +\ self.dbc.execute("create table if not exists shares" +\
"(time DATETIME,rem_host TEXT, username TEXT, our_result INTEGER, upstream_result INTEGER, reason TEXT, solution TEXT, " +\ "(time DATETIME,rem_host TEXT, username TEXT, our_result INTEGER, upstream_result INTEGER, reason TEXT, solution TEXT, " +\
"block_num INTEGER, prev_block_hash TEXT, useragent TEXT, difficulty INTEGER)") "block_num INTEGER, prev_block_hash TEXT, useragent TEXT, difficulty INTEGER)")
self.dbc.execute("create table if not exists pool_worker" +\ self.dbc.execute("create table if not exists pool_worker" +\
"(username TEXT, password TEXT, speed INTEGER, last_checkin DATETIME)") "(username TEXT, password TEXT, speed INTEGER, last_checkin DATETIME)")
self.dbc.execute("create table if not exists pool(parameter TEXT, value TEXT)") self.dbc.execute("create table if not exists pool(parameter TEXT, value TEXT)")
self.dbc.execute("select COUNT(*) from pool where parameter = 'DB Version'") self.dbc.execute("select COUNT(*) from pool where parameter = 'DB Version'")
data = self.dbc.fetchone() data = self.dbc.fetchone()
if data[0] <= 0: if data[0] <= 0:
self.dbc.execute("alter table pool_worker add total_shares INTEGER default 0") self.dbc.execute("alter table pool_worker add total_shares INTEGER default 0")
self.dbc.execute("alter table pool_worker add total_rejects INTEGER default 0") self.dbc.execute("alter table pool_worker add total_rejects INTEGER default 0")
self.dbc.execute("alter table pool_worker add total_found INTEGER default 0") self.dbc.execute("alter table pool_worker add total_found INTEGER default 0")
self.dbc.execute("insert into pool (parameter,value) VALUES ('DB Version',2)") self.dbc.execute("insert into pool (parameter,value) VALUES ('DB Version',2)")
self.update_tables() self.update_tables()
else : else :
self.dbc.execute("create table if not exists shares" + \ self.dbc.execute("create table if not exists shares" + \
"(time DATETIME,rem_host TEXT, username TEXT, our_result INTEGER, upstream_result INTEGER, reason TEXT, solution TEXT)") "(time DATETIME,rem_host TEXT, username TEXT, our_result INTEGER, upstream_result INTEGER, reason TEXT, solution TEXT)")
self.dbc.execute("create table if not exists pool_worker(username TEXT, password TEXT)") self.dbc.execute("create table if not exists pool_worker(username TEXT, password TEXT)")
self.dbc.execute("create index if not exists pool_worker_username ON pool_worker(username)") self.dbc.execute("create index if not exists pool_worker_username ON pool_worker(username)")
def update_tables(self): def update_tables(self):
version = 0 version = 0
current_version = 6 current_version = 6
while version < current_version : while version < current_version :
self.dbc.execute("select value from pool where parameter = 'DB Version'") self.dbc.execute("select value from pool where parameter = 'DB Version'")
data = self.dbc.fetchone() data = self.dbc.fetchone()
version = int(data[0]) version = int(data[0])
if version < current_version : if version < current_version :
log.info("Updating Database from %i to %i" % (version, version +1)) log.info("Updating Database from %i to %i" % (version, version +1))
getattr(self, 'update_version_' + str(version) )() getattr(self, 'update_version_' + str(version) )()
def update_version_2(self): def update_version_2(self):
log.info("running update 2") log.info("running update 2")
self.dbc.executemany("insert into pool (parameter,value) VALUES (?,?)",[('bitcoin_blocks',0), self.dbc.executemany("insert into pool (parameter,value) VALUES (?,?)",[('bitcoin_blocks',0),
('bitcoin_balance',0), ('bitcoin_balance',0),
('bitcoin_connections',0), ('bitcoin_connections',0),
('bitcoin_difficulty',0), ('bitcoin_difficulty',0),
('pool_speed',0), ('pool_speed',0),
('pool_total_found',0), ('pool_total_found',0),
('round_shares',0), ('round_shares',0),
('round_progress',0), ('round_progress',0),
('round_start',time.time()) ('round_start',time.time())
]) ])
self.dbc.execute("create index if not exists shares_username ON shares(username)") self.dbc.execute("create index if not exists shares_username ON shares(username)")
self.dbc.execute("create index if not exists pool_worker_username ON pool_worker(username)") self.dbc.execute("create index if not exists pool_worker_username ON pool_worker(username)")
self.dbc.execute("update pool set value = 3 where parameter = 'DB Version'") self.dbc.execute("update pool set value = 3 where parameter = 'DB Version'")
self.dbh.commit() self.dbh.commit()
def update_version_3(self): def update_version_3(self):
log.info("running update 3") log.info("running update 3")
self.dbc.executemany("insert into pool (parameter,value) VALUES (?,?)",[ self.dbc.executemany("insert into pool (parameter,value) VALUES (?,?)",[
('round_best_share',0), ('round_best_share',0),
('bitcoin_infotime',0), ('bitcoin_infotime',0),
]) ])
self.dbc.execute("alter table pool_worker add alive INTEGER default 0") self.dbc.execute("alter table pool_worker add alive INTEGER default 0")
self.dbc.execute("update pool set value = 4 where parameter = 'DB Version'") self.dbc.execute("update pool set value = 4 where parameter = 'DB Version'")
self.dbh.commit() self.dbh.commit()
def update_version_4(self): def update_version_4(self):
log.info("running update 4") log.info("running update 4")
self.dbc.execute("alter table pool_worker add difficulty INTEGER default 0") self.dbc.execute("alter table pool_worker add difficulty INTEGER default 0")
self.dbc.execute("create table if not exists shares_archive" +\ self.dbc.execute("create table if not exists shares_archive" +\
"(time DATETIME,rem_host TEXT, username TEXT, our_result INTEGER, upstream_result INTEGER, reason TEXT, solution TEXT, " +\ "(time DATETIME,rem_host TEXT, username TEXT, our_result INTEGER, upstream_result INTEGER, reason TEXT, solution TEXT, " +\
"block_num INTEGER, prev_block_hash TEXT, useragent TEXT, difficulty INTEGER)") "block_num INTEGER, prev_block_hash TEXT, useragent TEXT, difficulty INTEGER)")
self.dbc.execute("create table if not exists shares_archive_found" +\ self.dbc.execute("create table if not exists shares_archive_found" +\
"(time DATETIME,rem_host TEXT, username TEXT, our_result INTEGER, upstream_result INTEGER, reason TEXT, solution TEXT, " +\ "(time DATETIME,rem_host TEXT, username TEXT, our_result INTEGER, upstream_result INTEGER, reason TEXT, solution TEXT, " +\
"block_num INTEGER, prev_block_hash TEXT, useragent TEXT, difficulty INTEGER)") "block_num INTEGER, prev_block_hash TEXT, useragent TEXT, difficulty INTEGER)")
self.dbc.execute("update pool set value = 5 where parameter = 'DB Version'") self.dbc.execute("update pool set value = 5 where parameter = 'DB Version'")
self.dbh.commit() self.dbh.commit()
def update_version_5(self): def update_version_5(self):
log.info("running update 5") log.info("running update 5")
# Adding Primary key to table: pool # Adding Primary key to table: pool
self.dbc.execute("alter table pool rename to pool_old") self.dbc.execute("alter table pool rename to pool_old")
self.dbc.execute("create table if not exists pool(parameter TEXT, value TEXT, primary key(parameter))") self.dbc.execute("create table if not exists pool(parameter TEXT, value TEXT, primary key(parameter))")
self.dbc.execute("insert into pool select * from pool_old") self.dbc.execute("insert into pool select * from pool_old")
self.dbc.execute("drop table pool_old") self.dbc.execute("drop table pool_old")
self.dbh.commit() self.dbh.commit()
# Adding Primary key to table: pool_worker # Adding Primary key to table: pool_worker
self.dbc.execute("alter table pool_worker rename to pool_worker_old") self.dbc.execute("alter table pool_worker rename to pool_worker_old")
self.dbc.execute("CREATE TABLE pool_worker(username TEXT, password TEXT, speed INTEGER, last_checkin DATETIME, total_shares INTEGER default 0, total_rejects INTEGER default 0, total_found INTEGER default 0, alive INTEGER default 0, difficulty INTEGER default 0, primary key(username))") self.dbc.execute("CREATE TABLE pool_worker(username TEXT, password TEXT, speed INTEGER, last_checkin DATETIME, total_shares INTEGER default 0, total_rejects INTEGER default 0, total_found INTEGER default 0, alive INTEGER default 0, difficulty INTEGER default 0, primary key(username))")
self.dbc.execute("insert into pool_worker select * from pool_worker_old") self.dbc.execute("insert into pool_worker select * from pool_worker_old")
self.dbc.execute("drop table pool_worker_old") self.dbc.execute("drop table pool_worker_old")
self.dbh.commit() self.dbh.commit()
# Adjusting indicies on table: shares # Adjusting indicies on table: shares
self.dbc.execute("DROP INDEX shares_username") self.dbc.execute("DROP INDEX shares_username")
self.dbc.execute("CREATE INDEX shares_time_username ON shares(time,username)") self.dbc.execute("CREATE INDEX shares_time_username ON shares(time,username)")
self.dbc.execute("CREATE INDEX shares_upstreamresult ON shares(upstream_result)") self.dbc.execute("CREATE INDEX shares_upstreamresult ON shares(upstream_result)")
self.dbh.commit() self.dbh.commit()
self.dbc.execute("update pool set value = 6 where parameter = 'DB Version'") self.dbc.execute("update pool set value = 6 where parameter = 'DB Version'")
self.dbh.commit() self.dbh.commit()

View File

@ -39,6 +39,22 @@ def setup(on_startup):
# - we are not still downloading the blockchain (Sleep) # - we are not still downloading the blockchain (Sleep)
log.info("Connecting to litecoind...") log.info("Connecting to litecoind...")
while True: while True:
try:
result = (yield bitcoin_rpc.check_submitblock())
if result == True:
log.info("Found submitblock")
elif result == False:
log.info("Did not find submitblock")
else:
log.info("unknown submitblock result")
except ConnectionRefusedError, e:
log.error("Connection refused while trying to connect to the coind (are your COIND_* settings correct?)")
reactor.stop()
break
except Exception, e:
log.debug(str(e))
try: try:
result = (yield bitcoin_rpc.getblocktemplate()) result = (yield bitcoin_rpc.getblocktemplate())
if isinstance(result, dict): if isinstance(result, dict):
@ -49,11 +65,11 @@ def setup(on_startup):
if 'proof-of-stake' in result: if 'proof-of-stake' in result:
settings.COINDAEMON_Reward = 'POS' settings.COINDAEMON_Reward = 'POS'
log.info("Coin detected as POS") log.info("Coin detected as POS")
break; break
else: else:
settings.COINDAEMON_Reward = 'POW' settings.COINDAEMON_Reward = 'POW'
log.info("Coin detected as POW") log.info("Coin detected as POW")
break; break
else: else:
log.error("Block Version mismatch: %s" % result['version']) log.error("Block Version mismatch: %s" % result['version'])
@ -65,20 +81,20 @@ def setup(on_startup):
except Exception, e: except Exception, e:
if isinstance(e[2], str): if isinstance(e[2], str):
try: try:
if isinstance(json.loads(e[2])['error']['message'], str): if isinstance(json.loads(e[2])['error']['message'], str):
error = json.loads(e[2])['error']['message'] error = json.loads(e[2])['error']['message']
if error == "Method not found": if error == "Method not found":
log.error("CoinD does not support getblocktemplate!!! (time to upgrade.)") log.error("CoinD does not support getblocktemplate!!! (time to upgrade.)")
reactor.stop() reactor.stop()
elif "downloading blocks" in error: elif "downloading blocks" in error:
log.error("CoinD downloading blockchain... will check back in 30 sec") log.error("CoinD downloading blockchain... will check back in 30 sec")
time.sleep(29) time.sleep(29)
else: else:
log.error("Coind Error: %s", error) log.error("Coind Error: %s", error)
except ValueError: except ValueError:
log.error("Failed Connect(HTTP 500 or Invalid JSON), Check Username and Password!") log.error("Failed Connect(HTTP 500 or Invalid JSON), Check Username and Password!")
reactor.stop() reactor.stop()
time.sleep(1) # If we didn't get a result or the connect failed time.sleep(1) # If we didn't get a result or the connect failed
log.info('Connected to the coind - Begining to load Address and Module Checks!') log.info('Connected to the coind - Begining to load Address and Module Checks!')

View File

@ -126,9 +126,9 @@ class BasicShareLimiter(object):
else: else:
if ddiff > -1: if ddiff > -1:
ddiff = -1 ddiff = -1
# Don't drop below POOL_TARGET # Don't drop below POOL_TARGET
if (ddiff + current_difficulty) < settings.POOL_TARGET: if (ddiff + current_difficulty) < settings.POOL_TARGET:
ddiff = settings.VDIFF_MIN_TARGET - current_difficulty ddiff = settings.VDIFF_MIN_TARGET - current_difficulty
elif avg < self.tmin: elif avg < self.tmin:
# For fractional 0.1 ddiff's just up by 1 # For fractional 0.1 ddiff's just up by 1
if settings.VDIFF_X2_TYPE: if settings.VDIFF_X2_TYPE:
@ -174,8 +174,8 @@ class BasicShareLimiter(object):
session['difficulty'] = new_diff session['difficulty'] = new_diff
connection_ref().rpc('mining.set_difficulty', [new_diff, ], is_notification=True) connection_ref().rpc('mining.set_difficulty', [new_diff, ], is_notification=True)
log.debug("Notified of New Difficulty") log.debug("Notified of New Difficulty")
connection_ref().rpc('mining.notify', [work_id, prevhash, coinb1, coinb2, merkle_branch, version, nbits, ntime, False, ], is_notification=True) connection_ref().rpc('mining.notify', [work_id, prevhash, coinb1, coinb2, merkle_branch, version, nbits, ntime, False, ], is_notification=True)
log.debug("Sent new work") log.debug("Sent new work")
dbi.update_worker_diff(worker_name, new_diff) dbi.update_worker_diff(worker_name, new_diff)

View File

@ -36,10 +36,10 @@ class WorkerManagerInterface(object):
return (False, settings.POOL_TARGET) return (False, settings.POOL_TARGET)
def register_work(self, worker_name, job_id, difficulty): def register_work(self, worker_name, job_id, difficulty):
now = Interfaces.timestamper.time() now = Interfaces.timestamper.time()
work_id = WorkIdGenerator.get_new_id() work_id = WorkIdGenerator.get_new_id()
self.job_log.setdefault(worker_name, {})[work_id] = (job_id, difficulty, now) self.job_log.setdefault(worker_name, {})[work_id] = (job_id, difficulty, now)
return work_id return work_id
class WorkIdGenerator(object): class WorkIdGenerator(object):
counter = 1000 counter = 1000
@ -82,6 +82,7 @@ class ShareManagerInterface(object):
def on_submit_block(self, is_accepted, worker_name, block_header, block_hash, timestamp, ip, share_diff): def on_submit_block(self, is_accepted, worker_name, block_header, block_hash, timestamp, ip, share_diff):
log.info("Block %s %s" % (block_hash, 'ACCEPTED' if is_accepted else 'REJECTED')) log.info("Block %s %s" % (block_hash, 'ACCEPTED' if is_accepted else 'REJECTED'))
dbi.do_import(dbi, True)
dbi.found_block([worker_name, block_header, block_hash, -1, timestamp, is_accepted, ip, self.block_height, self.prev_hash, share_diff ]) dbi.found_block([worker_name, block_header, block_hash, -1, timestamp, is_accepted, ip, self.block_height, self.prev_hash, share_diff ])
class TimestamperInterface(object): class TimestamperInterface(object):

View File

@ -90,6 +90,8 @@ class MiningService(GenericService):
Interfaces.worker_manager.worker_log['authorized'][worker_name] = (0, 0, False, session['difficulty'], is_ext_diff, Interfaces.timestamper.time()) Interfaces.worker_manager.worker_log['authorized'][worker_name] = (0, 0, False, session['difficulty'], is_ext_diff, Interfaces.timestamper.time())
return True return True
else: else:
ip = self.connection_ref()._get_ip()
log.info("Failed worker authorization: IP %s", str(ip))
if worker_name in session['authorized']: if worker_name in session['authorized']:
del session['authorized'][worker_name] del session['authorized'][worker_name]
if worker_name in Interfaces.worker_manager.worker_log['authorized']: if worker_name in Interfaces.worker_manager.worker_log['authorized']:
@ -116,13 +118,16 @@ class MiningService(GenericService):
session.setdefault('authorized', {}) session.setdefault('authorized', {})
# Check if worker is authorized to submit shares # Check if worker is authorized to submit shares
ip = self.connection_ref()._get_ip()
if not Interfaces.worker_manager.authorize(worker_name, session['authorized'].get(worker_name)): if not Interfaces.worker_manager.authorize(worker_name, session['authorized'].get(worker_name)):
log.info("Worker is not authorized: IP %s", str(ip))
raise SubmitException("Worker is not authorized") raise SubmitException("Worker is not authorized")
# Check if extranonce1 is in connection session # Check if extranonce1 is in connection session
extranonce1_bin = session.get('extranonce1', None) extranonce1_bin = session.get('extranonce1', None)
if not extranonce1_bin: if not extranonce1_bin:
log.info("Connection is not subscribed for mining: IP %s", str(ip))
raise SubmitException("Connection is not subscribed for mining") raise SubmitException("Connection is not subscribed for mining")
# Get current block job_id # Get current block job_id
@ -136,7 +141,7 @@ class MiningService(GenericService):
#log.debug("worker_job_log: %s" % repr(Interfaces.worker_manager.job_log)) #log.debug("worker_job_log: %s" % repr(Interfaces.worker_manager.job_log))
submit_time = Interfaces.timestamper.time() submit_time = Interfaces.timestamper.time()
ip = self.connection_ref()._get_ip()
(valid, invalid, is_banned, diff, is_ext_diff, last_ts) = Interfaces.worker_manager.worker_log['authorized'][worker_name] (valid, invalid, is_banned, diff, is_ext_diff, last_ts) = Interfaces.worker_manager.worker_log['authorized'][worker_name]
percent = float(float(invalid) / (float(valid) if valid else 1) * 100) percent = float(float(invalid) / (float(valid) if valid else 1) * 100)

View File

@ -4,20 +4,20 @@ import lib.logger
log = lib.logger.get_logger('work_log_pruner') log = lib.logger.get_logger('work_log_pruner')
def _WorkLogPruner_I(wl): def _WorkLogPruner_I(wl):
now = time() now = time()
pruned = 0 pruned = 0
for username in wl: for username in wl:
userwork = wl[username] userwork = wl[username]
for wli in tuple(userwork.keys()): for wli in tuple(userwork.keys()):
if now > userwork[wli][2] + 120: if now > userwork[wli][2] + 120:
del userwork[wli] del userwork[wli]
pruned += 1 pruned += 1
log.info('Pruned %d jobs' % (pruned,)) log.info('Pruned %d jobs' % (pruned,))
def WorkLogPruner(wl): def WorkLogPruner(wl):
while True: while True:
try: try:
sleep(60) sleep(60)
_WorkLogPruner_I(wl) _WorkLogPruner_I(wl)
except: except:
log.debug(traceback.format_exc()) log.debug(traceback.format_exc())

View File

@ -1,48 +1,34 @@
# This File is used to create a list of requirements needed for testing stratum-mining or to create a clone install
BeautifulSoup==3.2.1 BeautifulSoup==3.2.1
#Brlapi==0.5.7 Jinja2==2.7.2
#GnuPGInterface==0.3.2 Magic-file-extensions==0.2
MySQL-python==1.2.3 MarkupSafe==0.18
#PAM==0.4.2 MySQL-python==1.2.5
#Pyste==0.9.10 PyYAML==3.10
#SOAPpy==0.12.0 Twisted==13.2.0
Twisted==12.0.0 ansible==1.4.5
#Twisted-Conch==12.0.0
#Twisted-Core==12.0.0
#Twisted-Lore==12.0.0
#Twisted-Mail==12.0.0
#Twisted-Names==12.0.0
#Twisted-News==12.0.0
#Twisted-Runner==12.0.0
#Twisted-Web==12.0.0
#Twisted-Words==12.0.0
#apt-xapian-index==0.45
argparse==1.2.1 argparse==1.2.1
autobahn==0.6.5 autobahn==0.8.4-3
#chardet==2.0.1 cffi==0.8.2
cryptography==0.2.2
defer==1.0.6 defer==1.0.6
distribute==0.6.28
ecdsa==0.10 ecdsa==0.10
feedparser==5.1.2 feedparser==5.1.3
fpconst==0.7.2 fpconst==0.7.2
httplib2==0.7.4 httplib2==0.8
#louis==2.4.1 numpy==1.7.1
#ltc-scrypt==1.0 paramiko==1.12.1
#numpy==1.6.2 pyOpenSSL==0.14
pyOpenSSL==0.13 pyasn1==0.1.7
pyasn1==0.1.3 pycparser==2.10
pycrypto==2.6 pycrypto==2.6.1
#pycurl==7.19.0 pylibmc==1.2.3
pyserial==2.5 pyserial==2.7
#python-apt==0.8.8.2 python-dateutil==2.1
#python-debian==0.1.21 python-memcached==1.53
#python-debianbts==1.11 pyxdg==0.25
python-memcached==1.48 requests==2.2.0
pyxdg==0.19 simplejson==3.3.3
#reportbug==6.4.4 six==1.4.1
simplejson==2.5.2 wsgiref==0.1.2
#stratum==0.2.13 zope.interface==4.1.0
#uTidylib==0.2 stratum==0.2.15
#unattended-upgrades==0.1
#wsgiref==0.1.2
zope.interface==3.6.1

260
sql/base_structure.sql Normal file
View File

@ -0,0 +1,260 @@
-- Import-session setup (MySQL dump style):
-- NO_AUTO_VALUE_ON_ZERO keeps explicit 0 values in AUTO_INCREMENT columns,
-- and the session time zone is pinned to UTC so TIMESTAMP columns are
-- interpreted consistently regardless of server configuration.
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
-- Save the client's character-set/collation settings; they are restored
-- by the matching /*!40101 ... */ statements at the end of this script.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
-- User accounts: login credentials, contact details, lock-out counters
-- and per-account donation settings. `username` and `email` are both
-- enforced unique.
CREATE TABLE IF NOT EXISTS `accounts` (
  `id` int(255) NOT NULL AUTO_INCREMENT,
  `is_admin` tinyint(1) NOT NULL DEFAULT '0',
  `is_anonymous` tinyint(1) NOT NULL DEFAULT '0',
  `no_fees` tinyint(1) NOT NULL DEFAULT '0',
  `username` varchar(40) NOT NULL,
  `pass` varchar(255) NOT NULL,
  -- COMMENT typo fixed: "Assocaited" -> "Associated"
  `email` varchar(255) DEFAULT NULL COMMENT 'Associated email: used for validating users, and re-setting passwords',
  -- NOTE(review): default '415' does not look like a timezone name — confirm
  -- what the front-end stores here.
  `timezone` varchar(35) NOT NULL DEFAULT '415',
  `notify_email` VARCHAR( 255 ) NULL DEFAULT NULL,
  `loggedIp` varchar(255) DEFAULT NULL,
  `is_locked` tinyint(1) NOT NULL DEFAULT '0',
  `failed_logins` int(5) unsigned DEFAULT '0',
  `failed_pins` int(5) unsigned DEFAULT '0',
  `signup_timestamp` int(10) DEFAULT '0',
  `last_login` int(10) DEFAULT NULL,
  `pin` varchar(255) NOT NULL COMMENT 'four digit pin to allow account changes',
  `api_key` varchar(255) DEFAULT NULL,
  `token` varchar(65) DEFAULT NULL,
  `donate_percent` float DEFAULT '0',
  PRIMARY KEY (`id`),
  UNIQUE KEY `username` (`username`),
  UNIQUE KEY `email` (`email`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- Discovered blocks; (`height`,`blockhash`) is unique so the same block
-- cannot be recorded twice, and `accounted` marks rows already processed.
CREATE TABLE IF NOT EXISTS `blocks` (
`id` int(10) unsigned NOT NULL AUTO_INCREMENT,
`height` int(10) unsigned NOT NULL,
`blockhash` char(65) NOT NULL,
`confirmations` int(10) NOT NULL,
`amount` double NOT NULL,
`difficulty` double NOT NULL,
-- unix timestamp (int), unlike the TIMESTAMP columns used elsewhere
`time` int(11) NOT NULL,
`accounted` tinyint(1) NOT NULL DEFAULT '0',
`account_id` int(255) unsigned DEFAULT NULL,
`worker_name` varchar(50) DEFAULT 'unknown',
`shares` double unsigned DEFAULT NULL,
`share_id` bigint(30) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `height` (`height`,`blockhash`),
KEY `time` (`time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='Discovered blocks persisted from Litecoin Service';
-- Payout addresses per account and currency; `ap_threshold` is the
-- auto-payout trigger amount (0 = disabled, presumably — verify in app code).
CREATE TABLE IF NOT EXISTS `coin_addresses` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`account_id` int(11) NOT NULL,
`currency` varchar(5) NOT NULL,
-- globally unique: one address cannot belong to two accounts
`coin_address` varchar(255) NOT NULL,
`ap_threshold` float DEFAULT '0',
PRIMARY KEY (`id`),
UNIQUE KEY `coin_address` (`coin_address`),
KEY `account_id` (`account_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- Email invitations sent by existing accounts; `token_id` links to the
-- `tokens` table and `is_activated` flips once the invite is redeemed.
CREATE TABLE IF NOT EXISTS `invitations` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`account_id` int(11) unsigned NOT NULL,
`email` varchar(50) NOT NULL,
`token_id` int(11) NOT NULL,
`is_activated` tinyint(1) NOT NULL DEFAULT '0',
`time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- Key/value status entries written by cronjobs; `name` is unique so each
-- monitored event keeps exactly one current row.
CREATE TABLE IF NOT EXISTS `monitoring` (
`id` int(10) unsigned NOT NULL AUTO_INCREMENT,
`name` varchar(30) NOT NULL,
`type` varchar(15) NOT NULL,
`value` varchar(255) NOT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `name` (`name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='Monitoring events from cronjobs';
-- Front-page news posts authored by an account; only rows with
-- `active` = 1 are meant to be displayed (presumably — verify in UI code).
CREATE TABLE IF NOT EXISTS `news` (
`id` int(10) unsigned NOT NULL AUTO_INCREMENT,
`account_id` int(10) unsigned NOT NULL,
`header` varchar(255) NOT NULL,
`content` text NOT NULL,
`time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`active` tinyint(1) NOT NULL DEFAULT '0',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- Pending/sent notification events; `account_id` is nullable, so a row
-- can represent a system-wide event with no specific recipient.
CREATE TABLE IF NOT EXISTS `notifications` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`type` varchar(25) NOT NULL,
`data` varchar(255) NOT NULL,
`active` tinyint(1) NOT NULL DEFAULT '1',
`time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`account_id` int(10) unsigned DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `active` (`active`),
KEY `data` (`data`),
KEY `account_id` (`account_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- Per-account opt-in flags for each notification type; the composite
-- unique key allows at most one row per (account, type) pair.
CREATE TABLE IF NOT EXISTS `notification_settings` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`type` varchar(15) NOT NULL,
`account_id` int(11) NOT NULL,
`active` tinyint(1) NOT NULL DEFAULT '0',
PRIMARY KEY (`id`),
-- NOTE(review): KEY `account_id` is a left prefix of the unique key below
-- and is therefore redundant — candidate for removal in a migration.
KEY `account_id` (`account_id`),
UNIQUE KEY `account_id_type` (`account_id`,`type`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- Manual payout requests; `completed` flips once the payout has been
-- processed, and the composite index serves "open requests per account".
CREATE TABLE IF NOT EXISTS `payouts` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`account_id` int(11) NOT NULL,
`time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`completed` tinyint(1) NOT NULL DEFAULT '0',
PRIMARY KEY (`id`),
KEY `account_id` (`account_id`,`completed`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- Stratum workers belonging to an account; `username` is the worker login
-- the mining client authenticates with and must be globally unique.
CREATE TABLE IF NOT EXISTS `pool_worker` (
`id` int(255) NOT NULL AUTO_INCREMENT,
`account_id` int(255) NOT NULL,
`username` char(50) DEFAULT NULL,
`password` char(255) DEFAULT NULL,
-- current vardiff target for this worker
`difficulty` float NOT NULL DEFAULT '0',
`monitor` tinyint(1) NOT NULL DEFAULT '0',
PRIMARY KEY (`id`),
UNIQUE KEY `username` (`username`),
KEY `account_id` (`account_id`),
-- NOTE(review): this 10-char prefix index is redundant with the full
-- UNIQUE KEY on `username` above — candidate for removal in a migration.
KEY `pool_worker_username` (`username`(10))
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- Global key/value configuration store, seeded with the schema version
-- so upgrade scripts can detect what they are migrating from.
CREATE TABLE IF NOT EXISTS `settings` (
`name` varchar(255) NOT NULL,
`value` text DEFAULT NULL,
PRIMARY KEY (`name`),
-- NOTE(review): this unique key duplicates the PRIMARY KEY on `name`
-- and adds no constraint — candidate for removal in a migration.
UNIQUE KEY `setting` (`name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
INSERT INTO `settings` (`name`, `value`) VALUES ('DB_VERSION', '1.0.3');
-- Raw submitted shares from the stratum server. `our_result` is the pool's
-- own validation verdict; `upstream_result` records whether the coind
-- accepted the share as a block (NULL for ordinary shares).
CREATE TABLE IF NOT EXISTS `shares` (
`id` bigint(30) NOT NULL AUTO_INCREMENT,
-- miner's remote host/IP as seen by the server
`rem_host` varchar(255) NOT NULL,
`username` varchar(120) NOT NULL,
`our_result` enum('Y','N') NOT NULL,
`upstream_result` enum('Y','N') DEFAULT NULL,
-- rejection reason when our_result = 'N'
`reason` varchar(50) DEFAULT NULL,
`solution` varchar(257) NOT NULL,
`difficulty` float NOT NULL DEFAULT '0',
`time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
KEY `time` (`time`),
KEY `upstream_result` (`upstream_result`),
KEY `our_result` (`our_result`),
KEY `username` (`username`),
-- NOTE(review): this 10-char prefix index is redundant with the full
-- `username` index above — candidate for removal in a migration.
KEY `shares_username` (`username`(10))
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- Long-term copy of accounted shares; `share_id` is unique, preserving a
-- 1:1 link back to the original `shares` row it was archived from.
CREATE TABLE IF NOT EXISTS `shares_archive` (
`id` bigint(30) unsigned NOT NULL AUTO_INCREMENT,
`share_id` bigint(30) unsigned NOT NULL,
`username` varchar(120) NOT NULL,
`our_result` enum('Y','N') DEFAULT NULL,
`upstream_result` enum('Y','N') DEFAULT NULL,
`block_id` int(10) unsigned NOT NULL,
`difficulty` float NOT NULL DEFAULT '0',
-- DATETIME here, not TIMESTAMP — value is copied, not auto-set
`time` datetime NOT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `share_id` (`share_id`),
KEY `time` (`time`),
KEY `our_result` (`our_result`),
KEY `username` (`username`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='Archive shares for potential later debugging purposes';
-- Aggregated share counts per account per block, kept separately for
-- plain and PPLNS accounting (float to allow difficulty-weighted totals).
CREATE TABLE IF NOT EXISTS `statistics_shares` (
`id` int(10) unsigned NOT NULL AUTO_INCREMENT,
`account_id` int(10) unsigned NOT NULL,
`block_id` int(10) unsigned NOT NULL,
`valid` float unsigned NOT NULL DEFAULT '0',
`invalid` float unsigned NOT NULL DEFAULT '0',
`pplns_valid` float unsigned NOT NULL DEFAULT '0',
`pplns_invalid` float unsigned NOT NULL DEFAULT '0',
PRIMARY KEY (`id`),
KEY `account_id` (`account_id`),
KEY `block_id` (`block_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- One-time security tokens issued to accounts; `type` references a row in
-- `token_types`, which also defines the token's expiration window.
CREATE TABLE IF NOT EXISTS `tokens` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`account_id` int(11) NOT NULL,
`token` varchar(65) NOT NULL,
`type` tinyint(4) NOT NULL,
`time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
UNIQUE KEY `token` (`token`),
KEY `account_id` (`account_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- Lookup table of token kinds; `expiration` is the token lifetime in
-- seconds, with 0 meaning the token never expires (per the seed rows).
CREATE TABLE IF NOT EXISTS `token_types` (
`id` tinyint(4) unsigned NOT NULL AUTO_INCREMENT,
`name` varchar(25) NOT NULL,
`expiration` INT NULL DEFAULT '0',
PRIMARY KEY (`id`),
UNIQUE KEY `name` (`name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- Seed the fixed set of token kinds with explicit ids so application
-- code can reference them by number.
INSERT INTO `token_types` (`id`, `name`, `expiration`) VALUES
(1, 'password_reset', 3600),
(2, 'confirm_email', 0),
(3, 'invitation', 0),
(4, 'account_unlock', 0),
(5, 'account_edit', 3600),
(6, 'change_pw', 3600),
(7, 'withdraw_funds', 3600);
-- Ledger of credits/debits per account; `amount` uses DECIMAL(50,30) for
-- exact coin arithmetic, and `archived` rows are excluded from live
-- balance queries (presumably — verify in accounting code).
CREATE TABLE IF NOT EXISTS `transactions` (
`id` int(255) NOT NULL AUTO_INCREMENT,
`account_id` int(255) unsigned NOT NULL,
`type` varchar(25) DEFAULT NULL,
`coin_address` varchar(255) DEFAULT NULL,
`amount` decimal(50,30) DEFAULT '0',
`block_id` int(255) DEFAULT NULL,
`timestamp` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`txid` varchar(256) DEFAULT NULL,
`archived` tinyint(1) NOT NULL DEFAULT '0',
PRIMARY KEY (`id`),
KEY `block_id` (`block_id`),
KEY `account_id` (`account_id`),
KEY `type` (`type`),
KEY `archived` (`archived`),
KEY `account_id_archived` (`account_id`,`archived`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- Time-series snapshots of per-account hashrate, worker count and share
-- rate, keyed for range scans on (account, timestamp).
-- Fix: added IF NOT EXISTS — this was the only table in the script
-- without it, which would make a re-run of the import abort here instead
-- of completing idempotently like every other statement.
CREATE TABLE IF NOT EXISTS `statistics_users` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `account_id` int(11) NOT NULL,
  `hashrate` bigint(20) unsigned NOT NULL,
  `workers` int(11) NOT NULL,
  `sharerate` float NOT NULL,
  -- unix timestamp (int), matching the `blocks`.`time` convention
  `timestamp` int(11) NOT NULL,
  PRIMARY KEY (`id`),
  KEY `account_id_timestamp` (`account_id`,`timestamp`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- Per-account key/value preference store; the composite primary key
-- allows one value per (account, setting name) pair.
CREATE TABLE IF NOT EXISTS `user_settings` (
`account_id` int(11) NOT NULL,
`name` varchar(50) NOT NULL,
`value` text DEFAULT NULL,
PRIMARY KEY (`account_id`,`name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT;
-- Restore the client character-set/collation settings saved at the top
-- of this script.
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;