recommendations from pycodestyle (pep8 style)

This commit is contained in:
Nathan Marley 2017-02-07 20:12:48 -08:00
parent 09c4bdc5c3
commit a03665696c
16 changed files with 30 additions and 25 deletions

View File

@@ -74,7 +74,7 @@ def rpc_send_and_wait(port, method, params, timeout=15):
def main(): def main():
'''Send the RPC command to the server and print the result.''' '''Send the RPC command to the server and print the result.'''
parser = argparse.ArgumentParser('Send electrumx an RPC command' ) parser = argparse.ArgumentParser('Send electrumx an RPC command')
parser.add_argument('-p', '--port', metavar='port_num', type=int, parser.add_argument('-p', '--port', metavar='port_num', type=int,
help='RPC port number') help='RPC port number')
parser.add_argument('command', nargs=1, default=[], parser.add_argument('command', nargs=1, default=[],

View File

@@ -25,6 +25,7 @@ SUPPRESS_MESSAGES = [
'Fatal write error on socket transport', 'Fatal write error on socket transport',
] ]
def main_loop(): def main_loop():
'''Start the server.''' '''Start the server.'''
if os.geteuid() == 0: if os.geteuid() == 0:
@@ -32,7 +33,7 @@ def main_loop():
'account and use that') 'account and use that')
loop = asyncio.get_event_loop() loop = asyncio.get_event_loop()
#loop.set_debug(True) # loop.set_debug(True)
def on_signal(signame): def on_signal(signame):
'''Call on receipt of a signal to cleanly shutdown.''' '''Call on receipt of a signal to cleanly shutdown.'''
@@ -43,7 +44,7 @@ def main_loop():
def on_exception(loop, context): def on_exception(loop, context):
'''Suppress spurious messages it appears we cannot control.''' '''Suppress spurious messages it appears we cannot control.'''
message = context.get('message') message = context.get('message')
if not message in SUPPRESS_MESSAGES: if message not in SUPPRESS_MESSAGES:
if not ('task' in context and if not ('task' in context and
'accept_connection2()' in repr(context.get('task'))): 'accept_connection2()' in repr(context.get('task'))):
loop.default_exception_handler(context) loop.default_exception_handler(context)

View File

@@ -477,7 +477,7 @@ class DashTestnet(Dash):
TX_PER_BLOCK = 1 TX_PER_BLOCK = 1
RPC_PORT = 19998 RPC_PORT = 19998
IRC_PREFIX = "d_" IRC_PREFIX = "d_"
PEER_DEFAULT_PORTS = {'t':'51001', 's':'51002'} PEER_DEFAULT_PORTS = {'t': '51001', 's': '51002'}
PEERS = [ PEERS = [
'electrum.dash.siampm.com s t', 'electrum.dash.siampm.com s t',
] ]

View File

@@ -87,7 +87,7 @@ class JSONRPCv1(JSONRPC):
def is_request(cls, payload): def is_request(cls, payload):
'''Returns True if the payload (which has a method) is a request. '''Returns True if the payload (which has a method) is a request.
False means it is a notification.''' False means it is a notification.'''
return payload.get('id') != None return payload.get('id') is not None
class JSONRPCv2(JSONRPC): class JSONRPCv2(JSONRPC):
@@ -296,7 +296,7 @@ class JSONSessionBase(util.LoggedClass):
'''Extract and return the ID from the payload. '''Extract and return the ID from the payload.
Raises an RPCError if it is missing or invalid.''' Raises an RPCError if it is missing or invalid.'''
if not 'id' in payload: if 'id' not in payload:
raise RPCError('missing id', JSONRPC.INVALID_REQUEST) raise RPCError('missing id', JSONRPC.INVALID_REQUEST)
id_ = payload['id'] id_ = payload['id']

View File

@@ -24,6 +24,7 @@ class Tx(namedtuple("Tx", "version inputs outputs locktime")):
# FIXME: add hash as a cached property? # FIXME: add hash as a cached property?
class TxInput(namedtuple("TxInput", "prev_hash prev_idx script sequence")): class TxInput(namedtuple("TxInput", "prev_hash prev_idx script sequence")):
'''Class representing a transaction input.''' '''Class representing a transaction input.'''
@@ -98,10 +99,10 @@ class Deserializer(object):
def _read_input(self): def _read_input(self):
return TxInput( return TxInput(
self._read_nbytes(32), # prev_hash self._read_nbytes(32), # prev_hash
self._read_le_uint32(), # prev_idx self._read_le_uint32(), # prev_idx
self._read_varbytes(), # script self._read_varbytes(), # script
self._read_le_uint32() # sequence self._read_le_uint32() # sequence
) )
def _read_outputs(self): def _read_outputs(self):

View File

@@ -109,6 +109,7 @@ def deep_getsizeof(obj):
return size(obj) return size(obj)
def subclasses(base_class, strict=True): def subclasses(base_class, strict=True):
'''Return a list of subclasses of base_class in its module.''' '''Return a list of subclasses of base_class in its module.'''
def select(obj): def select(obj):

View File

@@ -71,5 +71,6 @@ def main():
print('Balance: {} {}'.format(coin.decimal_value(balance), print('Balance: {} {}'.format(coin.decimal_value(balance),
coin.SHORTNAME)) coin.SHORTNAME))
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -735,7 +735,7 @@ class BlockProcessor(server.db.DB):
for cache_key, cache_value in self.utxo_cache.items(): for cache_key, cache_value in self.utxo_cache.items():
# suffix = tx_idx + tx_num # suffix = tx_idx + tx_num
hashX = cache_value[:-12] hashX = cache_value[:-12]
suffix = cache_key[-2:] + cache_value[-12:-8] suffix = cache_key[-2:] + cache_value[-12:-8]
batch_put(b'h' + cache_key[:4] + suffix, hashX) batch_put(b'h' + cache_key[:4] + suffix, hashX)
batch_put(b'u' + hashX + suffix, cache_value[-8:]) batch_put(b'u' + hashX + suffix, cache_value[-8:])
self.utxo_cache = {} self.utxo_cache = {}

View File

@@ -170,7 +170,7 @@ class Controller(util.LoggedClass):
def enqueue_session(self, session): def enqueue_session(self, session):
# Might have disconnected whilst waiting # Might have disconnected whilst waiting
if not session in self.sessions: if session not in self.sessions:
return return
priority = self.session_priority(session) priority = self.session_priority(session)
item = (priority, self.next_queue_id, session) item = (priority, self.next_queue_id, session)
@@ -274,7 +274,7 @@ class Controller(util.LoggedClass):
future.cancel() future.cancel()
# Wait for all futures to finish # Wait for all futures to finish
while not all (future.done() for future in self.futures): while not all(future.done() for future in self.futures):
await asyncio.sleep(0.1) await asyncio.sleep(0.1)
# Finally shut down the block processor and executor # Finally shut down the block processor and executor
@@ -329,7 +329,7 @@ class Controller(util.LoggedClass):
''' '''
self.state = self.LISTENING self.state = self.LISTENING
env= self.env env = self.env
if env.tcp_port is not None: if env.tcp_port is not None:
await self.start_server('TCP', env.host, env.tcp_port) await self.start_server('TCP', env.host, env.tcp_port)
if env.ssl_port is not None: if env.ssl_port is not None:

View File

@@ -20,6 +20,7 @@ import lib.util as util
class DaemonError(Exception): class DaemonError(Exception):
'''Raised when the daemon returns an error in its results.''' '''Raised when the daemon returns an error in its results.'''
class Daemon(util.LoggedClass): class Daemon(util.LoggedClass):
'''Handles connections to a daemon at the given URL.''' '''Handles connections to a daemon at the given URL.'''

View File

@@ -23,6 +23,7 @@ from server.version import VERSION
UTXO = namedtuple("UTXO", "tx_num tx_pos tx_hash height value") UTXO = namedtuple("UTXO", "tx_num tx_pos tx_hash height value")
class DB(util.LoggedClass): class DB(util.LoggedClass):
'''Simple wrapper of the backend database for querying. '''Simple wrapper of the backend database for querying.

View File

@@ -67,7 +67,7 @@ class Env(LoggedClass):
'' ''
) )
self.tor_identity = NetIdentity( self.tor_identity = NetIdentity(
self.default('REPORT_HOST_TOR', ''), # must be a string self.default('REPORT_HOST_TOR', ''), # must be a string
self.integer('REPORT_TCP_PORT_TOR', self.integer('REPORT_TCP_PORT_TOR',
self.identity.tcp_port self.identity.tcp_port
if self.identity.tcp_port else if self.identity.tcp_port else
@@ -85,7 +85,6 @@ class Env(LoggedClass):
if self.identity.tcp_port == self.identity.ssl_port: if self.identity.tcp_port == self.identity.ssl_port:
raise self.Error('IRC TCP and SSL ports are the same') raise self.Error('IRC TCP and SSL ports are the same')
def default(self, envvar, default): def default(self, envvar, default):
return environ.get(envvar, default) return environ.get(envvar, default)

View File

@@ -93,8 +93,8 @@ class MemPool(util.LoggedClass):
process_some = self.async_process_some(unfetched, fetch_size // 2) process_some = self.async_process_some(unfetched, fetch_size // 2)
await self.daemon.mempool_refresh_event.wait() await self.daemon.mempool_refresh_event.wait()
self.logger.info ('beginning processing of daemon mempool. ' self.logger.info('beginning processing of daemon mempool. '
'This can take some time...') 'This can take some time...')
next_log = 0 next_log = 0
loops = -1 # Zero during initial catchup loops = -1 # Zero during initial catchup
@@ -187,7 +187,7 @@ class MemPool(util.LoggedClass):
# Skip hashes the daemon has dropped. Either they were # Skip hashes the daemon has dropped. Either they were
# evicted or they got in a block. # evicted or they got in a block.
return {hh:raw for hh, raw in zip(hex_hashes, raw_txs) if raw} return {hh: raw for hh, raw in zip(hex_hashes, raw_txs) if raw}
def process_raw_txs(self, raw_tx_map, pending): def process_raw_txs(self, raw_tx_map, pending):
'''Process the dictionary of raw transactions and return a dictionary '''Process the dictionary of raw transactions and return a dictionary
@@ -204,7 +204,7 @@ class MemPool(util.LoggedClass):
# Deserialize each tx and put it in our priority queue # Deserialize each tx and put it in our priority queue
for tx_hash, raw_tx in raw_tx_map.items(): for tx_hash, raw_tx in raw_tx_map.items():
if not tx_hash in txs: if tx_hash not in txs:
continue continue
tx, _tx_hash = deserializer(raw_tx).read_tx() tx, _tx_hash = deserializer(raw_tx).read_tx()
@@ -267,7 +267,7 @@ class MemPool(util.LoggedClass):
unconfirmed is True if any txin is unconfirmed. unconfirmed is True if any txin is unconfirmed.
''' '''
# hashXs is a defaultdict # hashXs is a defaultdict
if not hashX in self.hashXs: if hashX not in self.hashXs:
return [] return []
deserializer = self.coin.deserializer() deserializer = self.coin.deserializer()

View File

@@ -12,6 +12,7 @@ from functools import partial
import lib.util as util import lib.util as util
def db_class(name): def db_class(name):
'''Returns a DB engine class.''' '''Returns a DB engine class.'''
for db_class in util.subclasses(Storage): for db_class in util.subclasses(Storage):

View File

@@ -44,8 +44,8 @@ def test_batch(db):
def test_iterator(db): def test_iterator(db):
""" """
The iterator should contain all key/value pairs starting with prefix ordered The iterator should contain all key/value pairs starting with prefix
by key. ordered by key.
""" """
for i in range(5): for i in range(5):
db.put(b"abc" + str.encode(str(i)), str.encode(str(i))) db.put(b"abc" + str.encode(str(i)), str.encode(str(i)))

View File

@@ -19,7 +19,6 @@ def test_cachedproperty():
cls.CALL_COUNT += 1 cls.CALL_COUNT += 1
return cls.CALL_COUNT return cls.CALL_COUNT
t = Target() t = Target()
assert t.prop == t.prop == 1 assert t.prop == t.prop == 1
assert Target.cls_prop == Target.cls_prop == 1 assert Target.cls_prop == Target.cls_prop == 1
@@ -56,4 +55,4 @@ def test_chunks():
def test_increment_byte_string(): def test_increment_byte_string():
assert util.increment_byte_string(b'1') == b'2' assert util.increment_byte_string(b'1') == b'2'
assert util.increment_byte_string(b'\x01\x01') == b'\x01\x02' assert util.increment_byte_string(b'\x01\x01') == b'\x01\x02'
assert util.increment_byte_string(b'\xff\xff') == None assert util.increment_byte_string(b'\xff\xff') is None