Tweak mempool logging

This commit is contained in:
Neil Booth 2018-08-10 21:32:31 +09:00
parent 564449e223
commit f20fe9d7a5

View File

@@ -7,14 +7,13 @@
 '''Mempool handling.'''
 
-import asyncio
 import itertools
 import time
 from abc import ABC, abstractmethod
 from collections import defaultdict
 
 import attr
 
-from aiorpcx import TaskGroup, run_in_thread
+from aiorpcx import TaskGroup, run_in_thread, sleep
 
 from electrumx.lib.hash import hash_to_hex_str, hex_str_to_hash
 from electrumx.lib.util import class_logger, chunks
@@ -102,11 +101,19 @@ class MemPool(object):
         self.hashXs = defaultdict(set)  # None can be a key
         self.cached_compact_histogram = []
 
-    async def _log_stats(self):
+    async def _logging(self, synchronized_event):
+        '''Print regular logs of mempool stats.'''
+        self.logger.info('beginning processing of daemon mempool.  '
+                         'This can take some time...')
+        start = time.time()
+        await synchronized_event.wait()
+        elapsed = time.time() - start
+        self.logger.info(f'synced in {elapsed:.2f}s')
         while True:
             self.logger.info(f'{len(self.txs):,d} txs '
                              f'touching {len(self.hashXs):,d} addresses')
-            await asyncio.sleep(120)
+            await sleep(120)
+            await synchronized_event.wait()
 
     def _update_histogram(self):
         # Build a histogram by fee rate
@@ -180,8 +187,8 @@
     async def _refresh_hashes(self, synchronized_event):
         '''Refresh our view of the daemon's mempool.'''
-        sleep = 5
-        histogram_refresh = self.coin.MEMPOOL_HISTOGRAM_REFRESH_SECS // sleep
+        secs = 5
+        histogram_refresh = self.coin.MEMPOOL_HISTOGRAM_REFRESH_SECS // secs
         for loop_count in itertools.count():
             height = self.api.cached_height()
             hex_hashes = await self.api.mempool_hashes()
@@ -190,11 +197,12 @@
             hashes = set(hex_str_to_hash(hh) for hh in hex_hashes)
             touched = await self._process_mempool(hashes)
             synchronized_event.set()
+            synchronized_event.clear()
             await self.api.on_mempool(touched, height)
             # Thread mempool histogram refreshes - they can be expensive
             if loop_count % histogram_refresh == 0:
                 await run_in_thread(self._update_histogram)
-            await asyncio.sleep(sleep)
+            await sleep(secs)
 
     async def _process_mempool(self, all_hashes):
         # Re-sync with the new set of hashes
@@ -227,9 +235,6 @@
             tx_map.update(deferred)
             utxo_map.update(unspent)
 
-        # Handle the stragglers
-        if len(tx_map) >= 10:
-            self.logger.info(f'{len(tx_map)} stragglers')
         prior_count = 0
         # FIXME: this is not particularly efficient
         while tx_map and len(tx_map) != prior_count:
@@ -286,19 +291,10 @@
     #
 
     async def keep_synchronized(self, synchronized_event):
-        '''Starts the mempool synchronizer.
-
-        Waits for an initial synchronization before returning.
-        '''
-        self.logger.info('beginning processing of daemon mempool.  '
-                         'This can take some time...')
-        async with TaskGroup() as group:
+        '''Keep the mempool synchronized with the daemon.'''
+        async with TaskGroup(wait=any) as group:
             await group.spawn(self._refresh_hashes(synchronized_event))
-            start = time.time()
-            await synchronized_event.wait()
-            elapsed = time.time() - start
-            self.logger.info(f'synced in {elapsed:.2f}s')
-            await group.spawn(self._log_stats())
+            await group.spawn(self._logging(synchronized_event))
 
     async def balance_delta(self, hashX):
         '''Return the unconfirmed amount in the mempool for hashX.