1.0.8 New rollforward and rollback scripts + storing blockNumber data in activeTable now
parent 53782d9473
commit 43f3a91107
@@ -18,6 +18,7 @@ class ActiveTable(Base):
    transferBalance = Column('transferBalance', Float)
    addressBalance = Column('addressBalance', Float)
    orphaned_parentid = Column('orphaned_parentid', Integer)
+    blockNumber = Column('blockNumber', Integer)


class ConsumedTable(Base):
@@ -30,6 +31,8 @@ class ConsumedTable(Base):
    consumedpid = Column('consumedpid', String)
    transferBalance = Column('transferBalance', Float)
    addressBalance = Column('addressBalance', Float)
+    orphaned_parentid = Column('orphaned_parentid', Integer)
+    blockNumber = Column('blockNumber', Integer)


class TransferLogs(Base):
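The blockNumber column added to activeTable and consumedTable above is what lets the new rollback script select rows by block height instead of replaying individual transaction hashes. A minimal sketch of the query shape this enables, assuming the existing create_database_session_orm helper; the token name 'rmt' and rollback_block value are placeholders:

# Sketch only: fetch the activeTable rows written after the rollback point,
# newest first, so they can be undone in reverse order.
db_session = create_database_session_orm('token', {'token_name': 'rmt'}, Base)
entries_to_undo = db_session.query(ActiveTable) \
    .filter(ActiveTable.blockNumber > rollback_block) \
    .order_by(ActiveTable.id.desc()) \
    .all()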
planning.py (69 changed lines)
@@ -1,10 +1,10 @@
'''
TEMPLATE FOR SECOND STAGE AFTER INPUT CLASSIFIER

IF BLOCK If the output of input classifier is tokensystem-C,
JUST LINEARLY START BUILDING IT

then first start building the known outputs

// outputreturn('token_incorporation',f"{flodata}", f"{tokenname}", f"{tokenamount}")

@@ -159,7 +159,7 @@ Conflicts -

1. Token creation | Token participation
2. Smart contract CREATION of the type one-time-event-userchoice | one-time-event-timebased
3. Smart contract PARTICIPATION user-choice | Smart contract DEPOSIT continuos-event token-swap
4. Smart contract PARTICIPATION one-time-event-timebased | Smart contract participation - continuos event - tokenswap

'''
@@ -188,9 +188,9 @@ Allowed formats of Smart Contract and token names
Check for FLO Address

Write checks for conditions inside contract conditions
Serious error handling for contract-conditions
* 2222:00 gives error
* contractAmount = 0.022rt gives error | check if space is allowed between 0.022 rt
'''

@@ -236,4 +236,61 @@ Create another table in system.db, it simply writes what is every database in on

Database_name    Database type

+'''
+
+'''
+IDEA FOR NEW ROLLBACK SYSTEM - 24 Jan 2022
+-------------------------------------------
+
+245436
+[
+tx1 - rmt - 245436 - send 10 rmt#
+tx2 - rmt - 245436 - send 4 rmt#
+tx3 - rmt - 245436 - send 1 rmt#
+tx4 - rmt - 245436 - send 100 rmt#
+tx5 - rmt trigger(5) - 245436 - trigger
+]
+
+banana - txhash
+orange - entries in activepid table
+mangoes - entries in transaction history table
+
+CURRENT SYSTEM
+given a block, find out all the oranges in the block
+given a block, find out all the bananas in the block and
+for each banana, find corresponding databases (found through parsing of banana flodata and banana txdata)
+- if token database then rollback, if contractDatabase then delete entry
+
+
+NEW SYSTEM
+given a block, find out all the oranges in the block
+given a block, find out all the bananas in the block and their corresponding databases (found through parsing of banana flodata and banana txdata)
+- start opening all those databases one by one | if token database then rollback, if contractDatabase then delete entry
+
+send transaction -> receive the databases associated with it
+
+'''
+
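A minimal sketch of the NEW SYSTEM described above: collect, per block, the set of databases its transactions (the bananas) touch, so that each database is opened only once. It reuses helper names that test_rollback_new.py defines later in this commit; the function itself is illustrative and not part of the committed code:

def databases_touched_by_block(block_tx_hashes, latestcache_session):
    # Sketch only: group one block's transactions by the databases they affect.
    touched = {'token': set(), 'smartcontract': set()}
    for txhash in block_tx_hashes:
        tx = latestcache_session.query(LatestTransactions).filter(
            LatestTransactions.transactionHash == txhash).first()
        inputAddress, outputAddress = find_input_output_addresses(json.loads(tx.jsonData))
        info = getDatabase_from_parsedFloData(literal_eval(tx.parsedFloData), inputAddress, outputAddress)
        if info['type'] == 'token_db':
            touched['token'].add(info['token_db'])
        elif info['type'] in ('smartcontract_db', 'swapcontract_db'):
            touched['smartcontract'].add(info['contract_db'])
    return touched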
+'''
+Step 1
+If the block that we are rolling back to is earlier than the database creation blockNumber, then delete the whole database without rolling back. Do this for both token databases and smart contract databases
+
+Step 2
+If the block we are rolling back to is later than the database creation blockNumber, then invoke the rollback database function (rollback_database)
+
+Step 3
+Create a list of databases to be opened, along with their creation date (the creation date is a block number). This will exclude the token and smart contract databases which have already been deleted
+
+Step 4
+For each of the databases to be opened, roll back the database to the rollback point
+rollback_database will take 2 inputs: the block number to which it has to roll back, and the name of the database
+
+Step 5
+Create a delete function, which will delete from transactionHistory, latestCache and contractDatabase
+
+To-do
+------
+* Integrate all the functions in the following order:
+1, 2, 3, 4, 5 | That will finish the operation of taking the block number as input; the rollback function will roll back up to the specified block number for all kinds of databases and all kinds of transactions
+
'''
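Steps 1 to 4 above reduce to a small dispatch per affected database: drop databases that were created after the rollback point, roll the others back in place. A compact sketch, assuming a hypothetical helper creation_block() that returns the blockNumber at which a database was created; delete_database and rollback_database exist in test_rollback_new.py below:

def rollback_or_delete(db_name, db_type, rollback_block):
    # Step 1: database created after the rollback point -> delete it outright.
    if creation_block(db_name) > rollback_block:   # creation_block() is assumed, not in the commit
        delete_database(rollback_block, db_name)
    # Steps 2 and 4: otherwise roll it back in place to the rollback point.
    else:
        rollback_database(rollback_block, db_type, db_name)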
@@ -7,6 +7,7 @@ import os
import json
import logging
import pdb
+import sys

apppath = os.path.dirname(os.path.realpath(__file__))

@@ -50,13 +51,17 @@ def create_database_session_orm(type, parameters, base):
    return session


-def inspect_parsed_flodata(parsed_flodata):
+def inspect_parsed_flodata(parsed_flodata, inputAddress, outputAddress):
    if parsed_flodata['type'] == 'transfer':
        if parsed_flodata['transferType'] == 'token':
            return {'type':'tokentransfer', 'token_db':f"{parsed_flodata['tokenIdentification']}", 'token_amount':f"{parsed_flodata['tokenAmount']}"}
    if parsed_flodata['type'] == 'tokenIncorporation':
        return {'type':'tokenIncorporation', 'token_db':f"{parsed_flodata['tokenIdentification']}", 'token_amount':f"{parsed_flodata['tokenAmount']}"}
+    if parsed_flodata['type'] == 'smartContractPays':
+        # contract address, token | both of them come from
+        sc_session = create_database_session_orm('smart_contract', {'contract_name':f"{parsed_flodata['contractName']}", 'contract_address':f"{outputAddress}"}, ContractBase)
+        token_db = sc_session.query(ContractStructure.value).filter(ContractStructure.attribute=='tokenIdentification').first()[0]
+        return {'type':'smartContractPays', 'token_db':f"{token_db}", 'contract_db':f"{parsed_flodata['contractName']}-{outputAddress}", 'triggerCondition':f"{parsed_flodata['triggerCondition']}"}

'''
Steps to do the rollback
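inspect_parsed_flodata now also takes the input and output addresses so that smartContractPays transactions can be resolved to their contract database. For reference, a hypothetical call for a plain token transfer; the field values are made up and only the keys the function reads are shown:

parsed_flodata = {'type': 'transfer', 'transferType': 'token',
                  'tokenIdentification': 'rmt', 'tokenAmount': 10}
inspect_parsed_flodata(parsed_flodata, 'FSenderAddress', 'FReceiverAddress')
# -> {'type': 'tokentransfer', 'token_db': 'rmt', 'token_amount': '10'}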
@@ -119,6 +124,25 @@ def rollback_address_balance_processing(db_session, senderAddress, receiverAddre
        receiver_query[1].addressBalance = new_receiverBalance


+def undo_smartContractPays(tokenIdentification, inputAddress, outputAddress, transaction_data):
+    # Token database
+    '''
+    * rollback each pid transaction
+    * the addressBalance will have to be calculated after each loop, NOT at the end of the loop
+    '''
+    tokendb_session = create_database_session_orm('token', {'token_name':tokenIdentification}, Base)
+    transaction_history_entry = tokendb_session.query(TransactionHistory).filter(TransactionHistory.transactionHash == transaction_data.transactionHash).order_by(TransactionHistory.blockNumber.desc()).all()
+
+    active_table_last_entries = tokendb_session.query(ActiveTable).order_by(ActiveTable.id.desc()).limit(len(transaction_history_entry))
+    pdb.set_trace()
+
+    # Smart Contract database
+    '''
+    *
+    '''
+    print('')
+
+
def undo_transferToken(tokenIdentification, tokenAmount, inputAddress, outputAddress, transaction_data):
    # Connect to database
    db_session = create_database_session_orm('token', {'token_name':tokenIdentification}, Base)
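undo_smartContractPays above is still a stub: it gathers the TransactionHistory rows for the trigger payout and the matching tail of activeTable, then stops at pdb.set_trace(). A minimal sketch of the loop its docstring points at, reusing the helpers already defined in this script; this is an assumption about where the function is headed, not code from the commit:

# Sketch only: undo each payout row, recomputing address balances per step.
for idx, activeTable_entry in enumerate(active_table_last_entries):
    history_row = transaction_history_entry[idx]
    rollback_address_balance_processing(tokendb_session,
                                        history_row.sourceFloAddress,
                                        history_row.destFloAddress,
                                        history_row.transferAmount)
    tokendb_session.delete(activeTable_entry)
    tokendb_session.delete(history_row)
tokendb_session.commit()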
@@ -235,25 +259,31 @@ def delete_token_database(token_name):
def perform_rollback(transaction):
    latestCache = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase)
    # categorize transaction and find out the databases it will affect
-    parsed_flodata = literal_eval(transaction.parsedFloData)
-    inspected_flodata = inspect_parsed_flodata(parsed_flodata)
    transaction_data = json.loads(transaction.jsonData)
    input_output_list = find_input_output_addresses(transaction_data)
    inputAddress = input_output_list[0][0]
    outputAddress = input_output_list[1][0]
+    parsed_flodata = literal_eval(transaction.parsedFloData)
+    inspected_flodata = inspect_parsed_flodata(parsed_flodata, inputAddress, outputAddress)
+
    if inspected_flodata['type'] == 'tokentransfer':
        # undo the transaction in token database
        undo_transferToken(inspected_flodata['token_db'], inspected_flodata['token_amount'], inputAddress, outputAddress, transaction)
-    if inspected_flodata['type'] == 'tokenIncorporation':
+    elif inspected_flodata['type'] == 'tokenIncorporation':
        # note - if you want you can do checks to make sure the database has only 1 entry
        # delete the token database
        delete_token_database(inspected_flodata['token_db'])
+    elif inspected_flodata['type'] == 'smartContractPays':
+        undo_smartContractPays(inspected_flodata[''], inputAddress, outputAddress, transaction_data)
+    else:
+        print("Transaction not in any inspected_flodata category until now.. Exiting")
+        sys.exit(0)


# Take input from user reg how many blocks to go back in the blockchain
'''
parser = argparse.ArgumentParser(description='Script tracks RMT using FLO data on the FLO blockchain - https://flo.cash')
parser.add_argument('-rbk', '--rollback', nargs='?', const=1, type=int, help='Rollback the script')
args = parser.parse_args()
'''

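The argparse block in the hunk above is still commented out, so the rollback depth stays hard-coded further down. If it were enabled, the -rbk flag would feed that value instead; the wiring below and the script name in the example invocation are illustrative, not part of the commit:

# e.g.  python rollback_script.py --rollback 100
parser = argparse.ArgumentParser(description='Script tracks RMT using FLO data on the FLO blockchain - https://flo.cash')
parser.add_argument('-rbk', '--rollback', nargs='?', const=1, type=int, help='Rollback the script')
args = parser.parse_args()
number_blocks_to_rollback = args.rollback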
@@ -291,12 +321,12 @@ for blockindex in blocknumber_list:
    print("Block tx hashes")
    print(block_tx_hashes)

-    #if 'e79b02d8eb1e72278d5d23db1ae4dd779bd834e4de15119a05f37fa849662277' in block_tx_hashes:
-    # pdb.set_trace()
+    if 'b57cf412c8cb16e473d04bae44214705c64d2c25146be22695bf1ac36e166ee0' in block_tx_hashes:
+        pdb.set_trace()

    for tx in block_tx_hashes:
        transaction = latestcache_session.query(LatestTransactions).filter(LatestTransactions.transactionHash == tx).all()
        print(transaction)
        if len(transaction) == 1:
            perform_rollback(transaction[0])
            latestcache_session.delete(transaction[0])
@@ -338,11 +368,13 @@ for blockindex in blocknumber_list:
systemdb_session.commit()
latestcache_session.close()
systemdb_session.close()


-'''latestcache_session = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase)
+'''
+latestcache_session = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase)
latestTransactions = latestcache_session.query(LatestTransactions).filter(LatestTransactions.blockNumber >= rollback_block).order_by(LatestTransactions.id.desc()).all()
latestBlocks = latestcache_session.query(LatestBlocks).filter(LatestBlocks.blockNumber >= rollback_block).all()

#for transaction in latestTransactions:
-perform_rollback(latestTransactions[0])'''
+perform_rollback(latestTransactions[0])
+'''
test_rollback_new.py (new file, 483 lines)
@@ -0,0 +1,483 @@
import argparse
from sqlalchemy import create_engine, func
from sqlalchemy.orm import sessionmaker
from models import SystemData, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, RejectedTransactionHistory, Base, ContractStructure, ContractBase, ContractParticipants, SystemBase, ActiveContracts, ContractAddressMapping, LatestCacheBase, ContractTransactionHistory, RejectedContractTransactionHistory, TokenContractAssociation, ContinuosContractBase, ContractStructure1, ContractParticipants1, ContractDeposits1, ContractTransactionHistory1, LatestTransactions, LatestBlocks, DatabaseTypeMapping, TokenAddressMapping
from ast import literal_eval
import os
import json
import logging
import pdb
import sys

apppath = os.path.dirname(os.path.realpath(__file__))

# helper functions
def check_database_existence(type, parameters):
    if type == 'token':
        return os.path.isfile(f"./tokens/{parameters['token_name']}.db")

    if type == 'smart_contract':
        return os.path.isfile(f"./smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db")


def create_database_connection(type, parameters):
    if type == 'token':
        engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True)
    elif type == 'smart_contract':
        engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True)
    elif type == 'system_dbs':
        engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False)

    connection = engine.connect()
    return connection


def create_database_session_orm(type, parameters, base):
    if type == 'token':
        pdb.set_trace()
        engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True)
        base.metadata.create_all(bind=engine)
        session = sessionmaker(bind=engine)()

    elif type == 'smart_contract':
        engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True)
        base.metadata.create_all(bind=engine)
        session = sessionmaker(bind=engine)()

    elif type == 'system_dbs':
        engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False)
        base.metadata.create_all(bind=engine)
        session = sessionmaker(bind=engine)()
    else:
        pdb.set_trace()

    return session


def inspect_parsed_flodata(parsed_flodata, inputAddress, outputAddress):
    if parsed_flodata['type'] == 'transfer':
        if parsed_flodata['transferType'] == 'token':
            return {'type':'tokentransfer', 'token_db':f"{parsed_flodata['tokenIdentification']}", 'token_amount':f"{parsed_flodata['tokenAmount']}"}
        if parsed_flodata['transferType'] == 'smartContract':
            return {'type':'smartContract', 'contract_db': f"{parsed_flodata['contractName']}-{outputAddress}" ,'accepting_token_db':f"{parsed_flodata['']}", 'receiving_token_db':f"{parsed_flodata['tokenIdentification']}" ,'token_amount':f"{parsed_flodata['tokenAmount']}"}
        if parsed_flodata['transferType'] == 'swapParticipation':
            return {'type':'swapParticipation', 'contract_db': f"{parsed_flodata['contractName']}-{outputAddress}" ,'accepting_token_db':f"{parsed_flodata['']}", 'receiving_token_db':f"{parsed_flodata['tokenIdentification']}" ,'token_amount':f"{parsed_flodata['tokenAmount']}"}
        if parsed_flodata['transferType'] == 'nft':
            return {'type':'nfttransfer', 'nft_db':f"{parsed_flodata['tokenIdentification']}", 'token_amount':f"{parsed_flodata['tokenAmount']}"}
    if parsed_flodata['type'] == 'tokenIncorporation':
        return {'type':'tokenIncorporation', 'token_db':f"{parsed_flodata['tokenIdentification']}", 'token_amount':f"{parsed_flodata['tokenAmount']}"}
    if parsed_flodata['type'] == 'smartContractPays':
        # contract address, token | both of them come from
        sc_session = create_database_session_orm('smart_contract', {'contract_name':f"{parsed_flodata['contractName']}", 'contract_address':f"{outputAddress}"}, ContractBase)
        token_db = sc_session.query(ContractStructure.value).filter(ContractStructure.attribute=='tokenIdentification').first()[0]
        return {'type':'smartContractPays', 'token_db':f"{token_db}", 'contract_db':f"{parsed_flodata['contractName']}-{outputAddress}", 'triggerCondition':f"{parsed_flodata['triggerCondition']}"}
    if parsed_flodata['type'] == 'smartContractIncorporation':
        return {'type':'smartContractIncorporation', 'contract_db':f"{parsed_flodata['contractName']}-{outputAddress}", 'triggerCondition':f"{parsed_flodata['triggerCondition']}"}


def getDatabase_from_parsedFloData(parsed_flodata, inputAddress, outputAddress):
    if parsed_flodata['type'] == 'transfer':
        if parsed_flodata['transferType'] == 'token':
            return {'type':'token_db', 'token_db':f"{parsed_flodata['tokenIdentification']}"}
        elif parsed_flodata['transferType'] == 'smartContract':
            return {'type':'smartcontract_db', 'contract_db': f"{parsed_flodata['contractName']}-{outputAddress}" ,'token_db':f"{parsed_flodata['tokenIdentification']}"}
        elif parsed_flodata['transferType'] == 'swapParticipation':
            return {'type':'swapcontract_db', 'contract_db': f"{parsed_flodata['contractName']}-{outputAddress}" ,'accepting_token_db':f"{parsed_flodata['contract-conditions']['accepting_token']}", 'selling_token_db':f"{parsed_flodata['contract-conditions']['selling_token']}"}
        elif parsed_flodata['transferType'] == 'nft':
            return {'type':'nft_db', 'token_db':f"{parsed_flodata['tokenIdentification']}"}
    elif parsed_flodata['type'] == 'smartContractPays':
        # contract address, token | both of them come from
        sc_session = create_database_session_orm('smart_contract', {'contract_name':f"{parsed_flodata['contractName']}", 'contract_address':f"{outputAddress}"}, ContractBase)
        token_db = sc_session.query(ContractStructure.value).filter(ContractStructure.attribute=='tokenIdentification').first()[0]
        return {'type':'smartcontract_db', 'contract_db':f"{parsed_flodata['contractName']}-{outputAddress}", 'token_db':f"{token_db}"}
    '''
    if parsed_flodata['type'] == 'smartContractIncorporation':
        return {'type':'smartcontract_db', 'contract_db':f"{parsed_flodata['contractName']}-{outputAddress}"}
    if parsed_flodata['type'] == 'tokenIncorporation':
        return {'type':'token_db', 'token_db':f"{parsed_flodata['tokenIdentification']}"}
    '''


def undo_last_single_transaction():
    consumedpid_entry = db_session.query(ConsumedTable).filter(ConsumedTable.id == key).all()
    newTransferBalance = consumedpid_entry[0].transferBalance + consumedpid[key]
    db_session.add(ActiveTable(id=consumedpid_entry[0].id, address=consumedpid_entry[0].address, consumedpid=consumedpid_entry[0].consumedpid, transferBalance=newTransferBalance, addressBalance = consumedpid_entry[0].addressBalance))
    db_session.commit()


def calc_pid_amount(transferBalance, consumedpid):
    consumedpid_sum = 0
    for key in list(consumedpid.keys()):
        consumedpid_sum = consumedpid_sum + float(consumedpid[key])
    return transferBalance - consumedpid_sum

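# Illustration only (not in the committed file): calc_pid_amount returns the share of a
# transfer that came from the partially consumed parent pid. For example,
# calc_pid_amount(15, {'2': 10, '3': 4}) returns 15 - (10 + 4) = 1.0.
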
def find_addressBalance_from_floAddress(database_session, floAddress):
    query_output = database_session.query(ActiveTable).filter(ActiveTable.address==floAddress, ActiveTable.addressBalance!=None).first()
    if query_output is None:
        return 0
    else:
        return query_output.addressBalance


def rollback_address_balance_processing(db_session, senderAddress, receiverAddress, transferBalance):
    # Find out total sum of address
    # Find out the last entry where address balance is not null, if exists make it null

    # Calculation phase
    current_receiverBalance = find_addressBalance_from_floAddress(db_session, receiverAddress)
    current_senderBalance = find_addressBalance_from_floAddress(db_session, senderAddress)
    new_receiverBalance = current_receiverBalance - transferBalance
    new_senderBalance = current_senderBalance + transferBalance

    # Insertion phase
    # if new receiver balance is 0, then only insert sender address balance
    # if receiver balance is not 0, then update previous occurrence of the receiver address and sender balance
    # for sender, find out whether the last occurrence of senderfloid has an addressBalance
    # either query out will not come or the last occurrence will have address
    # for sender, in all cases we will update the addressBalance of last occurrences of senderfloaddress
    # for receiver, if the currentaddressbalance is 0 then do nothing .. and if the currentaddressbalance is not 0 then update the last occurrence of receiver address

    sender_query = db_session.query(ActiveTable).filter(ActiveTable.address==senderAddress).order_by(ActiveTable.id.desc()).first()
    sender_query.addressBalance = new_senderBalance

    if new_receiverBalance != 0 and new_receiverBalance > 0:
        receiver_query = db_session.query(ActiveTable).filter(ActiveTable.address==receiverAddress).order_by(ActiveTable.id.desc()).limit(2)
        receiver_query[1].addressBalance = new_receiverBalance

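# Illustration only (not in the committed file): the arithmetic above simply moves the
# transferred amount back. Undoing a transfer of 10 with made-up balances:
#   new_senderBalance   = 90 + 10   # 100
#   new_receiverBalance = 25 - 10   # 15
# and only the sender row is rewritten when the new receiver balance drops to 0.
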
def undo_smartContractPays(tokenIdentification, inputAddress, outputAddress, transaction_data):
    # Token database
    '''
    * rollback each pid transaction
    * the addressBalance will have to be calculated after each loop, NOT at the end of the loop
    '''
    tokendb_session = create_database_session_orm('token', {'token_name':tokenIdentification}, Base)
    transaction_history_entry = tokendb_session.query(TransactionHistory).filter(TransactionHistory.transactionHash == transaction_data.transactionHash).order_by(TransactionHistory.blockNumber.desc()).all()

    active_table_last_entries = tokendb_session.query(ActiveTable).order_by(ActiveTable.id.desc()).limit(len(transaction_history_entry))
    pdb.set_trace()

    # Smart Contract database
    '''
    *
    '''
    print('')


def undo_transferToken(tokenIdentification, tokenAmount, inputAddress, outputAddress, transaction_data):
    # Connect to database
    db_session = create_database_session_orm('token', {'token_name':tokenIdentification}, Base)
    transaction_history_entry = db_session.query(TransactionHistory).filter(TransactionHistory.transactionHash == transaction_data.transactionHash).order_by(TransactionHistory.blockNumber.desc()).all()

    active_table_last_entries = db_session.query(ActiveTable).order_by(ActiveTable.id.desc()).limit(len(transaction_history_entry))

    for idx, activeTable_entry in enumerate(active_table_last_entries):
        # Find out consumedpid and partially consumed pids
        parentid = None
        orphaned_parentid = None
        consumedpid = None
        if activeTable_entry.parentid is not None:
            parentid = activeTable_entry.parentid
        if activeTable_entry.orphaned_parentid is not None:
            orphaned_parentid = activeTable_entry.orphaned_parentid
        if activeTable_entry.consumedpid is not None:
            consumedpid = literal_eval(activeTable_entry.consumedpid)

        # filter out based on consumed pid and partially consumed pids
        if parentid is not None:
            # find query in activeTable with the parentid
            activeTable_pid_entry = db_session.query(ActiveTable).filter(ActiveTable.id == parentid).all()[0]
            # calculate the amount taken from parentid
            activeTable_pid_entry.transferBalance = activeTable_pid_entry.transferBalance + calc_pid_amount(activeTable_entry.transferBalance, consumedpid)

        if consumedpid != {}:
            # each key of the pid is totally consumed and with its corresponding value written in the end
            # how can we maintain the order of pid consumption? The bigger pid number will be towards the end
            # 1. pull the pid number and its details from the consumedpid table
            for key in list(consumedpid.keys()):
                consumedpid_entry = db_session.query(ConsumedTable).filter(ConsumedTable.id == key).all()[0]
                newTransferBalance = consumedpid_entry.transferBalance + consumedpid[key]
                db_session.add(ActiveTable(id=consumedpid_entry.id, address=consumedpid_entry.address, parentid=consumedpid_entry.parentid ,consumedpid=consumedpid_entry.consumedpid, transferBalance=newTransferBalance, addressBalance = None))
                db_session.delete(consumedpid_entry)

                orphaned_parentid_entries = db_session.query(ActiveTable).filter(ActiveTable.orphaned_parentid == key).all()
                for orphan_entry in orphaned_parentid_entries:
                    orphan_entry.parentid = orphan_entry.orphaned_parentid
                    orphan_entry.orphaned_parentid = None

        # update addressBalance
        rollback_address_balance_processing(db_session, inputAddress, outputAddress, transaction_history_entry[idx].transferAmount)

        # delete operations
        # delete the last row in activeTable and transactionTable
        db_session.delete(activeTable_entry)
        db_session.delete(transaction_history_entry[idx])

    db_session.commit()


def find_input_output_addresses(transaction_data):
    # Create vinlist and outputlist
    vinlist = []
    querylist = []

    for vin in transaction_data["vin"]:
        vinlist.append([vin["addr"], float(vin["value"])])

    totalinputval = float(transaction_data["valueIn"])

    # todo Rule 41 - Check if all the addresses in a transaction on the input side are the same
    for idx, item in enumerate(vinlist):
        if idx == 0:
            temp = item[0]
            continue
        if item[0] != temp:
            print(f"System has found more than one address as part of vin. Transaction {transaction_data['txid']} is rejected")
            return 0

    inputlist = [vinlist[0][0], totalinputval]
    inputadd = vinlist[0][0]

    # todo Rule 42 - If the number of vout is more than 2, reject the transaction
    if len(transaction_data["vout"]) > 2:
        print(f"System has found more than 2 address as part of vout. Transaction {transaction_data['txid']} is rejected")
        return 0

    # todo Rule 43 - A transaction accepted by the system has two vouts, 1. The FLO address of the receiver
    # 2. FLO address of the sender as change address. If the vout address is the change address, then the other address
    # is the receiver address

    outputlist = []
    addresscounter = 0
    inputcounter = 0
    for obj in transaction_data["vout"]:
        if obj["scriptPubKey"]["type"] == "pubkeyhash":
            addresscounter = addresscounter + 1
            if inputlist[0] == obj["scriptPubKey"]["addresses"][0]:
                inputcounter = inputcounter + 1
                continue
            outputlist.append([obj["scriptPubKey"]["addresses"][0], obj["value"]])

    if addresscounter == inputcounter:
        outputlist = [inputlist[0]]
    elif len(outputlist) != 1:
        print(f"Transaction's change is not coming back to the input address. Transaction {transaction_data['txid']} is rejected")
        return 0
    else:
        outputlist = outputlist[0]

    return inputlist[0], outputlist[0]

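# Illustration only (not in the committed file): the minimal Flosight-style dict this
# function expects, with made-up addresses and values.
#
#   transaction_data = {
#       'txid': 'abc123', 'valueIn': '12.0',
#       'vin':  [{'addr': 'FSenderAddress', 'value': '12.0'}],
#       'vout': [{'scriptPubKey': {'type': 'pubkeyhash', 'addresses': ['FReceiverAddress']}, 'value': 10.0},
#                {'scriptPubKey': {'type': 'pubkeyhash', 'addresses': ['FSenderAddress']}, 'value': 1.9}],
#   }
#   find_input_output_addresses(transaction_data)   # -> ('FSenderAddress', 'FReceiverAddress')
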
def delete_token_database(token_name):
    dirpath = os.path.join(apppath, 'tokens', f"{token_name}.db")
    if os.path.exists(dirpath):
        os.remove(dirpath)


def perform_rollback(transaction):
    latestCache = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase)
    # categorize transaction and find out the databases it will affect
    transaction_data = json.loads(transaction.jsonData)
    inputAddress, outputAddress = find_input_output_addresses(transaction_data)
    parsed_flodata = literal_eval(transaction.parsedFloData)
    inspected_flodata = inspect_parsed_flodata(parsed_flodata, inputAddress, outputAddress)

    if inspected_flodata['type'] == 'tokentransfer':
        # undo the transaction in token database
        undo_transferToken(inspected_flodata['token_db'], inspected_flodata['token_amount'], inputAddress, outputAddress, transaction)
    elif inspected_flodata['type'] == 'tokenIncorporation':
        # note - if you want you can do checks to make sure the database has only 1 entry
        # delete the token database
        delete_token_database(inspected_flodata['token_db'])
    elif inspected_flodata['type'] == 'smartContractPays':
        undo_smartContractPays(inspected_flodata[''], inputAddress, outputAddress, transaction_data)
    else:
        print("Transaction not in any inspected_flodata category until now.. Exiting")
        sys.exit(0)


def rollback_database(blockNumber, dbtype, dbname):
    if dbtype == 'token':
        # Connect to database
        db_session = create_database_session_orm('token', {'token_name':dbname}, Base)
        active_table_last_entries = db_session.query(ActiveTable).filter(ActiveTable.blockNumber > blockNumber).order_by(ActiveTable.id.desc())
        transaction_history_entry = db_session.query(TransactionHistory).filter(TransactionHistory.blockNumber > blockNumber).order_by(TransactionHistory.blockNumber.desc()).all()

        for idx, activeTable_entry in enumerate(active_table_last_entries):
            # Find out consumedpid and partially consumed pids
            parentid = None
            orphaned_parentid = None
            consumedpid = None
            if activeTable_entry.parentid is not None:
                parentid = activeTable_entry.parentid
            if activeTable_entry.orphaned_parentid is not None:
                orphaned_parentid = activeTable_entry.orphaned_parentid
            if activeTable_entry.consumedpid is not None:
                consumedpid = literal_eval(activeTable_entry.consumedpid)

            # filter out based on consumed pid and partially consumed pids
            if parentid is not None:
                # find query in activeTable with the parentid
                activeTable_pid_entry = db_session.query(ActiveTable).filter(ActiveTable.id == parentid).all()[0]
                # calculate the amount taken from parentid
                activeTable_pid_entry.transferBalance = activeTable_pid_entry.transferBalance + calc_pid_amount(activeTable_entry.transferBalance, consumedpid)

            if consumedpid != {}:
                # each key of the pid is totally consumed and with its corresponding value written in the end
                # how can we maintain the order of pid consumption? The bigger pid number will be towards the end
                # 1. pull the pid number and its details from the consumedpid table
                for key in list(consumedpid.keys()):
                    consumedpid_entry = db_session.query(ConsumedTable).filter(ConsumedTable.id == key).all()[0]
                    newTransferBalance = consumedpid_entry.transferBalance + consumedpid[key]
                    db_session.add(ActiveTable(id=consumedpid_entry.id, address=consumedpid_entry.address, parentid=consumedpid_entry.parentid ,consumedpid=consumedpid_entry.consumedpid, transferBalance=newTransferBalance, addressBalance = None))
                    db_session.delete(consumedpid_entry)

                    orphaned_parentid_entries = db_session.query(ActiveTable).filter(ActiveTable.orphaned_parentid == key).all()
                    for orphan_entry in orphaned_parentid_entries:
                        orphan_entry.parentid = orphan_entry.orphaned_parentid
                        orphan_entry.orphaned_parentid = None

            # update addressBalance
            rollback_address_balance_processing(db_session, inputAddress, outputAddress, transaction_history_entry[idx].transferAmount)

            # delete operations
            # delete the last row in activeTable and transactionTable
            db_session.delete(activeTable_entry)
            db_session.delete(transaction_history_entry[idx])
        db_session.commit()

    elif dbtype == 'smartcontract':
        db_session = create_database_session_orm('smart_contract', {'contract_name':f"{dbname['contract_name']}", 'contract_address':f"{dbname['contract_address']}"}, ContractBase)
        db_session.query(ContractTransactionHistory).filter(ContractTransactionHistory.blockNumber > blockNumber).delete()
        db_session.query(ContractParticipants).filter(ContractParticipants.blockNumber > blockNumber).delete()


def delete_database(blockNumber, dbname):
    db_session = create_database_session_orm('system_dbs', {'db_name':'system'}, SystemBase)
    databases_to_delete = db_session.query(DatabaseTypeMapping.db_name, DatabaseTypeMapping.db_type).filter(DatabaseTypeMapping.blockNumber>blockNumber).all()

    db_names, db_type = zip(*databases_to_delete)

    for database in databases_to_delete:
        if database[1] in ['token','infinite-token']:
            dirpath = os.path.join(apppath, 'tokens', f"{dbname}.db")
            if os.path.exists(dirpath):
                os.remove(dirpath)
        elif database[1] in ['smartcontract']:
            dirpath = os.path.join(apppath, 'smartcontracts', f"{dbname}.db")
            if os.path.exists(dirpath):
                os.remove(dirpath)
    return db_names


def system_database_deletions(blockNumber):

    latestcache_session = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase)

    # delete latestBlocks & latestTransactions entry
    latestcache_session.query(LatestBlocks).filter(LatestBlocks.blockNumber > blockNumber).delete()
    latestcache_session.query(LatestTransactions).filter(LatestTransactions.blockNumber > blockNumber).delete()

    # delete activeContracts, contractAddressMapping, DatabaseAddressMapping, rejectedContractTransactionHistory, rejectedTransactionHistory, tokenAddressMapping
    systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase)
    activeContracts_session = systemdb_session.query(ActiveContracts).filter(ActiveContracts.blockNumber > blockNumber).delete()
    contractAddressMapping_queries = systemdb_session.query(ContractAddressMapping).filter(ContractAddressMapping.blockNumber > blockNumber).delete()
    databaseTypeMapping_queries = systemdb_session.query(DatabaseTypeMapping).filter(DatabaseTypeMapping.blockNumber > blockNumber).delete()
    rejectedContractTransactionHistory_queries = systemdb_session.query(RejectedContractTransactionHistory).filter(RejectedContractTransactionHistory.blockNumber > blockNumber).delete()
    rejectedTransactionHistory_queries = systemdb_session.query(RejectedTransactionHistory).filter(RejectedTransactionHistory.blockNumber > blockNumber).delete()
    tokenAddressMapping_queries = systemdb_session.query(TokenAddressMapping).filter(TokenAddressMapping.blockNumber > blockNumber).delete()

    systemdb_session.query(SystemData).filter(SystemData.attribute=='lastblockscanned').update({SystemData.value:str(blockNumber)})

    latestcache_session.commit()
    systemdb_session.commit()
    latestcache_session.close()
    systemdb_session.close()


# Take input from user reg how many blocks to go back in the blockchain
'''
parser = argparse.ArgumentParser(description='Script tracks RMT using FLO data on the FLO blockchain - https://flo.cash')
parser.add_argument('-rbk', '--rollback', nargs='?', const=1, type=int, help='Rollback the script')
args = parser.parse_args()
'''

number_blocks_to_rollback = 1754000

# Get all the transaction and blockdetails from latestCache reg the transactions in the block
systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase)
lastscannedblock = systemdb_session.query(SystemData.value).filter(SystemData.attribute=='lastblockscanned').first()
systemdb_session.close()
lastscannedblock = int(lastscannedblock.value)
rollback_block = lastscannedblock - number_blocks_to_rollback

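# Illustration only (not in the committed file): the rollback target is just
# lastblockscanned minus the hard-coded count, e.g. 1760000 - 1754000 = 6000, so every
# block above height 6000 would be rolled back in that (made-up) case.
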
def return_token_contract_set(rollback_block):
    latestcache_session = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase)
    latestBlocks = latestcache_session.query(LatestBlocks).filter(LatestBlocks.blockNumber >= rollback_block).all()
    lblocks_dict = {}
    blocknumber_list = []
    for block in latestBlocks:
        block_dict = block.__dict__
        lblocks_dict[block_dict['blockNumber']] = {'blockHash':f"{block_dict['blockHash']}", 'jsonData':f"{block_dict['jsonData']}"}
        blocknumber_list.insert(0,block_dict['blockNumber'])

    for blockindex in blocknumber_list:
        # Find all the transactions that happened in this block
        try:
            block_tx_hashes = json.loads(lblocks_dict[str(blockindex)]['jsonData'])['tx']
        except:
            print(f"Block {blockindex} is not found in latestCache. Skipping this block")
            continue

        tokendb_set = set()
        smartcontractdb_set = set()

        for txhash in block_tx_hashes:
            # Get the transaction details
            transaction = latestcache_session.query(LatestTransactions).filter(LatestTransactions.transactionHash == txhash).first()
            transaction_data = json.loads(transaction.jsonData)
            inputAddress, outputAddress = find_input_output_addresses(transaction_data)
            parsed_flodata = literal_eval(transaction.parsedFloData)
            database_information = getDatabase_from_parsedFloData(parsed_flodata, inputAddress, outputAddress)

            if database_information['token_db']:
                tokendb_set.add(database_information['token_db'])
            elif database_information['smartcontract_db']:
                tokendb_set.add(database_information['token_db'])
                smartcontractdb_set.add(database_information['contract_db'])
            elif database_information['swapcontract_db']:
                tokendb_set.add(database_information['accepting_token_db'])
                tokendb_set.add(database_information['selling_token_db'])
                smartcontractdb_set.add(database_information['contract_db'])

    return tokendb_set, smartcontractdb_set

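# Illustration only (not in the committed file): for the example block sketched in
# planning.py (four rmt transfers plus one trigger), the sets would come back roughly as
#   tokendb_set          == {'rmt'}
#   smartcontractdb_set  == {'<contractName>-<contractAddress>'}   # placeholder name-address pair
# and initiate_rollback_process() below walks each set exactly once.
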
def initiate_rollback_process():
    tokendb_set, smartcontractdb_set = return_token_contract_set(rollback_block)
    for token_db in tokendb_set:
        token_session = create_database_session_orm('token', {'token_name': token_db}, Base)
        if token_session.query(TransactionHistory.blockNumber).first()[0] > rollback_block:
            delete_database(rollback_block, token_db)
            token_session.commit()
        else:
            rollback_database(rollback_block, 'token', token_db)
        token_session.close()

    for contract_db in smartcontractdb_set:
        contract_session = create_database_session_orm('smartcontract', {'db_name': contract_db}, ContractBase)
        if contract_session.query(TransactionHistory.blockNumber).first()[0] > rollback_block:
            delete_database(rollback_block, contract_db)
            contract_session.commit()
        else:
            rollback_database(rollback_block, 'smartcontract', contract_db)
        contract_session.close()

    system_database_deletions(rollback_block)


initiate_rollback_process()
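# Editor's sketch (not in the committed file): the script runs the rollback on import;
# a __main__ guard would keep it importable from tests without side effects.
#
#   if __name__ == '__main__':
#       initiate_rollback_process()
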
test_rollforward.py (new file, 186 lines)
@@ -0,0 +1,186 @@
from sqlalchemy import create_engine, func
from sqlalchemy.orm import sessionmaker
from models import SystemData, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, RejectedTransactionHistory, Base, ContractStructure, ContractBase, ContractParticipants, SystemBase, ActiveContracts, ContractAddressMapping, LatestCacheBase, ContractTransactionHistory, RejectedContractTransactionHistory, TokenContractAssociation, ContinuosContractBase, ContractStructure1, ContractParticipants1, ContractDeposits1, ContractTransactionHistory1, LatestTransactions, LatestBlocks, DatabaseTypeMapping
import json
from tracktokens_smartcontracts import processTransaction, processBlock
import os
import logging
import argparse
import configparser
import pdb
import shutil
import sys


# helper functions
def check_database_existence(type, parameters):
    if type == 'token':
        return os.path.isfile(f"./tokens/{parameters['token_name']}.db")

    if type == 'smart_contract':
        return os.path.isfile(f"./smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db")


def create_database_connection(type, parameters):
    if type == 'token':
        engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True)
    elif type == 'smart_contract':
        engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True)
    elif type == 'system_dbs':
        engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False)

    connection = engine.connect()
    return connection


def create_database_session_orm(type, parameters, base):
    if type == 'token':
        engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True)
        base.metadata.create_all(bind=engine)
        session = sessionmaker(bind=engine)()

    elif type == 'smart_contract':
        engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True)
        base.metadata.create_all(bind=engine)
        session = sessionmaker(bind=engine)()

    elif type == 'system_dbs':
        engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False)
        base.metadata.create_all(bind=engine)
        session = sessionmaker(bind=engine)()

    return session


# MAIN EXECUTION STARTS
# Configuration of required variables
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

formatter = logging.Formatter('%(asctime)s:%(name)s:%(message)s')

file_handler = logging.FileHandler('tracking.log')
file_handler.setLevel(logging.INFO)
file_handler.setFormatter(formatter)

stream_handler = logging.StreamHandler()
stream_handler.setFormatter(formatter)

logger.addHandler(file_handler)
logger.addHandler(stream_handler)


# Rule 1 - Read command line arguments to reset the databases as blank
# Rule 2 - Read config to set testnet/mainnet
# Rule 3 - Set flo blockexplorer location depending on testnet or mainnet
# Rule 4 - Set the local flo-cli path depending on testnet or mainnet ( removed this feature | Flosights are the only source )
# Rule 5 - Set the block number to scan from


# Read command line arguments
parser = argparse.ArgumentParser(description='Script tracks RMT using FLO data on the FLO blockchain - https://flo.cash')
parser.add_argument('-r', '--reset', nargs='?', const=1, type=int, help='Purge existing db and rebuild it from scratch')
parser.add_argument('-rb', '--rebuild', nargs='?', const=1, type=int, help='Rebuild it')
parser.add_argument('-f', '--forwardblock', nargs='?', type=int, help='Forward block number')
args = parser.parse_args()

apppath = os.path.dirname(os.path.realpath(__file__))
dirpath = os.path.join(apppath, 'tokens')
if not os.path.isdir(dirpath):
    os.mkdir(dirpath)
dirpath = os.path.join(apppath, 'smartContracts')
if not os.path.isdir(dirpath):
    os.mkdir(dirpath)

# Read configuration
config = configparser.ConfigParser()
config.read('config.ini')

# todo - write all assertions to make sure default configs are right
if (config['DEFAULT']['NET'] != 'mainnet') and (config['DEFAULT']['NET'] != 'testnet'):
    logger.error("NET parameter in config.ini invalid. Options are either 'mainnet' or 'testnet'. Script is exiting now")
    sys.exit(0)

# Specify mainnet and testnet server list for API calls and websocket calls
serverlist = None
if config['DEFAULT']['NET'] == 'mainnet':
    serverlist = config['DEFAULT']['MAINNET_FLOSIGHT_SERVER_LIST']
elif config['DEFAULT']['NET'] == 'testnet':
    serverlist = config['DEFAULT']['TESTNET_FLOSIGHT_SERVER_LIST']
serverlist = serverlist.split(',')
neturl = config['DEFAULT']['FLOSIGHT_NETURL']
tokenapi_sse_url = config['DEFAULT']['TOKENAPI_SSE_URL']

# Delete database and smartcontract directory if reset is set to 1
if args.reset == 1:
    logger.info("Resetting the database. ")
    apppath = os.path.dirname(os.path.realpath(__file__))
    dirpath = os.path.join(apppath, 'tokens')
    shutil.rmtree(dirpath)
    os.mkdir(dirpath)
    dirpath = os.path.join(apppath, 'smartContracts')
    shutil.rmtree(dirpath)
    os.mkdir(dirpath)
    dirpath = os.path.join(apppath, 'system.db')
    if os.path.exists(dirpath):
        os.remove(dirpath)
    dirpath = os.path.join(apppath, 'latestCache.db')
    if os.path.exists(dirpath):
        os.remove(dirpath)


'''# Initialize latest cache DB
session = create_database_session_orm('system_dbs', {'db_name': "latestCache"}, LatestCacheBase)
session.commit()
session.close()

# get all blocks and transaction data
latestCache_session = create_database_session_orm('system_dbs', {'db_name':'latestCache1'}, LatestCacheBase)
lblocks = latestCache_session.query(LatestBlocks).all()
ltransactions = latestCache_session.query(LatestTransactions).all()
latestCache_session.close()

lblocks_dict = {}
for block in lblocks:
    block_dict = block.__dict__
    lblocks_dict[block_dict['blockNumber']] = {'blockHash':f"{block_dict['blockHash']}", 'jsonData':f"{block_dict['jsonData']}"}

# process and rebuild all transactions
for transaction in ltransactions:
    transaction_dict = transaction.__dict__
    transaction_data = json.loads(transaction_dict['jsonData'])
    parsed_flodata = json.loads(transaction_dict['parsedFloData'])
    block_info = json.loads(lblocks_dict[transaction_dict['blockNumber']]['jsonData'])
    processTransaction(transaction_data, parsed_flodata, block_info)

# copy the old block data
old_latest_cache = create_database_connection('system_dbs', {'db_name':'latestCache1'})
old_latest_cache.execute("ATTACH DATABASE 'latestCache.db' AS new_db")
old_latest_cache.execute("INSERT INTO new_db.latestBlocks SELECT * FROM latestBlocks")
old_latest_cache.close()

# delete
# system.db , latestCache.db, smartContracts, tokens
if os.path.isfile('./system1.db'):
    os.remove('system1.db')
if os.path.isfile('./latestCache1.db'):
    os.remove('latestCache1.db')
if os.path.isfile('./smartContracts1'):
    shutil.rmtree('smartContracts1')
if os.path.isfile('./tokens1'):
    shutil.rmtree('tokens1')
'''

# Read start block no
session = create_database_session_orm('system_dbs', {'db_name': "system"}, SystemBase)
startblock = int(session.query(SystemData).filter_by(attribute='lastblockscanned').all()[0].value) + 1
session.commit()
session.close()

for blockindex in range(startblock, args.forwardblock):
    processBlock(blockindex=blockindex)

# Update system.db's last scanned block
connection = create_database_connection('system_dbs', {'db_name': "system"})
connection.execute(f"UPDATE systemData SET value = {blockindex} WHERE attribute = 'lastblockscanned';")
connection.close()
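test_rollforward.py reads lastblockscanned from system.db and replays every block up to the -f/--forwardblock argument through processBlock, then writes the new lastblockscanned. A typical invocation, with a made-up target height:

# python test_rollforward.py --forwardblock 1760100
# equivalent to: for blockindex in range(startblock, 1760100): processBlock(blockindex=blockindex)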
@ -196,7 +196,7 @@ def transferToken(tokenIdentification, tokenAmount, inputAddress, outputAddress,
|
|||||||
|
|
||||||
if isInfiniteToken == True:
|
if isInfiniteToken == True:
|
||||||
# Make new entry
|
# Make new entry
|
||||||
session.add(ActiveTable(address=outputAddress, consumedpid='1', transferBalance=float(tokenAmount)))
|
session.add(ActiveTable(address=outputAddress, consumedpid='1', transferBalance=float(tokenAmount), blockNumber=blockinfo['height']))
|
||||||
blockchainReference = neturl + 'tx/' + transaction_data['txid']
|
blockchainReference = neturl + 'tx/' + transaction_data['txid']
|
||||||
session.add(TransactionHistory(sourceFloAddress=inputAddress, destFloAddress=outputAddress,
|
session.add(TransactionHistory(sourceFloAddress=inputAddress, destFloAddress=outputAddress,
|
||||||
transferAmount=tokenAmount, blockNumber=blockinfo['height'],
|
transferAmount=tokenAmount, blockNumber=blockinfo['height'],
|
||||||
@ -259,7 +259,7 @@ def transferToken(tokenIdentification, tokenAmount, inputAddress, outputAddress,
    else:
        addressBalance = receiverAddress_details.addressBalance + commentTransferAmount
        receiverAddress_details.addressBalance = None
-       session.add(ActiveTable(address=outputAddress, consumedpid=str(piddict), transferBalance=commentTransferAmount, addressBalance = addressBalance))
+       session.add(ActiveTable(address=outputAddress, consumedpid=str(piddict), transferBalance=commentTransferAmount, addressBalance=addressBalance, blockNumber=blockinfo['height']))

    senderAddress_details = session.query(ActiveTable).filter_by(address=inputAddress).order_by(ActiveTable.id.desc()).first()
    senderAddress_details.addressBalance = senderAddress_details.addressBalance - commentTransferAmount
@ -275,7 +275,7 @@ def transferToken(tokenIdentification, tokenAmount, inputAddress, outputAddress,
        process_pids(entries, session, piditem)

        # move the pids consumed in the transaction to consumedTable and delete them from activeTable
-       session.execute('INSERT INTO consumedTable (id, address, parentid, consumedpid, transferBalance, addressBalance) SELECT id, address, parentid, consumedpid, transferBalance, addressBalance FROM activeTable WHERE id={}'.format(piditem[0]))
+       session.execute('INSERT INTO consumedTable (id, address, parentid, consumedpid, transferBalance, addressBalance, orphaned_parentid, blockNumber) SELECT id, address, parentid, consumedpid, transferBalance, addressBalance, orphaned_parentid, blockNumber FROM activeTable WHERE id={}'.format(piditem[0]))
        session.execute('DELETE FROM activeTable WHERE id={}'.format(piditem[0]))
        session.commit()
    session.commit()
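
The widened INSERT .. SELECT above now carries orphaned_parentid and blockNumber into
consumedTable along with the existing columns. As an aside, the str.format() interpolation of
piditem[0] could be replaced with bound parameters; a sketch only, not part of this commit,
and depending on the SQLAlchemy version the strings may need to be wrapped in sqlalchemy.text():

    session.execute(
        'INSERT INTO consumedTable (id, address, parentid, consumedpid, transferBalance, addressBalance, orphaned_parentid, blockNumber) '
        'SELECT id, address, parentid, consumedpid, transferBalance, addressBalance, orphaned_parentid, blockNumber '
        'FROM activeTable WHERE id=:id', {'id': piditem[0]})
    session.execute('DELETE FROM activeTable WHERE id=:id', {'id': piditem[0]})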
@ -315,7 +315,7 @@ def transferToken(tokenIdentification, tokenAmount, inputAddress, outputAddress,
    else:
        addressBalance = receiverAddress_details.addressBalance + commentTransferAmount
        receiverAddress_details.addressBalance = None
-       session.add(ActiveTable(address=outputAddress, parentid=pidlst[-1][0], consumedpid=str(piddict), transferBalance=commentTransferAmount, addressBalance = addressBalance))
+       session.add(ActiveTable(address=outputAddress, parentid=pidlst[-1][0], consumedpid=str(piddict), transferBalance=commentTransferAmount, addressBalance=addressBalance, blockNumber=blockinfo['height']))

    senderAddress_details = session.query(ActiveTable).filter_by(address=inputAddress).order_by(ActiveTable.id.desc()).first()
    senderAddress_details.addressBalance = senderAddress_details.addressBalance - commentTransferAmount
@ -331,7 +331,7 @@ def transferToken(tokenIdentification, tokenAmount, inputAddress, outputAddress,
        process_pids(entries, session, piditem)

        # move the pids consumed in the transaction to consumedTable and delete them from activeTable
-       session.execute('INSERT INTO consumedTable (id, address, parentid, consumedpid, transferBalance, addressBalance) SELECT id, address, parentid, consumedpid, transferBalance, addressBalance FROM activeTable WHERE id={}'.format(piditem[0]))
+       session.execute('INSERT INTO consumedTable (id, address, parentid, consumedpid, transferBalance, addressBalance, orphaned_parentid, blockNumber) SELECT id, address, parentid, consumedpid, transferBalance, addressBalance, orphaned_parentid, blockNumber FROM activeTable WHERE id={}'.format(piditem[0]))
        session.execute('DELETE FROM activeTable WHERE id={}'.format(piditem[0]))
        session.commit()
    session.commit()
@ -1483,7 +1483,7 @@ def processTransaction(transaction_data, parsed_data, blockinfo):
    elif parsed_data['type'] == 'tokenIncorporation':
        if not check_database_existence('token', {'token_name':f"{parsed_data['tokenIdentification']}"}):
            session = create_database_session_orm('token', {'token_name': f"{parsed_data['tokenIdentification']}"}, Base)
-           session.add(ActiveTable(address=inputlist[0], parentid=0, transferBalance=parsed_data['tokenAmount'], addressBalance=parsed_data['tokenAmount']))
+           session.add(ActiveTable(address=inputlist[0], parentid=0, transferBalance=parsed_data['tokenAmount'], addressBalance=parsed_data['tokenAmount'], blockNumber=blockinfo['height']))
            session.add(TransferLogs(sourceFloAddress=inputadd, destFloAddress=outputlist[0],
                                     transferAmount=parsed_data['tokenAmount'], sourceId=0, destinationId=1,
                                     blockNumber=transaction_data['blockheight'], time=transaction_data['blocktime'],
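
The same blockNumber=blockinfo['height'] addition repeats at every ActiveTable insertion in this
commit. A small factory could keep those call sites in sync; the helper below is hypothetical,
not part of the commit:

    def new_active_row(blockinfo, **fields):
        # every ActiveTable row gets the height of the block that created it
        return ActiveTable(blockNumber=blockinfo['height'], **fields)

    # the incorporation site above would then read:
    # session.add(new_active_row(blockinfo, address=inputlist[0], parentid=0,
    #                            transferBalance=parsed_data['tokenAmount'],
    #                            addressBalance=parsed_data['tokenAmount']))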
@ -2414,7 +2414,7 @@ def processTransaction(transaction_data, parsed_data, blockinfo):
        '''
        if not check_database_existence('token', {'token_name':f"{parsed_data['tokenIdentification']}"}):
            session = create_database_session_orm('token', {'token_name': f"{parsed_data['tokenIdentification']}"}, Base)
-           session.add(ActiveTable(address=inputlist[0], parentid=0, transferBalance=parsed_data['tokenAmount']))
+           session.add(ActiveTable(address=inputlist[0], parentid=0, transferBalance=parsed_data['tokenAmount'], blockNumber=blockinfo['height']))
            session.add(TransferLogs(sourceFloAddress=inputadd, destFloAddress=outputlist[0],
                                     transferAmount=parsed_data['tokenAmount'], sourceId=0, destinationId=1,
                                     blockNumber=transaction_data['blockheight'], time=transaction_data['blocktime'],
@ -2468,7 +2468,7 @@ def processTransaction(transaction_data, parsed_data, blockinfo):
        if not check_database_existence('token', {'token_name':f"{parsed_data['tokenIdentification']}"}):
            parsed_data['tokenAmount'] = 0
            session = create_database_session_orm('token', {'token_name': f"{parsed_data['tokenIdentification']}"}, Base)
-           session.add(ActiveTable(address=inputlist[0], parentid=0, transferBalance=parsed_data['tokenAmount']))
+           session.add(ActiveTable(address=inputlist[0], parentid=0, transferBalance=parsed_data['tokenAmount'], blockNumber=blockinfo['height']))
            session.add(TransferLogs(sourceFloAddress=inputadd, destFloAddress=outputlist[0],
                                     transferAmount=parsed_data['tokenAmount'], sourceId=0, destinationId=1,
                                     blockNumber=transaction_data['blockheight'], time=transaction_data['blocktime'],
@ -2671,7 +2671,6 @@ if args.reset == 1:
    session.close()

# Determine API source for block and transaction information

if __name__ == "__main__":
    # MAIN LOGIC STARTS
    # scan from the latest block saved locally to latest network block