Updating progress

commit 53782d9473
parent db9887679c
@@ -17,6 +17,7 @@ class ActiveTable(Base):
    consumedpid = Column('consumedpid', String)
    transferBalance = Column('transferBalance', Float)
    addressBalance = Column('addressBalance', Float)
    orphaned_parentid = Column('orphaned_parentid', Integer)


class ConsumedTable(Base):
@@ -209,14 +210,15 @@ class TokenAddressMapping(SystemBase):
    blockHash = Column('blockHash', String)


class DatabaseAddressMapping(SystemBase):
    __tablename__ = "databaseAddressMapping"
class DatabaseTypeMapping(SystemBase):
    __tablename__ = "databaseTypeMapping"

    id = Column('id', Integer, primary_key=True)
    db_name = Column('db_name', String)
    db_type = Column('db_type', String)
    keyword = Column('keyword', String)
    object_format = Column('object_format', String)
    blockNumber = Column('blockNumber', Integer)

class LatestTransactions(LatestCacheBase):
    __tablename__ = "latestTransactions"

test_database.py (new file, 49 lines)
@@ -0,0 +1,49 @@
import os
from sqlalchemy import create_engine, func
from sqlalchemy.orm import sessionmaker
from models import SystemData, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, RejectedTransactionHistory, Base, ContractStructure, ContractBase, ContractParticipants, SystemBase, ActiveContracts, ContractAddressMapping, LatestCacheBase, ContractTransactionHistory, RejectedContractTransactionHistory, TokenContractAssociation, ContinuosContractBase, ContractStructure1, ContractParticipants1, ContractDeposits1, ContractTransactionHistory1, DatabaseTypeMapping
import pdb


def check_database_existence(type, parameters):
    if type == 'token':
        return os.path.isfile(f"./tokens/{parameters['token_name']}.db")
    elif type == 'smart_contract':
        return os.path.isfile(f"./smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db")


def create_database_connection(type, parameters):
    if type == 'token':
        engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True)
    elif type == 'smart_contract':
        engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True)
    elif type == 'system_dbs':
        engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False)

    connection = engine.connect()
    return connection


def create_database_session_orm(type, parameters, base):
    if type == 'token':
        engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True)
        base.metadata.create_all(bind=engine)
        session = sessionmaker(bind=engine)()

    elif type == 'smart_contract':
        engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True)
        base.metadata.create_all(bind=engine)
        session = sessionmaker(bind=engine)()

    elif type == 'system_dbs':
        engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False)
        base.metadata.create_all(bind=engine)
        session = sessionmaker(bind=engine)()

    return session
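
# Editor's note: SQLite will not create missing directories, so the ./tokens
# (and ./smartContracts) folders must already exist relative to the working
# directory before the session below can create vivek.db.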

session = create_database_session_orm('token', {'token_name': "vivek"}, Base)
session.add(ActiveTable(address='sdf', parentid=0))
session.commit()
session.close()
@@ -1,6 +1,6 @@
from sqlalchemy import create_engine, func
from sqlalchemy.orm import sessionmaker
from models import SystemData, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, RejectedTransactionHistory, Base, ContractStructure, ContractBase, ContractParticipants, SystemBase, ActiveContracts, ContractAddressMapping, LatestCacheBase, ContractTransactionHistory, RejectedContractTransactionHistory, TokenContractAssociation, ContinuosContractBase, ContractStructure1, ContractParticipants1, ContractDeposits1, ContractTransactionHistory1, LatestTransactions, LatestBlocks, DatabaseAddressMapping
from models import SystemData, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, RejectedTransactionHistory, Base, ContractStructure, ContractBase, ContractParticipants, SystemBase, ActiveContracts, ContractAddressMapping, LatestCacheBase, ContractTransactionHistory, RejectedContractTransactionHistory, TokenContractAssociation, ContinuosContractBase, ContractStructure1, ContractParticipants1, ContractDeposits1, ContractTransactionHistory1, LatestTransactions, LatestBlocks, DatabaseTypeMapping
import json
from tracktokens_smartcontracts import processTransaction
import os
@@ -168,8 +168,6 @@ for transaction in ltransactions:
    transaction_data = json.loads(transaction_dict['jsonData'])
    parsed_flodata = json.loads(transaction_dict['parsedFloData'])
    block_info = json.loads(lblocks_dict[transaction_dict['blockNumber']]['jsonData'])
    #if transaction_data['txid'] == 'b57cf412c8cb16e473d04bae44214705c64d2c25146be22695bf1ac36e166ee0':
    #    pdb.set_trace()
    processTransaction(transaction_data, parsed_flodata, block_info)

# copy the old block data

test_rollback.py (new file, 348 lines)
@@ -0,0 +1,348 @@
import argparse
from sqlalchemy import create_engine, func
from sqlalchemy.orm import sessionmaker
from models import SystemData, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, RejectedTransactionHistory, Base, ContractStructure, ContractBase, ContractParticipants, SystemBase, ActiveContracts, ContractAddressMapping, LatestCacheBase, ContractTransactionHistory, RejectedContractTransactionHistory, TokenContractAssociation, ContinuosContractBase, ContractStructure1, ContractParticipants1, ContractDeposits1, ContractTransactionHistory1, LatestTransactions, LatestBlocks, DatabaseTypeMapping, TokenAddressMapping
from ast import literal_eval
import os
import json
import logging
import pdb

apppath = os.path.dirname(os.path.realpath(__file__))

# helper functions
def check_database_existence(type, parameters):
    if type == 'token':
        return os.path.isfile(f"./tokens/{parameters['token_name']}.db")

    if type == 'smart_contract':
        return os.path.isfile(f"./smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db")


def create_database_connection(type, parameters):
    if type == 'token':
        engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True)
    elif type == 'smart_contract':
        engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True)
    elif type == 'system_dbs':
        engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False)

    connection = engine.connect()
    return connection


def create_database_session_orm(type, parameters, base):
    if type == 'token':
        engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True)
        base.metadata.create_all(bind=engine)
        session = sessionmaker(bind=engine)()

    elif type == 'smart_contract':
        engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True)
        base.metadata.create_all(bind=engine)
        session = sessionmaker(bind=engine)()

    elif type == 'system_dbs':
        engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False)
        base.metadata.create_all(bind=engine)
        session = sessionmaker(bind=engine)()

    return session


def inspect_parsed_flodata(parsed_flodata):
    if parsed_flodata['type'] == 'transfer':
        if parsed_flodata['transferType'] == 'token':
            return {'type': 'tokentransfer', 'token_db': f"{parsed_flodata['tokenIdentification']}", 'token_amount': f"{parsed_flodata['tokenAmount']}"}
    if parsed_flodata['type'] == 'tokenIncorporation':
        return {'type': 'tokenIncorporation', 'token_db': f"{parsed_flodata['tokenIdentification']}", 'token_amount': f"{parsed_flodata['tokenAmount']}"}


'''
Steps to do the rollback

1. Find out the transaction details from the transactionHistory table, i.e. inputAddress
2. Find out the last entry from the activeTable
3. Parse pid and consumedpids from the entry
4. For each consumedpid number, pull out the database entry from the consumedTable and then add it back to the activeTable
4.1. After adding the database entry back, add the consumedpid number's value to the transferBalance of the entry
4.2. What will happen to addressBalance?
4.3.
'''
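
# Illustration (editor's sketch, hypothetical values): how the pid bookkeeping in
# activeTable/consumedTable fits together. consumedpid is stored as str(dict) and
# read back with literal_eval; parentid points at a partially consumed activeTable row.
#
#   active_row = ActiveTable(id=9, address='F...', parentid=3,
#                            consumedpid="{'5': 40.0, '7': 10.0}",
#                            transferBalance=60.0)
#
# Here rows 5 and 7 were fully consumed (40 + 10 units) and moved to consumedTable,
# and the remaining 10.0 units came out of row 3's transferBalance. Rolling this row
# back therefore means restoring rows 5 and 7 and crediting 10.0 back to row 3.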

def undo_last_single_transaction(db_session, key, consumedpid):
    # note: this helper originally referenced db_session, key and consumedpid without
    # defining them; they are taken as parameters here so the function is self-contained
    consumedpid_entry = db_session.query(ConsumedTable).filter(ConsumedTable.id == key).all()
    newTransferBalance = consumedpid_entry[0].transferBalance + consumedpid[key]
    db_session.add(ActiveTable(id=consumedpid_entry[0].id, address=consumedpid_entry[0].address, consumedpid=consumedpid_entry[0].consumedpid, transferBalance=newTransferBalance, addressBalance=consumedpid_entry[0].addressBalance))
    db_session.commit()


def calc_pid_amount(transferBalance, consumedpid):
    consumedpid_sum = 0
    for key in list(consumedpid.keys()):
        consumedpid_sum = consumedpid_sum + float(consumedpid[key])
    return transferBalance - consumedpid_sum
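
# Example (hypothetical numbers): a row with transferBalance=100.0 that fully
# consumed pids {'3': 60.0, '7': 15.0} drew the remaining 25.0 from its parentid:
#   calc_pid_amount(100.0, {'3': 60.0, '7': 15.0})  # -> 25.0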

def find_addressBalance_from_floAddress(database_session, floAddress):
    query_output = database_session.query(ActiveTable).filter(ActiveTable.address==floAddress, ActiveTable.addressBalance!=None).first()
    if query_output is None:
        return 0
    else:
        return query_output.addressBalance


def rollback_address_balance_processing(db_session, senderAddress, receiverAddress, transferBalance):
    # Find out the total sum of the address
    # Find out the last entry where the address balance is not null; if it exists, make it null

    # Calculation phase
    current_receiverBalance = find_addressBalance_from_floAddress(db_session, receiverAddress)
    current_senderBalance = find_addressBalance_from_floAddress(db_session, senderAddress)
    new_receiverBalance = current_receiverBalance - transferBalance
    new_senderBalance = current_senderBalance + transferBalance

    # Insertion phase
    # if the new receiver balance is 0, then only insert the sender's address balance
    # if the receiver balance is not 0, then update the previous occurrence of the receiver address and the sender balance
    # for the sender, find out whether the last occurrence of the sender's floAddress has an addressBalance
    # either the query will return nothing, or the last occurrence will have an addressBalance
    # for the sender, in all cases we update the addressBalance of the last occurrence of the sender's floAddress
    # for the receiver, if the current addressBalance is 0 then do nothing; if it is not 0, update the last occurrence of the receiver address

    sender_query = db_session.query(ActiveTable).filter(ActiveTable.address==senderAddress).order_by(ActiveTable.id.desc()).first()
    sender_query.addressBalance = new_senderBalance

    if new_receiverBalance > 0:
        receiver_query = db_session.query(ActiveTable).filter(ActiveTable.address==receiverAddress).order_by(ActiveTable.id.desc()).limit(2)
        receiver_query[1].addressBalance = new_receiverBalance


def undo_transferToken(tokenIdentification, tokenAmount, inputAddress, outputAddress, transaction_data):
    # Connect to database
    db_session = create_database_session_orm('token', {'token_name': tokenIdentification}, Base)
    transaction_history_entry = db_session.query(TransactionHistory).filter(TransactionHistory.transactionHash == transaction_data.transactionHash).order_by(TransactionHistory.blockNumber.desc()).all()

    active_table_last_entries = db_session.query(ActiveTable).order_by(ActiveTable.id.desc()).limit(len(transaction_history_entry))

    for idx, activeTable_entry in enumerate(active_table_last_entries):
        # Find out consumedpid and partially consumed pids
        parentid = None
        orphaned_parentid = None
        consumedpid = None
        if activeTable_entry.parentid is not None:
            parentid = activeTable_entry.parentid
        if activeTable_entry.orphaned_parentid is not None:
            orphaned_parentid = activeTable_entry.orphaned_parentid
        if activeTable_entry.consumedpid is not None:
            consumedpid = literal_eval(activeTable_entry.consumedpid)

        # filter out based on consumed pids and partially consumed pids
        if parentid is not None:
            # find the row in activeTable with the parentid
            activeTable_pid_entry = db_session.query(ActiveTable).filter(ActiveTable.id == parentid).all()[0]
            # calculate the amount taken from the parentid and credit it back
            activeTable_pid_entry.transferBalance = activeTable_pid_entry.transferBalance + calc_pid_amount(activeTable_entry.transferBalance, consumedpid)

        if consumedpid != {}:
            # each key of the dict is a fully consumed pid, with the consumed amount as its corresponding value
            # how can we maintain the order of pid consumption? The bigger pid number will be towards the end
            # 1. pull the pid number and its details from the consumedTable
            for key in list(consumedpid.keys()):
                consumedpid_entry = db_session.query(ConsumedTable).filter(ConsumedTable.id == key).all()[0]
                newTransferBalance = consumedpid_entry.transferBalance + consumedpid[key]
                db_session.add(ActiveTable(id=consumedpid_entry.id, address=consumedpid_entry.address, parentid=consumedpid_entry.parentid, consumedpid=consumedpid_entry.consumedpid, transferBalance=newTransferBalance, addressBalance=None))
                db_session.delete(consumedpid_entry)

                orphaned_parentid_entries = db_session.query(ActiveTable).filter(ActiveTable.orphaned_parentid == key).all()
                for orphan_entry in orphaned_parentid_entries:
                    orphan_entry.parentid = orphan_entry.orphaned_parentid
                    orphan_entry.orphaned_parentid = None

        # update addressBalance
        rollback_address_balance_processing(db_session, inputAddress, outputAddress, transaction_history_entry[idx].transferAmount)

        # delete operations
        # delete the last row in activeTable and transactionTable
        db_session.delete(activeTable_entry)
        db_session.delete(transaction_history_entry[idx])

    db_session.commit()
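
# Editor's note: the undo above assumes the rows to revert are exactly the newest
# len(transaction_history_entry) rows of activeTable, i.e. nothing else wrote to
# the token database after the transaction being rolled back.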


def find_input_output_addresses(transaction_data):
    # Create vinlist and outputlist
    vinlist = []
    querylist = []

    for vin in transaction_data["vin"]:
        vinlist.append([vin["addr"], float(vin["value"])])

    totalinputval = float(transaction_data["valueIn"])

    # todo Rule 41 - Check if all the addresses in a transaction on the input side are the same
    for idx, item in enumerate(vinlist):
        if idx == 0:
            temp = item[0]
            continue
        if item[0] != temp:
            print(f"System has found more than one address as part of vin. Transaction {transaction_data['txid']} is rejected")
            return 0

    inputlist = [vinlist[0][0], totalinputval]
    inputadd = vinlist[0][0]

    # todo Rule 42 - If the number of vout is more than 2, reject the transaction
    if len(transaction_data["vout"]) > 2:
        print(f"System has found more than 2 addresses as part of vout. Transaction {transaction_data['txid']} is rejected")
        return 0

    # todo Rule 43 - A transaction accepted by the system has two vouts: 1. the FLO address of the receiver,
    # 2. the FLO address of the sender as the change address. If one vout address is the change address,
    # then the other address is the receiver address

    outputlist = []
    addresscounter = 0
    inputcounter = 0
    for obj in transaction_data["vout"]:
        if obj["scriptPubKey"]["type"] == "pubkeyhash":
            addresscounter = addresscounter + 1
            if inputlist[0] == obj["scriptPubKey"]["addresses"][0]:
                inputcounter = inputcounter + 1
                continue
            outputlist.append([obj["scriptPubKey"]["addresses"][0], obj["value"]])

    if addresscounter == inputcounter:
        outputlist = [inputlist[0]]
    elif len(outputlist) != 1:
        print(f"Transaction's change is not coming back to the input address. Transaction {transaction_data['txid']} is rejected")
        return 0
    else:
        outputlist = outputlist[0]

    input_output_list = [inputlist, outputlist]
    return input_output_list
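
# Example (editor's sketch, hypothetical insight-style transaction JSON covering
# only the keys this function reads):
#   tx = {'txid': 'ab12...', 'valueIn': 3.0,
#         'vin': [{'addr': 'FSender', 'value': 3.0}],
#         'vout': [{'value': 1.0, 'scriptPubKey': {'type': 'pubkeyhash', 'addresses': ['FReceiver']}},
#                  {'value': 1.9, 'scriptPubKey': {'type': 'pubkeyhash', 'addresses': ['FSender']}}]}
#   find_input_output_addresses(tx)  # -> [['FSender', 3.0], ['FReceiver', 1.0]]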


def delete_token_database(token_name):
    dirpath = os.path.join(apppath, 'tokens', f"{token_name}.db")
    if os.path.exists(dirpath):
        os.remove(dirpath)


def perform_rollback(transaction):
    latestCache = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase)
    # categorize the transaction and find out the databases it will affect
    parsed_flodata = literal_eval(transaction.parsedFloData)
    inspected_flodata = inspect_parsed_flodata(parsed_flodata)
    transaction_data = json.loads(transaction.jsonData)
    input_output_list = find_input_output_addresses(transaction_data)
    inputAddress = input_output_list[0][0]
    outputAddress = input_output_list[1][0]
    if inspected_flodata['type'] == 'tokentransfer':
        # undo the transaction in the token database
        undo_transferToken(inspected_flodata['token_db'], inspected_flodata['token_amount'], inputAddress, outputAddress, transaction)
    if inspected_flodata['type'] == 'tokenIncorporation':
        # note - if you want, you can add checks to make sure the database has only 1 entry
        # delete the token database
        delete_token_database(inspected_flodata['token_db'])


# Take input from the user regarding how many blocks to go back in the blockchain
'''
parser = argparse.ArgumentParser(description='Script tracks RMT using FLO data on the FLO blockchain - https://flo.cash')
parser.add_argument('-rbk', '--rollback', nargs='?', const=1, type=int, help='Rollback the script')
args = parser.parse_args()
'''

number_blocks_to_rollback = 1754000

# Get all the transaction and block details from latestCache regarding the transactions in the block
systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase)
lastscannedblock = systemdb_session.query(SystemData.value).filter(SystemData.attribute=='lastblockscanned').first()
systemdb_session.close()
lastscannedblock = int(lastscannedblock.value)
rollback_block = lastscannedblock - number_blocks_to_rollback

latestcache_session = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase)
latestBlocks = latestcache_session.query(LatestBlocks).filter(LatestBlocks.blockNumber >= rollback_block).all()
lblocks_dict = {}
blocknumber_list = []
for block in latestBlocks:
    block_dict = block.__dict__
    lblocks_dict[block_dict['blockNumber']] = {'blockHash': f"{block_dict['blockHash']}", 'jsonData': f"{block_dict['jsonData']}"}
    blocknumber_list.insert(0, block_dict['blockNumber'])
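
# Editor's note: the keys of lblocks_dict come straight from the blockNumber column,
# while the lookup below uses str(blockindex); if the column yields ints, the lookup
# always falls into the except branch and the block is skipped, so the key types
# deserve a check.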

# Rollback block will be excluded
for blockindex in blocknumber_list:
    # if blockindex >= rollback_block:'''
    # for blockindex in range(lastscannedblock, rollback_block, -1):
    # Find all the transactions that happened in this block
    print(blockindex)
    try:
        block_tx_hashes = json.loads(lblocks_dict[str(blockindex)]['jsonData'])['tx']
    except:
        print(f"Block {blockindex} is not found in latestCache. Skipping this block")
        continue

    print("Block tx hashes")
    print(block_tx_hashes)

    #if 'e79b02d8eb1e72278d5d23db1ae4dd779bd834e4de15119a05f37fa849662277' in block_tx_hashes:
    #    pdb.set_trace()

    for tx in block_tx_hashes:
        transaction = latestcache_session.query(LatestTransactions).filter(LatestTransactions.transactionHash == tx).all()
        print(transaction)
        if len(transaction) == 1:
            perform_rollback(transaction[0])
            latestcache_session.delete(transaction[0])

    # delete latestBlocks entry
    block_entry = latestcache_session.query(LatestBlocks).filter(LatestBlocks.blockNumber == blockindex).first()
    latestcache_session.delete(block_entry)

    # delete activeContracts, contractAddressMapping, databaseTypeMapping, rejectedContractTransactionHistory, rejectedTransactionHistory, tokenAddressMapping entries for this block
    systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase)
    activeContracts_queries = systemdb_session.query(ActiveContracts).filter(ActiveContracts.blockNumber==blockindex).all()
    contractAddressMapping_queries = systemdb_session.query(ContractAddressMapping).filter(ContractAddressMapping.blockNumber==blockindex).all()
    databaseTypeMapping_queries = systemdb_session.query(DatabaseTypeMapping).filter(DatabaseTypeMapping.blockNumber==blockindex).all()
    rejectedContractTransactionHistory_queries = systemdb_session.query(RejectedContractTransactionHistory).filter(RejectedContractTransactionHistory.blockNumber==blockindex).all()
    rejectedTransactionHistory_queries = systemdb_session.query(RejectedTransactionHistory).filter(RejectedTransactionHistory.blockNumber==blockindex).all()
    tokenAddressMapping_queries = systemdb_session.query(TokenAddressMapping).filter(TokenAddressMapping.blockNumber==blockindex).all()

    for dbentry in activeContracts_queries:
        systemdb_session.delete(dbentry)

    for dbentry in contractAddressMapping_queries:
        systemdb_session.delete(dbentry)

    for dbentry in databaseTypeMapping_queries:
        systemdb_session.delete(dbentry)

    for dbentry in rejectedContractTransactionHistory_queries:
        systemdb_session.delete(dbentry)

    for dbentry in rejectedTransactionHistory_queries:
        systemdb_session.delete(dbentry)

    for dbentry in tokenAddressMapping_queries:
        systemdb_session.delete(dbentry)

    systemdb_session.query(SystemData).filter(SystemData.attribute=='lastblockscanned').update({SystemData.value: str(blockindex)})

    latestcache_session.commit()
    systemdb_session.commit()

latestcache_session.close()
systemdb_session.close()


'''latestcache_session = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase)
latestTransactions = latestcache_session.query(LatestTransactions).filter(LatestTransactions.blockNumber >= rollback_block).order_by(LatestTransactions.id.desc()).all()
latestBlocks = latestcache_session.query(LatestBlocks).filter(LatestBlocks.blockNumber >= rollback_block).all()

#for transaction in latestTransactions:
perform_rollback(latestTransactions[0])'''
@@ -1,13 +1,13 @@
import argparse
import configparser
import json
import logging
import os
import shutil
import sqlite3
import sys
import pybtc
import requests
import argparse
import configparser
import json
import logging
import os
import shutil
import sqlite3
import sys
import pybtc
import requests
import socketio
from sqlalchemy import create_engine, func
from sqlalchemy.orm import sessionmaker
@@ -15,9 +15,9 @@ import time
import parsing
from config import *
from datetime import datetime
from ast import literal_eval
from ast import literal_eval
import pdb
from models import SystemData, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, RejectedTransactionHistory, Base, ContractStructure, ContractBase, ContractParticipants, SystemBase, ActiveContracts, ContractAddressMapping, LatestCacheBase, ContractTransactionHistory, RejectedContractTransactionHistory, TokenContractAssociation, ContinuosContractBase, ContractStructure1, ContractParticipants1, ContractDeposits1, ContractTransactionHistory1, DatabaseAddressMapping
from models import SystemData, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, RejectedTransactionHistory, Base, ContractStructure, ContractBase, ContractParticipants, SystemBase, ActiveContracts, ContractAddressMapping, LatestCacheBase, ContractTransactionHistory, RejectedContractTransactionHistory, TokenContractAssociation, ContinuosContractBase, ContractStructure1, ContractParticipants1, ContractDeposits1, ContractTransactionHistory1, DatabaseTypeMapping


goodblockset = {}
@@ -179,12 +179,13 @@ def updateLatestBlock(blockData):

def process_pids(entries, session, piditem):
    for entry in entries:
        consumedpid_dict = literal_eval(entry.consumedpid)
        '''consumedpid_dict = literal_eval(entry.consumedpid)
        total_consumedpid_amount = 0
        for key in consumedpid_dict.keys():
            total_consumedpid_amount = total_consumedpid_amount + float(consumedpid_dict[key])
        consumedpid_dict[piditem[0]] = total_consumedpid_amount
        entry.consumedpid = str(consumedpid_dict)
        entry.consumedpid = str(consumedpid_dict)'''
        entry.orphaned_parentid = entry.parentid
        entry.parentid = None
    #session.commit()
    return 1
@@ -198,12 +199,13 @@ def transferToken(tokenIdentification, tokenAmount, inputAddress, outputAddress,
        session.add(ActiveTable(address=outputAddress, consumedpid='1', transferBalance=float(tokenAmount)))
        blockchainReference = neturl + 'tx/' + transaction_data['txid']
        session.add(TransactionHistory(sourceFloAddress=inputAddress, destFloAddress=outputAddress,
                                       transferAmount=tokenAmount, blockNumber=blockinfo['height'],
                                       blockHash=blockinfo['hash'], time=blockinfo['time'],
                                       transactionHash=transaction_data['txid'],
                                       blockchainReference=blockchainReference, jsonData=json.dumps(transaction_data),
                                       transactionType=parsed_data['type'],
                                       parsedFloData=json.dumps(parsed_data)))
                                       transferAmount=tokenAmount, blockNumber=blockinfo['height'],
                                       blockHash=blockinfo['hash'], time=blockinfo['time'],
                                       transactionHash=transaction_data['txid'],
                                       blockchainReference=blockchainReference,
                                       jsonData=json.dumps(transaction_data),
                                       transactionType=parsed_data['type'],
                                       parsedFloData=json.dumps(parsed_data)))
        session.commit()
        session.close()
        return 1
@@ -251,7 +253,7 @@ def transferToken(tokenIdentification, tokenAmount, inputAddress, outputAddress,
            consumedpid_string = consumedpid_string[:-1]

            # Make new entry
            receiverAddress_details = session.query(ActiveTable).filter(ActiveTable.address==outputAddress, ActiveTable.addressBalance!=None).order_by(ActiveTable.id.desc()).first()
            receiverAddress_details = session.query(ActiveTable).filter(ActiveTable.address==outputAddress, ActiveTable.addressBalance!=None).first()
            if receiverAddress_details is None:
                addressBalance = commentTransferAmount
            else:
@@ -307,13 +309,13 @@ def transferToken(tokenIdentification, tokenAmount, inputAddress, outputAddress,
            consumedpid_string = consumedpid_string[:-1]

            # Make new entry
            receiverAddress_details = session.query(ActiveTable).filter(ActiveTable.address==outputAddress, ActiveTable.addressBalance!=None).order_by(ActiveTable.id.desc()).first()
            receiverAddress_details = session.query(ActiveTable).filter(ActiveTable.address==outputAddress, ActiveTable.addressBalance!=None).first()
            if receiverAddress_details is None:
                addressBalance = commentTransferAmount
            else:
                addressBalance = receiverAddress_details.addressBalance + commentTransferAmount
            receiverAddress_details.addressBalance = None
            session.add(ActiveTable(address=outputAddress, consumedpid=str(piddict), transferBalance=commentTransferAmount, addressBalance = addressBalance))
            session.add(ActiveTable(address=outputAddress, parentid=pidlst[-1][0], consumedpid=str(piddict), transferBalance=commentTransferAmount, addressBalance = addressBalance))

            senderAddress_details = session.query(ActiveTable).filter_by(address=inputAddress).order_by(ActiveTable.id.desc()).first()
            senderAddress_details.addressBalance = senderAddress_details.addressBalance - commentTransferAmount
@@ -678,7 +680,7 @@ def processTransaction(transaction_data, parsed_data, blockinfo):
        if check_database_existence('token', {'token_name': f"{parsed_data['tokenIdentification']}"}):
            # Pull details of the token type from system.db database
            connection = create_database_connection('system_dbs', {'db_name': 'system'})
            db_details = connection.execute("select db_name, db_type, keyword, object_format from databaseAddressMapping where db_name='{}'".format(parsed_data['tokenIdentification']))
            db_details = connection.execute("select db_name, db_type, keyword, object_format from databaseTypeMapping where db_name='{}'".format(parsed_data['tokenIdentification']))
            db_details = list(zip(*db_details))
            if db_details[1] == 'infinite-token':
                db_object = json.loads(db_details[3])
@@ -1502,11 +1504,11 @@ def processTransaction(transaction_data, parsed_data, blockinfo):
            # add it to token address to token mapping db table
            connection = create_database_connection('system_dbs', {'db_name': 'system'})
            connection.execute(f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{inputadd}', '{parsed_data['tokenIdentification']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}');")
            connection.execute(f"INSERT INTO databaseAddressMapping (db_name, db_type, keyword, object_format) VALUES ('{parsed_data['tokenIdentification']}', 'token', '', '')")
            connection.execute(f"INSERT INTO databaseTypeMapping (db_name, db_type, keyword, object_format, blockNumber) VALUES ('{parsed_data['tokenIdentification']}', 'token', '', '', '{transaction_data['blockheight']}')")
            connection.close()

            updateLatestTransaction(transaction_data, parsed_data)
            pushData_SSEapi(f"Token | Succesfully incorporated token {parsed_data['tokenIdentification']} at transaction {transaction_data['txid']}")
            pushData_SSEapi(f"Token | Successfully incorporated token {parsed_data['tokenIdentification']} at transaction {transaction_data['txid']}")
            return 1
        else:
            logger.info(f"Transaction {transaction_data['txid']} rejected as a token with the name {parsed_data['tokenIdentification']} has already been incorporated")
@@ -1678,10 +1680,11 @@ def processTransaction(transaction_data, parsed_data, blockinfo):
                                                transactionHash=transaction_data['txid'],
                                                blockNumber=transaction_data['blockheight'],
                                                blockHash=transaction_data['blockhash']))
            session.add(DatabaseAddressMapping(db_name=f"{parsed_data['contractName']}-{inputadd}",
            session.add(DatabaseTypeMapping(db_name=f"{parsed_data['contractName']}-{inputadd}",
                                            db_type='smartcontract',
                                            keyword='',
                                            object_format=''))
                                            object_format='',
                                            blockNumber=transaction_data['blockheight']))
            session.commit()

            session.close()
@@ -1800,10 +1803,11 @@ def processTransaction(transaction_data, parsed_data, blockinfo):
                                                transactionHash=transaction_data['txid'],
                                                blockNumber=transaction_data['blockheight'],
                                                blockHash=transaction_data['blockhash']))
            session.add(DatabaseAddressMapping(db_name=f"{parsed_data['contractName']}-{inputadd}",
            session.add(DatabaseTypeMapping(db_name=f"{parsed_data['contractName']}-{inputadd}",
                                            db_type='smartcontract',
                                            keyword='',
                                            object_format=''))
                                            object_format='',
                                            blockNumber=transaction_data['blockheight']))
            session.commit()
            session.close()

@@ -1953,7 +1957,6 @@ def processTransaction(transaction_data, parsed_data, blockinfo):
                                       blockchainReference=blockchainReference,
                                       jsonData=json.dumps(transaction_data),
                                       rejectComment=f"Transaction {transaction_data['txid']} rejected as Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} hasn't expired yet",

                                       parsedFloData=json.dumps(parsed_data)
                                       ))
            session.commit()
@@ -1964,8 +1967,7 @@ def processTransaction(transaction_data, parsed_data, blockinfo):

        # check the type of smart contract i.e. external trigger or internal trigger
        if 'payeeAddress' in contractStructure:
            logger.warning(
                f"Transaction {transaction_data['txid']} rejected as Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} has an internal trigger")
            logger.warning(f"Transaction {transaction_data['txid']} rejected as Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} has an internal trigger")
            # Store transfer as part of RejectedContractTransactionHistory
            session = create_database_session_orm('system_dbs', {'db_name': "system"}, SystemBase)
            blockchainReference = neturl + 'tx/' + transaction_data['txid']
@@ -2434,11 +2436,10 @@ def processTransaction(transaction_data, parsed_data, blockinfo):
            connection = create_database_connection('system_dbs', {'db_name': 'system'})
            connection.execute(f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{inputadd}', '{parsed_data['tokenIdentification']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}');")
            nft_data = {'sha256_hash': f"{parsed_data['nftHash']}"}
            connection.execute(f"INSERT INTO databaseAddressMapping (db_name, db_type, keyword, object_format) VALUES ('{parsed_data['tokenIdentification']}', 'nft', '', '{nft_data}')")
            connection.execute(f"INSERT INTO databaseTypeMapping (db_name, db_type, keyword, object_format, blockNumber) VALUES ('{parsed_data['tokenIdentification']}', 'nft', '', '{nft_data}', '{transaction_data['blockheight']}')")
            connection.close()

            updateLatestTransaction(transaction_data, parsed_data)

            pushData_SSEapi(f"Token | Succesfully incorporated token {parsed_data['tokenIdentification']} at transaction {transaction_data['txid']}")
            return 1
        else:
@@ -2489,7 +2490,7 @@ def processTransaction(transaction_data, parsed_data, blockinfo):
            connection = create_database_connection('system_dbs', {'db_name': 'system'})
            connection.execute(f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{inputadd}', '{parsed_data['tokenIdentification']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}');")
            info_object = {'root_address': inputadd}
            connection.execute(f"INSERT INTO databaseAddressMapping (db_name, db_type, keyword, object_format) VALUES ('{parsed_data['tokenIdentification']}', 'infinite-token', '', '{info_object}')")
            connection.execute(f"INSERT INTO databaseTypeMapping (db_name, db_type, keyword, object_format, blockNumber) VALUES ('{parsed_data['tokenIdentification']}', 'infinite-token', '', '{info_object}', '{transaction_data['blockheight']}')")
            connection.close()

            updateLatestTransaction(transaction_data, parsed_data)
@@ -2671,7 +2672,6 @@ if args.reset == 1:

# Determine API source for block and transaction information


if __name__ == "__main__":
    # MAIN LOGIC STARTS
    # scan from the latest block saved locally to latest network block
