diff --git a/app.py b/app.py
deleted file mode 100644
index 4a292ef..0000000
--- a/app.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import os
-
-from flask import Flask, jsonify
-
-app = Flask(__name__)
-
-@app.route('/')
-def hello_world():
-    return 'Hello, World!'
-
-
-@app.route('/getmarkerlist')
-def marker_list():
-    dblist = os.listdir("databases/")
-    dbdict = {}
-    for idx, item in enumerate(dblist):
-        dbdict[idx] = item[:-3]
-
-    return jsonify(dbdict)
-
-
-app.run(debug=True)
\ No newline at end of file
diff --git a/output_return.py b/output_return.py
deleted file mode 100644
index 71ea3bc..0000000
--- a/output_return.py
+++ /dev/null
@@ -1,159 +0,0 @@
-def outputreturn(*argv):
-    if argv[0] == 'noise':
-        parsed_data = {'type': 'noise'}
-        return parsed_data
-    elif argv[0] == 'token_incorporation':
-        parsed_data = {
-            'type': 'tokenIncorporation',
-            'flodata': argv[1], #string
-            'tokenIdentification': argv[2], #hashList[0][:-1]
-            'tokenAmount': argv[3] #initTokens
-        }
-        return parsed_data
-    elif argv[0] == 'token_transfer':
-        parsed_data = {
-            'type': 'transfer',
-            'transferType': 'token',
-            'flodata': argv[1], #string
-            'tokenIdentification': argv[2], #hashList[0][:-1]
-            'tokenAmount': argv[3] #amount
-        }
-        return parsed_data
-    elif argv[0] == 'one-time-event-userchoice-smartcontract-incorporation':
-        parsed_data = {
-            'type': 'smartContractIncorporation',
-            'contractType': 'one-time-event',
-            'tokenIdentification': argv[1], #hashList[0][:-1]
-            'contractName': argv[2], #atList[0][:-1]
-            'contractAddress': argv[3], #contractaddress[:-1]
-            'flodata': argv[4], #string
-            'contractConditions': {
-                'contractamount' : argv[5],
-                'minimumsubscriptionamount' : argv[6],
-                'maximumsubscriptionamount' : argv[7],
-                'payeeaddress' : argv[8],
-                'userchoice' : argv[9],
-                'expiryTime' : argv[10]
-            }
-        }
-        return parsed_data
-    elif argv[0] == 'one-time-event-userchoice-smartcontract-participation':
-        parsed_data = {
-            'type': 'transfer',
-            'transferType': 'smartContract',
-            'flodata': argv[1], #string
-            'tokenIdentification': argv[2], #hashList[0][:-1]
-            'operation': 'transfer',
-            'tokenAmount': argv[3], #amount
-            'contractName': argv[4], #atList[0][:-1]
-            'userChoice': argv[5] #userChoice
-        }
-        return parsed_data
-    elif argv[0] == 'one-time-event-userchoice-smartcontract-trigger':
-        parsed_data = {
-            'type': 'smartContractPays',
-            'contractName': argv[1], #atList[0][:-1]
-            'triggerCondition': argv[2] #triggerCondition.group().strip()[1:-1]
-        }
-        return parsed_data
-    elif argv[0] == 'one-time-event-time-smartcontract-incorporation':
-        parsed_data = {
-            'type': 'smartContractIncorporation',
-            'contractType': 'one-time-event',
-            'tokenIdentification': argv[1], #hashList[0][:-1]
-            'contractName': argv[2], #atList[0][:-1]
-            'contractAddress': argv[3], #contractaddress[:-1]
-            'flodata': argv[4], #string
-            'contractConditions': {
-                'contractamount' : argv[5],
-                'minimumsubscriptionamount' : argv[6],
-                'maximumsubscriptionamount' : argv[7],
-                'payeeaddress' : argv[8],
-                'expiryTime' : argv[9]
-            }
-        }
-        return parsed_data
-    elif argv[0] == 'one-time-event-time-smartcontract-participation':
-        parsed_data = {
-            'type': 'transfer',
-            'transferType': 'smartContract',
-            'flodata': argv[1], #string
-            'tokenIdentification': argv[2], #hashList[0][:-1]
-            'operation': 'transfer',
-            'tokenAmount': argv[3], #amount
-            'contractName': argv[4] #atList[0][:-1]
-        }
-        return parsed_data
-    elif argv[0] == 'continuos-event-token-swap-incorporation':
-        parsed_data = {
-            'type': 'smartContractIncorporation',
-            'contractType': 'continuos-event',
-            'tokenIdentification': argv[1], #hashList[0][:-1]
-            'contractName': argv[2], #atList[0][:-1]
-            'contractAddress': argv[3], #contractaddress[:-1]
-            'flodata': argv[4], #string
-            'contractConditions': {
-                'subtype' : argv[5], #tokenswap
-                'accepting_token' : argv[6],
-                'selling_token' : argv[7],
-                'pricetype' : argv[8],
-                'price' : argv[9],
-            }
-        }
-        return parsed_data
-    elif argv[0] == 'continuos-event-token-swap-deposit':
-        parsed_data = {
-            'type': 'smartContractDeposit',
-            'tokenIdentification': argv[1], #hashList[0][:-1]
-            'depositAmount': argv[2], #depositAmount
-            'contractName': argv[3], #atList[0][:-1]
-            'flodata': argv[4], #string
-            'depositConditions': {
-                'expiryTime' : argv[5]
-            }
-        }
-        return parsed_data
-    elif argv[0] == 'continuos-event-token-swap-participation':
-        parsed_data = {
-            'type': 'smartContractParticipation',
-            'tokenIdentification': argv[1], #hashList[0][:-1]
-            'tokenAmount': argv[2], #tokenAmount
-            'contractName': argv[3], #atList[0][:-1]
-            'flodata': argv[4] #string
-        }
-        return parsed_data
-
-response_string = outputreturn('token_incorporation','create 5000 rmt#', 'rmt', 5000.0)
-print(response_string)
-
-def outputreturn_parameterlist(nameoflist, value):
-    # if the name of list does not exist, create it
-    # if the name of list does exist, append value
-    # for eg. for creating tokens, the name of list is "create_token_list" and its elements are create_token_list[0]='tokenincorporation', create_token_list[1]='', create_token_list[2]='', create_token_list[1]=''
-    return nameoflist
-
-
-outputreturn('noise')
-
-outputreturn('token_incorporation',f"{flodata}", f"{tokenname}", f"{tokenamount}")
-
-outputreturn('token_transfer',f"{flodata}", f"{tokenname}", f"{tokenamount}")
-
-outputreturn('one-time-event-userchoice-smartcontract-incorporation',f"{tokenIdentification}", f"{contractName}", f"{contractAddress}", f"{flodata}", f"{contractamount}", f"{minimumsubscriptionamount}", f"{maximumsubscriptionamount}", f"{userchoice}", f"{expiryTime}")
-
-outputreturn('one-time-event-userchoice-smartcontract-participation',f"{flodata}", f"{tokenIdentification}", f"{tokenAmount}", f"{contractName}", f"{userChoice}")
-
-outputreturn('one-time-event-userchoice-smartcontract-trigger', f"{contractName}", f"{triggerCondition}")
-
-outputreturn('one-time-event-time-smartcontract-incorporation', f"{tokenIdentification}", f"{contractName}", f"{contractAddress}", f"{flodata}", f"{contractamount}", f"{minimumsubscriptionamount}", f"{maximumsubscriptionamount}", f"{payeeaddress}", f"{expiryTime}")
-
-outputreturn('one-time-event-time-smartcontract-participation', f"{flodata}", f"{tokenIdentification}", f"{tokenAmount}", f"{contractName}")
-
-outputreturn('one-time-event-time-smartcontract-participation', f"{flodata}", f"{tokenIdentification}", f"{tokenAmount}", f"{contractName}")
-
-outputreturn('continuos-event-token-swap-incorporation', f"{tokenIdentification}", f"{contractName}", f"{contractAddress}", f"{flodata}", f"{subtype}", f"{accepting_token}", f"{selling_token}", f"{pricetype}", f"{price}")
-
-outputreturn('continuos-event-token-swap-deposit', f"{tokenIdentification}", f"{depositAmount}", f"{contractName}", f"{flodata}", f"{expiryTime}")
-
-outputreturn('continuos-event-token-swap-participation', f"{tokenIdentification}", f"{tokenAmount}", f"{contractName}", f"{flodata}")
-
diff --git a/parser_categorization.py b/parser_categorization.py
deleted file mode 100644
index f2750a7..0000000
--- a/parser_categorization.py
+++ /dev/null
@@ -1,131 +0,0 @@
-# Noise 
categorization -parsed_data = {'type': 'noise'} - - -# Token incorporation -flodata="create 10 million " -parsed_data = { - 'type': 'tokenIncorporation', - 'flodata': string, - 'tokenIdentification': hashList[0][:-1], - 'tokenAmount': initTokens - } - -''' - one # | create/start/incorporate keyword | integer or float number -''' - - -# Token transfer -parsed_data = { - 'type': 'transfer', - 'transferType': 'token', - 'flodata': string, - 'tokenIdentification': hashList[0][:-1], - 'tokenAmount': amount - } - -''' - one # | send/give/transfer keyword | integer or float number -''' - - -# Smart Contract Incorporation - One time event - with userchoice -parsed_data = { - 'type': 'smartContractIncorporation', - 'contractType': 'one-time-event', - 'tokenIdentification': hashList[0][:-1], - 'contractName': atList[0][:-1], - 'contractAddress': contractaddress[:-1], - 'flodata': string, - 'contractConditions': { - 'contractamount' : , - 'minimumsubscriptionamount' : , - 'maximumsubscriptionamount' : , - 'payeeaddress' : , - 'userchoice' : , - 'expiryTime' : - } -} - - -# Smart Contract Participation - one time event - userchoice -parsed_data = { - 'type': 'transfer', - 'transferType': 'smartContract', - 'flodata': string, - 'tokenIdentification': hashList[0][:-1], - 'operation': 'transfer', - 'tokenAmount': amount, - 'contractName': atList[0][:-1], - 'userChoice': userChoice - } - - -# Smart Contract Trigger - one time event - userchoice -parsed_data = { - 'type': 'smartContractPays', - 'contractName': atList[0][:-1], - 'triggerCondition': triggerCondition.group().strip()[1:-1] - } - - -# Smart Contract Incorporation - One time event - with time as trigger -parsed_data = { - 'type': 'smartContractIncorporation', - 'contractType': 'one-time-event', - 'tokenIdentification': hashList[0][:-1], - 'contractName': atList[0][:-1], - 'contractAddress': contractaddress[:-1], - 'flodata': string, - 'contractConditions': { - 'contractamount' : , - 'minimumsubscriptionamount' : , - 'maximumsubscriptionamount' : , - 'payeeaddress' : , - 'expiryTime' : - } -} - - -# Smart Contract Participation - one time event - time trigger -parsed_data = { - 'type': 'transfer', - 'transferType': 'smartContract', - 'flodata': string, - 'tokenIdentification': hashList[0][:-1], - 'operation': 'transfer', - 'tokenAmount': amount, - 'contractName': atList[0][:-1] - } - - -# Smart Contract Incorporation - Continuos event - Token swap -parsed_data = { - 'type': 'smartContractIncorporation', - 'contractType': 'continuos-event', - 'tokenIdentification': hashList[0][:-1], - 'contractName': atList[0][:-1], - 'contractAddress': contractaddress[:-1], - 'flodata': string, - 'contractConditions': { - 'subtype' : 'tokenswap', - 'accepting_token' : , - 'selling_token' : , - 'pricetype' : , - 'price' : , - } -} - - -# Smart Contract Deposit - Token swap -parsed_data = { - 'type': 'smartContractDeposit', - 'tokenIdentification': hashList[0][:-1], - 'depositAmount': depositAmount, - 'contractName': atList[0][:-1], - 'flodata': string, - 'depositConditions': { - 'expiryTime' - } -} \ No newline at end of file diff --git a/parser_functions.py b/parser_functions.py deleted file mode 100644 index 15e3385..0000000 --- a/parser_functions.py +++ /dev/null @@ -1,143 +0,0 @@ -import pdb -import re - -def findrule1(rawstring, special_character): - wordList = [] - for word in rawstring.split(' '): - if word.endswith(special_character) and len(word) != 1: - wordList.append(word) - return wordList - -def findrule1_1(rawstring, special_character): - wordList = 
[] - for word in rawstring.split(' '): - if word.endswith(special_character) and len(word) != 1: - wordList.append(word) - if len(wordList)==1: - return wordList[0] - else: - False - -''' -rawstring = "Create Smart Contract with the name swap-rupee-bioscope@ of the type continuous-event* at the address oRRCHWouTpMSPuL6yZRwFCuh87ZhuHoL78$ with contract-conditions :(1) subtype = tokenswap (2) accepting_token=rupee# (3) selling_token = bioscope# (4) price = '15' (5) priceType = ‘predetermined’ (6) direction = oneway" -rawstring1 = "send 500 rmt1# rmt2# rmt3#" - -response = findrule1(rawstring1, '#') -print(f"\n\nResponse for rawstring1 searching #") -print(response) - -response = findrule1(rawstring, '#') -print(f"\n\nResponse for rawstring searching #") -print(response) -''' - -inputstring = " : rmt3# " -special_character = "#" -marker = ":" -operator = "after_marker" -output = ["rmt1#", "rmt2#"] - -def findrule2(inputstring, special_character, marker, operator): - inputstring_toprocess = None - if operator=='before_marker': - inputstring_toprocess = inputstring.split(":")[0] - elif operator=='after_marker': - inputstring_toprocess = inputstring.split(":")[1] - - wordList = [] - for word in inputstring_toprocess.split(' '): - if word.endswith(special_character) and len(word) != 1: - wordList.append(word) - return wordList - -'''response = findrule2(inputstring, special_character, marker, operator) -print(response) ''' - -########## - -def findrule3(text): - base_units = {'thousand': 10 ** 3, 'million': 10 ** 6, 'billion': 10 ** 9, 'trillion': 10 ** 12} - textList = text.split(' ') - counter = 0 - value = None - for idx, word in enumerate(textList): - try: - result = float(word) - if textList[idx + 1] in base_units: - value = result * base_units[textList[idx + 1]] - counter = counter + 1 - else: - value = result - counter = counter + 1 - except: - for unit in base_units: - result = word.split(unit) - if len(result) == 2 and result[1] == '' and result[0] != '': - try: - value = float(result[0]) * base_units[unit] - counter = counter + 1 - except: - continue - - if counter == 1: - return value - else: - return None - - -########## - -def findWholeWord(w): - return re.compile(r'\b({0})\b'.format(w), flags=re.IGNORECASE).search - -def truefalse_rule2(rawstring, permitted_list, denied_list): - # Find transfer , send , give - foundPermitted = None - foundDenied = None - - for word in permitted_list: - if findWholeWord(word)(rawstring): - foundPermitted = word - break - - for word in denied_list: - if findWholeWord(word)(rawstring): - foundDenied = word - break - - if (foundPermitted is not None) and (foundDenied is None): - return True - else: - return False - -'''teststring = "create 500 rmt# start send " -response = truefalse_rule2(teststring, permitted_list, denied_list) -print(response)''' - -# Token incorporation operation -## Existance of keyword - -def apply_rule1(*argv): - a = argv[0](*argv[1:]) - if a is False: - return None - else: - return a - - -rawstring = "create 5 million rmt# transfer" - -# desired output format - outputreturn('token_incorporation',f"{flodata}", f"{tokenname}", f"{tokenamount}") -tokenname = apply_rule1(findrule1_1,rawstring,"#") -print(tokenname) -denied_list = ['transfer', 'send', 'give'] # keep everything lowercase -permitted_list = ['incorporate', 'create', 'start'] # keep everything lowercase -if tokenname is not None: - isIncorporate = apply_rule1(truefalse_rule2, rawstring, permitted_list, denied_list) - -operationname = apply_rule1(truefalse_rule2, 
rawstring, permitted_list, denied_list) -if not operation: - formatOutput("noise") - -#response_string = outputreturn('token_incorporation','create 5000 rmt#', 'rmt', 5000.0) -#print(response_string) \ No newline at end of file diff --git a/planning.py b/planning.txt similarity index 100% rename from planning.py rename to planning.txt diff --git a/sqlite_tests.py b/sqlite_tests.py deleted file mode 100644 index d29d017..0000000 --- a/sqlite_tests.py +++ /dev/null @@ -1,47 +0,0 @@ -from sqlalchemy import create_engine, func -import pdb -import os -from models import SystemData, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, RejectedTransactionHistory, Base, ContractStructure, ContractBase, ContractParticipants, SystemBase, ActiveContracts, ContractAddressMapping, LatestCacheBase, ContractTransactionHistory, RejectedContractTransactionHistory, TokenContractAssociation, ContinuosContractBase, ContractStructure1, ContractParticipants1, ContractDeposits1, ContractTransactionHistory1 -from sqlalchemy import create_engine, func -from sqlalchemy.orm import sessionmaker - - -def create_database_connection(type, parameters): - if type == 'token': - engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True) - connection = engine.connect() - return connection - if type == 'smart_contract': - pass - - -def check_database_existence(type, parameters): - if type == 'token': - return os.path.isfile(f"./tokens/{parameters['token_name']}.db") - - if type == 'smart_contract': - return os.path.isfile(f"./smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db") - - -def create_database_session_orm(type, parameters, base): - if type == 'token': - engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True) - base.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - - elif type == 'smart_contract': - engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True) - base.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - - elif type == 'system_dbs': - engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False) - base.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - - return session - - -session = create_database_session_orm('token', {'token_name': 'test'}, Base) -session = create_database_session_orm('smart_contract', {'contract_name': f"{}", 'contract_address': f"{}"}, Base) -session = create_database_session_orm('system_dbs', {'db_name': "system"}, SystemBase) \ No newline at end of file diff --git a/test_database.py b/test_database.py deleted file mode 100644 index ebb9380..0000000 --- a/test_database.py +++ /dev/null @@ -1,49 +0,0 @@ -import os -from sqlalchemy import create_engine, func -from sqlalchemy.orm import sessionmaker -from models import SystemData, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, RejectedTransactionHistory, Base, ContractStructure, ContractBase, ContractParticipants, SystemBase, ActiveContracts, ContractAddressMapping, LatestCacheBase, ContractTransactionHistory, RejectedContractTransactionHistory, TokenContractAssociation, ContinuosContractBase, ContractStructure1, ContractParticipants1, ContractDeposits1, ContractTransactionHistory1, DatabaseTypeMapping -import pdb - - -def check_database_existence(type, parameters): - if type == 'token': - return os.path.isfile(f"./tokens/{parameters['token_name']}.db") - elif type == 
'smart_contract': - return os.path.isfile(f"./smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db") - - -def create_database_connection(type, parameters): - if type == 'token': - engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True) - elif type == 'smart_contract': - engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True) - elif type == 'system_dbs': - engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False) - - connection = engine.connect() - return connection - - -def create_database_session_orm(type, parameters, base): - if type == 'token': - engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True) - base.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - - elif type == 'smart_contract': - engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True) - base.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - - elif type == 'system_dbs': - engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False) - base.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - - return session - - -session = create_database_session_orm('token', {'token_name': f"vivek"}, Base) -session.add(ActiveTable(address='sdf', parentid=0)) -session.commit() -session.close() \ No newline at end of file diff --git a/test_db.py b/test_db.py deleted file mode 100644 index 86b0f94..0000000 --- a/test_db.py +++ /dev/null @@ -1,64 +0,0 @@ -import argparse -import configparser -import json -import logging -import os -import shutil -import sqlite3 -import sys -import pyflo -import requests -import socketio -from sqlalchemy import create_engine, func -from sqlalchemy.orm import sessionmaker -import time -import parsing -from config import * -from datetime import datetime -from ast import literal_eval -import pdb -from models import SystemData, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, RejectedTransactionHistory, Base, ContractStructure, ContractBase, ContractParticipants, SystemBase, ActiveContracts, ContractAddressMapping, LatestCacheBase, ContractTransactionHistory, RejectedContractTransactionHistory, TokenContractAssociation, ContinuosContractBase, ContractStructure1, ContractParticipants1, ContractDeposits1, ContractTransactionHistory1, DatabaseTypeMapping, TimeActions - - -def check_database_existence(type, parameters): - if type == 'token': - return os.path.isfile(f"./tokens/{parameters['token_name']}.db") - - if type == 'smart_contract': - return os.path.isfile(f"./smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db") - - -def create_database_connection(type, parameters): - if type == 'token': - engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True) - elif type == 'smart_contract': - engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True) - elif type == 'system_dbs': - engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False) - - connection = engine.connect() - return connection - - -def create_database_session_orm(type, parameters, base): - if type == 'token': - engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True) - base.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - - 
elif type == 'smart_contract': - engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True) - base.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - - elif type == 'system_dbs': - engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False) - base.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - - return session - - -contract_session = create_database_session_orm('smart_contract', {'contract_name':f"swap-rupee-bioscope", 'contract_address':f"oRRCHWouTpMSPuL6yZRwFCuh87ZhuHoL78"}, ContractBase) - -pdb.set_trace() \ No newline at end of file diff --git a/test_parsing.py b/test_parsing.py deleted file mode 100644 index a38e303..0000000 --- a/test_parsing.py +++ /dev/null @@ -1,34 +0,0 @@ -from input_classifier import super_main_function -import pdb - - -token_incorporation_test_cases = [ - ['create 1000 rmt#', {'type': 'tokenIncorporation','flodata': 'create 1000 rmt#', 'tokenIdentification': 'rmt', 'tokenAmount': 1000.0}], - ['create 100 rmt#', {'type' : 'tokenIncorporation','flodata': 'create 100 rmt#', 'tokenIdentification': 'rmt', 'tokenAmount': 100.0}], - ['create 100 rmt$', {'type':'noise'}] - ] - -def test_token_incorporation(): - for test_case in token_incorporation_test_cases: - parsed_data = super_main_function(test_case[0]) - expected_parsed_data = test_case[1] - assert parsed_data == expected_parsed_data - - -conflict_smart_contract_participation_deposit_test_cases = [ - ["send 0.001 rmt# to india-elections-2019@ to FLO address F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1 with the userchoice:'narendra modi wins'", { - 'type': 'transfer', - 'transferType': 'smartContract', - 'flodata': "send 0.001 rmt# to india-elections-2019@ to FLO address F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1 with the userchoice:'narendra modi wins'", - 'tokenIdentification': 'rmt', - 'operation': 'transfer', - 'tokenAmount': 0.001, - 'contractName': 'india-elections-2019@', - 'userChoice': 'narendra modi wins' - }] -] - -def test_conflict_smart_contract_participation_deposit(): - for test_case in conflict_smart_contract_participation_deposit_test_cases: - parsed_data = super_main_function(test_case[0]) - expected_parsed_data = test_case[1] diff --git a/test_rollback.py b/test_rollback.py deleted file mode 100644 index 006d772..0000000 --- a/test_rollback.py +++ /dev/null @@ -1,380 +0,0 @@ -import argparse -from sqlalchemy import create_engine, func -from sqlalchemy.orm import sessionmaker -from models import SystemData, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, RejectedTransactionHistory, Base, ContractStructure, ContractBase, ContractParticipants, SystemBase, ActiveContracts, ContractAddressMapping, LatestCacheBase, ContractTransactionHistory, RejectedContractTransactionHistory, TokenContractAssociation, ContinuosContractBase, ContractStructure1, ContractParticipants1, ContractDeposits1, ContractTransactionHistory1, LatestTransactions, LatestBlocks, DatabaseTypeMapping, TokenAddressMapping -from ast import literal_eval -import os -import json -import logging -import pdb -import sys - -apppath = os.path.dirname(os.path.realpath(__file__)) - -# helper functions -def check_database_existence(type, parameters): - if type == 'token': - return os.path.isfile(f"./tokens/{parameters['token_name']}.db") - - if type == 'smart_contract': - return os.path.isfile(f"./smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db") - - -def 
create_database_connection(type, parameters): - if type == 'token': - engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True) - elif type == 'smart_contract': - engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True) - elif type == 'system_dbs': - engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False) - - connection = engine.connect() - return connection - - -def create_database_session_orm(type, parameters, base): - if type == 'token': - engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True) - base.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - - elif type == 'smart_contract': - engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True) - base.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - - elif type == 'system_dbs': - engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False) - base.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - - return session - - -def inspect_parsed_flodata(parsed_flodata, inputAddress, outputAddress): - if parsed_flodata['type'] == 'transfer': - if parsed_flodata['transferType'] == 'token': - return {'type':'tokentransfer', 'token_db':f"{parsed_flodata['tokenIdentification']}", 'token_amount':f"{parsed_flodata['tokenAmount']}"} - if parsed_flodata['type'] == 'tokenIncorporation': - return {'type':'tokenIncorporation', 'token_db':f"{parsed_flodata['tokenIdentification']}", 'token_amount':f"{parsed_flodata['tokenAmount']}"} - if parsed_flodata['type'] == 'smartContractPays': - # contract address, token | both of them come from - sc_session = create_database_session_orm('smart_contract', {'contract_name':f"{parsed_flodata['contractName']}", 'contract_address':f"{outputAddress}"}, ContractBase) - token_db = sc_session.query(ContractStructure.value).filter(ContractStructure.attribute=='tokenIdentification').first()[0] - return {'type':'smartContractPays', 'token_db':f"{token_db}" , 'contract_db':f"{parsed_flodata['contractName']}-{outputAddress}", 'triggerCondition':f"{parsed_flodata['triggerCondition']}"} - -''' -Steps to do the rollback - -1. Find out the transaction details from transaction history table ie. inputAddress, -2. Find out the last entry from the activeTable -3. Parse pid and consumedpids from the entry -4. For each consumedpid number, pull put database entry from the consumedtable and then add to activeTable - 4.1. After adding the database entry back, add consumedpid number's value to transferBalance of the entry - 4.2. What will happen to addressBalance? - 4.3. 
-''' - - -def undo_last_single_transaction(): - consumedpid_entry = db_session.query(ConsumedTable).filter(ConsumedTable.id == key).all() - newTransferBalance = consumedpid_entry[0].transferBalance + consumedpid[key] - db_session.add(ActiveTable(id=consumedpid_entry[0].id, address=consumedpid_entry[0].address, consumedpid=consumedpid_entry[0].consumedpid, transferBalance=newTransferBalance, addressBalance = consumedpid_entry[0].addressBalance)) - db_session.commit() - - -def calc_pid_amount(transferBalance, consumedpid): - consumedpid_sum = 0 - for key in list(consumedpid.keys()): - consumedpid_sum = consumedpid_sum + float(consumedpid[key]) - return transferBalance - consumedpid_sum - - -def find_addressBalance_from_floAddress(database_session, floAddress): - query_output = database_session.query(ActiveTable).filter(ActiveTable.address==floAddress, ActiveTable.addressBalance!=None).first() - if query_output is None: - return 0 - else: - return query_output.addressBalance - - -def rollback_address_balance_processing(db_session, senderAddress, receiverAddress, transferBalance): - # Find out total sum of address - # Find out the last entry where address balance is not null, if exists make it null - - # Calculation phase - current_receiverBalance = find_addressBalance_from_floAddress(db_session, receiverAddress) - current_senderBalance = find_addressBalance_from_floAddress(db_session ,senderAddress) - new_receiverBalance = current_receiverBalance - transferBalance - new_senderBalance = current_senderBalance + transferBalance - - # Insertion phase - # if new receiver balance is 0, then only insert sender address balance - # if receiver balance is not 0, then update previous occurence of the receiver address and sender balance - # for sender, find out weather the last occurence of senderfloid has an addressBalance - # either query out will not come or the last occurence will have address - # for sender, in all cases we will update the addressBalance of last occurences of senderfloaddress - # for receiver, if the currentaddressbalance is 0 then do nothing .. 
and if the currentaddressbalance is not 0 then update the last occurence of receiver address - - sender_query = db_session.query(ActiveTable).filter(ActiveTable.address==senderAddress).order_by(ActiveTable.id.desc()).first() - sender_query.addressBalance = new_senderBalance - - if new_receiverBalance != 0 and new_receiverBalance > 0: - receiver_query = db_session.query(ActiveTable).filter(ActiveTable.address==receiverAddress).order_by(ActiveTable.id.desc()).limit(2) - receiver_query[1].addressBalance = new_receiverBalance - - -def undo_smartContractPays(tokenIdentification, inputAddress, outputAddress, transaction_data): - # Token database - ''' - * rollback each pid transaction - * the addressBalance will have to be calculated after each loop, NOT at the end of the loop - ''' - tokendb_session = create_database_session_orm('token', {'token_name':tokenIdentification}, Base) - transaction_history_entry = tokendb_session.query(TransactionHistory).filter(TransactionHistory.transactionHash == transaction_data.transactionHash).order_by(TransactionHistory.blockNumber.desc()).all() - - active_table_last_entries = tokendb_session.query(ActiveTable).order_by(ActiveTable.id.desc()).limit(len(transaction_history_entry)) - pdb.set_trace() - - # Smart Contract database - ''' - * - ''' - print('') - - -def undo_transferToken(tokenIdentification, tokenAmount, inputAddress, outputAddress, transaction_data): - # Connect to database - db_session = create_database_session_orm('token', {'token_name':tokenIdentification}, Base) - transaction_history_entry = db_session.query(TransactionHistory).filter(TransactionHistory.transactionHash == transaction_data.transactionHash).order_by(TransactionHistory.blockNumber.desc()).all() - - active_table_last_entries = db_session.query(ActiveTable).order_by(ActiveTable.id.desc()).limit(len(transaction_history_entry)) - - for idx, activeTable_entry in enumerate(active_table_last_entries): - # Find out consumedpid and partially consumed pids - parentid = None - orphaned_parentid = None - consumedpid = None - if activeTable_entry.parentid is not None: - parentid = activeTable_entry.parentid - if activeTable_entry.orphaned_parentid is not None: - orphaned_parentid = activeTable_entry.orphaned_parentid - if activeTable_entry.consumedpid is not None: - consumedpid = literal_eval(activeTable_entry.consumedpid) - - # filter out based on consumped pid and partially consumed pids - if parentid is not None: - # find query in activeTable with the parentid - activeTable_pid_entry = db_session.query(ActiveTable).filter(ActiveTable.id == parentid).all()[0] - # calculate the amount taken from parentid - activeTable_pid_entry.transferBalance = activeTable_pid_entry.transferBalance + calc_pid_amount(activeTable_entry.transferBalance, consumedpid) - - if consumedpid != {}: - # each key of the pid is totally consumed and with its corresponding value written in the end - # how can we maintain the order of pid consumption? The bigger pid number will be towards the end - # 1. 
pull the pid number and its details from the consumedpid table - for key in list(consumedpid.keys()): - consumedpid_entry = db_session.query(ConsumedTable).filter(ConsumedTable.id == key).all()[0] - newTransferBalance = consumedpid_entry.transferBalance + consumedpid[key] - db_session.add(ActiveTable(id=consumedpid_entry.id, address=consumedpid_entry.address, parentid=consumedpid_entry.parentid ,consumedpid=consumedpid_entry.consumedpid, transferBalance=newTransferBalance, addressBalance = None)) - db_session.delete(consumedpid_entry) - - orphaned_parentid_entries = db_session.query(ActiveTable).filter(ActiveTable.orphaned_parentid == key).all() - for orphan_entry in orphaned_parentid_entries: - orphan_entry.parentid = orphan_entry.orphaned_parentid - orphan_entry.orphaned_parentid = None - - # update addressBalance - rollback_address_balance_processing(db_session, inputAddress, outputAddress, transaction_history_entry[idx].transferAmount) - - # delete operations - # delete the last row in activeTable and transactionTable - db_session.delete(activeTable_entry) - db_session.delete(transaction_history_entry[idx]) - - db_session.commit() - - -def find_input_output_addresses(transaction_data): - # Create vinlist and outputlist - vinlist = [] - querylist = [] - - for vin in transaction_data["vin"]: - vinlist.append([vin["addr"], float(vin["value"])]) - - totalinputval = float(transaction_data["valueIn"]) - - # todo Rule 41 - Check if all the addresses in a transaction on the input side are the same - for idx, item in enumerate(vinlist): - if idx == 0: - temp = item[0] - continue - if item[0] != temp: - print(f"System has found more than one address as part of vin. Transaction {transaction_data['txid']} is rejected") - return 0 - - inputlist = [vinlist[0][0], totalinputval] - inputadd = vinlist[0][0] - - # todo Rule 42 - If the number of vout is more than 2, reject the transaction - if len(transaction_data["vout"]) > 2: - print(f"System has found more than 2 address as part of vout. Transaction {transaction_data['txid']} is rejected") - return 0 - - # todo Rule 43 - A transaction accepted by the system has two vouts, 1. The FLO address of the receiver - # 2. Flo address of the sender as change address. If the vout address is change address, then the other adddress - # is the recevier address - - outputlist = [] - addresscounter = 0 - inputcounter = 0 - for obj in transaction_data["vout"]: - if obj["scriptPubKey"]["type"] == "pubkeyhash": - addresscounter = addresscounter + 1 - if inputlist[0] == obj["scriptPubKey"]["addresses"][0]: - inputcounter = inputcounter + 1 - continue - outputlist.append([obj["scriptPubKey"]["addresses"][0], obj["value"]]) - - if addresscounter == inputcounter: - outputlist = [inputlist[0]] - elif len(outputlist) != 1: - print(f"Transaction's change is not coming back to the input address. 
Transaction {transaction_data['txid']} is rejected") - return 0 - else: - outputlist = outputlist[0] - - input_output_list = [inputlist, outputlist] - return input_output_list - - -def delete_token_database(token_name): - dirpath = os.path.join(apppath, 'tokens', f"{token_name}.db") - if os.path.exists(dirpath): - os.remove(dirpath) - - -def perform_rollback(transaction): - latestCache = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase) - # categorize transaction and find out the databases it will affect - transaction_data = json.loads(transaction.jsonData) - input_output_list = find_input_output_addresses(transaction_data) - inputAddress = input_output_list[0][0] - outputAddress = input_output_list[1][0] - parsed_flodata = literal_eval(transaction.parsedFloData) - inspected_flodata = inspect_parsed_flodata(parsed_flodata, inputAddress, outputAddress) - - if inspected_flodata['type'] == 'tokentransfer': - # undo the transaction in token database - undo_transferToken(inspected_flodata['token_db'], inspected_flodata['token_amount'], inputAddress, outputAddress, transaction) - elif inspected_flodata['type'] == 'tokenIncorporation': - # note - if you want you can do checks to make sure the database has only 1 entry - # delete the token database - delete_token_database(inspected_flodata['token_db']) - elif inspected_flodata['type'] == 'smartContractPays': - undo_smartContractPays(inspected_flodata[''], inputAddress, outputAddress, transaction_data) - else: - print("Transaction not in any inspected_flodata category until now.. Exiting") - sys.exit(0) - - -# Take input from user reg how many blocks to go back in the blockchain -parser = argparse.ArgumentParser(description='Script tracks RMT using FLO data on the FLO blockchain - https://flo.cash') -parser.add_argument('-b', '--toblocknumer', nargs='?', type=int, help='Rollback the script to the specified block number') -parser.add_argument('-n', '--blockcount', nargs='?', type=int, help='Rollback the script to the number of blocks specified') -args = parser.parse_args() - - -# Get all the transaction and blockdetails from latestCache reg the transactions in the block -systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase) -lastscannedblock = systemdb_session.query(SystemData.value).filter(SystemData.attribute=='lastblockscanned').first() -systemdb_session.close() -lastscannedblock = int(lastscannedblock.value) - - -#number_blocks_to_rollback = 1754000 -if (args.blockcount and args.toblocknumber): - print("You can only specify one of the options -b or -c") - sys.exit(0) -elif args.blockcount: - rollback_block = lastscannedblock - args.blockcount -elif args.toblocknumer: - rollback_block = args.toblocknumer -else: - print("Please specify the number of blocks to rollback") - sys.exit(0) - - -latestcache_session = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase) -latestBlocks = latestcache_session.query(LatestBlocks).filter(LatestBlocks.blockNumber >= rollback_block).all() -lblocks_dict = {} -blocknumber_list = [] -for block in latestBlocks: - block_dict = block.__dict__ - lblocks_dict[block_dict['blockNumber']] = {'blockHash':f"{block_dict['blockHash']}", 'jsonData':f"{block_dict['jsonData']}"} - blocknumber_list.insert(0,block_dict['blockNumber']) - - -# Rollback block will be excluded -for blockindex in blocknumber_list: - # if blockindex >= rollback_block:''' -#for blockindex in range(lastscannedblock, rollback_block, -1): - # Find 
the all the transactions that happened in this block - print(blockindex) - try: - block_tx_hashes = json.loads(lblocks_dict[str(blockindex)]['jsonData'])['tx'] - except: - print(f"Block {blockindex} is not found in latestCache. Skipping this block") - continue - - print("Block tx hashes") - print(block_tx_hashes) - - if 'b57cf412c8cb16e473d04bae44214705c64d2c25146be22695bf1ac36e166ee0' in block_tx_hashes: - pdb.set_trace() - - for tx in block_tx_hashes: - transaction = latestcache_session.query(LatestTransactions).filter(LatestTransactions.transactionHash == tx).all() - print(transaction) - if len(transaction) == 1: - perform_rollback(transaction[0]) - latestcache_session.delete(transaction[0]) - - # delete latestBlocks entry - block_entry = latestcache_session.query(LatestBlocks).filter(LatestBlocks.blockNumber == blockindex).first() - latestcache_session.delete(block_entry) - - # delete activeContracts, contractAddressMapping, DatabaseAddressMapping, rejectedContractTransactionHistory, rejectedTransactionHistory, tokenAddressMapping - systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase) - activeContracts_session = systemdb_session.query(ActiveContracts).filter(ActiveContracts.blockNumber==blockindex).all() - contractAddressMapping_queries = systemdb_session.query(ContractAddressMapping).filter(ContractAddressMapping.blockNumber==blockindex).all() - databaseTypeMapping_queries = systemdb_session.query(DatabaseTypeMapping).filter(DatabaseTypeMapping.blockNumber==blockindex).all() - rejectedContractTransactionHistory_queries = systemdb_session.query(RejectedContractTransactionHistory).filter(RejectedContractTransactionHistory.blockNumber==blockindex).all() - rejectedTransactionHistory_queries = systemdb_session.query(RejectedTransactionHistory).filter(RejectedTransactionHistory.blockNumber==blockindex).all() - tokenAddressMapping_queries = systemdb_session.query(TokenAddressMapping).filter(TokenAddressMapping.blockNumber==blockindex).all() - - for dbentry in activeContracts_session: - systemdb_session.delete(dbentry) - - for dbentry in contractAddressMapping_queries: - systemdb_session.delete(dbentry) - - for dbentry in databaseTypeMapping_queries: - systemdb_session.delete(dbentry) - - for dbentry in rejectedContractTransactionHistory_queries: - systemdb_session.delete(dbentry) - - for dbentry in rejectedTransactionHistory_queries: - systemdb_session.delete(dbentry) - - for dbentry in tokenAddressMapping_queries: - systemdb_session.delete(dbentry) - - systemdb_session.query(SystemData).filter(SystemData.attribute=='lastblockscanned').update({SystemData.value:str(blockindex)}) - - latestcache_session.commit() - systemdb_session.commit() - latestcache_session.close() - systemdb_session.close() diff --git a/test_rollforward.py b/test_rollforward.py deleted file mode 100644 index 384324f..0000000 --- a/test_rollforward.py +++ /dev/null @@ -1,196 +0,0 @@ -from sqlalchemy import create_engine, func -from sqlalchemy.orm import sessionmaker -from models import SystemData, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, RejectedTransactionHistory, Base, ContractStructure, ContractBase, ContractParticipants, SystemBase, ActiveContracts, ContractAddressMapping, LatestCacheBase, ContractTransactionHistory, RejectedContractTransactionHistory, TokenContractAssociation, ContinuosContractBase, ContractStructure1, ContractParticipants1, ContractDeposits1, ContractTransactionHistory1, LatestTransactions, LatestBlocks, DatabaseTypeMapping -import json 
-from tracktokens_smartcontracts import processTransaction, processBlock -import os -import logging -import argparse -import configparser -import pdb -import shutil -import sys - - -# helper functions -def check_database_existence(type, parameters): - if type == 'token': - return os.path.isfile(f"./tokens/{parameters['token_name']}.db") - - if type == 'smart_contract': - return os.path.isfile(f"./smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db") - - -def create_database_connection(type, parameters): - if type == 'token': - engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True) - elif type == 'smart_contract': - engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True) - elif type == 'system_dbs': - engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False) - - connection = engine.connect() - return connection - - -def create_database_session_orm(type, parameters, base): - if type == 'token': - engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True) - base.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - - elif type == 'smart_contract': - engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True) - base.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - - elif type == 'system_dbs': - engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False) - base.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - - return session - - -# MAIN EXECUTION STARTS -# Configuration of required variables -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) - -formatter = logging.Formatter('%(asctime)s:%(name)s:%(message)s') - -file_handler = logging.FileHandler('tracking.log') -file_handler.setLevel(logging.INFO) -file_handler.setFormatter(formatter) - -stream_handler = logging.StreamHandler() -stream_handler.setFormatter(formatter) - -logger.addHandler(file_handler) -logger.addHandler(stream_handler) - - -# Rule 1 - Read command line arguments to reset the databases as blank -# Rule 2 - Read config to set testnet/mainnet -# Rule 3 - Set flo blockexplorer location depending on testnet or mainnet -# Rule 4 - Set the local flo-cli path depending on testnet or mainnet ( removed this feature | Flosights are the only source ) -# Rule 5 - Set the block number to scan from - - -# Read command line arguments -parser = argparse.ArgumentParser(description='Script tracks RMT using FLO data on the FLO blockchain - https://flo.cash') -parser.add_argument('-b', '--toblocknumer', nargs='?', type=int, help='Forward to the specified block number') -parser.add_argument('-n', '--blockcount', nargs='?', type=int, help='Forward to the specified block count') -args = parser.parse_args() - -if (args.blockcount and args.toblocknumber): - print("You can only specify one of the options -b or -c") - sys.exit(0) -elif args.blockcount: - forward_block = lastscannedblock + args.blockcount -elif args.toblocknumer: - forward_block = args.toblocknumer -else: - print("Please specify the number of blocks to rollback") - sys.exit(0) - - -apppath = os.path.dirname(os.path.realpath(__file__)) -dirpath = os.path.join(apppath, 'tokens') -if not os.path.isdir(dirpath): - os.mkdir(dirpath) -dirpath = os.path.join(apppath, 'smartContracts') -if not os.path.isdir(dirpath): - os.mkdir(dirpath) - -# Read 
configuration -config = configparser.ConfigParser() -config.read('config.ini') - -# todo - write all assertions to make sure default configs are right -if (config['DEFAULT']['NET'] != 'mainnet') and (config['DEFAULT']['NET'] != 'testnet'): - logger.error("NET parameter in config.ini invalid. Options are either 'mainnet' or 'testnet'. Script is exiting now") - sys.exit(0) - -# Specify mainnet and testnet server list for API calls and websocket calls -serverlist = None -if config['DEFAULT']['NET'] == 'mainnet': - serverlist = config['DEFAULT']['MAINNET_FLOSIGHT_SERVER_LIST'] -elif config['DEFAULT']['NET'] == 'testnet': - serverlist = config['DEFAULT']['TESTNET_FLOSIGHT_SERVER_LIST'] -serverlist = serverlist.split(',') -neturl = config['DEFAULT']['FLOSIGHT_NETURL'] -tokenapi_sse_url = config['DEFAULT']['TOKENAPI_SSE_URL'] - -# Delete database and smartcontract directory if reset is set to 1 -if args.reset == 1: - logger.info("Resetting the database. ") - apppath = os.path.dirname(os.path.realpath(__file__)) - dirpath = os.path.join(apppath, 'tokens') - shutil.rmtree(dirpath) - os.mkdir(dirpath) - dirpath = os.path.join(apppath, 'smartContracts') - shutil.rmtree(dirpath) - os.mkdir(dirpath) - dirpath = os.path.join(apppath, 'system.db') - if os.path.exists(dirpath): - os.remove(dirpath) - dirpath = os.path.join(apppath, 'latestCache.db') - if os.path.exists(dirpath): - os.remove(dirpath) - - -'''# Initialize latest cache DB -session = create_database_session_orm('system_dbs', {'db_name': "latestCache"}, LatestCacheBase) -session.commit() -session.close() - -# get all blocks and transaction data -latestCache_session = create_database_session_orm('system_dbs', {'db_name':'latestCache1'}, LatestCacheBase) -lblocks = latestCache_session.query(LatestBlocks).all() -ltransactions = latestCache_session.query(LatestTransactions).all() -latestCache_session.close() - -lblocks_dict = {} -for block in lblocks: - block_dict = block.__dict__ - lblocks_dict[block_dict['blockNumber']] = {'blockHash':f"{block_dict['blockHash']}", 'jsonData':f"{block_dict['jsonData']}"} - -# process and rebuild all transactions -for transaction in ltransactions: - transaction_dict = transaction.__dict__ - transaction_data = json.loads(transaction_dict['jsonData']) - parsed_flodata = json.loads(transaction_dict['parsedFloData']) - block_info = json.loads(lblocks_dict[transaction_dict['blockNumber']]['jsonData']) - processTransaction(transaction_data, parsed_flodata, block_info) - -# copy the old block data -old_latest_cache = create_database_connection('system_dbs', {'db_name':'latestCache1'}) -old_latest_cache.execute("ATTACH DATABASE 'latestCache.db' AS new_db") -old_latest_cache.execute("INSERT INTO new_db.latestBlocks SELECT * FROM latestBlocks") -old_latest_cache.close() - -# delete -# system.db , latestCache.db, smartContracts, tokens -if os.path.isfile('./system1.db'): - os.remove('system1.db') -if os.path.isfile('./latestCache1.db'): - os.remove('latestCache1.db') -if os.path.isfile('./smartContracts1'): - shutil.rmtree('smartContracts1') -if os.path.isfile('./tokens1'): - shutil.rmtree('tokens1') - ''' - -# Read start block no -session = create_database_session_orm('system_dbs', {'db_name': "system"}, SystemBase) -startblock = int(session.query(SystemData).filter_by(attribute='lastblockscanned').all()[0].value) + 1 -session.commit() -session.close() - -for blockindex in range(startblock, forward_block): - processBlock(blockindex=blockindex) - # Update system.db's last scanned block - connection = 
create_database_connection('system_dbs', {'db_name': "system"})
-    connection.execute(f"UPDATE systemData SET value = {blockindex} WHERE attribute = 'lastblockscanned';")
-    connection.close()
\ No newline at end of file
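
Note for anyone resurrecting these scripts: the three SQLite helpers (check_database_existence, create_database_connection, create_database_session_orm) were copy-pasted verbatim into sqlite_tests.py, test_database.py, test_db.py, test_rollback.py and test_rollforward.py before deletion. Below is a minimal sketch of what a single shared module could look like; the file name database_helpers.py and the echo keyword argument are assumptions and were not part of the deleted code, and the declarative bases (Base, ContractBase, SystemBase) would still come from models.py as in the originals.

# database_helpers.py -- hypothetical shared module, sketched from the helpers
# that were duplicated across the deleted test scripts (not part of the original repo).
import os

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker


def _database_path(db_type, parameters):
    # Same path conventions the deleted scripts used for tokens, contracts and system DBs.
    if db_type == 'token':
        return f"tokens/{parameters['token_name']}.db"
    elif db_type == 'smart_contract':
        return f"smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db"
    elif db_type == 'system_dbs':
        return f"{parameters['db_name']}.db"
    raise ValueError(f"Unknown database type {db_type!r}")


def check_database_existence(db_type, parameters):
    # True if the SQLite file already exists on disk.
    return os.path.isfile(_database_path(db_type, parameters))


def create_database_connection(db_type, parameters, echo=False):
    # Raw connection, as used above for the UPDATE on systemData.
    engine = create_engine(f"sqlite:///{_database_path(db_type, parameters)}", echo=echo)
    return engine.connect()


def create_database_session_orm(db_type, parameters, base, echo=False):
    # Creates the schema if it does not exist yet and returns a fresh ORM session.
    engine = create_engine(f"sqlite:///{_database_path(db_type, parameters)}", echo=echo)
    base.metadata.create_all(bind=engine)
    return sessionmaker(bind=engine)()

Usage would mirror the deleted scripts, for example: session = create_database_session_orm('token', {'token_name': 'test'}, Base).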