Changes required for blockbook migration

This commit is contained in:
RanchiMall Dev 2023-08-09 14:48:56 +00:00
parent 258eea97e9
commit 63a3c2344c
22 changed files with 5411 additions and 3675 deletions

31
.github/workflows/test_parsing.yml vendored Normal file
View File

@ -0,0 +1,31 @@
# This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
name: Test flodata parsing
on:
push:
branches: [ "swap-statef-testing" ]
pull_request:
branches: [ "swap-statef-testing" ]
permissions:
contents: read
jobs:
build:
runs-on: self-hosted
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.8
uses: actions/setup-python@v3
with:
python-version: "3.8"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install arrow==1.1.0 pyflo-lib==2.0.9 requests==2.25.0
- name: Test with unittest
run: |
python -m unittest tests/test_parsing.py

3
.gitignore vendored
View File

@ -10,6 +10,9 @@ config.ini
config.py
*.log
py3/
py3.9.0
__pycache__/
*.pyc
.vscode/
error-notes.txt
snippets*

View File

@ -1 +0,0 @@
3.9.0

View File

@ -1,57 +1,3 @@
# FLO Token & Smart Contract System
## Important versions and their hashes
The python script scans the FLO Blockchain for Token and Smart Contract activity and creates/updates local SQLite databases accordingly.
`339dac6a50bcd973dda4caf43998fc61dd79ea68`
The legacy token and smart contract system running currently on the server
`41c4078db98e878ecef3452007893136c531ba05` ==> WORKING VERSION | Token swap branch
The latest version with token swap smart contract and token transfer with the following problems:
1. Parsing module is not able to detect token creation and transfer floData
2. The smart contract system is not moving forward because it is not able to detect token databases as they are created when run from scratch; however, it works with previously created token databases
`89d96501b9fcdd3c91c8900e1fb3dd5a8d8684c1`
The Docker-compatibility branch is needed right now because the Docker image made for flo-token-tracking required some changes, which have been made in that branch.
## How to start the system
1. Create a virtual environment with python3.7 and activate it
```
python3.7 -m venv py3.7
source py3.7/bin/activate
```
2. Install the Python packages required for the virtual environment with `pip3 install -r requirements.txt`
3. Set up config files with the following information
For testnet
```
# config.ini
[DEFAULT]
NET = testnet
FLO_CLI_PATH = /usr/local/bin/flo-cli
START_BLOCK = 740400
# config.py
committeeAddressList = ['oVwmQnQGtXjRpP7dxJeiRGd5azCrJiB6Ka']
sseAPI_url = 'https://ranchimallflo-testnet.duckdns.org/'
```
For mainnet
```
# config.ini
[DEFAULT]
NET = mainnet
FLO_CLI_PATH = /usr/local/bin/flo-cli
START_BLOCK = 3387900
# config.py
committeeAddressList = ['FRwwCqbP7DN4z5guffzzhCSgpD8Q33hUG8']
sseAPI_url = 'https://ranchimallflo.duckdns.org/'
```
4. If running for the first time, run `python3.7 tracktokens-smartcontracts.py --reset`; otherwise run `python3.7 tracktokens-smartcontracts.py`
If you want to listen to the RanchiMall Token Tracker scanner's events, subscribe to the Ranchimallflo API's endpoint `/sse`
Reference - https://ably.com/topic/server-sent-events
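A minimal subscriber sketch, assuming the `/sse` endpoint streams standard server-sent events (`data:` lines) and using the testnet `sseAPI_url` from the config example above:
```
# sketch: subscribe to the token tracker's /sse endpoint and print each event
import json
import requests

SSE_URL = 'https://ranchimallflo-testnet.duckdns.org/sse'  # sseAPI_url + '/sse'

with requests.get(SSE_URL, stream=True) as response:
    for line in response.iter_lines(decode_unicode=True):
        if line and line.startswith('data:'):
            payload = line[len('data:'):].strip()
            try:
                print(json.loads(payload))
            except json.JSONDecodeError:
                print(payload)
```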

23
app.py
View File

@ -1,23 +0,0 @@
import os
from flask import Flask, jsonify
app = Flask(__name__)
@app.route('/')
def hello_world():
return 'Hello, World!'
@app.route('/getmarkerlist')
def marker_list():
dblist = os.listdir("databases/")
dbdict = {}
for idx, item in enumerate(dblist):
dbdict[idx] = item[:-3]
return jsonify(dbdict)
app.run(debug=True)

13
config-example.ini Normal file
View File

@ -0,0 +1,13 @@
[DEFAULT]
NET = testnet
FLO_CLI_PATH = /usr/local/bin/flo-cli
START_BLOCK = 740400
FLOSIGHT_NETURL = https://flosight-testnet.ranchimall.net/
TOKENAPI_SSE_URL = https://ranchimallflo-testnet.duckdns.org
MAINNET_FLOSIGHT_SERVER_LIST = https://flosight.ranchimall.net/ , https://flosight.duckdns.org/
TESTNET_FLOSIGHT_SERVER_LIST = https://flosight-testnet.ranchimall.net/ , https://flosight-testnet.duckdns.org/
IGNORE_BLOCK_LIST = 902446
IGNORE_TRANSACTION_LIST = b4ac4ddb51188b28b39bcb3aa31357d5bfe562c21e8aaf8dde0ec560fc893174
DATA_PATH = /home/production/Dev/flo-token-tracker
APP_ADMIN = oWooGLbBELNnwq8Z5YmjoVjw8GhBGH3qSP

View File

@ -1 +0,0 @@
committeeAddressList = [<committeeAddress>]

View File

@ -1,4 +0,0 @@
[DEFAULT]
NET = mainnet
FLO_CLI_PATH = /usr/local/bin/flo-cli
START_BLOCK = 3387900

43
convert_db.py Normal file
View File

@ -0,0 +1,43 @@
from models import SystemData, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, RejectedTransactionHistory, Base, ContractStructure, ContractBase, ContractParticipants, SystemBase, ActiveContracts, ContractAddressMapping, LatestCacheBase, ContractTransactionHistory, RejectedContractTransactionHistory, TokenContractAssociation, ContinuosContractBase, ContractStructure1, ContractParticipants1, ContractDeposits1, ContractTransactionHistory1, LatestTransactions, LatestBlocks, DatabaseTypeMapping, TokenAddressMapping, LatestCacheBase1, LatestTransactions1, LatestBlocks1
import pdb
from sqlalchemy import create_engine, func
from sqlalchemy.orm import sessionmaker
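# One-off migration helper: reads the cached block and transaction rows from convertdb.db in the
# working directory and copies them into latestCache.db (both paths are relative to the current directory).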
def create_database_session_orm(type, parameters, base):
if type == 'token':
engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True)
base.metadata.create_all(bind=engine)
session = sessionmaker(bind=engine)()
elif type == 'smart_contract':
engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True)
base.metadata.create_all(bind=engine)
session = sessionmaker(bind=engine)()
elif type == 'system_dbs':
engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False)
base.metadata.create_all(bind=engine)
session = sessionmaker(bind=engine)()
return session
# connect to the database convert_db
convert_db = create_database_session_orm('system_dbs', {'db_name': 'convertdb'}, LatestCacheBase1)
latest_blocks = convert_db.query(LatestBlocks1).all()
latest_txs = convert_db.query(LatestTransactions1).all()
# create the new latestCache database and copy the cached blocks and transactions into it
convert_db_1 = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase)
for block in latest_blocks:
convert_db_1.add(LatestBlocks(blockNumber=block.blockNumber, blockHash=block.blockHash, jsonData=block.jsonData))
for tx in latest_txs:
convert_db_1.add(LatestTransactions(transactionHash=tx.transactionHash, blockNumber=tx.blockNumber, jsonData=tx.jsonData, transactionType=tx.transactionType, parsedFloData=tx.parsedFloData))
convert_db_1.commit()
convert_db_1.close()
convert_db.close()

View File

@ -1,3 +0,0 @@
cd /home/production/Desktop/flo-token-tracking/
python3 tracktokens-smartcontracts.py

192
models.py
View File

@ -1,13 +1,14 @@
from sqlalchemy import Column, Integer, Float, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
TokenBase = declarative_base()
ContractBase = declarative_base()
ContinuosContractBase = declarative_base()
SystemBase = declarative_base()
LatestCacheBase = declarative_base()
class ActiveTable(Base):
class ActiveTable(TokenBase):
__tablename__ = "activeTable"
id = Column('id', Integer, primary_key=True)
@ -15,9 +16,12 @@ class ActiveTable(Base):
parentid = Column('parentid', Integer)
consumedpid = Column('consumedpid', String)
transferBalance = Column('transferBalance', Float)
addressBalance = Column('addressBalance', Float)
orphaned_parentid = Column('orphaned_parentid', Integer)
blockNumber = Column('blockNumber', Integer)
class ConsumedTable(Base):
class ConsumedTable(TokenBase):
__tablename__ = "consumedTable"
primaryKey = Column('primaryKey', Integer, primary_key=True)
@ -26,9 +30,12 @@ class ConsumedTable(Base):
parentid = Column('parentid', Integer)
consumedpid = Column('consumedpid', String)
transferBalance = Column('transferBalance', Float)
addressBalance = Column('addressBalance', Float)
orphaned_parentid = Column('orphaned_parentid', Integer)
blockNumber = Column('blockNumber', Integer)
class TransferLogs(Base):
class TransferLogs(TokenBase):
__tablename__ = "transferlogs"
primary_key = Column('id', Integer, primary_key=True)
@ -42,7 +49,7 @@ class TransferLogs(Base):
transactionHash = Column('transactionHash', String)
class TransactionHistory(Base):
class TransactionHistory(TokenBase):
__tablename__ = "transactionHistory"
primary_key = Column('id', Integer, primary_key=True)
@ -59,7 +66,7 @@ class TransactionHistory(Base):
parsedFloData = Column('parsedFloData', String)
class TokenContractAssociation(Base):
class TokenContractAssociation(TokenBase):
__tablename__ = "tokenContractAssociation"
primary_key = Column('id', Integer, primary_key=True)
@ -116,14 +123,81 @@ class ContractTransactionHistory(ContractBase):
parsedFloData = Column('parsedFloData', String)
class RejectedContractTransactionHistory(SystemBase):
__tablename__ = "rejectedContractTransactionHistory"
class ContractDeposits(ContractBase):
__tablename__ = "contractdeposits"
id = Column('id', Integer, primary_key=True)
depositorAddress = Column('depositorAddress', String)
depositAmount = Column('depositAmount', Float)
depositBalance = Column('depositBalance', Float)
expiryTime = Column('expiryTime', String)
unix_expiryTime = Column('unix_expiryTime', Integer)
status = Column('status', String)
transactionHash = Column('transactionHash', String)
blockNumber = Column('blockNumber', Integer)
blockHash = Column('blockHash', String)
class ConsumedInfo(ContractBase):
__tablename__ = "consumedinfo"
id = Column('id', Integer, primary_key=True)
id_deposittable = Column('id_deposittable', Integer)
transactionHash = Column('transactionHash', String)
blockNumber = Column('blockNumber', Integer)
class ContractWinners(ContractBase):
__tablename__ = "contractwinners"
id = Column('id', Integer, primary_key=True)
participantAddress = Column('participantAddress', String)
winningAmount = Column('winningAmount', Float)
userChoice = Column('userChoice', String)
transactionHash = Column('transactionHash', String)
blockNumber = Column('blockNumber', Integer)
blockHash = Column('blockHash', String)
class ContractStructure2(ContinuosContractBase):
__tablename__ = "contractstructure"
id = Column('id', Integer, primary_key=True)
attribute = Column('attribute', String)
index = Column('index', Integer)
value = Column('value', String)
class ContractParticipants2(ContinuosContractBase):
__tablename__ = "contractparticipants"
id = Column('id', Integer, primary_key=True)
participantAddress = Column('participantAddress', String)
tokenAmount = Column('tokenAmount', Float)
transactionHash = Column('transactionHash', String)
blockNumber = Column('blockNumber', Integer)
blockHash = Column('blockHash', String)
class ContractDeposits2(ContinuosContractBase):
__tablename__ = "contractdeposits"
id = Column('id', Integer, primary_key=True)
depositorAddress = Column('depositorAddress', String)
depositAmount = Column('depositAmount', Float)
expiryTime = Column('expiryTime', String)
status = Column('status', String)
transactionHash = Column('transactionHash', String)
blockNumber = Column('blockNumber', Integer)
blockHash = Column('blockHash', String)
class ContractTransactionHistory2(ContinuosContractBase):
__tablename__ = "contractTransactionHistory"
primary_key = Column('id', Integer, primary_key=True)
transactionType = Column('transactionType', String)
transactionSubType = Column('transactionSubType', String)
contractName = Column('contractName', String)
contractAddress = Column('contractAddress', String)
sourceFloAddress = Column('sourceFloAddress', String)
destFloAddress = Column('destFloAddress', String)
transferAmount = Column('transferAmount', Float)
@ -133,26 +207,6 @@ class RejectedContractTransactionHistory(SystemBase):
transactionHash = Column('transactionHash', String)
blockchainReference = Column('blockchainReference', String)
jsonData = Column('jsonData', String)
rejectComment = Column('rejectComment', String)
parsedFloData = Column('parsedFloData', String)
class RejectedTransactionHistory(SystemBase):
__tablename__ = "rejectedTransactionHistory"
primary_key = Column('id', Integer, primary_key=True)
tokenIdentification = Column('tokenIdentification', String)
sourceFloAddress = Column('sourceFloAddress', String)
destFloAddress = Column('destFloAddress', String)
transferAmount = Column('transferAmount', Float)
blockNumber = Column('blockNumber', Integer)
blockHash = Column('blockHash', String)
time = Column('time', Integer)
transactionHash = Column('transactionHash', String)
blockchainReference = Column('blockchainReference', String)
jsonData = Column('jsonData', String)
rejectComment = Column('rejectComment', String)
transactionType = Column('transactionType', String)
parsedFloData = Column('parsedFloData', String)
@ -206,19 +260,89 @@ class TokenAddressMapping(SystemBase):
blockHash = Column('blockHash', String)
class LatestTransactions(LatestCacheBase):
__tablename__ = "latestTransactions"
class DatabaseTypeMapping(SystemBase):
__tablename__ = "databaseTypeMapping"
id = Column('id', Integer, primary_key=True)
db_name = Column('db_name', String)
db_type = Column('db_type', String)
keyword = Column('keyword', String)
object_format = Column ('object_format', String)
blockNumber = Column('blockNumber', Integer)
class TimeActions(SystemBase):
__tablename__ = "time_actions"
id = Column('id', Integer, primary_key=True)
time = Column('time', String)
activity = Column('activity', String)
status = Column('status', String)
contractName = Column('contractName', String)
contractAddress = Column('contractAddress', String)
contractType = Column('contractType', String)
tokens_db = Column('tokens_db', String)
parsed_data = Column('parsed_data', String)
transactionHash = Column('transactionHash', String)
blockNumber = Column('blockNumber', String)
blockNumber = Column('blockNumber', Integer)
class RejectedContractTransactionHistory(SystemBase):
__tablename__ = "rejectedContractTransactionHistory"
primary_key = Column('id', Integer, primary_key=True)
transactionType = Column('transactionType', String)
transactionSubType = Column('transactionSubType', String)
contractName = Column('contractName', String)
contractAddress = Column('contractAddress', String)
sourceFloAddress = Column('sourceFloAddress', String)
destFloAddress = Column('destFloAddress', String)
transferAmount = Column('transferAmount', Float)
blockNumber = Column('blockNumber', Integer)
blockHash = Column('blockHash', String)
time = Column('time', Integer)
transactionHash = Column('transactionHash', String)
blockchainReference = Column('blockchainReference', String)
jsonData = Column('jsonData', String)
rejectComment = Column('rejectComment', String)
parsedFloData = Column('parsedFloData', String)
class RejectedTransactionHistory(SystemBase):
__tablename__ = "rejectedTransactionHistory"
primary_key = Column('id', Integer, primary_key=True)
tokenIdentification = Column('tokenIdentification', String)
sourceFloAddress = Column('sourceFloAddress', String)
destFloAddress = Column('destFloAddress', String)
transferAmount = Column('transferAmount', Float)
blockNumber = Column('blockNumber', Integer)
blockHash = Column('blockHash', String)
time = Column('time', Integer)
transactionHash = Column('transactionHash', String)
blockchainReference = Column('blockchainReference', String)
jsonData = Column('jsonData', String)
rejectComment = Column('rejectComment', String)
transactionType = Column('transactionType', String)
parsedFloData = Column('parsedFloData', String)
class LatestTransactions(LatestCacheBase):
__tablename__ = "latestTransactions"
id = Column('id', Integer, primary_key=True)
transactionHash = Column('transactionHash', String)
blockNumber = Column('blockNumber', Integer)
jsonData = Column('jsonData', String)
transactionType = Column('transactionType', String)
parsedFloData = Column('parsedFloData', String)
db_reference = Column('db_reference', String)
class LatestBlocks(LatestCacheBase):
__tablename__ = "latestBlocks"
id = Column('id', Integer, primary_key=True)
blockNumber = Column('blockNumber', String)
blockNumber = Column('blockNumber', Integer)
blockHash = Column('blockHash', String)
jsonData = Column('jsonData', String)

View File

@ -0,0 +1,281 @@
"""
DEFINITIONS:
Special character words - A word immediately followed by one of the special characters (#, *, @)
#-word - Token name
@-word - Smart Contract name
*-word - Smart Contract type
"""
"""
FIND RULES
1. Identify all Special character words in a text string >> and output as a list of those words
2. Apply rule 1, but only before a marker or keyword like ":" and output as a list of those words
3. Find a number in the string
5. Check for an occurrence of an exact order/pattern of special character words
eg. for a one-time-event smart contract (identified using the *-word), the existence of a #-word should be checked before the ':' and the #-word output
for a continuous-event smart contract (identified using the *-word, with subtype tokenswap), the #-words should be checked after the ':' and the two hash words output
6. Given a string of the type contract conditions, format and output an object string by removing = and by removing number references
7. Identify all the special character words in a text string such that spaces are not taken into account, for eg. Input string => "contract-conditions :(2) accepting_token=rupee#(3) selling_token = bioscope# " |
Output string => ["rupee#","bioscope#"]
"""
def findrule1(rawstring, special_character):
wordList = []
for word in rawstring.split(' '):
if word.endswith(special_character) and len(word) != 1:
wordList.append(word)
return wordList
def findrule3(text):
base_units = {'thousand': 10 ** 3, 'million': 10 ** 6, 'billion': 10 ** 9, 'trillion': 10 ** 12}
textList = text.split(' ')
counter = 0
value = None
for idx, word in enumerate(textList):
try:
result = float(word)
if textList[idx + 1] in base_units:
value = result * base_units[textList[idx + 1]]
counter = counter + 1
else:
value = result
counter = counter + 1
except:
for unit in base_units:
result = word.split(unit)
if len(result) == 2 and result[1] == '' and result[0] != '':
try:
value = float(result[0]) * base_units[unit]
counter = counter + 1
except:
continue
if counter == 1:
return value
else:
return None
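# A possible sketch for find rule 7 above (the helper name findrule7 is illustrative): strip all
# whitespace first so that "rupee #" is still found, then collect every run of name characters
# that is immediately followed by the special character.
import re

def findrule7(rawstring, special_character):
    nowhites = rawstring.replace(' ', '')
    return [match + special_character for match in re.findall(r'([A-Za-z0-9_-]+)' + re.escape(special_character), nowhites)]

# findrule7("contract-conditions :(2) accepting_token=rupee#(3) selling_token = bioscope# ", '#')  -> ['rupee#', 'bioscope#']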
"""
TRUE-FALSE RULES
1. Check if subtype = tokenswap exists in a given string,
2. Find if any one of the special words in the first list is present, ie. [start, create, incorporate], and none of the words in the second list is present, ie. [send, transfer, give]
"""
import re
def findWholeWord(w):
return re.compile(r'\b({0})\b'.format(w), flags=re.IGNORECASE).search
'''
findWholeWord('seek')('those who seek shall find') # -> <match object>
findWholeWord('word')('swordsmith')
'''
def truefalse_rule1(rawstring, string_tobe_checked):
nowhites_rawstring = rawstring.replace(" ","").lower()
if string_tobe_checked.replace(" ","").lower() in nowhites_rawstring:
return True
else:
return False
denied_list = ['transfer', 'send', 'give'] # keep everything lowercase
permitted_list = ['incorporate', 'create', 'start'] # keep everything lowercase
def truefalse_rule2(rawstring, permitted_list, denied_list):
# Find transfer , send , give
foundPermitted = None
foundDenied = None
for word in permitted_list:
if findWholeWord(word)(rawstring):
foundPermitted = word
break
for word in denied_list:
if findWholeWord(word)(rawstring):
foundDenied = word
break
if (foundPermitted is not None) and (foundDenied is None):
return True
else:
return False
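# Example (illustrative): "create 500 million rmt#" contains the permitted word "create" and
# none of the denied words, so truefalse_rule2 returns True; "send 10 rmt#" would return False.
# truefalse_rule2('create 500 million rmt#', permitted_list, denied_list)  # -> True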
def selectCateogry(rawstring, wordlist, category1, category2):
"""
CLASSIFY RULES
1. Based on various combinations of the special character words and special words, create categorizations
eg. 1.1 if there is only one #-word, then the flodata is related purely to token system
1.2 if there is one #-word, one @-word .. then it is related to the smart contract system, but cannot be a creation type since smart contract creation needs to specify the contract type with a *-word
1.3 if there is one
2. Check if it is of the value 'one-time-event' or 'continuos-event'
"""
"""
REJECT RULES
1. The number of *-words has to be equal to 1, ie. you can specify only one contract type at once, otherwise noise
2. *-word has to fall in the following type ['one-time-event*', 'continuous-event*'], otherwise noise
3. @-word should exist only before the : , otherwise noise
4. There should be only one @-word, otherwise noise
5. for a one-time-event smart contract (identified using one-time-event*), if there is no #-word before : -> reject as noise
6. for a one-time-event smart contract (identified using one-time-event*), if there is more than one #-word before : -> reject as noise
7. for a one-time-event smart contract (identified using one-time-event*), if there is/are #-word(s) after the colon -> reject as noise
8. for a continuous-event smart contract (identified using continuous-event*), if there is one or more #-word before : -> reject as noise
9. for a continuous-event smart contract (identified using continuous-event*) (with subtype token-swap), if there is only one, or more than two, #-words after : -> reject as noise
10.
"""
def rejectrule9(rawtext, starword):
pass
extractContractConditions(cleanstring, contracttype, blocktime=blockinfo['time'], marker=hashList[0][:-1])
# Token incorporation operation
## Existence of keyword
"""
APPLY RULES
1. After application of apply rule1, a parser rule will either return a value or will classify the result as noise
"""
def apply_rule1(*argv):
a = argv[0](*argv[1:])
if a is False:
return "noise"
elif a is True:
return a
# If any of the parser rules returns a value, then queue it for further processing, otherwise send noise to the output engine
apply_rule1(findrule1, rawstring, special_character)
def outputreturn(*argv):
if argv[0] == 'noise':
parsed_data = {'type': 'noise'}
elif argv[0] == 'token_incorporation':
parsed_data = {
'type': 'tokenIncorporation',
'flodata': argv[1], #string
'tokenIdentification': argv[2], #hashList[0][:-1]
'tokenAmount': argv[3] #initTokens
}
elif argv[0] == 'token_transfer':
parsed_data = {
'type': 'transfer',
'transferType': 'token',
'flodata': argv[1], #string
'tokenIdentification': argv[2], #hashList[0][:-1]
'tokenAmount': argv[3] #amount
}
elif argv[0] == 'one-time-event-userchoice-smartcontract-incorporation':
parsed_data = {
'type': 'smartContractIncorporation',
'contractType': 'one-time-event',
'tokenIdentification': argv[1], #hashList[0][:-1]
'contractName': argv[2], #atList[0][:-1]
'contractAddress': argv[3], #contractaddress[:-1]
'flodata': argv[4], #string
'contractConditions': {
'contractamount' : argv[5],
'minimumsubscriptionamount' : argv[6],
'maximumsubscriptionamount' : argv[7],
'payeeaddress' : argv[8],
'userchoice' : argv[9],
'expiryTime' : argv[10]
}
}
elif argv[0] == 'one-time-event-userchoice-smartcontract-participation':
parsed_data = {
'type': 'transfer',
'transferType': 'smartContract',
'flodata': argv[1], #string
'tokenIdentification': argv[2], #hashList[0][:-1]
'operation': 'transfer',
'tokenAmount': argv[3], #amount
'contractName': argv[4], #atList[0][:-1]
'userChoice': argv[5] #userChoice
}
elif argv[0] == 'one-time-event-userchoice-smartcontract-trigger':
parsed_data = {
'type': 'smartContractPays',
'contractName': argv[1], #atList[0][:-1]
'triggerCondition': argv[2] #triggerCondition.group().strip()[1:-1]
}
elif argv[0] == 'one-time-event-time-smartcontract-incorporation':
parsed_data = {
'type': 'smartContractIncorporation',
'contractType': 'one-time-event',
'tokenIdentification': argv[1], #hashList[0][:-1]
'contractName': argv[2], #atList[0][:-1]
'contractAddress': argv[3], #contractaddress[:-1]
'flodata': argv[4], #string
'contractConditions': {
'contractamount' : argv[5],
'minimumsubscriptionamount' : argv[6],
'maximumsubscriptionamount' : argv[7],
'payeeaddress' : argv[8],
'expiryTime' : argv[9]
}
}
elif argv[0] == 'one-time-event-time-smartcontract-participation':
parsed_data = {
'type': 'transfer',
'transferType': 'smartContract',
'flodata': argv[1], #string
'tokenIdentification': argv[2], #hashList[0][:-1]
'operation': 'transfer',
'tokenAmount': argv[3], #amount
'contractName': argv[4] #atList[0][:-1]
}
elif argv[0] == 'continuos-event-token-swap-incorporation':
parsed_data = {
'type': 'smartContractIncorporation',
'contractType': 'continuos-event',
'tokenIdentification': argv[1], #hashList[0][:-1]
'contractName': argv[2], #atList[0][:-1]
'contractAddress': argv[3], #contractaddress[:-1]
'flodata': argv[4], #string
'contractConditions': {
'subtype' : argv[5], #tokenswap
'accepting_token' : argv[6],
'selling_token' : argv[7],
'pricetype' : argv[8],
'price' : argv[9],
}
}
elif argv[0] == 'continuos-event-token-swap-deposit':
parsed_data = {
'type': 'smartContractDeposit',
'tokenIdentification': argv[1], #hashList[0][:-1]
'depositAmount': argv[2], #depositAmount
'contractName': argv[3], #atList[0][:-1]
'flodata': argv[4], #string
'depositConditions': {
'expiryTime' : argv[5]
}
}
elif argv[0] == 'continuos-event-token-swap-participation':
parsed_data = {
'type': 'smartContractParticipation',
'tokenIdentification': argv[1], #hashList[0][:-1]
'sendAmount': argv[2], #sendAmount
'receiveAmount': argv[3], #receiveAmount
'contractName': argv[4], #atList[0][:-1]
'flodata': argv[5] #string
}

1476
parsing.py

File diff suppressed because it is too large Load Diff

296
planning.txt Normal file
View File

@ -0,0 +1,296 @@
'''
TEMPLATE FOR SECOND STAGE AFTER INPUT CLASSIFIER
IF BLOCK If the output of input classifier is tokensystem-C,
JUST LINEARLY START BUILDING IT
then first start building the known outputs
// outputreturn('token_incorporation',f"{flodata}", f"{tokenname}", f"{tokenamount}")
f"{flodata} = rawstring
f"{tokenname}" = wordlist entry
tokensystem-C-resolved = Output of second stage classification
f"{tokenamount}" = find_number_function
'''
'''
The problem we are facing:
* Token transactions don't have * or @ symbols
* Smart Contract transactions have * , @ , # symbols
* Smart Contract transactions of the type one-time-event have 1 # before the colon
* Smart Contract transactions of the type continuous-event have 2 # after the colon
* So we are checking for hashes based on the type of smart contract(identified by *)
* But the above check disregards checking hashes in token transactions
'''
# Write down all the possible flodata( with all combinations possible) for
'''
Token creation
create 500 million rmt#
['#']
Token transfer
transfer 200 rmt#
['#']
One time event userchoice creation
Create Smart Contract with the name India-elections-2019@ of the type one-time-event* using the asset rmt# at the address F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1$ with contract-conditions: (1) contractAmount=0.001rmt (2) userChoices=Narendra Modi wins| Narendra Modi loses (3) expiryTime= Wed May 22 2019 21:00:00 GMT+0530
['@','*','#','$',':']
['@','*','#','$',':','#']
One time event userchoice participation
send 0.001 rmt# to india-elections-2019@ to FLO address F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1 with the userchoice:'narendra modi wins'
['#','@',':']
['#','@','$',':']
One time event userchoice trigger
india-elections-2019@ winning-choice:'narendra modi wins'
['@',':']
One time event timeevent creation
Create Smart Contract with the name India-elections-2019@ of the type one-time-event* using the asset rmt# at the address F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1$ with contract-conditions: (1) contractAmount=0.001rmt (2) expiryTime= Wed May 22 2019 21:00:00 GMT+0530
['@','*','#','$',':']
['@','*','#','$',':','#']
One time event timeevent participation
send 0.001 rmt# to india-elections-2019@ to FLO address F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1
['#','@']
['#','@','$']
Continuous event token swap creation
Create Smart Contract with the name swap-rupee-bioscope@ of the type continuous-event* at the address oRRCHWouTpMSPuL6yZRwFCuh87ZhuHoL78$ with contract-conditions :
(1) subtype = tokenswap
(2) accepting_token = rupee#
(3) selling_token = bioscope#
(4) price = '15'
(5) priceType = predetermined
(6) direction = oneway
['@','*','$',':','#','#']
Continuous event tokenswap deposit
Deposit 15 bioscope# to swap-rupee-bioscope@ its FLO address being oRRCHWouTpMSPuL6yZRwFCuh87ZhuHoL78$ with deposit-conditions: (1) expiryTime= Wed Nov 17 2021 21:00:00 GMT+0530
['#','@',':']
['#','@','$',':']
Continuous event tokenswap participation
Send 15 rupee# to swap-rupee-article@ its FLO address being FJXw6QGVVaZVvqpyF422Aj4FWQ6jm8p2dL$
['#','@']
['#','@','$']
'''
'''
['#'] - Token creation
['#'] - Token participation
['@','*','#','$',':'] - Smart contract creation user-choice
['@','*','#','$',':','#']
['#','@',':'] - Smart contract participation user-choice
['#','@','$',':']
['@',':'] - Smart contract trigger user-choice
['@','*','#','$',':'] - Smart contract creation - ote-timebased
['@','*','#','$',':','#']
['#','@'] - Smart contract participation - ote-timebased
['#','@','$']
['@','*','$',':','#','#'] - Smart contract creation - continuous event - tokenswap
['#','@',':'] - Smart contract deposit - continuous event - tokenswap
['#','@','$',':']
['#','@'] - Smart contract participation - continuous event - tokenswap
['#','@','$'] - Smart contract participation - continuous event - tokenswap
'''
'''
['#'] - Token creation
['#'] - Token participation
['@','*','#','$',':'] - Smart contract creation ote-userchoice
['@','*','#','$',':','#']
['@','*','#','$',':'] - Smart contract creation - ote-timebased
['@','*','#','$',':','#']
['#','@',':'] - Smart contract participation user-choice
['#','@','$',':']
['#','@',':'] - Smart contract deposit - continuous event - tokenswap
['#','@','$',':']
['@',':'] - Smart contract trigger user-choice
['#','@'] - Smart contract participation - ote-timebased
['#','@','$']
['#','@'] - Smart contract participation - continuous event - tokenswap
['#','@','$'] - Smart contract participation - continuous event - tokenswap
['@','*','$',':','#','#'] - Smart contract creation - continuous event - tokenswap
'''
'''
Conflicts -
1. Token creation | Token participation
2. Smart contract CREATION of the type one-time-event-userchoice | one-time-event-timebased
3. Smart contract PARTICIPATION user-choice | Smart contract DEPOSIT continuous-event token-swap
4. Smart contract PARTICIPATION one-time-event-timebased | Smart contract PARTICIPATION - continuous event - tokenswap
'''
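'''
A rough sketch of the position-based signature used in the lists above (the helper name special_signature is illustrative):
    def special_signature(rawstring):
        specials = {'#', '@', '*', '$', ':'}
        return [ch for ch in rawstring if ch in specials]
    # special_signature('create 500 million rmt#')  -> ['#']
    # special_signature("send 0.001 rmt# to india-elections-2019@ with the userchoice:'narendra modi wins'")  -> ['#', '@', ':']
    # the second signature is identical for user-choice participation and token-swap deposit, which is conflict 3 above
'''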
'''
Emerging parser design
Phase 1 - Input processing | Special character position based classification and noise detection (FINISHED)
Phase 2 - Conflict recognition (FINISHED)
Phase 3 - Category based keyword checks
Phase 4 - Parser rules for finding data
Phase 5 - Rules for applying parser rules
Phase 6 - Category based data field extraction
Phase 7 - Output formatting and return (FINISHED)
'''
'''
Allowed formats of Smart Contract and token names
1. The first character should always be a letter, lower case or upper case
2. The last character should always be a letter, lower case or upper case
3. The middle characters can also be a - or _
Check for FLO Address
Write checks for conditions inside contract conditions
Serious error handling for contract-conditions
* 2222:00 gives error
* contractAmount = 0.022rt gives error | check if space is allowed between 0.022 rt
'''
'''
What we need for NFT contract code
1. NFT-address mapping table in system.db
2. New main transaction category class
3. New sub-category for transfer category class ie. NFT transfer
NFT Smart Contract end cases
1. NFT against an address
2. NFT against another NFT
3.
flodata format for NFT
Create 1000 NFT with bioscope# with nft-details: (1) name = 'bioscope' (2) hash =
Create 100 albumname# as NFT with 2CF24DBA5FB0A30E26E83B2AC5B9E29E1B161E5C1FA7425E73043362938B9824 as asset hash
[#]
Rules
-----
DIFFERENCES BETWEEN TOKEN AND NFT
System.db will have a different entry
in creation the nft keyword will be extra
NFT Hash must be present
Creation and transfer amount .. only integer parts will be taken
Keyword nft must be present in both creation and transfer
'''
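'''
A possible check for the "NFT Hash must be present" rule above (regex and helper name are illustrative); the asset hash in the example is 64 hexadecimal characters.
    import re
    def has_nft_hash(rawstring):
        return re.search(r'\b[0-9A-Fa-f]{64}\b', rawstring) is not None
    # has_nft_hash('Create 100 albumname# as NFT with 2CF24DBA5FB0A30E26E83B2AC5B9E29E1B161E5C1FA7425E73043362938B9824 as asset hash')  -> True
'''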
'''
Need infinite tokens to create stable coins, so they can be created without worrying about the upper limit of the coins
'''
'''
Create another table in system.db; it simply records, in one place, what every database is
Database_name Database type
'''
'''
IDEA FOR NEW ROLLBACK SYSTEM - 24 Jan 2022
-------------------------------------------
245436
[
tx1 - rmt - 245436 - send 10 rmt#
tx2 - rmt - 245436 - send 4 rmt#
tx3 - rmt - 245436 - send 1 rmt#
tx4 - rmt - 245436 - send 100 rmt#
tx5 - rmt trigger(5) - 245436 - trigger
]
banana - txhash
orange - entries in activepid table
mangoes - entries in transaction history table
CURRENT SYSTEM
given a block , find out all the oranges in the block
given a block, find out all the bananas in the block and
for each banana, find corresponding databases( found through parsing of banana flodata and banana txdata)
- if token database then rollback, if contractDatabase then delete entry
NEW SYSTEM
given a block, find out all the oranges in the block
given a block, find out all the bananas in the block and their corresponding databases( found through parsing of banana flodata and banana txdata)
- start opening all those databases one by one | if token database then rollback, if contractDatabase then delete entry
send transaction -> receive the databases associated with it
'''
'''
Step 1
If the block that we are rolling back to is earlier than the database creation blockNumber, then delete the whole database without rolling back. Do this for both token databases and smart contract databases
Step 2
If the block being rolled back to is later than the database creation blockNumber, then invoke the database rollback function (rollback_database)
Step 3
Create a list of databases to be opened, along with their creation dates (the creation date is a block number). This will exclude the token and smart contract databases which are already deleted
Step 4
For each of the databases to be opened, roll the database back to the rollback point
rollback_database will take 2 inputs: the block number to which it has to roll back and the name of the database
Step 5
Create a delete function, which will delete from transactionHistory, latestCache and contractDatabase
To-do
------
* Integrate all the functions in the following order:
1, 2, 3, 4, 5 | That will finish the operation of taking the block number as input, and the rollback function will roll back up to the specified block number for all kinds of databases and all kinds of transactions
'''
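'''
A rough orchestration sketch of steps 1 to 5 above (every helper except rollback_database is a placeholder name):
    def rollback_to_block(rollback_block):
        # Step 1: databases created after the rollback point are deleted outright
        for db in list_token_and_contract_databases():
            if db.creation_block > rollback_block:
                delete_database(db)
        # Steps 2 and 3: collect the remaining databases (the already-deleted ones are excluded)
        to_rollback = [db for db in list_token_and_contract_databases() if db.creation_block <= rollback_block]
        # Step 4: roll each database back to the rollback point
        for db in to_rollback:
            rollback_database(rollback_block, db.name)
        # Step 5: delete entries after the rollback point from transactionHistory, latestCache and the contract databases
        delete_after_block(rollback_block)
'''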

View File

@ -2,7 +2,6 @@ arrow==1.1.0
bidict==0.21.2
certifi==2021.5.30
cffi==1.14.5
requests==2.25.0
chardet==3.0.4
greenlet==1.1.0
idna==2.10
@ -10,7 +9,8 @@ pycparser==2.20
python-dateutil==2.8.1
python-engineio==3.14.2
python-socketio==4.6.1
secp256k1==0.13.2
requests==2.25.0
six==1.16.0
SQLAlchemy==1.4.18
urllib3==1.26.5
pyflo-lib==2.0.9

View File

@ -0,0 +1,29 @@
DATABASES
* Database operations have to be optimized
- in terms of not repeating too often
- Save changes only when all business logic is approved, since we are working with multiple databases currently
* Too much repetition in database operations right now
* Database model classes, for SQLAlchemy, have to be optimized, ie. the base classes for tokenswap and one-time-event are totally different right now
* Make all database operations go through SQLAlchemy, no direct SQL commands
* Remove all position based queries
PROGRAM STRUCTURE
* Optimize overall program structure
NEW FEATURES
* Rollback feature
* When processing blocks from the websocket API, check the blockheight of the new block vs the latest block in the database | this is to make sure none of the transactions go missing
-----
processBlocks
* find the last scanned block in the database
* find the latest block at the API
* for loop for lastscannedblock to latestblock
* processEach transaction based on business logic
* Update system.db to reflect currently scanned block as the latest block
* Check for local smart contract triggers
* Check if any token swap contract deposits have to be returned
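A minimal sketch of the processBlocks outline above (all helper names are placeholders):
def processBlocks():
    last_scanned = get_last_scanned_block()          # last scanned block recorded in system.db
    latest_block = get_latest_block_from_api()       # latest block at the API
    for blockheight in range(last_scanned + 1, latest_block + 1):
        blockinfo = get_block(blockheight)
        for tx in blockinfo['txs']:
            process_transaction(tx, blockinfo)        # business logic per transaction
        update_last_scanned_block(blockheight)        # system.db now reflects the currently scanned block
        check_local_contract_triggers(blockinfo)      # local smart contract triggers
        return_expired_tokenswap_deposits(blockinfo)  # token swap deposits that have to be returned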

87
statef_processing.py Normal file
View File

@ -0,0 +1,87 @@
import requests
from operator import attrgetter
import json
import pdb
'''
USD-INR
https://api.exchangerate-api.com/v4/latest/usd
Parsed stateF
"stateF":{
"bitcoin_price_source":"bitpay",
"usd_inr_exchange_source":"bitpay"
}
'''
'''
stateF notes for amount split on contracts
stateF_object = {
"floaddresses": "oPkHWcvqBHfCortTHScrVBjXLsZhWie99C-oPkHWcvqBHfCortTHScrVBjXLsZhWie99C-oPkHWcvqBHfCortTHScrVBjXLsZhWie99C",
"splits": "10-20-30",
}
'''
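# A possible reading of the amount-split note above (illustrative helper, not wired into the
# processing below): the '-' separated floaddresses and splits line up positionally.
def split_amounts(split_object):
    addresses = split_object['floaddresses'].split('-')
    splits = [float(s) for s in split_object['splits'].split('-')]
    return list(zip(addresses, splits))  # [(floaddress, split), ...]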
# stateF
stateF_address = 'oPkHWcvqBHfCortTHScrVBjXLsZhWie99C'
stateF_object = {
"bitcoin_price_source":"bitpay",
"usd_inr_exchange_source":"bitpay"
}
# Flodata object
flodata_object = {
"bitpay": {
"bitcoin_price_source":{
"api" : "https://bitpay.com/api/rates",
"path" : [2,"rate"],
"data_type" : "float"
},
"usd_inr_exchange_source":{
"api" : "https://api.exchangerate-api.com/v4/latest/usd",
"path" : ["rates","INR"],
"data_type" : "float"
}
}
}
def pull_stateF(floID):
response = requests.get(f"https://flosight-testnet.ranchimall.net/api/txs/?address={floID}")
if response.status_code == 200:
address_details = response.json()
latest_stateF = address_details['txs'][0]['floData']
latest_stateF = json.loads(latest_stateF)
return latest_stateF['stateF']
else:
print('API response not valid')
def query_api(api_object):
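# api, path and data_type are read off in the insertion order of the keys (guaranteed for dicts on Python 3.7+)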
api, path, data_type = api_object.values()
response = requests.get(api)
if response.status_code == 200:
# Use path keys to reach the value
api_response = response.json()
for key in path:
api_response = api_response[key]
# todo: how to use datatype to convert
if data_type == 'float':
value_at_path = float(api_response)
return value_at_path
else:
print('API response not valid')
def process_stateF(stateF_object, stateF_address):
flodata_object = pull_stateF(stateF_address)
processed_values = {}
for key, value in stateF_object.items():
external_value = query_api(flodata_object[value][key])
processed_values[key] = external_value
return processed_values
if __name__ == '__main__':
processed_statef = process_stateF(stateF_object, stateF_address)
print(processed_statef)

219
tests/test_parsing.py Normal file
View File

@ -0,0 +1,219 @@
import unittest
import sys
sys.path.append("..")
import parsing
class TestParsing(unittest.TestCase):
blockinfo_stub = {'time': 25634}
def test_token_creation(self):
text = 'create 100 rmt#'
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'mainnet')
expected_result = {
'type': 'tokenIncorporation',
'flodata': 'create 100 rmt#',
'tokenIdentification': 'rmt',
'tokenAmount': 100.0,
'stateF': False
}
self.assertEqual(result, expected_result)
def test_token_transfer(self):
text = 'transfer 10.340 rmt#'
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'mainnet')
expected_result = {
'type': 'transfer',
'transferType': 'token',
'flodata': 'transfer 10.340 rmt#',
'tokenIdentification': 'rmt',
'tokenAmount': 10.34,
'stateF': False
}
self.assertEqual(result, expected_result)
def test_nft_creation(self):
pass
def test_nft_transfer(self):
pass
def test_infinite_token_incorporation(self):
text = 'create usd# as infinite-token'
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'mainnet')
expected_result = {
'type': 'infiniteTokenIncorporation',
'flodata': 'create usd# as infinite-token',
'tokenIdentification': 'usd',
'stateF': False
}
self.assertEqual(result, expected_result)
text = 'create usd# as infinite-token send'
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'mainnet')
expected_result = {'type': 'noise'}
self.assertEqual(result, expected_result)
def test_infinite_token_transfer(self):
pass
def test_onetimeevent_timetrigger_creation(self):
# contractamount
text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) contractAmount=0.1 end-contract-conditions'''
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
expected_result = {
'type': 'smartContractIncorporation',
'contractType': 'one-time-event',
'subtype': 'time-trigger',
'tokenIdentification': 'bioscope',
'contractName': 'all-crowd-fund-1',
'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz',
'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) contractAmount=0.1 end-contract-conditions',
'contractConditions': {
'contractAmount': '0.1',
'payeeAddress': {
'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0
},
'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530',
'unix_expiryTime': 1668387900.0
}
}
self.assertEqual(result, expected_result)
# minimumsubscriptionamount
text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 end-contract-conditions'''
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype':'time-trigger','tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 end-contract-conditions', 'contractConditions': {'minimumsubscriptionamount': '1.0', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}}
self.assertEqual(result, expected_result)
# maximumsubscriptionamount
text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) maximumsubscriptionamount=10 end-contract-conditions'''
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger','tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) maximumsubscriptionamount=10 end-contract-conditions', 'contractConditions': {'maximumsubscriptionamount': '10.0', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}}
self.assertEqual(result, expected_result)
# minimumsubscriptionamount | contractamount
text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1.600 (4) contractAmount=0.1 end-contract-conditions'''
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger', 'tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1.600 (4) contractAmount=0.1 end-contract-conditions', 'contractConditions': {'contractAmount': '0.1', 'minimumsubscriptionamount': '1.6', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}}
self.assertEqual(result, expected_result)
# maximumsubscriptionamount | contractamount
text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) maximumsubscriptionamount=10 (4) contractAmount=0.1 end-contract-conditions'''
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger', 'tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) maximumsubscriptionamount=10 (4) contractAmount=0.1 end-contract-conditions', 'contractConditions': {'contractAmount': '0.1', 'maximumsubscriptionamount': '10.0', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}}
self.assertEqual(result, expected_result)
# minimumsubscriptionamount | maximumsubscriptionamount
text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 (4) maximumsubscriptionamount=10 end-contract-conditions'''
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype':'time-trigger','tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 (4) maximumsubscriptionamount=10 end-contract-conditions', 'contractConditions': {'minimumsubscriptionamount': '1.0', 'maximumsubscriptionamount': '10.0', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}}
self.assertEqual(result, expected_result)
# minimumsubscriptionamount | maximumsubscriptionamount | contractamount
text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 (4) maximumsubscriptionamount=10 (5) contractAmount=0.1 end-contract-conditions'''
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger', 'tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 (4) maximumsubscriptionamount=10 (5) contractAmount=0.1 end-contract-conditions', 'contractConditions': {'contractAmount': '0.1', 'minimumsubscriptionamount': '1.0', 'maximumsubscriptionamount': '10.0', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}}
self.assertEqual(result, expected_result)
# With single payeeAddress with : format
text = "Create a smart contract of the name album-fund@ of the type one-time-event* using asset bioscope# at the FLO address ocsiFSsjek3UXKdHpBWF79qrGN6qbpxeMt$ with contract-conditions: (1) expiryTime= Thu May 04 2023 18:57:00 GMT+0530 (India Standard Time) (2) payeeAddress= objfBRUX5zn4W56aHhRn4DgH6xqeRWk6Xc:100 end-contract-conditions"
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger', 'tokenIdentification': 'bioscope', 'contractName': 'album-fund', 'contractAddress': 'ocsiFSsjek3UXKdHpBWF79qrGN6qbpxeMt', 'flodata': 'Create a smart contract of the name album-fund@ of the type one-time-event* using asset bioscope# at the FLO address ocsiFSsjek3UXKdHpBWF79qrGN6qbpxeMt$ with contract-conditions: (1) expiryTime= Thu May 04 2023 18:57:00 GMT+0530 (India Standard Time) (2) payeeAddress= objfBRUX5zn4W56aHhRn4DgH6xqeRWk6Xc:100 end-contract-conditions', 'contractConditions': {'payeeAddress': {'objfBRUX5zn4W56aHhRn4DgH6xqeRWk6Xc': 100.0}, 'expiryTime': 'thu may 04 2023 18:57:00 gmt+0530 (india standard time)', 'unix_expiryTime': 1683246420.0}}
self.assertEqual(result, expected_result)
# With single payeeAddress with normal format
text = "Create a smart contract of the name album-fund@ of the type one-time-event* using asset bioscope# at the FLO address ocsiFSsjek3UXKdHpBWF79qrGN6qbpxeMt$ with contract-conditions: (1) expiryTime= Thu May 04 2023 18:57:00 GMT+0530 (India Standard Time) (2) payeeAddress= objfBRUX5zn4W56aHhRn4DgH6xqeRWk6Xc end-contract-conditions"
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger', 'tokenIdentification': 'bioscope', 'contractName': 'album-fund', 'contractAddress': 'ocsiFSsjek3UXKdHpBWF79qrGN6qbpxeMt', 'flodata': 'Create a smart contract of the name album-fund@ of the type one-time-event* using asset bioscope# at the FLO address ocsiFSsjek3UXKdHpBWF79qrGN6qbpxeMt$ with contract-conditions: (1) expiryTime= Thu May 04 2023 18:57:00 GMT+0530 (India Standard Time) (2) payeeAddress= objfBRUX5zn4W56aHhRn4DgH6xqeRWk6Xc end-contract-conditions', 'contractConditions': {'payeeAddress': {'objfBRUX5zn4W56aHhRn4DgH6xqeRWk6Xc': 100}, 'expiryTime': 'thu may 04 2023 18:57:00 gmt+0530 (india standard time)', 'unix_expiryTime': 1683246420.0}}
self.assertEqual(result, expected_result)
# With multiple payeeAddress with : format
text = "Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 (4) maximumsubscriptionamount=10 (5) contractAmount=0.1 end-contract-conditions"
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger', 'tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 (4) maximumsubscriptionamount=10 (5) contractAmount=0.1 end-contract-conditions', 'contractConditions': {'contractAmount': '0.1', 'minimumsubscriptionamount': '1.0', 'maximumsubscriptionamount': '10.0', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}}
self.assertEqual(result, expected_result)
def test_onetimeevent_timetrigger_participation(self):
text = '''send 2.2 bioscope# to all-crowd-fund@'''
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
expected_result = {'type': 'transfer', 'transferType': 'smartContract', 'flodata': 'send 2.2 bioscope# to all-crowd-fund@', 'tokenIdentification': 'bioscope', 'tokenAmount': 2.2, 'contractName': 'all-crowd-fund'}
self.assertEqual(result, expected_result)
text = 'transfer 6.20000 bioscope# to all-crowd-fund-7@'
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
expected_result = {'type': 'transfer', 'transferType': 'smartContract', 'flodata': 'transfer 6.20000 bioscope# to all-crowd-fund-7@', 'tokenIdentification': 'bioscope', 'tokenAmount': 6.2, 'contractName': 'all-crowd-fund-7'}
self.assertEqual(result, expected_result)
text = 'transfer 6.20000 bioscope# to all-crowd-fund-7@ 24'
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
expected_result = {'type': 'noise'}
self.assertEqual(result, expected_result)
text = 'transfer 6.20000 bioscope# to all-crowd-fund-7@ 24 '
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
expected_result = {'type': 'noise'}
self.assertEqual(result, expected_result)
text = '6.20.000 transfer bioscope# to all-crowd-fund-7@ 24'
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
expected_result = {'type': 'transfer', 'transferType': 'smartContract', 'flodata': '6.20.000 transfer bioscope# to all-crowd-fund-7@ 24', 'tokenIdentification': 'bioscope', 'tokenAmount': 24.0, 'contractName': 'all-crowd-fund-7'}
self.assertEqual(result, expected_result)
def test_onetimeevent_externaltrigger_creation(self):
# with the contractAmount condition
text = '''Create a smart contract of the name twitter-survive@ of the type one-time-event* using asset bioscope# at the FLO address oVbebBNuERWbouDg65zLfdataWEMTnsL8r$ with contract-conditions:(1) expiryTime= Sun Nov 15 2022 14:55:00 GMT+0530 (2) userchoices= survives | dies (3) contractAmount=0.02 end-contract-conditions'''
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
expected_result = {
'type': 'smartContractIncorporation',
'contractType': 'one-time-event',
'subtype': 'external-trigger',
'tokenIdentification': 'bioscope',
'contractName': 'twitter-survive',
'contractAddress': 'oVbebBNuERWbouDg65zLfdataWEMTnsL8r',
'flodata': 'Create a smart contract of the name twitter-survive@ of the type one-time-event* using asset bioscope# at the FLO address oVbebBNuERWbouDg65zLfdataWEMTnsL8r$ with contract-conditions: (1) expiryTime= Sun Nov 15 2022 14:55:00 GMT+0530 (2) userchoices= survives | dies (3) contractAmount=0.02 end-contract-conditions',
'contractConditions': {
'contractAmount': '0.02',
'userchoices': "{0: 'survives', 1: 'dies'}",
'expiryTime': 'sun nov 15 2022 14:55:00 gmt+0530',
'unix_expiryTime': 1668543900.0
}
}
self.assertEqual(result, expected_result)
def test_tokenswap_deposits(self):
text = 'Deposit 1 bioscope# to swap-rupee-bioscope-1@ its FLO address being oTzrcpLPRXsejSdYQ3XN6V4besrAPuJQrk$ with deposit-conditions: (1) expiryTime= Thu Apr 13 2023 21:45:00 GMT+0530'
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
expected_result = {
'type': 'smartContractDeposit',
'tokenIdentification': 'bioscope',
'depositAmount': 1.0,
'contractName': 'swap-rupee-bioscope-1',
'flodata': 'Deposit 1 bioscope# to swap-rupee-bioscope-1@ its FLO address being oTzrcpLPRXsejSdYQ3XN6V4besrAPuJQrk$ with deposit-conditions: (1) expiryTime= Thu Apr 13 2023 21:45:00 GMT+0530',
'depositConditions': {
'expiryTime': 'thu apr 13 2023 21:45:00 gmt+0530'
},
'stateF': False}
self.assertEqual(result, expected_result)
def test_contract_trigger(self):
text = 'contract@ triggerCondition:"twitter-survives"'
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
expected_result = {
'type': 'smartContractPays',
'contractName': 'contract',
'triggerCondition': 'twitter-survives',
'stateF': False}
self.assertEqual(result, expected_result)
def test_deposit_invalid(self):
text = 'Deposit 1 bioscope# to swap-rupee-bioscope-1@ its FLO address being oTzrcpLPRXsejSdYQ3XN6V4besrAPuJQrk$ with deposit-conditions: (1) expiryTime= Tue, 25 Apr 2023 13:40:00 GMT'
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
expected_result = {'type': 'noise'}
self.assertEqual(result, expected_result)
if __name__ == '__main__':
unittest.main()
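# Illustrative only (not part of the original test module): the same suite can be
# run programmatically, assuming this file lives at tests/test_parsing.py and the
# snippet is executed from the repository root.
#
#   import unittest
#   suite = unittest.defaultTestLoader.discover('tests', pattern='test_parsing.py')
#   unittest.TextTestRunner(verbosity=2).run(suite)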

File diff suppressed because it is too large

2498
tracktokens_smartcontracts.py Executable file

File diff suppressed because it is too large

92
util_db_connect.py Normal file
View File

@ -0,0 +1,92 @@
import argparse
import configparser
import json
import logging
import os
import pdb
import shutil
import sys
import pyflo
import requests
import socketio
from sqlalchemy import create_engine, func
from sqlalchemy.orm import sessionmaker
import time
import arrow
import parsing
from datetime import datetime
from ast import literal_eval
from models import (
    SystemData, TokenBase, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory,
    TokenContractAssociation, RejectedTransactionHistory, ContractBase, ContractStructure,
    ContractParticipants, ContractTransactionHistory, ContractDeposits, ConsumedInfo,
    ContractWinners, ContinuosContractBase, ContractStructure2, ContractParticipants2,
    ContractDeposits2, ContractTransactionHistory2, SystemBase, ActiveContracts,
    ContractAddressMapping, TokenAddressMapping, DatabaseTypeMapping, TimeActions,
    RejectedContractTransactionHistory, LatestCacheBase, LatestTransactions, LatestBlocks
)
from statef_processing import process_stateF
# Configuration of required variables
config = configparser.ConfigParser()
config.read('config.ini')
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s:%(name)s:%(message)s')
file_handler = logging.FileHandler(os.path.join(config['DEFAULT']['DATA_PATH'],'tracking.log'))
file_handler.setLevel(logging.INFO)
file_handler.setFormatter(formatter)
stream_handler = logging.StreamHandler()
stream_handler.setFormatter(formatter)
logger.addHandler(file_handler)
logger.addHandler(stream_handler)
def create_database_connection(type, parameters):
if type == 'token':
path = os.path.join(config['DEFAULT']['DATA_PATH'], 'tokens', f"{parameters['token_name']}.db")
engine = create_engine(f"sqlite:///{path}", echo=True)
elif type == 'smart_contract':
path = os.path.join(config['DEFAULT']['DATA_PATH'], 'smartContracts', f"{parameters['contract_name']}-{parameters['contract_address']}.db")
engine = create_engine(f"sqlite:///{path}", echo=True)
elif type == 'system_dbs':
path = os.path.join(config['DEFAULT']['DATA_PATH'], 'system.db')
engine = create_engine(f"sqlite:///{path}", echo=False)
elif type == 'latest_cache':
path = os.path.join(config['DEFAULT']['DATA_PATH'], 'latestCache.db')
engine = create_engine(f"sqlite:///{path}", echo=False)
else:
raise ValueError(f"Unknown database type: {type}")
connection = engine.connect()
return connection
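# Illustrative usage sketch (the token name is a placeholder and assumes
# DATA_PATH/tokens/bioscope.db already exists); not part of the original module:
#
#   from sqlalchemy import text
#   conn = create_database_connection('token', {'token_name': 'bioscope'})
#   print(conn.execute(text("SELECT 1")).scalar())
#   conn.close()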
def create_database_session_orm(type, parameters, base):
if type == 'token':
path = os.path.join(config['DEFAULT']['DATA_PATH'], 'tokens', f"{parameters['token_name']}.db")
engine = create_engine(f"sqlite:///{path}", echo=True)
base.metadata.create_all(bind=engine)
session = sessionmaker(bind=engine)()
elif type == 'smart_contract':
path = os.path.join(config['DEFAULT']['DATA_PATH'], 'smartContracts', f"{parameters['contract_name']}-{parameters['contract_address']}.db")
engine = create_engine(f"sqlite:///{path}", echo=True)
base.metadata.create_all(bind=engine)
session = sessionmaker(bind=engine)()
elif type == 'system_dbs':
path = os.path.join(config['DEFAULT']['DATA_PATH'], f"{parameters['db_name']}.db")
engine = create_engine(f"sqlite:///{path}", echo=False)
base.metadata.create_all(bind=engine)
session = sessionmaker(bind=engine)()
return session
# Connect to system.db with a session
'''session = create_database_session_orm('system_dbs', {'db_name':'system1'}, SystemBase)
subquery_filter = session.query(TimeActions.id).group_by(TimeActions.transactionHash).having(func.count(TimeActions.transactionHash)==1).subquery()
contract_deposits = session.query(TimeActions).filter(TimeActions.id.in_(subquery_filter), TimeActions.status=='active', TimeActions.activity=='contract-deposit').all()
for contract in contract_deposits:
print(contract.transactionHash)'''
systemdb_session = create_database_session_orm('system_dbs', {'db_name':'system'}, SystemBase)
query = systemdb_session.query(TokenAddressMapping).filter(TokenAddressMapping.tokenAddress == 'contractAddress')
results = query.all()
pdb.set_trace()
print('Lets investigate this now')

475
util_rollback.py Normal file
View File

@ -0,0 +1,475 @@
import argparse
from sqlalchemy import create_engine, func
from sqlalchemy.orm import sessionmaker
from models import (
    SystemData, TokenBase, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory,
    TokenContractAssociation, RejectedTransactionHistory, ContractBase, ContractStructure,
    ContractParticipants, ContractTransactionHistory, ContractDeposits, ConsumedInfo,
    ContractWinners, ContinuosContractBase, ContractStructure2, ContractParticipants2,
    ContractDeposits2, ContractTransactionHistory2, SystemBase, ActiveContracts,
    ContractAddressMapping, TokenAddressMapping, DatabaseTypeMapping, TimeActions,
    RejectedContractTransactionHistory, LatestCacheBase, LatestTransactions, LatestBlocks
)
from ast import literal_eval
import os
import json
import logging
import pdb
import sys
apppath = os.path.dirname(os.path.realpath(__file__))
# helper functions
def check_database_existence(type, parameters):
if type == 'token':
return os.path.isfile(f"./tokens/{parameters['token_name']}.db")
if type == 'smart_contract':
return os.path.isfile(f"./smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db")
def create_database_connection(type, parameters):
if type == 'token':
engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True)
elif type == 'smart_contract':
engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True)
elif type == 'system_dbs':
engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False)
connection = engine.connect()
return connection
def create_database_session_orm(type, parameters, base):
if type == 'token':
engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True)
base.metadata.create_all(bind=engine)
session = sessionmaker(bind=engine)()
elif type == 'smart_contract':
engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True)
base.metadata.create_all(bind=engine)
session = sessionmaker(bind=engine)()
elif type == 'system_dbs':
engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False)
base.metadata.create_all(bind=engine)
session = sessionmaker(bind=engine)()
else:
pdb.set_trace()
return session
def inspect_parsed_flodata(parsed_flodata, inputAddress, outputAddress):
if parsed_flodata['type'] == 'transfer':
if parsed_flodata['transferType'] == 'token':
return {'type':'tokentransfer', 'token_db':f"{parsed_flodata['tokenIdentification']}", 'token_amount':f"{parsed_flodata['tokenAmount']}"}
if parsed_flodata['transferType'] == 'smartContract':
return {'type':'smartContract', 'contract_db': f"{parsed_flodata['contractName']}-{outputAddress}" ,'accepting_token_db':f"{parsed_flodata['']}", 'receiving_token_db':f"{parsed_flodata['tokenIdentification']}" ,'token_amount':f"{parsed_flodata['tokenAmount']}"}
if parsed_flodata['transferType'] == 'swapParticipation':
return {'type':'swapParticipation', 'contract_db': f"{parsed_flodata['contractName']}-{outputAddress}" ,'accepting_token_db':f"{parsed_flodata['']}", 'receiving_token_db':f"{parsed_flodata['tokenIdentification']}" ,'token_amount':f"{parsed_flodata['tokenAmount']}"}
if parsed_flodata['transferType'] == 'nft':
return {'type':'nfttransfer', 'nft_db':f"{parsed_flodata['tokenIdentification']}", 'token_amount':f"{parsed_flodata['tokenAmount']}"}
if parsed_flodata['type'] == 'tokenIncorporation':
return {'type':'tokenIncorporation', 'token_db':f"{parsed_flodata['tokenIdentification']}", 'token_amount':f"{parsed_flodata['tokenAmount']}"}
if parsed_flodata['type'] == 'smartContractPays':
# the token identification is read from the contract's own database (contract name + output address)
sc_session = create_database_session_orm('smart_contract', {'contract_name':f"{parsed_flodata['contractName']}", 'contract_address':f"{outputAddress}"}, ContractBase)
token_db = sc_session.query(ContractStructure.value).filter(ContractStructure.attribute=='tokenIdentification').first()[0]
return {'type':'smartContractPays', 'token_db':f"{token_db}" , 'contract_db':f"{parsed_flodata['contractName']}-{outputAddress}", 'triggerCondition':f"{parsed_flodata['triggerCondition']}"}
if parsed_flodata['type'] == 'smartContractIncorporation':
return {'type':'smartContractIncorporation', 'contract_db':f"{parsed_flodata['contractName']}-{outputAddress}", 'triggerCondition':f"{parsed_flodata['triggerCondition']}"}
def getDatabase_from_parsedFloData(parsed_flodata, inputAddress, outputAddress):
tokenlist = []
contractlist = []
if parsed_flodata['type'] == 'transfer':
if parsed_flodata['transferType'] == 'token':
#return {'type':'token_db', 'token_db':f"{parsed_flodata['tokenIdentification']}"}
tokenlist.append(parsed_flodata['tokenIdentification'])
elif parsed_flodata['transferType'] == 'smartContract':
#return {'type':'smartcontract_db', 'contract_db': f"{parsed_flodata['contractName']}-{outputAddress}" ,'token_db':f"{parsed_flodata['tokenIdentification']}"}
tokenlist.append(parsed_flodata['tokenIdentification'])
contractlist.append(f"{parsed_flodata['contractName']}-{outputAddress}")
elif parsed_flodata['transferType'] == 'swapParticipation':
#return {'type':'swapcontract_db', 'contract_db': f"{parsed_flodata['contractName']}-{outputAddress}" ,'accepting_token_db':f"{parsed_flodata['contract-conditions']['accepting_token']}", 'selling_token_db':f"{parsed_flodata['contract-conditions']['selling_token']}"}
tokenlist.append(parsed_flodata['contract-conditions']['accepting_token'])
tokenlist.append(parsed_flodata['contract-conditions']['selling_token'])
contractlist.append(f"{parsed_flodata['contractName']}-{outputAddress}")
elif parsed_flodata['transferType'] == 'nft':
#return {'type':'nft_db', 'token_db':f"{parsed_flodata['tokenIdentification']}"}
tokenlist.append(parsed_flodata['tokenIdentification'])
elif parsed_flodata['type'] == 'smartContractPays':
# the token identification is read from the contract's own database (contract name + output address)
sc_session = create_database_session_orm('smart_contract', {'contract_name':f"{parsed_flodata['contractName']}", 'contract_address':f"{outputAddress}"}, ContractBase)
token_db = sc_session.query(ContractStructure.value).filter(ContractStructure.attribute=='tokenIdentification').first()[0]
#return {'type':'smartcontract_db', 'contract_db':f"{parsed_flodata['contractName']}-{outputAddress}", 'token_db':f"{token_db}"}
tokenlist.append(token_db)
contractlist.append(f"{parsed_flodata['contractName']}-{outputAddress}")
elif parsed_flodata['type'] == 'smartContractIncorporation':
#return {'type':'smartcontract_db', 'contract_db':f"{parsed_flodata['contractName']}-{outputAddress}"}
contractlist.append(f"{parsed_flodata['contractName']}-{outputAddress}")
elif parsed_flodata['type'] == 'tokenIncorporation':
#return {'type':'token_db', 'token_db':f"{parsed_flodata['tokenIdentification']}"}
tokenlist.append(parsed_flodata['tokenIdentification'])
return tokenlist, contractlist
def calc_pid_amount(transferBalance, consumedpid):
consumedpid_sum = 0
for key in list(consumedpid.keys()):
consumedpid_sum = consumedpid_sum + float(consumedpid[key])
return transferBalance - consumedpid_sum
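# Worked example (illustrative): if a row transferred 10 tokens and its consumedpid
# is {'4': 2.5, '7': 3.5}, the amount drawn from its parent id is 10 - (2.5 + 3.5) = 4.0,
# i.e. calc_pid_amount(10, {'4': 2.5, '7': 3.5}) == 4.0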
def find_addressBalance_from_floAddress(database_session, floAddress):
query_output = database_session.query(ActiveTable).filter(ActiveTable.address==floAddress, ActiveTable.addressBalance!=None).first()
if query_output is None:
return 0
else:
return query_output.addressBalance
def rollback_address_balance_processing(db_session, senderAddress, receiverAddress, transferBalance):
# Roll back the addressBalance bookkeeping for a single transfer:
# recompute both balances, then rewrite the most recent non-null addressBalance entries
# Calculation phase
current_receiverBalance = find_addressBalance_from_floAddress(db_session, receiverAddress)
current_senderBalance = find_addressBalance_from_floAddress(db_session ,senderAddress)
new_receiverBalance = current_receiverBalance - transferBalance
new_senderBalance = current_senderBalance + transferBalance
# Update phase
# - Sender: always rewrite the addressBalance on the sender's most recent ActiveTable row.
# - Receiver: if the rolled-back receiver balance is 0, leave the receiver's rows untouched;
#   otherwise rewrite the addressBalance on the receiver's second most recent row.
sender_query = db_session.query(ActiveTable).filter(ActiveTable.address==senderAddress).order_by(ActiveTable.id.desc()).first()
sender_query.addressBalance = new_senderBalance
if new_receiverBalance > 0:
receiver_query = db_session.query(ActiveTable).filter(ActiveTable.address==receiverAddress).order_by(ActiveTable.id.desc()).limit(2).all()
if len(receiver_query) == 2:
receiver_query[1].addressBalance = new_receiverBalance
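# Worked example (illustrative, hypothetical addresses): undoing a 5-token transfer
# from sender A to receiver B, where B's current addressBalance is 5 and A's is 0,
# gives new_receiverBalance = 0 and new_senderBalance = 5, so only A's most recent
# row is rewritten and B's rows are left untouched.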
def find_input_output_addresses(transaction_data):
# Create vinlist and outputlist
vinlist = []
querylist = []
for vin in transaction_data["vin"]:
vinlist.append([vin["address"][0], float(vin["value"])])
totalinputval = float(transaction_data["valueIn"])
# todo Rule 41 - Check if all the addresses in a transaction on the input side are the same
for idx, item in enumerate(vinlist):
if idx == 0:
temp = item[0]
continue
if item[0] != temp:
print(f"System has found more than one address as part of vin. Transaction {transaction_data['txid']} is rejected")
return 0
inputlist = [vinlist[0][0], totalinputval]
inputadd = vinlist[0][0]
# todo Rule 42 - If the number of vout is more than 2, reject the transaction
if len(transaction_data["vout"]) > 2:
print(f"System has found more than 2 address as part of vout. Transaction {transaction_data['txid']} is rejected")
return 0
# todo Rule 43 - A transaction accepted by the system has two vouts, 1. The FLO address of the receiver
# 2. Flo address of the sender as change address. If the vout address is change address, then the other adddress
# is the recevier address
outputlist = []
addresscounter = 0
inputcounter = 0
for obj in transaction_data["vout"]:
if obj["scriptPubKey"]["type"] == "pubkeyhash":
addresscounter = addresscounter + 1
if inputlist[0] == obj["scriptPubKey"]["addresses"][0]:
inputcounter = inputcounter + 1
continue
outputlist.append([obj["scriptPubKey"]["addresses"][0], obj["value"]])
if addresscounter == inputcounter:
outputlist = [inputlist[0]]
elif len(outputlist) != 1:
print(f"Transaction's change is not coming back to the input address. Transaction {transaction_data['txid']} is rejected")
return 0
else:
outputlist = outputlist[0]
return inputlist[0], outputlist[0]
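# Illustrative shape of the transaction_data dict consumed above (hypothetical
# values; only the fields that are actually read are shown):
#
#   {
#     "txid": "ab12...",
#     "valueIn": "1.5",
#     "vin": [{"address": ["oSenderAddress..."], "value": "1.5"}],
#     "vout": [
#       {"value": 0.2, "scriptPubKey": {"type": "pubkeyhash", "addresses": ["oReceiverAddress..."]}},
#       {"value": 1.3, "scriptPubKey": {"type": "pubkeyhash", "addresses": ["oSenderAddress..."]}}
#     ]
#   }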
def rollback_database(blockNumber, dbtype, dbname):
if dbtype == 'token':
# Connect to database
db_session = create_database_session_orm('token', {'token_name':dbname}, TokenBase)
while(True):
subqry = db_session.query(func.max(ActiveTable.id))
activeTable_entry = db_session.query(ActiveTable).filter(ActiveTable.id == subqry).first()
if activeTable_entry.blockNumber <= blockNumber:
break
outputAddress = activeTable_entry.address
transferAmount = activeTable_entry.transferBalance
inputAddress = None
# Find out consumedpid and partially consumed pids
parentid = None
orphaned_parentid = None
consumedpid = None
if activeTable_entry.parentid is not None:
parentid = activeTable_entry.parentid
if activeTable_entry.orphaned_parentid is not None:
orphaned_parentid = activeTable_entry.orphaned_parentid
if activeTable_entry.consumedpid is not None:
consumedpid = literal_eval(activeTable_entry.consumedpid)
# restore balances from the partially consumed parent pid and the fully consumed pids
if parentid is not None:
# find query in activeTable with the parentid
activeTable_pid_entry = db_session.query(ActiveTable).filter(ActiveTable.id == parentid).all()[0]
# calculate the amount taken from parentid
activeTable_pid_entry.transferBalance = activeTable_pid_entry.transferBalance + calc_pid_amount(activeTable_entry.transferBalance, consumedpid)
inputAddress = activeTable_pid_entry.address
if orphaned_parentid is not None:
try:
orphaned_parentid_entry = db_session.query(ConsumedTable).filter(ConsumedTable.id == orphaned_parentid).all()[0]
inputAddress = orphaned_parentid_entry.address
except:
pdb.set_trace()
if consumedpid != {}:
# every key in consumedpid was fully consumed, with the consumed amount stored as its value
# consumption order is preserved because higher pid numbers were consumed later
# 1. pull each pid and its details from the ConsumedTable and restore it to the ActiveTable
for key in list(consumedpid.keys()):
consumedpid_entry = db_session.query(ConsumedTable).filter(ConsumedTable.id == key).all()[0]
newTransferBalance = consumedpid_entry.transferBalance + consumedpid[key]
db_session.add(ActiveTable(id=consumedpid_entry.id, address=consumedpid_entry.address, parentid=consumedpid_entry.parentid ,consumedpid=consumedpid_entry.consumedpid, transferBalance=newTransferBalance, addressBalance = None, orphaned_parentid=consumedpid_entry.orphaned_parentid ,blockNumber=consumedpid_entry.blockNumber))
inputAddress = consumedpid_entry.address
db_session.delete(consumedpid_entry)
orphaned_parentid_entries = db_session.query(ActiveTable).filter(ActiveTable.orphaned_parentid == key).all()
if len(orphaned_parentid_entries) != 0:
for orphan_entry in orphaned_parentid_entries:
orphan_entry.parentid = orphan_entry.orphaned_parentid
orphan_entry.orphaned_parentid = None
orphaned_parentid_entries = db_session.query(ConsumedTable).filter(ConsumedTable.orphaned_parentid == key).all()
if len(orphaned_parentid_entries) != 0:
for orphan_entry in orphaned_parentid_entries:
orphan_entry.parentid = orphan_entry.orphaned_parentid
orphan_entry.orphaned_parentid = None
# update addressBalance
rollback_address_balance_processing(db_session, inputAddress, outputAddress, transferAmount)
# delete operations
# delete the last row in activeTable and transactionTable
db_session.delete(activeTable_entry)
db_session.query(TransactionHistory).filter(TransactionHistory.blockNumber > blockNumber).delete()
db_session.query(TransferLogs).filter(TransferLogs.blockNumber > blockNumber).delete()
db_session.commit()
elif dbtype == 'smartcontract':
db_session = create_database_session_orm('smart_contract', {'contract_name':f"{dbname['contract_name']}", 'contract_address':f"{dbname['contract_address']}"}, ContractBase)
db_session.query(ContractTransactionHistory).filter(ContractTransactionHistory.blockNumber > blockNumber).delete()
db_session.query(ContractParticipants).filter(ContractParticipants.blockNumber > blockNumber).delete()
db_session.query(ContractDeposits).filter(ContractDeposits.blockNumber > blockNumber).delete()
db_session.query(ConsumedInfo).filter(ConsumedInfo.blockNumber > blockNumber).delete()
db_session.query(ContractWinners).filter(ContractWinners.blockNumber > blockNumber).delete()
db_session.commit()
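# Usage sketch (illustrative; names and block number are placeholders):
#
#   rollback_database(700000, 'token', 'bioscope')
#   rollback_database(700000, 'smartcontract',
#                     {'contract_name': 'all-crowd-fund', 'contract_address': 'oQkpZ...'})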
def delete_database_old(blockNumber, dbname):
db_session = create_database_session_orm('system_dbs', {'db_name':'system'}, SystemBase)
databases_to_delete = db_session.query(DatabaseTypeMapping.db_name, DatabaseTypeMapping.db_type).filter(DatabaseTypeMapping.blockNumber>blockNumber).all()
db_names, db_type = zip(*databases_to_delete)
for database in databases_to_delete:
if database[1] in ['token','infinite-token']:
dirpath = os.path.join(apppath, 'tokens', f"{dbname}.db")
if os.path.exists(dirpath):
os.remove(dirpath)
elif database[1] in ['smartcontract']:
dirpath = os.path.join(apppath, 'smartcontracts', f"{dbname}.db")
if os.path.exists(dirpath):
os.remove(dirpath)
return db_names
def delete_database(blockNumber, dbname):
db_session = create_database_session_orm('system_dbs', {'db_name':'system'}, SystemBase)
databases_to_delete = db_session.query(DatabaseTypeMapping.db_name, DatabaseTypeMapping.db_type).filter(DatabaseTypeMapping.db_name == dbname).all()
db_names, db_type = zip(*databases_to_delete)
for database in databases_to_delete:
if database[1] in ['token','infinite-token','nft']:
dirpath = os.path.join(apppath, 'tokens', f"{dbname}.db")
if os.path.exists(dirpath):
os.remove(dirpath)
elif database[1] in ['smartcontract']:
dirpath = os.path.join(apppath, 'smartContracts', f"{dbname}.db")
if os.path.exists(dirpath):
os.remove(dirpath)
return db_names
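# Usage sketch (illustrative; database names are placeholders): remove the on-disk
# files for databases that were created after the rollback target, e.g.
#
#   delete_database(rollback_block, 'bioscope')
#   delete_database(rollback_block, 'all-crowd-fund-oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz')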
def system_database_deletions(blockNumber):
latestcache_session = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase)
# delete latestBlocks & latestTransactions entry
latestcache_session.query(LatestBlocks).filter(LatestBlocks.blockNumber > blockNumber).delete()
latestcache_session.query(LatestTransactions).filter(LatestTransactions.blockNumber > blockNumber).delete()
# delete entries newer than the rollback block from activeContracts, contractAddressMapping, databaseTypeMapping, rejectedContractTransactionHistory, rejectedTransactionHistory, tokenAddressMapping and timeActions
systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase)
activeContracts_session = systemdb_session.query(ActiveContracts).filter(ActiveContracts.blockNumber > blockNumber).delete()
contractAddressMapping_queries = systemdb_session.query(ContractAddressMapping).filter(ContractAddressMapping.blockNumber > blockNumber).delete()
databaseTypeMapping_queries = systemdb_session.query(DatabaseTypeMapping).filter(DatabaseTypeMapping.blockNumber > blockNumber).delete()
rejectedContractTransactionHistory_queries = systemdb_session.query(RejectedContractTransactionHistory).filter(RejectedContractTransactionHistory.blockNumber > blockNumber).delete()
rejectedTransactionHistory_queries = systemdb_session.query(RejectedTransactionHistory).filter(RejectedTransactionHistory.blockNumber > blockNumber).delete()
tokenAddressMapping_queries = systemdb_session.query(TokenAddressMapping).filter(TokenAddressMapping.blockNumber > blockNumber).delete()
timeAction_queries = systemdb_session.query(TimeActions).filter(TimeActions.blockNumber > blockNumber).delete()
systemdb_session.query(SystemData).filter(SystemData.attribute=='lastblockscanned').update({SystemData.value:str(blockNumber)})
latestcache_session.commit()
systemdb_session.commit()
latestcache_session.close()
systemdb_session.close()
# Take input from the user regarding how many blocks to roll back on the blockchain
parser = argparse.ArgumentParser(description='Rolls back the FLO token and smart contract tracking databases to an earlier block - https://flo.cash')
parser.add_argument('-rb', '--toblocknumber', nargs='?', type=int, help='Roll back the databases to the specified block number')
parser.add_argument('-r', '--blockcount', nargs='?', type=int, help='Roll back the databases by the specified number of blocks')
args = parser.parse_args()
# Read the last block scanned by the tracking script from the system database
systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase)
lastscannedblock = systemdb_session.query(SystemData.value).filter(SystemData.attribute=='lastblockscanned').first()
systemdb_session.close()
lastscannedblock = int(lastscannedblock.value)
if (args.blockcount and args.toblocknumber):
print("You can only specify one of the options -rb or -r")
sys.exit(0)
elif args.blockcount:
rollback_block = lastscannedblock - args.blockcount
elif args.toblocknumber:
rollback_block = args.toblocknumber
else:
print("Please specify either the target block number (-rb) or the number of blocks to roll back (-r)")
sys.exit(0)
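# Command-line usage sketch (block numbers are placeholders):
#
#   python util_rollback.py -rb 700000   # roll back to block 700000
#   python util_rollback.py -r 10        # roll back the last 10 scanned blocks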
def return_token_contract_set(rollback_block):
latestcache_session = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase)
latestBlocks = latestcache_session.query(LatestBlocks).filter(LatestBlocks.blockNumber > rollback_block).all()
lblocks_dict = {}
blocknumber_list = []
for block in latestBlocks:
block_dict = block.__dict__
lblocks_dict[block_dict['blockNumber']] = {'blockHash':f"{block_dict['blockHash']}", 'jsonData':f"{block_dict['jsonData']}"}
blocknumber_list.insert(0,block_dict['blockNumber'])
tokendb_set = set()
smartcontractdb_set = set()
for blockindex in blocknumber_list:
# Find all the transactions that happened in this block
try:
block_tx_hashes = json.loads(lblocks_dict[str(blockindex)]['jsonData'])['tx']
except:
print(f"Block {blockindex} is not found in latestCache. Skipping this block")
continue
for txhash in block_tx_hashes:
# Get the transaction details
transaction = latestcache_session.query(LatestTransactions).filter(LatestTransactions.transactionHash == txhash).first()
transaction_data = json.loads(transaction.jsonData)
inputAddress, outputAddress = find_input_output_addresses(transaction_data)
parsed_flodata = literal_eval(transaction.parsedFloData)
tokenlist, contractlist = getDatabase_from_parsedFloData(parsed_flodata, inputAddress, outputAddress)
for token in tokenlist:
tokendb_set.add(token)
for contract in contractlist:
smartcontractdb_set.add(contract)
return tokendb_set, smartcontractdb_set
def initiate_rollback_process():
'''
tokendb_set, smartcontractdb_set = return_token_contract_set(rollback_block)
'''
# Connect to system.db
systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase)
db_names = systemdb_session.query(DatabaseTypeMapping).all()
for db in db_names:
if db.db_type in ['token', 'nft', 'infinite-token']:
if db.blockNumber > rollback_block:
delete_database(rollback_block, f"{db.db_name}")
else:
rollback_database(rollback_block, 'token', f"{db.db_name}")
elif db.db_type in ['smartcontract']:
if db.blockNumber > rollback_block:
delete_database(rollback_block, f"{db.db_name}")
else:
db_split = db.db_name.rsplit('-',1)
db_name = {'contract_name':db_split[0], 'contract_address':db_split[1]}
rollback_database(rollback_block, 'smartcontract', db_name)
'''
for token_db in tokendb_set:
token_session = create_database_session_orm('token', {'token_name': token_db}, TokenBase)
if token_session.query(TransactionHistory.blockNumber).first()[0] > rollback_block:
delete_database(rollback_block, token_db)
token_session.commit()
else:
rollback_database(rollback_block, 'token', token_db)
token_session.close()
for contract_db in smartcontractdb_set:
contract_session = create_database_session_orm('smartcontract', {'db_name': contract_db}, ContractBase)
if contract_session.query(TransactionHistory.blockNumber).first()[0] > rollback_block:
delete_database(rollback_block, contract_db)
contract_session.commit()
else:
rollback_database(rollback_block, 'smartcontract', contract_db)
contract_session.close()
'''
system_database_deletions(rollback_block)
# update lastblockscanned in system_dbs (kept as a safety net; system_database_deletions already sets it)
systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase)
lastblockscanned_query = systemdb_session.query(SystemData).filter(SystemData.attribute=='lastblockscanned').first()
lastblockscanned_query.value = str(rollback_block)
systemdb_session.commit()
systemdb_session.close()
if __name__ == "__main__":
systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase)
lastblockscanned_query = systemdb_session.query(SystemData).filter(SystemData.attribute=='lastblockscanned').first()
if(rollback_block > int(lastblockscanned_query.value)):
print('Rollback block is greater than the last scanned block\n Exiting ....')
sys.exit(0)
else:
initiate_rollback_process()