diff --git a/.github/workflows/test_parsing.yml b/.github/workflows/test_parsing.yml new file mode 100644 index 0000000..5265e25 --- /dev/null +++ b/.github/workflows/test_parsing.yml @@ -0,0 +1,31 @@ +# This workflow will install Python dependencies, run tests and lint with a single version of Python +# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python + +name: Test flodata parsing + +on: + push: + branches: [ "swap-statef-testing" ] + pull_request: + branches: [ "swap-statef-testing" ] + +permissions: + contents: read + +jobs: + build: + runs-on: self-hosted + + steps: + - uses: actions/checkout@v3 + - name: Set up Python 3.8 + uses: actions/setup-python@v3 + with: + python-version: "3.8" + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install arrow==1.1.0 pyflo-lib==2.0.9 requests==2.25.0 + - name: Test with unittest + run: | + python -m unittest tests/test_parsing.py diff --git a/.gitignore b/.gitignore index 476b0d4..bf2502e 100644 --- a/.gitignore +++ b/.gitignore @@ -10,6 +10,9 @@ config.ini config.py *.log py3/ +py3.9.0 __pycache__/ *.pyc .vscode/ +error-notes.txt +snippets* \ No newline at end of file diff --git a/.python-version b/.python-version deleted file mode 100644 index a5c4c76..0000000 --- a/.python-version +++ /dev/null @@ -1 +0,0 @@ -3.9.0 diff --git a/README.md b/README.md index 9588506..dd11b25 100644 --- a/README.md +++ b/README.md @@ -1,57 +1,3 @@ # FLO Token & Smart Contract System -## Important versions and their hashes The python script scans the FLO Blockchain for Token and Smart Contract activity and creates/updates local SQLite databases accordingly. 
- -`339dac6a50bcd973dda4caf43998fc61dd79ea68` -The legacy token and smart contract system running currently on the server - -`41c4078db98e878ecef3452007893136c531ba05` ==> WORKING VERSION | Token swap branch -The latest version with token swap smart contract and token transfer with the following problems: -1. Parsing module is not able to detect token creation and transfer floData -2. The smart contract system is not moving forward because it is not able to detect token databases as they are created when run form scratch, however it is working with old created token databases - -`89d96501b9fcdd3c91c8900e1fb3dd5a8d8684c1` -Docker-compatibility branch is needed right now because Docker image made for flo-token-tracking required some changes which have been made in that branch. - - -## How to start the system - -1. Create a virtual environment with python3.7 and activate it - ``` - python3.7 -m venv py3.7 - source py3.7/bin/activate - ``` -2. Install python packages required for the virtual environment from `pip3 install -r requirements.txt` -3. Setup config files with the following information - For testnet - ``` - # config.ini - [DEFAULT] - NET = testnet - FLO_CLI_PATH = /usr/local/bin/flo-cli - START_BLOCK = 740400 - - # config.py - committeeAddressList = ['oVwmQnQGtXjRpP7dxJeiRGd5azCrJiB6Ka'] - sseAPI_url = 'https://ranchimallflo-testnet.duckdns.org/' - ``` - - For mainnet - ``` - # config.ini - [DEFAULT] - NET = mainnet - FLO_CLI_PATH = /usr/local/bin/flo-cli - START_BLOCK = 3387900 - - # config.py - committeeAddressList = ['FRwwCqbP7DN4z5guffzzhCSgpD8Q33hUG8'] - sseAPI_url = 'https://ranchimallflo.duckdns.org/' - ``` - -4. 
If running for the first time, run `python3.7 tracktokens-smartcontracts.py --reset` otherwise run `python3.7 tracktokens-smartcontracts.py` - - -If you want to listen to RanchiMall's Token Tracker scanner's events you have to subscribe to Ranchimallflo API's end point `/sse` -Reference - https://ably.com/topic/server-sent-events diff --git a/app.py b/app.py deleted file mode 100644 index 9b76661..0000000 --- a/app.py +++ /dev/null @@ -1,23 +0,0 @@ -import os - -from flask import Flask, jsonify - -app = Flask(__name__) - - -@app.route('/') -def hello_world(): - return 'Hello, World!' - - -@app.route('/getmarkerlist') -def marker_list(): - dblist = os.listdir("databases/") - dbdict = {} - for idx, item in enumerate(dblist): - dbdict[idx] = item[:-3] - - return jsonify(dbdict) - - -app.run(debug=True) diff --git a/config-example.ini b/config-example.ini new file mode 100644 index 0000000..bce0043 --- /dev/null +++ b/config-example.ini @@ -0,0 +1,13 @@ +[DEFAULT] +NET = testnet +FLO_CLI_PATH = /usr/local/bin/flo-cli +START_BLOCK = 740400 +FLOSIGHT_NETURL = https://flosight-testnet.ranchimall.net/ +TOKENAPI_SSE_URL = https://ranchimallflo-testnet.duckdns.org +MAINNET_FLOSIGHT_SERVER_LIST = https://flosight.ranchimall.net/ , https://flosight.duckdns.org/ +TESTNET_FLOSIGHT_SERVER_LIST = https://flosight-testnet.ranchimall.net/ , https://flosight-testnet.duckdns.org/ +IGNORE_BLOCK_LIST = 902446 +IGNORE_TRANSACTION_LIST = b4ac4ddb51188b28b39bcb3aa31357d5bfe562c21e8aaf8dde0ec560fc893174 +DATA_PATH = /home/production/Dev/flo-token-tracker +APP_ADMIN = oWooGLbBELNnwq8Z5YmjoVjw8GhBGH3qSP + diff --git a/config-example.py b/config-example.py deleted file mode 100644 index 6f19a0d..0000000 --- a/config-example.py +++ /dev/null @@ -1 +0,0 @@ -committeeAddressList = [] \ No newline at end of file diff --git a/config.ini b/config.ini deleted file mode 100644 index 96b9251..0000000 --- a/config.ini +++ /dev/null @@ -1,4 +0,0 @@ -[DEFAULT] -NET = mainnet -FLO_CLI_PATH = 
/usr/local/bin/flo-cli -START_BLOCK = 3387900 diff --git a/convert_db.py b/convert_db.py new file mode 100644 index 0000000..43a0543 --- /dev/null +++ b/convert_db.py @@ -0,0 +1,43 @@ +from models import SystemData, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, RejectedTransactionHistory, Base, ContractStructure, ContractBase, ContractParticipants, SystemBase, ActiveContracts, ContractAddressMapping, LatestCacheBase, ContractTransactionHistory, RejectedContractTransactionHistory, TokenContractAssociation, ContinuosContractBase, ContractStructure1, ContractParticipants1, ContractDeposits1, ContractTransactionHistory1, LatestTransactions, LatestBlocks, DatabaseTypeMapping, TokenAddressMapping, LatestCacheBase1, LatestTransactions1, LatestBlocks1 +import pdb +from sqlalchemy import create_engine, func +from sqlalchemy.orm import sessionmaker + + +def create_database_session_orm(type, parameters, base): + if type == 'token': + engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True) + base.metadata.create_all(bind=engine) + session = sessionmaker(bind=engine)() + + elif type == 'smart_contract': + engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True) + base.metadata.create_all(bind=engine) + session = sessionmaker(bind=engine)() + + elif type == 'system_dbs': + engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False) + base.metadata.create_all(bind=engine) + session = sessionmaker(bind=engine)() + + return session + + +# connect to the database convert_db +convert_db = create_database_session_orm('system_dbs', {'db_name': 'convertdb'}, LatestCacheBase1) +latest_blocks = convert_db.query(LatestBlocks1).all() +latest_txs = convert_db.query(LatestTransactions1).all() + + +# create a new database convert_db_new +convert_db_1 = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase) + +for block in 
latest_blocks: + convert_db_1.add(LatestBlocks(blockNumber=block.blockNumber, blockHash=block.blockHash, jsonData=block.jsonData)) + +for tx in latest_txs: + convert_db_1.add(LatestTransactions(transactionHash=tx.transactionHash, blockNumber=tx.blockNumber, jsonData=tx.jsonData, transactionType=tx.transactionType, parsedFloData=tx.parsedFloData)) + +convert_db_1.commit() +convert_db_1.close() +convert_db.close() diff --git a/cronjob.sh b/cronjob.sh deleted file mode 100755 index 9a60b8f..0000000 --- a/cronjob.sh +++ /dev/null @@ -1,3 +0,0 @@ -cd /home/production/Desktop/flo-token-tracking/ -python3 tracktokens-smartcontracts.py - diff --git a/models.py b/models.py index ca07bb1..1331bc9 100644 --- a/models.py +++ b/models.py @@ -1,13 +1,14 @@ from sqlalchemy import Column, Integer, Float, String from sqlalchemy.ext.declarative import declarative_base -Base = declarative_base() +TokenBase = declarative_base() ContractBase = declarative_base() +ContinuosContractBase = declarative_base() SystemBase = declarative_base() LatestCacheBase = declarative_base() -class ActiveTable(Base): +class ActiveTable(TokenBase): __tablename__ = "activeTable" id = Column('id', Integer, primary_key=True) @@ -15,9 +16,12 @@ class ActiveTable(Base): parentid = Column('parentid', Integer) consumedpid = Column('consumedpid', String) transferBalance = Column('transferBalance', Float) + addressBalance = Column('addressBalance', Float) + orphaned_parentid = Column('orphaned_parentid', Integer) + blockNumber = Column('blockNumber', Integer) -class ConsumedTable(Base): +class ConsumedTable(TokenBase): __tablename__ = "consumedTable" primaryKey = Column('primaryKey', Integer, primary_key=True) @@ -26,9 +30,12 @@ class ConsumedTable(Base): parentid = Column('parentid', Integer) consumedpid = Column('consumedpid', String) transferBalance = Column('transferBalance', Float) + addressBalance = Column('addressBalance', Float) + orphaned_parentid = Column('orphaned_parentid', Integer) + blockNumber = 
Column('blockNumber', Integer) -class TransferLogs(Base): +class TransferLogs(TokenBase): __tablename__ = "transferlogs" primary_key = Column('id', Integer, primary_key=True) @@ -42,7 +49,7 @@ class TransferLogs(Base): transactionHash = Column('transactionHash', String) -class TransactionHistory(Base): +class TransactionHistory(TokenBase): __tablename__ = "transactionHistory" primary_key = Column('id', Integer, primary_key=True) @@ -59,7 +66,7 @@ class TransactionHistory(Base): parsedFloData = Column('parsedFloData', String) -class TokenContractAssociation(Base): +class TokenContractAssociation(TokenBase): __tablename__ = "tokenContractAssociation" primary_key = Column('id', Integer, primary_key=True) @@ -116,14 +123,81 @@ class ContractTransactionHistory(ContractBase): parsedFloData = Column('parsedFloData', String) -class RejectedContractTransactionHistory(SystemBase): - __tablename__ = "rejectedContractTransactionHistory" +class ContractDeposits(ContractBase): + __tablename__ = "contractdeposits" + + id = Column('id', Integer, primary_key=True) + depositorAddress = Column('depositorAddress', String) + depositAmount = Column('depositAmount', Float) + depositBalance = Column('depositBalance', Float) + expiryTime = Column('expiryTime', String) + unix_expiryTime = Column('unix_expiryTime', Integer) + status = Column('status', String) + transactionHash = Column('transactionHash', String) + blockNumber = Column('blockNumber', Integer) + blockHash = Column('blockHash', String) + + +class ConsumedInfo(ContractBase): + __tablename__ = "consumedinfo" + + id = Column('id', Integer, primary_key=True) + id_deposittable = Column('id_deposittable', Integer) + transactionHash = Column('transactionHash', String) + blockNumber = Column('blockNumber', Integer) + + +class ContractWinners(ContractBase): + __tablename__ = "contractwinners" + + id = Column('id', Integer, primary_key=True) + participantAddress = Column('participantAddress', String) + winningAmount = 
Column('winningAmount', Float) + userChoice = Column('userChoice', String) + transactionHash = Column('transactionHash', String) + blockNumber = Column('blockNumber', Integer) + blockHash = Column('blockHash', String) + + +class ContractStructure2(ContinuosContractBase): + __tablename__ = "contractstructure" + + id = Column('id', Integer, primary_key=True) + attribute = Column('attribute', String) + index = Column('index', Integer) + value = Column('value', String) + + +class ContractParticipants2(ContinuosContractBase): + __tablename__ = "contractparticipants" + + id = Column('id', Integer, primary_key=True) + participantAddress = Column('participantAddress', String) + tokenAmount = Column('tokenAmount', Float) + transactionHash = Column('transactionHash', String) + blockNumber = Column('blockNumber', Integer) + blockHash = Column('blockHash', String) + + +class ContractDeposits2(ContinuosContractBase): + __tablename__ = "contractdeposits" + + id = Column('id', Integer, primary_key=True) + depositorAddress = Column('depositorAddress', String) + depositAmount = Column('depositAmount', Float) + expiryTime = Column('expiryTime', String) + status = Column('status', String) + transactionHash = Column('transactionHash', String) + blockNumber = Column('blockNumber', Integer) + blockHash = Column('blockHash', String) + + +class ContractTransactionHistory2(ContinuosContractBase): + __tablename__ = "contractTransactionHistory" primary_key = Column('id', Integer, primary_key=True) transactionType = Column('transactionType', String) transactionSubType = Column('transactionSubType', String) - contractName = Column('contractName', String) - contractAddress = Column('contractAddress', String) sourceFloAddress = Column('sourceFloAddress', String) destFloAddress = Column('destFloAddress', String) transferAmount = Column('transferAmount', Float) @@ -133,26 +207,6 @@ class RejectedContractTransactionHistory(SystemBase): transactionHash = Column('transactionHash', String) 
blockchainReference = Column('blockchainReference', String) jsonData = Column('jsonData', String) - rejectComment = Column('rejectComment', String) - parsedFloData = Column('parsedFloData', String) - - -class RejectedTransactionHistory(SystemBase): - __tablename__ = "rejectedTransactionHistory" - - primary_key = Column('id', Integer, primary_key=True) - tokenIdentification = Column('tokenIdentification', String) - sourceFloAddress = Column('sourceFloAddress', String) - destFloAddress = Column('destFloAddress', String) - transferAmount = Column('transferAmount', Float) - blockNumber = Column('blockNumber', Integer) - blockHash = Column('blockHash', String) - time = Column('time', Integer) - transactionHash = Column('transactionHash', String) - blockchainReference = Column('blockchainReference', String) - jsonData = Column('jsonData', String) - rejectComment = Column('rejectComment', String) - transactionType = Column('transactionType', String) parsedFloData = Column('parsedFloData', String) @@ -206,19 +260,89 @@ class TokenAddressMapping(SystemBase): blockHash = Column('blockHash', String) -class LatestTransactions(LatestCacheBase): - __tablename__ = "latestTransactions" +class DatabaseTypeMapping(SystemBase): + __tablename__ = "databaseTypeMapping" + id = Column('id', Integer, primary_key=True) + db_name = Column('db_name', String) + db_type = Column('db_type', String) + keyword = Column('keyword', String) + object_format = Column ('object_format', String) + blockNumber = Column('blockNumber', Integer) + + +class TimeActions(SystemBase): + __tablename__ = "time_actions" + + id = Column('id', Integer, primary_key=True) + time = Column('time', String) + activity = Column('activity', String) + status = Column('status', String) + contractName = Column('contractName', String) + contractAddress = Column('contractAddress', String) + contractType = Column('contractType', String) + tokens_db = Column('tokens_db', String) + parsed_data = Column('parsed_data', String) 
transactionHash = Column('transactionHash', String) - blockNumber = Column('blockNumber', String) + blockNumber = Column('blockNumber', Integer) + + +class RejectedContractTransactionHistory(SystemBase): + __tablename__ = "rejectedContractTransactionHistory" + + primary_key = Column('id', Integer, primary_key=True) + transactionType = Column('transactionType', String) + transactionSubType = Column('transactionSubType', String) + contractName = Column('contractName', String) + contractAddress = Column('contractAddress', String) + sourceFloAddress = Column('sourceFloAddress', String) + destFloAddress = Column('destFloAddress', String) + transferAmount = Column('transferAmount', Float) + blockNumber = Column('blockNumber', Integer) + blockHash = Column('blockHash', String) + time = Column('time', Integer) + transactionHash = Column('transactionHash', String) + blockchainReference = Column('blockchainReference', String) jsonData = Column('jsonData', String) + rejectComment = Column('rejectComment', String) + parsedFloData = Column('parsedFloData', String) + + +class RejectedTransactionHistory(SystemBase): + __tablename__ = "rejectedTransactionHistory" + + primary_key = Column('id', Integer, primary_key=True) + tokenIdentification = Column('tokenIdentification', String) + sourceFloAddress = Column('sourceFloAddress', String) + destFloAddress = Column('destFloAddress', String) + transferAmount = Column('transferAmount', Float) + blockNumber = Column('blockNumber', Integer) + blockHash = Column('blockHash', String) + time = Column('time', Integer) + transactionHash = Column('transactionHash', String) + blockchainReference = Column('blockchainReference', String) + jsonData = Column('jsonData', String) + rejectComment = Column('rejectComment', String) transactionType = Column('transactionType', String) parsedFloData = Column('parsedFloData', String) +class LatestTransactions(LatestCacheBase): + __tablename__ = "latestTransactions" + + id = Column('id', Integer, 
primary_key=True) + transactionHash = Column('transactionHash', String) + blockNumber = Column('blockNumber', Integer) + jsonData = Column('jsonData', String) + transactionType = Column('transactionType', String) + parsedFloData = Column('parsedFloData', String) + db_reference = Column('db_reference', String) + + class LatestBlocks(LatestCacheBase): __tablename__ = "latestBlocks" + id = Column('id', Integer, primary_key=True) - blockNumber = Column('blockNumber', String) + blockNumber = Column('blockNumber', Integer) blockHash = Column('blockHash', String) jsonData = Column('jsonData', String) diff --git a/parser_function_definitions.py b/parser_function_definitions.py new file mode 100644 index 0000000..34b55e5 --- /dev/null +++ b/parser_function_definitions.py @@ -0,0 +1,281 @@ +""" + +DEFINITIONS: + +Special character words - A word followed by either of the special character(#,*,@) +#-word - Token name +@-word - Smart Contract name +*-word - Smart Contract type + +""" + +""" +FIND RULES + +1. Identify all Special character words in a text string >> and output as a list of those words +2. Apply rule 1, but only before a marker or keyword like ":" and output as a list of those words +3. Find a number in the string +5. Check for an occurance of exact order of pattern of special character words + eg. for one-time-event smart contract( identified using *-word), the existence of #-word should be checked before the ':' and output the #-word + for continuos-event smart contract( identified using *-word)(with subtype tokenswap), the #-words should be checked after the ':' and output two hash words +6. Given a string of the type contract conditions, format and output an object string by removing = and by removing number references +7. Idenitfy all the special character words in a text string such that spaces are not taken into account, for eg. 
"""
   Input string => "contract-conditions :(2) accepting_token=rupee#(3) selling_token = bioscope# " |
   Output string => ["rupee#","bioscope#"]
"""

def findrule1(rawstring, special_character):
    """Return every word in *rawstring* that ends with *special_character*.

    A lone special character (a word of length 1) is not a match: the
    marker must be attached to an actual word, e.g. ``rmt#``.
    """
    wordList = []
    for word in rawstring.split(' '):
        if word.endswith(special_character) and len(word) != 1:
            wordList.append(word)
    return wordList

def findrule3(text):
    """Extract a single numeric amount from *text*, or return None.

    Recognises plain numbers ("500"), numbers followed by a base-unit
    word ("500 million"), and numbers fused with a base unit
    ("500million").  The resolved float is returned only when exactly
    one number is present; zero or multiple candidates yield None,
    because an ambiguous amount must be treated as noise.
    """
    base_units = {'thousand': 10 ** 3, 'million': 10 ** 6, 'billion': 10 ** 9, 'trillion': 10 ** 12}
    textList = text.split(' ')
    counter = 0
    value = None
    for idx, word in enumerate(textList):
        try:
            result = float(word)
        except ValueError:
            # Not a bare number: try the fused form, e.g. "500million".
            for unit in base_units:
                parts = word.split(unit)
                if len(parts) == 2 and parts[1] == '' and parts[0] != '':
                    try:
                        value = float(parts[0]) * base_units[unit]
                        counter = counter + 1
                    except ValueError:
                        continue
        else:
            # Bare number; scale it when the *next* word is a base unit.
            # Bounds-guarding the lookahead fixes the original bug where
            # a number at the very end of the string raised IndexError
            # (swallowed by a bare except) and was silently dropped.
            if idx + 1 < len(textList) and textList[idx + 1] in base_units:
                value = result * base_units[textList[idx + 1]]
            else:
                value = result
            counter = counter + 1

    if counter == 1:
        return value
    else:
        return None


"""
TRUE-FALSE RULES

1. Check if subtype = tokenswap exists in a given string,
2. Find if any one of special word in list is present, ie.
"""
"""
   [start, create, incorporate] and any of the words in second list is not present like [send, transfer, give]
"""
import re

def findWholeWord(w):
    """Build a case-insensitive whole-word searcher for *w*.

    Returns the bound ``search`` method of a compiled pattern, so
    ``findWholeWord('seek')(text)`` yields a match object or None.
    """
    return re.compile(r'\b({0})\b'.format(w), flags=re.IGNORECASE).search

'''
findWholeWord('seek')('those who seek shall find') # -> match
findWholeWord('word')('swordsmith')                # -> None (whole words only)
'''

def truefalse_rule1(rawstring, string_tobe_checked):
    """True when *string_tobe_checked* occurs in *rawstring*, ignoring case and all spaces."""
    nowhites_rawstring = rawstring.replace(" ", "").lower()
    if string_tobe_checked.replace(" ", "").lower() in nowhites_rawstring:
        return True
    else:
        return False

denied_list = ['transfer', 'send', 'give']  # keep everything lowercase
permitted_list = ['incorporate', 'create', 'start']  # keep everything lowercase

def truefalse_rule2(rawstring, permitted_list, denied_list):
    """True when *rawstring* contains a permitted word and no denied word.

    Both lists are matched as whole words, case-insensitively.
    """
    foundPermitted = None
    foundDenied = None

    for word in permitted_list:
        if findWholeWord(word)(rawstring):
            foundPermitted = word
            break

    for word in denied_list:
        if findWholeWord(word)(rawstring):
            foundDenied = word
            break

    # The original read ``foundPermitted in not None`` -- a SyntaxError;
    # the intended identity test is ``is not None``.
    if (foundPermitted is not None) and (foundDenied is None):
        return True
    else:
        return False


def selectCateogry(rawstring, wordlist, category1, category2):
    # TODO: unimplemented stub; the original ``def`` had no body at all,
    # which is a SyntaxError.  Raise explicitly until it is written.
    raise NotImplementedError


"""
CLASSIFY RULES

1. Based on various combinations of the special character words and special words, create categorizations
   eg. 1.1 if there is only one #-word, then the flodata is related purely to token system
       1.2 if there is one #-word, one @-word .. then it is related to smart contract system, but cannot be a creation type since smart contract creaton needs to specify contract type with *-word
       1.3 if there is one
2. Check if it is of the value 'one-time-event' or 'continuos-event'
"""

"""
REJECT RULES

1. *-words have to be equal to 1 ie. You can specify only one contract type at once , otherwise noise
2. *-word has to fall in the following type ['one-time-event*', 'continuous-event*'], otherwise noise
"""
"""
REJECT RULES (continued)

3. @-word should exist only before the : , otherwise noise
4. There should be only one @-word, otherwise noise
5. for one-time-event smart contract( identified using one-time-event*), if there is a no #-word before : -> reject as noise
6. for one-time-event smart contract( identified using one-time-event*) if there is more than one #-word before : -> reject as noise
7. for one-time-event smart contract( identified using one-time-event*) if there is/are #-word(s) after colon -> reject as noise
8. for continuos-event smart contract( identified using continuos-event*) if there is one or more #-word before : > reject as noise
9. for continuos-event smart contract( identified using continuos-event*)( with subtype token-swap ) if there is one or more than two #-word after : > reject as noise
10.
"""

def rejectrule9(rawtext, starword):
    # TODO: reject rule 9 (token-swap #-word count after colon) is not
    # implemented yet; see the REJECT RULES docstring above.
    pass


# NOTE(review): the original executed the call below at module level, but
# extractContractConditions, cleanstring, contracttype, blockinfo and
# hashList are all undefined in this file, so importing it raised
# NameError.  Kept only as a usage example.
# extractContractConditions(cleanstring, contracttype, blocktime=blockinfo['time'], marker=hashList[0][:-1])

# Token incorporation operation
## Existance of keyword

"""
APPLY RULES

1.
"""
"""
APPLY RULES (continued)

1. After application of apply rule1, a parser rule will either return a value or will classify the result as noise
"""

def apply_rule1(*argv):
    """Run a parser rule and normalise its outcome.

    ``argv[0]`` is the rule function; the remaining arguments are passed
    through to it.  A rule returning False means the flodata is noise;
    any other result is forwarded unchanged.  (The original's
    ``elif a if True:`` was a SyntaxError -- the intended test was
    ``is`` -- and the success path therefore never returned.)
    """
    a = argv[0](*argv[1:])
    if a is False:
        return "noise"
    else:
        return a

# If any of the parser rules returns a value, queue it for further
# processing, otherwise send noise to the output engine.
# NOTE(review): the original executed the call below at import time with
# undefined names (findrule_1, rawstring, special_character); kept only
# as a usage example.
# apply_rule1(findrule_1, rawstring, special_character)

def outputreturn(*argv):
    """Build the parsed_data dict for a classified flodata string.

    ``argv[0]`` selects the category; the remaining positional arguments
    fill the category-specific fields (see the inline comments mapping
    each slot).  The original assigned ``parsed_data`` but never
    returned it, so every call produced None; each category now returns
    its dict, and an unknown category returns None explicitly.
    """
    if argv[0] == 'noise':
        parsed_data = {'type': 'noise'}
    elif argv[0] == 'token_incorporation':
        parsed_data = {
            'type': 'tokenIncorporation',
            'flodata': argv[1],             # string
            'tokenIdentification': argv[2], # hashList[0][:-1]
            'tokenAmount': argv[3]          # initTokens
        }
    elif argv[0] == 'token_transfer':
        parsed_data = {
            'type': 'transfer',
            'transferType': 'token',
            'flodata': argv[1],             # string
            'tokenIdentification': argv[2], # hashList[0][:-1]
            'tokenAmount': argv[3]          # amount
        }
    elif argv[0] == 'one-time-event-userchoice-smartcontract-incorporation':
        parsed_data = {
            'type': 'smartContractIncorporation',
            'contractType': 'one-time-event',
            'tokenIdentification': argv[1], # hashList[0][:-1]
            'contractName': argv[2],        # atList[0][:-1]
            'contractAddress': argv[3],     # contractaddress[:-1]
            'flodata': argv[4],             # string
            'contractConditions': {
                'contractamount': argv[5],
                'minimumsubscriptionamount': argv[6],
                'maximumsubscriptionamount': argv[7],
                'payeeaddress': argv[8],
                'userchoice': argv[9],
                'expiryTime': argv[10]
            }
        }
    elif argv[0] == 'one-time-event-userchoice-smartcontract-participation':
        parsed_data = {
            'type': 'transfer',
            'transferType': 'smartContract',
            'flodata': argv[1],             # string
            'tokenIdentification': argv[2], # hashList[0][:-1]
            'operation': 'transfer',
            'tokenAmount': argv[3],         # amount
            'contractName': argv[4],        # atList[0][:-1]
            'userChoice': argv[5]           # userChoice
        }
    elif argv[0] == 'one-time-event-userchoice-smartcontract-trigger':
        parsed_data = {
            'type': 'smartContractPays',
            'contractName': argv[1],        # atList[0][:-1]
            'triggerCondition': argv[2]     # triggerCondition.group().strip()[1:-1]
        }
    elif argv[0] == 'one-time-event-time-smartcontract-incorporation':
        parsed_data = {
            'type': 'smartContractIncorporation',
            'contractType': 'one-time-event',
            'tokenIdentification': argv[1], # hashList[0][:-1]
            'contractName': argv[2],        # atList[0][:-1]
            'contractAddress': argv[3],     # contractaddress[:-1]
            'flodata': argv[4],             # string
            'contractConditions': {
                'contractamount': argv[5],
                'minimumsubscriptionamount': argv[6],
                'maximumsubscriptionamount': argv[7],
                'payeeaddress': argv[8],
                'expiryTime': argv[9]
            }
        }
    elif argv[0] == 'one-time-event-time-smartcontract-participation':
        parsed_data = {
            'type': 'transfer',
            'transferType': 'smartContract',
            'flodata': argv[1],             # string
            'tokenIdentification': argv[2], # hashList[0][:-1]
            'operation': 'transfer',
            'tokenAmount': argv[3],         # amount
            'contractName': argv[4]         # atList[0][:-1]
        }
    elif argv[0] == 'continuos-event-token-swap-incorporation':
        parsed_data = {
            'type': 'smartContractIncorporation',
            'contractType': 'continuos-event',
            'tokenIdentification': argv[1], # hashList[0][:-1]
            'contractName': argv[2],        # atList[0][:-1]
            'contractAddress': argv[3],     # contractaddress[:-1]
            'flodata': argv[4],             # string
            'contractConditions': {
                'subtype': argv[5],         # tokenswap
                'accepting_token': argv[6],
                'selling_token': argv[7],
                'pricetype': argv[8],
                'price': argv[9],
            }
        }
    elif argv[0] == 'continuos-event-token-swap-deposit':
        parsed_data = {
            'type': 'smartContractDeposit',
            'tokenIdentification': argv[1], # hashList[0][:-1]
            'depositAmount': argv[2],       # depositAmount
            'contractName': argv[3],        # atList[0][:-1]
            'flodata': argv[4],             # string
            'depositConditions': {
                'expiryTime': argv[5]
            }
        }
    elif argv[0] == 'continuos-event-token-swap-participation':
        parsed_data = {
            'type': 'smartContractParticipation',
            'tokenIdentification': argv[1], # hashList[0][:-1]
            'sendAmount': argv[2],          # sendtAmount
            'receiveAmount': argv[3],       # receiveAmount
            'contractName': argv[4],        # atList[0][:-1]
            'flodata': argv[5]              # string
        }
    else:
        # Unknown category: signal explicitly rather than leaking a
        # NameError on an unbound parsed_data.
        return None
    return parsed_data
argv[1], #hashList[0][:-1] + 'sendAmount': argv[2], #sendtAmount + 'receiveAmount': argv[3], #receiveAmount + 'contractName': argv[4], #atList[0][:-1] + 'flodata': argv[5] #string + } diff --git a/parsing.py b/parsing.py index e0fe046..a1fbd3d 100644 --- a/parsing.py +++ b/parsing.py @@ -1,170 +1,365 @@ -import configparser +import pdb import re - import arrow +import pyflo +import logging +import json -config = configparser.ConfigParser() -config.read('config.ini') +""" +Find make lists of #, *, @ words -marker = None -operation = None -address = None -amount = None +If only 1 hash word and nothing else, then it is token related ( tokencreation or tokentransfer ) -months = {'jan': 1, - 'feb': 2, - 'mar': 3, - 'apr': 4, - 'may': 5, - 'jun': 6, - 'jul': 7, - 'aug': 8, - 'sep': 9, - 'oct': 10, - 'nov': 11, - 'dec': 12} +If @ is present, then we know it is smart contract related + @ (#)pre: - participation , deposit + @ * (#)pre: - one time event creation + @ * (# #)post: - token swap creation + @ - trigger + +Check for 1 @ only +Check for 1 # only +Check for @ (#)pre: +Check for @ * (#)pre: +Check for @ * (# #)post: + +special_character_frequency = { + 'precolon': { + '#':0, + '*':0, + '@':0, + ':':0 +} + +for word in allList: + if word.endswith('#'): + special_character_frequency['#'] = special_character_frequency['#'] + 1 + elif word.endswith('*'): + special_character_frequency['*'] = special_character_frequency['*'] + 1 + elif word.endswith('@'): + special_character_frequency['@'] = special_character_frequency['@'] + 1 + elif word.endswith(':'): + special_character_frequency[':'] = special_character_frequency[':'] + 1 + +""" + +''' +def className(rawstring): + # Create a list that contains @ , # , * and : ; in actual order of occurence with their words. Only : is allowed to exist without a word in front of it. 
+ # Check for 1 @ only followed by :, and the class is trigger + # Check for 1 # only, then the class is tokensystem + # Check for @ in the first position, * in the second position, # in the third position and : in the fourth position, then class is one time event creation + # Check for @ in the first position, * in the second position and : in the third position, then hash is in 4th position, then hash in 5th position | Token swap creation + + allList = findrules(rawstring,['#','*','@',':']) + + pattern_list1 = ['rmt@','rmt*',':',"rmt#","rmt#"] + pattern_list2 = ['rmt#',':',"rmt@"] + pattern_list3 = ['rmt#'] + pattern_list4 = ["rmt@","one-time-event*","floAddress$",':',"rupee#","bioscope#"] + patternmatch = find_first_classification(pattern_list4, search_patterns) + print(f"Patternmatch is {patternmatch}") -def isTransfer(text): - wordlist = ['transfer', 'send', 'give'] # keep everything lowercase - textList = text.split(' ') - for word in wordlist: - if word in textList: - return True - return False +rawstring = "test rmt# rmt@ rmt* : rmt# rmt# test" +#className(rawstring) ''' +# Variable configurations +search_patterns = { + 'tokensystem-C':{ + 1:['#'] + }, + 'smart-contract-creation-C':{ + 1:['@','*','#','$',':'], + 2:['@','*','#','$',':','#'] + }, + 'smart-contract-participation-deposit-C':{ + 1:['#','@',':'], + 2:['#','@','$',':'] + }, + 'userchoice-trigger':{ + 1:['@'] + }, + 'smart-contract-participation-ote-ce-C':{ + 1:['#','@'], + 2:['#','@','$'] + }, + 'smart-contract-creation-ce-tokenswap':{ + 1:['@','*','$',':','#','#'] + } +} -def isIncorp(text): - wordlist = ['incorporate', 'create', 'start'] # keep everything lowercase - textList = text.split(' ') - for word in wordlist: - if word in textList: - return True - return False +conflict_matrix = { + 'tokensystem-C':{ + # Check for send, check for create, if both are there noise, else conflict resolved + 'tokentransfer', + 'tokencreation' + }, + 'smart-contract-creation-C':{ + # Check contract-conditions 
for userchoice, if present then userchoice contract, else time based contract + 'creation-one-time-event-userchoice', + 'creation-one-time-event-timebased' + }, + 'smart-contract-participation-deposit-C':{ + # Check *-word, its either one-time-event or a continuos-event + 'participation-one-time-event-userchoice', + 'deposit-continuos-event-tokenswap' + }, + 'smart-contract-participation-ote-ce-C':{ + # Check *-word, its either one-time-event or a continuos-event + 'participation-one-time-event-timebased', + 'participation-continuos-event-tokenswap' + } +} +months = { + 'jan': 1, + 'feb': 2, + 'mar': 3, + 'apr': 4, + 'may': 5, + 'jun': 6, + 'jul': 7, + 'aug': 8, + 'sep': 9, + 'oct': 10, + 'nov': 11, + 'dec': 12 +} -def isSmartContract(text): - textList = text.split(' ') - for word in textList: - if word == '': - continue - if word.endswith('@') and len(word) != 1: - return word - return False +# HELPER FUNCTIONS - -def isSmartContractPay(text): - wordlist = text.split(' ') - if len(wordlist) != 2: +# Find some value or return as noise +def apply_rule1(*argv): + a = argv[0](*argv[1:]) + if a is False: return False - smartContractTrigger = re.findall(r"smartContractTrigger:'.*'", text)[0].split('smartContractTrigger:')[1] - smartContractTrigger = smartContractTrigger[1:-1] - smartContractName = re.findall(r"smartContractName:.*@", text)[0].split('smartContractName:')[1] - smartContractName = smartContractName[:-1] - - if smartContractTrigger and smartContractName: - contractconditions = {'smartContractTrigger': smartContractTrigger, 'smartContractName': smartContractName} - return contractconditions else: + return a + + +def extract_substring_between(test_str, sub1, sub2): + # getting index of substrings + idx1 = test_str.index(sub1) + idx2 = test_str.index(sub2) + + # length of substring 1 is added to + # get string from next character + res = test_str[idx1 + len(sub1) + 1: idx2] + + # return result + return res + +# StateF functions +def isStateF(text): + try: + 
statef_string = extract_substring_between(text, 'statef', 'end-statef').strip() + i=iter(statef_string.split(":")) + statef_list = [":".join(x) for x in zip(i,i)] + statef = {} + for keyval in statef_list: + keyval = keyval.split(':') + statef[keyval[0]] = keyval[1] + return statef + except: return False -def extractAmount(text, marker): - count = 0 - returnval = None - splitText = text.split('userchoice')[0].split(' ') - - for word in splitText: - word = word.replace(marker, '') - try: - float(word) - count = count + 1 - returnval = float(word) - except ValueError: - pass - - if count > 1: - return 'Too many' - return returnval +# conflict_list = [['userchoice','payeeaddress'],['userchoice','xxx']] +def resolve_incategory_conflict(input_dictionary , conflict_list): + for conflict_pair in conflict_list: + key0 = conflict_pair[0] + key1 = conflict_pair[1] + dictionary_keys = input_dictionary.keys() + if (key0 in dictionary_keys and key1 in dictionary_keys) or (key0 not in dictionary_keys and key1 not in dictionary_keys): + return False + else: + return True -def extractMarker(text): - textList = text.split(' ') - for word in textList: - if word == '': - continue - if word.endswith('#') and len(word) != 1: - return word - return False - - -def extractInitTokens(text): - base_units = {'thousand': 10 ** 3, 'million': 10 ** 6, 'billion': 10 ** 9, 'trillion': 10 ** 12} - textList = text.split(' ') - counter = 0 - value = None - for idx, word in enumerate(textList): - try: - result = float(word) - if textList[idx + 1] in base_units: - value = result * base_units[textList[idx + 1]] - counter = counter + 1 - else: - value = result - counter = counter + 1 - except: - for unit in base_units: - result = word.split(unit) - if len(result) == 2 and result[1] == '' and result[0] != '': - try: - value = float(result[0]) * base_units[unit] - counter = counter + 1 - except: - continue - - if counter == 1: - return value +def remove_empty_from_dict(d): + if type(d) is dict: + return 
dict((k, remove_empty_from_dict(v)) for k, v in d.items() if v and remove_empty_from_dict(v)) + elif type(d) is list: + return [remove_empty_from_dict(v) for v in d if v and remove_empty_from_dict(v)] else: - return None + return d -def extractAddress(text): - textList = text.split(' ') - for word in textList: - if word == '': - continue - if word[-1] == '$' and len(word) != 1: - return word - return None +def outputreturn(*argv): + if argv[0] == 'noise': + parsed_data = {'type': 'noise'} + return parsed_data + elif argv[0] == 'token_incorporation': + parsed_data = { + 'type': 'tokenIncorporation', + 'flodata': argv[1], #string + 'tokenIdentification': argv[2], #hashList[0][:-1] + 'tokenAmount': argv[3], #initTokens + 'stateF': argv[4] + } + return parsed_data + elif argv[0] == 'token_transfer': + parsed_data = { + 'type': 'transfer', + 'transferType': 'token', + 'flodata': argv[1], #string + 'tokenIdentification': argv[2], #hashList[0][:-1] + 'tokenAmount': argv[3], #amount + 'stateF': argv[4] + } + return parsed_data + elif argv[0] == 'one-time-event-userchoice-smartcontract-incorporation': + parsed_data = { + 'type': 'smartContractIncorporation', + 'contractType': 'one-time-event', + 'subtype': 'external-trigger', + 'tokenIdentification': argv[1], #hashList[0][:-1] + 'contractName': argv[2], #atList[0][:-1] + 'contractAddress': argv[3], #contractaddress[:-1] + 'flodata': argv[4], #string + 'contractConditions': { + 'contractAmount' : argv[5], + 'minimumsubscriptionamount' : argv[6], + 'maximumsubscriptionamount' : argv[7], + 'userchoices' : argv[8], + 'expiryTime' : argv[9], + 'unix_expiryTime': argv[10] + }, + 'stateF': argv[11] + } + return remove_empty_from_dict(parsed_data) + elif argv[0] == 'one-time-event-userchoice-smartcontract-participation': + parsed_data = { + 'type': 'transfer', + 'transferType': 'smartContract', + 'flodata': argv[1], #string + 'tokenIdentification': argv[2], #hashList[0][:-1] + 'operation': 'transfer', + 'tokenAmount': argv[3], 
#amount + 'contractName': argv[4], #atList[0][:-1] + 'contractAddress': argv[5], + 'userChoice': argv[6], #userChoice + 'stateF': argv[7] + } + return remove_empty_from_dict(parsed_data) + elif argv[0] == 'one-time-event-userchoice-smartcontract-trigger': + parsed_data = { + 'type': 'smartContractPays', + 'contractName': argv[1], #atList[0][:-1] + 'triggerCondition': argv[2], #triggerCondition.group().strip()[1:-1] + 'stateF': argv[3] + } + return parsed_data + elif argv[0] == 'one-time-event-time-smartcontract-incorporation': + parsed_data = { + 'type': 'smartContractIncorporation', + 'contractType': 'one-time-event', + 'subtype': 'time-trigger', + 'tokenIdentification': argv[1], #hashList[0][:-1] + 'contractName': argv[2], #atList[0][:-1] + 'contractAddress': argv[3], #contractaddress[:-1] + 'flodata': argv[4], #string + 'contractConditions': { + 'contractAmount' : argv[5], + 'minimumsubscriptionamount' : argv[6], + 'maximumsubscriptionamount' : argv[7], + 'payeeAddress' : argv[8], + 'expiryTime' : argv[9], + 'unix_expiryTime' : argv[10] + }, + 'stateF': argv[11] + } + return remove_empty_from_dict(parsed_data) + elif argv[0] == 'continuos-event-token-swap-incorporation': + parsed_data = { + 'type': 'smartContractIncorporation', + 'contractType': 'continuos-event', + 'tokenIdentification': argv[1], #hashList[0][:-1] + 'contractName': argv[2], #atList[0][:-1] + 'contractAddress': argv[3], #contractaddress[:-1] + 'flodata': argv[4], #string + 'contractConditions': { + 'subtype' : argv[5], #tokenswap + 'accepting_token' : argv[6], + 'selling_token' : argv[7], + 'pricetype' : argv[8], + 'price' : argv[9], + 'oracle_address' : argv[11] + }, + 'stateF': argv[10] + } + return parsed_data + elif argv[0] == 'continuos-event-token-swap-deposit': + parsed_data = { + 'type': 'smartContractDeposit', + 'tokenIdentification': argv[1], #hashList[0][:-1] + 'depositAmount': argv[2], #depositAmount + 'contractName': argv[3], #atList[0][:-1] + 'flodata': argv[4], #string + 
'depositConditions': { + 'expiryTime' : argv[5] + }, + 'stateF': argv[6] + } + return parsed_data + elif argv[0] == 'smart-contract-one-time-event-continuos-event-participation': + parsed_data = { + 'type': 'transfer', + 'transferType': 'smartContract', + 'flodata': argv[1], #string + 'tokenIdentification': argv[2], #hashList[0][:-1] + 'tokenAmount': argv[3], #amount + 'contractName': argv[4], #atList[0][:-1] + 'contractAddress': argv[5], + 'stateF': argv[6] + } + return remove_empty_from_dict(parsed_data) + elif argv[0] == 'nft_create': + parsed_data = { + 'type': 'nftIncorporation', + 'flodata': argv[1], #string + 'tokenIdentification': argv[2], #hashList[0][:-1] + 'tokenAmount': argv[3], #initTokens, + 'nftHash': argv[4], #nftHash + 'stateF': argv[5] + } + return parsed_data + elif argv[0] == 'nft_transfer': + parsed_data = { + 'type': 'transfer', + 'transferType': 'nft', + 'flodata': argv[1], #string + 'tokenIdentification': argv[2], #hashList[0][:-1] + 'tokenAmount': argv[3], #initTokens, + 'stateF': argv[4] + } + return parsed_data + elif argv[0] == 'infinite_token_create': + parsed_data = { + 'type': 'infiniteTokenIncorporation', + 'flodata': argv[1], #string + 'tokenIdentification': argv[2], #hashList[0][:-1] + 'stateF': argv[3] + } + return parsed_data -def extractContractType(text): - operationList = ['one-time-event*'] # keep everything lowercase - count = 0 - returnval = None - for operation in operationList: - count = count + text.count(operation) - if count > 1: - return 'Too many' - if count == 1 and (returnval is None): - returnval = operation - return returnval +def extract_specialcharacter_words(rawstring, special_characters): + wordList = [] + for word in rawstring.split(' '): + if (len(word) not in [0,1] or word==":") and word[-1] in special_characters: + wordList.append(word) + return wordList -def extractUserchoice(text): - result = re.split('userchoice:\s*', text) - if len(result) != 1 and result[1] != '': - return 
result[1].strip().strip('"').strip("'") - else: - return None - - -def brackets_toNumber(item): - return float(item[1:-1]) - - -def extractContractConditions(text, contracttype, marker, blocktime): - rulestext = re.split('contract-conditions:\s*', text)[-1] +def extract_contract_conditions(text, contract_type, marker=None, blocktime=None): + try: + rulestext = extract_substring_between(text, 'contract-conditions', 'end-contract-conditions') + except: + return False + if rulestext.strip()[0] == ':': + rulestext = rulestext.strip()[1:].strip() + #rulestext = re.split('contract-conditions:\s*', text)[-1] # rulelist = re.split('\d\.\s*', rulestext) rulelist = [] numberList = re.findall(r'\(\d\d*\)', rulestext) @@ -175,8 +370,8 @@ def extractContractConditions(text, contracttype, marker, blocktime): numberList = sorted(numberList) for idx, item in enumerate(numberList): if numberList[idx] + 1 != numberList[idx + 1]: - print('Contract condition numbers are not in order') - return None + logger.info('Contract condition numbers are not in order') + return False if idx == len(numberList) - 2: break @@ -184,207 +379,904 @@ def extractContractConditions(text, contracttype, marker, blocktime): rule = rulestext.split('({})'.format(i + 1))[1].split('({})'.format(i + 2))[0] rulelist.append(rule.strip()) - if contracttype == 'one-time-event*': + if contract_type == 'one-time-event': extractedRules = {} - for rule in rulelist: if rule == '': continue elif rule[:10] == 'expirytime': expirytime = re.split('expirytime[\s]*=[\s]*', rule)[1].strip() - try: expirytime_split = expirytime.split(' ') - parse_string = '{}/{}/{} {}'.format(expirytime_split[3], months[expirytime_split[1]], - expirytime_split[2], expirytime_split[4]) + parse_string = '{}/{}/{} {}'.format(expirytime_split[3], months[expirytime_split[1]], expirytime_split[2], expirytime_split[4]) expirytime_object = arrow.get(parse_string, 'YYYY/M/D HH:mm:ss').replace(tzinfo=expirytime_split[5]) blocktime_object = 
arrow.get(blocktime) if expirytime_object < blocktime_object: - print( - 'Expirytime of the contract is earlier than the block it is incorporated in. This incorporation will be rejected ') - return None + logger.info('Expirytime of the contract is earlier than the block it is incorporated in. This incorporation will be rejected ') + return False extractedRules['expiryTime'] = expirytime + extractedRules['unix_expiryTime'] = expirytime_object.timestamp() except: - print('Error parsing expiry time') - return None + logger.info('Error parsing expiry time') + return False for rule in rulelist: if rule == '': continue elif rule[:14] == 'contractamount': - pattern = re.compile('[^contractamount\s*=\s*].*') - searchResult = pattern.search(rule).group(0) + pattern = re.compile('(^contractamount\s*=\s*)(.*)') + searchResult = pattern.search(rule).group(2) contractamount = searchResult.split(marker)[0] try: extractedRules['contractAmount'] = float(contractamount) except: - print("Contract amount entered is not a decimal") + logger.info("Contract amount entered is not a decimal") elif rule[:11] == 'userchoices': - pattern = re.compile('[^userchoices\s*=\s*].*') - conditions = pattern.search(rule).group(0) + pattern = re.compile('(^userchoices\s*=\s*)(.*)') + conditions = pattern.search(rule).group(2) conditionlist = conditions.split('|') extractedRules['userchoices'] = {} for idx, condition in enumerate(conditionlist): extractedRules['userchoices'][idx] = condition.strip() elif rule[:25] == 'minimumsubscriptionamount': - pattern = re.compile('[^minimumsubscriptionamount\s*=\s*].*') - searchResult = pattern.search(rule).group(0) + pattern = re.compile('(^minimumsubscriptionamount\s*=\s*)(.*)') + searchResult = pattern.search(rule).group(2) minimumsubscriptionamount = searchResult.split(marker)[0] try: extractedRules['minimumsubscriptionamount'] = float(minimumsubscriptionamount) except: - print("Minimum subscription amount entered is not a decimal") + logger.info("Minimum 
subscription amount entered is not a decimal") elif rule[:25] == 'maximumsubscriptionamount': - pattern = re.compile('[^maximumsubscriptionamount\s*=\s*].*') - searchResult = pattern.search(rule).group(0) + pattern = re.compile('(^maximumsubscriptionamount\s*=\s*)(.*)') + searchResult = pattern.search(rule).group(2) maximumsubscriptionamount = searchResult.split(marker)[0] try: extractedRules['maximumsubscriptionamount'] = float(maximumsubscriptionamount) except: - print("Maximum subscription amount entered is not a decimal") + logger.info("Maximum subscription amount entered is not a decimal") elif rule[:12] == 'payeeaddress': - pattern = re.compile('[^payeeAddress\s*=\s*].*') - searchResult = pattern.search(rule).group(0) + pattern = re.compile('(^payeeaddress\s*=\s*)(.*)') + searchResult = pattern.search(rule).group(2) payeeAddress = searchResult.split(marker)[0] + payeeAddress = payeeAddress.strip('"') + payeeAddress = payeeAddress.strip("'") extractedRules['payeeAddress'] = payeeAddress if len(extractedRules) > 1 and 'expiryTime' in extractedRules: return extractedRules else: - return None - return None + return False + + elif contract_type == 'continuous-event': + extractedRules = {} + for rule in rulelist: + if rule == '': + continue + elif rule[:7] == 'subtype': + # todo : recheck the regular expression for subtype, find an elegant version which covers all permutations and combinations + pattern = re.compile('(^subtype\s*=\s*)(.*)') + subtype = pattern.search(rule).group(2) + extractedRules['subtype'] = subtype + elif rule[:15] == 'accepting_token': + pattern = re.compile('(?<=accepting_token\s=\s)(.*)(? 
1: + return extractedRules + else: + return False + return False -def extractTriggerCondition(text): +def extract_tokenswap_contract_conditions(processed_text, contract_type, contract_token): + rulestext = re.split('contract-conditions:\s*', processed_text)[-1] + rulelist = [] + numberList = re.findall(r'\(\d\d*\)', rulestext) + + for idx, item in enumerate(numberList): + numberList[idx] = int(item[1:-1]) + + numberList = sorted(numberList) + for idx, item in enumerate(numberList): + if numberList[idx] + 1 != numberList[idx + 1]: + logger.info('Contract condition numbers are not in order') + return False + if idx == len(numberList) - 2: + break + + for i in range(len(numberList)): + rule = rulestext.split('({})'.format(i + 1))[1].split('({})'.format(i + 2))[0] + rulelist.append(rule.strip()) + + if contract_type == 'continuous-event': + extractedRules = {} + for rule in rulelist: + if rule == '': + continue + elif rule[:7] == 'subtype': + # todo : recheck the regular expression for subtype, find an elegant version which covers all permutations and combinations + pattern = re.compile('(^subtype\s*=\s*)(.*)') + searchResult = pattern.search(rule).group(2) + subtype = searchResult.split(marker)[0] + #extractedRules['subtype'] = rule.split('=')[1].strip() + extractedRules['subtype'] = subtype + elif rule[:15] == 'accepting_token': + pattern = re.compile('(?<=accepting_token\s=\s).*(? 
1: + return extractedRules + else: + return False + + return False + + +def extract_deposit_conditions(text, blocktime=None): + rulestext = re.split('deposit-conditions:\s*', text)[-1] + # rulelist = re.split('\d\.\s*', rulestext) + rulelist = [] + numberList = re.findall(r'\(\d\d*\)', rulestext) + for idx, item in enumerate(numberList): + numberList[idx] = int(item[1:-1]) + + numberList = sorted(numberList) + for idx, item in enumerate(numberList): + if len(numberList) > 1 and numberList[idx] + 1 != numberList[idx + 1]: + logger.info('Deposit condition numbers are not in order') + return False + if idx == len(numberList) - 2: + break + + for i in range(len(numberList)): + rule = rulestext.split('({})'.format(i + 1))[1].split('({})'.format(i + 2))[0] + rulelist.append(rule.strip()) + + # elif contracttype == 'continuous-event*': + extractedRules = {} + for rule in rulelist: + if rule == '': + continue + elif rule[:10] == 'expirytime': + expirytime = re.split('expirytime[\s]*=[\s]*', rule)[1].strip() + try: + expirytime_split = expirytime.split(' ') + parse_string = '{}/{}/{} {}'.format(expirytime_split[3], months[expirytime_split[1]], expirytime_split[2], expirytime_split[4]) + expirytime_object = arrow.get(parse_string, 'YYYY/M/D HH:mm:ss').replace(tzinfo=expirytime_split[5]) + blocktime_object = arrow.get(blocktime) + if expirytime_object < blocktime_object: + logger.info('Expirytime of the contract is earlier than the block it is incorporated in. 
This incorporation will be rejected ') + return False + extractedRules['expiryTime'] = expirytime + except: + logger.info('Error parsing expiry time') + return False + + """for rule in rulelist: + if rule == '': + continue + elif rule[:7] == 'subtype': + subtype=rule[8:] + #pattern = re.compile('[^subtype\s*=\s*].*') + #searchResult = pattern.search(rule).group(0) + #contractamount = searchResult.split(marker)[0] + extractedRules['subtype'] = subtype """ + + if len(extractedRules) > 0: + return extractedRules + else: + return False + + +def extract_special_character_word(special_character_list, special_character): + for word in special_character_list: + if word.endswith(special_character): + return word[:-1] + return False + + +def extract_NFT_hash(clean_text): + nft_hash = re.search(r"(?:0[xX])?[0-9a-fA-F]{64}",clean_text) + if nft_hash is None: + return False + else: + return nft_hash.group(0) + + +def find_original_case(contract_address, original_text): + dollar_word = extract_specialcharacter_words(original_text,["$"]) + if len(dollar_word)==1 and dollar_word[0][:-1].lower()==contract_address: + return dollar_word[0][:-1] + else: + None + +def find_original_case_regex(floaddress, original_text): + pattern = f'(?i)\b({floaddress})\b' + regex_match = re.findall(r'(?i)\b(' + re.escape(floaddress) + r')\b', original_text) + if len(regex_match) == 0: # or len(amount_tuple) > 1 : + return False + else: + return regex_match[0] + + +def find_word_index_fromstring(originaltext, word): + lowercase_text = originaltext.lower() + result = lowercase_text.find(word) + return originaltext[result:result+len(word)] + + +def find_first_classification(parsed_word_list, search_patterns): + for first_classification in search_patterns.keys(): + counter = 0 + for key in search_patterns[first_classification].keys(): + if checkSearchPattern(parsed_word_list, search_patterns[first_classification][key]): + return 
{'categorization':f"{first_classification}",'key':f"{key}",'pattern':search_patterns[first_classification][key], 'wordlist':parsed_word_list} + return {'categorization':"noise"} + + +def sort_specialcharacter_wordlist(inputlist): + weight_values = { + '@': 5, + '*': 4, + '#': 3, + '$': 2 + } + + weightlist = [] + for word in inputlist: + if word.endswith("@"): + weightlist.append(5) + elif word.endswith("*"): + weightlist.append(4) + elif word.endswith("#"): + weightlist.append(4) + elif word.endswith("$"): + weightlist.append(4) + + +def firstclassification_rawstring(rawstring): + specialcharacter_wordlist = extract_specialcharacter_words(rawstring,['@','*','$','#',':']) + first_classification = find_first_classification(specialcharacter_wordlist, search_patterns) + return first_classification + + +def checkSearchPattern(parsed_list, searchpattern): + if len(parsed_list)!=len(searchpattern): + return False + else: + for idx,val in enumerate(parsed_list): + if not parsed_list[idx].endswith(searchpattern[idx]): + return False + return True + + +def extractAmount_rule_new(text): + base_units = {'thousand': 10 ** 3, 'k': 10 ** 3, 'lakh':10 ** 5, 'crore':10 ** 7, 'million': 10 ** 6, 'm': 10 ** 6, 'billion': 10 ** 9, 'b': 10 ** 9, 'trillion': 10 ** 12, 'quadrillion':10 ** 15} + + # appending whitespace because the regex does not recognize a number at the start & end of string ie. "send rmt# 45" + text = f" {text} " + text = text.replace("'", "") + text = text.replace('"', '') + + amount_tuple = re.findall(r'(? 
1 or len(amount_tuple) == 0: + return False + else: + amount_tuple_list = list(amount_tuple[0]) + extracted_amount = float(amount_tuple_list[0]) + extracted_base_unit = amount_tuple_list[1] + if extracted_base_unit in base_units.keys(): + extracted_amount = float(extracted_amount) * base_units[extracted_base_unit] + return extracted_amount + +def extractAmount_rule_new1(text, split_word=None, split_direction=None): + base_units = {'thousand': 10 ** 3, 'k': 10 ** 3, 'million': 10 ** 6, 'm': 10 ** 6, 'billion': 10 ** 9, 'b': 10 ** 9, 'trillion': 10 ** 12, 'lakh':10 ** 5, 'crore':10 ** 7, 'quadrillion':10 ** 15} + if split_word and split_direction: + if split_direction=='pre': + text = text.split(split_word)[0] + if split_direction=='post': + text = text.split(split_word)[1] + + # appending whitespace because the regex does not recognize a number at the start & end of string ie. "send rmt# 45" + text = f" {text} " + text = text.replace("'", "") + text = text.replace('"', '') + amount_tuple = re.findall(r'(? 
1 or len(amount_tuple) == 0: + return False + else: + amount_tuple_list = list(amount_tuple[0]) + extracted_amount = float(amount_tuple_list[0]) + extracted_base_unit = amount_tuple_list[1] + if extracted_base_unit in base_units.keys(): + extracted_amount = float(extracted_amount) * base_units[extracted_base_unit] + return extracted_amount + + +def extract_userchoice(text): + result = re.split('userchoice:\s*', text) + if len(result) != 1 and result[1] != '': + return result[1].strip().strip('"').strip("'") + else: + return False + + +def findWholeWord(w): + return re.compile(r'\b({0})\b'.format(w), flags=re.IGNORECASE).search + + +def check_flo_address(floaddress, is_testnet): + if pyflo.is_address_valid(floaddress, testnet=is_testnet): + return floaddress + else: + return False + + +def extract_trigger_condition(text): searchResult = re.search('\".*\"', text) if searchResult is None: searchResult = re.search('\'.*\'', text) - return searchResult - return searchResult + + if searchResult is not None: + return searchResult.group().strip()[1:-1] + else: + return False -# Combine test -def parse_flodata(string, blockinfo, netvariable): - # todo Rule 20 - remove 'text:' from the start of flodata if it exists - if string[0:5] == 'text:': - string = string.split('text:')[1] - - # todo Rule 21 - Collapse multiple spaces into a single space in the whole of flodata - # todo Rule 22 - convert flodata to lowercase to make the system case insensitive - nospacestring = re.sub(' +', ' ', string) - cleanstring = nospacestring.lower() - - # todo Rule 23 - Count number of words ending with @ and # - atList = [] - hashList = [] - - for word in cleanstring.split(' '): - if word.endswith('@') and len(word) != 1: - atList.append(word) - if word.endswith('#') and len(word) != 1: - hashList.append(word) - - # todo Rule 24 - Reject the following conditions - a. 
number of # & number of @ is equal to 0 then reject - # todo Rule 25 - If number of # or number of @ is greater than 1, reject - # todo Rule 25.a - If a transaction is rejected, it means parsed_data type is noise - # Filter noise first - check if the words end with either @ or # - if (len(atList) == 0 and len(hashList) == 0) or len(atList) > 1 or len(hashList) > 1: - parsed_data = {'type': 'noise'} - - # todo Rule 26 - if number of # is 1 and number of @ is 0, then check if its token creation or token transfer transaction - - elif len(hashList) == 1 and len(atList) == 0: - # Passing the above check means token creation or transfer - incorporation = isIncorp(cleanstring) - transfer = isTransfer(cleanstring) - - # todo Rule 27 - if (neither token incorporation and token transfer) OR both token incorporation and token transfer, reject - if (not incorporation and not transfer) or (incorporation and transfer): - parsed_data = {'type': 'noise'} - - # todo Rule 28 - if token creation and not token transfer then it is confirmed that is it a token creation transaction - # todo Rule 29 - Extract total number of tokens issued, if its not mentioned then reject - elif incorporation and not transfer: - initTokens = extractInitTokens(cleanstring) - if initTokens is not None: - parsed_data = {'type': 'tokenIncorporation', 'flodata': string, 'tokenIdentification': hashList[0][:-1], - 'tokenAmount': initTokens} - else: - parsed_data = {'type': 'noise'} - - # todo Rule 30 - if not token creation and is token transfer then then process it for token transfer rules - # todo Rule 31 - Extract number of tokens to be sent and the address to which to be sent, both data is mandatory - elif not incorporation and transfer: - amount = extractAmount(cleanstring, hashList[0][:-1]) - if None not in [amount] and amount!='Too many': - parsed_data = {'type': 'transfer', 'transferType': 'token', 'flodata': string, - 'tokenIdentification': hashList[0][:-1], - 'tokenAmount': amount} - else: - 
parsed_data = {'type': 'noise'} - - # todo Rule 32 - if number of # is 1 and number of @ is 1, then process for smart contract transfer or creation - elif len(hashList) == 1 and len(atList) == 1: - # Passing the above check means Smart Contract creation or transfer - incorporation = isIncorp(cleanstring) - transfer = isTransfer(cleanstring) - - # todo Rule 33 - if a confusing smart contract command is given, like creating and sending at the same time, or no - if (not incorporation and not transfer) or (incorporation and transfer): - parsed_data = {'type': 'noise'} - - # todo Rule 34 - if incorporation and not transfer, then extract type of contract, address of the contract and conditions of the contract. Reject if any of those is not present - elif incorporation and not transfer: - contracttype = extractContractType(cleanstring) - contractaddress = extractAddress(nospacestring) - contractconditions = extractContractConditions(cleanstring, contracttype, marker=hashList[0][:-1], - blocktime=blockinfo['time']) - - if config['DEFAULT']['NET'] == 'mainnet' and blockinfo['height'] < 3454510: - if None not in [contracttype, contractconditions]: - parsed_data = {'type': 'smartContractIncorporation', 'contractType': contracttype[:-1], - 'tokenIdentification': hashList[0][:-1], 'contractName': atList[0][:-1], - 'contractAddress': contractaddress[:-1], 'flodata': string, - 'contractConditions': contractconditions} - else: - parsed_data = {'type': 'noise'} - else: - if None not in [contracttype, contractaddress, contractconditions]: - parsed_data = {'type': 'smartContractIncorporation', 'contractType': contracttype[:-1], - 'tokenIdentification': hashList[0][:-1], 'contractName': atList[0][:-1], - 'contractAddress': contractaddress[:-1], 'flodata': string, - 'contractConditions': contractconditions} - else: - parsed_data = {'type': 'noise'} - - # todo Rule 35 - if it is not incorporation and it is transfer, then extract smart contract amount to be locked and userPreference. 
If any of them is missing, then reject - elif not incorporation and transfer: - # We are at the send/transfer of smart contract - amount = extractAmount(cleanstring, hashList[0][:-1]) - userChoice = extractUserchoice(cleanstring) - contractaddress = extractAddress(nospacestring) - if None not in [amount, userChoice]: - parsed_data = {'type': 'transfer', 'transferType': 'smartContract', 'flodata': string, - 'tokenIdentification': hashList[0][:-1], - 'operation': 'transfer', 'tokenAmount': amount, 'contractName': atList[0][:-1], - 'userChoice': userChoice} - if contractaddress: - parsed_data['contractAddress'] = contractaddress[:-1] - else: - parsed_data = {'type': 'noise'} +# Regex pattern for Smart Contract and Token name ^[A-Za-z][A-Za-z0-9_-]*[A-Za-z0-9]$ +def check_regex(pattern, test_string): + matched = re.match(pattern, test_string) + is_match = bool(matched) + return is_match - # todo Rule 36 - Check for only a single @ and the substring "smart contract system says" in flodata, else reject - elif (len(hashList) == 0 and len(atList) == 1): - # Passing the above check means Smart Contract pays | exitcondition triggered from the committee - # todo Rule 37 - Extract the trigger condition given by the committee. 
If its missing, reject - triggerCondition = extractTriggerCondition(cleanstring) - if triggerCondition is not None: - parsed_data = {'type': 'smartContractPays', 'contractName': atList[0][:-1], - 'triggerCondition': triggerCondition.group().strip()[1:-1]} - else: - parsed_data = {'type': 'noise'} +def check_existence_of_keyword(inputlist, keywordlist): + for word in keywordlist: + if not word in inputlist: + return False + return True + + +def check_word_existence_instring(word, text): + word_exists = re.search(fr"\b{word}\b",text) + if word_exists is None: + return False else: - parsed_data = {'type': 'noise'} + return word_exists.group(0) - return parsed_data +send_category = ['transfer', 'send', 'give'] # keep everything lowercase +create_category = ['incorporate', 'create', 'start'] # keep everything lowercase +deposit_category = ['submit','deposit'] + + +def truefalse_rule2(rawstring, permitted_list, denied_list): + # Find transfer , send , give + foundPermitted = None + foundDenied = None + + for word in permitted_list: + if findWholeWord(word)(rawstring): + foundPermitted = word + break + + for word in denied_list: + if findWholeWord(word)(rawstring): + foundDenied = word + break + + if (foundPermitted is not None) and (foundDenied is None): + return True + else: + return False + + +def selectCategory(rawstring, category1, category2): + foundCategory1 = None + foundCategory2 = None + + for word in category1: + if findWholeWord(word)(rawstring): + foundCategory1 = word + break + + for word in category2: + if findWholeWord(word)(rawstring): + foundCategory2 = word + break + + if ((foundCategory1 is not None) and (foundCategory2 is not None)) or ((foundCategory1 is None) and (foundCategory2 is None)): + return False + elif foundCategory1 is not None: + return 'category1' + elif foundCategory2 is not None: + return 'category2' + + +def select_category_reject(rawstring, category1, category2, reject_list): + foundCategory1 = None + foundCategory2 = None + 
rejectCategory = None + + for word in category1: + if findWholeWord(word)(rawstring): + foundCategory1 = word + break + + for word in category2: + if findWholeWord(word)(rawstring): + foundCategory2 = word + break + + for word in reject_list: + if findWholeWord(word)(rawstring): + rejectCategory = word + break + + if ((foundCategory1 is not None) and (foundCategory2 is not None)) or ((foundCategory1 is None) and (foundCategory2 is None)) or (rejectCategory is not None): + return False + elif foundCategory1 is not None: + return 'category1' + elif foundCategory2 is not None: + return 'category2' + + +def text_preprocessing(original_text): + # strip white spaces at the beginning and end + processed_text = original_text.strip() + # remove tab spaces + processed_text = re.sub('\t', ' ', processed_text) + # remove new lines/line changes + processed_text = re.sub('\n', ' ', processed_text) + # add a white space after every special character found + processed_text = re.sub("contract-conditions:", "contract-conditions: ", processed_text) + processed_text = re.sub("deposit-conditions:", "deposit-conditions: ", processed_text) + processed_text = re.sub("userchoice:", "userchoice: ", processed_text) + # remove extra whitespaces in between + processed_text = ' '.join(processed_text.split()) + processed_text = re.sub(' +', ' ', processed_text) + clean_text = processed_text + # make everything lowercase + processed_text = processed_text.lower() + + return clean_text,processed_text + + +# TODO - REMOVE SAMPLE TEXT +text_list = [ + "create 500 million rmt#", + + "transfer 200 rmt#", + + "Create Smart Contract with the name India-elections-2019@ of the type one-time-event* using the asset rmt# at the address F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1$ with contract-conditions: (1) contractAmount=0.001rmt (2) userChoices=Narendra Modi wins| Narendra Modi loses (3) expiryTime= Wed May 22 2019 21:00:00 GMT+0530", + + "send 0.001 rmt# to india-elections-2019@ to FLO address 
F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1 with the userchoice:'narendra modi wins'", + + "india-elections-2019@ winning-choice:'narendra modi wins'", + + "Create Smart Contract with the name India-elections-2019@ of the type one-time-event* using the asset rmt# at the address F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1$ with contract-conditions: (1) contractAmount=0.001rmt (2) expiryTime= Wed May 22 2019 21:00:00 GMT+0530", + + "send 0.001 rmt# to india-elections-2019@ to FLO address F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1", + + "Create Smart Contract with the name swap-rupee-bioscope@ of the type continuous-event* at the address oRRCHWouTpMSPuL6yZRwFCuh87ZhuHoL78$ with contract-conditions : (1) subtype = tokenswap (2) accepting_token = rupee# (3) selling_token = bioscope# (4) price = '15' (5) priceType = ‘predetermined’ (6) direction = oneway", + + "Deposit 15 bioscope# to swap-rupee-bioscope@ its FLO address being oRRCHWouTpMSPuL6yZRwFCuh87ZhuHoL78$ with deposit-conditions: (1) expiryTime= Wed Nov 17 2021 21:00:00 GMT+0530 ", + + "Send 15 rupee# to swap-rupee-article@ its FLO address being FJXw6QGVVaZVvqpyF422Aj4FWQ6jm8p2dL$", + + "send 0.001 rmt# to india-elections-2019@ to FLO address F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1 with the userchoice:'narendra modi wins'" +] + +text_list1 = [ + + 'create usd# as infinite-token', + 'transfer 10 usd#', + + 'Create 100 albumname# as NFT with 2CF24DBA5FB0A30E26E83B2AC5B9E29E1B161E5C1FA7425E73043362938B9824 as asset hash', + 'Transfer 10 albumname# nft', + + 'Create 400 rmt#', + 'Transfer 20 rmt#' +] + +text_list2 = [ + '''Create Smart Contract with the name swap-rupee-bioscope@ of the type continuous-event* + at the address stateF=bitcoin_price_source:bitpay:usd_inr_exchange_source:bitpay end-stateF oYzeeUBWRpzRuczW6myh2LHGnXPyR2Bc6k$ with contract-conditions : + (1) subtype = tokenswap + (2) accepting_token = rupee# + (3) selling_token = sreeram# + (4) price = "15" + (5) priceType="predetermined" end-contract-conditions''', + + ''' + Create a 
smart contract of the name simple-crowd-fund@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Tue Sep 13 2022 16:10:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7 end-contract-conditions + ''', + + ''' + Create a smart contract of the name simple-crowd-fund@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Tue Sep 13 2022 16:10:00 GMT+0530 (2) payeeAddress=oU412TvcMe2ah2xzqFpA95vBJ1RoPZY1LR:10:oVq6QTUeNLh8sapQ6J6EjMQMKHxFCt3uAq:20:oLE79kdHPEZ2bxa3PwtysbJeLo9hvPgizU:60:ocdCT9RAzWVsUncMu24r3HXKXFCXD7gTqh:10 end-contract-conditions + ''', + ''' + Create a smart contract of the name simple-crowd-fund@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Tue Sep 13 2022 16:10:00 GMT+0530 (2) payeeAddress=oU412TvcMe2ah2xzqFpA95vBJ1RoPZY1LR end-contract-conditions + ''', + ''' + Create a smart contract of the name all-crowd-fund-7@ of the type one-time-event* using asset bioscope# at the FLO address oYX4GvBYtfTBNyUFRCdtYubu7ZS4gchvrb$ with contract-conditions:(1) expiryTime= Sun Nov 15 2022 12:30:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 (5) contractAmount=0.6 end-contract-conditions + ''', + ''' + Create a smart contract of the name all-crowd-fund-7@ of the type one-time-event* using asset bioscope# at the FLO address oYX4GvBYtfTBNyUFRCdtYubu7ZS4gchvrb$ with contract-conditions:(1) expiryTime= Sun Nov 15 2022 12:30:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:0:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:30:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) 
minimumsubscriptionamount=1 (4) contractAmount=0.6 end-contract-conditions + ''', + '''send 0.02 bioscope# to twitter-survive@ to FLO address oVbebBNuERWbouDg65zLfdataWEMTnsL8r with the userchoice: survives''', + ''' + Create a smart contract of the name twitter-survive@ of the type one-time-event* using asset bioscope# at the FLO address oVbebBNuERWbouDg65zLfdataWEMTnsL8r$ with contract-conditions:(1) expiryTime= Sun Nov 15 2022 14:55:00 GMT+0530 (2) userchoices= survives | dies (3) minimumsubscriptionamount=0.04 (4) maximumsubscriptionamount=1 (5) contractAmount=0.02 end-contract-conditions + ''', + ''' + create 0 teega# + ''' +] + + +logger = logging.getLogger(__name__) +logger.setLevel(logging.DEBUG) + +formatter = logging.Formatter('%(asctime)s:%(name)s:%(message)s') + +file_handler = logging.FileHandler('tracking.log') +file_handler.setLevel(logging.INFO) +file_handler.setFormatter(formatter) + +stream_handler = logging.StreamHandler() +stream_handler.setFormatter(formatter) + +logger.addHandler(file_handler) +logger.addHandler(stream_handler) + +def parse_flodata(text, blockinfo, net): + if net == 'testnet': + is_testnet = True + else: + is_testnet = False + + if text == '': + return outputreturn('noise') + + clean_text, processed_text = text_preprocessing(text) + # System state + print("Processing stateF") + stateF_mapping = isStateF(processed_text) + first_classification = firstclassification_rawstring(processed_text) + parsed_data = None + + + if first_classification['categorization'] == 'tokensystem-C': + # Resolving conflict for 'tokensystem-C' + tokenname = first_classification['wordlist'][0][:-1] + if not check_regex("^[A-Za-z][A-Za-z0-9_-]*[A-Za-z0-9]$", tokenname): + return outputreturn('noise') + + isNFT = check_word_existence_instring('nft', processed_text) + + isInfinite = check_word_existence_instring('infinite-token', processed_text) + tokenamount = apply_rule1(extractAmount_rule_new, processed_text) + + ## Cannot be NFT and normal token and 
infinite token. Find what are the conflicts + # if its an NFT then tokenamount has to be integer and infinite keyword should not be present + # if its a normal token then isNFT and isInfinite should be None/False and token amount has to be present + # if its an infinite token then tokenamount should be None and isNFT should be None/False + # The supply of tokenAmount cannot be 0 + + ################################################## + + if (not tokenamount and not isInfinite) or (isNFT and not tokenamount.is_integer() and not isInfinite) or (isInfinite and tokenamount is not False and isNFT is not False) or (not isInfinite and tokenamount<=0): + return outputreturn('noise') + operation = apply_rule1(selectCategory, processed_text, send_category, create_category) + if operation == 'category1' and tokenamount is not None: + if isNFT: + return outputreturn('nft_transfer',f"{processed_text}", f"{tokenname}", tokenamount, stateF_mapping) + else: + return outputreturn('token_transfer',f"{processed_text}", f"{tokenname}", tokenamount, stateF_mapping) + elif operation == 'category2': + if isInfinite: + return outputreturn('infinite_token_create',f"{processed_text}", f"{tokenname}", stateF_mapping) + else: + if tokenamount is None: + return outputreturn('noise') + if isNFT: + nft_hash = extract_NFT_hash(clean_text) + if nft_hash is False: + return outputreturn('noise') + return outputreturn('nft_create',f"{processed_text}", f"{tokenname}", tokenamount, f"{nft_hash}", stateF_mapping) + else: + return outputreturn('token_incorporation',f"{processed_text}", f"{first_classification['wordlist'][0][:-1]}", tokenamount, stateF_mapping) + else: + return outputreturn('noise') + + if first_classification['categorization'] == 'smart-contract-creation-C': + # Resolving conflict for 'smart-contract-creation-C' + operation = apply_rule1(selectCategory, processed_text, create_category, send_category+deposit_category) + if not operation: + return outputreturn('noise') + + contract_type = 
extract_special_character_word(first_classification['wordlist'],'*') + if not check_existence_of_keyword(['one-time-event'],[contract_type]): + return outputreturn('noise') + + contract_name = extract_special_character_word(first_classification['wordlist'],'@') + if not check_regex("^[A-Za-z][A-Za-z0-9_-]*[A-Za-z0-9]$", contract_name): + return outputreturn('noise') + + contract_token = extract_special_character_word(first_classification['wordlist'],'#') + if not check_regex("^[A-Za-z][A-Za-z0-9_-]*[A-Za-z0-9]$", contract_token): + return outputreturn('noise') + + contract_address = extract_special_character_word(first_classification['wordlist'],'$') + contract_address = find_original_case(contract_address, clean_text) + if not check_flo_address(contract_address, is_testnet): + return outputreturn('noise') + + contract_conditions = extract_contract_conditions(processed_text, contract_type, contract_token, blocktime=blockinfo['time']) + if contract_conditions == False or not resolve_incategory_conflict(contract_conditions,[['userchoices','payeeAddress']]): + return outputreturn('noise') + else: + contractAmount = '' + if 'contractAmount' in contract_conditions.keys(): + contractAmount = contract_conditions['contractAmount'] + try: + if float(contractAmount)<=0: + return outputreturn('noise') + except: + return outputreturn('noise') + minimum_subscription_amount = '' + if 'minimumsubscriptionamount' in contract_conditions.keys(): + minimum_subscription_amount = contract_conditions['minimumsubscriptionamount'] + try: + if float(minimum_subscription_amount)<=0: + return outputreturn('noise') + except: + return outputreturn('noise') + maximum_subscription_amount = '' + if 'maximumsubscriptionamount' in contract_conditions.keys(): + maximum_subscription_amount = contract_conditions['maximumsubscriptionamount'] + try: + if float(maximum_subscription_amount)<=0: + return outputreturn('noise') + except: + return outputreturn('noise') + + if 'userchoices' in 
contract_conditions.keys(): + return outputreturn('one-time-event-userchoice-smartcontract-incorporation',f"{contract_token}", f"{contract_name}", f"{contract_address}", f"{clean_text}", f"{contractAmount}", f"{minimum_subscription_amount}" , f"{maximum_subscription_amount}", f"{contract_conditions['userchoices']}", f"{contract_conditions['expiryTime']}", contract_conditions['unix_expiryTime'], stateF_mapping) + elif 'payeeAddress' in contract_conditions.keys(): + contract_conditions['payeeAddress'] = find_word_index_fromstring(clean_text,contract_conditions['payeeAddress']) + # check if colon exists in the payeeAddress string + if ':' in contract_conditions['payeeAddress']: + colon_split = contract_conditions['payeeAddress'].split(':') + if len(colon_split)%2 != 0: + return outputreturn('noise') + split_total = 0 + payeeAddress_split_dictionary = {} + for idx, item in enumerate(colon_split): + if idx%2 == 0: + # check if floid + if not check_flo_address(item, is_testnet): + return outputreturn('noise') + if idx%2 == 1: + # check if number + try: + item = float(item) + if item <= 0: + return outputreturn('noise') + payeeAddress_split_dictionary[colon_split[idx-1]] = item + split_total += item + except: + return outputreturn('noise') + if split_total != 100: + return outputreturn('noise') + else: + contract_conditions['payeeAddress'] = payeeAddress_split_dictionary + return outputreturn('one-time-event-time-smartcontract-incorporation',f"{contract_token}", f"{contract_name}", f"{contract_address}", f"{clean_text}", f"{contractAmount}", f"{minimum_subscription_amount}" , f"{maximum_subscription_amount}", contract_conditions['payeeAddress'], f"{contract_conditions['expiryTime']}", contract_conditions['unix_expiryTime'], stateF_mapping) + else: + if not check_flo_address(contract_conditions['payeeAddress'], is_testnet): + return outputreturn('noise') + else: + contract_conditions['payeeAddress'] = {f"{contract_conditions['payeeAddress']}":100} + return 
outputreturn('one-time-event-time-smartcontract-incorporation',f"{contract_token}", f"{contract_name}", f"{contract_address}", f"{clean_text}", f"{contractAmount}", f"{minimum_subscription_amount}" , f"{maximum_subscription_amount}", contract_conditions['payeeAddress'], f"{contract_conditions['expiryTime']}", contract_conditions['unix_expiryTime'], stateF_mapping) + + if first_classification['categorization'] == 'smart-contract-participation-deposit-C': + # either participation of one-time-event contract or + operation = apply_rule1(select_category_reject, processed_text, send_category, deposit_category, create_category) + if not operation: + return outputreturn('noise') + else: + tokenname = first_classification['wordlist'][0][:-1] + if not check_regex("^[A-Za-z][A-Za-z0-9_-]*[A-Za-z0-9]$", tokenname): + return outputreturn('noise') + + contract_name = extract_special_character_word(first_classification['wordlist'],'@') + if not check_regex("^[A-Za-z][A-Za-z0-9_-]*[A-Za-z0-9]$", contract_name): + return outputreturn('noise') + + contract_address = extract_special_character_word(first_classification['wordlist'],'$') + if contract_address is False: + contract_address = '' + else: + contract_address = find_original_case(contract_address, clean_text) + if not check_flo_address(contract_address, is_testnet): + return outputreturn('noise') + + if operation == 'category1': + tokenamount = apply_rule1(extractAmount_rule_new1, processed_text, 'userchoice:', 'pre') + if not tokenamount: + return outputreturn('noise') + try: + if float(tokenamount)<=0: + return outputreturn('noise') + except: + return outputreturn('noise') + userchoice = extract_userchoice(processed_text) + # todo - do we need more validations for user choice? 
+ if not userchoice: + return outputreturn('noise') + + return outputreturn('one-time-event-userchoice-smartcontract-participation',f"{clean_text}", f"{tokenname}", tokenamount, f"{contract_name}", f"{contract_address}", f"{userchoice}", stateF_mapping) + + elif operation == 'category2': + tokenamount = apply_rule1(extractAmount_rule_new1, processed_text, 'deposit-conditions:', 'pre') + if not tokenamount: + return outputreturn('noise') + try: + if float(tokenamount)<=0: + return outputreturn('noise') + except: + return outputreturn('noise') + deposit_conditions = extract_deposit_conditions(processed_text, blocktime=blockinfo['time']) + if not deposit_conditions: + return outputreturn("noise") + return outputreturn('continuos-event-token-swap-deposit', f"{tokenname}", tokenamount, f"{contract_name}", f"{clean_text}", f"{deposit_conditions['expiryTime']}", stateF_mapping) + + if first_classification['categorization'] == 'smart-contract-participation-ote-ce-C': + # There is no way to properly differentiate between one-time-event-time-trigger participation and token swap participation + # so we merge them in output return + tokenname = first_classification['wordlist'][0][:-1] + if not check_regex("^[A-Za-z][A-Za-z0-9_-]*[A-Za-z0-9]$", tokenname): + return outputreturn('noise') + + tokenamount = apply_rule1(extractAmount_rule_new1, processed_text) + if not tokenamount: + return outputreturn('noise') + try: + if float(tokenamount)<=0: + return outputreturn('noise') + except: + return outputreturn('noise') + + contract_name = extract_special_character_word(first_classification['wordlist'],'@') + if not check_regex("^[A-Za-z][A-Za-z0-9_-]*[A-Za-z0-9]$", contract_name): + return outputreturn('noise') + + contract_address = extract_special_character_word(first_classification['wordlist'],'$') + if contract_address is False: + contract_address = '' + else: + contract_address = find_original_case(contract_address, clean_text) + if not check_flo_address(contract_address, 
is_testnet): + return outputreturn('noise') + + return outputreturn('smart-contract-one-time-event-continuos-event-participation', f"{clean_text}", f"{tokenname}", tokenamount, f"{contract_name}", f"{contract_address}", stateF_mapping) + + if first_classification['categorization'] == 'userchoice-trigger': + contract_name = extract_special_character_word(first_classification['wordlist'],'@') + if not check_regex("^[A-Za-z][A-Za-z0-9_-]*[A-Za-z0-9]$", contract_name): + return outputreturn('noise') + + trigger_condition = extract_trigger_condition(processed_text) + if not trigger_condition: + return outputreturn('noise') + return outputreturn('one-time-event-userchoice-smartcontract-trigger', f"{contract_name}", f"{trigger_condition}", stateF_mapping) + + if first_classification['categorization'] == 'smart-contract-creation-ce-tokenswap': + operation = apply_rule1(selectCategory, processed_text, create_category, send_category+deposit_category) + if operation != 'category1': + return outputreturn('noise') + + contract_type = extract_special_character_word(first_classification['wordlist'],'*') + if not check_existence_of_keyword(['continuous-event'],[contract_type]): + return outputreturn('noise') + + contract_name = extract_special_character_word(first_classification['wordlist'],'@') + if not check_regex("^[A-Za-z][A-Za-z0-9_-]*[A-Za-z0-9]$", contract_name): + return outputreturn('noise') + + contract_token = extract_special_character_word(first_classification['wordlist'],'#') + if not check_regex("^[A-Za-z][A-Za-z0-9_-]*[A-Za-z0-9]$", contract_token): + return outputreturn('noise') + + contract_address = extract_special_character_word(first_classification['wordlist'],'$') + contract_address = find_original_case(contract_address, clean_text) + if not check_flo_address(contract_address, is_testnet): + return outputreturn('noise') + + contract_conditions = extract_contract_conditions(processed_text, contract_type, contract_token, blocktime=blockinfo['time']) + if 
contract_conditions == False: + return outputreturn('noise') + # todo - Add checks for token swap extract contract conditions + try: + assert contract_conditions['subtype'] == 'tokenswap' + assert check_regex("^[A-Za-z][A-Za-z0-9_-]*[A-Za-z0-9]$", contract_conditions['accepting_token']) + assert check_regex("^[A-Za-z][A-Za-z0-9_-]*[A-Za-z0-9]$", contract_conditions['selling_token']) + if contract_conditions['priceType']=="'determined'" or contract_conditions['priceType']=='"determined"' or contract_conditions['priceType']=="determined" or contract_conditions['priceType']=="'predetermined'" or contract_conditions['priceType']=='"predetermined"' or contract_conditions['priceType']=="predetermined": + assert float(contract_conditions['price'])>0 + contract_conditions['oracle_address'] = False + elif contract_conditions['priceType']=="dynamic" or contract_conditions['priceType']=="'dynamic'" or contract_conditions['priceType']=='"dynamic"': + assert float(contract_conditions['price'])>0 + contract_conditions['oracle_address'] = find_original_case_regex(contract_conditions['oracle_address'], clean_text) # making sure the Flo Address is in its original case + assert check_flo_address(contract_conditions['oracle_address'], is_testnet) + else: + assert contract_conditions['priceType'] == 'statef' + contract_conditions['oracle_address'] = False + except AssertionError: + return outputreturn('noise') + + return outputreturn('continuos-event-token-swap-incorporation', f"{contract_token}", f"{contract_name}", f"{contract_address}", f"{clean_text}", f"{contract_conditions['subtype']}", f"{contract_conditions['accepting_token']}", f"{contract_conditions['selling_token']}", f"{contract_conditions['priceType']}", f"{contract_conditions['price']}", stateF_mapping, f"{contract_conditions['oracle_address']}") + + return outputreturn('noise') + + +text = 'send 6 usd# to swap-tokenroom-usd@' +blockinfo = {} +net = 'mainnet' +print(parse_flodata(text, blockinfo, net)) \ No newline at 
end of file diff --git a/planning.txt b/planning.txt new file mode 100644 index 0000000..fadbae4 --- /dev/null +++ b/planning.txt @@ -0,0 +1,296 @@ +''' +TEMPLATE FOR SECOND STAGE AFTER INPUT CLASSIFIER + +IF BLOCK If the output of input classifier is tokensystem-C, +JUST LINEARLY START BUILDING IT + +then first start building the known outputs + +// outputreturn('token_incorporation',f"{flodata}", f"{tokenname}", f"{tokenamount}") + +f"{flodata} = rawstring +f"{tokenname}" = wordlist entry +tokensystem-C-resolved = Output of second stage classification +f"{tokenamount}" = find_number_function +''' + +''' + The problem we are facing: + + * Token transactions don't have * or @ symbols + + * Smart Contract transactions have * , @ , # symbols + + * Smart Contract transaction of the type one time event have 1 # before colon + + * Smart Contract transaction of the type continuous event has 2 # after colon + + * So we are checking for hashes based on the type of smart contract(identified by *) + + * But the above check disregards checking hashes in token transactions +''' + +# Write down all the possible flodata( with all combinations possible) for +''' + Token creation + create 500 million rmt# + ['#'] + + Token transfer + transfer 200 rmt# + ['#'] + + One time event userchoice creation + Create Smart Contract with the name India-elections-2019@ of the type one-time-event* using the asset rmt# at the address F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1$ with contract-conditions: (1) contractAmount=0.001rmt (2) userChoices=Narendra Modi wins| Narendra Modi loses (3) expiryTime= Wed May 22 2019 21:00:00 GMT+0530 + ['@','*','#','$',':'] + ['@','*','#','$',':','#'] + + One time event userchoice participation + send 0.001 rmt# to india-elections-2019@ to FLO address F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1 with the userchoice:'narendra modi wins' + ['#','@',':'] + ['#','@','$',':'] + + One time event userchoice trigger + india-elections-2019@ winning-choice:'narendra modi wins' + ['@',':'] 
+ + One time event timeevent creation + Create Smart Contract with the name India-elections-2019@ of the type one-time-event* using the asset rmt# at the address F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1$ with contract-conditions: (1) contractAmount=0.001rmt (2) expiryTime= Wed May 22 2019 21:00:00 GMT+0530 + ['@','*','#','$',':'] + ['@','*','#','$',':','#'] + + One time event timeevent participation + send 0.001 rmt# to india-elections-2019@ to FLO address F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1 + ['#','@'] + ['#','@','$'] + + Continuos event token swap creation + Create Smart Contract with the name swap-rupee-bioscope@ of the type continuous-event* at the address oRRCHWouTpMSPuL6yZRwFCuh87ZhuHoL78$ with contract-conditions : + (1) subtype = tokenswap + (2) accepting_token = rupee# + (3) selling_token = bioscope# + (4) price = '15' + (5) priceType = ‘predetermined’ + (6) direction = oneway + + ['@','*','$',':','#','#'] + + Continuos event tokenswap deposit + Deposit 15 bioscope# to swap-rupee-bioscope@ its FLO address being oRRCHWouTpMSPuL6yZRwFCuh87ZhuHoL78$ with deposit-conditions: (1) expiryTime= Wed Nov 17 2021 21:00:00 GMT+0530 + ['#','@',':'] + ['#','@','$',':'] + + Continuos event tokenswap participation + Send 15 rupee# to swap-rupee-article@ its FLO address being FJXw6QGVVaZVvqpyF422Aj4FWQ6jm8p2dL$ + ['#','@'] + ['#','@','$'] +''' + +''' + + ['#'] - Token creation + + ['#'] - Token particiation + + + ['@','*','#','$',':'] - Smart contract creation user-choice + ['@','*','#','$',':','#'] + + ['#','@',':'] - Smart contract participation user-choice + ['#','@','$',':'] + + ['@',':'] - Smart contract trigger user-choice + + + ['@','*','#','$',':'] - Smart contract creation - ote-timebased + ['@','*','#','$',':','#'] + + ['#','@'] - Smart contract particiation - ote-timebased + ['#','@','$'] + + + ['@','*','$',':','#','#'] - Smart contract creation - continuos event - tokenswap + + ['#','@',':'] - Smart contract deposit - continuos event - tokenswap + ['#','@','$',':'] + 
+ ['#','@'] - Smart contract participation - continuos event - tokenswap + ['#','@','$'] - Smart contract participation - continuos event - tokenswap + +''' + +''' + + ['#'] - Token creation + + ['#'] - Token particiation + + + ['@','*','#','$',':'] - Smart contract creation ote-userchoice + ['@','*','#','$',':','#'] + + ['@','*','#','$',':'] - Smart contract creation - ote-timebased + ['@','*','#','$',':','#'] + + + ['#','@',':'] - Smart contract participation user-choice + ['#','@','$',':'] + + ['#','@',':'] - Smart contract deposit - continuos event - tokenswap + ['#','@','$',':'] + + + ['@',':'] - Smart contract trigger user-choice + + + ['#','@'] - Smart contract particiation - ote-timebased + ['#','@','$'] + + ['#','@'] - Smart contract participation - continuos event - tokenswap + ['#','@','$'] - Smart contract participation - continuos event - tokenswap + + + ['@','*','$',':','#','#'] - Smart contract creation - continuos event - tokenswap + +''' + +''' +Conflicts - + +1. Token creation | Token participation +2. Smart contract CREATION of the type one-time-event-userchoice | one-time-event-timebased +3. Smart contract PARTICIPATION user-choice | Smart contract DEPOSIT continuos-event token-swap +4. Smart contract PARTICIPATION one-time-event-timebased | Smart contract participation - continuos event - tokenswap + +''' + +''' + +Emerging parser design + +Phase 1 - Input processing | Special character position based classification and noise detection (FINISHED) +Phase 2 - Conflict recognition (FINISHED) +Phase 3 - Category based keyword checks +Phase 4 - Parser rules for finding data +Phase 5 - Rules for applying parser rules +Phase 6 - Category based data field extraction +Phase 7 - Output formatting and return (FINISHED) + +''' + +''' +Allowed formats of Smart Contract and token names + +1. First character should always be an Alphabet, lower case or upper case +2. The last character should always be an Alphabet, lower case or upper case +3. 
The middle characters can be a - or _ + +Check for FLO Address + +Write checks for conditions inside contract conditions +Serious error handling for contract-conditions +* 2222:00 gives error +* contractAmount = 0.022rt gives error | check if space is allowed between 0.022 rt +''' + + +''' + + What we need for NFT contract code + + 1. NFT-address mapping table in system.db + 2. New main transaction category class + 3. New sub-category for transfer category class ie. NFT transfer + + + NFT Smart Contract end cases + 1. NFT against an address + 2. NFT against another NFT + 3. + + flodata format for NFT + Create 1000 NFT with bioscope# with nft-details: (1) name = 'bioscope' (2) hash = + + Create 100 albumname# as NFT with 2CF24DBA5FB0A30E26E83B2AC5B9E29E1B161E5C1FA7425E73043362938B9824 as asset hash + [#] + + Rules + ----- + DIFFERENT BETWEEN TOKEN AND NFT + System.db will have a differnent entry + in creation nft word will be extra + NFT Hash must be present + Creation and transfer amount .. 
only integer parts will be taken + Keyword nft must be present in both creation and transfer + +''' + +''' + +Need infinite tokens to create stable coins, so they can be created without worrying about the upper limit of the coins + +''' + +''' +Create another table in system.db, it simply writes what is every database in one place + +Database_name Database type + +''' + +''' +IDEA FOR NEW ROLLBACK SYSTEM - 24 Jan 2022 +------------------------------------------- + +245436 +[ + tx1 - rmt - 245436 - send 10 rmt# + tx2 - rmt - 245436 - send 4 rmt# + tx3 - rmt - 245436 - send 1 rmt# + tx4 - rmt - 245436 - send 100 rmt# + tx5 - rmt trigger(5) - 245436 - trigger +] + +banana - txhash +orange - entries in activepid table +mangoes - entries in transaction history table + +CURRENT SYSTEM +given a block , find out all the oranges in the block +given a block, find out all the bananas in the block and +for each banana, find corresponding databases( found through parsing of banana flodata and banana txdata) + - if token database then rollback, if contractDatabase then delete entry + + +NEW SYSTEM +give a block , find out all the oranges in the block +given a block, find out all the bananas in the block and their corresponding databases( found through parsing of banana flodata and banana txdata) + - start opening all those databases one by one | if token database then rollback, if contractDatabase then delete entry + +send transaction -> receive the databases associated with it + +''' + +''' +Step 1 +The block that we are rolling back into is earlier than the database creation blockNumber, then delete the whole database without rolling back. Do this for both token databases and smart contract databases + +Step 2 +If the rolling back block is later than database creation blockNumber, then invoke rollback a database function( rollback_database ) + +Step 3 +Create a list of databases to be opened, and creation date (creation date is block number). 
This will exclude the token and smart contract databases which are already deleted + +Step 4 +For each of the database to be opened, rollback the database to rollback point +rollback_database will take 2 inputs, a block number to which it has to rollback to and the name of the database + +Step 5 +Create a delete function, which will delete from transactionHistory, latestCache and contractDatabase + +To-do +------ +* Integrate all the functions in the following order: + 1 , 2 , 3 , 4 , 5 | That will finish the operation of taking the block number as input and the roll back function will rollback upto the block number specified for all kinds of databases and all kinds of transactions + +''' \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 8d09655..feae3f5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,6 @@ arrow==1.1.0 bidict==0.21.2 certifi==2021.5.30 cffi==1.14.5 -requests==2.25.0 chardet==3.0.4 greenlet==1.1.0 idna==2.10 @@ -10,7 +9,8 @@ pycparser==2.20 python-dateutil==2.8.1 python-engineio==3.14.2 python-socketio==4.6.1 -secp256k1==0.13.2 +requests==2.25.0 six==1.16.0 SQLAlchemy==1.4.18 urllib3==1.26.5 +pyflo-lib==2.0.9 diff --git a/smart-contract-system-redesign.txt b/smart-contract-system-redesign.txt new file mode 100644 index 0000000..29efbb8 --- /dev/null +++ b/smart-contract-system-redesign.txt @@ -0,0 +1,29 @@ + +DATABASES +* Database operations have to be optimized + - in terms of not repeating too often + - Save changes only when all business logic is approved, since we are working with multiple databases currently +* Too much of repitition in database operations right now +* Database model classes, for SQL alchemy, have to be optimized ie. 
base classes for tokenswap and one-time-event totally different right now +* Make all database operations to follow SQLAlchemy, no direct SQL commands +* Remove all position based queries + +PROGRAM STRUCTURE +* Optimize overall program structure + +NEW FEATURES +* Rollback feature +* When processing blocks from the websocket API, check the blockheight of the new block vs the latest block in the database | this is to make sure none of the transactions go missing + + +----- +processBlocks + +* find the last scanned block in the database +* find the latest block at the API +* for loop for lastscannedblock to latestblock +* processEach transaction based on business logic +* Update system.db to reflect currently scanned block as the latest block + +* Check for local smart contract triggers +* Check if any token swap contract deposits have to be returned \ No newline at end of file diff --git a/statef_processing.py b/statef_processing.py new file mode 100644 index 0000000..e0f9472 --- /dev/null +++ b/statef_processing.py @@ -0,0 +1,87 @@ +import requests +from operator import attrgetter +import json +import pdb + +''' + USD-INR + https://api.exchangerate-api.com/v4/latest/usd + + Parsed stateF + "stateF":{ + "bitcoin_price_source":"bitpay", + "usd_inr_exchange_source":"bitpay" + } +''' + +''' +stateF notes for amount split on contracts + +stateF_object = { + "floaddresses": "oPkHWcvqBHfCortTHScrVBjXLsZhWie99C-oPkHWcvqBHfCortTHScrVBjXLsZhWie99C-oPkHWcvqBHfCortTHScrVBjXLsZhWie99C", + "splits": "10-20-30", +} + +''' + +# stateF +stateF_address = 'oPkHWcvqBHfCortTHScrVBjXLsZhWie99C' + +stateF_object = { + "bitcoin_price_source":"bitpay", + "usd_inr_exchange_source":"bitpay" + } + +# Flodata object +flodata_object = { + "bitpay": { + "bitcoin_price_source":{ + "api" : "https://bitpay.com/api/rates", + "path" : [2,"rate"], + "data_type" : "float" + }, + "usd_inr_exchange_source":{ + "api" : "https://api.exchangerate-api.com/v4/latest/usd", + "path" : ["rates","INR"], + 
"data_type" : "float" + } + } +} + + +def pull_stateF(floID): + response = requests.get(f"https://flosight-testnet.ranchimall.net/api/txs/?address={floID}") + if response.status_code == 200: + address_details = response.json() + latest_stateF = address_details['txs'][0]['floData'] + latest_stateF = json.loads(latest_stateF) + return latest_stateF['stateF'] + else: + print('API response not valid') + +def query_api(api_object): + api, path, data_type = api_object.values() + response = requests.get(api) + if response.status_code == 200: + # Use path keys to reach the value + api_response = response.json() + for key in path: + api_response = api_response[key] + # todo: how to use datatype to convert + if data_type == 'float': + value_at_path = float(api_response) + return value_at_path + else: + print('API response not valid') + +def process_stateF(stateF_object, stateF_address): + flodata_object = pull_stateF(stateF_address) + processed_values = {} + for key, value in stateF_object.items(): + external_value = query_api(flodata_object[value][key]) + processed_values[key] = external_value + return processed_values + +if __name__ == '__main__': + processed_statef = process_stateF(stateF_object, stateF_address) + print(processed_statef) \ No newline at end of file diff --git a/tests/test_parsing.py b/tests/test_parsing.py new file mode 100644 index 0000000..3bce405 --- /dev/null +++ b/tests/test_parsing.py @@ -0,0 +1,219 @@ +import unittest +import sys +sys.path.append("..") +import parsing + +class TestParsing(unittest.TestCase): + + blockinfo_stub = {'time': 25634} + + def test_token_creation(self): + text = 'create 100 rmt#' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'mainnet') + expected_result = { + 'type': 'tokenIncorporation', + 'flodata': 'create 100 rmt#', + 'tokenIdentification': 'rmt', + 'tokenAmount': 100.0, + 'stateF': False + } + self.assertEqual(result, expected_result) + + def test_token_transfer(self): + text = 'transfer 10.340 
rmt#' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'mainnet') + expected_result = { + 'type': 'transfer', + 'transferType': 'token', + 'flodata': 'transfer 10.340 rmt#', + 'tokenIdentification': 'rmt', + 'tokenAmount': 10.34, + 'stateF': False + } + self.assertEqual(result, expected_result) + + def test_nft_creation(self): + pass + + def test_nft_transfer(self): + pass + + def test_infinite_token_incorporation(self): + text = 'create usd# as infinite-token' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'mainnet') + expected_result = { + 'type': 'infiniteTokenIncorporation', + 'flodata': 'create usd# as infinite-token', + 'tokenIdentification': 'usd', + 'stateF': False + } + self.assertEqual(result, expected_result) + + text = 'create usd# as infinite-token send' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'mainnet') + expected_result = {'type': 'noise'} + self.assertEqual(result, expected_result) + + def test_infinite_token_transfer(self): + pass + + def test_onetimeevent_timetrigger_creation(self): + # contractamount + text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) contractAmount=0.1 end-contract-conditions''' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet') + expected_result = { + 'type': 'smartContractIncorporation', + 'contractType': 'one-time-event', + 'subtype': 'time-trigger', + 'tokenIdentification': 'bioscope', + 'contractName': 'all-crowd-fund-1', + 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', + 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* 
using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) contractAmount=0.1 end-contract-conditions', + 'contractConditions': { + 'contractAmount': '0.1', + 'payeeAddress': { + 'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0 + }, + 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', + 'unix_expiryTime': 1668387900.0 + } + } + self.assertEqual(result, expected_result) + + # minimumsubscriptionamount + text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 end-contract-conditions''' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet') + expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype':'time-trigger','tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) 
payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 end-contract-conditions', 'contractConditions': {'minimumsubscriptionamount': '1.0', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}} + self.assertEqual(result, expected_result) + + # maximumsubscriptionamount + text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) maximumsubscriptionamount=10 end-contract-conditions''' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet') + expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger','tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) maximumsubscriptionamount=10 end-contract-conditions', 'contractConditions': {'maximumsubscriptionamount': '10.0', 'payeeAddress': 
{'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}} + self.assertEqual(result, expected_result) + + # minimumsubscriptionamount | contractamount + text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1.600 (4) contractAmount=0.1 end-contract-conditions''' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet') + expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger', 'tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1.600 (4) contractAmount=0.1 end-contract-conditions', 'contractConditions': {'contractAmount': '0.1', 'minimumsubscriptionamount': '1.6', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 
19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}} + self.assertEqual(result, expected_result) + + # maximumsubscriptionamount | contractamount + text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) maximumsubscriptionamount=10 (4) contractAmount=0.1 end-contract-conditions''' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet') + expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger', 'tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) maximumsubscriptionamount=10 (4) contractAmount=0.1 end-contract-conditions', 'contractConditions': {'contractAmount': '0.1', 'maximumsubscriptionamount': '10.0', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}} + self.assertEqual(result, expected_result) + + # minimumsubscriptionamount | maximumsubscriptionamount + text = '''Create a smart contract of the name 
all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 (4) maximumsubscriptionamount=10 end-contract-conditions''' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet') + expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype':'time-trigger','tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 (4) maximumsubscriptionamount=10 end-contract-conditions', 'contractConditions': {'minimumsubscriptionamount': '1.0', 'maximumsubscriptionamount': '10.0', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}} + self.assertEqual(result, expected_result) + + # minimumsubscriptionamount | maximumsubscriptionamount | contractamount + text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= 
Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 (4) maximumsubscriptionamount=10 (5) contractAmount=0.1 end-contract-conditions''' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet') + expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger', 'tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 (4) maximumsubscriptionamount=10 (5) contractAmount=0.1 end-contract-conditions', 'contractConditions': {'contractAmount': '0.1', 'minimumsubscriptionamount': '1.0', 'maximumsubscriptionamount': '10.0', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}} + self.assertEqual(result, expected_result) + + # With single payeeAddress with : format + text = "Create a smart contract of the name album-fund@ of the type one-time-event* using asset bioscope# at the FLO address ocsiFSsjek3UXKdHpBWF79qrGN6qbpxeMt$ with contract-conditions: (1) expiryTime= Thu May 04 2023 18:57:00 GMT+0530 (India Standard Time) (2) payeeAddress= objfBRUX5zn4W56aHhRn4DgH6xqeRWk6Xc:100 
end-contract-conditions" + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet') + expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger', 'tokenIdentification': 'bioscope', 'contractName': 'album-fund', 'contractAddress': 'ocsiFSsjek3UXKdHpBWF79qrGN6qbpxeMt', 'flodata': 'Create a smart contract of the name album-fund@ of the type one-time-event* using asset bioscope# at the FLO address ocsiFSsjek3UXKdHpBWF79qrGN6qbpxeMt$ with contract-conditions: (1) expiryTime= Thu May 04 2023 18:57:00 GMT+0530 (India Standard Time) (2) payeeAddress= objfBRUX5zn4W56aHhRn4DgH6xqeRWk6Xc:100 end-contract-conditions', 'contractConditions': {'payeeAddress': {'objfBRUX5zn4W56aHhRn4DgH6xqeRWk6Xc': 100.0}, 'expiryTime': 'thu may 04 2023 18:57:00 gmt+0530 (india standard time)', 'unix_expiryTime': 1683246420.0}} + self.assertEqual(result, expected_result) + + # With single payeeAddress with normal format + text = "Create a smart contract of the name album-fund@ of the type one-time-event* using asset bioscope# at the FLO address ocsiFSsjek3UXKdHpBWF79qrGN6qbpxeMt$ with contract-conditions: (1) expiryTime= Thu May 04 2023 18:57:00 GMT+0530 (India Standard Time) (2) payeeAddress= objfBRUX5zn4W56aHhRn4DgH6xqeRWk6Xc end-contract-conditions" + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet') + expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger', 'tokenIdentification': 'bioscope', 'contractName': 'album-fund', 'contractAddress': 'ocsiFSsjek3UXKdHpBWF79qrGN6qbpxeMt', 'flodata': 'Create a smart contract of the name album-fund@ of the type one-time-event* using asset bioscope# at the FLO address ocsiFSsjek3UXKdHpBWF79qrGN6qbpxeMt$ with contract-conditions: (1) expiryTime= Thu May 04 2023 18:57:00 GMT+0530 (India Standard Time) (2) payeeAddress= objfBRUX5zn4W56aHhRn4DgH6xqeRWk6Xc end-contract-conditions', 
'contractConditions': {'payeeAddress': {'objfBRUX5zn4W56aHhRn4DgH6xqeRWk6Xc': 100}, 'expiryTime': 'thu may 04 2023 18:57:00 gmt+0530 (india standard time)', 'unix_expiryTime': 1683246420.0}} + self.assertEqual(result, expected_result) + + # With multiple payeeAddress with : format + text = "Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 (4) maximumsubscriptionamount=10 (5) contractAmount=0.1 end-contract-conditions" + + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet') + expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger', 'tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 (4) maximumsubscriptionamount=10 (5) contractAmount=0.1 end-contract-conditions', 'contractConditions': {'contractAmount': '0.1', 'minimumsubscriptionamount': '1.0', 'maximumsubscriptionamount': '10.0', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 
'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}} + self.assertEqual(result, expected_result) + + + def test_onetimeevent_timetrigger_participation(self): + text = '''send 2.2 bioscope# to all-crowd-fund@''' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet') + expected_result = {'type': 'transfer', 'transferType': 'smartContract', 'flodata': 'send 2.2 bioscope# to all-crowd-fund@', 'tokenIdentification': 'bioscope', 'tokenAmount': 2.2, 'contractName': 'all-crowd-fund'} + self.assertEqual(result, expected_result) + + text = 'transfer 6.20000 bioscope# to all-crowd-fund-7@' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet') + expected_result = {'type': 'transfer', 'transferType': 'smartContract', 'flodata': 'transfer 6.20000 bioscope# to all-crowd-fund-7@', 'tokenIdentification': 'bioscope', 'tokenAmount': 6.2, 'contractName': 'all-crowd-fund-7'} + self.assertEqual(result, expected_result) + + text = 'transfer 6.20000 bioscope# to all-crowd-fund-7@ 24' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet') + expected_result = {'type': 'noise'} + self.assertEqual(result, expected_result) + + text = 'transfer 6.20000 bioscope# to all-crowd-fund-7@ 24 ' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet') + expected_result = {'type': 'noise'} + self.assertEqual(result, expected_result) + + text = '6.20.000 transfer bioscope# to all-crowd-fund-7@ 24' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet') + expected_result = {'type': 'transfer', 'transferType': 'smartContract', 'flodata': '6.20.000 transfer bioscope# to all-crowd-fund-7@ 24', 'tokenIdentification': 'bioscope', 'tokenAmount': 24.0, 'contractName': 'all-crowd-fund-7'} + self.assertEqual(result, expected_result) + + def test_onetimeevent_externaltrigger_creation(self): + # contractamount + text = '''Create a smart contract of the name twitter-survive@ of the type 
one-time-event* using asset bioscope# at the FLO address oVbebBNuERWbouDg65zLfdataWEMTnsL8r$ with contract-conditions:(1) expiryTime= Sun Nov 15 2022 14:55:00 GMT+0530 (2) userchoices= survives | dies (3) contractAmount=0.02 end-contract-conditions''' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet') + expected_result = { + 'type': 'smartContractIncorporation', + 'contractType': 'one-time-event', + 'subtype': 'external-trigger', + 'tokenIdentification': 'bioscope', + 'contractName': 'twitter-survive', + 'contractAddress': 'oVbebBNuERWbouDg65zLfdataWEMTnsL8r', + 'flodata': 'Create a smart contract of the name twitter-survive@ of the type one-time-event* using asset bioscope# at the FLO address oVbebBNuERWbouDg65zLfdataWEMTnsL8r$ with contract-conditions: (1) expiryTime= Sun Nov 15 2022 14:55:00 GMT+0530 (2) userchoices= survives | dies (3) contractAmount=0.02 end-contract-conditions', + 'contractConditions': { + 'contractAmount': '0.02', + 'userchoices': "{0: 'survives', 1: 'dies'}", + 'expiryTime': 'sun nov 15 2022 14:55:00 gmt+0530', + 'unix_expiryTime': 1668543900.0 + } + } + self.assertEqual(result, expected_result) + + def test_tokenswap_deposits(self): + text = 'Deposit 1 bioscope# to swap-rupee-bioscope-1@ its FLO address being oTzrcpLPRXsejSdYQ3XN6V4besrAPuJQrk$ with deposit-conditions: (1) expiryTime= Thu Apr 13 2023 21:45:00 GMT+0530' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet') + expected_result = { + 'type': 'smartContractDeposit', + 'tokenIdentification': 'bioscope', + 'depositAmount': 1.0, + 'contractName': 'swap-rupee-bioscope-1', + 'flodata': 'Deposit 1 bioscope# to swap-rupee-bioscope-1@ its FLO address being oTzrcpLPRXsejSdYQ3XN6V4besrAPuJQrk$ with deposit-conditions: (1) expiryTime= Thu Apr 13 2023 21:45:00 GMT+0530', + 'depositConditions': { + 'expiryTime': 'thu apr 13 2023 21:45:00 gmt+0530' + }, + 'stateF': False} + self.assertEqual(result, expected_result) + + def 
test_contract_trigger(self): + text = 'contract@ triggerCondition:"twitter-survives"' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet') + expected_result = { + 'type': 'smartContractPays', + 'contractName': 'contract', + 'triggerCondition': 'twitter-survives', + 'stateF': False} + self.assertEqual(result, expected_result) + + def test_deposit_invalid(self): + text = 'Deposit 1 bioscope# to swap-rupee-bioscope-1@ its FLO address being oTzrcpLPRXsejSdYQ3XN6V4besrAPuJQrk$ with deposit-conditions: (1) expiryTime= Tue, 25 Apr 2023 13:40:00 GMT' + result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet') + expected_result = {'type': 'noise'} + self.assertEqual(result, expected_result) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tracktokens-smartcontracts.py b/tracktokens-smartcontracts.py deleted file mode 100755 index 035d40f..0000000 --- a/tracktokens-smartcontracts.py +++ /dev/null @@ -1,3261 +0,0 @@ -import argparse -import configparser -import json -import logging -import os -import shutil -import sqlite3 -import sys -import pybtc -import requests -import socketio -from sqlalchemy import create_engine, func -from sqlalchemy.orm import sessionmaker -import time -import parsing -from config import * -from datetime import datetime -import pdb -from models import SystemData, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, RejectedTransactionHistory, \ - Base, ContractStructure, ContractBase, ContractParticipants, SystemBase, ActiveContracts, ContractAddressMapping, \ - LatestCacheBase, ContractTransactionHistory, RejectedContractTransactionHistory, TokenContractAssociation - -goodblockset = {3387923, -3387978, -3396172, -3396192, -3396199, -3396201, -3396205, -3396206, -3396208, -3429342, -3443480, -3443669, -3443675, -3443677, -3443679, -3443680, -3443682, -3443684, -3443702, -3443718, -3443723, -3443733, -3443764, -3443895, -3444345, -3444499, -3444502, -3444503, 
-3444505, -3444513, -3444522, -3444595, -3444691, -3444698, -3444732, -3444756, -3444828, -3445284, -3445287, -3445463, -3445501, -3446215, -3446465, -3446877, -3446884, -3446888, -3446943, -3446980, -3447073, -3447257, -3447278, -3454503, -3503146, -3609700, -3935975, -3937128, -3937241, -3937242, -3937338, -3937525, -3939941, -3939999, -3940023, -3941146, -3941147, -3956547, -3961676, -3969778, -3970781, -3981168, -3990334, -4000081, -4000252, -4000341, -4000359, -4000456, -4001475, -4002492, -4008981, -4008986, -4008999, -4009033, -4009096, -4010850, -4010910, -4010919, -4010957, -4010980, -4030789, -4030795, -4034239, -4173382, -4174529, -4176306, -4176341, -4176352, -4176383, -4177433, -4177436, -4177705, -4179318, -4184367, -4184544, -4184706, -4186183, -4190429, -4191992, -4203044, -4204961, -4206876, -4210582, -4212515, -4212538, -4212587, -4212634, -4212648, -4213909, -4213921, -4213939, -4213967, -4221370, -4235291, -4243310, -4243333, -4248901, -4256675, -4256730, -4280238, -4281352, -4291219, -4293808, -4313846, -4313887, -4315165, -4315166, -4315223, -4315314, -4315388, -4315411, -4316891, -4316964, -4317202, -4317879, -4317887, -4317962, -4318675, -4318747, -4318796, -4318799, -4318811, -4318848, -4318881, -4318885, -4318889, -4318908, -4318927, -4318928, -4318942, -4318979, -4318987, -4318992, -4319012, -4319028, -4319426, -4319563, -4320551, -4320553, -4320554, -4320569, -4320575, -4320583, -4321197, -4321248, -4321289, -4322379, -4322494, -4323991, -4324082, -4324099, -4324108, -4324111, -4324160, -4324282, -4324285, -4324286, -4324438, -4324446, -4324450, -4324451, -4324456, -4324459, -4324537, -4324562, -4325899, -4326104, -4326179, -4328292, -4328296, -4328309, -4328402, -4338538, -4338608, -4338622, -4340833, -4341514, -4353757, -4353777, -4353779, -4353808, -4353825, -4355736, -4355799, -4355819, -4356783, -4356788, -4356793, -4356901, -4356933, -4356952, -4356970, -4357082, -4357970, -4358021, -4358201, -4358221, -4358222, -4358307, -4358308, 
-4358324, -4358344, -4358355, -4358379, -4358391, -4360056, -4360211, -4360576, -4360906, -4360955, -4361030, -4362187, -4362192, -4362213, -4362728, -4363547, -4363668, -4364856, -4364858, -4364868, -4364875, -4364876, -4365014, -4365152, -4365327, -4366550, -4366621, -4366675, -4366700, -4366747, -4368196, -4369662, -4369683, -4369756, -4376861, -4377318, -4378253, -4378295, -4378297, -4384839, -4385456, -4385457, -4385498, -4386139, -4389212, -4390807, -4410901, -4410906, -4410912, -4410940, -4411239, -4411273, -4411288, -4411315, -4411330, -4411339, -4411346, -4411348, -4411350, -4411353, -4411355, -4411661, -4411683, -4411740, -4411742, -4411743, -4411745, -4411746, -4411757, -4413288, -4413289, -4413291, -4413418, -4413420, -4413430, -4413457, -4413465, -4413470, -4413474, -4418348, -4420905, -4420912, -4420920, -4420940, -4420952, -4420958, -4421020, -4421091, -4421093, -4421100, -4421147, -4431335, -4433179, -4433620, -4433637, -4434828, -4434873, -4434888, -4434891, -4434898, -4434903, -4437014, -4437066, -4447198, -4447202, -4447204, -4447261, -4447314, -4448107, -4449169, -4449323, -4449488, -4473417, -4473420, -4473503, -4473517, -4473583, -4473804, -4484210, -4484247, -4508994, -4509004, -4509300, -4509950, -4528359, -4528622, -4541278, -4541295, -4552508, -4552522, -4561033, -4562937, -4576480, -4576482, -4577665, -4577712, -4597585, -4597674, -4598087, -4598090, -4599578, -4599724, -4599771, -4606282, -4608453, -4608750, -4612976, -4621941, -4621968, -4629357, -4629457, -4643323, -4643325, -4643544, -4643635, -4658051, -4658053, -4658163, -4658174, -4658778, -4661954, -4664069, -4664075, -4665838, -4665898, -4674594, -4674633, -4684574, -4684602, -4695346, -4695451, -4695529, -4695552, -3961676, -4323991, -4324562, -4361030, -4362187, -4362192, -4364858, -4364868, -4364875, -4364876} - -goodtxset = {'a74a03ec1e77fa50e0b586b1e9745225ad4f78ce96ca59d6ac025f8057dd095c', -'fdcb3ff273deb2a03c19333d4f4f3a22b1fedd22e17fef576559319c40450f50', 
-'8425c6d4171f8b1634fa1aa339b59e18ae3a585ed3387749cd2fa30d81e0f95e', -'cde8728abca696af417d2f69dda6f55defdf1f5a4bd4625df768ca863d620fa7', -'9ce989dec4ad8a39ca7461812232514c59a3618c38eb6a590ab1e693b17baf7d', -'8c3559b0ba78d1143d3ab4e30c3611842fc57988751be67d761e5c087a8d07c5', -'3803668250105fca8a90e691e07916ece655611afc07b2c6a01b028f35f59352', -'5c180a2961189dd40d5af35a17fbed7865443f6c4ef263924630d3c815606ee9', -'22330e8b2c98ef0d3dc1b0d7961aaad9b2a7bd5afbb4c71d6d4bf20c02595cce', -'c6eb7adc731a60b2ffa0c48d0d72d33b2ec3a33e666156e729a63b25f6c5cd56', -'7c84e9ac1a3d9a81868ca801966c75c4ab261bbc2d0e07050c0f25275cf8a5e1', -'d8436c30c3493dcf723672d337093bbe9466afa88235051c7981c39fa30c1326', -'b463b2b447433b7044bdd3022e271db02b30a3307f25e9c13e18eab1bd681d93', -'cf6de70c9e9e3d45d37bc6e4112851655d82dbd5aef48aedcee466fe4799b950', -'26f08763cd177e2d55080041637527a7769eb3507b023a25bc9edbe8649c2fe2', -'3faa8479657839e6ff9869b49fe5b20441e45e780403c3ab7e33eb70787a785d', -'511f16a69c5f62ad1cce70a2f9bfba133589e3ddc560d406c4fbf3920eae8469', -'e92bf6a8bddf177a5e2d793fa86c7ad059c89157f683f90f02b3590c0e4282c5', -'21976fe4a9a99eb31bd18a496328f7cb91bd4f708bb848771fd638949a5eaba3', -'d8a8b87bd2dd98ac3e5a6996f7bbb30612ecf13756c0f860968c2cb7c8cf01e5', -'dd050ac2ec951eaad4ee897c8348b5a9314a6e7d13debbc319757ec121c5a013', -'dfbc183b45762d7db565eeb0eddce74232acbc8a2c00a68a6ef628c16bb95a93', -'bb6cef5e9612363ed263291e8d3b39533661b3ba1b3ce8c2e9500158124266b8', -'edcd710fa6176cee9c738188a053967ffb4c5dca1a1aa721bee7e22b7f3b7e11', -'dd5512594a0402b914e8d852c918a3b0017e888deea1324512c62f62420a27e0', -'78f6e1dbb2eb8de746395d5c35b47ab91f51076bf3a134afa5fe5b0a44eba5e0', -'d07676c59c90b2363db301af4158d7f384dc0d6e84bb0619639ac358e6f311f1', -'38db5c9c78e6b6c97e622cc1576ebf3b824280f728c157beb56b974d5d802fa7', -'259974b79c94bba0481ad5e3b2f468380df9ac31da7dfdd0da8c3af78bcea561', -'4258e8a7530d4d1e3f1d7524cc927d0c9c335a779a032bbb2d83725680a47ee9', 
-'21b0b9564ae62d32eed3c9911686c20dea24e1a94c255d7b3de4ecaa720e3d17', -'5e67fee05ed5f6598a85e8fb12e207183bc4441c8f81d54b89ce6bfd196c5fdf', -'eec02efef5a505e141f5551604d401a872e33837edfcbb7e5895a9951c50aa2a', -'97b051261a91dfb4b9d591c44d9ab6192efc2f82d85777f5bf33861952d30cb4', -'3e52f39f785191b693d99859a9823eed281c2ff85669803ef85b55ea924bf790', -'3b8c7d37e16a2f7446411feebe9d141fda6edd4886998033bb3405a08c6026a7', -'0727f232cee6f4c1e10f7b281c8c5c7bb2e90c2c0fc50c8b1160f383840a8ae5', -'0623bea90559e1bb59f4134f6164544ef305fbeaa792e51f09e03706cfe35b79', -'67783a84997fd6b6099267afd158494a971c3aea6ff6856583d778ff5bcc2a0c', -'441c222e9953c6e4278e38362c691835a3c7b6f09f036bf6918b619d0e16cd3c', -'3c0c391d1ec5db8c6636996b6479c25a4a939fb7e9f14d899d41431c5d0a6aad', -'d5e61452a5fc98cac6a0c2c8eb5dc45c345513f2bb27240d66ef3d47a11c46bc', -'3a53a7b61f0ff1c09a25f2ffcb3a97ae0ec0e3b0eccb5e82fec4966dbc0eddcf', -'a433e8f2e7d332cd9574d7325b33720c75f459870d5e060b506a7c7c17650b37', -'2cbc28088cfb983b9b8fd7ec7ee5abcac5cdef07e706413d4094c2caa7020753', -'7edcc427a565a458ae81dcc9a9717d2592120bedd632a17d4ca3fa863e08ca81', -'27ee5479706aba9d37eb458e1a51657b418dc1c9574ed37af763d7d7d7178937', -'f38996915cd00f7bb6ab33ca522168739e4af41ac718ec77b9e7974687f48ec6', -'4db4f56b711720b1769a70d834ef3138b599143d0dcf82c1d3e22e44201cc1b4', -'ad50d8bffe7214473b6f8f454f55749d23a26ab3fc854d63e1ccc808045d98ba', -'5a36fce4646358c751b5403ec5c7465f1b11c8dca6d86e8a9cd4e26184e07b1a', -'3520cda2de65249f7629683c4b8f3efb15479076fe9521250aba2f1abe3f2ce9', -'b57cf412c8cb16e473d04bae44214705c64d2c25146be22695bf1ac36e166ee0', -'ebc99f41c4ccaea32bd9dbc251515838ba47660b7422c7cae39ee3e2ae0107fe', -'e6338071b4247003e16aae50e2696d42f42df116b928122bd94c3c718fbd70bc', -'cf54e0e1b12065ad5ac307400e19b9097f2759f4ef003753e0938eb6c1c36f00', -'ebc015c8c57acdca1020863537eaa25cd00c9ff71c4dc3975b3d4104aa456488', -'b2b4e8bb8cb463e0295f9b639c29aae443eb8e7911551220e7dd45648a4e17c6', 
-'0e6ec23a1755217b64a6bae24ac14a326a427bcea88c9e4a25a6a89ab6b5e1f6', -'55f144c92556075e6980de0febf5ab41ebf34af4b15eb60fa115384d2507c460', -'af9ca8eb285437a637ddc03cb25e06f15ac650b9f4b8529205380410b11697b1', -'a59a7a3f24d374679747256790908e9eec841a2220e51e479f545122e2181fdb', -'16a3a51868c0e4461958fbd725dca6c2c8d8e9ffedc52a948fea21f4e6ecbf55', -'dec6a4cf1d2ecfd1b36eae0139b8e4fe6307a44af1100f805511860d43508e48', -'f4bea61cc8831a51945fdd240ed3bc198ef3205f4dd409d13d3a36f050a808a0', -'44e17058bb67046e391027d0695cd2e494242406d0b7cec0748645ecaeb97ec1', -'cb36f02e2eb97984a5de6a22123e989ba08dcb13d8c38b101f743dc3200d7490', -'322f60f380573876b5391e77b1296640d97de56d6efcb21f83d6d010e0820165', -'ba8e3784503f5b728764ca7853fb3f07efdd68a86152b126ce8a9ccc0a85f30c', -'ac6e826fb63a62b17f4603202c2ba43cd20aa9e362ed5f7419b71844ecaadc65', -'1e6de7e90286a211335e3fbcab1022c3561d57ffcfe4036a556fa4c58b032ec0', -'cd4539baedd99a5aaa743a2ed2babc55d2d0042cb95d71761f54e918571a57ee', -'6635af4bf7b82f280a57b9696227e79280b17442ad01a2e0902b513f1ef7c017', -'d219431684852035ac19137bda6a1904461d11fc8d3a66aff3fa21139f4ffde0', -'7e4b1e283602645584b24321417dad922bb8f65b3b983c924a047e22b9ef1c8a', -'55acaa10d9cdb37636b2b5f97054fc6a10ceb1cce310009b2f9ff5e8bb274e84', -'13e8242ee6c8c90a57e902051ce38f3c87376a6672b5be132d931363ecc4ca75', -'3a9118d83177f6e9fb8d46ee97bf1e1390cac2ba24ce1e3982086c910053ade0', -'e6f3effa2b1d885e1bacec3e7ff8256c4b55b4e0bda0514508ba4fc44ce13250', -'074acad64d28f63746616ffe39dd9e1b03c56b80809bf35b02b545f9907564ae', -'01f560d06ead33b2dabba9b9fdb44ab9ff693e63c0033bb422c991a4ccaa3a9a', -'46c5734b45d45276b69364ffac6cceb789ea667853760750be1875847d226f37', -'a1f6f8834ec597b23041c44761c93af3f595941dd23eac1357f80c886c601a8f', -'4476e804db749a21d5d5ac809a487b4f31b9f4f561531879e4b507abafa18953', -'65c1a4cb0cfc2fe51107abcb77277f1ab0e16f985c78369599d98f684fb45144', -'3f71fd1649ee36cf66ffcab38bc59dc18aab2995d37ae1e3dcafe9f63598c4bb', 
-'61bf9bfb99417054862fcd5da7cfc8c68dcc477272ecd3e821fa07bc7d2e7a70', -'535c996c16aedb8df875dcf48e38242e79b22784a9297d663bf271644a747951', -'1ab70a9de6a8deeb0ba8f6c4ca015d60659c90953a7c8a1f3215276fe57b1fb2', -'b93810c8ab06676a42907e2b116cf8799e08c4c0f0829d77b1cc9284f308a9f0', -'612ad2b43229a5b02714ac86d8d119269e291872885312269b4d9f98c0bd4ed5', -'c1ab82c2a996103dcdb9895e453b4a8d991a14ffed79505c752ae02ae657076b', -'9bf4006ec5f4f0af4512318dd90cbd24f3af45a6280a444164a67d5af155d85a', -'89e3dc0c351d5554caa013515be89865c128a5424fea363cc6d43b53981a4290', -'911d634423646a12c7a0825b3d2dbabd6a34231835318e546142295300705254', -'fbae58129af3c09590fbf1781d7b8d331e521f8bf8e51cf22d91fecf34d68f79', -'3a2b547fb501a376c0d04fd883c0e0fc8f226413ac175ec9e345eac7611a16fd', -'db5c739bcdb88f6ba8532e66ca344dd517c527a3a38793529b53ae133c917433', -'536bb47cf8cc0499588d25ff4185e92c4edd77610a6aeb92cf551c2f74607af0', -'5783bf578fcf8cb7b193d82d961ac2b82cf3767e578f67326c96373a4feab8f7', -'d0b4e46302d55e5e08369e595df24f58726eedde6004dbd99d04f740b9e41132', -'7a51b3e2301ae30402770284c008a5905e56e534efe6dec9dee93aeb9381905f', -'db4407a6f08f08e8fe375cce5aa72c1ef1dc03522b7bf5dd625230ab18485675', -'951e7e2bf6d42f690acbcb7a0cef8a3c5f58994148c0461bcd8daf417ea8bfda', -'22e7156182f9dd6695a1f23108da144534ac783b42188c84913d99df0c0cfee3', -'ab67fb7852361fcc05e7d5550fe7a30bd1211718dbca5a66ab26892a7650c4e0', -'57caac52230ad49aa6fbb83e6d2fc84f48909ee65197034f6a8064d397066ae9', -'47c41cc4de57abb4e9fa7b1347d14bd62bc7401bfd9dce6caf11ce4b73fd94ec', -'3addd5b84ea121bbf6b4044452a806bd94adfbf35d591932229846509fe616eb', -'466e7827eb7034bdf6a1ebba4c1b385e2faa1dc21e418e66b535dcf5a73d88c7', -'4a2640abbd6921cb7ef8fdc8a6cadd27da44539aa10c5e6ad364432824bea419', -'1fd1e25e703124d294c69c67852cc1fb2017e51f37aa4b733d423badfcf90a52', -'e3ad816a90a4320d74d52324c5c657041ef29219fd7be971f51917e59ececbf8', -'641d218c815f689a461565789920d7b86ecabd1a324611f039450be91b632a4b', 
-'9818246afc1b90b717bb202259ba9595a82f999e3310f4369c0c50b06c9de15f', -'b8628b077b4c472e4a6ab18a7f99f7006317e7abe909f8dbe43312780cd8b0a0', -'3788c20cc073baff9b9505380d7522f2dfdbedabd31fa1326347be49adbbdcb4', -'e65c8cca158865575529159c023fdd83001fd7492e0a854191f8cbf93475c494', -'d7e160ef8119f91070f3e95426911c927b87a03cf5374d3bd75ceecebd60073b', -'6db604b62b1be5fdfcd3fb51611f7042666060c65a5217deac5c91a9b5b10453', -'e7b24ae11da6590c9d296eaeeb26c080cc97b20c73793ec205b91ed43f3816cc', -'458b8494dccf3cb91aaa49131537b5008f699004fa3ac4157f9e48420bdd9662', -'bacaea9cb7e3bb62d9e54506e038c9daf7850a7342df047858fe1ff48597a3a9', -'41d5913ca22c3ced4c08c6f8637b9e067f2d7758d0d0275dc704e8f8b64b9f7c', -'3be4222924e4ff05a6bc121e069ed51b335ed2f6cb95402593c8a27417c505f0', -'72ecc2b914745cdb0d6421b1422dfbdba96c0256e56d6d4c1d84ece56d0e3e18', -'3620e5beb80ea8e4aded4796072399a8f17fb782e5736c8d5577493ec3794702', -'2ca7477d0425c676d4e28e5b4e1c71f4b12a16ebd840d0595c3937f23c3d70b0', -'12215cf6c84e1137cfca91c2c1db822eb1caacda9befd008c2adb556cadff0f6', -'40fd93136637c31f9772c3f846cb857988fa2ca59a77282ff1c54dc754f95051', -'3d011143188a0a931bfebe76e77c11272bfb543f71286690072423bb376ee872', -'50ee03d685ad9bc06d07b049d041ca523afd46439c631b08cd684bb8b2fd5929', -'5c4ea2a61857a1179e861274ae292ad22d778eba1eb04d5b02b8901461480baf', -'86f157bb40c266caa5b7b6212fab35f32d6d059251e47e799405457bb00720b9', -'cd27fbfec5083a79986aec7d772ba9695fd6c23263c2252a6eb84dafc7fb9ced', -'e51d497ae3961ea7bca697f7347fd7156c17310be5cb9b956e6fcc7438755f57', -'392fdd08ebaed00373cd6496cde779f976a7928f02cd9cfa7aaf96bea619f6e4', -'d3a8cf88f949bb068c9ae6118afb0373a2929713a165ffcd7113be4ec9004945', -'7127bffb2681dadc8090087261676135f2a063923d19f4683816bf3a8061dbee', -'7669a68489ced1aed50ba44d8be50c6c2513481ae27a464d6c29d0df66029b4b', -'1a9077b6b2ad7c13442b0e5751b26b5ad50e64c37eed2d782d10c72b260cb46d', -'59e75e41cd2966a5ce39b8a7bbb1e89ccee203fffaade7b440c216bdb692895e', 
-'93638c3080f030a1c169680f542fde5c5fbee9f119a2bb78ae6f28fc4616be48', -'0080c6a7155016d55f9717851aa2ad5a9571d3b03fa3ed872095190ca7ab4136', -'260ebd1d7f40c9126466c728d8504c1fb9492bb59f9186adc91c5ac16ca5daf9', -'f4a9131c8af63f8e59c96745019d621d04c24beaeec018a49051890f869e1588', -'8fb08532dfe79c7fa5537ef95a0d1c9c806c134707a05453e62c92c59c45cb0b', -'4cf0bbfe84e73bb22ccbe802a5f90301dbb24573ce707061db0e4c995551fad9', -'5b1b2127115f71e2a10d32da321b5b76b9f6ad0cd6ca1c1dbff454a6e783e826', -'13822b25f5e92870c9fb5a6ad09ebbe0ba3ba2c909a381fc41fcab25e3d2be7f', -'4a26e4bafcd460e38ce082c44ec49725dfe26e5362dd67fc060ff21e273f549d', -'e6b7422316f7ff5d7538a3e48d5860edfabac2d475365addda2f822c45ba56ec', -'7ed1d7e63b852626dd29be2553e9c88566734282220d67703a329660776a1a3a', -'8309340f71736574268497665ab8f31948478988d2f7a9c26014c0ba7fd521fb', -'061c7f252198e284d5993fc1798050f56f27c6b46aaf90fac4df0df326b28e4c', -'ce2685d884fe5c885fb0279e7745dacee38348c5bb8cc629c72bba589943dc3e', -'9d2a9d6f3fff5645eda3e1cdf8df56411d4a92ddf7f6563d745cbd36ac79405c', -'622e602433049439913c9507bc26f45830055f5119b8926e63af6d7f81ce5138', -'60d072af2202693a2467a097b2bc74383508759c36a6fbb94e857538a9e618f7', -'2483f05eade500e24d34f7dc5f1443dca078c2c8c04fd6dd0b87da7c5f19a3e2', -'bec8026fd4a3d606970f37a3c049b9610613637c35eeee15854b8744867b0735', -'092b2ee21b233f81113aa0a0af38879b90b16021ca215b4ec6aa53b070305b49', -'600db50af61adc64812e9866cf789b67535bfa48fba3a82aea023dbb5a6df170', -'78ba189416344f22c50ad5006f5e612115bc903efc97757889fd8d706bb81f5e', -'2dcbb87dae48f4cef02342ac1caa6fe9e293e9ef8b0e109a640430dee8a474ea', -'dd8813ca9b05201b7463a1cf337a177749542eba1cbaa8fb8f7a02185dbf8932', -'ebab14d8926ff9b00a5a5d8f16747fb1008fabf0a91f202adf6d379a77d0318c', -'9219e49b8a402a185dc96bbde1d6e5988b34dca4c7da28c028cbdf0caacb3755', -'2efe394a7af45d0d631b855a44a90a37d0fbf457301e3f02ba70f7dfb069c3e9', -'8992e7200bb05a89f6b6141d82b69d9bf74b30bcc709d06c33b475902ea59e99', 
-'93779565a4462e6b912deaee052f26b092b11f669dcbabfb6de1b43cedbc1946', -'492d0595a3048bd9742d632e2a2643e22a3e0dff16985ab678a64d5403beb1ee', -'45a24ca98a644b889f65b31e27316bbdf5aeee3d4408ea8ec247235ed044e0a6', -'296002fedfb70ca513bedbd5fe0e72d5ceeca5c9ac633ad1d98879887146a5eb', -'4c7bd839fd63731012feb08f4f219370fe6febf56e4a709c8b8cc1afa3bac65e', -'0a10adf58dfc0a84fa6f093bb933f4bc777001c7add52b60637a18a3779e5d54', -'98981e00f3d5c7be41a8dae6e062d42555f2bd8672e9c9a8dd9375803fd4267a', -'fab5f3e8c9849fc19105f05f28f31d297a04d70ef3d85a6cd147d53bda684c0e', -'2790937711173f0d6e6095308850945408218fd5823345e2b3a2107aa65f4dae', -'ae5fb75ab5b05ea54d6089023e00ae797d09449e7e1b69f8250ebf9bdc02bf4a', -'4f84f7296875f76ab5790e176ce4284e36558e4d46591c26cd66f7daca412ff6', -'bbe2e76c54ed38930a1940988a68abde0c20a313b8d40e29cb0984ac77f9f971', -'45d44e43a04b4025456e2e6f265c5fca05245350e817b79b28fd97e3212f832c', -'172a7646479dcc6482f88e7f8fc5fad88522a4c5aadf72cda35ae84c3f4fe2d6', -'4fa284945a06ee78943203be7833711b6fd22505f87efc7ae334c4897da57c80', -'2795f8838e10ad379a639063c17b72b323f2cb790975198bc8ac0ede416477b5', -'db857c67829c6026e1bbe6dc015db074d14eda24a576b1746781c3bc5d14c0ce', -'9709ed076fef74da5ef153337404d88c25b54b6a657b72c59d8325149ddc93de', -'0dcf380356f6d2d7bcba56930e898b83126048af6020904d6196a81885f95b9c', -'2540e7376a10dd67d07f5cc2423b05a7e1641a5631a03e783701940e820fffc2', -'3975271761b85a03064b115de71fd27b1bcaa226337a35e7f7bc229a055d9c3b', -'c98477c4016c5e5efa9740158747ecc599713304d770dab5841c182bff5aa62d', -'b3b9cb17bc96663a956fadf1a8cd1d09a323eefd1a5c887153a5e05ade95283a', -'7a1fbd32552fb1fa6f30e55e17ac880affbdfba7420ec4153c90801d89dadcbc', -'49b479660361c526944d3bd4edd7babdf04c47e0b9d413a2d42e5bda0554e053', -'25e901bbf4c3a95b499ec6db269c7685c3fafcb2294ad23430ff9a38fa732d6a', -'8827a9b2baf1e4b6bc3a8f8bd987cba04064613aa035a10c8df17c87762dba7d', -'d04fa3810fd0275502d93628c6399d1f6ee1e891f4285a3bb99d3f83cda6211d', 
-'4266ae72d5ee29b527b283d137f9f7552b926b21aec867158fc8256df08c6b54', -'9776901f56b85ab35f50615603157893daf337bfa3af97d42d9c6d5cbe98c61d', -'50cf0f4dc221c801ca62bf524309249b5dcc9fbaa8621c20d3d152348b3974ac', -'7382fa53b6d64b269fd9b326ec46104cd30480c8e7b193ca441c70b735621a46', -'ef4430106dbfc1bfee425f4a1288fafcedae17636e3e44483ab868b729a7cdb0', -'e16f57915e4fcbc535a0eacb7c46d39decd8594b6092d435e66f85ce4091b600', -'adc15fe031ca56ac63e317a838ac99972a19cd63c7294f9c53960080be0ec289', -'dafb9bffabee8044994e9b32d98278f8a0e74647dc80519d5f113014f2142b46', -'763b6ec4637f426f6b41fce6af7f23a3537925aaf3b23cbfdbae3227f6930a4e', -'db779d26caf9a11881630256cecf4cede992c616253af77958b706835e4f71a6', -'51d4d8ec4a8536041a80ae5899635162347e316525271659c99e0f79389ffc20', -'e118f346408aba460c5acc78d416ac4d78c93cd0f3f8edab2bfd1d4777e18ced', -'f12664c4ce0206a6b7307ecdc824aaaeec0ab00bd31608ffef628e8a35d2ee8a', -'8dba8a73696e6d0d61cfe7d6b3d1baf9d17a4b336edb5ce100f8fdcc9015f1f1', -'090b2240badddc79406930aefe159d2dce96998b0c4926f829fd28052cdba11f', -'ce75b076f7956692976b3dc2080ebbab7a1062b908bf1c4a071bd404726e175a', -'18d4d6bbebe8430b4c94e2e3c17ba24cce6a3c1a2ab7e46d103e97190aa3db36', -'7cbcc0ce87a0a9f725ee7200a7e86c9bc7d6247da2a96aef37b7ca6652563d88', -'640e2365b73d4c43d84d594b226d017ff520ed8059b889d295c0f900e735b29b', -'9684451f0c396ea9adbcc86fb4abc7306cc408301752d0a0209cc0dbfbb2f4fa', -'a32334d2e357e04f6aa41feaf23e4cad58e0ee7a718dcd6b1ea004d78708a672', -'15e9df0ab69520069311a5a8c20a1e0eead8d97547e4a848404d887803af1968', -'9f6f0a29256e77c8972c3da1a24b3f765dac2fcdb93820cbd88b7f82c54da3d9', -'320feb73d8c766dd6f86b95a7420469a2ad9a56e92c88598b7c2838db5c90ac9', -'1e3e464c911bbaea8c1b2ddc1b6e7630fc773efe77cca7792b9745f67a13ff96', -'988cbcf1afe194c59c04ee381787d28fca9fa5049c18873ee001f084acc6e448', -'0d36855b7571bb8cb95b715a042fe329da3a8e4b778f9d75823e936a0e549312', -'429cc35d04c057bb691eedb0b792bc829b6fd88cd2eecf72a728d7d496d8c30f', 
-'604c30546332ca9fd30b048303643dde9c7946615fe6869b730d16fdb7b02878', -'6146fe7c79189d69df963819d90ec4932552dae2db32dade683c6c63c76dad84', -'5e2a58a43ff166e931d08a540685d7f039017b4573682e03dd96543a7eacb73b', -'af40d8ec09b302be543caea9729c43042fe84faa0a66d9841055081d913c2093', -'afb884d94a0fdcd789e30a11e0e2d99361a624955570560c53e925f5bc3c637e', -'b389de9d41637bb5917fbecae7b8086d5e2a7e97e32be8f3372dea99bcba498d', -'d4f1d0593d2c452839ecea58caab35f01ee260ee24327a97fa1da71523ab825d', -'6b307a5ec87cb3f56b35717963070e00c60b6da250e422f6805956cd18820d5a', -'f926a5ca4f2d6dd6d47123ad512fe52f20547b5067765c68b1c51c038e22607e', -'7e083b829d991ee91486d7fee10276c289cb47465cf9b5113fb6ffcf9ee8d509', -'5d411870089af67c73b83251ad7fa198e26963684ec1fd6f53ecc05d0d5a3fa3', -'073a7a3001d559d0777ff559ff4aebe48dcc7d53c1876271b9cec534c8502846', -'e3c3a7a9cb9c83940f864be276ff6307934a1a23680269e654d00fd520621802', -'7bf2d61f2c128435cea56eaa869dffc5feb705ddd6b1b0752092caa0ecf2cf5a', -'9fd3974b313b0e7b66802259e39e493f77976fd403a02fc006680b5e310e6587', -'33513a8fd099520b3d7797153bb1f0cfa3afaf95c3ba12ec8c8d9936d3851776', -'932208d88197c89a6a819eff1d17515f9a00ef68140e7dbd8711d59fd3344ad0', -'0594fff94b6c5eef1aa3418f89a2b9a815b0b78cd37a9f5bf5e6c7527e341938', -'f0e8db89dc4bb6f1d0afd375a7ce62a1d6a8d30bead1a9e7fc2bf0a307402209', -'3cd8260017004591c8f66263f266f969ced86b858f2e3b43533d349f5f6d4167', -'e79b02d8eb1e72278d5d23db1ae4dd779bd834e4de15119a05f37fa849662277', -'c128256462490241ae90861f5ab5648f37c3579828588996f4eafd090209bc19', -'ecbbb79b86d4e5c55bc97273290c0b1989d6fe867d219fc3ab2837981a586671', -'b22772b8fce88d52a4280bb991411b25de1c6aa64da8889cf03548d95614900b', -'01f5c6de4661bda50e99434e37fff96f6d113e1dd2907f0f768816dca57a75f3', -'ee3ced889ded3e7484b61f8a591bff2e67e3b8b486f398e10db9f3fe74445a82', -'8a90dc410bbd5404421af830def2b74e2baf8af566524ef018f0a6f45443538d', -'f47160e6901aa364a49a122c37591bd6bfc3091931931c66a87cd8002857c120', 
-'41284ecc6d052cb2e2674a9e78058a402a201290d8e5a0912cea571f59289ad8', -'4d328859872d13930f86cd367c2c4879efe0d13114aa233cf8fce3e42274ff92', -'a48e6af747934c4e7441f35c616cb70f4793e4cc353c30cd99eae68fad8eb178', -'3de5562c2b42023dd09d83a53dd49c3d3f15dd82034602fa694fa9f197fb0fb8', -'70742e4750d2d41fa9249ec293cfee40eb98ec96f5dd9b3566180c2ce491e59b', -'9c18f5f2f520793a670fd7d13a81ed0b5b2b2353b80c1df9a26457ada2070aef', -'41dff580778dc0d6913456134c3c616714e913d049363a6ef2320984a900d0ea', -'33212388c36ef30a4a2344fb0ba69de51024ae6e9437d5a7da62bd0273231f9e', -'ab57f854266f4d74fe9e44b59153947a7d87719574c50bce7e987294d759acce', -'f55ab35d960b8d8f3b9157e8ad35307d478b3f35f1a78e4d48046b3cf1a58704', -'1153dece5a0fed4ba5b5946578df9cddcdedfedeefd26b0e9d403c89774c7e63', -'aac7e91aece0703ee8229d0347deb38dc6c5194fe19d65566594ebca627c2426', -'4f0d12abad3bd347134db8b6c7ca9d4db2490bff589ad63c4a431ac52b77cfed', -'ac4cbb2c073b66eb1e002e0d009a209b7b407e416f361ebf3b07df8f32f2f84c', -'270bce14bbb30b331ea04bb785118ba2460e780eca07410c019596199cee6f4b', -'4d1ec4d8e74b2cd7211e8effaf24979172bfc345e3ef10248c67980feb17969a', -'df25f18d610e8cdeded67016f8d0b48cda71691627a66681dab788a4cd92460a', -'3d5e468a3f1aef9b259eef7105e8eb5a0ce4c8d3a2230cf8fc036ff0fe61b4e3', -'e62f0d358a697f8863bd7ce8d29442661a5fea1e20d3cee08b8441919d01c8f1', -'68c424677691726381d4b80b664718609f0d7e96ff21ee204dc6afa3c7eafb26', -'386ab80b0dc9609063101c4408abe43f3edba36b51f5dbed4e4349a201a9310c', -'62ae5f6a96afe8b2fb971777e3fbf8daf1dbae26ba049dedc38e137ea1c592dd', -'74baa5e9183f820df28cac718fdf1081333eba1463be6b43be8534c757aab558', -'f81031a1c3919c0f04f467531b51da84cbe92cf846629456f6d837cefdbe4b18', -'6b3c0b47bfeaecfa277e2cef7ca9833e62abdc751c4fe0c3841d35185e09e000', -'a74e8a63c64f0f93dde2eb8e4080e280b911abd3587e1e94d6cb310eff4d5c3b', -'923ca648c0358452d2d320f76435b0991782a9810fcebe113ec1bbd90676e051', -'8fd5989e62ea9db5a22c3321b559eed0f71c9113659c4c898a2f6bd1cdd177e2', 
-'3944bcc8ac1eeefe8ca42b164887eedf09d7831560ba3b348cafd2d8a688f9aa', -'b7f1beed64f8c3b50d7ed2aa40ef81069a1435f2070e608ed79d6227561ebdf1', -'c96a0993941fc28892a5bab0566b7efe5282239f7dad2e9bf32eca4cd7a2e341', -'297fc73b91a9727ac6c68c2301868d1d18474638dad1939e37e92308584f72a5', -'6f62cbbaca8a3c1801bed061dfb25127e9ba5180d1fd62259c62be763bf1e060', -'5c25c6297cc55a73a85b9fd05b5e7f062bdcdad2638d938cc106a74635328a48', -'0f870c311083f9066cc1536355eb9311dd4d4ebb2eb3f4be59a68f82cf101ce2', -'b1bd3c1a5148bde436c6b5fee1c6aba715e90c9c796f7052737c1d9271c11304', -'f01eca1701776198dcd54facb4ea6d0430eac1f606ef3cb60e681d6866619dae', -'254bb1d8c26bb655db414eb8da2c818e5b722102de88533e39710e5a8fa189fc', -'d643b37250556456341f1e2ea3e92917e8979029207881d6b5523185d3f982cf', -'3094eeb53e4561f924207347c7eaacbcba33c87baa805b9e861002b48bc80198', -'8e122930dbe5f9d7e6b2ddbdc6077e42c6575b0513b7dc56723b74845b31f394', -'465796280843bd23899514250aeaa4604d5f93d0de32732d906c26356c4c0272', -'ec73e56997ffe21a8989a80b7b1b004ea81c4f9209295ae4ba58c20ff1490f6f', -'a8bfd689e82a1683ef217ec4a365c3723a327f82a23748d2524a6c9f38874deb', -'352b7579bbfee90dce0a78092e5af97b6c0e9f7722106859b675b103ac70ea61', -'4e217dc342815642c42a85e80017879d1fec77e97c55fd8982b498398a4c8141', -'677ef5637eb541fb9491f420dcc146b3b184dc9e232aa57ad36ddf1529f69622', -'16a8dd1a35c2711c330fa3e4ce108b48581d7f50bd39d52774a82d6404289e1d', -'caef4776c79795a8f5907c99d7d8a64ab2ba3d2484298b6fca6247f6c92deb15', -'a50f3d1859240d306d97b0bb9fee5be3e61dbd36744b57051a4b01713c0b018f', -'b8a4a6b56ff5ccf1c29a1270b760f799825cc9f875921457132080cff07a7ccf', -'90b7c60f1a910fa507273cb1c9e9688fb3371c501fa666ebee35fe0fd6876864', -'a6e68957ec18f9424be3c35854e8b510ff69fd2fcc245625c19fe756f8a6ba22', -'42622508d48ba6652c8c5e057a9ca0d4dca52df1c2c8f2e205ec5af84a4ae034', -'d6e330a0fdaa09122efb29026e6b29ae01c67dfa97152134699e5727ce8f1a8c', -'3ef8229136529cd0097b28f71a3473f82d37d77bf30e466d1a87ee990243b926', 
-'42141524318ce10cd680465d66d24839cc56f92b8c8511ce237e2f8c698b8acb', -'53aa67d564e8cb18c24be81b694cd6796af777616d91a563297610bf866664ed', -'66202c052c27c4947a892be7160829134f92e15e520d995b0649c398e5ab6c97', -'e37d3e48ab530054cc6001b96f6ff65d7fac342a945453bced9e7d0e93d2ce72', -'0ac8508da892c66f210fbd271fbd105659900df11d49d55fd70ab629543332d8', -'3f86a17910c56e2038a0bd4a9429754921c3db3b231981fa7777a0716de322d4', -'d1f6f3f6aa4aab63a93df3329719f0f17ce0abf6de44f91c74189fdf79397f03', -'7160c44006ac68d56b2d3d8ee9762aef613c988dc2bd118244edd6e5fd2888f1', -'cc7e88c6ad07001d77771f8aa8b1df8240344112d4a33b0df6c201d0dfce52e0', -'c0bc8fadc1aa383211db4d8ffc33fb42b1a0c4c6a8e41b31bf17cbeef6ce256d', -'ea8028180dff361f39a1f342a5df70a29102da2808415b7c4ad91fde82a8b50d', -'d53845f444884348f2853af02f61f4026f9cb587f934a821666263a07ce4bde2', -'4da0c42cd50b30f0225399ce606df889afbcee48a476704fed5d5cd06df6903f', -'7b49f85631277f0eb805c0d6c1339c2d06e1f7245147bd848595681e26be1c16', -'c25449daa3a5a20c432ffbd7224951942eac76cc752b8d40a5b8c0b4aebc643a', -'e04e97ea87675d30569ad0f4b87c468a3f5e2d4bdd8a57d194d9dc2d694c81a7', -'16ce4899882e7903d066f561944c4eb93757f91a5656b7950452db40102d7fef', -'a576f5dcafb9b39fcc8ef7f306b2282c60d5d25edf46002e14a26fc23ff9cf6e', -'801bb9ae6e63614aa8b18d7672501952352c8e7bcb4d435287bfa358b1a6ceaf', -'c66af0d4b38d60db6839e96dd5e51c39bd66f0eabada130456011aabaa82fb03', -'8c84643fffbd3b78330eac851e77b6b90b3787996c8909f276a01ad0e4bfe42e', -'7f31a398614e8dc5aa6e4c00a84684641a07c920f0769ab1456b0fd99988c586', -'4cc5cf690d91d4b0f17482d9b3cf37a8cc3cfbd9a80031c7d0ad83009e3eae4f', -'62aa36f441c3641b07a7e8bc517920f88dd866d75326f8969ea759067bb83519', -'2b3c96cae11cf115ce41ad3ad432a1be964525ecae064b297a03b1ffc8445c26', -'c1d6efb5b64f4411884f42c2222767f808f3e02bdf288ab308669e45f453ab0d', -'3144b230521841fbd05a4c007073ada4c01ba0db361dd269ff67619741ed01a7', -'c699166ed9dd58eaa1815fd9b940109974286b5f827c9ebf7f4cadcd235e0fc6', 
-'8dda730c35ebfd18471cfd22a9d82c61f43972b0f5cc22046277646cf5547954', -'9bb296afaefcf0af647acfacbb8b823f8d3a15e5655f139471a84909a88eb94d', -'4e93768d652cdc5ee161efc9f16318e100de93b995cbe0c1badba2dbc140b0c1', -'92f9f687c84d3c76b9b584fb974369c81fb412f2040aa1b728c6cc5db20f2810', -'1c1458a772198d641f5e23bcabfd11c8e45881762e419c262a5b2fbfd0ca5a24', -'1c0a012649db8561b4002a417897418e7127fb756b025a542552eb63d842c80a', -'c80d5f78a14cee0be8eaae8eecd93c79e9341aaa77b86e81c931710389072179', -'aa0227a4df7ba5ccd73828333cb9d9e50cdcf138aafdf23102e95654a691dd7a', -'1c40b7e8db498b8776b70e41a5e364e8e734c9fba441cce58bfa6ca11fa16742', -'e9ec6e6c690da07b38ba005b26ba97dc8ae201b5fc70978501a1ad973fd3145e', -'3dde18eafa3d4fb8deb7a6314e3448dec3ae4ee240ed9ad018f6a3f5c22fccbe', -'d51972193c1ab27686784fbc6bdf57f2ab3512db10d087fa7daa9b223b4dcd25', -'93ed06b90fd50dab61dcf147ce69c352730fd73ca0c9d2afd0b4da3ed8a01e1c', -'ecd834cd2b93485c58d01cfe47ebf3de8e9e56b395ef5bf29480e030b5d317da', -'235e0fee34ba0972c51b174a975fa97deb0855949962a63faa227e6c89827d9b', -'e3a0472f06f169a06bb26b2e3f9f5cd0d91051f324eeb8518f39aa989a013fa1', -'8d12689f78116526aa0d6eb24450fcc52cd1417cfba960f8e49ec2ce7e0ae146', -'2adba391227f919132be08d4063328bcec9edf8f7d3f3ffbb9552cb117eaf9df', -'4938f163678066ef336a0089ed40fbb329d29168fdb54a4417128796a7f28677', -'b4f00c50d3a326448724f0199c41f9f6b375b1fb09d3f08dee06aff50cd443a6', -'814d4127765aead557be45a54bf08fd1880a8c8d5cb69de2cd54ecf0b0ba9f2c', -'44d4882a9c0a793ea5e77931109d52e943485894718c37d2190a76078b1b0f71', -'14503fc26e9699b768dc90f32d316cff443f19f5dda3d264454eb61674bd973f', -'2f8c794eac8254b4048935104bb6cafede8ce24543b3ed5cae556d2d0a7a4581', -'dabc14dd8539ae18a5a9243fb6c78b6dfdb348bf99c1ad1db57080d9b01ebfc2', -'514cd93b8d3ee13b807dee6f81071a5afa7c6bf8e6398b1f8a2670b8641410b7', -'1ce47158d3d4b55d637bffc72ec4300646477682a4b07954bd0297b6891c725f', -'e3086334be719c20df144302013188714ab70a9e712957aa397b65e197d5a260', 
-'9f64264fe373658c52703429a35a8ef451e78e280ebd215110acacf41c692062', -'9be7ab557421e8164e2e890bc9e9c393b911e2e7346bca586ea8ea746db5ef82', -'77de61802eeab102a1c8f835a550d3158d5f19c3cfcb0e2c341ff3370fa0cf78', -'5bef4e0dbe537b32135689ca199df434255e7914fe695454832a7ba58924aa4c', -'4cbb8510cbc98a9576a743a6848ed2987b3f1c5701152dabbcf2587ada7e11cf', -'4d9a2a4df387574c5d1df5b6c95dc72d3976778ab4b440ce665723962470414b', -'2d182aac179091eb1a452ca88b928d65b353d06b018ce76dc405382cd3e7b32c', -'1df4967e9866867aa6c108861b41c60efa4b430aeaf352274dcd1716a86e22e6', -'2136ad0d7378afea9d74fee86f193ee321a5f6b76cb3d4f00ae6c3fca0b73468', -'146a3f30fafa3bac8eb2cba7488e37523a127dfa9d5db5e62231ed073d4db98d', -'dc1d5351986f79b13fcb55dbebc14b58a760a2cc78e493782f363039031dc0a2', -'988d61985965f6319786edfacffc7ea6c60d22482257e816d0ca32c53d12a2d9', -'a04ea0bb0a58aa953ec51e3e52cbfe1bb140551f46c05d1211e01c43675d59f0', -'2b31f8378b9a1d8dddb5738490e88aabc447c3325fb093ef2675d888c40bb5e7', -'87c37a2e48a74655540eaafe311c6c83ae606611a78f5d3eb0dca7dcf0eab7f5', -'1e00f056d6bda9518676e6f30c6da5abd766ee36c79ef3fab11d3afa01854eb5', -'354dd562a379f18f6182d377ff1d73a15b059e55d230e26b381d2a8cfd00d56b', -'0b114de869ee0665af1b2b3a8732457bbdc404ef009cce95b5cec8c2603f9ff8', -'f9cc18d4cd894d1b61d9de374d9d1665af3d10d6f3342156b85c0f0ae0c73cbc', -'4a5ca92a505ba9a09c970d746c33ddaa206584ddd34e2e485b68dffbcf621cfc', -'b9a05501f19a137bcfefba702aad7628aff3fe6ce2cb8be03796373dec279907', -'f1ebd02e1cdf382cf75344a6289b8bf132227350fb46147961bf74650d38b0c5', -'466acf103d5e3fca2f406e97496af96dbdbba99ba5204e2ef18584574c12fe07', -'a03f2217b1781f1e0fa0a5eb5ca11a43c6c88f0374bc54f3930eab57247ae089', -'e6bfc52d2e4f5fe47c5d06746b6ecd28bb083a9ae7338746578e9b6c512f3878', -'f4c2aa16e5a355b5839502848b519ccf28014c7e185bbc5ab3bf787517f247b4', -'9552ceac803aac6bd88f3b95de8dce4f314730962b2d1a598cbd0bb61017d8d8', -'c9f0eac85cdbf0ce00ed19197f938084f36f81a45ed6470931287c0471e64272', 
-'fc8c9b7720e4671de42c4473a603c4ff7ef49e579b70bde01431b1a1e905cbd0', -'f92eda195323a5938bc3514116fa4f05dac695c785a20be8f8afa8e967901769', -'36671b6c0a92eec53a0bbf98c1e7f1e9734ea2e9185d0d5038a8c4a157ba876f', -'ce6b04307b81dacce56cb3c87972ecdd28bba40e52b9d1efeea5651f52fe691f', -'ea4a8c27e68bbb4f70121a2c7b1877cdff8737e014e9bad1abbe83b0c3997b1c', -'5ef8953f395c841e27ddc1fe3ca0d79331c8189807cec5ed0ff724da5e08cefe', -'bcd37032ad64ce978586cfb1a65b3ec5bb7446eefada4149ce9d7a88b669ed7a', -'ff1e4deba577be3a1e9c9c31c61d305bc0aa13c10bc2ec6064057adc4a36c5f1', -'a8b67d1442fd65d4d05d3bcb6da74c7a59739787df08e120fd4d733e5f966aa7', -'c9eba770b629fd32d2180553cb96d8352dc5cb0bb652f900ffc1e9e29038f98f', -'ed1da7bbd10476dd0e84d0cefa6a457230c84bb4b31190b4ff4b58e25c398210', -'3aa16d6479d93b8bd0c4642856190ea0dc8777723b74089af58bd61b4700d8f2', -'bb47dea8df3aab2f16960e5ab33fa54c9c1bc239833f69fabba64a7b099fc688', -'f2ee6e2778650b556ec5e5e6fc6127e31fe7976efd7b2401e7b36cc7ec260fd4', -'a2604f43af7a5b9f600d34e0c9a1b3829360d2ccbbe20de96a0c32f4a7d70e18', -'b55b22f981a3ffe3c8ccf0ef320f72cd4f228b74bd4bda73236a0c8f67478999', -'322f60f380573876b5391e77b1296640d97de56d6efcb21f83d6d010e0820165', -'45d44e43a04b4025456e2e6f265c5fca05245350e817b79b28fd97e3212f832c', -'4266ae72d5ee29b527b283d137f9f7552b926b21aec867158fc8256df08c6b54', -'f0e8db89dc4bb6f1d0afd375a7ce62a1d6a8d30bead1a9e7fc2bf0a307402209', -'3cd8260017004591c8f66263f266f969ced86b858f2e3b43533d349f5f6d4167', -'e79b02d8eb1e72278d5d23db1ae4dd779bd834e4de15119a05f37fa849662277', -'8a9i0dc410bbd5404421af830def2b74e2baf8af566524ef018f0a6f45443538d', -'f47160e6901aa364a49a122c37591bd6bfc3091931931c66a87cd8002857c120', -'41284ecc6d052cb2e2674a9e78058a402a201290d8e5a0912cea571f59289ad8', -'4d328859872d13930f86cd367c2c4879efe0d13114aa233cf8fce3e42274ff92' } - - -def retryRequest(tempserverlist, apicall): - while len(tempserverlist) != 0: - try: - response = requests.get('{}api/{}'.format(tempserverlist[0], apicall)) - except: - tempserverlist.pop(0) - else: 
- if response.status_code == 200: - return json.loads(response.content) - else: - tempserverlist.pop(0) - - if len(tempserverlist) == 0: - logger.error("None of the APIs are responding for the call {}".format(apicall)) - return 0 - - -def multiRequest(apicall, net): - testserverlist = ['http://0.0.0.0:9000/', 'https://testnet-flosight.duckdns.org/', 'https://testnet.flocha.in/'] - mainserverlist = ['https://flosight.duckdns.org/','http://0.0.0.0:9495/'] - if net == 'mainnet': - return retryRequest(mainserverlist, apicall) - elif net == 'testnet': - return retryRequest(testserverlist, apicall) - - -def pushData_SSEapi(message): - signature = pybtc.sign_message(message.encode(), privKey) - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', 'Signature': signature} - - '''try: - r = requests.post(sseAPI_url, json={'message': '{}'.format(message)}, headers=headers) - except: - logger.error("couldn't push the following message to SSE api {}".format(message))''' - print('') - - -def processBlock(blockindex): - logger.info(f'Processing block {blockindex}') - logger.info('Processing block ' + str(blockindex)) - # Get block details - response = multiRequest(f"block-index/{blockindex}", config['DEFAULT']['NET']) - blockhash = response['blockHash'] - blockinfo = multiRequest(f"block/{blockhash}", config['DEFAULT']['NET']) - - # todo Rule 8 - read every transaction from every block to find and parse flodata - counter = 0 - acceptedTxList = [] - # Scan every transaction - logger.info("Before tx loop") - #pdb.set_trace() - counter = 0 - #pdb.set_trace() - for transaction in blockinfo["tx"]: - if blockindex < 4365011 and (transaction not in goodtxset): - continue - counter = counter + 1 - logger.info(f"Transaction {counter} {transaction}") - #transaction_data = multiRequest(f"tx/{transaction}", config['DEFAULT']['NET']) - #try: - # text = transaction_data["floData"] - # text = text.replace("\n", " \n ") - #except: - # logger.info("The API has passed the 
Block height test but failed transaction_data['floData'] test. transaction_data response from the API is logged below") - # logger.info(f"{transaction_data}") - - - #if current_index == -1: - current_index = -1 - while(current_index == -1): - transaction_data = multiRequest(f"tx/{transaction}", config['DEFAULT']['NET']) - try: - text = transaction_data["floData"] - text = text.replace("\n", " \n ") - current_index = 2 - except: - logger.info("The API has passed the Block height test but failed transaction_data['floData'] test") - logger.info(f"Block Height : {blockindex}") - logger.info(f"Transaction {transaction} data : ") - logger.info(transaction_data) - logger.info('Program will wait for 1 seconds and try to reconnect') - time.sleep(1) - - - # todo Rule 9 - Reject all noise transactions. Further rules are in parsing.py - returnval = None - parsed_data = parsing.parse_flodata(text, blockinfo, config['DEFAULT']['NET']) - if parsed_data['type'] != 'noise': - logger.info(f"Processing transaction {transaction}") - logger.info(f"flodata {text} is parsed to {parsed_data}") - returnval = processTransaction(transaction_data, parsed_data) - - if returnval == 1: - acceptedTxList.append(transaction) - elif returnval == 0: - logger.info("Transfer for the transaction %s is illegitimate. 
Moving on" % transaction) - - if len(acceptedTxList) > 0: - tempinfo = blockinfo['tx'].copy() - for tx in blockinfo['tx']: - if tx not in acceptedTxList: - tempinfo.remove(tx) - blockinfo['tx'] = tempinfo - updateLatestBlock(blockinfo) - - engine = create_engine('sqlite:///system.db') - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - entry = session.query(SystemData).filter(SystemData.attribute == 'lastblockscanned').all()[0] - entry.value = str(blockindex) - session.commit() - session.close() - - # Check smartContracts which will be triggered locally, and not by the contract committee - checkLocaltriggerContracts(blockinfo) - - -def processApiBlock(blockhash): - logger.info(config['DEFAULT']['NET']) - blockinfo = multiRequest('block/{}'.format(str(blockhash)), config['DEFAULT']['NET']) - - # todo Rule 8 - read every transaction from every block to find and parse flodata - counter = 0 - acceptedTxList = [] - # Scan every transaction - for transaction in blockinfo["tx"]: - transaction_data = multiRequest(f"tx/{transaction}", config['DEFAULT']['NET']) - text = transaction_data["floData"] - text = text.replace("\n", " \n ") - - # todo Rule 9 - Reject all noise transactions. Further rules are in parsing.py - returnval = None - parsed_data = parsing.parse_flodata(text, blockinfo, config['DEFAULT']['NET']) - if parsed_data['type'] != 'noise': - logger.info(f"Processing transaction {transaction}") - logger.info(f"flodata {text} is parsed to {parsed_data}") - returnval = processTransaction(transaction_data, parsed_data) - - if returnval == 1: - acceptedTxList.append(transaction) - elif returnval == 0: - logger.info("Transfer for the transaction %s is illegitimate. 
Moving on" % transaction) - - if len(acceptedTxList) > 0: - tempinfo = blockinfo['tx'].copy() - for tx in blockinfo['tx']: - if tx not in acceptedTxList: - tempinfo.remove(tx) - blockinfo['tx'] = tempinfo - updateLatestBlock(blockinfo) - - engine = create_engine('sqlite:///system.db') - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - entry = session.query(SystemData).filter(SystemData.attribute == 'lastblockscanned').all()[0] - entry.value = str(blockinfo['height']) - logger.info('Last scanned block value should be '+ str(entry.value)) - session.commit() - session.close() - - # Check smartContracts which will be triggered locally, and not by the contract committee - checkLocaltriggerContracts(blockinfo) - - -def updateLatestTransaction(transactionData, parsed_data): - # connect to latest transaction db - conn = sqlite3.connect('latestCache.db') - conn.execute( - "INSERT INTO latestTransactions(transactionHash, blockNumber, jsonData, transactionType, parsedFloData) VALUES (?,?,?,?,?)", - (transactionData['txid'], transactionData['blockheight'], json.dumps(transactionData), parsed_data['type'], - json.dumps(parsed_data))) - conn.commit() - conn.close() - - -def updateLatestBlock(blockData): - # connect to latest block db - conn = sqlite3.connect('latestCache.db') - conn.execute('INSERT INTO latestBlocks(blockNumber, blockHash, jsonData) VALUES (?,?,?)', - (blockData['height'], blockData['hash'], json.dumps(blockData))) - conn.commit() - conn.close() - - -def transferToken(tokenIdentification, tokenAmount, inputAddress, outputAddress, transaction_data=None, parsed_data=None): - engine = create_engine('sqlite:///tokens/{}.db'.format(tokenIdentification), echo=True) - Base.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - availableTokens = session.query(func.sum(ActiveTable.transferBalance)).filter_by(address=inputAddress).all()[0][0] - commentTransferAmount = float(tokenAmount) - if availableTokens is None: 
- logger.info(f"The sender address {inputAddress} doesn't own any {tokenIdentification.upper()} tokens") - session.close() - return 0 - - elif availableTokens < commentTransferAmount: - logger.info( - "The transfer amount passed in the comments is more than the user owns\nThis transaction will be discarded\n") - session.close() - return 0 - - elif availableTokens >= commentTransferAmount: - table = session.query(ActiveTable).filter(ActiveTable.address == inputAddress).all() - block_data = multiRequest('block/{}'.format(transaction_data['blockhash']), config['DEFAULT']['NET']) - - pidlst = [] - checksum = 0 - for row in table: - if checksum >= commentTransferAmount: - break - pidlst.append([row.id, row.transferBalance]) - checksum = checksum + row.transferBalance - - if checksum == commentTransferAmount: - consumedpid_string = '' - - # Update all pids in pidlist's transferBalance to 0 - lastid = session.query(ActiveTable)[-1].id - for piditem in pidlst: - entry = session.query(ActiveTable).filter(ActiveTable.id == piditem[0]).all() - consumedpid_string = consumedpid_string + '{},'.format(piditem[0]) - session.add(TransferLogs(sourceFloAddress=inputAddress, destFloAddress=outputAddress, - transferAmount=entry[0].transferBalance, sourceId=piditem[0], - destinationId=lastid + 1, - blockNumber=block_data['height'], time=block_data['time'], - transactionHash=transaction_data['txid'])) - entry[0].transferBalance = 0 - - if len(consumedpid_string) > 1: - consumedpid_string = consumedpid_string[:-1] - - # Make new entry - session.add(ActiveTable(address=outputAddress, consumedpid=consumedpid_string, - transferBalance=commentTransferAmount)) - - # Migration - # shift pid of used utxos from active to consumed - for piditem in pidlst: - # move the parentids consumed to consumedpid column in both activeTable and consumedTable - entries = session.query(ActiveTable).filter(ActiveTable.parentid == piditem[0]).all() - for entry in entries: - entry.consumedpid = entry.consumedpid + 
',{}'.format(piditem[0]) - entry.parentid = None - - entries = session.query(ConsumedTable).filter(ConsumedTable.parentid == piditem[0]).all() - for entry in entries: - entry.consumedpid = entry.consumedpid + ',{}'.format(piditem[0]) - entry.parentid = None - - # move the pids consumed in the transaction to consumedTable and delete them from activeTable - session.execute( - 'INSERT INTO consumedTable (id, address, parentid, consumedpid, transferBalance) SELECT id, address, parentid, consumedpid, transferBalance FROM activeTable WHERE id={}'.format( - piditem[0])) - session.execute('DELETE FROM activeTable WHERE id={}'.format(piditem[0])) - session.commit() - session.commit() - - elif checksum > commentTransferAmount: - consumedpid_string = '' - # Update all pids in pidlist's transferBalance - lastid = session.query(ActiveTable)[-1].id - for idx, piditem in enumerate(pidlst): - entry = session.query(ActiveTable).filter(ActiveTable.id == piditem[0]).all() - if idx != len(pidlst) - 1: - session.add(TransferLogs(sourceFloAddress=inputAddress, destFloAddress=outputAddress, - transferAmount=entry[0].transferBalance, sourceId=piditem[0], - destinationId=lastid + 1, - blockNumber=block_data['height'], time=block_data['time'], - transactionHash=transaction_data['txid'])) - entry[0].transferBalance = 0 - consumedpid_string = consumedpid_string + '{},'.format(piditem[0]) - else: - session.add(TransferLogs(sourceFloAddress=inputAddress, destFloAddress=outputAddress, - transferAmount=piditem[1] - (checksum - commentTransferAmount), - sourceId=piditem[0], - destinationId=lastid + 1, - blockNumber=block_data['height'], time=block_data['time'], - transactionHash=transaction_data['txid'])) - entry[0].transferBalance = checksum - commentTransferAmount - - if len(consumedpid_string) > 1: - consumedpid_string = consumedpid_string[:-1] - - # Make new entry - session.add(ActiveTable(address=outputAddress, parentid=pidlst[-1][0], consumedpid=consumedpid_string, - 
transferBalance=commentTransferAmount)) - - # Migration - # shift pid of used utxos from active to consumed - for piditem in pidlst[:-1]: - # move the parentids consumed to consumedpid column in both activeTable and consumedTable - entries = session.query(ActiveTable).filter(ActiveTable.parentid == piditem[0]).all() - for entry in entries: - entry.consumedpid = entry.consumedpid + ',{}'.format(piditem[0]) - entry.parentid = None - - entries = session.query(ConsumedTable).filter(ConsumedTable.parentid == piditem[0]).all() - for entry in entries: - entry.consumedpid = entry.consumedpid + ',{}'.format(piditem[0]) - entry.parentid = None - - # move the pids consumed in the transaction to consumedTable and delete them from activeTable - session.execute( - 'INSERT INTO consumedTable (id, address, parentid, consumedpid, transferBalance) SELECT id, address, parentid, consumedpid, transferBalance FROM activeTable WHERE id={}'.format( - piditem[0])) - session.execute('DELETE FROM activeTable WHERE id={}'.format(piditem[0])) - session.commit() - session.commit() - - block_data = multiRequest('block/{}'.format(transaction_data['blockhash']), config['DEFAULT']['NET']) - - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add(TransactionHistory(sourceFloAddress=inputAddress, destFloAddress=outputAddress, - transferAmount=tokenAmount, blockNumber=block_data['height'], - blockHash=block_data['hash'], time=block_data['time'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, jsonData=json.dumps(transaction_data), - transactionType=parsed_data['type'], - parsedFloData=json.dumps(parsed_data))) - session.commit() - session.close() - return 1 - - -def checkLocaltriggerContracts(blockinfo): - engine = create_engine('sqlite:///system.db', echo=False) - connection = engine.connect() - # todo : filter activeContracts which only have local triggers - activeContracts = connection.execute( - 'select contractName, contractAddress 
from activecontracts where status=="active" ').fetchall() - connection.close() - - for contract in activeContracts: - - # pull out the contract structure into a dictionary - engine = create_engine('sqlite:///smartContracts/{}-{}.db'.format(contract[0], contract[1]), echo=True) - connection = engine.connect() - # todo : filter activeContracts which only have local triggers - attributevaluepair = connection.execute( - "select attribute, value from contractstructure where attribute != 'contractName' and attribute != 'flodata' and attribute != 'contractAddress'").fetchall() - contractStructure = {} - conditionDict = {} - counter = 0 - for item in attributevaluepair: - if list(item)[0] == 'exitconditions': - conditionDict[counter] = list(item)[1] - counter = counter + 1 - else: - contractStructure[list(item)[0]] = list(item)[1] - if len(conditionDict) > 0: - contractStructure['exitconditions'] = conditionDict - del counter, conditionDict - - if contractStructure['contractType'] == 'one-time-event': - # Check if the contract has blockchain trigger or committee trigger - if 'exitconditions' in contractStructure: - # This is a committee trigger contract - expiryTime = contractStructure['expiryTime'] - expirytime_split = expiryTime.split(' ') - parse_string = '{}/{}/{} {}'.format(expirytime_split[3], parsing.months[expirytime_split[1]], - expirytime_split[2], expirytime_split[4]) - expirytime_object = parsing.arrow.get(parse_string, 'YYYY/M/D HH:mm:ss').replace( - tzinfo=expirytime_split[5][3:]) - blocktime_object = parsing.arrow.get(blockinfo['time']).to('Asia/Kolkata') - - if blocktime_object > expirytime_object: - if 'minimumsubscriptionamount' in contractStructure: - minimumsubscriptionamount = contractStructure['minimumsubscriptionamount'] - tokenAmount_sum = \ - connection.execute('select sum(tokenAmount) from contractparticipants').fetchall()[0][0] - if tokenAmount_sum < minimumsubscriptionamount: - # Initialize payback to contract participants - contractParticipants 
= connection.execute( - 'select participantAddress, tokenAmount, transactionHash from contractparticipants').fetchall()[ - 0][0] - - for participant in contractParticipants: - tokenIdentification = contractStructure['tokenIdentification'] - contractAddress = connection.execute( - 'select * from contractstructure where attribute="contractAddress"').fetchall()[0][ - 0] - returnval = transferToken(tokenIdentification, participant[1], contractAddress, - participant[0]) - if returnval is None: - logger.critical( - "Something went wrong in the token transfer method while doing local Smart Contract Trigger. THIS IS CRITICAL ERROR") - return - connection.execute( - 'update contractparticipants set winningAmount="{}" where participantAddress="{}" and transactionHash="{}"'.format( - (participant[1], participant[0], participant[2]))) - - # add transaction to ContractTransactionHistory - engine = create_engine( - 'sqlite:///smartContracts/{}-{}.db'.format(contract[0], - contract[1]), - echo=True) - ContractBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - session.add(ContractTransactionHistory(transactionType='trigger', - transactionSubType='minimumsubscriptionamount-payback', - transferAmount=None, - blockNumber=blockinfo['height'], - blockHash=blockinfo['hash'], - time=blockinfo['time'])) - session.commit() - session.close() - - engine = create_engine('sqlite:///system.db', echo=True) - connection = engine.connect() - connection.execute( - 'update activecontracts set status="closed" where contractName="{}" and contractAddress="{}"'.format( - contract[0], contract[1])) - connection.execute( - 'update activecontracts set closeDate="{}" where contractName="{}" and contractAddress="{}"'.format( - blockinfo['time'], - contract[0], contract[1])) - connection.close() - - engine = create_engine('sqlite:///system.db', echo=True) - connection = engine.connect() - connection.execute( - 'update activecontracts set status="expired" where contractName="{}" 
and contractAddress="{}"'.format( - contract[0], contract[1])) - connection.execute( - 'update activecontracts set expirydate="{}" where contractName="{}" and contractAddress="{}"'.format( - blockinfo['time'], - contract[0], contract[1])) - connection.close() - - elif 'payeeAddress' in contractStructure: - # This is a local trigger contract - if 'maximumsubscriptionamount' in contractStructure: - maximumsubscriptionamount = connection.execute( - 'select value from contractstructure where attribute=="maximumsubscriptionamount"').fetchall()[ - 0][0] - tokenAmount_sum = \ - connection.execute('select sum(tokenAmount) from contractparticipants').fetchall()[0][0] - if tokenAmount_sum >= maximumsubscriptionamount: - # Trigger the contract - payeeAddress = contractStructure['payeeAddress'] - tokenIdentification = contractStructure['tokenIdentification'] - contractAddress = contractStructure['contractAddress'] - returnval = transferToken(tokenIdentification, tokenAmount_sum, contractAddress, payeeAddress) - if returnval is None: - logger.critical( - "Something went wrong in the token transfer method while doing local Smart Contract Trigger") - return - connection.execute( - 'update contractparticipants set winningAmount="{}"'.format( - (0))) - - # add transaction to ContractTransactionHistory - engine = create_engine( - 'sqlite:///smartContracts/{}-{}.db'.format(contract[0], - contract[1]), - echo=True) - ContractBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - session.add(ContractTransactionHistory(transactionType='trigger', - transactionSubType='maximumsubscriptionamount', - sourceFloAddress=contractAddress, - destFloAddress=payeeAddress, - transferAmount=tokenAmount_sum, - blockNumber=blockinfo['height'], - blockHash=blockinfo['hash'], - time=blockinfo['time'])) - session.commit() - session.close() - - engine = create_engine('sqlite:///system.db', echo=False) - connection = engine.connect() - connection.execute( - 'update activecontracts 
set status="closed" where contractName="{}" and contractAddress="{}"'.format( - contract[0], contract[1])) - connection.execute( - 'update activecontracts set closeDate="{}" where contractName="{}" and contractAddress="{}"'.format( - blockinfo['time'], contract[0], contract[1])) - connection.execute( - 'update activecontracts set expiryDate="{}" where contractName="{}" and contractAddress="{}"'.format( - blockinfo['time'], contract[0], contract[1])) - connection.close() - return - - expiryTime = contractStructure['expiryTime'] - expirytime_split = expiryTime.split(' ') - parse_string = '{}/{}/{} {}'.format(expirytime_split[3], parsing.months[expirytime_split[1]], - expirytime_split[2], expirytime_split[4]) - expirytime_object = parsing.arrow.get(parse_string, 'YYYY/M/D HH:mm:ss').replace( - tzinfo=expirytime_split[5][3:]) - blocktime_object = parsing.arrow.get(blockinfo['time']).to('Asia/Kolkata') - - if blocktime_object > expirytime_object: - if 'minimumsubscriptionamount' in contractStructure: - minimumsubscriptionamount = contractStructure['minimumsubscriptionamount'] - tokenAmount_sum = \ - connection.execute('select sum(tokenAmount) from contractparticipants').fetchall()[0][0] - if tokenAmount_sum < minimumsubscriptionamount: - # Initialize payback to contract participants - contractParticipants = connection.execute( - 'select participantAddress, tokenAmount, transactionHash from contractparticipants').fetchall()[ - 0][0] - - for participant in contractParticipants: - tokenIdentification = connection.execute( - 'select * from contractstructure where attribute="tokenIdentification"').fetchall()[ - 0][ - 0] - contractAddress = connection.execute( - 'select * from contractstructure where attribute="contractAddress"').fetchall()[0][ - 0] - returnval = transferToken(tokenIdentification, participant[1], contractAddress, - participant[0]) - if returnval is None: - logger.critical( - "Something went wrong in the token transfer method while doing local Smart Contract 
Trigger") - return - connection.execute( - 'update contractparticipants set winningAmount="{}" where participantAddress="{}" and transactionHash="{}"'.format( - (participant[1], participant[0], participant[2]))) - - # add transaction to ContractTransactionHistory - engine = create_engine( - 'sqlite:///smartContracts/{}-{}.db'.format(contract[0], - contract[1]), - echo=True) - ContractBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - session.add(ContractTransactionHistory(transactionType='trigger', - transactionSubType='minimumsubscriptionamount-payback', - transferAmount=None, - blockNumber=blockinfo['height'], - blockHash=blockinfo['hash'], - time=blockinfo['time'])) - session.commit() - session.close() - - engine = create_engine('sqlite:///system.db', echo=False) - connection = engine.connect() - connection.execute( - 'update activecontracts set status="closed" where contractName="{}" and contractAddress="{}"'.format( - contract[0], contract[1])) - connection.execute( - 'update activecontracts set closeDate="{}" where contractName="{}" and contractAddress="{}"'.format( - blockinfo['time'], contract[0], contract[1])) - connection.execute( - 'update activecontracts set expiryDate="{}" where contractName="{}" and contractAddress="{}"'.format( - blockinfo['time'], contract[0], contract[1])) - connection.close() - return - - # Trigger the contract - payeeAddress = contractStructure['payeeAddress'] - tokenIdentification = contractStructure['tokenIdentification'] - contractAddress = contractStructure['contractAddress'] - engine = create_engine('sqlite:///smartContracts/{}-{}.db'.format(contract[0], contract[1]), - echo=True) - connection = engine.connect() - tokenAmount_sum = \ - connection.execute('select sum(tokenAmount) from contractparticipants').fetchall()[0][0] - returnval = transferToken(tokenIdentification, tokenAmount_sum, contractAddress, payeeAddress) - if returnval is None: - logger.critical( - "Something went wrong in the token 
transfer method while doing local Smart Contract Trigger") - return - connection.execute('update contractparticipants set winningAmount="{}"'.format(0)) - - # add transaction to ContractTransactionHistory - engine = create_engine( - 'sqlite:///smartContracts/{}-{}.db'.format(contract[0], - contract[1]), - echo=True) - ContractBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - session.add(ContractTransactionHistory(transactionType='trigger', - transactionSubType='expiryTime', - sourceFloAddress=contractAddress, - destFloAddress=payeeAddress, - transferAmount=tokenAmount_sum, - blockNumber=blockinfo['height'], - blockHash=blockinfo['hash'], - time=blockinfo['time'])) - session.commit() - session.close() - - engine = create_engine('sqlite:///system.db', echo=False) - connection = engine.connect() - connection.execute( - 'update activecontracts set status="closed" where contractName="{}" and contractAddress="{}"'.format( - contract[0], contract[1])) - connection.execute( - 'update activecontracts set closeDate="{}" where contractName="{}" and contractAddress="{}"'.format( - blockinfo['time'], contract[0], contract[1])) - connection.execute( - 'update activecontracts set expiryDate="{}" where contractName="{}" and contractAddress="{}"'.format( - blockinfo['time'], contract[0], contract[1])) - connection.close() - return - - -def processTransaction(transaction_data, parsed_data): - # Do the necessary checks for the inputs and outputs - # todo Rule 38 - Here we are doing FLO processing. We attach asset amounts to a FLO address, so every FLO address - # will have multiple feed ins of the asset. Each of those feedins will be an input to the address. - # an address can also spend the asset. Each of those spends is an output of that address feeding the asset into some - # other address an as input - - # Rule 38 reframe - For checking any asset transfer on the flo blockchain it is possible that some transactions may use more than one - # vins. 
However in any single transaction the system considers valid, they can be only one source address from which the flodata is - # originting. To ensure consistency, we will have to check that even if there are more than one vins in a transaction, there should be - # excatly one FLO address on the originating side and that FLO address should be the owner of the asset tokens being transferred - - # Create vinlist and outputlist - vinlist = [] - querylist = [] - - # todo Rule 39 - Create a list of vins for a given transaction id - for obj in transaction_data["vin"]: - querylist.append([obj["txid"], obj["vout"]]) - - totalinputval = 0 - inputadd = '' - - # todo Rule 40 - For each vin, find the feeding address and the fed value. Make an inputlist containing [inputaddress, n value] - for query in querylist: - content = multiRequest('tx/{}'.format(str(query[0])), config['DEFAULT']['NET']) - for objec in content["vout"]: - if objec["n"] == query[1]: - inputadd = objec["scriptPubKey"]["addresses"][0] - totalinputval = totalinputval + float(objec["value"]) - vinlist.append([inputadd, objec["value"]]) - - # todo Rule 41 - Check if all the addresses in a transaction on the input side are the same - for idx, item in enumerate(vinlist): - if idx == 0: - temp = item[0] - continue - if item[0] != temp: - logger.info( - f"System has found more than one address as part of vin. Transaction {transaction_data['txid']} is rejected") - return 0 - - inputlist = [vinlist[0][0], totalinputval] - - # todo Rule 42 - If the number of vout is more than 2, reject the transaction - if len(transaction_data["vout"]) > 2: - logger.info( - f"System has found more than 2 address as part of vout. Transaction {transaction_data['txid']} is rejected") - return 0 - - # todo Rule 43 - A transaction accepted by the system has two vouts, 1. The FLO address of the receiver - # 2. Flo address of the sender as change address. 
If the vout address is change address, then the other adddress - # is the recevier address - - outputlist = [] - addresscounter = 0 - inputcounter = 0 - for obj in transaction_data["vout"]: - if obj["scriptPubKey"]["type"] == "pubkeyhash": - addresscounter = addresscounter + 1 - if inputlist[0] == obj["scriptPubKey"]["addresses"][0]: - inputcounter = inputcounter + 1 - continue - outputlist.append([obj["scriptPubKey"]["addresses"][0], obj["value"]]) - - if addresscounter == inputcounter: - outputlist = [inputlist[0]] - elif len(outputlist) != 1: - logger.info( - f"Transaction's change is not coming back to the input address. Transaction {transaction_data['txid']} is rejected") - return 0 - else: - outputlist = outputlist[0] - - logger.info( - f"Input address list : {inputlist}") - logger.info( - f"Output address list : {outputlist}") - - # All FLO checks completed at this point. - # Semantic rules for parsed data begins - - # todo Rule 44 - Process as per the type of transaction - if parsed_data['type'] == 'transfer': - logger.info(f"Transaction {transaction_data['txid']} is of the type transfer") - - # todo Rule 45 - If the transfer type is token, then call the function transferToken to adjust the balances - if parsed_data['transferType'] == 'token': - # check if the token exists in the database - if os.path.isfile(f"./tokens/{parsed_data['tokenIdentification']}.db"): - # Check if the transaction hash already exists in the token db - engine = create_engine(f"sqlite:///tokens/{parsed_data['tokenIdentification']}.db", echo=True) - connection = engine.connect() - blockno_txhash = connection.execute( - 'select blockNumber, transactionHash from transactionHistory').fetchall() - connection.close() - blockno_txhash_T = list(zip(*blockno_txhash)) - - if transaction_data['txid'] in list(blockno_txhash_T[1]): - logger.warning( - f"Transaction {transaction_data['txid']} already exists in the token db. 
This is unusual, please check your code") - pushData_SSEapi( - f"Error | Transaction {transaction_data['txid']} already exists in the token db. This is unusual, please check your code") - return 0 - - returnval = transferToken(parsed_data['tokenIdentification'], parsed_data['tokenAmount'], inputlist[0], - outputlist[0], transaction_data, parsed_data) - if returnval is None: - logger.info("Something went wrong in the token transfer method") - pushData_SSEapi( - f"Error | Something went wrong while doing the internal db transactions for {transaction_data['txid']}") - return 0 - else: - updateLatestTransaction(transaction_data, parsed_data) - - # If this is the first interaction of the outputlist's address with the given token name, add it to token mapping - engine = create_engine('sqlite:///system.db', echo=True) - connection = engine.connect() - firstInteractionCheck = connection.execute( - f"select * from tokenAddressMapping where tokenAddress='{outputlist[0]}' and token='{parsed_data['tokenIdentification']}'").fetchall() - - if len(firstInteractionCheck) == 0: - connection.execute( - f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{outputlist[0]}', '{parsed_data['tokenIdentification']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}')") - - connection.close() - - # Pass information to SSE channel - url = 'https://ranchimallflo.duckdns.org/' - headers = {'Accept': 'application/json', 'Content-Type': 'application/json'} - # r = requests.post(url, json={f"message': 'Token Transfer | name:{parsed_data['tokenIdentification']} | transactionHash:{transaction_data['txid']}"}, headers=headers) - return 1 - else: - logger.info( - f"Token transfer at transaction {transaction_data['txid']} rejected as a token with the name {parsed_data['tokenIdentification']} doesnt not exist") - engine = create_engine(f"sqlite:///system.db", echo=True) - 
SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add(RejectedTransactionHistory(tokenIdentification=parsed_data['tokenIdentification'], - sourceFloAddress=inputadd, destFloAddress=outputlist[0], - transferAmount=parsed_data['tokenAmount'], - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Token transfer at transaction {transaction_data['txid']} rejected as a token with the name {parsed_data['tokenIdentification']} doesnt not exist", - transactionType=parsed_data['type'], - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - pushData_SSEapi( - f"Error | Token transfer at transaction {transaction_data['txid']} rejected as a token with the name {parsed_data['tokenIdentification']} doesnt not exist") - return 0 - - # todo Rule 46 - If the transfer type is smart contract, then call the function transferToken to do sanity checks & lock the balance - elif parsed_data['transferType'] == 'smartContract': - if os.path.isfile(f"./smartContracts/{parsed_data['contractName']}-{outputlist[0]}.db"): - - # Check if the transaction hash already exists in the contract db (Safety check) - engine = create_engine( - 'sqlite:///smartContracts/{}-{}.db'.format(parsed_data['contractName'], outputlist[0]), echo=True) - connection = engine.connect() - participantAdd_txhash = connection.execute( - 'select participantAddress, transactionHash from contractparticipants').fetchall() - participantAdd_txhash_T = list(zip(*participantAdd_txhash)) - - if len(participantAdd_txhash) != 0 and transaction_data['txid'] in list(participantAdd_txhash_T[1]): - logger.warning( - f"Transaction {transaction_data['txid']} rejected as it already exists 
in the Smart Contract db. This is unusual, please check your code") - pushData_SSEapi( - f"Error | Transaction {transaction_data['txid']} rejected as it already exists in the Smart Contract db. This is unusual, please check your code") - return 0 - - # if contractAddress was passed, then check if it matches the output address of this contract - if 'contractAddress' in parsed_data: - if parsed_data['contractAddress'] != outputlist[0]: - logger.info( - f"Contract participation at transaction {transaction_data['txid']} rejected as contractAddress specified in flodata, {parsed_data['contractAddress']}, doesnt not match with transaction's output address {outputlist[0]}") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add( - RejectedContractTransactionHistory(transactionType='participation', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Contract participation at transaction {transaction_data['txid']} rejected as contractAddress specified in flodata, {parsed_data['contractAddress']}, doesnt not match with transaction's output address {outputlist[0]}", - - parsedFloData=json.dumps(parsed_data))) - session.commit() - session.close() - - url = 'https://ranchimallflo.duckdns.org/' - headers = {'Accept': 'application/json', 'Content-Type': 'application/json'} - '''r = requests.post(url, json={ - 'message': f"Error | Contract participation at transaction 
{transaction_data['txid']} rejected as contractAddress specified in flodata, {parsed_data['contractAddress']}, doesnt not match with transaction's output address {outputlist[0]}"}, - headers=headers)''' - - # Pass information to SSE channel - pushData_SSEapi( - 'Error| Mismatch in contract address specified in flodata and the output address of the transaction {}'.format( - transaction_data['txid'])) - return 0 - - # check the status of the contract - engine = create_engine('sqlite:///system.db', echo=True) - connection = engine.connect() - contractStatus = connection.execute( - f"select status from activecontracts where contractName=='{parsed_data['contractName']}' and contractAddress='{outputlist[0]}'").fetchall()[ - 0][0] - connection.close() - contractList = [] - - if contractStatus == 'closed': - logger.info( - f"Transaction {transaction_data['txid']} closed as Smart contract {parsed_data['contractName']} at the {outputlist[0]} is closed") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add( - RejectedContractTransactionHistory(transactionType='participation', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Transaction {transaction_data['txid']} closed as Smart contract {parsed_data['contractName']} at the {outputlist[0]} is closed", - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - - url = 
'https://ranchimallflo.duckdns.org/' - headers = {'Accept': 'application/json', 'Content-Type': 'application/json'} - '''r = requests.post(url, json={ - 'message': f"Error | Transaction {transaction_data['txid']} closed as Smart contract {parsed_data['contractName']} at the {outputlist[0]} is closed"}, - headers=headers)''' - return 0 - else: - engine = create_engine( - 'sqlite:///smartContracts/{}-{}.db'.format(parsed_data['contractName'], outputlist[0]), - echo=True) - ContractBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - result = session.query(ContractStructure).filter_by(attribute='expiryTime').all() - session.close() - if result: - # now parse the expiry time in python - expirytime = result[0].value.strip() - expirytime_split = expirytime.split(' ') - parse_string = '{}/{}/{} {}'.format(expirytime_split[3], parsing.months[expirytime_split[1]], - expirytime_split[2], expirytime_split[4]) - expirytime_object = parsing.arrow.get(parse_string, 'YYYY/M/D HH:mm:ss').replace( - tzinfo=expirytime_split[5][3:]) - blocktime_object = parsing.arrow.get(transaction_data['blocktime']).to('Asia/Kolkata') - - if blocktime_object > expirytime_object: - logger.info( - f"Transaction {transaction_data['txid']} rejected as Smart contract {parsed_data['contractName']}-{outputlist[0]} has expired and will not accept any user participation") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add( - RejectedContractTransactionHistory(transactionType='participation', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - 
time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Transaction {transaction_data['txid']} rejected as Smart contract {parsed_data['contractName']}-{outputlist[0]} has expired and will not accept any user participation", - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - pushData_SSEapi( - f"Error| Transaction {transaction_data['txid']} rejected as Smart contract {parsed_data['contractName']}-{outputlist[0]} has expired and will not accept any user participation") - return 0 - - # pull out the contract structure into a dictionary - engine = create_engine( - 'sqlite:///smartContracts/{}-{}.db'.format(parsed_data['contractName'], outputlist[0]), echo=True) - connection = engine.connect() - attributevaluepair = connection.execute( - "select attribute, value from contractstructure where attribute != 'contractName' and attribute != 'flodata' and attribute != 'contractAddress'").fetchall() - contractStructure = {} - conditionDict = {} - counter = 0 - for item in attributevaluepair: - if list(item)[0] == 'exitconditions': - conditionDict[counter] = list(item)[1] - counter = counter + 1 - else: - contractStructure[list(item)[0]] = list(item)[1] - if len(conditionDict) > 0: - contractStructure['exitconditions'] = conditionDict - del counter, conditionDict - - # check if user choice has been passed, to the wrong contract type - if 'userChoice' in parsed_data and 'exitconditions' not in contractStructure: - logger.info( - f"Transaction {transaction_data['txid']} rejected as userChoice, {parsed_data['userChoice']}, has been passed to Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} which doesn't accept any userChoice") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - 
SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add( - RejectedContractTransactionHistory(transactionType='participation', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Transaction {transaction_data['txid']} rejected as userChoice, {parsed_data['userChoice']}, has been passed to Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} which doesn't accept any userChoice", - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - pushData_SSEapi( - f"Error | Transaction {transaction_data['txid']} rejected as userChoice, {parsed_data['userChoice']}, has been passed to Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} which doesn't accept any userChoice") - return 0 - - # check if the right token is being sent for participation - if parsed_data['tokenIdentification'] != contractStructure['tokenIdentification']: - logger.info( - f"Transaction {transaction_data['txid']} rejected as the token being transferred, {parsed_data['tokenIdentidication'].upper()}, is not part of the structure of Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]}") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add( - 
RejectedContractTransactionHistory(transactionType='participation', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Transaction {transaction_data['txid']} rejected as the token being transferred, {parsed_data['tokenIdentidication'].upper()}, is not part of the structure of Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]}", - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - pushData_SSEapi( - f"Error| Transaction {transaction_data['txid']} rejected as the token being transferred, {parsed_data['tokenIdentidication'].upper()}, is not part of the structure of Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]}") - return 0 - - # Check if the contract is of the type one-time-event - if contractStructure['contractType'] == 'one-time-event': - - # Check if contractAmount is part of the contract structure, and enforce it if it is - if 'contractAmount' in contractStructure: - if float(contractStructure['contractAmount']) != float(parsed_data['tokenAmount']): - logger.info( - f"Transaction {transaction_data['txid']} rejected as contractAmount being transferred is not part of the structure of Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]}") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add( - 
RejectedContractTransactionHistory(transactionType='participation', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Transaction {transaction_data['txid']} rejected as contractAmount being transferred is not part of the structure of Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]}", - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - pushData_SSEapi( - f"Error| Transaction {transaction_data['txid']} rejected as contractAmount being transferred is not part of the structure of Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]}") - return 0 - - partialTransferCounter = 0 - # Check if maximum subscription amount has reached - if 'maximumsubscriptionamount' in contractStructure: - # now parse the expiry time in python - maximumsubscriptionamount = float(contractStructure['maximumsubscriptionamount']) - engine = create_engine( - 'sqlite:///smartContracts/{}-{}.db'.format(parsed_data['contractName'], outputlist[0]), - echo=True) - ContractBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - amountDeposited = session.query(func.sum(ContractParticipants.tokenAmount)).all()[0][0] - session.close() - - if amountDeposited is None: - amountDeposited = 0 - - if amountDeposited >= maximumsubscriptionamount: - logger.info( - f"Transaction {transaction_data['txid']} rejected as maximum subscription amount has been reached for the Smart contract named {parsed_data['contractName']} at the address {outputlist[0]}") - # Store transfer as part of RejectedContractTransactionHistory - 
engine = create_engine( - f"sqlite:///system.db", - echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add( - RejectedContractTransactionHistory(transactionType='participation', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Transaction {transaction_data['txid']} rejected as maximum subscription amount has been reached for the Smart contract named {parsed_data['contractName']} at the address {outputlist[0]}", - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - pushData_SSEapi( - f"Error | Transaction {transaction_data['txid']} rejected as maximum subscription amount has been reached for the Smart contract named {parsed_data['contractName']} at the address {outputlist[0]}") - return 0 - elif ((float(amountDeposited) + float(parsed_data[ - 'tokenAmount'])) > maximumsubscriptionamount) and 'contractAmount' in contractStructure: - logger.info( - f"Transaction {transaction_data['txid']} rejected as the contractAmount surpasses the maximum subscription amount, {contractStructure['maximumsubscriptionamount']} {contractStructure['tokenIdentification'].upper()}, for the Smart contract named {parsed_data['contractName']} at the address {outputlist[0]}") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - 
session.add( - RejectedContractTransactionHistory(transactionType='participation', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Transaction {transaction_data['txid']} rejected as the contractAmount surpasses the maximum subscription amount, {contractStructure['maximumsubscriptionamount']} {contractStructure['tokenIdentification'].upper()}, for the Smart contract named {parsed_data['contractName']} at the address {outputlist[0]}", - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - pushData_SSEapi( - f"Error | Transaction {transaction_data['txid']} rejected as the contractAmount surpasses the maximum subscription amount, {contractStructure['maximumsubscriptionamount']} {contractStructure['tokenIdentification'].upper()}, for the Smart contract named {parsed_data['contractName']} at the address {outputlist[0]}") - return 0 - else: - partialTransferCounter = 1 - - # Check if exitcondition exists as part of contractstructure and is given in right format - if 'exitconditions' in contractStructure: - # This means the contract has an external trigger, ie. 
trigger coming from the contract committee - exitconditionsList = [] - for condition in contractStructure['exitconditions']: - exitconditionsList.append(contractStructure['exitconditions'][condition]) - - if parsed_data['userChoice'] in exitconditionsList: - if partialTransferCounter == 0: - # Check if the tokenAmount being transferred exists in the address & do the token transfer - returnval = transferToken(parsed_data['tokenIdentification'], - parsed_data['tokenAmount'], inputlist[0], outputlist[0], - transaction_data, parsed_data) - if returnval is not None: - # Store participant details in the smart contract's db - session.add(ContractParticipants(participantAddress=inputadd, - tokenAmount=parsed_data['tokenAmount'], - userChoice=parsed_data['userChoice'], - transactionHash=transaction_data['txid'], - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'])) - session.commit() - - # Store transfer as part of ContractTransactionHistory - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add(ContractTransactionHistory(transactionType='participation', - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=parsed_data['tokenAmount'], - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - - parsedFloData=json.dumps(parsed_data) - )) - - session.commit() - session.close() - - # Store a mapping of participant address -> Contract participated in - engine = create_engine('sqlite:///system.db', echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - session.add(ContractAddressMapping(address=inputadd, addressType='participant', - tokenAmount=parsed_data['tokenAmount'], - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - 
transactionHash=transaction_data['txid'], - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'])) - session.commit() - - # If this is the first interaction of the outputlist's address with the given token name, add it to token mapping - engine = create_engine('sqlite:///system.db', echo=True) - connection = engine.connect() - firstInteractionCheck = connection.execute( - f"select * from tokenAddressMapping where tokenAddress='{outputlist[0]}' and token='{parsed_data['tokenIdentification']}'").fetchall() - - if len(firstInteractionCheck) == 0: - connection.execute( - f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{outputlist[0]}', '{parsed_data['tokenIdentification']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}')") - - connection.close() - - updateLatestTransaction(transaction_data, parsed_data) - return 1 - - else: - logger.info("Something went wrong in the smartcontract token transfer method") - return 0 - elif partialTransferCounter == 1: - # Transfer only part of the tokens users specified, till the time it reaches maximumamount - returnval = transferToken(parsed_data['tokenIdentification'], - maximumsubscriptionamount - amountDeposited, - inputlist[0], outputlist[0], transaction_data, parsed_data) - if returnval is not None: - # Store participant details in the smart contract's db - session.add(ContractParticipants(participantAddress=inputadd, - tokenAmount=maximumsubscriptionamount - amountDeposited, - userChoice=parsed_data['userChoice'], - transactionHash=transaction_data['txid'], - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'])) - session.commit() - session.close() - - # Store a mapping of participant address -> Contract participated in - engine = create_engine('sqlite:///system.db', echo=True) - SystemBase.metadata.create_all(bind=engine) - session = 
sessionmaker(bind=engine)() - session.add(ContractAddressMapping(address=inputadd, addressType='participant', - tokenAmount=maximumsubscriptionamount - amountDeposited, - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - transactionHash=transaction_data['txid'], - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'])) - session.commit() - session.close() - updateLatestTransaction(transaction_data, parsed_data) - return 1 - - else: - logger.info("Something went wrong in the smartcontract token transfer method") - return 0 - - else: - logger.info( - f"Transaction {transaction_data['txid']} rejected as wrong userchoice entered for the Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]}") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add( - RejectedContractTransactionHistory(transactionType='participation', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Transaction {transaction_data['txid']} rejected as wrong userchoice entered for the Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]}", - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - pushData_SSEapi( - f"Error| Transaction {transaction_data['txid']} rejected as wrong userchoice entered for the Smart Contract named 
{parsed_data['contractName']} at the address {outputlist[0]}") - return 0 - - elif 'payeeAddress' in contractStructure: - # this means the contract if of the type internal trigger - if partialTransferCounter == 0: - # Check if the tokenAmount being transferred exists in the address & do the token transfer - returnval = transferToken(parsed_data['tokenIdentification'], - parsed_data['tokenAmount'], inputlist[0], outputlist[0], - transaction_data, parsed_data) - if returnval is not None: - # Store participant details in the smart contract's db - session.add(ContractParticipants(participantAddress=inputadd, - tokenAmount=parsed_data['tokenAmount'], - userChoice='-', - transactionHash=transaction_data['txid'], - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'])) - session.commit() - - # Store transfer as part of ContractTransactionHistory - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add(ContractTransactionHistory(transactionType='participation', - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=parsed_data['tokenAmount'], - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - - parsedFloData=json.dumps(parsed_data) - )) - - session.commit() - session.close() - - # Store a mapping of participant address -> Contract participated in - engine = create_engine('sqlite:///system.db', echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - session.add(ContractAddressMapping(address=inputadd, addressType='participant', - tokenAmount=parsed_data['tokenAmount'], - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - transactionHash=transaction_data['txid'], - blockNumber=transaction_data['blockheight'], - 
blockHash=transaction_data['blockhash'])) - session.commit() - - updateLatestTransaction(transaction_data, parsed_data) - return 1 - - else: - logger.info("Something went wrong in the smartcontract token transfer method") - return 0 - elif partialTransferCounter == 1: - # Transfer only part of the tokens users specified, till the time it reaches maximumamount - returnval = transferToken(parsed_data['tokenIdentification'], - maximumsubscriptionamount - amountDeposited, - inputlist[0], outputlist[0], transaction_data, parsed_data) - if returnval is not None: - # Store participant details in the smart contract's db - session.add(ContractParticipants(participantAddress=inputadd, - tokenAmount=maximumsubscriptionamount - amountDeposited, - userChoice='-', - transactionHash=transaction_data['txid'], - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'])) - session.commit() - session.close() - - # Store a mapping of participant address -> Contract participated in - engine = create_engine('sqlite:///system.db', echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - session.add(ContractAddressMapping(address=inputadd, addressType='participant', - tokenAmount=maximumsubscriptionamount - amountDeposited, - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - transactionHash=transaction_data['txid'], - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'])) - session.commit() - session.close() - updateLatestTransaction(transaction_data, parsed_data) - return 1 - - else: - logger.info("Something went wrong in the smartcontract token transfer method") - return 0 - - else: - logger.info( - f"Transaction {transaction_data['txid']} rejected as the participation doesn't belong to any valid contract type") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - 
SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add( - RejectedContractTransactionHistory(transactionType='participation', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Transaction {transaction_data['txid']} rejected as the participation doesn't belong to any valid contract type", - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - - url = 'https://ranchimallflo.duckdns.org/' - headers = {'Accept': 'application/json', 'Content-Type': 'application/json'} - '''r = requests.post(url, json={ - 'message': f"Error | Transaction {transaction_data['txid']} rejected as the participation doesn't belong to any valid contract type"}, - headers=headers)''' - return 0 - - else: - logger.info( - f"Transaction {transaction_data['txid']} rejected as a Smart Contract with the name {parsed_data['contractName']} at address {outputlist[0]} doesnt exist") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add( - RejectedContractTransactionHistory(transactionType='participation', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], 
- time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Transaction {transaction_data['txid']} rejected as a Smart Contract with the name {parsed_data['contractName']} at address {outputlist[0]} doesnt exist", - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - - url = 'https://ranchimallflo.duckdns.org/' - headers = {'Accept': 'application/json', 'Content-Type': 'application/json'} - '''r = requests.post(url, json={ - 'message': f"Error | Contract transaction {transaction_data['txid']} rejected as a smartcontract with same name {parsed_data['contractName']}-{parsed_data['contractAddress']} dosent exist "}, - headers=headers)''' - return 0 - - # todo Rule 47 - If the parsed data type is token incorporation, then check if the name hasn't been taken already - # if it has been taken then reject the incorporation. Else incorporate it - elif parsed_data['type'] == 'tokenIncorporation': - if not os.path.isfile(f"./tokens/{parsed_data['tokenIdentification']}.db"): - engine = create_engine(f"sqlite:///tokens/{parsed_data['tokenIdentification']}.db", echo=True) - Base.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - session.add(ActiveTable(address=inputlist[0], parentid=0, transferBalance=parsed_data['tokenAmount'])) - session.add(TransferLogs(sourceFloAddress=inputadd, destFloAddress=outputlist[0], - transferAmount=parsed_data['tokenAmount'], sourceId=0, destinationId=1, - blockNumber=transaction_data['blockheight'], time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'])) - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add(TransactionHistory(sourceFloAddress=inputadd, destFloAddress=outputlist[0], - transferAmount=parsed_data['tokenAmount'], - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - 
time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), transactionType=parsed_data['type'], - parsedFloData=json.dumps(parsed_data))) - session.commit() - session.close() - - # add it to token address to token mapping db table - engine = create_engine('sqlite:///system.db'.format(parsed_data['tokenIdentification']), echo=True) - connection = engine.connect() - connection.execute( - f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{inputadd}', '{parsed_data['tokenIdentification']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}');") - connection.close() - - updateLatestTransaction(transaction_data, parsed_data) - - pushData_SSEapi( - f"Token | Succesfully incorporated token {parsed_data['tokenIdentification']} at transaction {transaction_data['txid']}") - return 1 - else: - logger.info( - f"Transaction {transaction_data['txid']} rejected as a token with the name {parsed_data['tokenIdentification']} has already been incorporated") - engine = create_engine(f"sqlite:///system.db", echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add(RejectedTransactionHistory(tokenIdentification=parsed_data['tokenIdentification'], - sourceFloAddress=inputadd, destFloAddress=outputlist[0], - transferAmount=parsed_data['tokenAmount'], - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Transaction {transaction_data['txid']} rejected as a token with the name {parsed_data['tokenIdentification']} has already been incorporated", - 
transactionType=parsed_data['type'], - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - pushData_SSEapi( - f"Error | Token incorporation rejected at transaction {transaction_data['txid']} as token {parsed_data['tokenIdentification']} already exists") - return 0 - - # todo Rule 48 - If the parsed data type if smart contract incorporation, then check if the name hasn't been taken already - # if it has been taken then reject the incorporation. - elif parsed_data['type'] == 'smartContractIncorporation': - if not os.path.isfile(f"./smartContracts/{parsed_data['contractName']}-{parsed_data['contractAddress']}.db"): - # todo Rule 49 - If the contract name hasn't been taken before, check if the contract type is an authorized type by the system - if parsed_data['contractType'] == 'one-time-event': - logger.info("Smart contract is of the type one-time-event") - - # either userchoice or payeeAddress condition should be present. Check for it - if 'userchoices' not in parsed_data['contractConditions'] and 'payeeAddress' not in parsed_data[ - 'contractConditions']: - logger.info( - f"Either userchoice or payeeAddress should be part of the Contract conditions.\nSmart contract incorporation on transaction {transaction_data['txid']} rejected") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add( - RejectedContractTransactionHistory(transactionType='incorporation', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - 
blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Either userchoice or payeeAddress should be part of the Contract conditions.\nSmart contract incorporation on transaction {transaction_data['txid']} rejected", - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - return 0 - - # userchoice and payeeAddress conditions cannot come together. Check for it - if 'userchoices' in parsed_data['contractConditions'] and 'payeeAddress' in parsed_data[ - 'contractConditions']: - logger.info( - f"Both userchoice and payeeAddress provided as part of the Contract conditions.\nSmart contract incorporation on transaction {transaction_data['txid']} rejected") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add( - RejectedContractTransactionHistory(transactionType='incorporation', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Both userchoice and payeeAddress provided as part of the Contract conditions.\nSmart contract incorporation on transaction {transaction_data['txid']} rejected", - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - return 0 - - # todo Rule 50 - Contract address mentioned in flodata field should be same as the receiver FLO address on the output side - # henceforth we will not consider any flo private key initiated comment as valid 
from this address - # Unlocking can only be done through smart contract system address - if parsed_data['contractAddress'] == inputadd: - dbName = '{}-{}'.format(parsed_data['contractName'], parsed_data['contractAddress']) - engine = create_engine('sqlite:///smartContracts/{}.db'.format(dbName), echo=True) - ContractBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - session.add(ContractStructure(attribute='contractType', index=0, value=parsed_data['contractType'])) - session.add(ContractStructure(attribute='contractName', index=0, value=parsed_data['contractName'])) - session.add( - ContractStructure(attribute='tokenIdentification', index=0, - value=parsed_data['tokenIdentification'])) - session.add( - ContractStructure(attribute='contractAddress', index=0, value=parsed_data['contractAddress'])) - session.add( - ContractStructure(attribute='flodata', index=0, - value=parsed_data['flodata'])) - session.add( - ContractStructure(attribute='expiryTime', index=0, - value=parsed_data['contractConditions']['expiryTime'])) - if 'contractAmount' in parsed_data['contractConditions']: - session.add( - ContractStructure(attribute='contractAmount', index=0, - value=parsed_data['contractConditions']['contractAmount'])) - - if 'minimumsubscriptionamount' in parsed_data['contractConditions']: - session.add( - ContractStructure(attribute='minimumsubscriptionamount', index=0, - value=parsed_data['contractConditions']['minimumsubscriptionamount'])) - if 'maximumsubscriptionamount' in parsed_data['contractConditions']: - session.add( - ContractStructure(attribute='maximumsubscriptionamount', index=0, - value=parsed_data['contractConditions']['maximumsubscriptionamount'])) - if 'userchoices' in parsed_data['contractConditions']: - for key, value in parsed_data['contractConditions']['userchoices'].items(): - session.add(ContractStructure(attribute='exitconditions', index=key, value=value)) - - if 'payeeAddress' in parsed_data['contractConditions']: - # in 
this case, expirydate( or maximumamount) is the trigger internally. Keep a track of expiry dates - session.add( - ContractStructure(attribute='payeeAddress', index=0, - value=parsed_data['contractConditions']['payeeAddress'])) - - session.commit() - - # Store transfer as part of ContractTransactionHistory - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add(ContractTransactionHistory(transactionType='incorporation', sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - - # add Smart Contract name in token contract association - engine = create_engine(f"sqlite:///tokens/{parsed_data['tokenIdentification']}.db", echo=True) - Base.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - session.add(TokenContractAssociation(tokenIdentification=parsed_data['tokenIdentification'], - contractName=parsed_data['contractName'], - contractAddress=parsed_data['contractAddress'], - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - transactionType=parsed_data['type'], - parsedFloData=json.dumps(parsed_data))) - session.commit() - session.close() - - # Store smart contract address in system's db, to be ignored during future transfers - engine = create_engine('sqlite:///system.db', echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - session.add(ActiveContracts(contractName=parsed_data['contractName'], - 
contractAddress=parsed_data['contractAddress'], status='active', - tokenIdentification=parsed_data['tokenIdentification'], - contractType=parsed_data['contractType'], - transactionHash=transaction_data['txid'], - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - incorporationDate=transaction_data['blocktime'])) - session.commit() - - session.add(ContractAddressMapping(address=inputadd, addressType='incorporation', - tokenAmount=None, - contractName=parsed_data['contractName'], - contractAddress=inputadd, - transactionHash=transaction_data['txid'], - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'])) - session.commit() - - session.close() - - updateLatestTransaction(transaction_data, parsed_data) - - pushData_SSEapi('Contract | Contract incorporated at transaction {} with name {}-{}'.format( - transaction_data['txid'], parsed_data['contractName'], parsed_data['contractAddress'])) - return 1 - else: - logger.info( - f"Contract Incorporation on transaction {transaction_data['txid']} rejected as contract address in Flodata and input address are different") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add( - RejectedContractTransactionHistory(transactionType='incorporation', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Contract Incorporation on transaction 
{transaction_data['txid']} rejected as contract address in flodata and input address are different", - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - pushData_SSEapi( - 'Error | Contract Incorporation rejected as address in Flodata and input address are different at transaction {}'.format( - transaction_data['txid'])) - return 0 - else: - logger.info( - f"Transaction {transaction_data['txid']} rejected as a Smart Contract with the name {parsed_data['contractName']} at address {parsed_data['contractAddress']} already exists") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine(f"sqlite:///system.db", echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add(RejectedContractTransactionHistory(transactionType='incorporation', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Transaction {transaction_data['txid']} rejected as a Smart Contract with the name {parsed_data['contractName']} at address {parsed_data['contractAddress']} already exists", - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - - url = 'https://ranchimallflo.duckdns.org/' - headers = {'Accept': 'application/json', 'Content-Type': 'application/json'} - '''r = requests.post(url, json={ - 'message': 'Error | Contract Incorporation rejected as a smartcontract with same name {}-{} is active currentlyt at transaction {}'.format(parsed_data['contractName'], parsed_data['contractAddress'], 
transaction_data['txid'])}, headers=headers) - ''' - return 0 - - elif parsed_data['type'] == 'smartContractPays': - logger.info(f"Transaction {transaction_data['txid']} is of the type smartContractPays") - - # Check if input address is a committee address - if inputlist[0] in committeeAddressList: - # check if the contract exists - if os.path.isfile(f"./smartContracts/{parsed_data['contractName']}-{outputlist[0]}.db"): - # Check if the transaction hash already exists in the contract db (Safety check) - engine = create_engine( - 'sqlite:///smartContracts/{}-{}.db'.format(parsed_data['contractName'], outputlist[0]), echo=True) - connection = engine.connect() - participantAdd_txhash = connection.execute( - f"select sourceFloAddress, transactionHash from contractTransactionHistory where transactionType != 'incorporation'").fetchall() - participantAdd_txhash_T = list(zip(*participantAdd_txhash)) - - if len(participantAdd_txhash) != 0 and transaction_data['txid'] in list(participantAdd_txhash_T[1]): - logger.warning( - f"Transaction {transaction_data['txid']} rejected as it already exists in the Smart Contract db. This is unusual, please check your code") - pushData_SSEapi( - f"Error | Transaction {transaction_data['txid']} rejected as it already exists in the Smart Contract db. 
This is unusual, please check your code") - return 0 - - # pull out the contract structure into a dictionary - engine = create_engine( - 'sqlite:///smartContracts/{}-{}.db'.format(parsed_data['contractName'], outputlist[0]), echo=True) - connection = engine.connect() - attributevaluepair = connection.execute( - "select attribute, value from contractstructure where attribute != 'contractName' and attribute != 'flodata' and attribute != 'contractAddress'").fetchall() - contractStructure = {} - conditionDict = {} - counter = 0 - for item in attributevaluepair: - if list(item)[0] == 'exitconditions': - conditionDict[counter] = list(item)[1] - counter = counter + 1 - else: - contractStructure[list(item)[0]] = list(item)[1] - if len(conditionDict) > 0: - contractStructure['exitconditions'] = conditionDict - del counter, conditionDict - - # if contractAddress has been passed, check if output address is contract Incorporation address - if 'contractAddress' in contractStructure: - if outputlist[0] != contractStructure['contractAddress']: - logger.warning( - f"Transaction {transaction_data['txid']} rejected as Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} hasn't expired yet") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add( - RejectedContractTransactionHistory(transactionType='trigger', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), 
- rejectComment=f"Transaction {transaction_data['txid']} rejected as Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} hasn't expired yet", - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - pushData_SSEapi( - f"Error | Transaction {transaction_data['txid']} rejected as Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} hasn't expired yet") - return 0 - - # check the type of smart contract ie. external trigger or internal trigger - if 'payeeAddress' in contractStructure: - logger.warning( - f"Transaction {transaction_data['txid']} rejected as Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} has an internal trigger") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add( - RejectedContractTransactionHistory(transactionType='trigger', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Transaction {transaction_data['txid']} rejected as Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} has an internal trigger", - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - pushData_SSEapi( - f"Error | Transaction {transaction_data['txid']} rejected as Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} has an internal trigger") - return 0 - - 
# check the status of the contract - engine = create_engine('sqlite:///system.db', echo=True) - connection = engine.connect() - contractStatus = connection.execute( - f"select status from activecontracts where contractName=='{parsed_data['contractName']}' and contractAddress='{outputlist[0]}'").fetchall()[ - 0][0] - connection.close() - contractList = [] - - if contractStatus == 'closed': - logger.info( - f"Transaction {transaction_data['txid']} closed as Smart contract {parsed_data['contractName']} at the {outputlist[0]} is closed") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add( - RejectedContractTransactionHistory(transactionType='trigger', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Transaction {transaction_data['txid']} closed as Smart contract {parsed_data['contractName']} at the {outputlist[0]} is closed", - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - - url = 'https://ranchimallflo.duckdns.org/' - headers = {'Accept': 'application/json', 'Content-Type': 'application/json'} - '''r = requests.post(url, json={ - 'message': f"Error | Transaction {transaction_data['txid']} closed as Smart contract {parsed_data['contractName']} at the {outputlist[0]} is closed"}, - headers=headers)''' - return 0 - else: - engine = create_engine( - 
'sqlite:///smartContracts/{}-{}.db'.format(parsed_data['contractName'], outputlist[0]), - echo=True) - ContractBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - result = session.query(ContractStructure).filter_by(attribute='expiryTime').all() - session.close() - if result: - # now parse the expiry time in python - expirytime = result[0].value.strip() - expirytime_split = expirytime.split(' ') - parse_string = '{}/{}/{} {}'.format(expirytime_split[3], - parsing.months[expirytime_split[1]], - expirytime_split[2], expirytime_split[4]) - expirytime_object = parsing.arrow.get(parse_string, 'YYYY/M/D HH:mm:ss').replace( - tzinfo=expirytime_split[5][3:]) - blocktime_object = parsing.arrow.get(transaction_data['blocktime']).to('Asia/Kolkata') - - if blocktime_object <= expirytime_object: - logger.info( - f"Transaction {transaction_data['txid']} rejected as Smart contract {parsed_data['contractName']}-{outputlist[0]} has not expired and will not trigger") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add( - RejectedContractTransactionHistory(transactionType='trigger', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Transaction {transaction_data['txid']} rejected as Smart contract {parsed_data['contractName']}-{outputlist[0]} has not expired and will not trigger", - - parsedFloData=json.dumps(parsed_data) - )) - 
session.commit() - session.close() - pushData_SSEapi( - f"Error| Transaction {transaction_data['txid']} rejected as Smart contract {parsed_data['contractName']}-{outputlist[0]} has not expired and will not trigger") - return 0 - - # check if the user choice passed is part of the contract structure - tempchoiceList = [] - for item in contractStructure['exitconditions']: - tempchoiceList.append(contractStructure['exitconditions'][item]) - - if parsed_data['triggerCondition'] not in tempchoiceList: - logger.info( - f"Transaction {transaction_data['txid']} rejected as triggerCondition, {parsed_data['triggerCondition']}, has been passed to Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} which doesn't accept any userChoice of the given name") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add( - RejectedContractTransactionHistory(transactionType='trigger', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Transaction {transaction_data['txid']} rejected as triggerCondition, {parsed_data['triggerCondition']}, has been passed to Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} which doesn't accept any userChoice of the given name", - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - pushData_SSEapi( - f"Error | Transaction {transaction_data['txid']} 
rejected as triggerCondition, {parsed_data['triggerCondition']}, has been passed to Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} which doesn't accept any userChoice of the given name") - return 0 - - # check if minimumsubscriptionamount exists as part of the contract structure - if 'minimumsubscriptionamount' in contractStructure: - # if it has not been reached, close the contract and return money - minimumsubscriptionamount = float(contractStructure['minimumsubscriptionamount']) - engine = create_engine( - 'sqlite:///smartContracts/{}-{}.db'.format(parsed_data['contractName'], outputlist[0]), - echo=True) - ContractBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - amountDeposited = session.query(func.sum(ContractParticipants.tokenAmount)).all()[0][0] - session.close() - - if amountDeposited is None: - amountDeposited = 0 - - if amountDeposited < minimumsubscriptionamount: - # close the contract and return the money - logger.info( - 'Minimum subscription amount hasn\'t been reached\n The token will be returned back') - # Initialize payback to contract participants - engine = create_engine( - 'sqlite:///smartContracts/{}-{}.db'.format(parsed_data['contractName'], outputlist[0]), - echo=True) - connection = engine.connect() - contractParticipants = connection.execute( - 'select participantAddress, tokenAmount, transactionHash from contractparticipants').fetchall()[ - 0][0] - - for participant in contractParticipants: - tokenIdentification = connection.execute( - 'select * from contractstructure where attribute="tokenIdentification"').fetchall()[0][ - 0] - contractAddress = connection.execute( - 'select * from contractstructure where attribute="contractAddress"').fetchall()[0][0] - returnval = transferToken(tokenIdentification, participant[1], contractAddress, - participant[0], transaction_data, parsed_data) - if returnval is None: - logger.info( - "CRITICAL ERROR | Something went wrong in the token 
transfer method while doing local Smart Contract Trigger") - return 0 - - connection.execute( - 'update contractparticipants set winningAmount="{}" where participantAddress="{}" and transactionHash="{}"'.format( - (participant[1], participant[0], participant[4]))) - - # add transaction to ContractTransactionHistory - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - engine = create_engine( - 'sqlite:///smartContracts/{}-{}.db'.format(parsed_data['contractName'], outputlist[0]), - echo=True) - ContractBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - session.add(ContractTransactionHistory(transactionType='trigger', - transactionSubType='minimumsubscriptionamount-payback', - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - - engine = create_engine('sqlite:///system.db', echo=True) - connection = engine.connect() - connection.execute( - 'update activecontracts set status="closed" where contractName="{}" and contractAddress="{}"'.format( - parsed_data['contractName'], outputlist[0])) - connection.execute( - 'update activecontracts set status="{}" where contractName="{}" and contractAddress="{}"'.format( - transaction_data['blocktime'], - parsed_data['contractName'], outputlist[0])) - connection.close() - - updateLatestTransaction(transaction_data, parsed_data) - - pushData_SSEapi( - 'Trigger | Minimum subscription amount not reached at contract {}-{} at transaction {}. 
Tokens will be refunded'.format( - parsed_data['contractName'], outputlist[0], transaction_data['txid'])) - return 1 - - # Trigger the contract - engine = create_engine( - 'sqlite:///smartContracts/{}-{}.db'.format(parsed_data['contractName'], outputlist[0]), - echo=True) - connection = engine.connect() - contractWinners = connection.execute( - 'select * from contractparticipants where userChoice="{}"'.format( - parsed_data['triggerCondition'])).fetchall() - tokenSum = connection.execute('select sum(tokenAmount) from contractparticipants').fetchall()[0][0] - winnerSum = connection.execute( - 'select sum(tokenAmount) from contractparticipants where userChoice="{}"'.format( - parsed_data['triggerCondition'])).fetchall()[0][0] - tokenIdentification = connection.execute( - 'select value from contractstructure where attribute="tokenIdentification"').fetchall()[0][0] - - for winner in contractWinners: - winnerAmount = "%.8f" % ((winner[2] / winnerSum) * tokenSum) - returnval = transferToken(tokenIdentification, winnerAmount, - outputlist[0], winner[1], transaction_data, parsed_data) - if returnval is None: - logger.critical( - "Something went wrong in the token transfer method while doing local Smart Contract Trigger") - return 0 - connection.execute( - f"update contractparticipants set winningAmount='{winnerAmount}' where participantAddress='{winner[1]}' and transactionHash='{winner[4]}'") - - # add transaction to ContractTransactionHistory - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add(ContractTransactionHistory(transactionType='trigger', - transactionSubType='committee', - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - - 
parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - - engine = create_engine('sqlite:///system.db', echo=True) - connection = engine.connect() - connection.execute( - 'update activecontracts set status="closed" where contractName="{}" and contractAddress="{}"'.format( - parsed_data['contractName'], outputlist[0])) - connection.execute( - 'update activecontracts set closeDate="{}" where contractName="{}" and contractAddress="{}"'.format( - transaction_data['blocktime'], - parsed_data['contractName'], outputlist[0])) - connection.close() - - updateLatestTransaction(transaction_data, parsed_data) - - pushData_SSEapi( - 'Trigger | Contract triggered of the name {}-{} is active currently at transaction {}'.format( - parsed_data['contractName'], outputlist[0], transaction_data['txid'])) - return 1 - else: - logger.info( - f"Transaction {transaction_data['txid']} rejected as Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} doesn't exist") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add( - RejectedContractTransactionHistory(transactionType='trigger', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Transaction {transaction_data['txid']} rejected as Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} doesn't exist", - - parsedFloData=json.dumps(parsed_data) - 
)) - session.commit() - session.close() - pushData_SSEapi( - f"Error | Transaction {transaction_data['txid']} rejected as Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} doesn't exist") - return 0 - - else: - logger.info( - f"Transaction {transaction_data['txid']} rejected as input address, {inputlist[0]}, is not part of the committee address list") - # Store transfer as part of RejectedContractTransactionHistory - engine = create_engine( - f"sqlite:///system.db", - echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - blockchainReference = neturl + 'tx/' + transaction_data['txid'] - session.add(RejectedContractTransactionHistory(transactionType='trigger', - contractName=parsed_data['contractName'], - contractAddress=outputlist[0], - sourceFloAddress=inputadd, - destFloAddress=outputlist[0], - transferAmount=None, - blockNumber=transaction_data['blockheight'], - blockHash=transaction_data['blockhash'], - time=transaction_data['blocktime'], - transactionHash=transaction_data['txid'], - blockchainReference=blockchainReference, - jsonData=json.dumps(transaction_data), - rejectComment=f"Transaction {transaction_data['txid']} rejected as input address, {inputlist[0]}, is not part of the committee address list", - - parsedFloData=json.dumps(parsed_data) - )) - session.commit() - session.close() - pushData_SSEapi( - f"Transaction {transaction_data['txid']} rejected as input address, {inputlist[0]}, is not part of the committee address list") - return 0 - - -def scanBlockchain(): - # Read start block no - engine = create_engine('sqlite:///system.db', echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - startblock = int(session.query(SystemData).filter_by(attribute='lastblockscanned').all()[0].value) + 1 - session.commit() - session.close() - - # todo Rule 6 - Find current block height - # Rule 7 - Start analysing the block contents from starting 
block to current height - - # Find current block height - current_index = -1 - while(current_index == -1): - response = multiRequest('blocks?limit=1', config['DEFAULT']['NET']) - try: - current_index = response['blocks'][0]['height'] - except: - logger.info('Latest block count response from multiRequest() is not in the right format. Displaying the data received in the log below') - logger.info(response) - logger.info('Program will wait for 1 seconds and try to reconnect') - time.sleep(1) - else: - logger.info("Current block height is %s" % str(current_index)) - break - - for blockindex in range(startblock, current_index): - if blockindex < 4365011: - if blockindex in goodblockset: - processBlock(blockindex) - else: - logger.info(f"Skipping block {blockindex}") - else: - processBlock(blockindex) - - # At this point the script has updated to the latest block - # Now we connect to flosight's websocket API to get information about the latest blocks - - -# Configuration of required variables -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) - -formatter = logging.Formatter('%(asctime)s:%(name)s:%(message)s') - -file_handler = logging.FileHandler('tracking.log') -file_handler.setLevel(logging.INFO) -file_handler.setFormatter(formatter) - -stream_handler = logging.StreamHandler() -stream_handler.setFormatter(formatter) - -logger.addHandler(file_handler) -logger.addHandler(stream_handler) - - -# todo Rule 1 - Read command line arguments to reset the databases as blank -# Rule 2 - Read config to set testnet/mainnet -# Rule 3 - Set flo blockexplorer location depending on testnet or mainnet -# Rule 4 - Set the local flo-cli path depending on testnet or mainnet -# Rule 5 - Set the block number to scan from - - -# Read command line arguments -parser = argparse.ArgumentParser(description='Script tracks RMT using FLO data on the FLO blockchain - https://flo.cash') -parser.add_argument('-r', '--reset', nargs='?', const=1, type=int, help='Purge existing db and 
rebuild it') -args = parser.parse_args() - -apppath = os.path.dirname(os.path.realpath(__file__)) -dirpath = os.path.join(apppath, 'tokens') -if not os.path.isdir(dirpath): - os.mkdir(dirpath) -dirpath = os.path.join(apppath, 'smartContracts') -if not os.path.isdir(dirpath): - os.mkdir(dirpath) - -# Read configuration -config = configparser.ConfigParser() -config.read('config.ini') - -# Assignment the flo-cli command -if config['DEFAULT']['NET'] == 'mainnet': - neturl = 'http://0.0.0.0:9495/' - localapi = config['DEFAULT']['FLO_CLI_PATH'] -elif config['DEFAULT']['NET'] == 'testnet': - neturl = 'https://testnet-flosight.duckdns.org/' - localapi = '{} --testnet'.format(config['DEFAULT']['FLO_CLI_PATH']) -else: - logger.error( - "NET parameter in config.ini invalid. Options are either 'mainnet' or 'testnet'. Script is exiting now") - -# Delete database and smartcontract directory if reset is set to 1 -if args.reset == 1: - logger.info("Resetting the database. ") - apppath = os.path.dirname(os.path.realpath(__file__)) - dirpath = os.path.join(apppath, 'tokens') - shutil.rmtree(dirpath) - os.mkdir(dirpath) - dirpath = os.path.join(apppath, 'smartContracts') - shutil.rmtree(dirpath) - os.mkdir(dirpath) - dirpath = os.path.join(apppath, 'system.db') - if os.path.exists(dirpath): - os.remove(dirpath) - dirpath = os.path.join(apppath, 'latestCache.db') - if os.path.exists(dirpath): - os.remove(dirpath) - - # Read start block no - startblock = int(config['DEFAULT']['START_BLOCK']) - engine = create_engine('sqlite:///system.db', echo=True) - SystemBase.metadata.create_all(bind=engine) - session = sessionmaker(bind=engine)() - session.add(SystemData(attribute='lastblockscanned', value=startblock - 1)) - session.commit() - session.close() - - # Initialize latest cache DB - engine = create_engine('sqlite:///latestCache.db', echo=True) - LatestCacheBase.metadata.create_all(bind=engine) - session.commit() - session.close() - - -def switchNeturl(currentneturl): - mainserverlist = 
['http://0.0.0.0:9495/'] - neturlindex = mainserverlist.index(currentneturl) - if neturlindex+1 >= len(mainserverlist): - return mainserverlist[neturlindex+1 - len(mainserverlist)] - else: - return mainserverlist[neturlindex+1] - -def reconnectWebsocket(socket_variable): - # Switch a to different flosight - # neturl = switchNeturl(neturl) - # Connect to Flosight websocket to get data on new incoming blocks - i=0 - newurl = neturl - while(not socket_variable.connected): - logger.info(f"While loop {i}") - logger.info(f"Sleeping for 3 seconds before attempting reconnect to {newurl}") - time.sleep(3) - try: - """ neturl = temp - logger.info(f"neturl: {neturl}") """ - scanBlockchain() - logger.info(f"Websocket endpoint which is being connected to {newurl}socket.io/socket.io.js") - socket_variable.connect(f"{newurl}socket.io/socket.io.js") - i=i+1 - except: - logger.info(f"disconnect block: Failed reconnect attempt to {newurl}") - newurl = switchNeturl(newurl) - i=i+1 - - -# MAIN LOGIC -# scan from the latest block saved locally to latest network block -scanBlockchain() - -# At this point the script has updated to the latest block -# Now we connect to flosight's websocket API to get information about the latest blocks -# Neturl is the URL for Flosight API whose websocket endpoint is being connected to - -neturl = 'https://flosight.duckdns.org/' -sio = socketio.Client() -# Connect to a websocket endpoint and wait for further events -reconnectWebsocket(sio) -#sio.connect(f"{neturl}socket.io/socket.io.js") - -@sio.on('connect') -def token_connect(): - current_time=datetime.now().strftime('%H:%M:%S') - logger.info(f"Token Tracker has connected to websocket endpoint. Time : {current_time}") - sio.emit('subscribe', 'inv') - -@sio.on('disconnect') -def token_disconnect(): - current_time = datetime.now().strftime('%H:%M:%S') - logger.info(f"disconnect block: Token Tracker disconnected from websocket endpoint. 
Time : {current_time}") - logger.info('disconnect block: Triggering client disconnect') - sio.disconnect() - logger.info('disconnect block: Finished triggering client disconnect') - reconnectWebsocket(sio) - -@sio.on('connect_error') -def connect_error(): - current_time = datetime.now().strftime('%H:%M:%S') - logger.info(f"connection error block: Token Tracker disconnected from websocket endpoint. Time : {current_time}") - logger.info('connection error block: Triggering client disconnect') - sio.disconnect() - logger.info('connection error block: Finished triggering client disconnect') - reconnectWebsocket(sio) - -@sio.on('block') -def on_block(data): - logger.info('New block received') - logger.info(str(data)) - processApiBlock(data) - diff --git a/tracktokens_smartcontracts.py b/tracktokens_smartcontracts.py new file mode 100755 index 0000000..fb54a5d --- /dev/null +++ b/tracktokens_smartcontracts.py @@ -0,0 +1,2498 @@ +import argparse +import configparser +import json +import logging +import os +import shutil +import sys +import pyflo +import requests +import socketio +from sqlalchemy import create_engine, func, and_ +from sqlalchemy.orm import sessionmaker +import time +import arrow +import parsing +import re +from datetime import datetime +from ast import literal_eval +from models import SystemData, TokenBase, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, TokenContractAssociation, ContractBase, ContractStructure, ContractParticipants, ContractTransactionHistory, ContractDeposits, ConsumedInfo, ContractWinners, ContinuosContractBase, ContractStructure2, ContractParticipants2, ContractDeposits2, ContractTransactionHistory2, SystemBase, ActiveContracts, SystemData, ContractAddressMapping, TokenAddressMapping, DatabaseTypeMapping, TimeActions, RejectedContractTransactionHistory, RejectedTransactionHistory, LatestCacheBase, LatestTransactions, LatestBlocks +from statef_processing import process_stateF +import pdb + + +def newMultiRequest(apicall): + 
current_server = serverlist[0] + while True: + try: + response = requests.get('{}api/v1/{}'.format(current_server, apicall)) + except: + current_server = switchNeturl(current_server) + logger.info(f"newMultiRequest() switched to {current_server}") + time.sleep(2) + else: + if response.status_code == 200: + return json.loads(response.content) + else: + current_server = switchNeturl(current_server) + logger.info(f"newMultiRequest() switched to {current_server}") + time.sleep(2) + + +def pushData_SSEapi(message): + '''signature = pyflo.sign_message(message.encode(), privKey) + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', 'Signature': signature} + + try: + r = requests.post(sseAPI_url, json={'message': '{}'.format(message)}, headers=headers) + except: + logger.error("couldn't push the following message to SSE api {}".format(message))''' + print('') + + +def process_committee_flodata(flodata): + flo_address_list = [] + try: + contract_committee_actions = flodata['token-tracker']['contract-committee'] + except KeyError: + print('Flodata related to contract committee') + else: + # Adding first and removing later to maintain consistency and not to depend on floData for order of execution + for action in contract_committee_actions.keys(): + if action == 'add': + for floid in contract_committee_actions[f'{action}']: + flo_address_list.append(floid) + + for action in contract_committee_actions.keys(): + if action == 'remove': + for floid in contract_committee_actions[f'{action}']: + flo_address_list.remove(floid) + finally: + return flo_address_list + + +def refresh_committee_list_old(admin_flo_id, api_url, blocktime): + response = requests.get(f'{api_url}api/v1/address/{admin_flo_id}') + if response.status_code == 200: + response = response.json() + else: + print('Response from the Flosight API failed') + sys.exit(0) + + committee_list = [] + response['transactions'].reverse() + for idx, transaction in enumerate(response['transactions']): + 
transaction_info = requests.get(f'{api_url}api/v1/tx/{transaction}') + if transaction_info.status_code == 200: + transaction_info = transaction_info.json() + if transaction_info['vin'][0]['addresses'][0]==admin_flo_id and transaction_info['blocktime']<=blocktime: + try: + tx_flodata = json.loads(transaction_info['floData']) + committee_list += process_committee_flodata(tx_flodata) + except: + continue + return committee_list + + +def refresh_committee_list(admin_flo_id, api_url, blocktime): + committee_list = [] + latest_param = 'true' + mempool_param = 'false' + init_id = None + + def process_transaction(transaction_info): + if 'isCoinBase' in transaction_info or transaction_info['vin'][0]['addresses'][0] != admin_flo_id or transaction_info['blocktime'] > blocktime: + return + try: + tx_flodata = json.loads(transaction_info['floData']) + committee_list.extend(process_committee_flodata(tx_flodata)) + except: + pass + + def send_api_request(url): + response = requests.get(url) + if response.status_code == 200: + return response.json() + else: + print('Response from the Flosight API failed') + sys.exit(0) + + url = f'{api_url}api/v1/address/{admin_flo_id}?details=txs' + response = send_api_request(url) + for transaction_info in response.get('txs', []): + process_transaction(transaction_info) + + while 'incomplete' in response: + url = f'{api_url}api/v1/address/{admin_flo_id}/txs?latest={latest_param}&mempool={mempool_param}&before={init_id}' + response = send_api_request(url) + for transaction_info in response.get('items', []): + process_transaction(transaction_info) + if 'incomplete' in response: + init_id = response['initItem'] + + return committee_list + + +def find_sender_receiver(transaction_data): + # Create vinlist and outputlist + vinlist = [] + querylist = [] + + #totalinputval = 0 + #inputadd = '' + + # todo Rule 40 - For each vin, find the feeding address and the fed value. 
Make an inputlist containing [inputaddress, n value] + for vin in transaction_data["vin"]: + vinlist.append([vin["addresses"][0], float(vin["value"])]) + + totalinputval = float(transaction_data["valueIn"]) + + # todo Rule 41 - Check if all the addresses in a transaction on the input side are the same + for idx, item in enumerate(vinlist): + if idx == 0: + temp = item[0] + continue + if item[0] != temp: + print(f"System has found more than one address as part of vin. Transaction {transaction_data['txid']} is rejected") + return 0 + + inputlist = [vinlist[0][0], totalinputval] + inputadd = vinlist[0][0] + + # todo Rule 42 - If the number of vout is more than 2, reject the transaction + if len(transaction_data["vout"]) > 2: + print(f"System has found more than 2 address as part of vout. Transaction {transaction_data['txid']} is rejected") + return 0 + + # todo Rule 43 - A transaction accepted by the system has two vouts, 1. The FLO address of the receiver + # 2. Flo address of the sender as change address. If the vout address is change address, then the other adddress + # is the recevier address + + outputlist = [] + addresscounter = 0 + inputcounter = 0 + for obj in transaction_data["vout"]: + if obj["scriptPubKey"]["type"] == "pubkeyhash": + addresscounter = addresscounter + 1 + if inputlist[0] == obj["scriptPubKey"]["addresses"][0]: + inputcounter = inputcounter + 1 + continue + outputlist.append([obj["scriptPubKey"]["addresses"][0], obj["value"]]) + + if addresscounter == inputcounter: + outputlist = [inputlist[0]] + elif len(outputlist) != 1: + print(f"Transaction's change is not coming back to the input address. 
Transaction {transaction_data['txid']} is rejected") + return 0 + else: + outputlist = outputlist[0] + + return inputlist[0], outputlist[0] + + +def check_database_existence(type, parameters): + if type == 'token': + path = os.path.join(config['DEFAULT']['DATA_PATH'], 'tokens', f'{parameters["token_name"]}.db') + return os.path.isfile(path) + + if type == 'smart_contract': + path = os.path.join(config['DEFAULT']['DATA_PATH'], 'smartContracts', f"{parameters['contract_name']}-{parameters['contract_address']}.db") + return os.path.isfile(path) + + +def create_database_connection(type, parameters=None): + if type == 'token': + path = os.path.join(config['DEFAULT']['DATA_PATH'], 'tokens', f"{parameters['token_name']}.db") + engine = create_engine(f"sqlite:///{path}", echo=True) + elif type == 'smart_contract': + path = os.path.join(config['DEFAULT']['DATA_PATH'], 'smartContracts', f"{parameters['contract_name']}-{parameters['contract_address']}.db") + engine = create_engine(f"sqlite:///{path}", echo=True) + elif type == 'system_dbs': + path = os.path.join(config['DEFAULT']['DATA_PATH'], f"system.db") + engine = create_engine(f"sqlite:///{path}", echo=False) + elif type == 'latest_cache': + path = os.path.join(config['DEFAULT']['DATA_PATH'], f"latestCache.db") + engine = create_engine(f"sqlite:///{path}", echo=False) + + connection = engine.connect() + return connection + + +def create_database_session_orm(type, parameters, base): + if type == 'token': + path = os.path.join(config['DEFAULT']['DATA_PATH'], 'tokens', f"{parameters['token_name']}.db") + engine = create_engine(f"sqlite:///{path}", echo=True) + base.metadata.create_all(bind=engine) + session = sessionmaker(bind=engine)() + + elif type == 'smart_contract': + path = os.path.join(config['DEFAULT']['DATA_PATH'], 'smartContracts', f"{parameters['contract_name']}-{parameters['contract_address']}.db") + engine = create_engine(f"sqlite:///{path}", echo=True) + base.metadata.create_all(bind=engine) + session = 
sessionmaker(bind=engine)() + + elif type == 'system_dbs': + path = os.path.join(config['DEFAULT']['DATA_PATH'], f"{parameters['db_name']}.db") + engine = create_engine(f"sqlite:///{path}", echo=False) + base.metadata.create_all(bind=engine) + session = sessionmaker(bind=engine)() + + return session + + +def delete_contract_database(parameters): + if check_database_existence('smart_contract', {'contract_name':f"{parameters['contract_name']}", 'contract_address':f"{parameters['contract_address']}"}): + path = os.path.join(config['DEFAULT']['DATA_PATH'], 'smartContracts', f"{parameters['contract_name']}-{parameters['contract_address']}.db") + os.remove(path) + + +def add_transaction_history(token_name, sourceFloAddress, destFloAddress, transferAmount, blockNumber, blockHash, blocktime, transactionHash, jsonData, transactionType, parsedFloData): + session = create_database_session_orm('token', {'token_name': token_name}, TokenBase) + blockchainReference = neturl + 'tx/' + transactionHash + session.add(TransactionHistory( + sourceFloAddress=sourceFloAddress, + destFloAddress=destFloAddress, + transferAmount=transferAmount, + blockNumber=blockNumber, + blockHash=blockHash, + time=blocktime, + transactionHash=transactionHash, + blockchainReference=blockchainReference, + jsonData=jsonData, + transactionType=transactionType, + parsedFloData=parsedFloData + )) + session.commit() + session.close() + + +def add_contract_transaction_history(contract_name, contract_address, transactionType, transactionSubType, sourceFloAddress, destFloAddress, transferAmount, blockNumber, blockHash, blocktime, transactionHash, jsonData, parsedFloData): + session = create_database_session_orm('smart_contract', {'contract_name': f"{contract_name}", 'contract_address': f"{contract_address}"}, ContractBase) + blockchainReference = neturl + 'tx/' + transactionHash + session.add(ContractTransactionHistory(transactionType=transactionType, + sourceFloAddress=sourceFloAddress, + 
destFloAddress=destFloAddress, + transferAmount=transferAmount, + blockNumber=blockNumber, + blockHash=blockHash, + time=blocktime, + transactionHash=transactionHash, + blockchainReference=blockchainReference, + jsonData=jsonData, + parsedFloData=parsedFloData + )) + session.commit() + session.close() + + +def rejected_transaction_history(transaction_data, parsed_data, sourceFloAddress, destFloAddress, rejectComment): + session = create_database_session_orm('system_dbs', {'db_name': "system"}, TokenBase) + blockchainReference = neturl + 'tx/' + transaction_data['txid'] + session.add(RejectedTransactionHistory(tokenIdentification=parsed_data['tokenIdentification'], + sourceFloAddress=sourceFloAddress, destFloAddress=destFloAddress, + transferAmount=parsed_data['tokenAmount'], + blockNumber=transaction_data['blockheight'], + blockHash=transaction_data['blockhash'], + time=transaction_data['time'], + transactionHash=transaction_data['txid'], + blockchainReference=blockchainReference, + jsonData=json.dumps(transaction_data), + rejectComment=rejectComment, + transactionType=parsed_data['type'], + parsedFloData=json.dumps(parsed_data) + )) + session.commit() + session.close() + + +def rejected_contract_transaction_history(transaction_data, parsed_data, transactionType, contractAddress, sourceFloAddress, destFloAddress, rejectComment): + session = create_database_session_orm('system_dbs', {'db_name': "system"}, SystemBase) + blockchainReference = neturl + 'tx/' + transaction_data['txid'] + session.add(RejectedContractTransactionHistory(transactionType=transactionType, + contractName=parsed_data['contractName'], + contractAddress=contractAddress, + sourceFloAddress=sourceFloAddress, + destFloAddress=destFloAddress, + transferAmount=None, + blockNumber=transaction_data['blockheight'], + blockHash=transaction_data['blockhash'], + time=transaction_data['time'], + transactionHash=transaction_data['txid'], + blockchainReference=blockchainReference, + 
jsonData=json.dumps(transaction_data), + rejectComment=rejectComment, + parsedFloData=json.dumps(parsed_data))) + session.commit() + session.close() + + +def convert_datetime_to_arrowobject(expiryTime): + expirytime_split = expiryTime.split(' ') + parse_string = '{}/{}/{} {}'.format(expirytime_split[3], parsing.months[expirytime_split[1]], expirytime_split[2], expirytime_split[4]) + expirytime_object = parsing.arrow.get(parse_string, 'YYYY/M/D HH:mm:ss').replace(tzinfo=expirytime_split[5][3:]) + return expirytime_object + +def convert_datetime_to_arrowobject_regex(expiryTime): + datetime_re = re.compile(r'(\w{3}\s\w{3}\s\d{1,2}\s\d{4}\s\d{2}:\d{2}:\d{2})\s(gmt[+-]\d{4})') + match = datetime_re.search(expiryTime) + if match: + datetime_str = match.group(1) + timezone_offset = match.group(2)[3:] + dt = arrow.get(datetime_str, 'ddd MMM DD YYYY HH:mm:ss').replace(tzinfo=timezone_offset) + return dt + else: + return 0 + + +def is_a_contract_address(floAddress): + # check contract address mapping db if the address is present, and return True or False based on that + system_db = create_database_session_orm('system_dbs', {'db_name':'system'}, SystemBase) + contract_number = system_db.query(func.sum(ContractAddressMapping.contractAddress)).filter(ContractAddressMapping.contractAddress == floAddress).all()[0][0] + if contract_number is None: + return False + else: + return True + + +def fetchDynamicSwapPrice_old(contractStructure, transaction_data, blockinfo): + oracle_address = contractStructure['oracle_address'] + # fetch transactions from the blockchain where from address : oracle-address... 
to address: contract address + # find the first contract transaction which adheres to price change format + # {"price-update":{"contract-name": "", "contract-address": "", "price": 3}} + response = requests.get(f'{neturl}api/v1/address/{oracle_address}') + if response.status_code == 200: + response = response.json() + if 'transactions' not in response.keys(): # API doesn't return 'transactions' key, if 0 txs present on address + return float(contractStructure['price']) + else: + transactions = response['transactions'] + for transaction_hash in transactions: + transaction_response = requests.get(f'{neturl}api/v1/tx/{transaction_hash}') + if transaction_response.status_code == 200: + transaction = transaction_response.json() + floData = transaction['floData'] + # If the blocktime of the transaction is < than the current block time + if transaction['time'] < blockinfo['time']: + # Check if flodata is in the format we are looking for + # ie. {"price-update":{"contract-name": "", "contract-address": "", "price": 3}} + # and receiver address should be contractAddress + try: + assert transaction_data['receiverAddress'] == contractStructure['contractAddress'] + assert find_sender_receiver(transaction)[0] == oracle_address + floData = json.loads(floData) + # Check if the contract name and address are right + assert floData['price-update']['contract-name'] == contractStructure['contractName'] + assert floData['price-update']['contract-address'] == contractStructure['contractAddress'] + return float(floData['price-update']['price']) + except: + continue + else: + continue + else: + logger.info('API error while fetchDynamicSwapPrice') + sys.exit(0) + return float(contractStructure['price']) + else: + logger.info('API error fetchDynamicSwapPrice') + sys.exit(0) + + +def fetchDynamicSwapPrice(contractStructure, blockinfo): + oracle_address = contractStructure['oracle_address'] + # fetch transactions from the blockchain where from address : oracle-address... 
to address: contract address + # find the first contract transaction which adheres to price change format + # {"price-update":{"contract-name": "", "contract-address": "", "price": 3}} + is_incomplete_key_present = False + latest_param = 'true' + mempool_param = 'false' + init_id = None + response = requests.get(f'{api_url}api/v1/address/{oracle_address}?details=txs') + if response.status_code == 200: + response = response.json() + if len(response['txs']) == 0: + return float(contractStructure['price']) + else: + for transaction in response['txs']: + floData = transaction['floData'] + # If the blocktime of the transaction is < than the current block time + if transaction['time'] < blockinfo['time']: + # Check if flodata is in the format we are looking for + # ie. {"price-update":{"contract-name": "", "contract-address": "", "price": 3}} + # and receiver address should be contractAddress + try: + sender_address, receiver_address = find_sender_receiver(transaction) + assert sender_address == oracle_address + assert receiver_address == contractStructure['contractAddress'] + floData = json.loads(floData) + # Check if the contract name and address are right + assert floData['price-update']['contract-name'] == contractStructure['contractName'] + assert floData['price-update']['contract-address'] == contractStructure['contractAddress'] + return float(floData['price-update']['price']) + except: + continue + else: + continue + else: + logger.info('API error fetchDynamicSwapPrice') + sys.exit(0) + + # Chain query + if 'incomplete' in response.keys(): + is_incomplete_key_present = True + init_id = response['initItem'] + + while(is_incomplete_key_present == True): + response = requests.get(f'{api_url}api/v1/address/{oracle_address}?details=txs') + if response.status_code == 200: + response = response.json() + for transaction in response['txs']: + floData = transaction['floData'] + # If the blocktime of the transaction is < than the current block time + if transaction['time'] < 
blockinfo['time']: + # Check if flodata is in the format we are looking for + # ie. {"price-update":{"contract-name": "", "contract-address": "", "price": 3}} + # and receiver address should be contractAddress + try: + sender_address, receiver_address = find_sender_receiver(transaction) + assert sender_address == oracle_address + assert receiver_address == contractStructure['contractAddress'] + floData = json.loads(floData) + # Check if the contract name and address are right + assert floData['price-update']['contract-name'] == contractStructure['contractName'] + assert floData['price-update']['contract-address'] == contractStructure['contractAddress'] + return float(floData['price-update']['price']) + except: + continue + else: + continue + else: + logger.info('API error fetchDynamicSwapPrice') + sys.exit(0) + return float(contractStructure['price']) + + +def processBlock(blockindex=None, blockhash=None): + if blockindex is not None and blockhash is None: + logger.info(f'Processing block {blockindex}') + # Get block details + response = newMultiRequest(f"block-index/{blockindex}") + blockhash = response['blockHash'] + + blockinfo = newMultiRequest(f"block/{blockhash}") + pause_index = [2211699, 2211700, 2211701, 2170000, 2468107, 2468108, 2489267, 2449017, 2509873, 2509874, 2291729, 2467929, 6202174] + if blockindex in pause_index: + print(f'Paused at {blockindex}') + + # Check smartContracts which will be triggered locally, and not by the contract committee + #checkLocaltriggerContracts(blockinfo) + # Check if any deposits have to be returned + #checkReturnDeposits(blockinfo) + + #checkLocal_expiry_trigger_deposit(blockinfo) + + # todo Rule 8 - read every transaction from every block to find and parse flodata + counter = 0 + acceptedTxList = [] + # Scan every transaction + logger.info("Before tx loop") + + for transaction_data in blockinfo["txs"]: + transaction = transaction_data["txid"] + + if transaction in 
['ff355c3384e2568e1dd230d5c9073618b9033c7c8b20f9e8533b5837f76bc65d', 'dd35c592fa7ba76718c894b5b3195e1151e79c5fb91472c06f416c99c7827e6d', + '39ef49e0e06438bda462c794955735e7ea3ae81cb576ec5c97b528c8a257614c', + 'c58bebd583a5b24a9d342712efb9e4b2eac33fe36d8ebe9119126c02f766986c', + 'ec6604d147d99ec41f05dec82f9c241815358015904fad37ace061d7580b178e', + '39ef49e0e06438bda462c794955735e7ea3ae81cb576ec5c97b528c8a257614c', + 'd36b744d6b9d8a694a93476dbd1134dbdc8223cf3d1a604447acb09221aa3b49', + '64abe801d12224d10422de88070a76ad8c6d17b533ba5288fb0961b4cbf6adf4', + 'ec9a852aa8a27877ba79ae99cc1359c0e04f6e7f3097521279bcc68e3883d760', + '16e836ceb973447a5fd71e969d7d4cde23330547a855731003c7fc53c86937e4', + 'fe2ce0523254efc9eb2270f0efb837de3fc7844d9c64523b20c0ac48c21f64e6', + 'a74a03ec1e77fa50e0b586b1e9745225ad4f78ce96ca59d6ac025f8057dd095c']: + print(f'Paused at transaction {transaction}') + + try: + text = transaction_data["floData"] + except: + text = '' + text = text.replace("\n", " \n ") + # todo Rule 9 - Reject all noise transactions. Further rules are in parsing.py + returnval = None + parsed_data = parsing.parse_flodata(text, blockinfo, config['DEFAULT']['NET']) + if parsed_data['type'] != 'noise': + logger.info(f"Processing transaction {transaction}") + logger.info(f"flodata {text} is parsed to {parsed_data}") + returnval = processTransaction(transaction_data, parsed_data, blockinfo) + + if returnval == 1: + acceptedTxList.append(transaction) + elif returnval == 0: + logger.info("Transfer for the transaction %s is illegitimate. 
Moving on" % transaction) + + if len(acceptedTxList) > 0: + tempinfo = blockinfo['txs'].copy() + for tx in blockinfo['txs']: + if tx['txid'] not in acceptedTxList: + tempinfo.remove(tx) + blockinfo['txs'] = tempinfo + updateLatestBlock(blockinfo) + + session = create_database_session_orm('system_dbs', {'db_name': "system"}, SystemBase) + entry = session.query(SystemData).filter(SystemData.attribute == 'lastblockscanned').all()[0] + entry.value = str(blockinfo['height']) + session.commit() + session.close() + + +def updateLatestTransaction(transactionData, parsed_data, db_reference, transactionType=None ): + # connect to latest transaction db + conn = create_database_connection('latest_cache', {'db_name':"latestCache"}) + if transactionType is None: + transactionType = parsed_data['type'] + conn.execute("INSERT INTO latestTransactions(transactionHash, blockNumber, jsonData, transactionType, parsedFloData, db_reference) VALUES (?,?,?,?,?,?)", (transactionData['txid'], transactionData['blockheight'], json.dumps(transactionData), transactionType, json.dumps(parsed_data), db_reference)) + #conn.commit() + conn.close() + + +def updateLatestBlock(blockData): + # connect to latest block db + conn = create_database_connection('latest_cache', {'db_name':"latestCache"}) + conn.execute('INSERT INTO latestBlocks(blockNumber, blockHash, jsonData) VALUES (?,?,?)', (blockData['height'], blockData['hash'], json.dumps(blockData))) + #conn.commit() + conn.close() + + +def process_pids(entries, session, piditem): + for entry in entries: + '''consumedpid_dict = literal_eval(entry.consumedpid) + total_consumedpid_amount = 0 + for key in consumedpid_dict.keys(): + total_consumedpid_amount = total_consumedpid_amount + float(consumedpid_dict[key]) + consumedpid_dict[piditem[0]] = total_consumedpid_amount + entry.consumedpid = str(consumedpid_dict)''' + entry.orphaned_parentid = entry.parentid + entry.parentid = None + #session.commit() + return 1 + + +def transferToken(tokenIdentification, 
tokenAmount, inputAddress, outputAddress, transaction_data=None, parsed_data=None, isInfiniteToken=None, blockinfo=None, transactionType=None): + + # provide default transactionType value + if transactionType is None: + transactionType=parsed_data['type'] + + session = create_database_session_orm('token', {'token_name': f"{tokenIdentification}"}, TokenBase) + tokenAmount = float(tokenAmount) + if isInfiniteToken == True: + # Make new entry + receiverAddress_details = session.query(ActiveTable).filter(ActiveTable.address==outputAddress, ActiveTable.addressBalance!=None).first() + if receiverAddress_details is None: + addressBalance = tokenAmount + else: + addressBalance = receiverAddress_details.addressBalance + tokenAmount + receiverAddress_details.addressBalance = None + session.add(ActiveTable(address=outputAddress, consumedpid='1', transferBalance=tokenAmount, addressBalance=addressBalance, blockNumber=blockinfo['height'])) + + add_transaction_history(token_name=tokenIdentification, sourceFloAddress=inputAddress, destFloAddress=outputAddress, transferAmount=tokenAmount, blockNumber=blockinfo['height'], blockHash=blockinfo['hash'], blocktime=blockinfo['time'], transactionHash=transaction_data['txid'], jsonData=json.dumps(transaction_data), transactionType=transactionType, parsedFloData=json.dumps(parsed_data)) + session.commit() + session.close() + return 1 + + else: + availableTokens = session.query(func.sum(ActiveTable.transferBalance)).filter_by(address=inputAddress).all()[0][0] + commentTransferAmount = float(tokenAmount) + if availableTokens is None: + logger.info(f"The sender address {inputAddress} doesn't own any {tokenIdentification.upper()} tokens") + session.close() + return 0 + + elif availableTokens < commentTransferAmount: + logger.info("The transfer amount passed in the comments is more than the user owns\nThis transaction will be discarded\n") + session.close() + return 0 + + elif availableTokens >= commentTransferAmount: + table = 
session.query(ActiveTable).filter(ActiveTable.address == inputAddress).all() + pidlst = [] + checksum = 0 + for row in table: + if checksum >= commentTransferAmount: + break + pidlst.append([row.id, row.transferBalance]) + checksum = checksum + row.transferBalance + + if checksum == commentTransferAmount: + consumedpid_string = '' + # Update all pids in pidlist's transferBalance to 0 + lastid = session.query(ActiveTable)[-1].id + piddict = {} + for piditem in pidlst: + entry = session.query(ActiveTable).filter(ActiveTable.id == piditem[0]).all() + consumedpid_string = consumedpid_string + '{},'.format(piditem[0]) + piddict[piditem[0]] = piditem[1] + session.add(TransferLogs(sourceFloAddress=inputAddress, destFloAddress=outputAddress, + transferAmount=entry[0].transferBalance, sourceId=piditem[0], + destinationId=lastid + 1, + blockNumber=blockinfo['height'], time=blockinfo['time'], + transactionHash=transaction_data['txid'])) + entry[0].transferBalance = 0 + + if len(consumedpid_string) > 1: + consumedpid_string = consumedpid_string[:-1] + + # Make new entry + receiverAddress_details = session.query(ActiveTable).filter(ActiveTable.address==outputAddress, ActiveTable.addressBalance!=None).first() + if receiverAddress_details is None: + addressBalance = commentTransferAmount + else: + addressBalance = receiverAddress_details.addressBalance + commentTransferAmount + receiverAddress_details.addressBalance = None + session.add(ActiveTable(address=outputAddress, consumedpid=str(piddict), transferBalance=commentTransferAmount, addressBalance=addressBalance, blockNumber=blockinfo['height'])) + + senderAddress_details = session.query(ActiveTable).filter_by(address=inputAddress).order_by(ActiveTable.id.desc()).first() + senderAddress_details.addressBalance = senderAddress_details.addressBalance - commentTransferAmount + + # Migration + # shift pid of used utxos from active to consumed + for piditem in pidlst: + # move the parentids consumed to consumedpid column in both 
activeTable and consumedTable + entries = session.query(ActiveTable).filter(ActiveTable.parentid == piditem[0]).all() + process_pids(entries, session, piditem) + + entries = session.query(ConsumedTable).filter(ConsumedTable.parentid == piditem[0]).all() + process_pids(entries, session, piditem) + + # move the pids consumed in the transaction to consumedTable and delete them from activeTable + session.execute('INSERT INTO consumedTable (id, address, parentid, consumedpid, transferBalance, addressBalance, orphaned_parentid, blockNumber) SELECT id, address, parentid, consumedpid, transferBalance, addressBalance, orphaned_parentid, blockNumber FROM activeTable WHERE id={}'.format(piditem[0])) + session.execute('DELETE FROM activeTable WHERE id={}'.format(piditem[0])) + session.commit() + session.commit() + + elif checksum > commentTransferAmount: + consumedpid_string = '' + # Update all pids in pidlist's transferBalance + lastid = session.query(ActiveTable)[-1].id + piddict = {} + for idx, piditem in enumerate(pidlst): + entry = session.query(ActiveTable).filter(ActiveTable.id == piditem[0]).all() + if idx != len(pidlst) - 1: + session.add(TransferLogs(sourceFloAddress=inputAddress, destFloAddress=outputAddress, + transferAmount=entry[0].transferBalance, sourceId=piditem[0], + destinationId=lastid + 1, + blockNumber=blockinfo['height'], time=blockinfo['time'], + transactionHash=transaction_data['txid'])) + entry[0].transferBalance = 0 + piddict[piditem[0]] = piditem[1] + consumedpid_string = consumedpid_string + '{},'.format(piditem[0]) + else: + session.add(TransferLogs(sourceFloAddress=inputAddress, destFloAddress=outputAddress, + transferAmount=piditem[1] - (checksum - commentTransferAmount), + sourceId=piditem[0], + destinationId=lastid + 1, + blockNumber=blockinfo['height'], time=blockinfo['time'], + transactionHash=transaction_data['txid'])) + entry[0].transferBalance = checksum - commentTransferAmount + + if len(consumedpid_string) > 1: + consumedpid_string = 
consumedpid_string[:-1] + + # Make new entry + receiverAddress_details = session.query(ActiveTable).filter(ActiveTable.address==outputAddress, ActiveTable.addressBalance!=None).first() + if receiverAddress_details is None: + addressBalance = commentTransferAmount + else: + addressBalance = receiverAddress_details.addressBalance + commentTransferAmount + receiverAddress_details.addressBalance = None + session.add(ActiveTable(address=outputAddress, parentid=pidlst[-1][0], consumedpid=str(piddict), transferBalance=commentTransferAmount, addressBalance=addressBalance, blockNumber=blockinfo['height'])) + + senderAddress_details = session.query(ActiveTable).filter_by(address=inputAddress).order_by(ActiveTable.id.desc()).first() + senderAddress_details.addressBalance = senderAddress_details.addressBalance - commentTransferAmount + + # Migration + # shift pid of used utxos from active to consumed + for piditem in pidlst[:-1]: + # move the parentids consumed to consumedpid column in both activeTable and consumedTable + entries = session.query(ActiveTable).filter(ActiveTable.parentid == piditem[0]).all() + process_pids(entries, session, piditem) + + entries = session.query(ConsumedTable).filter(ConsumedTable.parentid == piditem[0]).all() + process_pids(entries, session, piditem) + + # move the pids consumed in the transaction to consumedTable and delete them from activeTable + session.execute('INSERT INTO consumedTable (id, address, parentid, consumedpid, transferBalance, addressBalance, orphaned_parentid, blockNumber) SELECT id, address, parentid, consumedpid, transferBalance, addressBalance, orphaned_parentid, blockNumber FROM activeTable WHERE id={}'.format(piditem[0])) + session.execute('DELETE FROM activeTable WHERE id={}'.format(piditem[0])) + session.commit() + + add_transaction_history(token_name=tokenIdentification, sourceFloAddress=inputAddress, destFloAddress=outputAddress, transferAmount=tokenAmount, blockNumber=blockinfo['height'], blockHash=blockinfo['hash'], 
def trigger_internal_contract(tokenAmount_sum, contractStructure, transaction_data, blockinfo, parsed_data, connection, contract_name, contract_address, transaction_subType):
    """Trigger a contract internally by distributing the participation pool.

    The pool (tokenAmount_sum) is split among the payee addresses recorded in
    the contract structure, proportionally to their stored percentages. Every
    successful payout is appended to ContractTransactionHistory.

    Returns 1 on success (including the zero-participation case), 0 when a
    token transfer fails.
    """
    if tokenAmount_sum <= 0:
        # No participation was collected: record a zero-participation trigger
        # in the contract's history and finish successfully.
        add_contract_transaction_history(
            contract_name=contract_name, contract_address=contract_address,
            transactionType='trigger', transactionSubType='zero-participation',
            sourceFloAddress='', destFloAddress='', transferAmount=0,
            blockNumber=blockinfo['height'], blockHash=blockinfo['hash'],
            blocktime=blockinfo['time'], transactionHash=transaction_data['txid'],
            jsonData=json.dumps(transaction_data), parsedFloData=json.dumps(parsed_data))
        return 1

    # payeeAddress is stored as a JSON mapping of address -> percentage share.
    payee_split = json.loads(contractStructure['payeeAddress'])
    token_name = contractStructure['tokenIdentification']

    for payee, percentage in payee_split.items():
        share = tokenAmount_sum * (percentage / 100)
        outcome = transferToken(token_name, share, contract_address, payee,
                                transaction_data=transaction_data,
                                blockinfo=blockinfo, parsed_data=parsed_data)
        if outcome == 0:
            logger.critical("Something went wrong in the token transfer method while doing local Smart Contract Trigger")
            return 0

        # One history row per payee payout.
        add_contract_transaction_history(
            contract_name=contract_name, contract_address=contract_address,
            transactionType='trigger', transactionSubType=transaction_subType,
            sourceFloAddress=contract_address, destFloAddress=payee,
            transferAmount=share, blockNumber=blockinfo['height'],
            blockHash=blockinfo['hash'], blocktime=blockinfo['time'],
            transactionHash=transaction_data['txid'],
            jsonData=json.dumps(transaction_data), parsedFloData=json.dumps(parsed_data))
    return 1
def process_minimum_subscriptionamount(contractStructure, connection, blockinfo, transaction_data, parsed_data):
    """Refund all participants when a closing contract missed its minimum.

    If the summed participation is below minimumsubscriptionamount, every
    participant is paid back their contribution and the payback is recorded
    in ContractTransactionHistory.

    Returns 1 when paybacks were issued, 0 otherwise (minimum was met, or a
    token transfer failed — the failure is logged as critical).
    """
    minimumsubscriptionamount = float(contractStructure['minimumsubscriptionamount'])
    tokenAmount_sum = connection.execute('SELECT IFNULL(sum(tokenAmount), 0) FROM contractparticipants').fetchall()[0][0]
    if tokenAmount_sum >= minimumsubscriptionamount:
        return 0

    # Minimum not reached: pay every participant back their token amount.
    contractParticipants = connection.execute('SELECT participantAddress, tokenAmount, transactionHash FROM contractparticipants').fetchall()

    # Loop-invariant lookups hoisted out of the refund loop (the original
    # re-queried the contract address for every participant).
    tokenIdentification = contractStructure['tokenIdentification']
    # NOTE(review): the first column of `SELECT *` is assumed to hold the
    # contract address, as in the original code — verify the table schema.
    contractAddress = connection.execute('SELECT * FROM contractstructure WHERE attribute="contractAddress"').fetchall()[0][0]

    for participantAddress, tokenAmount, txHash in contractParticipants:
        # BUGFIX: pass transaction_data/parsed_data through — the visible
        # transferToken implementation writes transaction_data['txid'] into
        # TransferLogs unconditionally and would fail without them.
        returnval = transferToken(tokenIdentification, tokenAmount, contractAddress, participantAddress,
                                  transaction_data=transaction_data, parsed_data=parsed_data, blockinfo=blockinfo)
        if returnval == 0:
            logger.critical("Something went wrong in the token transfer method while doing local Smart Contract Trigger. THIS IS CRITICAL ERROR")
            return 0  # explicit failure instead of an implicit None

        # NOTE(review): string-formatted SQL; participant data originates from
        # flodata — consider '?' placeholders as used elsewhere in this file.
        connection.execute('UPDATE contractparticipants SET winningAmount="{}" WHERE participantAddress="{}" AND transactionHash="{}"'.format(tokenAmount, participantAddress, txHash))

    # Record the payback event against the contract's history.
    add_contract_transaction_history(contract_name=contractStructure['contractName'], contract_address=contractStructure['contractAddress'], transactionType=parsed_data['type'], transactionSubType='minimumsubscriptionamount-payback', sourceFloAddress=None, destFloAddress=None, transferAmount=None, blockNumber=blockinfo['height'], blockHash=blockinfo['hash'], blocktime=blockinfo['time'], transactionHash=transaction_data['txid'], jsonData=json.dumps(transaction_data), parsedFloData=json.dumps(parsed_data))
    return 1
def process_maximum_subscriptionamount(contractStructure, connection, status, blockinfo, transaction_data, parsed_data):
    """Trigger the contract once participation reaches maximumsubscriptionamount.

    Returns 1 when the maximum has been reached (and, for status == 'close',
    the internal trigger succeeded), 0 otherwise.
    """
    maximumsubscriptionamount = float(contractStructure['maximumsubscriptionamount'])
    tokenAmount_sum = connection.execute('SELECT IFNULL(sum(tokenAmount), 0) FROM contractparticipants').fetchall()[0][0]
    if tokenAmount_sum < maximumsubscriptionamount:
        return 0

    if status == 'close':
        # BUGFIX: the original referenced an undefined name `query`
        # (query.contractName / query.contractAddress) here, which raised a
        # NameError; the contract identity comes from contractStructure.
        success_returnval = trigger_internal_contract(tokenAmount_sum, contractStructure, transaction_data, blockinfo, parsed_data, connection, contract_name=contractStructure['contractName'], contract_address=contractStructure['contractAddress'], transaction_subType='maximumsubscriptionamount')
        if not success_returnval:
            return 0
    return 1


def check_contract_status(contractName, contractAddress):
    """Return the most recent status recorded for a contract.

    Contract status is tracked in two system.db tables (activecontracts and
    time_actions); the time_actions row with the highest id is the current one.
    """
    connection = create_database_connection('system_dbs')
    # NOTE(review): contract names originate from on-chain flodata; f-string
    # SQL is an injection risk — consider parameterized queries.
    contract_status = connection.execute(f'SELECT status FROM time_actions WHERE id=(SELECT MAX(id) FROM time_actions WHERE contractName="{contractName}" AND contractAddress="{contractAddress}")').fetchall()
    # Raises IndexError if no time_actions row exists for this contract.
    return contract_status[0][0]
def close_expire_contract(contractStructure, contractStatus, transactionHash, blockNumber, blockHash, incorporationDate, expiryDate, closeDate, trigger_time, trigger_activity, contractName, contractAddress, contractType, tokens_db, parsed_data, blockHeight):
    """Persist a contract's closed/expired state into system.db.

    Appends one row each to activecontracts and time_actions; the newest
    time_actions row per contract is what check_contract_status reads back.
    """
    connection = create_database_connection('system_dbs', {'db_name':'system'})

    # Row for the activecontracts audit table (id autofilled via None).
    active_row = (None, contractStructure['contractName'], contractStructure['contractAddress'],
                  contractStatus, contractStructure['tokenIdentification'],
                  contractStructure['contractType'], transactionHash, blockNumber,
                  blockHash, incorporationDate, expiryDate, closeDate)
    connection.execute('INSERT INTO activecontracts VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', active_row)

    # Matching state-transition row for the time_actions table.
    time_row = (None, trigger_time, trigger_activity, contractStatus, contractName,
                contractAddress, contractType, tokens_db, parsed_data,
                transactionHash, blockHeight)
    connection.execute('INSERT INTO time_actions VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', time_row)
    connection.close()


def return_active_contracts(session):
    """Return the newest time_actions row for every contract that is still
    'active' with activity 'contract-time-trigger'."""
    latest_active_sql = '''SELECT t1.* FROM time_actions t1 JOIN ( SELECT contractName, contractAddress, MAX(id) AS max_id FROM time_actions GROUP BY contractName, contractAddress ) t2 ON t1.contractName = t2.contractName AND t1.contractAddress = t2.contractAddress AND t1.id = t2.max_id WHERE t1.status = 'active' AND t1.activity = 'contract-time-trigger' '''
    return session.execute(latest_active_sql).all()


def return_active_deposits(session):
    """Return TimeActions rows for deposits that are still active.

    A deposit counts as active when its transactionHash occurs exactly once
    in time_actions (no later superseding row exists).
    """
    # todo - sqlalchemy gives me warning with the following method
    single_occurrence_ids = session.query(TimeActions.id).group_by(TimeActions.transactionHash).having(func.count(TimeActions.transactionHash)==1).subquery()
    return session.query(TimeActions).filter(TimeActions.id.in_(single_occurrence_ids), TimeActions.status=='active', TimeActions.activity=='contract-deposit').all()
def checkLocal_expiry_trigger_deposit(blockinfo):
    """Run per-block housekeeping for time-based contract events.

    For every active 'contract-time-trigger' contract, expire or trigger it
    depending on its type ('exitconditions' = committee-triggered,
    'payeeAddress' = internally triggered) and whether its expiry time has
    passed. For every active contract deposit whose expiry has passed, return
    the remaining balance to the depositor and mark the deposit 'returned'.
    """
    # Connect to system.db with a session
    systemdb_session = create_database_session_orm('system_dbs', {'db_name':'system'}, SystemBase)
    timeactions_tx_hashes = []
    active_contracts = return_active_contracts(systemdb_session)
    active_deposits = return_active_deposits(systemdb_session)

    for query in active_contracts:
        query_time = convert_datetime_to_arrowobject(query.time)
        blocktime = parsing.arrow.get(blockinfo['time']).to('Asia/Kolkata')
        if query.activity == 'contract-time-trigger':
            contractStructure = extract_contractStructure(query.contractName, query.contractAddress)
            connection = create_database_connection('smart_contract', {'contract_name':f"{query.contractName}", 'contract_address':f"{query.contractAddress}"})
            if contractStructure['contractType'] == 'one-time-event':
                # TODO - FIGURE A BETTER SOLUTION FOR THIS
                # Fabricate a deterministic txid for the internally generated
                # trigger by asking an external hashing service.
                tx_type = 'trigger'
                data = [blockinfo['hash'], blockinfo['height'] , blockinfo['time'], blockinfo['size'], tx_type]

                # NOTE(review): hard dependency on an external service; a 404
                # aborts the whole process via sys.exit — confirm intended.
                response = requests.get(f'https://stdops.ranchimall.net/hash?data={data}')
                if response.status_code == 200:
                    txid = response.json()
                elif response.status_code == 404:
                    logger.info('Internal trigger has failed')
                    sys.exit(0)

                # Synthetic transaction/parsed payloads representing the trigger.
                transaction_data = {}
                transaction_data['txid'] = txid
                parsed_data = {}
                parsed_data['type'] = tx_type

                activecontracts_table_info = systemdb_session.query(ActiveContracts.blockHash, ActiveContracts.incorporationDate).filter(ActiveContracts.contractName==query.contractName, ActiveContracts.contractAddress==query.contractAddress, ActiveContracts.status=='active').first()

                if 'exitconditions' in contractStructure: # Committee trigger contract type
                    tokenAmount_sum = connection.execute('SELECT IFNULL(sum(tokenAmount), 0) FROM contractparticipants').fetchall()[0][0]
                    # maximumsubscription check, if reached then expire the contract
                    if 'maximumsubscriptionamount' in contractStructure:
                        maximumsubscriptionamount = float(contractStructure['maximumsubscriptionamount'])
                        if tokenAmount_sum >= maximumsubscriptionamount:
                            # Expire the contract
                            close_expire_contract(contractStructure, 'expired', transaction_data['txid'], blockinfo['height'], blockinfo['hash'], activecontracts_table_info.incorporationDate, blockinfo['time'], None, query.time, query.activity, query.contractName, query.contractAddress, query.contractType, query.tokens_db, query.parsed_data, blockinfo['height'])

                    if blocktime > query_time:
                        # Expiry time passed: pay back if the minimum was not
                        # reached, otherwise expire awaiting committee trigger.
                        if 'minimumsubscriptionamount' in contractStructure:
                            if process_minimum_subscriptionamount(contractStructure, connection, blockinfo, transaction_data, parsed_data):
                                close_expire_contract(contractStructure, 'closed', transaction_data['txid'], blockinfo['height'], blockinfo['hash'], activecontracts_table_info.incorporationDate, blockinfo['time'], blockinfo['time'], query.time, query.activity, query.contractName, query.contractAddress, query.contractType, query.tokens_db, query.parsed_data, blockinfo['height'])
                                # NOTE(review): returning here skips any other
                                # active contracts/deposits in this block.
                                return

                        # Expire the contract
                        close_expire_contract(contractStructure, 'expired', transaction_data['txid'], blockinfo['height'], blockinfo['hash'], activecontracts_table_info.incorporationDate, blockinfo['time'], None, query.time, query.activity, query.contractName, query.contractAddress, query.contractType, query.tokens_db, query.parsed_data, blockinfo['height'])

                elif 'payeeAddress' in contractStructure: # Internal trigger contract type
                    tokenAmount_sum = connection.execute('SELECT IFNULL(sum(tokenAmount), 0) FROM contractparticipants').fetchall()[0][0]
                    # maximumsubscription check, if reached then trigger the contract
                    if 'maximumsubscriptionamount' in contractStructure:
                        maximumsubscriptionamount = float(contractStructure['maximumsubscriptionamount'])
                        if tokenAmount_sum >= maximumsubscriptionamount:
                            # Trigger the contract
                            success_returnval = trigger_internal_contract(tokenAmount_sum, contractStructure, transaction_data, blockinfo, parsed_data, connection, contract_name=query.contractName, contract_address=query.contractAddress, transaction_subType='maximumsubscriptionamount')
                            if not success_returnval:
                                return 0

                            close_expire_contract(contractStructure, 'closed', transaction_data['txid'], blockinfo['height'], blockinfo['hash'], activecontracts_table_info.incorporationDate, blockinfo['time'], blockinfo['time'], query.time, query.activity, query.contractName, query.contractAddress, query.contractType, query.tokens_db, query.parsed_data, blockinfo['height'])
                            return

                    if blocktime > query_time:
                        # Expiry time passed: pay back below minimum, else
                        # distribute the pool to the payee addresses.
                        if 'minimumsubscriptionamount' in contractStructure:
                            if process_minimum_subscriptionamount(contractStructure, connection, blockinfo, transaction_data, parsed_data):
                                close_expire_contract(contractStructure, 'closed', transaction_data['txid'], blockinfo['height'], blockinfo['hash'], activecontracts_table_info.incorporationDate, blockinfo['time'], blockinfo['time'], query.time, query.activity, query.contractName, query.contractAddress, query.contractType, query.tokens_db, query.parsed_data, blockinfo['height'])
                                return

                        # Trigger the contract
                        success_returnval = trigger_internal_contract(tokenAmount_sum, contractStructure, transaction_data, blockinfo, parsed_data, connection, contract_name=query.contractName, contract_address=query.contractAddress, transaction_subType='expiryTime')
                        if not success_returnval:
                            return 0

                        close_expire_contract(contractStructure, 'closed', transaction_data['txid'], blockinfo['height'], blockinfo['hash'], activecontracts_table_info.incorporationDate, blockinfo['time'], blockinfo['time'], query.time, query.activity, query.contractName, query.contractAddress, query.contractType, query.tokens_db, query.parsed_data, blockinfo['height'])
                        return

    for query in active_deposits:
        query_time = convert_datetime_to_arrowobject(query.time)
        blocktime = parsing.arrow.get(blockinfo['time']).to('Asia/Kolkata')
        if query.activity == 'contract-deposit':
            if blocktime > query_time:
                # find the status of the deposit
                # the deposit is unique
                # find the total sum to be returned from the smart contract's participation table
                contract_db = create_database_session_orm('smart_contract', {'contract_name': query.contractName, 'contract_address': query.contractAddress}, ContractBase)

                deposit_query = contract_db.query(ContractDeposits).filter(ContractDeposits.transactionHash == query.transactionHash).first()
                # The latest row for this hash carries the remaining balance.
                deposit_last_latest_entry = contract_db.query(ContractDeposits).filter(ContractDeposits.transactionHash == query.transactionHash).order_by(ContractDeposits.id.desc()).first()
                returnAmount = deposit_last_latest_entry.depositBalance
                depositorAddress = deposit_last_latest_entry.depositorAddress

                # Do a token transfer back to the deposit address
                sellingToken = contract_db.query(ContractStructure.value).filter(ContractStructure.attribute == 'selling_token').first()[0]
                tx_block_string = f"{query.transactionHash}{blockinfo['height']}".encode('utf-8').hex()
                parsed_data = {}
                parsed_data['type'] = 'expired_deposit'
                transaction_data = {}
                transaction_data['txid'] = query.transactionHash
                transaction_data['blockheight'] = blockinfo['height']
                returnval = transferToken(sellingToken, returnAmount, query.contractAddress, depositorAddress, transaction_data=transaction_data, parsed_data=parsed_data, blockinfo=blockinfo)
                if returnval == 0:
                    logger.critical("Something went wrong in the token transfer method while return contract deposit. THIS IS CRITICAL ERROR")
                    return
                else:
                    # Record the reversal as a negative deposit with balance 0.
                    contract_db.add(ContractDeposits(
                        depositorAddress = deposit_last_latest_entry.depositorAddress,
                        depositAmount = -abs(returnAmount),
                        depositBalance = 0,
                        expiryTime = deposit_last_latest_entry.expiryTime,
                        unix_expiryTime = deposit_last_latest_entry.unix_expiryTime,
                        status = 'deposit-return',
                        transactionHash = deposit_last_latest_entry.transactionHash,
                        blockNumber = blockinfo['height'],
                        blockHash = blockinfo['hash']
                    ))

                    add_contract_transaction_history(contract_name=query.contractName, contract_address=query.contractAddress, transactionType='smartContractDepositReturn', transactionSubType=None, sourceFloAddress=query.contractAddress, destFloAddress=depositorAddress, transferAmount=returnAmount, blockNumber=blockinfo['height'], blockHash=blockinfo['hash'], blocktime=blockinfo['time'], transactionHash=deposit_last_latest_entry.transactionHash, jsonData=json.dumps(transaction_data), parsedFloData=json.dumps(parsed_data))

                    # Supersede the active time_actions row: a second row for
                    # the same hash makes return_active_deposits skip it.
                    systemdb_session.add(TimeActions(
                        time = query.time,
                        activity = query.activity,
                        status = 'returned',
                        contractName = query.contractName,
                        contractAddress = query.contractAddress,
                        contractType = query.contractType,
                        tokens_db = query.tokens_db,
                        parsed_data = query.parsed_data,
                        transactionHash = query.transactionHash,
                        blockNumber = blockinfo['height']
                    ))

                    contract_db.commit()
                    systemdb_session.commit()
                    updateLatestTransaction(transaction_data, parsed_data, f"{query.contractName}-{query.contractAddress}")
def extract_contractStructure(contractName, contractAddress):
    """Rebuild the contract-structure dict from the contract's database.

    Rows in contractstructure are (attribute, value) pairs; repeated
    'exitconditions' rows are collected into a nested dict keyed by their
    order of appearance (0, 1, 2, ...).
    """
    connection = create_database_connection('smart_contract', {'contract_name':f"{contractName}", 'contract_address':f"{contractAddress}"})
    rows = connection.execute("SELECT attribute, value FROM contractstructure WHERE attribute != 'flodata'").fetchall()

    structure = {}
    exit_choices = {}
    for attribute, value in rows:
        if attribute == 'exitconditions':
            # Sequential integer keys preserve the stored order of choices.
            exit_choices[len(exit_choices)] = value
        else:
            structure[attribute] = value

    if exit_choices:
        structure['exitconditions'] = exit_choices
    return structure
Transaction {transaction_data['txid']} is rejected") + return 0 + + inputlist = [vinlist[0][0], totalinputval] + inputadd = vinlist[0][0] + + # todo Rule 42 - If the number of vout is more than 2, reject the transaction + if len(transaction_data["vout"]) > 2: + logger.info(f"System has found more than 2 address as part of vout. Transaction {transaction_data['txid']} is rejected") + return 0 + + # todo Rule 43 - A transaction accepted by the system has two vouts, 1. The FLO address of the receiver + # 2. Flo address of the sender as change address. If the vout address is change address, then the other adddress + # is the recevier address + + outputlist = [] + addresscounter = 0 + inputcounter = 0 + for obj in transaction_data["vout"]: + if 'type' not in obj["scriptPubKey"].keys(): + continue + if obj["scriptPubKey"]["type"] in ["pubkeyhash","scripthash"]: + addresscounter = addresscounter + 1 + if inputlist[0] == obj["scriptPubKey"]["addresses"][0]: + inputcounter = inputcounter + 1 + continue + outputlist.append([obj["scriptPubKey"]["addresses"][0], obj["value"]]) + + if addresscounter == inputcounter: + outputlist = [inputlist[0]] + elif len(outputlist) != 1: + logger.info(f"Transaction's change is not coming back to the input address. Transaction {transaction_data['txid']} is rejected") + return 0 + else: + outputlist = outputlist[0] + + logger.info(f"Input address list : {inputlist}") + logger.info(f"Output address list : {outputlist}") + + transaction_data['senderAddress'] = inputlist[0] + transaction_data['receiverAddress'] = outputlist[0] + + # All FLO checks completed at this point. 
+ # Semantic rules for parsed data begins + + # todo Rule 44 - Process as per the type of transaction + if parsed_data['type'] == 'transfer': + logger.info(f"Transaction {transaction_data['txid']} is of the type transfer") + + # todo Rule 45 - If the transfer type is token, then call the function transferToken to adjust the balances + if parsed_data['transferType'] == 'token': + if not is_a_contract_address(inputlist[0]) and not is_a_contract_address(outputlist[0]): + # check if the token exists in the database + if check_database_existence('token', {'token_name':f"{parsed_data['tokenIdentification']}"}): + # Pull details of the token type from system.db database + connection = create_database_connection('system_dbs', {'db_name':'system'}) + db_details = connection.execute("SELECT db_name, db_type, keyword, object_format FROM databaseTypeMapping WHERE db_name='{}'".format(parsed_data['tokenIdentification'])) + db_details = list(zip(*db_details)) + if db_details[1][0] == 'infinite-token': + db_object = json.loads(db_details[3][0]) + if db_object['root_address'] == inputlist[0]: + isInfiniteToken = True + else: + isInfiniteToken = False + else: + isInfiniteToken = False + + # Check if the transaction hash already exists in the token db + connection = create_database_connection('token', {'token_name':f"{parsed_data['tokenIdentification']}"}) + blockno_txhash = connection.execute('SELECT blockNumber, transactionHash FROM transactionHistory').fetchall() + connection.close() + blockno_txhash_T = list(zip(*blockno_txhash)) + + if transaction_data['txid'] in list(blockno_txhash_T[1]): + logger.warning(f"Transaction {transaction_data['txid']} already exists in the token db. This is unusual, please check your code") + pushData_SSEapi(f"Error | Transaction {transaction_data['txid']} already exists in the token db. 
This is unusual, please check your code") + return 0 + + returnval = transferToken(parsed_data['tokenIdentification'], parsed_data['tokenAmount'], inputlist[0],outputlist[0], transaction_data, parsed_data, isInfiniteToken=isInfiniteToken, blockinfo = blockinfo) + if returnval == 0: + logger.info("Something went wrong in the token transfer method") + pushData_SSEapi(f"Error | Something went wrong while doing the internal db transactions for {transaction_data['txid']}") + return 0 + else: + updateLatestTransaction(transaction_data, parsed_data, f"{parsed_data['tokenIdentification']}", transactionType='token-transfer') + + # If this is the first interaction of the outputlist's address with the given token name, add it to token mapping + connection = create_database_connection('system_dbs', {'db_name':'system'}) + firstInteractionCheck = connection.execute(f"SELECT * FROM tokenAddressMapping WHERE tokenAddress='{outputlist[0]}' AND token='{parsed_data['tokenIdentification']}'").fetchall() + + if len(firstInteractionCheck) == 0: + connection.execute(f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{outputlist[0]}', '{parsed_data['tokenIdentification']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}')") + + connection.close() + + # Pass information to SSE channel + headers = {'Accept': 'application/json', 'Content-Type': 'application/json'} + # r = requests.post(tokenapi_sse_url, json={f"message': 'Token Transfer | name:{parsed_data['tokenIdentification']} | transactionHash:{transaction_data['txid']}"}, headers=headers) + return 1 + else: + rejectComment = f"Token transfer at transaction {transaction_data['txid']} rejected as a token with the name {parsed_data['tokenIdentification']} doesnt not exist" + logger.info(rejectComment) + rejected_transaction_history(transaction_data, parsed_data, inputadd, outputlist[0], rejectComment) + 
pushData_SSEapi(rejectComment) + return 0 + + else: + rejectComment = f"Token transfer at transaction {transaction_data['txid']} rejected as either the input address or the output address is part of a contract address" + logger.info(rejectComment) + rejected_transaction_history(transaction_data, parsed_data, inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + + # todo Rule 46 - If the transfer type is smart contract, then call the function transferToken to do sanity checks & lock the balance + elif parsed_data['transferType'] == 'smartContract': + if check_database_existence('smart_contract', {'contract_name':f"{parsed_data['contractName']}", 'contract_address':f"{outputlist[0]}"}): + # Check type of contract and categorize between into ote-participation or continuous-event participation + # todo - replace all connection queries with session queries + connection = create_database_connection('smart_contract', {'contract_name':f"{parsed_data['contractName']}", 'contract_address':f"{outputlist[0]}"}) + contract_session = create_database_session_orm('smart_contract', {'contract_name':f"{parsed_data['contractName']}", 'contract_address':f"{outputlist[0]}"}, ContractBase) + contract_type = contract_session.query(ContractStructure.value).filter(ContractStructure.attribute == 'contractType').first()[0] + + contractStructure = extract_contractStructure(parsed_data['contractName'], outputlist[0]) + + if contract_type == 'one-time-event': + # Check if the transaction hash already exists in the contract db (Safety check) + participantAdd_txhash = connection.execute('SELECT participantAddress, transactionHash FROM contractparticipants').fetchall() + participantAdd_txhash_T = list(zip(*participantAdd_txhash)) + + if len(participantAdd_txhash) != 0 and transaction_data['txid'] in list(participantAdd_txhash_T[1]): + logger.warning(f"Transaction {transaction_data['txid']} rejected as it already exists in the Smart Contract db. 
This is unusual, please check your code") + pushData_SSEapi(f"Error | Transaction {transaction_data['txid']} rejected as it already exists in the Smart Contract db. This is unusual, please check your code") + return 0 + + # if contractAddress was passed, then check if it matches the output address of this contract + if 'contractAddress' in parsed_data: + if parsed_data['contractAddress'] != outputlist[0]: + rejectComment = f"Contract participation at transaction {transaction_data['txid']} rejected as contractAddress specified in flodata, {parsed_data['contractAddress']}, doesnt not match with transaction's output address {outputlist[0]}" + logger.info(rejectComment) + # Store transfer as part of RejectedContractTransactionHistory + rejected_contract_transaction_history(transaction_data, parsed_data, 'participation', outputlist[0], inputadd, outputlist[0], rejectComment) + # Pass information to SSE channel + pushData_SSEapi(f"Error| Mismatch in contract address specified in flodata and the output address of the transaction {transaction_data['txid']}") + return 0 + + # check the status of the contract + contractStatus = check_contract_status(parsed_data['contractName'], outputlist[0]) + contractList = [] + + if contractStatus == 'closed': + rejectComment = f"Transaction {transaction_data['txid']} closed as Smart contract {parsed_data['contractName']} at the {outputlist[0]} is closed" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'participation', outputlist[0], inputadd, outputlist[0], rejectComment) + return 0 + else: + session = create_database_session_orm('smart_contract', {'contract_name': f"{parsed_data['contractName']}", 'contract_address': f"{outputlist[0]}"}, ContractBase) + result = session.query(ContractStructure).filter_by(attribute='expiryTime').all() + session.close() + if result: + # now parse the expiry time in python + expirytime = result[0].value.strip() + expirytime_split = expirytime.split(' ') + 
parse_string = '{}/{}/{} {}'.format(expirytime_split[3], parsing.months[expirytime_split[1]], expirytime_split[2], expirytime_split[4]) + expirytime_object = parsing.arrow.get(parse_string, 'YYYY/M/D HH:mm:ss').replace(tzinfo=expirytime_split[5][3:]) + blocktime_object = parsing.arrow.get(transaction_data['time']).to('Asia/Kolkata') + + if blocktime_object > expirytime_object: + rejectComment = f"Transaction {transaction_data['txid']} rejected as Smart contract {parsed_data['contractName']}-{outputlist[0]} has expired and will not accept any user participation" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'participation', outputlist[0], inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + + # check if user choice has been passed, to the wrong contract type + if 'userChoice' in parsed_data and 'exitconditions' not in contractStructure: + rejectComment = f"Transaction {transaction_data['txid']} rejected as userChoice, {parsed_data['userChoice']}, has been passed to Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} which doesn't accept any userChoice" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'participation', outputlist[0], inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + + # check if the right token is being sent for participation + if parsed_data['tokenIdentification'] != contractStructure['tokenIdentification']: + rejectComment = f"Transaction {transaction_data['txid']} rejected as the token being transferred, {parsed_data['tokenIdentidication'].upper()}, is not part of the structure of Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]}" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'participation', outputlist[0], inputadd, outputlist[0], rejectComment) + 
pushData_SSEapi(rejectComment) + return 0 + + # Check if contractAmount is part of the contract structure, and enforce it if it is + if 'contractAmount' in contractStructure: + if float(contractStructure['contractAmount']) != float(parsed_data['tokenAmount']): + rejectComment = f"Transaction {transaction_data['txid']} rejected as contractAmount being transferred is not part of the structure of Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]}" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'participation', outputlist[0], inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + + partialTransferCounter = 0 + # Check if maximum subscription amount has reached + if 'maximumsubscriptionamount' in contractStructure: + # now parse the expiry time in python + maximumsubscriptionamount = float(contractStructure['maximumsubscriptionamount']) + session = create_database_session_orm('smart_contract', {'contract_name': f"{parsed_data['contractName']}", 'contract_address': f"{outputlist[0]}"}, ContractBase) + amountDeposited = session.query(func.sum(ContractParticipants.tokenAmount)).all()[0][0] + session.close() + if amountDeposited is None: + amountDeposited = 0 + + if amountDeposited >= maximumsubscriptionamount: + rejectComment = f"Transaction {transaction_data['txid']} rejected as maximum subscription amount has been reached for the Smart contract named {parsed_data['contractName']} at the address {outputlist[0]}" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'participation', outputlist[0], inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + elif ((float(amountDeposited) + float(parsed_data['tokenAmount'])) > maximumsubscriptionamount): + if 'contractAmount' in contractStructure: + rejectComment = f"Transaction {transaction_data['txid']} rejected as the contractAmount 
surpasses the maximum subscription amount, {contractStructure['maximumsubscriptionamount']} {contractStructure['tokenIdentification'].upper()}, for the Smart contract named {parsed_data['contractName']} at the address {outputlist[0]}" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'participation', outputlist[0], inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + else: + partialTransferCounter = 1 + + # Check if exitcondition exists as part of contractstructure and is given in right format + if 'exitconditions' in contractStructure: + # This means the contract has an external trigger, ie. trigger coming from the contract committee + exitconditionsList = [] + for condition in contractStructure['exitconditions']: + exitconditionsList.append(contractStructure['exitconditions'][condition]) + + if parsed_data['userChoice'] in exitconditionsList: + if partialTransferCounter == 0: + # Check if the tokenAmount being transferred exists in the address & do the token transfer + returnval = transferToken(parsed_data['tokenIdentification'], parsed_data['tokenAmount'], inputlist[0], outputlist[0], transaction_data, parsed_data, blockinfo = blockinfo) + if returnval != 0: + # Store participant details in the smart contract's db + session.add(ContractParticipants(participantAddress=inputadd, + tokenAmount=parsed_data['tokenAmount'], + userChoice=parsed_data['userChoice'], + transactionHash=transaction_data['txid'], + blockNumber=transaction_data['blockheight'], + blockHash=transaction_data['blockhash'])) + session.commit() + + # Store transfer as part of ContractTransactionHistory + add_contract_transaction_history(contract_name=parsed_data['contractName'], contract_address=outputlist[0], transactionType='participation', transactionSubType=None, sourceFloAddress=inputadd, destFloAddress=outputlist[0], transferAmount=parsed_data['tokenAmount'], blockNumber=blockinfo['height'], 
blockHash=blockinfo['hash'], blocktime=blockinfo['time'], transactionHash=transaction_data['txid'], jsonData=json.dumps(transaction_data), parsedFloData=json.dumps(parsed_data)) + + # Store a mapping of participant address -> Contract participated in + session = create_database_session_orm('system_dbs', {'db_name': "system"}, SystemBase) + session.add(ContractAddressMapping(address=inputadd, addressType='participant', + tokenAmount=parsed_data['tokenAmount'], + contractName=parsed_data['contractName'], + contractAddress=outputlist[0], + transactionHash=transaction_data['txid'], + blockNumber=transaction_data['blockheight'], + blockHash=transaction_data['blockhash'])) + session.commit() + + # If this is the first interaction of the outputlist's address with the given token name, add it to token mapping + connection = create_database_connection('system_dbs', {'db_name':'system'}) + firstInteractionCheck = connection.execute(f"SELECT * FROM tokenAddressMapping WHERE tokenAddress='{outputlist[0]}' AND token='{parsed_data['tokenIdentification']}'").fetchall() + if len(firstInteractionCheck) == 0: + connection.execute(f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{outputlist[0]}', '{parsed_data['tokenIdentification']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}')") + connection.close() + updateLatestTransaction(transaction_data, parsed_data, f"{parsed_data['contractName']}-{outputlist[0]}", transactionType='ote-externaltrigger-participation') + return 1 + + else: + logger.info("Something went wrong in the smartcontract token transfer method") + return 0 + elif partialTransferCounter == 1: + # Transfer only part of the tokens users specified, till the time it reaches maximumamount + returnval = transferToken(parsed_data['tokenIdentification'], maximumsubscriptionamount - amountDeposited, inputlist[0], outputlist[0], transaction_data, parsed_data, blockinfo 
= blockinfo) + if returnval != 0: + # Store participant details in the smart contract's db + session.add(ContractParticipants(participantAddress=inputadd, + tokenAmount=maximumsubscriptionamount - amountDeposited, + userChoice=parsed_data['userChoice'], + transactionHash=transaction_data['txid'], + blockNumber=transaction_data['blockheight'], + blockHash=transaction_data['blockhash'])) + session.commit() + session.close() + + # Store a mapping of participant address -> Contract participated in + session = create_database_session_orm('system_dbs', {'db_name': "system"}, SystemBase) + session.add(ContractAddressMapping(address=inputadd, addressType='participant', + tokenAmount=maximumsubscriptionamount - amountDeposited, + contractName=parsed_data['contractName'], + contractAddress=outputlist[0], + transactionHash=transaction_data['txid'], + blockNumber=transaction_data['blockheight'], + blockHash=transaction_data['blockhash'])) + session.commit() + session.close() + updateLatestTransaction(transaction_data, parsed_data, f"{parsed_data['contractName']}-{outputlist[0]}", transactionType='ote-externaltrigger-participation') + return 1 + + else: + logger.info("Something went wrong in the smartcontract token transfer method") + return 0 + + else: + rejectComment = f"Transaction {transaction_data['txid']} rejected as wrong userchoice entered for the Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]}" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'participation', outputlist[0], inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + + elif 'payeeAddress' in contractStructure: + # this means the contract is of the type internal trigger + if partialTransferCounter == 0: + transferAmount = parsed_data['tokenAmount'] + elif partialTransferCounter == 1: + transferAmount = maximumsubscriptionamount - amountDeposited + + # Check if the tokenAmount being transferred 
exists in the address & do the token transfer + returnval = transferToken(parsed_data['tokenIdentification'], transferAmount, inputlist[0], outputlist[0], transaction_data, parsed_data, blockinfo = blockinfo) + if returnval != 0: + # Store participant details in the smart contract's db + session.add(ContractParticipants(participantAddress=inputadd, tokenAmount=transferAmount, userChoice='-', transactionHash=transaction_data['txid'], blockNumber=transaction_data['blockheight'], blockHash=transaction_data['blockhash'])) + + # Store transfer as part of ContractTransactionHistory + add_contract_transaction_history(contract_name=parsed_data['contractName'], contract_address=outputlist[0], transactionType='participation', transactionSubType=None, sourceFloAddress=inputadd, destFloAddress=outputlist[0], transferAmount=transferAmount, blockNumber=blockinfo['height'], blockHash=blockinfo['hash'], blocktime=blockinfo['time'], transactionHash=transaction_data['txid'], jsonData=json.dumps(transaction_data), parsedFloData=json.dumps(parsed_data)) + session.commit() + session.close() + + # Store a mapping of participant address -> Contract participated in + session = create_database_session_orm('system_dbs', {'db_name': "system"}, SystemBase) + session.add(ContractAddressMapping(address=inputadd, addressType='participant', + tokenAmount=transferAmount, + contractName=parsed_data['contractName'], + contractAddress=outputlist[0], + transactionHash=transaction_data['txid'], + blockNumber=transaction_data['blockheight'], + blockHash=transaction_data['blockhash'])) + session.commit() + + # If this is the first interaction of the outputlist's address with the given token name, add it to token mapping + connection = create_database_connection('system_dbs', {'db_name':'system'}) + firstInteractionCheck = connection.execute(f"SELECT * FROM tokenAddressMapping WHERE tokenAddress='{outputlist[0]}' AND token='{parsed_data['tokenIdentification']}'").fetchall() + if len(firstInteractionCheck) 
== 0: + connection.execute(f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{outputlist[0]}', '{parsed_data['tokenIdentification']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}')") + connection.close() + updateLatestTransaction(transaction_data, parsed_data, f"{parsed_data['contractName']}-{outputlist[0]}", transactionType='ote-internaltrigger-participation') + return 1 + + else: + logger.info("Something went wrong in the smartcontract token transfer method") + return 0 + + elif contract_type == 'continuos-event': + contract_subtype = contract_session.query(ContractStructure.value).filter(ContractStructure.attribute == 'subtype').first()[0] + if contract_subtype == 'tokenswap': + # Check if the transaction hash already exists in the contract db (Safety check) + participantAdd_txhash = connection.execute('SELECT participantAddress, transactionHash FROM contractparticipants').fetchall() + participantAdd_txhash_T = list(zip(*participantAdd_txhash)) + + if len(participantAdd_txhash) != 0 and transaction_data['txid'] in list(participantAdd_txhash_T[1]): + logger.warning(f"Transaction {transaction_data['txid']} rejected as it already exists in the Smart Contract db. This is unusual, please check your code") + pushData_SSEapi(f"Error | Transaction {transaction_data['txid']} rejected as it already exists in the Smart Contract db. 
This is unusual, please check your code") + return 0 + + # if contractAddress was passed, then check if it matches the output address of this contract + if 'contractAddress' in parsed_data: + if parsed_data['contractAddress'] != outputlist[0]: + rejectComment = f"Contract participation at transaction {transaction_data['txid']} rejected as contractAddress specified in flodata, {parsed_data['contractAddress']}, doesnt not match with transaction's output address {outputlist[0]}" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'participation', outputlist[0], inputadd, outputlist[0], rejectComment) + # Pass information to SSE channel + pushData_SSEapi(f"Error| Mismatch in contract address specified in flodata and the output address of the transaction {transaction_data['txid']}") + return 0 + + if contractStructure['pricetype'] in ['predetermined','determined']: + swapPrice = float(contractStructure['price']) + elif contractStructure['pricetype'] == 'dynamic': + # Oracle address cannot be a participant in the contract. Check if the sender address is oracle address + if transaction_data['senderAddress'] == contractStructure['oracle_address']: + logger.warning(f"Transaction {transaction_data['txid']} rejected as the oracle addess {contractStructure['oracle_address']} is attempting to participate. Please report this to the contract owner") + pushData_SSEapi(f"Transaction {transaction_data['txid']} rejected as the oracle addess {contractStructure['oracle_address']} is attempting to participate. 
Please report this to the contract owner") + return 0 + + swapPrice = fetchDynamicSwapPrice(contractStructure, blockinfo) + + swapAmount = float(parsed_data['tokenAmount'])/swapPrice + + # Check if the swap amount is available in the deposits of the selling token + # if yes do the transfers, otherwise reject the transaction + # + subquery = contract_session.query(func.max(ContractDeposits.id)).group_by(ContractDeposits.transactionHash) + active_contract_deposits = contract_session.query(ContractDeposits).filter(ContractDeposits.id.in_(subquery)).filter(ContractDeposits.status != 'deposit-return').filter(ContractDeposits.status != 'consumed').filter(ContractDeposits.status == 'active').all() + + # todo - what is the role of the next line? cleanup if not useful + available_deposits = active_contract_deposits[:] + available_deposit_sum = contract_session.query(func.sum(ContractDeposits.depositBalance)).filter(ContractDeposits.id.in_(subquery)).filter(ContractDeposits.status != 'deposit-return').filter(ContractDeposits.status == 'active').all() + if available_deposit_sum[0][0] is None: + available_deposit_sum = 0 + else: + available_deposit_sum = float(available_deposit_sum[0][0]) + + if available_deposit_sum >= swapAmount: + # accepting token transfer from participant to smart contract address + returnval = transferToken(parsed_data['tokenIdentification'], parsed_data['tokenAmount'], inputlist[0], outputlist[0], transaction_data=transaction_data, parsed_data=parsed_data, isInfiniteToken=None, blockinfo=blockinfo, transactionType='tokenswapParticipation') + if returnval == 0: + logger.info("ERROR | Something went wrong in the token transfer method while doing local Smart Contract Particiaption") + return 0 + + # If this is the first interaction of the outputlist's address with the given token name, add it to token mapping + systemdb_connection = create_database_connection('system_dbs', {'db_name':'system'}) + firstInteractionCheck = systemdb_connection.execute(f"SELECT 
* FROM tokenAddressMapping WHERE tokenAddress='{outputlist[0]}' AND token='{parsed_data['tokenIdentification']}'").fetchall() + if len(firstInteractionCheck) == 0: + systemdb_connection.execute(f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{outputlist[0]}', '{parsed_data['tokenIdentification']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}')") + systemdb_connection.close() + + + # ContractDepositTable + # For each unique deposit( address, expirydate, blocknumber) there will be 2 entries added to the table + # the consumption of the deposits will start form the top of the table + deposit_counter = 0 + remaining_amount = swapAmount + for a_deposit in available_deposits: + if a_deposit.depositBalance > remaining_amount: + # accepting token transfer from the contract to depositor's address + returnval = transferToken(contractStructure['accepting_token'], remaining_amount * swapPrice, contractStructure['contractAddress'], a_deposit.depositorAddress, transaction_data=transaction_data, parsed_data=parsed_data, isInfiniteToken=None, blockinfo=blockinfo, transactionType='tokenswapDepositSettlement') + if returnval == 0: + logger.info("CRITICAL ERROR | Something went wrong in the token transfer method while doing local Smart Contract Particiaption deposit swap operation") + return 0 + + # If this is the first interaction of the outputlist's address with the given token name, add it to token mapping + systemdb_connection = create_database_connection('system_dbs', {'db_name':'system'}) + firstInteractionCheck = systemdb_connection.execute(f"SELECT * FROM tokenAddressMapping WHERE tokenAddress='{a_deposit.depositorAddress}' AND token='{contractStructure['accepting_token']}'").fetchall() + if len(firstInteractionCheck) == 0: + systemdb_connection.execute(f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES 
('{a_deposit.depositorAddress}', '{contractStructure['accepting_token']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}')") + systemdb_connection.close() + + + contract_session.add(ContractDeposits( depositorAddress= a_deposit.depositorAddress, + depositAmount= 0 - remaining_amount, + status='deposit-honor', + transactionHash= a_deposit.transactionHash, + blockNumber= blockinfo['height'], + blockHash= blockinfo['hash'])) + + # if the total is consumsed then the following entry won't take place + contract_session.add(ContractDeposits( depositorAddress= a_deposit.depositorAddress, + depositBalance= a_deposit.depositBalance - remaining_amount, + expiryTime = a_deposit.expiryTime, + unix_expiryTime = a_deposit.unix_expiryTime, + status='active', + transactionHash= a_deposit.transactionHash, + blockNumber= blockinfo['height'], + blockHash= blockinfo['hash'])) + # ConsumedInfoTable + contract_session.add(ConsumedInfo( id_deposittable= a_deposit.id, + transactionHash= a_deposit.transactionHash, + blockNumber= blockinfo['height'])) + remaining_amount = remaining_amount - a_deposit.depositBalance + remaining_amount = 0 + break + + elif a_deposit.depositBalance <= remaining_amount: + # accepting token transfer from the contract to depositor's address + returnval = transferToken(contractStructure['accepting_token'], a_deposit.depositBalance * swapPrice, contractStructure['contractAddress'], a_deposit.depositorAddress, transaction_data=transaction_data, parsed_data=parsed_data, isInfiniteToken=None, blockinfo=blockinfo, transactionType='tokenswapDepositSettlement') + if returnval == 0: + logger.info("CRITICAL ERROR | Something went wrong in the token transfer method while doing local Smart Contract Particiaption deposit swap operation") + return 0 + + # If this is the first interaction of the outputlist's address with the given token name, add it to token mapping + systemdb_connection = 
create_database_connection('system_dbs', {'db_name':'system'}) + firstInteractionCheck = systemdb_connection.execute(f"SELECT * FROM tokenAddressMapping WHERE tokenAddress='{a_deposit.depositorAddress}' AND token='{contractStructure['accepting_token']}'").fetchall() + if len(firstInteractionCheck) == 0: + systemdb_connection.execute(f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{a_deposit.depositorAddress}', '{contractStructure['accepting_token']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}')") + systemdb_connection.close() + + + contract_session.add(ContractDeposits( depositorAddress= a_deposit.depositorAddress, + depositAmount= 0 - a_deposit.depositBalance, + status='deposit-honor', + transactionHash= a_deposit.transactionHash, + blockNumber= blockinfo['height'], + blockHash= blockinfo['hash'])) + + contract_session.add(ContractDeposits( depositorAddress= a_deposit.depositorAddress, + depositBalance= 0, + expiryTime = a_deposit.expiryTime, + unix_expiryTime = a_deposit.unix_expiryTime, + status='consumed', + transactionHash= a_deposit.transactionHash, + blockNumber= blockinfo['height'], + blockHash= blockinfo['hash'])) + # ConsumedInfoTable + contract_session.add(ConsumedInfo( id_deposittable= a_deposit.id, + transactionHash= a_deposit.transactionHash, + blockNumber= blockinfo['height'])) + remaining_amount = remaining_amount - a_deposit.depositBalance + + systemdb_session = create_database_session_orm('system_dbs', {'db_name':'system'}, SystemBase) + systemdb_entry = systemdb_session.query(TimeActions.activity, TimeActions.contractType, TimeActions.tokens_db, TimeActions.parsed_data).filter(TimeActions.transactionHash == a_deposit.transactionHash).first() + systemdb_session.add(TimeActions( + time = a_deposit.expiryTime, + activity = systemdb_entry[0], + status = 'consumed', + contractName = parsed_data['contractName'], + contractAddress = 
outputlist[0], + contractType = systemdb_entry[1], + tokens_db = systemdb_entry[2], + parsed_data = systemdb_entry[3], + transactionHash = a_deposit.transactionHash, + blockNumber = blockinfo['height'] + )) + systemdb_session.commit() + del systemdb_session + + # token transfer from the contract to participant's address + returnval = transferToken(contractStructure['selling_token'], swapAmount, outputlist[0], inputlist[0], transaction_data=transaction_data, parsed_data=parsed_data, isInfiniteToken=None, blockinfo=blockinfo, transactionType='tokenswapParticipationSettlement') + if returnval == 0: + logger.info("CRITICAL ERROR | Something went wrong in the token transfer method while doing local Smart Contract Particiaption") + return 0 + + # ContractParticipationTable + contract_session.add(ContractParticipants(participantAddress = transaction_data['senderAddress'], tokenAmount= parsed_data['tokenAmount'], userChoice= swapPrice, transactionHash= transaction_data['txid'], blockNumber= blockinfo['height'], blockHash= blockinfo['hash'], winningAmount = swapAmount)) + + add_contract_transaction_history(contract_name=parsed_data['contractName'], contract_address=outputlist[0], transactionType='participation', transactionSubType='swap', sourceFloAddress=inputlist[0], destFloAddress=outputlist[0], transferAmount=swapAmount, blockNumber=blockinfo['height'], blockHash=blockinfo['hash'], blocktime=blockinfo['time'], transactionHash=transaction_data['txid'], jsonData=json.dumps(transaction_data), parsedFloData=json.dumps(parsed_data)) + + contract_session.commit() + contract_session.close() + + # If this is the first interaction of the participant's address with the given token name, add it to token mapping + systemdb_connection = create_database_connection('system_dbs', {'db_name':'system'}) + firstInteractionCheck = systemdb_connection.execute(f"SELECT * FROM tokenAddressMapping WHERE tokenAddress='{inputlist[0]}' AND token='{contractStructure['selling_token']}'").fetchall() 
+ if len(firstInteractionCheck) == 0: + systemdb_connection.execute(f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{inputlist[0]}', '{contractStructure['selling_token']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}')") + systemdb_connection.close() + + updateLatestTransaction(transaction_data, parsed_data, f"{parsed_data['contractName']}-{outputlist[0]}", transactionType='tokenswapParticipation') + pushData_SSEapi(f"Token swap successfully performed at contract {parsed_data['contractName']}-{outputlist[0]} with the transaction {transaction_data['txid']}") + + else: + # Reject the participation saying not enough deposit tokens are available + rejectComment = f"Swap participation at transaction {transaction_data['txid']} rejected as requested swap amount is {swapAmount} but {available_deposit_sum} is available" + logger.info(rejectComment) + rejected_transaction_history(transaction_data, parsed_data, inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + + else: + rejectComment = f"Transaction {transaction_data['txid']} rejected as the participation doesn't belong to any valid contract type" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'participation', outputlist[0], inputadd, outputlist[0], rejectComment) + return 0 + + else: + rejectComment = f"Transaction {transaction_data['txid']} rejected as a Smart Contract with the name {parsed_data['contractName']} at address {outputlist[0]} doesnt exist" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'participation', outputlist[0], inputadd, outputlist[0], rejectComment) + return 0 + + elif parsed_data['transferType'] == 'nft': + if not is_a_contract_address(inputlist[0]) and not is_a_contract_address(outputlist[0]): + # check if the token exists in the database + if 
check_database_existence('token', {'token_name':f"{parsed_data['tokenIdentification']}"}): + # Pull details of the token type from system.db database + connection = create_database_connection('system_dbs', {'db_name':'system'}) + db_details = connection.execute("SELECT db_name, db_type, keyword, object_format FROM databaseTypeMapping WHERE db_name='{}'".format(parsed_data['tokenIdentification'])) + db_details = list(zip(*db_details)) + if db_details[1][0] == 'infinite-token': + db_object = json.loads(db_details[3][0]) + if db_object['root_address'] == inputlist[0]: + isInfiniteToken = True + else: + isInfiniteToken = False + else: + isInfiniteToken = False + + # Check if the transaction hash already exists in the token db + connection = create_database_connection('token', {'token_name':f"{parsed_data['tokenIdentification']}"}) + blockno_txhash = connection.execute('SELECT blockNumber, transactionHash FROM transactionHistory').fetchall() + connection.close() + blockno_txhash_T = list(zip(*blockno_txhash)) + + if transaction_data['txid'] in list(blockno_txhash_T[1]): + logger.warning(f"Transaction {transaction_data['txid']} already exists in the token db. This is unusual, please check your code") + pushData_SSEapi(f"Error | Transaction {transaction_data['txid']} already exists in the token db. 
This is unusual, please check your code") + return 0 + + returnval = transferToken(parsed_data['tokenIdentification'], parsed_data['tokenAmount'], inputlist[0],outputlist[0], transaction_data, parsed_data, isInfiniteToken=isInfiniteToken, blockinfo = blockinfo) + if returnval == 0: + logger.info("Something went wrong in the token transfer method") + pushData_SSEapi(f"Error | Something went wrong while doing the internal db transactions for {transaction_data['txid']}") + return 0 + else: + updateLatestTransaction(transaction_data, parsed_data, f"{parsed_data['tokenIdentification']}", transactionType='token-transfer') + + # If this is the first interaction of the outputlist's address with the given token name, add it to token mapping + connection = create_database_connection('system_dbs', {'db_name':'system'}) + firstInteractionCheck = connection.execute(f"SELECT * FROM tokenAddressMapping WHERE tokenAddress='{outputlist[0]}' AND token='{parsed_data['tokenIdentification']}'").fetchall() + + if len(firstInteractionCheck) == 0: + connection.execute(f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{outputlist[0]}', '{parsed_data['tokenIdentification']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}')") + + connection.close() + + # Pass information to SSE channel + headers = {'Accept': 'application/json', 'Content-Type': 'application/json'} + # r = requests.post(tokenapi_sse_url, json={f"message': 'Token Transfer | name:{parsed_data['tokenIdentification']} | transactionHash:{transaction_data['txid']}"}, headers=headers) + return 1 + else: + rejectComment = f"Token transfer at transaction {transaction_data['txid']} rejected as a token with the name {parsed_data['tokenIdentification']} doesnt not exist" + logger.info(rejectComment) + rejected_transaction_history(transaction_data, parsed_data, inputadd, outputlist[0], rejectComment) + 
pushData_SSEapi(rejectComment) + return 0 + + else: + rejectComment = f"Token transfer at transaction {transaction_data['txid']} rejected as either the input address or the output address is part of a contract address" + logger.info(rejectComment) + rejected_transaction_history(transaction_data, parsed_data, inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + + # todo Rule 47 - If the parsed data type is token incorporation, then check if the name hasn't been taken already + # if it has been taken then reject the incorporation. Else incorporate it + elif parsed_data['type'] == 'tokenIncorporation': + if not is_a_contract_address(inputlist[0]): + if not check_database_existence('token', {'token_name':f"{parsed_data['tokenIdentification']}"}): + session = create_database_session_orm('token', {'token_name': f"{parsed_data['tokenIdentification']}"}, TokenBase) + session.add(ActiveTable(address=inputlist[0], parentid=0, transferBalance=parsed_data['tokenAmount'], addressBalance=parsed_data['tokenAmount'], blockNumber=blockinfo['height'])) + session.add(TransferLogs(sourceFloAddress=inputadd, destFloAddress=outputlist[0], + transferAmount=parsed_data['tokenAmount'], sourceId=0, destinationId=1, + blockNumber=transaction_data['blockheight'], time=transaction_data['time'], + transactionHash=transaction_data['txid'])) + + add_transaction_history(token_name=parsed_data['tokenIdentification'], sourceFloAddress=inputadd, destFloAddress=outputlist[0], transferAmount=parsed_data['tokenAmount'], blockNumber=transaction_data['blockheight'], blockHash=transaction_data['blockhash'], blocktime=transaction_data['time'], transactionHash=transaction_data['txid'], jsonData=json.dumps(transaction_data), transactionType=parsed_data['type'], parsedFloData=json.dumps(parsed_data)) + + session.commit() + session.close() + + # add it to token address to token mapping db table + connection = create_database_connection('system_dbs', {'db_name':'system'}) + 
connection.execute(f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{inputadd}', '{parsed_data['tokenIdentification']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}');") + connection.execute(f"INSERT INTO databaseTypeMapping (db_name, db_type, keyword, object_format, blockNumber) VALUES ('{parsed_data['tokenIdentification']}', 'token', '', '', '{transaction_data['blockheight']}')") + connection.close() + + updateLatestTransaction(transaction_data, parsed_data, f"{parsed_data['tokenIdentification']}") + pushData_SSEapi(f"Token | Successfully incorporated token {parsed_data['tokenIdentification']} at transaction {transaction_data['txid']}") + return 1 + else: + rejectComment = f"Token incorporation rejected at transaction {transaction_data['txid']} as token {parsed_data['tokenIdentification']} already exists" + logger.info(rejectComment) + rejected_transaction_history(transaction_data, parsed_data, inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + else: + rejectComment = f"Token incorporation at transaction {transaction_data['txid']} rejected as either the input address is part of a contract address" + logger.info(rejectComment) + rejected_transaction_history(transaction_data, parsed_data, inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + + # todo Rule 48 - If the parsed data type if smart contract incorporation, then check if the name hasn't been taken already + # if it has been taken then reject the incorporation. 
+ elif parsed_data['type'] == 'smartContractIncorporation': + if not check_database_existence('smart_contract', {'contract_name':f"{parsed_data['contractName']}", 'contract_address':f"{parsed_data['contractAddress']}"}): + # Cannot incorporate on an address with any previous token transaction + systemdb_session = create_database_session_orm('system_dbs', {'db_name':'system'}, SystemBase) + tokenAddressMapping_of_contractAddress = systemdb_session.query(TokenAddressMapping).filter(TokenAddressMapping.tokenAddress == parsed_data['contractAddress']).all() + if len(tokenAddressMapping_of_contractAddress) == 0: + # todo Rule 49 - If the contract name hasn't been taken before, check if the contract type is an authorized type by the system + if parsed_data['contractType'] == 'one-time-event': + logger.info("Smart contract is of the type one-time-event") + # either userchoice or payeeAddress condition should be present. Check for it + if 'userchoices' not in parsed_data['contractConditions'] and 'payeeAddress' not in parsed_data['contractConditions']: + rejectComment = f"Either userchoice or payeeAddress should be part of the Contract conditions.\nSmart contract incorporation on transaction {transaction_data['txid']} rejected" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'incorporation', inputadd, inputadd, outputlist[0], rejectComment) + delete_contract_database({'contract_name': parsed_data['contractName'], 'contract_address': parsed_data['contractAddress']}) + return 0 + + # userchoice and payeeAddress conditions cannot come together. 
Check for it + if 'userchoices' in parsed_data['contractConditions'] and 'payeeAddress' in parsed_data['contractConditions']: + rejectComment = f"Both userchoice and payeeAddress provided as part of the Contract conditions.\nSmart contract incorporation on transaction {transaction_data['txid']} rejected" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'incorporation', inputadd, inputadd, outputlist[0], rejectComment) + delete_contract_database({'contract_name': parsed_data['contractName'], 'contract_address': parsed_data['contractAddress']}) + return 0 + + # todo Rule 50 - Contract address mentioned in flodata field should be same as the receiver FLO address on the output side + # henceforth we will not consider any flo private key initiated comment as valid from this address + # Unlocking can only be done through smart contract system address + if parsed_data['contractAddress'] == inputadd: + session = create_database_session_orm('smart_contract', {'contract_name': f"{parsed_data['contractName']}", 'contract_address': f"{parsed_data['contractAddress']}"}, ContractBase) + session.add(ContractStructure(attribute='contractType', index=0, value=parsed_data['contractType'])) + session.add(ContractStructure(attribute='subtype', index=0, value=parsed_data['subtype'])) + session.add(ContractStructure(attribute='contractName', index=0, value=parsed_data['contractName'])) + session.add(ContractStructure(attribute='tokenIdentification', index=0, value=parsed_data['tokenIdentification'])) + session.add(ContractStructure(attribute='contractAddress', index=0, value=parsed_data['contractAddress'])) + session.add(ContractStructure(attribute='flodata', index=0, value=parsed_data['flodata'])) + session.add(ContractStructure(attribute='expiryTime', index=0, value=parsed_data['contractConditions']['expiryTime'])) + session.add(ContractStructure(attribute='unix_expiryTime', index=0, 
value=parsed_data['contractConditions']['unix_expiryTime'])) + if 'contractAmount' in parsed_data['contractConditions'].keys(): + session.add(ContractStructure(attribute='contractAmount', index=0, value=parsed_data['contractConditions']['contractAmount'])) + + if 'minimumsubscriptionamount' in parsed_data['contractConditions']: + session.add(ContractStructure(attribute='minimumsubscriptionamount', index=0, value=parsed_data['contractConditions']['minimumsubscriptionamount'])) + if 'maximumsubscriptionamount' in parsed_data['contractConditions']: + session.add(ContractStructure(attribute='maximumsubscriptionamount', index=0, value=parsed_data['contractConditions']['maximumsubscriptionamount'])) + if 'userchoices' in parsed_data['contractConditions']: + for key, value in literal_eval(parsed_data['contractConditions']['userchoices']).items(): + session.add(ContractStructure(attribute='exitconditions', index=key, value=value)) + + if 'payeeAddress' in parsed_data['contractConditions']: + # in this case, expirydate( or maximumamount) is the trigger internally. 
Keep a track of expiry dates + session.add(ContractStructure(attribute='payeeAddress', index=0, value=json.dumps(parsed_data['contractConditions']['payeeAddress']))) + + # Store transfer as part of ContractTransactionHistory + add_contract_transaction_history(contract_name=parsed_data['contractName'], contract_address=parsed_data['contractAddress'], transactionType='incorporation', transactionSubType=None, sourceFloAddress=inputadd, destFloAddress=outputlist[0], transferAmount=None, blockNumber=blockinfo['height'], blockHash=blockinfo['hash'], blocktime=blockinfo['time'], transactionHash=transaction_data['txid'], jsonData=json.dumps(transaction_data), parsedFloData=json.dumps(parsed_data)) + session.commit() + session.close() + + # add Smart Contract name in token contract association + blockchainReference = neturl + 'tx/' + transaction_data['txid'] + session = create_database_session_orm('token', {'token_name': f"{parsed_data['tokenIdentification']}"}, TokenBase) + session.add(TokenContractAssociation(tokenIdentification=parsed_data['tokenIdentification'], + contractName=parsed_data['contractName'], + contractAddress=parsed_data['contractAddress'], + blockNumber=transaction_data['blockheight'], + blockHash=transaction_data['blockhash'], + time=transaction_data['time'], + transactionHash=transaction_data['txid'], + blockchainReference=blockchainReference, + jsonData=json.dumps(transaction_data), + transactionType=parsed_data['type'], + parsedFloData=json.dumps(parsed_data))) + session.commit() + session.close() + + # Store smart contract address in system's db, to be ignored during future transfers + session = create_database_session_orm('system_dbs', {'db_name': "system"}, SystemBase) + session.add(ActiveContracts(contractName=parsed_data['contractName'], + contractAddress=parsed_data['contractAddress'], status='active', + tokenIdentification=parsed_data['tokenIdentification'], + contractType=parsed_data['contractType'], + transactionHash=transaction_data['txid'], 
+ blockNumber=transaction_data['blockheight'], + blockHash=transaction_data['blockhash'], + incorporationDate=transaction_data['time'])) + session.commit() + + session.add(ContractAddressMapping(address=inputadd, addressType='incorporation', + tokenAmount=None, + contractName=parsed_data['contractName'], + contractAddress=inputadd, + transactionHash=transaction_data['txid'], + blockNumber=transaction_data['blockheight'], + blockHash=transaction_data['blockhash'])) + + session.add(DatabaseTypeMapping(db_name=f"{parsed_data['contractName']}-{inputadd}", + db_type='smartcontract', + keyword='', + object_format='', + blockNumber=transaction_data['blockheight'])) + + session.add(TimeActions(time=parsed_data['contractConditions']['expiryTime'], + activity='contract-time-trigger', + status='active', + contractName=parsed_data['contractName'], + contractAddress=inputadd, + contractType='one-time-event-trigger', + tokens_db=json.dumps([parsed_data['tokenIdentification']]), + parsed_data=json.dumps(parsed_data), + transactionHash=transaction_data['txid'], + blockNumber=transaction_data['blockheight'])) + + session.commit() + session.close() + + updateLatestTransaction(transaction_data, parsed_data, f"{parsed_data['contractName']}-{parsed_data['contractAddress']}") + + pushData_SSEapi('Contract | Contract incorporated at transaction {} with name {}-{}'.format(transaction_data['txid'], parsed_data['contractName'], parsed_data['contractAddress'])) + return 1 + else: + rejectComment = f"Contract Incorporation on transaction {transaction_data['txid']} rejected as contract address in Flodata and input address are different" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'incorporation', inputadd, inputadd, outputlist[0], rejectComment) + pushData_SSEapi(f"Error | Contract Incorporation rejected as address in Flodata and input address are different at transaction {transaction_data['txid']}") + 
delete_contract_database({'contract_name': parsed_data['contractName'], 'contract_address': parsed_data['contractAddress']}) + return 0 + + if parsed_data['contractType'] == 'continuous-event' or parsed_data['contractType'] == 'continuos-event': + logger.debug("Smart contract is of the type continuous-event") + # Add checks to reject the creation of contract + if parsed_data['contractAddress'] == inputadd: + session = create_database_session_orm('smart_contract', {'contract_name': f"{parsed_data['contractName']}", 'contract_address': f"{parsed_data['contractAddress']}"}, ContractBase) + session.add(ContractStructure(attribute='contractType', index=0, value=parsed_data['contractType'])) + session.add(ContractStructure(attribute='contractName', index=0, value=parsed_data['contractName'])) + session.add(ContractStructure(attribute='contractAddress', index=0, value=parsed_data['contractAddress'])) + session.add(ContractStructure(attribute='flodata', index=0, value=parsed_data['flodata'])) + + if parsed_data['stateF'] != {} and parsed_data['stateF'] is not False: + for key, value in parsed_data['stateF'].items(): + session.add(ContractStructure(attribute=f'statef-{key}', index=0, value=value)) + + if 'subtype' in parsed_data['contractConditions']: + # todo: Check if the both the tokens mentioned exist if its a token swap + if (parsed_data['contractConditions']['subtype'] == 'tokenswap') and (check_database_existence('token', {'token_name':f"{parsed_data['contractConditions']['selling_token'].split('#')[0]}"})) and (check_database_existence('token', {'token_name':f"{parsed_data['contractConditions']['accepting_token'].split('#')[0]}"})): + session.add(ContractStructure(attribute='subtype', index=0, value=parsed_data['contractConditions']['subtype'])) + session.add(ContractStructure(attribute='accepting_token', index=0, value=parsed_data['contractConditions']['accepting_token'])) + session.add(ContractStructure(attribute='selling_token', index=0, 
value=parsed_data['contractConditions']['selling_token'])) + + if parsed_data['contractConditions']['pricetype'] not in ['predetermined','statef','dynamic']: + rejectComment = f"pricetype is not part of accepted parameters for a continuos event contract of the type token swap.\nSmart contract incorporation on transaction {transaction_data['txid']} rejected" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'incorporation', inputadd, inputadd, outputlist[0], rejectComment) + delete_contract_database({'contract_name': parsed_data['contractName'], 'contract_address': parsed_data['contractAddress']}) + return 0 + + # determine price + session.add(ContractStructure(attribute='pricetype', index=0, value=parsed_data['contractConditions']['pricetype'])) + + if parsed_data['contractConditions']['pricetype'] in ['predetermined','statef']: + session.add(ContractStructure(attribute='price', index=0, value=parsed_data['contractConditions']['price'])) + elif parsed_data['contractConditions']['pricetype'] in ['dynamic']: + session.add(ContractStructure(attribute='price', index=0, value=parsed_data['contractConditions']['price'])) + session.add(ContractStructure(attribute='oracle_address', index=0, value=parsed_data['contractConditions']['oracle_address'])) + + # Store transfer as part of ContractTransactionHistory + blockchainReference = neturl + 'tx/' + transaction_data['txid'] + session.add(ContractTransactionHistory(transactionType='incorporation', + sourceFloAddress=inputadd, + destFloAddress=outputlist[0], + transferAmount=None, + blockNumber=transaction_data['blockheight'], + blockHash=transaction_data['blockhash'], + time=transaction_data['time'], + transactionHash=transaction_data['txid'], + blockchainReference=blockchainReference, + jsonData=json.dumps(transaction_data), + parsedFloData=json.dumps(parsed_data) + )) + session.commit() + session.close() + + # add Smart Contract name in token contract association + 
accepting_sending_tokenlist = [parsed_data['contractConditions']['accepting_token'], parsed_data['contractConditions']['selling_token']] + for token_name in accepting_sending_tokenlist: + token_name = token_name.split('#')[0] + session = create_database_session_orm('token', {'token_name': f"{token_name}"}, TokenBase) + session.add(TokenContractAssociation(tokenIdentification=token_name, + contractName=parsed_data['contractName'], + contractAddress=parsed_data['contractAddress'], + blockNumber=transaction_data['blockheight'], + blockHash=transaction_data['blockhash'], + time=transaction_data['time'], + transactionHash=transaction_data['txid'], + blockchainReference=blockchainReference, + jsonData=json.dumps(transaction_data), + transactionType=parsed_data['type'], + parsedFloData=json.dumps(parsed_data))) + session.commit() + session.close() + + # Store smart contract address in system's db, to be ignored during future transfers + session = create_database_session_orm('system_dbs', {'db_name': "system"}, SystemBase) + session.add(ActiveContracts(contractName=parsed_data['contractName'], + contractAddress=parsed_data['contractAddress'], status='active', + tokenIdentification=str(accepting_sending_tokenlist), + contractType=parsed_data['contractType'], + transactionHash=transaction_data['txid'], + blockNumber=transaction_data['blockheight'], + blockHash=transaction_data['blockhash'], + incorporationDate=transaction_data['time'])) + session.commit() + + # todo - Add a condition for rejected contract transaction on the else loop for this condition + session.add(ContractAddressMapping(address=inputadd, addressType='incorporation', + tokenAmount=None, + contractName=parsed_data['contractName'], + contractAddress=inputadd, + transactionHash=transaction_data['txid'], + blockNumber=transaction_data['blockheight'], + blockHash=transaction_data['blockhash'])) + session.add(DatabaseTypeMapping(db_name=f"{parsed_data['contractName']}-{inputadd}", + db_type='smartcontract', + 
keyword='', + object_format='', + blockNumber=transaction_data['blockheight'])) + session.commit() + session.close() + updateLatestTransaction(transaction_data, parsed_data, f"{parsed_data['contractName']}-{parsed_data['contractAddress']}") + pushData_SSEapi('Contract | Contract incorporated at transaction {} with name {}-{}'.format(transaction_data['txid'], parsed_data['contractName'], parsed_data['contractAddress'])) + return 1 + + else: + rejectComment = f"One of the token for the swap does not exist.\nSmart contract incorporation on transaction {transaction_data['txid']} rejected" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'incorporation', inputadd, inputadd, outputlist[0], rejectComment) + delete_contract_database({'contract_name': parsed_data['contractName'], 'contract_address': parsed_data['contractAddress']}) + return 0 + + else: + rejectComment = f"No subtype provided || mentioned tokens do not exist for the Contract of type continuos event.\nSmart contract incorporation on transaction {transaction_data['txid']} rejected" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'incorporation', inputadd, inputadd, outputlist[0], rejectComment) + delete_contract_database({'contract_name': parsed_data['contractName'], 'contract_address': parsed_data['contractAddress']}) + return 0 + + else: + rejectComment = f"Smart contract creation transaction {transaction_data['txid']} rejected as token transactions already exist on the address {parsed_data['contractAddress']}" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'incorporation', inputadd, inputadd, outputlist[0], rejectComment) + delete_contract_database({'contract_name': parsed_data['contractName'], 'contract_address': parsed_data['contractAddress']}) + return 0 + + else: + rejectComment = f"Transaction {transaction_data['txid']} rejected as a Smart Contract 
with the name {parsed_data['contractName']} at address {parsed_data['contractAddress']} already exists" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'incorporation', inputadd, inputadd, outputlist[0], rejectComment) + delete_contract_database({'contract_name': parsed_data['contractName'], 'contract_address': parsed_data['contractAddress']}) + return 0 + + elif parsed_data['type'] == 'smartContractPays': + logger.info(f"Transaction {transaction_data['txid']} is of the type smartContractPays") + committeeAddressList = refresh_committee_list(APP_ADMIN, neturl, blockinfo['time']) + # Check if input address is a committee address + if inputlist[0] in committeeAddressList: + # check if the contract exists + if check_database_existence('smart_contract', {'contract_name':f"{parsed_data['contractName']}", 'contract_address':f"{outputlist[0]}"}): + # Check if the transaction hash already exists in the contract db (Safety check) + connection = create_database_connection('smart_contract', {'contract_name':f"{parsed_data['contractName']}", 'contract_address':f"{outputlist[0]}"}) + participantAdd_txhash = connection.execute(f"SELECT sourceFloAddress, transactionHash FROM contractTransactionHistory WHERE transactionType != 'incorporation'").fetchall() + participantAdd_txhash_T = list(zip(*participantAdd_txhash)) + + if len(participantAdd_txhash) != 0 and transaction_data['txid'] in list(participantAdd_txhash_T[1]): + logger.warning(f"Transaction {transaction_data['txid']} rejected as it already exists in the Smart Contract db. This is unusual, please check your code") + pushData_SSEapi(f"Error | Transaction {transaction_data['txid']} rejected as it already exists in the Smart Contract db. 
This is unusual, please check your code") + return 0 + + # pull out the contract structure into a dictionary + contractStructure = extract_contractStructure(parsed_data['contractName'], outputlist[0]) + + # if contractAddress has been passed, check if output address is contract Incorporation address + if 'contractAddress' in contractStructure: + if outputlist[0] != contractStructure['contractAddress']: + rejectComment = f"Transaction {transaction_data['txid']} rejected as Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} hasn't expired yet" + logger.warning(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'trigger', outputlist[0], inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + + # check the type of smart contract ie. external trigger or internal trigger + if 'payeeAddress' in contractStructure: + rejectComment = f"Transaction {transaction_data['txid']} rejected as Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} has an internal trigger" + logger.warning(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'trigger', outputlist[0], inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + + # check the status of the contract + contractStatus = check_contract_status(parsed_data['contractName'], outputlist[0]) + contractList = [] + + if contractStatus == 'closed': + rejectComment = f"Transaction {transaction_data['txid']} rejected as Smart contract {parsed_data['contractName']} at the {outputlist[0]} is closed" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'trigger', outputlist[0], inputadd, outputlist[0], rejectComment) + return 0 + else: + session = create_database_session_orm('smart_contract', {'contract_name': f"{parsed_data['contractName']}", 'contract_address': f"{outputlist[0]}"}, ContractBase) + result = 
session.query(ContractStructure).filter_by(attribute='expiryTime').all() + session.close() + if result: + # now parse the expiry time in python + expirytime = result[0].value.strip() + expirytime_split = expirytime.split(' ') + parse_string = '{}/{}/{} {}'.format(expirytime_split[3], parsing.months[expirytime_split[1]], expirytime_split[2], expirytime_split[4]) + expirytime_object = parsing.arrow.get(parse_string, 'YYYY/M/D HH:mm:ss').replace(tzinfo=expirytime_split[5][3:]) + blocktime_object = parsing.arrow.get(transaction_data['time']).to('Asia/Kolkata') + + if blocktime_object <= expirytime_object: + rejectComment = f"Transaction {transaction_data['txid']} rejected as Smart contract {parsed_data['contractName']}-{outputlist[0]} has not expired and will not trigger" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'trigger', outputlist[0], inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + + # check if the user choice passed is part of the contract structure + tempchoiceList = [] + for item in contractStructure['exitconditions']: + tempchoiceList.append(contractStructure['exitconditions'][item]) + + if parsed_data['triggerCondition'] not in tempchoiceList: + rejectComment = f"Transaction {transaction_data['txid']} rejected as triggerCondition, {parsed_data['triggerCondition']}, has been passed to Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} which doesn't accept any userChoice of the given name" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'trigger', outputlist[0], inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + + systemdb_session = create_database_session_orm('system_dbs', {'db_name':'system'}, SystemBase) + + activecontracts_table_info = systemdb_session.query(ActiveContracts.blockHash, ActiveContracts.incorporationDate, 
ActiveContracts.expiryDate).filter(ActiveContracts.contractName==parsed_data['contractName'], ActiveContracts.contractAddress==outputlist[0], ActiveContracts.status=='expired').first() + + timeactions_table_info = systemdb_session.query(TimeActions.time, TimeActions.activity, TimeActions.contractType, TimeActions.tokens_db, TimeActions.parsed_data).filter(TimeActions.contractName==parsed_data['contractName'], TimeActions.contractAddress==outputlist[0], TimeActions.status=='active').first() + + # check if minimumsubscriptionamount exists as part of the contract structure + if 'minimumsubscriptionamount' in contractStructure: + # if it has not been reached, close the contract and return money + minimumsubscriptionamount = float(contractStructure['minimumsubscriptionamount']) + session = create_database_session_orm('smart_contract', {'contract_name': f"{parsed_data['contractName']}", 'contract_address': f"{outputlist[0]}"}, ContractBase) + amountDeposited = session.query(func.sum(ContractParticipants.tokenAmount)).all()[0][0] + session.close() + + if amountDeposited is None: + amountDeposited = 0 + + if amountDeposited < minimumsubscriptionamount: + # close the contract and return the money + logger.info('Minimum subscription amount hasn\'t been reached\n The token will be returned back') + # Initialize payback to contract participants + connection = create_database_connection('smart_contract', {'contract_name':f"{parsed_data['contractName']}", 'contract_address':f"{outputlist[0]}"}) + contractParticipants = connection.execute('SELECT participantAddress, tokenAmount, transactionHash FROM contractparticipants').fetchall()[0][0] + + for participant in contractParticipants: + tokenIdentification = connection.execute('SELECT * FROM contractstructure WHERE attribute="tokenIdentification"').fetchall()[0][0] + contractAddress = connection.execute('SELECT * FROM contractstructure WHERE attribute="contractAddress"').fetchall()[0][0] + returnval = 
transferToken(tokenIdentification, participant[1], contractAddress, participant[0], transaction_data, parsed_data, blockinfo = blockinfo) + if returnval == 0: + logger.info("CRITICAL ERROR | Something went wrong in the token transfer method while doing local Smart Contract Trigger") + return 0 + + connection.execute('update contractparticipants set winningAmount="{}" where participantAddress="{}" and transactionHash="{}"'.format((participant[1], participant[0], participant[4]))) + + # add transaction to ContractTransactionHistory + blockchainReference = neturl + 'tx/' + transaction_data['txid'] + session = create_database_session_orm('smart_contract', {'contract_name': f"{parsed_data['contractName']}", 'contract_address': f"{outputlist[0]}"}, ContractBase) + session.add(ContractTransactionHistory(transactionType='trigger', + transactionSubType='minimumsubscriptionamount-payback', + sourceFloAddress=inputadd, + destFloAddress=outputlist[0], + transferAmount=None, + blockNumber=transaction_data['blockheight'], + blockHash=transaction_data['blockhash'], + time=transaction_data['time'], + transactionHash=transaction_data['txid'], + blockchainReference=blockchainReference, + jsonData=json.dumps(transaction_data), + parsedFloData=json.dumps(parsed_data) + )) + session.commit() + session.close() + + close_expire_contract(contractStructure, 'closed', transaction_data['txid'], blockinfo['height'], blockinfo['hash'], activecontracts_table_info.incorporationDate, activecontracts_table_info.expiryDate, blockinfo['time'], timeactions_table_info.time, timeactions_table_info.activity, parsed_data['contractName'], outputlist[0], timeactions_table_info.contractType, timeactions_table_info.tokens_db, timeactions_table_info.parsed_data, blockinfo['height']) + + updateLatestTransaction(transaction_data, parsed_data, f"{parsed_data['contractName']}-{outputlist[0]}") + pushData_SSEapi('Trigger | Minimum subscription amount not reached at contract {}-{} at transaction {}. 
Tokens will be refunded'.format(parsed_data['contractName'], outputlist[0], transaction_data['txid'])) + return 1 + + # Trigger the contract + connection = create_database_connection('smart_contract', {'contract_name':f"{parsed_data['contractName']}", 'contract_address':f"{outputlist[0]}"}) + tokenSum = connection.execute('SELECT IFNULL(sum(tokenAmount), 0) FROM contractparticipants').fetchall()[0][0] + if tokenSum > 0: + contractWinners = connection.execute('SELECT * FROM contractparticipants WHERE userChoice="{}"'.format(parsed_data['triggerCondition'])).fetchall() + winnerSum = connection.execute('SELECT sum(tokenAmount) FROM contractparticipants WHERE userChoice="{}"'.format(parsed_data['triggerCondition'])).fetchall()[0][0] + tokenIdentification = connection.execute('SELECT value FROM contractstructure WHERE attribute="tokenIdentification"').fetchall()[0][0] + + for winner in contractWinners: + winnerAmount = "%.8f" % ((winner[2] / winnerSum) * tokenSum) + returnval = transferToken(tokenIdentification, winnerAmount, outputlist[0], winner[1], transaction_data, parsed_data, blockinfo = blockinfo) + if returnval == 0: + logger.critical("Something went wrong in the token transfer method while doing local Smart Contract Trigger") + return 0 + connection.execute(f"INSERT INTO contractwinners (participantAddress, winningAmount, userChoice, transactionHash, blockNumber, blockHash) VALUES('{winner[1]}', {winnerAmount}, '{parsed_data['triggerCondition']}', '{transaction_data['txid']}','{blockinfo['height']}','{blockinfo['hash']}');") + + # add transaction to ContractTransactionHistory + blockchainReference = neturl + 'tx/' + transaction_data['txid'] + session.add(ContractTransactionHistory(transactionType='trigger', + transactionSubType='committee', + sourceFloAddress=inputadd, + destFloAddress=outputlist[0], + transferAmount=None, + blockNumber=transaction_data['blockheight'], + blockHash=transaction_data['blockhash'], + time=transaction_data['time'], + 
transactionHash=transaction_data['txid'], + blockchainReference=blockchainReference, + jsonData=json.dumps(transaction_data), + parsedFloData=json.dumps(parsed_data) + )) + session.commit() + session.close() + + close_expire_contract(contractStructure, 'closed', transaction_data['txid'], blockinfo['height'], blockinfo['hash'], activecontracts_table_info.incorporationDate, activecontracts_table_info.expiryDate, blockinfo['time'], timeactions_table_info['time'], 'contract-time-trigger', contractStructure['contractName'], contractStructure['contractAddress'], contractStructure['contractType'], timeactions_table_info.tokens_db, timeactions_table_info.parsed_data, blockinfo['height']) + + updateLatestTransaction(transaction_data, parsed_data, f"{contractStructure['contractName']}-{contractStructure['contractAddress']}") + + pushData_SSEapi('Trigger | Contract triggered of the name {}-{} is active currently at transaction {}'.format(parsed_data['contractName'], outputlist[0], transaction_data['txid'])) + return 1 + else: + rejectComment = f"Transaction {transaction_data['txid']} rejected as Smart Contract named {parsed_data['contractName']} at the address {outputlist[0]} doesn't exist" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'trigger', outputlist[0], inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + + else: + rejectComment = f"Transaction {transaction_data['txid']} rejected as input address, {inputlist[0]}, is not part of the committee address list" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'participation', outputlist[0], inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + + elif parsed_data['type'] == 'smartContractDeposit': + if check_database_existence('smart_contract', {'contract_name':f"{parsed_data['contractName']}", 'contract_address':f"{outputlist[0]}"}): + # Reject if the 
deposit expiry time is greater than incorporated blocktime + expiry_time = convert_datetime_to_arrowobject(parsed_data['depositConditions']['expiryTime']) + if blockinfo['time'] > expiry_time.timestamp(): + rejectComment = f"Contract deposit of transaction {transaction_data['txid']} rejected as expiryTime before current block time" + logger.warning(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'deposit', outputlist[0], inputadd, outputlist[0], rejectComment) + return 0 + + # Check if the transaction hash already exists in the contract db (Safety check) + connection = create_database_connection('smart_contract', {'contract_name':f"{parsed_data['contractName']}", 'contract_address':f"{outputlist[0]}"}) + participantAdd_txhash = connection.execute('SELECT participantAddress, transactionHash FROM contractparticipants').fetchall() + participantAdd_txhash_T = list(zip(*participantAdd_txhash)) + + if len(participantAdd_txhash) != 0 and transaction_data['txid'] in list(participantAdd_txhash_T[1]): + rejectComment = f"Transaction {transaction_data['txid']} rejected as it already exists in the Smart Contract db. 
This is unusual, please check your code" + logger.warning(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'deposit', outputlist[0], inputadd, outputlist[0], rejectComment) + return 0 + + # if contractAddress was passed, then check if it matches the output address of this contract + if 'contractAddress' in parsed_data: + if parsed_data['contractAddress'] != outputlist[0]: + rejectComment = f"Contract deposit at transaction {transaction_data['txid']} rejected as contractAddress specified in flodata, {parsed_data['contractAddress']}, doesnt not match with transaction's output address {outputlist[0]}" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'participation', outputlist[0], inputadd, outputlist[0], rejectComment) + # Pass information to SSE channel + pushData_SSEapi(f"Error| Mismatch in contract address specified in flodata and the output address of the transaction {transaction_data['txid']}") + return 0 + + # pull out the contract structure into a dictionary + contractStructure = extract_contractStructure(parsed_data['contractName'], outputlist[0]) + + # Transfer the token + returnval = transferToken(parsed_data['tokenIdentification'], parsed_data['depositAmount'], inputlist[0], outputlist[0], transaction_data, parsed_data, blockinfo=blockinfo) + if returnval == 0: + logger.info("Something went wrong in the token transfer method") + pushData_SSEapi(f"Error | Something went wrong while doing the internal db transactions for {transaction_data['txid']}") + return 0 + + # Push the deposit transaction into deposit database contract database + session = create_database_session_orm('smart_contract', {'contract_name': f"{parsed_data['contractName']}", 'contract_address': f"{outputlist[0]}"}, ContractBase) + blockchainReference = neturl + 'tx/' + transaction_data['txid'] + session.add(ContractDeposits(depositorAddress = inputadd, depositAmount = parsed_data['depositAmount'], 
depositBalance = parsed_data['depositAmount'], expiryTime = parsed_data['depositConditions']['expiryTime'], unix_expiryTime = convert_datetime_to_arrowobject(parsed_data['depositConditions']['expiryTime']).timestamp(), status = 'active', transactionHash = transaction_data['txid'], blockNumber = transaction_data['blockheight'], blockHash = transaction_data['blockhash'])) + session.add(ContractTransactionHistory(transactionType = 'smartContractDeposit', + transactionSubType = None, + sourceFloAddress = inputadd, + destFloAddress = outputlist[0], + transferAmount = parsed_data['depositAmount'], + blockNumber = transaction_data['blockheight'], + blockHash = transaction_data['blockhash'], + time = transaction_data['time'], + transactionHash = transaction_data['txid'], + blockchainReference = blockchainReference, + jsonData = json.dumps(transaction_data), + parsedFloData = json.dumps(parsed_data) + )) + session.commit() + session.close() + + session = create_database_session_orm('system_dbs', {'db_name': f"system"}, SystemBase) + session.add(TimeActions(time=parsed_data['depositConditions']['expiryTime'], + activity='contract-deposit', + status='active', + contractName=parsed_data['contractName'], + contractAddress=outputlist[0], + contractType='continuos-event-swap', + tokens_db=f"{parsed_data['tokenIdentification']}", + parsed_data=json.dumps(parsed_data), + transactionHash=transaction_data['txid'], + blockNumber=transaction_data['blockheight'])) + session.commit() + pushData_SSEapi(f"Deposit Smart Contract Transaction {transaction_data['txid']} for the Smart contract named {parsed_data['contractName']} at the address {outputlist[0]}") + + # If this is the first interaction of the outputlist's address with the given token name, add it to token mapping + systemdb_connection = create_database_connection('system_dbs', {'db_name':'system'}) + firstInteractionCheck = connection.execute(f"SELECT * FROM tokenAddressMapping WHERE tokenAddress='{outputlist[0]}' AND 
token='{parsed_data['tokenIdentification']}'").fetchall() + if len(firstInteractionCheck) == 0: + connection.execute(f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{outputlist[0]}', '{parsed_data['tokenIdentification']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}')") + connection.close() + + updateLatestTransaction(transaction_data, parsed_data , f"{parsed_data['contractName']}-{outputlist[0]}") + return 1 + + else: + rejectComment = f"Transaction {transaction_data['txid']} rejected as a Smart Contract with the name {parsed_data['contractName']} at address {outputlist[0]} doesnt exist" + logger.info(rejectComment) + rejected_contract_transaction_history(transaction_data, parsed_data, 'smartContractDeposit', outputlist[0], inputadd, outputlist[0], rejectComment) + return 0 + + elif parsed_data['type'] == 'nftIncorporation': + ''' + DIFFERENT BETWEEN TOKEN AND NFT + System.db will have a different entry + in creation nft word will be extra + NFT Hash must be present + Creation and transfer amount .. 
only integer parts will be taken + Keyword nft must be present in both creation and transfer + ''' + if not is_a_contract_address(inputlist[0]): + if not check_database_existence('token', {'token_name':f"{parsed_data['tokenIdentification']}"}): + session = create_database_session_orm('token', {'token_name': f"{parsed_data['tokenIdentification']}"}, TokenBase) + session.add(ActiveTable(address=inputlist[0], parentid=0, transferBalance=parsed_data['tokenAmount'], addressBalance=parsed_data['tokenAmount'], blockNumber=blockinfo['height'])) + session.add(TransferLogs(sourceFloAddress=inputadd, destFloAddress=outputlist[0], transferAmount=parsed_data['tokenAmount'], sourceId=0, destinationId=1, blockNumber=transaction_data['blockheight'], time=transaction_data['time'], transactionHash=transaction_data['txid'])) + add_transaction_history(token_name=parsed_data['tokenIdentification'], sourceFloAddress=inputadd, destFloAddress=outputlist[0], transferAmount=parsed_data['tokenAmount'], blockNumber=transaction_data['blockheight'], blockHash=transaction_data['blockhash'], blocktime=transaction_data['time'], transactionHash=transaction_data['txid'], jsonData=json.dumps(transaction_data), transactionType=parsed_data['type'], parsedFloData=json.dumps(parsed_data)) + + session.commit() + session.close() + + # add it to token address to token mapping db table + connection = create_database_connection('system_dbs', {'db_name':'system'}) + connection.execute(f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{inputadd}', '{parsed_data['tokenIdentification']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}');") + nft_data = {'sha256_hash': f"{parsed_data['nftHash']}"} + connection.execute(f"INSERT INTO databaseTypeMapping (db_name, db_type, keyword, object_format, blockNumber) VALUES ('{parsed_data['tokenIdentification']}', 'nft', '', '{json.dumps(nft_data)}', 
'{transaction_data['blockheight']}')") + connection.close() + + updateLatestTransaction(transaction_data, parsed_data, f"{parsed_data['tokenIdentification']}") + pushData_SSEapi(f"NFT | Succesfully incorporated NFT {parsed_data['tokenIdentification']} at transaction {transaction_data['txid']}") + return 1 + else: + rejectComment = f"Transaction {transaction_data['txid']} rejected as an NFT with the name {parsed_data['tokenIdentification']} has already been incorporated" + logger.info(rejectComment) + rejected_transaction_history(transaction_data, parsed_data, inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + else: + rejectComment = f"NFT incorporation at transaction {transaction_data['txid']} rejected as either the input address is part of a contract address" + logger.info(rejectComment) + rejected_transaction_history(transaction_data, parsed_data, inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + + elif parsed_data['type'] == 'infiniteTokenIncorporation': + if not is_a_contract_address(inputlist[0]) and not is_a_contract_address(outputlist[0]): + if not check_database_existence('token', {'token_name':f"{parsed_data['tokenIdentification']}"}): + parsed_data['tokenAmount'] = 0 + tokendb_session = create_database_session_orm('token', {'token_name': f"{parsed_data['tokenIdentification']}"}, TokenBase) + tokendb_session.add(ActiveTable(address=inputlist[0], parentid=0, transferBalance=parsed_data['tokenAmount'], blockNumber=blockinfo['height'])) + tokendb_session.add(TransferLogs(sourceFloAddress=inputadd, destFloAddress=outputlist[0], + transferAmount=parsed_data['tokenAmount'], sourceId=0, destinationId=1, + blockNumber=transaction_data['blockheight'], time=transaction_data['time'], + transactionHash=transaction_data['txid'])) + + add_transaction_history(token_name=parsed_data['tokenIdentification'], sourceFloAddress=inputadd, destFloAddress=outputlist[0], 
transferAmount=parsed_data['tokenAmount'], blockNumber=transaction_data['blockheight'], blockHash=transaction_data['blockhash'], blocktime=blockinfo['time'], transactionHash=transaction_data['txid'], jsonData=json.dumps(transaction_data), transactionType=parsed_data['type'], parsedFloData=json.dumps(parsed_data)) + + + # add it to token address to token mapping db table + connection = create_database_connection('system_dbs', {'db_name':'system'}) + connection.execute(f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{inputadd}', '{parsed_data['tokenIdentification']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}');") + info_object = {'root_address': inputadd} + connection.execute("""INSERT INTO databaseTypeMapping (db_name, db_type, keyword, object_format, blockNumber) VALUES (?, ?, ?, ?, ?)""", (parsed_data['tokenIdentification'], 'infinite-token', '', json.dumps(info_object), transaction_data['blockheight'])) + updateLatestTransaction(transaction_data, parsed_data, f"{parsed_data['tokenIdentification']}") + tokendb_session.commit() + connection.close() + tokendb_session.close() + pushData_SSEapi(f"Token | Succesfully incorporated token {parsed_data['tokenIdentification']} at transaction {transaction_data['txid']}") + return 1 + else: + rejectComment = f"Transaction {transaction_data['txid']} rejected as a token with the name {parsed_data['tokenIdentification']} has already been incorporated" + logger.info(rejectComment) + rejected_transaction_history(transaction_data, parsed_data, inputadd, outputlist[0], rejectComment) + pushData_SSEapi(rejectComment) + return 0 + else: + rejectComment = f"Infinite token incorporation at transaction {transaction_data['txid']} rejected as either the input address is part of a contract address" + logger.info(rejectComment) + rejected_transaction_history(transaction_data, parsed_data, inputadd, outputlist[0], 
def scanBlockchain():
    """Scan the FLO blockchain from the last locally processed block up to the
    current network tip, running every non-ignored block through processBlock().

    The starting point is the 'lastblockscanned' attribute in the system DB;
    the tip height is polled from the flosight 'blocks?limit=1' endpoint until
    a well-formed response arrives.
    """
    # Read start block no
    session = create_database_session_orm('system_dbs', {'db_name': "system"}, SystemBase)
    startblock = int(session.query(SystemData).filter_by(attribute='lastblockscanned').all()[0].value) + 1
    session.commit()
    session.close()

    # todo Rule 6 - Find current block height
    # Rule 7 - Start analysing the block contents from starting block to current height

    # Find current block height
    # Poll until the flosight backend returns a parseable height payload.
    current_index = -1
    while(current_index == -1):
        response = newMultiRequest('blocks?limit=1')
        try:
            current_index = response['backend']['blocks']
        # NOTE(review): bare except — narrowing to KeyError/TypeError would avoid
        # masking unrelated failures while waiting for a well-formed response.
        except:
            logger.info('Latest block count response from multiRequest() is not in the right format. Displaying the data received in the log below')
            logger.info(response)
            logger.info('Program will wait for 1 seconds and try to reconnect')
            time.sleep(1)
        else:
            logger.info("Current block height is %s" % str(current_index))
            break

    # NOTE(review): range() excludes current_index itself — confirm the tip block
    # is intentionally left to the websocket 'block' handler to process.
    for blockindex in range(startblock, current_index):
        if blockindex in IGNORE_BLOCK_LIST:
            continue
        processBlock(blockindex=blockindex)

    # At this point the script has updated to the latest block
    # Now we connect to flosight's websocket API to get information about the latest blocks

def switchNeturl(currentneturl):
    # Round-robin to the next flosight server in the module-level serverlist,
    # wrapping back to the start after the last entry.
    neturlindex = serverlist.index(currentneturl)
    if neturlindex+1 >= len(serverlist):
        return serverlist[neturlindex+1 - len(serverlist)]
    else:
        return serverlist[neturlindex+1]


def reconnectWebsocket(socket_variable):
    """Retry the websocket connection (rotating across flosight servers on
    failure) until socket_variable reports connected.

    A catch-up scanBlockchain() runs before every attempt so that no blocks
    are missed during the time the socket was down.
    """
    # Switch a to different flosight
    # neturl = switchNeturl(neturl)
    # Connect to Flosight websocket to get data on new incoming blocks
    i=0
    newurl = serverlist[0]
    while(not socket_variable.connected):
        logger.info(f"While loop {i}")
        logger.info(f"Sleeping for 3 seconds before attempting reconnect to {newurl}")
        time.sleep(3)
        try:
            scanBlockchain()
            logger.info(f"Websocket endpoint which is being connected to {newurl}socket.io/socket.io.js")
            socket_variable.connect(f"{newurl}socket.io/socket.io.js")
            i=i+1
        # NOTE(review): bare except — a failure inside scanBlockchain() is also
        # treated as a connection failure and triggers a server switch here.
        except:
            logger.info(f"disconnect block: Failed reconnect attempt to {newurl}")
            newurl = switchNeturl(newurl)
            i=i+1
# MAIN EXECUTION STARTS
# Configuration of required variables
config = configparser.ConfigParser()
config.read('config.ini')

# Module-level logger: INFO+ goes to tracking.log, everything to the console.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

formatter = logging.Formatter('%(asctime)s:%(name)s:%(message)s')
file_handler = logging.FileHandler(os.path.join(config['DEFAULT']['DATA_PATH'],'tracking.log'))
file_handler.setLevel(logging.INFO)
file_handler.setFormatter(formatter)

stream_handler = logging.StreamHandler()
stream_handler.setFormatter(formatter)

logger.addHandler(file_handler)
logger.addHandler(stream_handler)


# Rule 1 - Read command line arguments to reset the databases as blank
# Rule 2 - Read config to set testnet/mainnet
# Rule 3 - Set flo blockexplorer location depending on testnet or mainnet
# Rule 4 - Set the local flo-cli path depending on testnet or mainnet ( removed this feature | Flosights are the only source )
# Rule 5 - Set the block number to scan from


# Read command line arguments
parser = argparse.ArgumentParser(description='Script tracks RMT using FLO data on the FLO blockchain - https://flo.cash')
parser.add_argument('-r', '--reset', nargs='?', const=1, type=int, help='Purge existing db and rebuild it from scratch')
# NOTE(review): --rebuild is parsed but never read anywhere in this file.
parser.add_argument('-rb', '--rebuild', nargs='?', const=1, type=int, help='Rebuild it')
args = parser.parse_args()

# Ensure the token / smart-contract database directories exist before scanning.
dirpath = os.path.join(config['DEFAULT']['DATA_PATH'], 'tokens')
if not os.path.isdir(dirpath):
    os.mkdir(dirpath)
dirpath = os.path.join(config['DEFAULT']['DATA_PATH'], 'smartContracts')
if not os.path.isdir(dirpath):
    os.mkdir(dirpath)

# Read configuration

# todo - write all assertions to make sure default configs are right
if (config['DEFAULT']['NET'] != 'mainnet') and (config['DEFAULT']['NET'] != 'testnet'):
    logger.error("NET parameter in config.ini invalid. Options are either 'mainnet' or 'testnet'. Script is exiting now")
    sys.exit(0)

# Specify mainnet and testnet server list for API calls and websocket calls
# Specify ADMIN ID
serverlist = None
if config['DEFAULT']['NET'] == 'mainnet':
    serverlist = config['DEFAULT']['MAINNET_FLOSIGHT_SERVER_LIST']
    APP_ADMIN = 'FNcvkz9PZNZM3HcxM1XTrVL4tgivmCkHp9'
elif config['DEFAULT']['NET'] == 'testnet':
    serverlist = config['DEFAULT']['TESTNET_FLOSIGHT_SERVER_LIST']
    APP_ADMIN = 'oWooGLbBELNnwq8Z5YmjoVjw8GhBGH3qSP'
serverlist = serverlist.split(',')
neturl = config['DEFAULT']['FLOSIGHT_NETURL']
api_url = neturl
tokenapi_sse_url = config['DEFAULT']['TOKENAPI_SSE_URL']

IGNORE_BLOCK_LIST = config['DEFAULT']['IGNORE_BLOCK_LIST'].split(',')
IGNORE_BLOCK_LIST = [int(s) for s in IGNORE_BLOCK_LIST]
IGNORE_TRANSACTION_LIST = config['DEFAULT']['IGNORE_TRANSACTION_LIST'].split(',')


# Delete database and smartcontract directory if reset is set to 1
if args.reset == 1:
    logger.info("Resetting the database. ")
    dirpath = os.path.join(config['DEFAULT']['DATA_PATH'], 'tokens')
    if os.path.exists(dirpath):
        shutil.rmtree(dirpath)
    os.mkdir(dirpath)
    dirpath = os.path.join(config['DEFAULT']['DATA_PATH'], 'smartContracts')
    if os.path.exists(dirpath):
        shutil.rmtree(dirpath)
    os.mkdir(dirpath)
    dirpath = os.path.join(config['DEFAULT']['DATA_PATH'], 'system.db')
    if os.path.exists(dirpath):
        os.remove(dirpath)
    dirpath = os.path.join(config['DEFAULT']['DATA_PATH'], 'latestCache.db')
    if os.path.exists(dirpath):
        os.remove(dirpath)

    # Read start block no
    # lastblockscanned is seeded to START_BLOCK - 1 so the first scan begins
    # exactly at START_BLOCK (scanBlockchain adds 1).
    startblock = int(config['DEFAULT']['START_BLOCK'])
    session = create_database_session_orm('system_dbs', {'db_name': "system"}, SystemBase)
    session.add(SystemData(attribute='lastblockscanned', value=startblock - 1))
    session.commit()
    session.close()

    # Initialize latest cache DB
    session = create_database_session_orm('system_dbs', {'db_name': "latestCache"}, LatestCacheBase)
    session.commit()
    session.close()


# Determine API source for block and transaction information
if __name__ == "__main__":
    # MAIN LOGIC STARTS
    # scan from the latest block saved locally to latest network block
    scanBlockchain()

    # At this point the script has updated to the latest block
    # Now we connect to flosight's websocket API to get information about the latest blocks
    # Neturl is the URL for Flosight API whose websocket endpoint is being connected to

    sio = socketio.Client()
    # Connect to a websocket endpoint and wait for further events
    reconnectWebsocket(sio)
    #sio.connect(f"{neturl}socket.io/socket.io.js")

    @sio.on('connect')
    def token_connect():
        current_time=datetime.now().strftime('%H:%M:%S')
        logger.info(f"Token Tracker has connected to websocket endpoint. Time : {current_time}")
        sio.emit('subscribe', 'inv')

    @sio.on('disconnect')
    def token_disconnect():
        current_time = datetime.now().strftime('%H:%M:%S')
        logger.info(f"disconnect block: Token Tracker disconnected from websocket endpoint. Time : {current_time}")
        logger.info('disconnect block: Triggering client disconnect')
        sio.disconnect()
        logger.info('disconnect block: Finished triggering client disconnect')
        reconnectWebsocket(sio)

    @sio.on('connect_error')
    def connect_error():
        current_time = datetime.now().strftime('%H:%M:%S')
        logger.info(f"connection error block: Token Tracker disconnected from websocket endpoint. Time : {current_time}")
        logger.info('connection error block: Triggering client disconnect')
        sio.disconnect()
        logger.info('connection error block: Finished triggering client disconnect')
        reconnectWebsocket(sio)

    @sio.on('block')
    def on_block(data):
        logger.info('New block received')
        logger.info(str(data))
        processBlock(blockhash=data)
def create_database_connection(type, parameters):
    """Return a raw SQLAlchemy connection to one of the tracker databases.

    type       -- 'token' | 'smart_contract' | 'system_dbs' | 'latest_cache'
    parameters -- dict with the keys the chosen type needs
                  ('token_name', or 'contract_name' + 'contract_address').

    Raises ValueError for an unknown type (previously an unknown type fell
    through all branches and crashed with UnboundLocalError on `engine`).
    """
    if type == 'token':
        path = os.path.join(config['DEFAULT']['DATA_PATH'], 'tokens', f"{parameters['token_name']}.db")
        # NOTE(review): echo=True logs every SQL statement — presumably a
        # leftover debugging aid; confirm before production use.
        engine = create_engine(f"sqlite:///{path}", echo=True)
    elif type == 'smart_contract':
        path = os.path.join(config['DEFAULT']['DATA_PATH'], 'smartContracts', f"{parameters['contract_name']}-{parameters['contract_address']}.db")
        engine = create_engine(f"sqlite:///{path}", echo=True)
    elif type == 'system_dbs':
        path = os.path.join(config['DEFAULT']['DATA_PATH'], "system.db")
        engine = create_engine(f"sqlite:///{path}", echo=False)
    elif type == 'latest_cache':
        path = os.path.join(config['DEFAULT']['DATA_PATH'], "latestCache.db")
        engine = create_engine(f"sqlite:///{path}", echo=False)
    else:
        raise ValueError(f"Unknown database type: {type}")

    return engine.connect()


def create_database_session_orm(type, parameters, base):
    """Return an ORM session bound to the requested database, creating the
    schema (base.metadata.create_all) if the file does not exist yet.

    type       -- 'token' | 'smart_contract' | 'system_dbs'
    parameters -- dict with 'token_name', 'contract_name'+'contract_address',
                  or 'db_name' respectively.
    base       -- declarative base whose tables must exist in that database.

    Raises ValueError for an unknown type (previously an unknown type fell
    through and crashed with UnboundLocalError on `engine`).
    """
    if type == 'token':
        path = os.path.join(config['DEFAULT']['DATA_PATH'], 'tokens', f"{parameters['token_name']}.db")
        engine = create_engine(f"sqlite:///{path}", echo=True)
    elif type == 'smart_contract':
        path = os.path.join(config['DEFAULT']['DATA_PATH'], 'smartContracts', f"{parameters['contract_name']}-{parameters['contract_address']}.db")
        engine = create_engine(f"sqlite:///{path}", echo=True)
    elif type == 'system_dbs':
        path = os.path.join(config['DEFAULT']['DATA_PATH'], f"{parameters['db_name']}.db")
        engine = create_engine(f"sqlite:///{path}", echo=False)
    else:
        raise ValueError(f"Unknown database type: {type}")

    base.metadata.create_all(bind=engine)
    return sessionmaker(bind=engine)()
ContractDeposits2, ContractTransactionHistory2, SystemBase, ActiveContracts, SystemData, ContractAddressMapping, TokenAddressMapping, DatabaseTypeMapping, TimeActions, RejectedContractTransactionHistory, RejectedTransactionHistory, LatestCacheBase, LatestTransactions, LatestBlocks +from ast import literal_eval +import os +import json +import logging +import sys + + +apppath = os.path.dirname(os.path.realpath(__file__)) + +# helper functions +def check_database_existence(type, parameters): + if type == 'token': + return os.path.isfile(f"./tokens/{parameters['token_name']}.db") + + if type == 'smart_contract': + return os.path.isfile(f"./smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db") + + +def create_database_connection(type, parameters): + if type == 'token': + engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True) + elif type == 'smart_contract': + engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True) + elif type == 'system_dbs': + engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False) + + connection = engine.connect() + return connection + + +def create_database_session_orm(type, parameters, base): + if type == 'token': + engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True) + base.metadata.create_all(bind=engine) + session = sessionmaker(bind=engine)() + + elif type == 'smart_contract': + engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True) + base.metadata.create_all(bind=engine) + session = sessionmaker(bind=engine)() + + elif type == 'system_dbs': + engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False) + base.metadata.create_all(bind=engine) + session = sessionmaker(bind=engine)() + else: + pdb.set_trace() + + return session + + +def inspect_parsed_flodata(parsed_flodata, 
def inspect_parsed_flodata(parsed_flodata, inputAddress, outputAddress):
    """Classify a parsed floData dict into the database(s) it touches.

    Returns a small summary dict whose keys depend on parsed_flodata['type']
    (and, for transfers, on 'transferType'); returns None for unrecognised
    types, matching the original fall-through behaviour.
    """
    if parsed_flodata['type'] == 'transfer':
        if parsed_flodata['transferType'] == 'token':
            return {'type': 'tokentransfer', 'token_db': f"{parsed_flodata['tokenIdentification']}", 'token_amount': f"{parsed_flodata['tokenAmount']}"}
        if parsed_flodata['transferType'] == 'smartContract':
            # BUGFIX: was parsed_flodata[''] (an empty key — guaranteed KeyError).
            # The accepting token lives under contract-conditions, mirroring
            # getDatabase_from_parsedFloData — TODO confirm against parser output.
            return {'type': 'smartContract', 'contract_db': f"{parsed_flodata['contractName']}-{outputAddress}", 'accepting_token_db': f"{parsed_flodata['contract-conditions']['accepting_token']}", 'receiving_token_db': f"{parsed_flodata['tokenIdentification']}", 'token_amount': f"{parsed_flodata['tokenAmount']}"}
        if parsed_flodata['transferType'] == 'swapParticipation':
            # BUGFIX: same empty-key lookup as the smartContract branch above.
            return {'type': 'swapParticipation', 'contract_db': f"{parsed_flodata['contractName']}-{outputAddress}", 'accepting_token_db': f"{parsed_flodata['contract-conditions']['accepting_token']}", 'receiving_token_db': f"{parsed_flodata['tokenIdentification']}", 'token_amount': f"{parsed_flodata['tokenAmount']}"}
        if parsed_flodata['transferType'] == 'nft':
            return {'type': 'nfttransfer', 'nft_db': f"{parsed_flodata['tokenIdentification']}", 'token_amount': f"{parsed_flodata['tokenAmount']}"}
    if parsed_flodata['type'] == 'tokenIncorporation':
        return {'type': 'tokenIncorporation', 'token_db': f"{parsed_flodata['tokenIdentification']}", 'token_amount': f"{parsed_flodata['tokenAmount']}"}
    if parsed_flodata['type'] == 'smartContractPays':
        # contract address, token | both of them come from
        # the contract's own structure table (tokenIdentification attribute).
        sc_session = create_database_session_orm('smart_contract', {'contract_name': f"{parsed_flodata['contractName']}", 'contract_address': f"{outputAddress}"}, ContractBase)
        token_db = sc_session.query(ContractStructure.value).filter(ContractStructure.attribute == 'tokenIdentification').first()[0]
        return {'type': 'smartContractPays', 'token_db': f"{token_db}", 'contract_db': f"{parsed_flodata['contractName']}-{outputAddress}", 'triggerCondition': f"{parsed_flodata['triggerCondition']}"}
    if parsed_flodata['type'] == 'smartContractIncorporation':
        return {'type': 'smartContractIncorporation', 'contract_db': f"{parsed_flodata['contractName']}-{outputAddress}", 'triggerCondition': f"{parsed_flodata['triggerCondition']}"}
{'type':'smartcontract_db', 'contract_db':f"{parsed_flodata['contractName']}-{outputAddress}", 'token_db':f"{token_db}"} + tokenlist.append(token_db) + contractlist.append(f"{parsed_flodata['contractName']}-{outputAddress}") + elif parsed_flodata['type'] == 'smartContractIncorporation': + #return {'type':'smartcontract_db', 'contract_db':f"{parsed_flodata['contractName']}-{outputAddress}"} + contractlist.append(f"{parsed_flodata['contractName']}-{outputAddress}") + elif parsed_flodata['type'] == 'tokenIncorporation': + #return {'type':'token_db', 'token_db':f"{parsed_flodata['tokenIdentification']}"} + tokenlist.append(parsed_flodata['tokenIdentification']) + + return tokenlist, contractlist + + +def calc_pid_amount(transferBalance, consumedpid): + consumedpid_sum = 0 + for key in list(consumedpid.keys()): + consumedpid_sum = consumedpid_sum + float(consumedpid[key]) + return transferBalance - consumedpid_sum + + +def find_addressBalance_from_floAddress(database_session, floAddress): + query_output = database_session.query(ActiveTable).filter(ActiveTable.address==floAddress, ActiveTable.addressBalance!=None).first() + if query_output is None: + return 0 + else: + return query_output.addressBalance + + +def rollback_address_balance_processing(db_session, senderAddress, receiverAddress, transferBalance): + # Find out total sum of address + # Find out the last entry where address balance is not null, if exists make it null + + # Calculation phase + current_receiverBalance = find_addressBalance_from_floAddress(db_session, receiverAddress) + current_senderBalance = find_addressBalance_from_floAddress(db_session ,senderAddress) + new_receiverBalance = current_receiverBalance - transferBalance + new_senderBalance = current_senderBalance + transferBalance + + # Insertion phase + # if new receiver balance is 0, then only insert sender address balance + # if receiver balance is not 0, then update previous occurence of the receiver address and sender balance + # for sender, 
def find_input_output_addresses(transaction_data):
    """Extract the single (input_address, output_address) pair of a tracked tx.

    Rules enforced (mirrors the main parser):
      41 - all vin entries must spend from one address;
      42 - at most 2 vout entries are allowed;
      43 - of the vouts, one must be change back to the input address (or the
           whole tx pays back to the sender), the other is the receiver.

    Returns (input_address, output_address) on success, or 0 when any rule
    rejects the transaction (the rejection reason is printed).
    """
    vin_entries = [[vin["address"][0], float(vin["value"])] for vin in transaction_data["vin"]]
    total_input_value = float(transaction_data["valueIn"])

    # todo Rule 41 - Check if all the addresses in a transaction on the input side are the same
    input_address = vin_entries[0][0]
    for entry in vin_entries[1:]:
        if entry[0] != input_address:
            print(f"System has found more than one address as part of vin. Transaction {transaction_data['txid']} is rejected")
            return 0

    # todo Rule 42 - If the number of vout is more than 2, reject the transaction
    if len(transaction_data["vout"]) > 2:
        print(f"System has found more than 2 address as part of vout. Transaction {transaction_data['txid']} is rejected")
        return 0

    # todo Rule 43 - One vout is the receiver, the other is the sender's change;
    # if every pubkeyhash vout pays back to the input address, the tx is self-addressed.
    candidate_outputs = []
    pubkeyhash_count = 0
    change_count = 0
    for vout in transaction_data["vout"]:
        if vout["scriptPubKey"]["type"] != "pubkeyhash":
            continue
        pubkeyhash_count += 1
        if vout["scriptPubKey"]["addresses"][0] == input_address:
            change_count += 1
            continue
        candidate_outputs.append([vout["scriptPubKey"]["addresses"][0], vout["value"]])

    if pubkeyhash_count == change_count:
        output_address = input_address
    elif len(candidate_outputs) != 1:
        print(f"Transaction's change is not coming back to the input address. Transaction {transaction_data['txid']} is rejected")
        return 0
    else:
        output_address = candidate_outputs[0][0]

    return input_address, output_address
def rollback_database(blockNumber, dbtype, dbname):
    """Roll a token or smart-contract database back to its state at blockNumber.

    dbtype 'token': dbname is the token name. The newest ActiveTable rows are
    popped one by one (restoring parent/consumed pid balances and address
    balances) until the newest remaining row is at or below blockNumber, then
    TransactionHistory / TransferLogs rows above blockNumber are purged.

    dbtype 'smartcontract': dbname is a dict with 'contract_name' and
    'contract_address'; all contract tables are purged above blockNumber.
    """
    if dbtype == 'token':
        # Connect to database
        db_session = create_database_session_orm('token', {'token_name':dbname}, TokenBase)
        # Pop the newest ActiveTable row repeatedly until the tip row is old enough.
        while(True):
            subqry = db_session.query(func.max(ActiveTable.id))
            activeTable_entry = db_session.query(ActiveTable).filter(ActiveTable.id == subqry).first()
            if activeTable_entry.blockNumber <= blockNumber:
                break
            outputAddress = activeTable_entry.address
            transferAmount = activeTable_entry.transferBalance
            inputAddress = None

            # Find out consumedpid and partially consumed pids
            parentid = None
            orphaned_parentid = None
            consumedpid = None
            if activeTable_entry.parentid is not None:
                parentid = activeTable_entry.parentid
            if activeTable_entry.orphaned_parentid is not None:
                orphaned_parentid = activeTable_entry.orphaned_parentid
            if activeTable_entry.consumedpid is not None:
                # consumedpid is stored as a stringified dict {pid: amount}.
                consumedpid = literal_eval(activeTable_entry.consumedpid)

            # filter out based on consumped pid and partially consumed pids
            if parentid is not None:
                # find query in activeTable with the parentid
                activeTable_pid_entry = db_session.query(ActiveTable).filter(ActiveTable.id == parentid).all()[0]
                # calculate the amount taken from parentid
                activeTable_pid_entry.transferBalance = activeTable_pid_entry.transferBalance + calc_pid_amount(activeTable_entry.transferBalance, consumedpid)
                inputAddress = activeTable_pid_entry.address

            if orphaned_parentid is not None:
                try:
                    orphaned_parentid_entry = db_session.query(ConsumedTable).filter(ConsumedTable.id == orphaned_parentid).all()[0]
                    inputAddress = orphaned_parentid_entry.address
                # NOTE(review): `pdb` is not imported in this module, so this
                # debug hook raises NameError; the bare except also swallows the
                # real lookup failure. Remove or `import pdb` and narrow the except.
                except:
                    pdb.set_trace()

            if consumedpid != {}:
                # each key of the pid is totally consumed and with its corresponding value written in the end
                # how can we maintain the order of pid consumption? The bigger pid number will be towards the end
                # 1. pull the pid number and its details from the consumedpid table
                for key in list(consumedpid.keys()):
                    consumedpid_entry = db_session.query(ConsumedTable).filter(ConsumedTable.id == key).all()[0]
                    newTransferBalance = consumedpid_entry.transferBalance + consumedpid[key]
                    # Resurrect the fully consumed pid back into ActiveTable with its balance restored.
                    db_session.add(ActiveTable(id=consumedpid_entry.id, address=consumedpid_entry.address, parentid=consumedpid_entry.parentid ,consumedpid=consumedpid_entry.consumedpid, transferBalance=newTransferBalance, addressBalance = None, orphaned_parentid=consumedpid_entry.orphaned_parentid ,blockNumber=consumedpid_entry.blockNumber))
                    inputAddress = consumedpid_entry.address
                    db_session.delete(consumedpid_entry)

                    # Re-attach children that were orphaned when this pid was consumed.
                    orphaned_parentid_entries = db_session.query(ActiveTable).filter(ActiveTable.orphaned_parentid == key).all()
                    if len(orphaned_parentid_entries) != 0:
                        for orphan_entry in orphaned_parentid_entries:
                            orphan_entry.parentid = orphan_entry.orphaned_parentid
                            orphan_entry.orphaned_parentid = None

                    orphaned_parentid_entries = db_session.query(ConsumedTable).filter(ConsumedTable.orphaned_parentid == key).all()
                    if len(orphaned_parentid_entries) != 0:
                        for orphan_entry in orphaned_parentid_entries:
                            orphan_entry.parentid = orphan_entry.orphaned_parentid
                            orphan_entry.orphaned_parentid = None

            # update addressBalance
            rollback_address_balance_processing(db_session, inputAddress, outputAddress, transferAmount)

            # delete operations
            # delete the last row in activeTable and transactionTable
            db_session.delete(activeTable_entry)

        db_session.query(TransactionHistory).filter(TransactionHistory.blockNumber > blockNumber).delete()
        db_session.query(TransferLogs).filter(TransferLogs.blockNumber > blockNumber).delete()
        db_session.commit()

    elif dbtype == 'smartcontract':
        db_session = create_database_session_orm('smart_contract', {'contract_name':f"{dbname['contract_name']}", 'contract_address':f"{dbname['contract_address']}"}, ContractBase)
        db_session.query(ContractTransactionHistory).filter(ContractTransactionHistory.blockNumber > blockNumber).delete()
        db_session.query(ContractParticipants).filter(ContractParticipants.blockNumber > blockNumber).delete()
        db_session.query(ContractDeposits).filter(ContractDeposits.blockNumber > blockNumber).delete()
        db_session.query(ConsumedInfo).filter(ConsumedInfo.blockNumber > blockNumber).delete()
        db_session.query(ContractWinners).filter(ContractWinners.blockNumber > blockNumber).delete()
        db_session.commit()
{'db_name':'system'}, SystemBase) + databases_to_delete = db_session.query(DatabaseTypeMapping.db_name, DatabaseTypeMapping.db_type).filter(DatabaseTypeMapping.db_name == dbname).all() + + db_names, db_type = zip(*databases_to_delete) + + for database in databases_to_delete: + if database[1] in ['token','infinite-token','nft']: + dirpath = os.path.join(apppath, 'tokens', f"{dbname}.db") + if os.path.exists(dirpath): + os.remove(dirpath) + elif database[1] in ['smartcontract']: + dirpath = os.path.join(apppath, 'smartContracts', f"{dbname}.db") + if os.path.exists(dirpath): + os.remove(dirpath) + return db_names + + +def system_database_deletions(blockNumber): + + latestcache_session = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase) + + # delete latestBlocks & latestTransactions entry + latestcache_session.query(LatestBlocks).filter(LatestBlocks.blockNumber > blockNumber).delete() + latestcache_session.query(LatestTransactions).filter(LatestTransactions.blockNumber > blockNumber).delete() + + # delete activeContracts, contractAddressMapping, DatabaseAddressMapping, rejectedContractTransactionHistory, rejectedTransactionHistory, tokenAddressMapping + systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase) + activeContracts_session = systemdb_session.query(ActiveContracts).filter(ActiveContracts.blockNumber > blockNumber).delete() + contractAddressMapping_queries = systemdb_session.query(ContractAddressMapping).filter(ContractAddressMapping.blockNumber > blockNumber).delete() + databaseTypeMapping_queries = systemdb_session.query(DatabaseTypeMapping).filter(DatabaseTypeMapping.blockNumber > blockNumber).delete() + rejectedContractTransactionHistory_queries = systemdb_session.query(RejectedContractTransactionHistory).filter(RejectedContractTransactionHistory.blockNumber > blockNumber).delete() + rejectedTransactionHistory_queries = 
systemdb_session.query(RejectedTransactionHistory).filter(RejectedTransactionHistory.blockNumber > blockNumber).delete() + tokenAddressMapping_queries = systemdb_session.query(TokenAddressMapping).filter(TokenAddressMapping.blockNumber > blockNumber).delete() + timeAction_queries = systemdb_session.query(TimeActions).filter(TimeActions.blockNumber > blockNumber).delete() + systemdb_session.query(SystemData).filter(SystemData.attribute=='lastblockscanned').update({SystemData.value:str(blockNumber)}) + + latestcache_session.commit() + systemdb_session.commit() + latestcache_session.close() + systemdb_session.close() + + +# Take input from user reg how many blocks to go back in the blockchain +parser = argparse.ArgumentParser(description='Script tracks RMT using FLO data on the FLO blockchain - https://flo.cash') +parser.add_argument('-rb', '--toblocknumer', nargs='?', type=int, help='Rollback the script to the specified block number') +parser.add_argument('-r', '--blockcount', nargs='?', type=int, help='Rollback the script to the number of blocks specified') +args = parser.parse_args() + +# Get all the transaction and blockdetails from latestCache reg the transactions in the block +systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase) +lastscannedblock = systemdb_session.query(SystemData.value).filter(SystemData.attribute=='lastblockscanned').first() +systemdb_session.close() +lastscannedblock = int(lastscannedblock.value) +if (args.blockcount and args.toblocknumber): + print("You can only specify one of the options -b or -c") + sys.exit(0) +elif args.blockcount: + rollback_block = lastscannedblock - args.blockcount +elif args.toblocknumer: + rollback_block = args.toblocknumer +else: + print("Please specify the number of blocks to rollback") + sys.exit(0) + + +def return_token_contract_set(rollback_block): + latestcache_session = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase) + 
latestBlocks = latestcache_session.query(LatestBlocks).filter(LatestBlocks.blockNumber > rollback_block).all() + lblocks_dict = {} + blocknumber_list = [] + for block in latestBlocks: + block_dict = block.__dict__ + lblocks_dict[block_dict['blockNumber']] = {'blockHash':f"{block_dict['blockHash']}", 'jsonData':f"{block_dict['jsonData']}"} + blocknumber_list.insert(0,block_dict['blockNumber']) + + tokendb_set = set() + smartcontractdb_set = set() + + for blockindex in blocknumber_list: + # Find the all the transactions that happened in this block + try: + block_tx_hashes = json.loads(lblocks_dict[str(blockindex)]['jsonData'])['tx'] + except: + print(f"Block {blockindex} is not found in latestCache. Skipping this block") + continue + + for txhash in block_tx_hashes: + # Get the transaction details + transaction = latestcache_session.query(LatestTransactions).filter(LatestTransactions.transactionHash == txhash).first() + transaction_data = json.loads(transaction.jsonData) + inputAddress, outputAddress = find_input_output_addresses(transaction_data) + parsed_flodata = literal_eval(transaction.parsedFloData) + tokenlist, contractlist = getDatabase_from_parsedFloData(parsed_flodata, inputAddress, outputAddress) + + for token in tokenlist: + tokendb_set.add(token) + + for contract in contractlist: + smartcontractdb_set.add(contract) + + return tokendb_set, smartcontractdb_set + + +def initiate_rollback_process(): + ''' + tokendb_set, smartcontractdb_set = return_token_contract_set(rollback_block) + ''' + + # Connect to system.db + systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase) + db_names = systemdb_session.query(DatabaseTypeMapping).all() + for db in db_names: + if db.db_type in ['token', 'nft', 'infinite-token']: + if db.blockNumber > rollback_block: + delete_database(rollback_block, f"{db.db_name}") + else: + rollback_database(rollback_block, 'token', f"{db.db_name}") + elif db.db_type in ['smartcontract']: + if 
db.blockNumber > rollback_block: + delete_database(rollback_block, f"{db.db_name}") + else: + db_split = db.db_name.rsplit('-',1) + db_name = {'contract_name':db_split[0], 'contract_address':db_split[1]} + rollback_database(rollback_block, 'smartcontract', db_name) + + ''' + for token_db in tokendb_set: + token_session = create_database_session_orm('token', {'token_name': token_db}, TokenBase) + if token_session.query(TransactionHistory.blockNumber).first()[0] > rollback_block: + delete_database(rollback_block, token_db) + token_session.commit() + else: + rollback_database(rollback_block, 'token', token_db) + token_session.close() + + for contract_db in smartcontractdb_set: + contract_session = create_database_session_orm('smartcontract', {'db_name': contract_db}, ContractBase) + if contract_session.query(TransactionHistory.blockNumber).first()[0] > rollback_block: + delete_database(rollback_block, contract_db) + contract_session.commit() + else: + rollback_database(rollback_block, 'smartcontract', contract_db) + contract_session.close() + ''' + + system_database_deletions(rollback_block) + + # update lastblockscanned in system_dbs + latestCache_session = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase) + lastblockscanned = latestCache_session.query(LatestBlocks.blockNumber).order_by(LatestBlocks.id.desc()).first()[0] + latestCache_session.close() + + systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase) + lastblockscanned_query = systemdb_session.query(SystemData).filter(SystemData.attribute=='lastblockscanned').first() + lastblockscanned_query.value = rollback_block + systemdb_session.commit() + systemdb_session.close() + + +if __name__ == "__main__": + systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase) + lastblockscanned_query = systemdb_session.query(SystemData).filter(SystemData.attribute=='lastblockscanned').first() + if(rollback_block 
> int(lastblockscanned_query.value)): + print('Rollback block is greater than the last scanned block\n Exiting ....') + sys.exit(0) + else: + initiate_rollback_process() \ No newline at end of file