From c111b73c82bb7c08910b42ddae37255f09ce5187 Mon Sep 17 00:00:00 2001
From: Vivek Teega
Date: Wed, 12 Jan 2022 20:27:12 +0530
Subject: [PATCH] Added parsing for NFTs and Infinite tokens

---
 models.py                     |  9 ++++
 parsing.py                    | 95 +++++++++++++++++++++++++++++++----
 planning.py                   | 47 ++++++++++++++++-
 tracktokens_smartcontracts.py | 66 ++++++++++++++++++++++++
 4 files changed, 207 insertions(+), 10 deletions(-)

diff --git a/models.py b/models.py
index 271920e..09b6078 100644
--- a/models.py
+++ b/models.py
@@ -207,6 +207,15 @@ class TokenAddressMapping(SystemBase):
     blockHash = Column('blockHash', String)
 
 
+class DatabaseAddressMapping(SystemBase):
+    __tablename__ = "databaseAddressMapping"
+
+    id = Column('id', Integer, primary_key=True)
+    db_name = Column('db_name', String)
+    db_type = Column('db_type', String)
+    keyword = Column('keyword', String)
+    object_format = Column('object_format', String)
+
 class LatestTransactions(LatestCacheBase):
     __tablename__ = "latestTransactions"
     id = Column('id', Integer, primary_key=True)
diff --git a/parsing.py b/parsing.py
index 0652321..98d89c5 100644
--- a/parsing.py
+++ b/parsing.py
@@ -272,7 +272,32 @@ def outputreturn(*argv):
             'contractAddress': argv[5]
         }
         return remove_empty_from_dict(parsed_data)
-
+    elif argv[0] == 'nft_create':
+        parsed_data = {
+            'type': 'nftIncorporation',
+            'flodata': argv[1],  # string
+            'tokenIdentification': argv[2],  # hashList[0][:-1]
+            'tokenAmount': argv[3],  # initTokens
+            'nftHash': argv[4]  # nftHash
+        }
+        return parsed_data
+    elif argv[0] == 'nft_transfer':
+        parsed_data = {
+            'type': 'transfer',
+            'transferType': 'nft',
+            'flodata': argv[1],  # string
+            'tokenIdentification': argv[2],  # hashList[0][:-1]
+            'tokenAmount': argv[3]  # initTokens
+        }
+        return parsed_data
+    elif argv[0] == 'infinite_token_create':
+        parsed_data = {
+            'type': 'infiniteTokenIncorporation',
+            'flodata': argv[1],  # string
+            'tokenIdentification': argv[2]  # hashList[0][:-1]
+        }
+        return parsed_data
+
 
 def extract_specialcharacter_words(rawstring, special_characters):
     wordList = []
@@ -542,6 +567,14 @@ def extract_special_character_word(special_character_list, special_character):
     return False
 
 
+def extract_NFT_hash(clean_text):
+    nft_hash = re.search(r"(?:0[xX])?[0-9a-fA-F]{64}", clean_text)
+    if nft_hash is None:
+        return False
+    else:
+        return nft_hash.group(0)
+
+
 def find_original_case(contract_address, original_text):
     dollar_word = extract_specialcharacter_words(original_text,["$"])
     if len(dollar_word)==1 and dollar_word[0][:-1].lower()==contract_address:
@@ -713,6 +746,13 @@ def check_existence_of_keyword(inputlist, keywordlist):
     return True
 
 
+def check_word_existence_instring(word, text):
+    word_exists = re.search(fr"\b{word}\b", text)
+    if word_exists is None:
+        return False
+    else:
+        return word_exists.group(0)
+
 send_category = ['transfer', 'send', 'give'] # keep everything lowercase
 create_category = ['incorporate', 'create', 'start'] # keep everything lowercase
 deposit_category = ['submit','deposit']
@@ -836,7 +876,15 @@ text_list = [
 ]
 
 text_list1 = [
-    '''Create Smart Contract with the name India-elections-2019@ of the type one-time-event* using the asset rmt# at the address F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1$ with contract-conditions: (1) contractAmount=0.001rmt (2) userChoices=Narendra Modi wins| Narendra Modi loses (3) expiryTime= Wed May 22 2019 21:00:00 GMT+0530'''
+
+    'create usd# as infinite-token',
+    'transfer 10 usd#',
+
+    'Create 100 albumname# as NFT with 2CF24DBA5FB0A30E26E83B2AC5B9E29E1B161E5C1FA7425E73043362938B9824 as asset hash',
+    'Transfer 10 albumname# nft',
+
+    'Create 400 rmt#',
+    'Transfer 20 rmt#'
 ]
 
 logger = logging.getLogger(__name__)
@@ -873,15 +921,40 @@ def parse_flodata(text, blockinfo, net):
         if not check_regex("^[A-Za-z][A-Za-z0-9_-]*[A-Za-z0-9]$", tokenname):
             return outputreturn('noise')
 
-        tokenamount = apply_rule1(extractAmount_rule_new, processed_text)
-        if not tokenamount:
-            return outputreturn('noise')
+        isNFT = check_word_existence_instring('nft', processed_text)
+
+        isInfinite = check_word_existence_instring('infinite-token', processed_text)
+
+        tokenamount = apply_rule1(extractAmount_rule_new, processed_text)
+
+        ## A message cannot be an NFT, a normal token and an infinite token at the same time; reject conflicting combinations
+        # if it is an NFT, tokenamount has to be an integer and the infinite keyword should not be present
+        # if it is a normal token, isNFT and isInfinite should be None/False and a token amount has to be present
+        # if it is an infinite token, tokenamount should be None and isNFT should be None/False
+
+        ##################################################
+
+        if (not tokenamount and not isInfinite) or (isNFT and tokenamount and not float(tokenamount).is_integer()) or (isInfinite and (tokenamount or isNFT)):
+            return outputreturn('noise')
 
         operation = apply_rule1(selectCategory, processed_text, send_category, create_category)
         if operation == 'category1' and tokenamount is not None:
-            return outputreturn('token_transfer',f"{processed_text}", f"{tokenname}", tokenamount)
-        elif operation == 'category2' and tokenamount is not None:
-            return outputreturn('token_incorporation',f"{processed_text}", f"{first_classification['wordlist'][0][:-1]}", tokenamount)
+            if isNFT:
+                return outputreturn('nft_transfer',f"{processed_text}", f"{tokenname}", tokenamount)
+            else:
+                return outputreturn('token_transfer',f"{processed_text}", f"{tokenname}", tokenamount)
+        elif operation == 'category2':
+            if isInfinite:
+                return outputreturn('infinite_token_create',f"{processed_text}", f"{tokenname}")
+            else:
+                if tokenamount is None:
+                    return outputreturn('noise')
+                if isNFT:
+                    nft_hash = extract_NFT_hash(clean_text)
+                    if nft_hash is False:
+                        return outputreturn('noise')
+                    return outputreturn('nft_create',f"{processed_text}", f"{tokenname}", tokenamount, f"{nft_hash}")
+                else:
+                    return outputreturn('token_incorporation',f"{processed_text}", f"{first_classification['wordlist'][0][:-1]}", tokenamount)
         else:
             return outputreturn('noise')
 
@@ -1050,4 +1123,8 @@
             return outputreturn('noise')
         return outputreturn('continuos-event-token-swap-incorporation', f"{contract_token}", f"{contract_name}", f"{contract_address}", f"{clean_text}", f"{contract_conditions['subtype']}", f"{contract_conditions['accepting_token']}", f"{contract_conditions['selling_token']}", f"{contract_conditions['priceType']}", f"{contract_conditions['price']}")
 
-    return outputreturn('noise')
\ No newline at end of file
+    return outputreturn('noise')
+
+for text in text_list1:
+    return_data = parse_flodata(text, {}, 'mainnet')
+    print(return_data)
\ No newline at end of file
diff --git a/planning.py b/planning.py
index 381eeb9..da4c2f7 100644
--- a/planning.py
+++ b/planning.py
@@ -105,7 +105,7 @@ f"{tokenamount}" = find_number_function
     ['@','*','#','$',':'] - Smart contract creation - ote-timebased
     ['@','*','#','$',':','#']
 
-    ['#','@'] - Smart contract particiation - ote-timebased
+    ['#','@'] - Smart contract participation - ote-timebased
 
     ['#','@','$']
 
@@ -191,4 +191,49 @@ Write checks for conditions inside contract
 conditions
 Serious error handling for contract-conditions
 * 2222:00 gives error
 * contractAmount = 0.022rt gives error | check if space is allowed between 0.022 rt
+'''
+
+
+'''
+
+    What we need for NFT contract code
+
+    1. NFT-address mapping table in system.db
+    2. New main transaction category class
+    3. New sub-category for the transfer category class, i.e. NFT transfer
+
+
+    NFT Smart Contract end cases
+    1. NFT against an address
+    2. NFT against another NFT
+    3.
+
+    flodata format for NFT
+    Create 1000 NFT with bioscope# with nft-details: (1) name = 'bioscope' (2) hash =
+
+    Create 100 albumname# as NFT with 2CF24DBA5FB0A30E26E83B2AC5B9E29E1B161E5C1FA7425E73043362938B9824 as asset hash
+    [#]
+
+    Rules
+    -----
+    DIFFERENCES BETWEEN TOKEN AND NFT
+    System.db will have a different entry
+    In creation, the word nft will be extra
+    NFT hash must be present
+    Creation and transfer amounts: only the integer part will be taken
+    Keyword nft must be present in both creation and transfer
+
+'''
+
+'''
+
+Need infinite tokens to create stablecoins, so they can be created without worrying about an upper limit on supply
+
+'''
+
+'''
+Create another table in system.db; it simply records, in one place, what every database is
+
+Database_name    Database type
+'''
\ No newline at end of file
diff --git a/tracktokens_smartcontracts.py b/tracktokens_smartcontracts.py
index 44d32e0..91467fb 100755
--- a/tracktokens_smartcontracts.py
+++ b/tracktokens_smartcontracts.py
@@ -1437,6 +1437,8 @@ def processTransaction(transaction_data, parsed_data):
                                           headers=headers)'''
                 return 0
 
+        elif parsed_data['transferType'] == 'nft':
+            pass
 
     # todo Rule 47 - If the parsed data type is token incorporation, then check if the name hasn't been taken already
     #  if it has been taken then reject the incorporation. Else incorporate it
@@ -1464,6 +1466,7 @@ def processTransaction(transaction_data, parsed_data):
             # add it to token address to token mapping db table
             connection = create_database_connection('system_dbs', {'db_name':'system'})
             connection.execute(f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{inputadd}', '{parsed_data['tokenIdentification']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}');")
+            connection.execute(f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{inputadd}', '{parsed_data['tokenIdentification']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}');")
             connection.close()
 
             updateLatestTransaction(transaction_data, parsed_data)
@@ -2381,6 +2384,69 @@ def processTransaction(transaction_data, parsed_data):
            '''r = requests.post(tokenapi_sse_url, json={'message': f"Error | Contract transaction {transaction_data['txid']} rejected as a smartcontract with same name {parsed_data['contractName']}-{parsed_data['contractAddress']} dosent exist "},
                                headers=headers)'''
             return 0
 
+    elif parsed_data['type'] == 'nftIncorporation':
+        '''
+        DIFFERENCES BETWEEN TOKEN AND NFT
+        System.db will have a different entry
+        In creation, the word nft will be extra
+        NFT hash must be present
+        Creation and transfer amounts: only the integer part will be taken
+        Keyword nft must be present in both creation and transfer
+        '''
+        if not check_database_existence('token', {'token_name': f"{parsed_data['tokenIdentification']}"}):
+            session = create_database_session_orm('token', {'token_name': f"{parsed_data['tokenIdentification']}"}, Base)
+            session.add(ActiveTable(address=inputlist[0], parentid=0, transferBalance=parsed_data['tokenAmount']))
+            session.add(TransferLogs(sourceFloAddress=inputadd, destFloAddress=outputlist[0],
+                                     transferAmount=parsed_data['tokenAmount'], sourceId=0, destinationId=1,
+                                     blockNumber=transaction_data['blockheight'], time=transaction_data['blocktime'],
+                                     transactionHash=transaction_data['txid']))
+            blockchainReference = neturl + 'tx/' + transaction_data['txid']
+            session.add(TransactionHistory(sourceFloAddress=inputadd, destFloAddress=outputlist[0],
+                                           transferAmount=parsed_data['tokenAmount'],
+                                           blockNumber=transaction_data['blockheight'],
+                                           blockHash=transaction_data['blockhash'],
+                                           time=transaction_data['blocktime'],
+                                           transactionHash=transaction_data['txid'],
+                                           blockchainReference=blockchainReference,
+                                           jsonData=json.dumps(transaction_data), transactionType=parsed_data['type'],
+                                           parsedFloData=json.dumps(parsed_data)))
+            session.commit()
+            session.close()
+
+            # add it to token address to token mapping db table
+            connection = create_database_connection('system_dbs', {'db_name':'system'})
+            connection.execute(f"INSERT INTO tokenAddressMapping (tokenAddress, token, transactionHash, blockNumber, blockHash) VALUES ('{inputadd}', '{parsed_data['tokenIdentification']}', '{transaction_data['txid']}', '{transaction_data['blockheight']}', '{transaction_data['blockhash']}');")
+            nft_data = json.dumps({'sha256_hash': f"{parsed_data['nftHash']}"})
+            connection.execute(f"INSERT INTO databaseAddressMapping (db_name, db_type, keyword, object_format) VALUES ('{parsed_data['tokenIdentification']}', 'nft', '', '{nft_data}');")
+            connection.close()
+
+            updateLatestTransaction(transaction_data, parsed_data)
+
+            pushData_SSEapi(f"Token | Successfully incorporated token {parsed_data['tokenIdentification']} at transaction {transaction_data['txid']}")
+            return 1
+        else:
+            logger.info(f"Transaction {transaction_data['txid']} rejected as a token with the name {parsed_data['tokenIdentification']} has already been incorporated")
+            session = create_database_session_orm('system_dbs', {'db_name': "system"}, SystemBase)
+            blockchainReference = neturl + 'tx/' + transaction_data['txid']
+            session.add(RejectedTransactionHistory(tokenIdentification=parsed_data['tokenIdentification'],
+                                                   sourceFloAddress=inputadd, destFloAddress=outputlist[0],
+                                                   transferAmount=parsed_data['tokenAmount'],
+                                                   blockNumber=transaction_data['blockheight'],
+                                                   blockHash=transaction_data['blockhash'],
+                                                   time=transaction_data['blocktime'],
+                                                   transactionHash=transaction_data['txid'],
+                                                   blockchainReference=blockchainReference,
+                                                   jsonData=json.dumps(transaction_data),
+                                                   rejectComment=f"Transaction {transaction_data['txid']} rejected as a token with the name {parsed_data['tokenIdentification']} has already been incorporated",
+                                                   transactionType=parsed_data['type'],
+                                                   parsedFloData=json.dumps(parsed_data)
+                                                   ))
+            session.commit()
+            session.close()
+            pushData_SSEapi(f"Error | Token incorporation rejected at transaction {transaction_data['txid']} as token {parsed_data['tokenIdentification']} already exists")
+            return 0
+
+
 '''
 {'type': 'smartContractDeposit', 'tokenIdentification': hashList[0][:-1], 'contractName': atList[0][:-1], 'flodata': string, 'depositConditions': deposit_conditions}
 '''
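
A quick way to sanity-check the new parsing branches is to assert on the 'type' field they emit. The sketch below is an editor's illustration and not part of the diff above: the file name nft_parsing_smoketest.py is hypothetical, the call signature parse_flodata(text, {}, 'mainnet') is the one used by the test loop added at the end of parsing.py, the expected 'type' values come straight from the nft_create, nft_transfer and infinite_token_create branches added to outputreturn, and it assumes the classifier resolves these sample strings the way the patch intends.

# nft_parsing_smoketest.py -- illustration only, not applied by this patch
from parsing import parse_flodata

# Expected 'type' values for the new sample strings in text_list1, taken from
# the parsed_data dictionaries built in outputreturn().
expected_types = {
    'create usd# as infinite-token': 'infiniteTokenIncorporation',
    'Create 100 albumname# as NFT with 2CF24DBA5FB0A30E26E83B2AC5B9E29E1B161E5C1FA7425E73043362938B9824 as asset hash': 'nftIncorporation',
    'Transfer 10 albumname# nft': 'transfer',  # the nft_transfer branch also sets 'transferType': 'nft'
}

for flodata, expected in expected_types.items():
    parsed = parse_flodata(flodata, {}, 'mainnet')
    assert parsed['type'] == expected, (flodata, parsed)
print('new parsing branches behave as expected')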
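
On the storage side, the nftIncorporation branch records the NFT hash in the new databaseAddressMapping table as a JSON blob in object_format. A minimal read-back sketch, again illustrative rather than part of the patch: lookup_nft_hash is a hypothetical helper name, and the session argument is assumed to be the system-DB session produced by the create_database_session_orm('system_dbs', {'db_name': "system"}, SystemBase) call already used in tracktokens_smartcontracts.py.

import json
from models import DatabaseAddressMapping

def lookup_nft_hash(session, token_name):
    # Hypothetical helper: return the sha256 hash stored for an NFT token,
    # or None if the token has no databaseAddressMapping entry of type 'nft'.
    row = (session.query(DatabaseAddressMapping)
                  .filter(DatabaseAddressMapping.db_name == token_name,
                          DatabaseAddressMapping.db_type == 'nft')
                  .first())
    if row is None:
        return None
    # object_format holds the JSON written by the nftIncorporation branch,
    # e.g. {"sha256_hash": "2CF2...9824"}.
    return json.loads(row.object_format).get('sha256_hash')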