Compare commits
199 Commits
master
...
mysql-migr
| Author | SHA1 | Date | |
|---|---|---|---|
| a6364d3208 | |||
| 21f421a5a4 | |||
| 82d296c0e6 | |||
| bfd10addba | |||
| b19f2b0beb | |||
| b69097c64b | |||
| b2a7ce64b4 | |||
| a724c1b22d | |||
| e44e5e7192 | |||
| b6cf2080e1 | |||
| 0fd8d18162 | |||
| 56bd2af9cd | |||
| 71d7bf07b4 | |||
| 457b10ef8e | |||
| 51b130550b | |||
| cc8189a00d | |||
| 6615d3ead1 | |||
| 64fb3e1ec4 | |||
| 28b3a9902a | |||
| 46bcd30252 | |||
| e8ae8163ec | |||
| c0cea69528 | |||
| 13cb957de6 | |||
| a93e6633ef | |||
| 4febedb1d2 | |||
| 1cb04982cc | |||
| f920997f69 | |||
| e57e1e1072 | |||
| ccfbe0dd59 | |||
| 67ff6a1748 | |||
| 6f50c3d03d | |||
| 5b031dae34 | |||
| 38d90659be | |||
| 0dd32c7803 | |||
| 4fa48c808c | |||
| e3b197cd4b | |||
| 4234abe59a | |||
| 3d2654149b | |||
|
|
19198ddc82 | ||
| c6cd23fc9d | |||
| e9bc8546c2 | |||
| a83ed33a99 | |||
| bb77a9723b | |||
| 08168cac5e | |||
| 78b05be204 | |||
| dfc2a85da4 | |||
| 4e22b846fb | |||
| b7c9496eae | |||
| cfaf2e2ed4 | |||
| 396e916a5b | |||
| 6a07e2bbc1 | |||
| 6f6f3ce420 | |||
| e011f7208a | |||
| 9c7387795f | |||
| 44aa304f61 | |||
| 63a3c2344c | |||
|
|
f62b3d196b | ||
| f20b8d9e8f | |||
| fbc534b477 | |||
| b396bc1f74 | |||
|
|
9eda14ae6a | ||
|
|
e9ad3307f6 | ||
|
|
a8e885f729 | ||
|
|
4a254336b8 | ||
|
|
5a7ce5bd99 | ||
|
|
993bf6e1b8 | ||
|
|
fa9798d1f0 | ||
|
|
6d56c2a1e0 | ||
|
|
dce543284c | ||
|
|
355364badb | ||
|
|
e94506bf14 | ||
|
|
796e84cc05 | ||
|
|
b84456602d | ||
|
|
bf4684cbe7 | ||
|
|
0966070239 | ||
|
|
cd1b36a246 | ||
| 15bc31c4a7 | |||
|
|
e0d013cd63 | ||
|
|
a35b35d22e | ||
|
|
ba80ae4e6a | ||
|
|
d794e65667 | ||
|
|
752f999ec8 | ||
|
|
2132dd94fe | ||
| 42dc2d6e98 | |||
| 989251127c | |||
|
|
073927bccd | ||
|
|
20c0a4bf76 | ||
|
|
497e7937d1 | ||
|
|
b82e35153f | ||
|
|
7abafd2c2c | ||
|
|
7cf7883b59 | ||
|
|
c4be19058f | ||
|
|
ea3985cb28 | ||
|
|
342ded906e | ||
|
|
41753a03c3 | ||
|
|
02de71e618 | ||
|
|
a173cf6ac3 | ||
|
|
acffba0698 | ||
|
|
e599caa9d4 | ||
|
|
528223fec7 | ||
|
|
901026ccdd | ||
|
|
2da2c555f4 | ||
|
|
a26223f8e1 | ||
|
|
30b3d20630 | ||
|
|
138e53bb06 | ||
|
|
22bca78569 | ||
|
|
743df4d18f | ||
|
|
4511158a43 | ||
|
|
26bb8004ee | ||
|
|
0fcfeae966 | ||
|
|
6d6161ed83 | ||
|
|
b30e8fd875 | ||
|
|
8b112bf0b3 | ||
|
|
9dd69065c8 | ||
|
|
86625c7a80 | ||
|
|
87fe48358e | ||
|
|
18384e8eef | ||
|
|
b327ca5d58 | ||
|
|
a77fed3c1b | ||
|
|
430141cb77 | ||
|
|
170a48f40d | ||
|
|
90439ea2f0 | ||
|
|
703013265a | ||
|
|
f8c22cd571 | ||
|
|
cd9fb1131f | ||
|
|
cf451c0257 | ||
|
|
1fc35f289f | ||
|
|
15dba443b2 | ||
|
|
d659efb298 | ||
|
|
acc3858b27 | ||
|
|
e2b6ef1123 | ||
|
|
6a7fd83ffd | ||
|
|
5c3a1f4536 | ||
|
|
798b7ad21c | ||
|
|
e93e0ae10d | ||
|
|
d63495a878 | ||
|
|
cf70cfd066 | ||
|
|
14c2f048fd | ||
|
|
ac89bc1af8 | ||
|
|
e1143269ea | ||
|
|
1a06347c08 | ||
|
|
18e8cfaaf4 | ||
|
|
1e387114a4 | ||
|
|
f3918207be | ||
|
|
43f3a91107 | ||
|
|
53782d9473 | ||
|
|
db9887679c | ||
|
|
c3c423429b | ||
|
|
eaad8d88b8 | ||
|
|
43ca91258b | ||
|
|
6ce6f75a0e | ||
|
|
29c6019c15 | ||
|
|
4920bc5486 | ||
|
|
32c7494504 | ||
|
|
f9e4b5115b | ||
|
|
1307b0605b | ||
|
|
714afa4ccd | ||
|
|
c111b73c82 | ||
|
|
b19ffdfecc | ||
|
|
fc2e8378a5 | ||
|
|
2ba852f2a1 | ||
|
|
23db3656aa | ||
|
|
a61d21817d | ||
|
|
5abd4262e1 | ||
|
|
1873330b6d | ||
|
|
33969458ef | ||
|
|
1321385999 | ||
|
|
ebbd381177 | ||
|
|
41ef416e26 | ||
|
|
8fe97766f0 | ||
|
|
0b34ac38dc | ||
|
|
07580c6502 | ||
|
|
0ef022dbd4 | ||
|
|
465f9b4222 | ||
|
|
716ba95b5d | ||
|
|
8f49bfd610 | ||
|
|
99f089152c | ||
|
|
17a9dc6984 | ||
| 93f6c9540b | |||
| 3d64e141da | |||
|
|
b71b8d41ce | ||
|
|
7318ed3e31 | ||
| 6715311bf1 | |||
|
|
e3094ad67a | ||
|
|
7073405c85 | ||
|
|
05e840e88f | ||
|
|
baaafd460b | ||
|
|
6ed3eaed09 | ||
|
|
8fcd54beb3 | ||
|
|
f817021f51 | ||
|
|
a2ad56b625 | ||
|
|
6cae4a785e | ||
|
|
47391a0641 | ||
| 7d162cd4ed | |||
|
|
0de0fdfd9a | ||
|
|
41c4078db9 | ||
|
|
a6ad599c8f | ||
|
|
98d0dbc81d | ||
|
|
1b27d0b31a |
31
.github/workflows/test_parsing.yml
vendored
Normal file
31
.github/workflows/test_parsing.yml
vendored
Normal file
@ -0,0 +1,31 @@
|
||||
# This workflow will install Python dependencies, run tests and lint with a single version of Python
|
||||
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
|
||||
|
||||
name: Test flodata parsing
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ "swap-statef-testing" ]
|
||||
pull_request:
|
||||
branches: [ "swap-statef-testing" ]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: self-hosted
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Python 3.8
|
||||
uses: actions/setup-python@v3
|
||||
with:
|
||||
python-version: "3.8"
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install arrow==1.1.0 pyflo-lib==2.0.9 requests==2.25.0
|
||||
- name: Test with unittest
|
||||
run: |
|
||||
python -m unittest tests/test_parsing.py
|
||||
5
.gitignore
vendored
5
.gitignore
vendored
@ -10,6 +10,11 @@ config.ini
|
||||
config.py
|
||||
*.log
|
||||
py3/
|
||||
py3.9.0/
|
||||
py3.9/
|
||||
__pycache__/
|
||||
*.pyc
|
||||
.vscode/
|
||||
error-notes.txt
|
||||
snippets*
|
||||
helper-files/
|
||||
@ -1 +0,0 @@
|
||||
3.9.0
|
||||
156
README.md
156
README.md
@ -1,4 +1,31 @@
|
||||
# What is it
|
||||
This repository contains both the RanchiMall Token SmartContract Scanner and its API on MySQL. Installing the Scanner will also install all dependencies for the API.
|
||||
|
||||
# Howto start the MYSQL version
|
||||
|
||||
## Setup steps
|
||||
|
||||
1. Download just one file from this repository, setup.sh, and run it. It will download the rest of the repository.
|
||||
2. Give setup.sh execute permissions first `chmod +x setup.sh`, and then `./setup.sh`
|
||||
3. Run setup.sh to install python3.7 virtual environment, install and configure MySQL if it does not exist, install all dependencies and then start the application.
|
||||
|
||||
## How to run
|
||||
### Scanner
|
||||
1. python3 tracktokens-smartcontracts.py
|
||||
2. python3 tracktokens-smartcontracts.py --reset
|
||||
3. python3 tracktokens-smartcontracts.py --rebuild
|
||||
4. python3 tracktokens-smartcontracts.py --rebuild usd# tokenroom#
|
||||
|
||||
1. python3 tracktokens-smartcontracts.py => To run normally
|
||||
2. python3 tracktokens-smartcontracts.py --reset => To remove all data and start from scratch
|
||||
3. python3 tracktokens-smartcontracts.py --rebuild => To reprocess existing blockchain data for ALL TOKENS as stored in latestBlocks table of rm_latestCache_db database
|
||||
4. python3 tracktokens-smartcontracts.py --rebuild usd# tokenroom# => To reprocess existing blockchain data for USD# TOKENROOM# as stored in latestBlocks table of rm_latestCache_db database
|
||||
|
||||
### API
|
||||
1. python3 ranchimallflo_api.py
|
||||
|
||||
# FLO Token & Smart Contract System
|
||||
[](https://github.com/ranchimall/flo-token-tracking/actions/workflows/test_parsing.yml)
|
||||
|
||||
## Important versions and their hashes
|
||||
The python script scans the FLO Blockchain for Token and Smart Contract activity and creates/updates local SQLite databases accordingly.
|
||||
@ -28,30 +55,123 @@ Docker-compatibility branch is needed right now because Docker image made for fl
|
||||
```
|
||||
# config.ini
|
||||
[DEFAULT]
|
||||
NET = testnet
|
||||
FLO_CLI_PATH = /usr/local/bin/flo-cli
|
||||
START_BLOCK = 740400
|
||||
|
||||
# config.py
|
||||
committeeAddressList = ['oVwmQnQGtXjRpP7dxJeiRGd5azCrJiB6Ka']
|
||||
sseAPI_url = 'https://ranchimallflo-testnet.duckdns.org/'
|
||||
NET = testnet
|
||||
FLO_CLI_PATH = /usr/local/bin/flo-cli
|
||||
START_BLOCK = 740400
|
||||
FLOSIGHT_NETURL = https://0.0.0.0:19166/
|
||||
TESTNET_FLOSIGHT_SERVER_LIST = https://0.0.0.0:19166/
|
||||
MAINNET_FLOSIGHT_SERVER_LIST = https://blockbook.ranchimall.net/
|
||||
TOKENAPI_SSE_URL = https://ranchimallflo-testnet-blockbook.ranchimall.net
|
||||
IGNORE_BLOCK_LIST = 902446
|
||||
IGNORE_TRANSACTION_LIST = b4ac4ddb51188b28b39bcb3aa31357d5bfe562c21e8aaf8dde0ec560fc893174
|
||||
DATA_PATH = /home/production/deployed/ftt-blockbook-migration-testnet-rescan
|
||||
APP_ADMIN = oWooGLbBELNnwq8Z5YmjoVjw8GhBGH3qSP
|
||||
```
|
||||
|
||||
For mainnet
|
||||
```
|
||||
# config.ini
|
||||
[DEFAULT]
|
||||
NET = mainnet
|
||||
FLO_CLI_PATH = /usr/local/bin/flo-cli
|
||||
START_BLOCK = 3387900
|
||||
NET = mainnet
|
||||
FLO_CLI_PATH = /usr/local/bin/flo-cli
|
||||
START_BLOCK = 3387900
|
||||
FLOSIGHT_NETURL = https://blockbook.ranchimall.net/
|
||||
TESTNET_FLOSIGHT_SERVER_LIST = https://0.0.0.0:19166/
|
||||
MAINNET_FLOSIGHT_SERVER_LIST = https://blockbook.ranchimall.net/
|
||||
TOKENAPI_SSE_URL = https://ranchimallflo-blockbook.ranchimall.net
|
||||
IGNORE_BLOCK_LIST = 2
|
||||
IGNORE_TRANSACTION_LIST = b4
|
||||
DATA_PATH = /home/production/deployed/ftt-blockbook-migration-rescan
|
||||
APP_ADMIN = FNcvkz9PZNZM3HcxM1XTrVL4tgivmCkHp9
|
||||
API_VERIFY = False
|
||||
|
||||
```
|
||||
|
||||
4. Install pyflosetup.sh if dependency errors of any kind come. Give it execute permissions first `chmod +x pyflosetup.sh`, and then `./pyflosetup.sh`
|
||||
|
||||
|
||||
# config.py
|
||||
committeeAddressList = ['FRwwCqbP7DN4z5guffzzhCSgpD8Q33hUG8']
|
||||
sseAPI_url = 'https://ranchimallflo.duckdns.org/'
|
||||
```
|
||||
|
||||
4. If running for the first time, run `python3.7 tracktokens-smartcontracts.py --reset` otherwise run `python3.7 tracktokens-smartcontracts.py`
|
||||
5. If running for the first time, run `python3.7 tracktokens-smartcontracts.py --reset` otherwise run `python3.7 tracktokens-smartcontracts.py`
|
||||
|
||||
|
||||
If you want to listen to RanchiMall's Token Tracker scanner's events you have to subscribe to Ranchimallflo API's end point `/sse`
|
||||
Reference - https://ably.com/topic/server-sent-events
|
||||
## How to setup a virtual environment
|
||||
|
||||
To set up a virtual environment that uses Python 3.7 while keeping Python 3.10 as the default system version, follow these steps:
|
||||
Step 1: Make Sure Python 3.7 is Installed
|
||||
|
||||
Ensure Python 3.7 is installed on your system:
|
||||
|
||||
Add the deadsnakes PPA (if not already done):
|
||||
|
||||
bash
|
||||
|
||||
sudo add-apt-repository ppa:deadsnakes/ppa
|
||||
sudo apt update
|
||||
|
||||
Install Python 3.7 and the venv module for Python 3.7:
|
||||
|
||||
bash
|
||||
|
||||
sudo apt install python3.7 python3.7-venv
|
||||
|
||||
Step 2: Create a Virtual Environment with Python 3.7
|
||||
|
||||
Since you want your virtual environment to specifically use Python 3.7, you need to use Python 3.7 explicitly to create the venv, while keeping Python 3.10 as the system's default Python:
|
||||
|
||||
Create the virtual environment using Python 3.7:
|
||||
|
||||
bash
|
||||
|
||||
/usr/bin/python3.7 -m venv myenv
|
||||
|
||||
This command creates a virtual environment named myenv using Python 3.7 located at /usr/bin/python3.7. Replace myenv with your desired environment name.
|
||||
|
||||
Step 3: Activate the Virtual Environment
|
||||
|
||||
Activate the virtual environment to switch to Python 3.7 within the environment:
|
||||
|
||||
On Linux or macOS:
|
||||
|
||||
bash
|
||||
|
||||
source myenv/bin/activate
|
||||
|
||||
On Windows:
|
||||
|
||||
bash
|
||||
|
||||
.\myenv\Scripts\activate
|
||||
|
||||
After activation, your shell prompt should indicate that the virtual environment is active.
|
||||
Step 4: Verify the Python Version in the Virtual Environment
|
||||
|
||||
To confirm that the virtual environment is using Python 3.7, run:
|
||||
|
||||
bash
|
||||
|
||||
python --version
|
||||
|
||||
You should see output indicating that Python 3.7 is being used:
|
||||
|
||||
Python 3.7.x
|
||||
|
||||
Step 5: Deactivate the Virtual Environment When Done
|
||||
|
||||
When you are finished, deactivate the virtual environment to return to the base Python 3.10:
|
||||
|
||||
bash
|
||||
|
||||
deactivate
|
||||
|
||||
## MySQL commands to create a user
|
||||
1. CREATE USER 'FUfB6cwSsGDbQpmA7Qs8zQJxU3HpwCdnjT'@'localhost' IDENTIFIED BY 'RAcifrTM2V75ipy5MeLYaDU3UNcUXtrit933TGM5o7Yj2fs8XdP5';
|
||||
2. GRANT ALL PRIVILEGES ON `rm_%_db`.* TO 'FUfB6cwSsGDbQpmA7Qs8zQJxU3HpwCdnjT'@'localhost' WITH GRANT OPTION;
|
||||
|
||||
## Modify config.ini
|
||||
```
|
||||
[MYSQL]
|
||||
USERNAME = FUfB6cwSsGDbQpmA7Qs8zQJxU3HpwCdnjT
|
||||
PASSWORD = RAcifrTM2V75ipy5MeLYaDU3UNcUXtrit933TGM5o7Yj2fs8XdP5
|
||||
HOST = localhost
|
||||
DATABASE_PREFIX = rm
|
||||
```
|
||||
|
||||
|
||||
23
app.py
23
app.py
@ -1,23 +0,0 @@
|
||||
import os
|
||||
|
||||
from flask import Flask, jsonify
|
||||
|
||||
app = Flask(__name__)
|
||||
|
||||
|
||||
@app.route('/')
|
||||
def hello_world():
|
||||
return 'Hello, World!'
|
||||
|
||||
|
||||
@app.route('/getmarkerlist')
|
||||
def marker_list():
|
||||
dblist = os.listdir("databases/")
|
||||
dbdict = {}
|
||||
for idx, item in enumerate(dblist):
|
||||
dbdict[idx] = item[:-3]
|
||||
|
||||
return jsonify(dbdict)
|
||||
|
||||
|
||||
app.run(debug=True)
|
||||
19
config-example.ini
Normal file
19
config-example.ini
Normal file
@ -0,0 +1,19 @@
|
||||
[DEFAULT]
|
||||
NET = mainnet
|
||||
FLO_CLI_PATH = /usr/local/bin/flo-cli
|
||||
START_BLOCK = 3387923
|
||||
FLOSIGHT_NETURL = https://blockbook.ranchimall.net/
|
||||
TESTNET_FLOSIGHT_SERVER_LIST = https://0.0.0.0:19166/
|
||||
MAINNET_FLOSIGHT_SERVER_LIST = https://blockbook.ranchimall.net/
|
||||
TOKENAPI_SSE_URL = https://ranchimallflo-blockbook.ranchimall.net
|
||||
IGNORE_BLOCK_LIST = 2
|
||||
IGNORE_TRANSACTION_LIST = b4
|
||||
APP_ADMIN = FNcvkz9PZNZM3HcxM1XTrVL4tgivmCkHp9
|
||||
API_VERIFY = False
|
||||
|
||||
[MYSQL]
|
||||
USERNAME = FUfB6cwSsGDbQpmA7Qs8zQJxU3HpwCdnjT
|
||||
PASSWORD = RAcifrTM2V75ipy5MeLYaDU3UNcUXtrit933TGM5o7Yj2fs8XdP5
|
||||
HOST = localhost
|
||||
DATABASE_PREFIX = rm
|
||||
|
||||
@ -1 +0,0 @@
|
||||
committeeAddressList = [<committeeAddress>]
|
||||
27
config.ini
27
config.ini
@ -1,4 +1,29 @@
|
||||
[DEFAULT]
|
||||
NET = mainnet
|
||||
FLO_CLI_PATH = /usr/local/bin/flo-cli
|
||||
START_BLOCK = 3387900
|
||||
START_BLOCK = 3387923
|
||||
BLOCKBOOK_NETURL = https://blockbook.ranchimall.net/
|
||||
TESTNET_BLOCKBOOK_SERVER_LIST = https://0.0.0.0:19166/
|
||||
MAINNET_BLOCKBOOK_SERVER_LIST = https://blockbook.ranchimall.net/
|
||||
TOKENAPI_SSE_URL = https://ranchimallflo-blockbook.ranchimall.net
|
||||
IGNORE_BLOCK_LIST = 2
|
||||
IGNORE_TRANSACTION_LIST = b4
|
||||
APP_ADMIN = FNcvkz9PZNZM3HcxM1XTrVL4tgivmCkHp9
|
||||
API_VERIFY = False
|
||||
|
||||
[MYSQL]
|
||||
USERNAME = FUfB6cwSsGDbQpmA7Qs8zQJxU3HpwCdnjT
|
||||
PASSWORD = RAcifrTM2V75ipy5MeLYaDU3UNcUXtrit933TGM5o7Yj2fs8XdP5
|
||||
HOST = localhost
|
||||
DATABASE_PREFIX = rm
|
||||
|
||||
[API]
|
||||
APIHOST = localhost
|
||||
APIPORT = 5432
|
||||
apiUrl = https://blockbook.ranchimall.net/
|
||||
|
||||
# Timeout configurations
|
||||
RETRY_TIMEOUT_LONG = 1800
|
||||
RETRY_TIMEOUT_SHORT = 60
|
||||
DB_RETRY_TIMEOUT = 60
|
||||
API_TIMEOUT = 1
|
||||
|
||||
43
convert_db.py
Normal file
43
convert_db.py
Normal file
@ -0,0 +1,43 @@
|
||||
from models import SystemData, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, RejectedTransactionHistory, Base, ContractStructure, ContractBase, ContractParticipants, SystemBase, ActiveContracts, ContractAddressMapping, LatestCacheBase, ContractTransactionHistory, RejectedContractTransactionHistory, TokenContractAssociation, ContinuosContractBase, ContractStructure1, ContractParticipants1, ContractDeposits1, ContractTransactionHistory1, LatestTransactions, LatestBlocks, DatabaseTypeMapping, TokenAddressMapping, LatestCacheBase1, LatestTransactions1, LatestBlocks1
|
||||
import pdb
|
||||
from sqlalchemy import create_engine, func
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
|
||||
def create_database_session_orm(type, parameters, base):
    """Open (creating it if necessary) an SQLite database and return an ORM session.

    Parameters
    ----------
    type : str
        One of 'token', 'smart_contract' or 'system_dbs'; selects the
        file-naming scheme used for the SQLite database file.
    parameters : dict
        Naming values for the chosen type:
        - 'token'          -> parameters['token_name']
        - 'smart_contract' -> parameters['contract_name'] and
                              parameters['contract_address']
        - 'system_dbs'     -> parameters['db_name']
    base : DeclarativeMeta
        SQLAlchemy declarative base whose tables are created in the database.

    Returns
    -------
    Session
        A new SQLAlchemy session bound to the database engine.

    Raises
    ------
    ValueError
        If ``type`` is not a recognised database type.  (The original code
        fell through and raised UnboundLocalError on ``return session``.)
    """
    # Map each database type to its file path and SQL echo setting; token and
    # smart-contract databases keep the original verbose echo=True behaviour.
    if type == 'token':
        db_path = f"tokens/{parameters['token_name']}.db"
        echo = True
    elif type == 'smart_contract':
        db_path = f"smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db"
        echo = True
    elif type == 'system_dbs':
        db_path = f"{parameters['db_name']}.db"
        echo = False
    else:
        raise ValueError(f"Unknown database type: {type!r}")

    engine = create_engine(f"sqlite:///{db_path}", echo=echo)
    base.metadata.create_all(bind=engine)
    return sessionmaker(bind=engine)()
|
||||
|
||||
|
||||
# Migrate the latestCache data: read every cached block and transaction from
# the legacy 'convertdb' SQLite database and copy them into the new
# 'latestCache' database using the current schema models.

# Source: legacy database holding the rows to migrate.
source_session = create_database_session_orm('system_dbs', {'db_name': 'convertdb'}, LatestCacheBase1)
cached_blocks = source_session.query(LatestBlocks1).all()
cached_transactions = source_session.query(LatestTransactions1).all()

# Destination: new database created with the current LatestCacheBase schema.
target_session = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase)

# Copy blocks and transactions across, field by field, into the new models.
for old_block in cached_blocks:
    target_session.add(
        LatestBlocks(
            blockNumber=old_block.blockNumber,
            blockHash=old_block.blockHash,
            jsonData=old_block.jsonData,
        )
    )

for old_tx in cached_transactions:
    target_session.add(
        LatestTransactions(
            transactionHash=old_tx.transactionHash,
            blockNumber=old_tx.blockNumber,
            jsonData=old_tx.jsonData,
            transactionType=old_tx.transactionType,
            parsedFloData=old_tx.parsedFloData,
        )
    )

# Persist everything in one commit, then release both sessions.
target_session.commit()
target_session.close()
source_session.close()
|
||||
@ -1,3 +0,0 @@
|
||||
cd /home/production/Desktop/flo-token-tracking/
|
||||
python3 tracktokens-smartcontracts.py
|
||||
|
||||
423
models.py
423
models.py
@ -1,224 +1,353 @@
|
||||
from sqlalchemy import Column, Integer, Float, String
|
||||
from sqlalchemy import Column, BigInteger, Float, Text
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
|
||||
Base = declarative_base()
|
||||
TokenBase = declarative_base()
|
||||
ContractBase = declarative_base()
|
||||
ContinuosContractBase = declarative_base()
|
||||
SystemBase = declarative_base()
|
||||
LatestCacheBase = declarative_base()
|
||||
|
||||
|
||||
class ActiveTable(Base):
|
||||
class ActiveTable(TokenBase):
|
||||
__tablename__ = "activeTable"
|
||||
|
||||
id = Column('id', Integer, primary_key=True)
|
||||
address = Column('address', String)
|
||||
parentid = Column('parentid', Integer)
|
||||
consumedpid = Column('consumedpid', String)
|
||||
id = Column('id', BigInteger, primary_key=True)
|
||||
address = Column('address', Text)
|
||||
parentid = Column('parentid', BigInteger)
|
||||
consumedpid = Column('consumedpid', Text)
|
||||
transferBalance = Column('transferBalance', Float)
|
||||
addressBalance = Column('addressBalance', Float)
|
||||
orphaned_parentid = Column('orphaned_parentid', BigInteger)
|
||||
blockNumber = Column('blockNumber', BigInteger)
|
||||
|
||||
|
||||
class ConsumedTable(Base):
|
||||
class ConsumedTable(TokenBase):
|
||||
__tablename__ = "consumedTable"
|
||||
|
||||
primaryKey = Column('primaryKey', Integer, primary_key=True)
|
||||
id = Column('id', Integer)
|
||||
address = Column('address', String)
|
||||
parentid = Column('parentid', Integer)
|
||||
consumedpid = Column('consumedpid', String)
|
||||
primaryKey = Column('primaryKey', BigInteger, primary_key=True)
|
||||
id = Column('id', BigInteger)
|
||||
address = Column('address', Text)
|
||||
parentid = Column('parentid', BigInteger)
|
||||
consumedpid = Column('consumedpid', Text)
|
||||
transferBalance = Column('transferBalance', Float)
|
||||
addressBalance = Column('addressBalance', Float)
|
||||
orphaned_parentid = Column('orphaned_parentid', BigInteger)
|
||||
blockNumber = Column('blockNumber', BigInteger)
|
||||
|
||||
|
||||
class TransferLogs(Base):
|
||||
class TransferLogs(TokenBase):
|
||||
__tablename__ = "transferlogs"
|
||||
|
||||
primary_key = Column('id', Integer, primary_key=True)
|
||||
sourceFloAddress = Column('sourceFloAddress', String)
|
||||
destFloAddress = Column('destFloAddress', String)
|
||||
primary_key = Column('id', BigInteger, primary_key=True)
|
||||
sourceFloAddress = Column('sourceFloAddress', Text)
|
||||
destFloAddress = Column('destFloAddress', Text)
|
||||
transferAmount = Column('transferAmount', Float)
|
||||
sourceId = Column('sourceId', Integer)
|
||||
destinationId = Column('destinationId', Integer)
|
||||
blockNumber = Column('blockNumber', Integer)
|
||||
time = Column('time', Integer)
|
||||
transactionHash = Column('transactionHash', String)
|
||||
sourceId = Column('sourceId', BigInteger)
|
||||
destinationId = Column('destinationId', BigInteger)
|
||||
blockNumber = Column('blockNumber', BigInteger)
|
||||
time = Column('time', BigInteger)
|
||||
transactionHash = Column('transactionHash', Text)
|
||||
|
||||
|
||||
class TransactionHistory(Base):
|
||||
class TransactionHistory(TokenBase):
|
||||
__tablename__ = "transactionHistory"
|
||||
|
||||
primary_key = Column('id', Integer, primary_key=True)
|
||||
sourceFloAddress = Column('sourceFloAddress', String)
|
||||
destFloAddress = Column('destFloAddress', String)
|
||||
primary_key = Column('id', BigInteger, primary_key=True)
|
||||
sourceFloAddress = Column('sourceFloAddress', Text)
|
||||
destFloAddress = Column('destFloAddress', Text)
|
||||
transferAmount = Column('transferAmount', Float)
|
||||
blockNumber = Column('blockNumber', Integer)
|
||||
blockHash = Column('blockHash', String)
|
||||
time = Column('time', Integer)
|
||||
transactionHash = Column('transactionHash', String)
|
||||
blockchainReference = Column('blockchainReference', String)
|
||||
jsonData = Column('jsonData', String)
|
||||
transactionType = Column('transactionType', String)
|
||||
parsedFloData = Column('parsedFloData', String)
|
||||
blockNumber = Column('blockNumber', BigInteger)
|
||||
blockHash = Column('blockHash', Text)
|
||||
time = Column('time', BigInteger)
|
||||
transactionHash = Column('transactionHash', Text)
|
||||
blockchainReference = Column('blockchainReference', Text)
|
||||
jsonData = Column('jsonData', Text)
|
||||
transactionType = Column('transactionType', Text)
|
||||
parsedFloData = Column('parsedFloData', Text)
|
||||
|
||||
|
||||
class TokenContractAssociation(Base):
|
||||
class TokenContractAssociation(TokenBase):
|
||||
__tablename__ = "tokenContractAssociation"
|
||||
|
||||
primary_key = Column('id', Integer, primary_key=True)
|
||||
tokenIdentification = Column('tokenIdentification', String)
|
||||
contractName = Column('contractName', String)
|
||||
contractAddress = Column('contractAddress', String)
|
||||
blockNumber = Column('blockNumber', Integer)
|
||||
blockHash = Column('blockHash', String)
|
||||
time = Column('time', Integer)
|
||||
transactionHash = Column('transactionHash', String)
|
||||
blockchainReference = Column('blockchainReference', String)
|
||||
jsonData = Column('jsonData', String)
|
||||
transactionType = Column('transactionType', String)
|
||||
parsedFloData = Column('parsedFloData', String)
|
||||
primary_key = Column('id', BigInteger, primary_key=True)
|
||||
tokenIdentification = Column('tokenIdentification', Text)
|
||||
contractName = Column('contractName', Text)
|
||||
contractAddress = Column('contractAddress', Text)
|
||||
blockNumber = Column('blockNumber', BigInteger)
|
||||
blockHash = Column('blockHash', Text)
|
||||
time = Column('time', BigInteger)
|
||||
transactionHash = Column('transactionHash', Text)
|
||||
blockchainReference = Column('blockchainReference', Text)
|
||||
jsonData = Column('jsonData', Text)
|
||||
transactionType = Column('transactionType', Text)
|
||||
parsedFloData = Column('parsedFloData', Text)
|
||||
|
||||
|
||||
class ContractStructure(ContractBase):
|
||||
__tablename__ = "contractstructure"
|
||||
|
||||
id = Column('id', Integer, primary_key=True)
|
||||
attribute = Column('attribute', String)
|
||||
index = Column('index', Integer)
|
||||
value = Column('value', String)
|
||||
id = Column('id', BigInteger, primary_key=True)
|
||||
attribute = Column('attribute', Text)
|
||||
index = Column('index', BigInteger)
|
||||
value = Column('value', Text)
|
||||
|
||||
|
||||
class ContractParticipants(ContractBase):
|
||||
__tablename__ = "contractparticipants"
|
||||
|
||||
id = Column('id', Integer, primary_key=True)
|
||||
participantAddress = Column('participantAddress', String)
|
||||
id = Column('id', BigInteger, primary_key=True)
|
||||
participantAddress = Column('participantAddress', Text)
|
||||
tokenAmount = Column('tokenAmount', Float)
|
||||
userChoice = Column('userChoice', String)
|
||||
transactionHash = Column('transactionHash', String)
|
||||
blockNumber = Column('blockNumber', Integer)
|
||||
blockHash = Column('blockHash', String)
|
||||
userChoice = Column('userChoice', Text)
|
||||
transactionHash = Column('transactionHash', Text)
|
||||
blockNumber = Column('blockNumber', BigInteger)
|
||||
blockHash = Column('blockHash', Text)
|
||||
winningAmount = Column('winningAmount', Float)
|
||||
|
||||
|
||||
class ContractTransactionHistory(ContractBase):
|
||||
__tablename__ = "contractTransactionHistory"
|
||||
|
||||
primary_key = Column('id', Integer, primary_key=True)
|
||||
transactionType = Column('transactionType', String)
|
||||
transactionSubType = Column('transactionSubType', String)
|
||||
sourceFloAddress = Column('sourceFloAddress', String)
|
||||
destFloAddress = Column('destFloAddress', String)
|
||||
primary_key = Column('id', BigInteger, primary_key=True)
|
||||
transactionType = Column('transactionType', Text)
|
||||
transactionSubType = Column('transactionSubType', Text)
|
||||
sourceFloAddress = Column('sourceFloAddress', Text)
|
||||
destFloAddress = Column('destFloAddress', Text)
|
||||
transferAmount = Column('transferAmount', Float)
|
||||
blockNumber = Column('blockNumber', Integer)
|
||||
blockHash = Column('blockHash', String)
|
||||
time = Column('time', Integer)
|
||||
transactionHash = Column('transactionHash', String)
|
||||
blockchainReference = Column('blockchainReference', String)
|
||||
jsonData = Column('jsonData', String)
|
||||
parsedFloData = Column('parsedFloData', String)
|
||||
blockNumber = Column('blockNumber', BigInteger)
|
||||
blockHash = Column('blockHash', Text)
|
||||
time = Column('time', BigInteger)
|
||||
transactionHash = Column('transactionHash', Text)
|
||||
blockchainReference = Column('blockchainReference', Text)
|
||||
jsonData = Column('jsonData', Text)
|
||||
parsedFloData = Column('parsedFloData', Text)
|
||||
|
||||
|
||||
class RejectedContractTransactionHistory(SystemBase):
|
||||
__tablename__ = "rejectedContractTransactionHistory"
|
||||
class ContractDeposits(ContractBase):
|
||||
__tablename__ = "contractdeposits"
|
||||
|
||||
primary_key = Column('id', Integer, primary_key=True)
|
||||
transactionType = Column('transactionType', String)
|
||||
transactionSubType = Column('transactionSubType', String)
|
||||
contractName = Column('contractName', String)
|
||||
contractAddress = Column('contractAddress', String)
|
||||
sourceFloAddress = Column('sourceFloAddress', String)
|
||||
destFloAddress = Column('destFloAddress', String)
|
||||
id = Column('id', BigInteger, primary_key=True)
|
||||
depositorAddress = Column('depositorAddress', Text)
|
||||
depositAmount = Column('depositAmount', Float)
|
||||
depositBalance = Column('depositBalance', Float)
|
||||
expiryTime = Column('expiryTime', Text)
|
||||
unix_expiryTime = Column('unix_expiryTime', BigInteger)
|
||||
status = Column('status', Text)
|
||||
transactionHash = Column('transactionHash', Text)
|
||||
blockNumber = Column('blockNumber', BigInteger)
|
||||
blockHash = Column('blockHash', Text)
|
||||
|
||||
|
||||
class ConsumedInfo(ContractBase):
|
||||
__tablename__ = "consumedinfo"
|
||||
|
||||
id = Column('id', BigInteger, primary_key=True)
|
||||
id_deposittable = Column('id_deposittable', BigInteger)
|
||||
transactionHash = Column('transactionHash', Text)
|
||||
blockNumber = Column('blockNumber', BigInteger)
|
||||
|
||||
|
||||
class ContractWinners(ContractBase):
|
||||
__tablename__ = "contractwinners"
|
||||
|
||||
id = Column('id', BigInteger, primary_key=True)
|
||||
participantAddress = Column('participantAddress', Text)
|
||||
winningAmount = Column('winningAmount', Float)
|
||||
userChoice = Column('userChoice', Text)
|
||||
transactionHash = Column('transactionHash', Text)
|
||||
blockNumber = Column('blockNumber', BigInteger)
|
||||
blockHash = Column('blockHash', Text)
|
||||
referenceTxHash = Column('referenceTxHash', Text)
|
||||
|
||||
class ContractStructure2(ContinuosContractBase):
|
||||
__tablename__ = "contractstructure"
|
||||
|
||||
id = Column('id', BigInteger, primary_key=True)
|
||||
attribute = Column('attribute', Text)
|
||||
index = Column('index', BigInteger)
|
||||
value = Column('value', Text)
|
||||
|
||||
|
||||
class ContractParticipants2(ContinuosContractBase):
|
||||
__tablename__ = "contractparticipants"
|
||||
|
||||
id = Column('id', BigInteger, primary_key=True)
|
||||
participantAddress = Column('participantAddress', Text)
|
||||
tokenAmount = Column('tokenAmount', Float)
|
||||
transactionHash = Column('transactionHash', Text)
|
||||
blockNumber = Column('blockNumber', BigInteger)
|
||||
blockHash = Column('blockHash', Text)
|
||||
|
||||
|
||||
class ContractDeposits2(ContinuosContractBase):
|
||||
__tablename__ = "contractdeposits"
|
||||
|
||||
id = Column('id', BigInteger, primary_key=True)
|
||||
depositorAddress = Column('depositorAddress', Text)
|
||||
depositAmount = Column('depositAmount', Float)
|
||||
expiryTime = Column('expiryTime', Text)
|
||||
status = Column('status', Text)
|
||||
transactionHash = Column('transactionHash', Text)
|
||||
blockNumber = Column('blockNumber', BigInteger)
|
||||
blockHash = Column('blockHash', Text)
|
||||
|
||||
|
||||
class ContractTransactionHistory2(ContinuosContractBase):
|
||||
__tablename__ = "contractTransactionHistory"
|
||||
|
||||
primary_key = Column('id', BigInteger, primary_key=True)
|
||||
transactionType = Column('transactionType', Text)
|
||||
transactionSubType = Column('transactionSubType', Text)
|
||||
sourceFloAddress = Column('sourceFloAddress', Text)
|
||||
destFloAddress = Column('destFloAddress', Text)
|
||||
transferAmount = Column('transferAmount', Float)
|
||||
blockNumber = Column('blockNumber', Integer)
|
||||
blockHash = Column('blockHash', String)
|
||||
time = Column('time', Integer)
|
||||
transactionHash = Column('transactionHash', String)
|
||||
blockchainReference = Column('blockchainReference', String)
|
||||
jsonData = Column('jsonData', String)
|
||||
rejectComment = Column('rejectComment', String)
|
||||
parsedFloData = Column('parsedFloData', String)
|
||||
|
||||
|
||||
class RejectedTransactionHistory(SystemBase):
|
||||
__tablename__ = "rejectedTransactionHistory"
|
||||
|
||||
primary_key = Column('id', Integer, primary_key=True)
|
||||
tokenIdentification = Column('tokenIdentification', String)
|
||||
sourceFloAddress = Column('sourceFloAddress', String)
|
||||
destFloAddress = Column('destFloAddress', String)
|
||||
transferAmount = Column('transferAmount', Float)
|
||||
blockNumber = Column('blockNumber', Integer)
|
||||
blockHash = Column('blockHash', String)
|
||||
time = Column('time', Integer)
|
||||
transactionHash = Column('transactionHash', String)
|
||||
blockchainReference = Column('blockchainReference', String)
|
||||
jsonData = Column('jsonData', String)
|
||||
rejectComment = Column('rejectComment', String)
|
||||
transactionType = Column('transactionType', String)
|
||||
parsedFloData = Column('parsedFloData', String)
|
||||
blockNumber = Column('blockNumber', BigInteger)
|
||||
blockHash = Column('blockHash', Text)
|
||||
time = Column('time', BigInteger)
|
||||
transactionHash = Column('transactionHash', Text)
|
||||
blockchainReference = Column('blockchainReference', Text)
|
||||
jsonData = Column('jsonData', Text)
|
||||
parsedFloData = Column('parsedFloData', Text)
|
||||
|
||||
|
||||
class ActiveContracts(SystemBase):
    """SQLAlchemy model for the `activecontracts` table (system DB).

    The diff-merged original declared every attribute twice (old String/
    Integer line plus new MySQL Text/BigInteger line); only the last binding
    survives in a class body, so the stale first set was removed.
    """

    __tablename__ = "activecontracts"

    id = Column('id', BigInteger, primary_key=True)
    contractName = Column('contractName', Text)
    contractAddress = Column('contractAddress', Text)
    status = Column('status', Text)
    tokenIdentification = Column('tokenIdentification', Text)
    contractType = Column('contractType', Text)
    transactionHash = Column('transactionHash', Text)
    blockNumber = Column('blockNumber', BigInteger)
    blockHash = Column('blockHash', Text)
    incorporationDate = Column('incorporationDate', Text)
    expiryDate = Column('expiryDate', Text)
    closeDate = Column('closeDate', Text)
|
||||
|
||||
|
||||
class SystemData(SystemBase):
    """SQLAlchemy model for the `systemData` key/value table (system DB).

    Stale duplicate String/Integer bindings from the diff merge removed;
    only the last-bound MySQL types are kept.
    """

    __tablename__ = "systemData"

    id = Column('id', BigInteger, primary_key=True)
    attribute = Column('attribute', Text)
    value = Column('value', Text)
|
||||
|
||||
class ContractAddressMapping(SystemBase):
    """SQLAlchemy model for the `contractAddressMapping` table (system DB).

    Stale duplicate bindings from the diff merge removed; only last-bound
    types are kept (tokenAmount was declared once, as Float).
    """

    __tablename__ = "contractAddressMapping"

    id = Column('id', BigInteger, primary_key=True)
    address = Column('address', Text)
    addressType = Column('addressType', Text)
    contractName = Column('contractName', Text)
    contractAddress = Column('contractAddress', Text)
    tokenAmount = Column('tokenAmount', Float)
    transactionHash = Column('transactionHash', Text)
    blockNumber = Column('blockNumber', BigInteger)
    blockHash = Column('blockHash', Text)
|
||||
|
||||
class TokenAddressMapping(SystemBase):
    """SQLAlchemy model for the `tokenAddressMapping` table (system DB).

    Stale duplicate String/Integer bindings from the diff merge removed;
    only the last-bound MySQL types are kept.
    """

    __tablename__ = "tokenAddressMapping"

    id = Column('id', BigInteger, primary_key=True)
    tokenAddress = Column('tokenAddress', Text)
    token = Column('token', Text)
    transactionHash = Column('transactionHash', Text)
    blockNumber = Column('blockNumber', BigInteger)
    blockHash = Column('blockHash', Text)
|
||||
|
||||
class DatabaseTypeMapping(SystemBase):
    """SQLAlchemy model for the `databaseTypeMapping` table (system DB)."""

    __tablename__ = "databaseTypeMapping"

    id = Column('id', BigInteger, primary_key=True)
    db_name = Column('db_name', Text)
    db_type = Column('db_type', Text)
    keyword = Column('keyword', Text)
    object_format = Column('object_format', Text)
    blockNumber = Column('blockNumber', BigInteger)
||||
|
||||
|
||||
class TimeActions(SystemBase):
    """SQLAlchemy model for the `time_actions` table (system DB)."""

    __tablename__ = "time_actions"

    id = Column('id', BigInteger, primary_key=True)
    time = Column('time', Text)
    activity = Column('activity', Text)
    status = Column('status', Text)
    contractName = Column('contractName', Text)
    contractAddress = Column('contractAddress', Text)
    contractType = Column('contractType', Text)
    tokens_db = Column('tokens_db', Text)
    parsed_data = Column('parsed_data', Text)
    transactionHash = Column('transactionHash', Text)
    blockNumber = Column('blockNumber', BigInteger)
|
||||
|
||||
|
||||
class RejectedContractTransactionHistory(SystemBase):
    """SQLAlchemy model for the `rejectedContractTransactionHistory` table
    (system DB)."""

    __tablename__ = "rejectedContractTransactionHistory"

    primary_key = Column('id', BigInteger, primary_key=True)
    transactionType = Column('transactionType', Text)
    transactionSubType = Column('transactionSubType', Text)
    contractName = Column('contractName', Text)
    contractAddress = Column('contractAddress', Text)
    sourceFloAddress = Column('sourceFloAddress', Text)
    destFloAddress = Column('destFloAddress', Text)
    transferAmount = Column('transferAmount', Float)
    blockNumber = Column('blockNumber', BigInteger)
    blockHash = Column('blockHash', Text)
    time = Column('time', BigInteger)
    transactionHash = Column('transactionHash', Text)
    blockchainReference = Column('blockchainReference', Text)
    jsonData = Column('jsonData', Text)
    rejectComment = Column('rejectComment', Text)
    parsedFloData = Column('parsedFloData', Text)
|
||||
|
||||
|
||||
class RejectedTransactionHistory(SystemBase):
    """SQLAlchemy model for the `rejectedTransactionHistory` table (system DB).

    NOTE(review): an earlier class in this module uses the same name and
    __tablename__; with a shared declarative base that raises
    "Table ... is already defined" — confirm which definition to keep.
    """

    __tablename__ = "rejectedTransactionHistory"

    primary_key = Column('id', BigInteger, primary_key=True)
    tokenIdentification = Column('tokenIdentification', Text)
    sourceFloAddress = Column('sourceFloAddress', Text)
    destFloAddress = Column('destFloAddress', Text)
    transferAmount = Column('transferAmount', Float)
    blockNumber = Column('blockNumber', BigInteger)
    blockHash = Column('blockHash', Text)
    time = Column('time', BigInteger)
    transactionHash = Column('transactionHash', Text)
    blockchainReference = Column('blockchainReference', Text)
    jsonData = Column('jsonData', Text)
    rejectComment = Column('rejectComment', Text)
    transactionType = Column('transactionType', Text)
    parsedFloData = Column('parsedFloData', Text)
|
||||
|
||||
class LatestTransactions(LatestCacheBase):
    """SQLAlchemy model for the `latestTransactions` table (latest-cache DB).

    Stale duplicate String bindings from the diff merge removed; only the
    last-bound MySQL types are kept (`db_reference` was added by the
    migration and declared once).
    """

    __tablename__ = "latestTransactions"

    id = Column('id', BigInteger, primary_key=True)
    transactionHash = Column('transactionHash', Text)
    blockNumber = Column('blockNumber', BigInteger)
    jsonData = Column('jsonData', Text)
    transactionType = Column('transactionType', Text)
    parsedFloData = Column('parsedFloData', Text)
    db_reference = Column('db_reference', Text)
|
||||
|
||||
|
||||
class LatestBlocks(LatestCacheBase):
    """SQLAlchemy model for the `latestBlocks` table (latest-cache DB).

    Stale duplicate String bindings from the diff merge removed; only the
    last-bound MySQL types are kept.
    """

    __tablename__ = "latestBlocks"

    id = Column('id', BigInteger, primary_key=True)
    blockNumber = Column('blockNumber', BigInteger)
    blockHash = Column('blockHash', Text)
    jsonData = Column('jsonData', Text)
|
||||
|
||||
|
||||
class RecentBlocks(LatestCacheBase):
    """SQLAlchemy model for the `RecentBlocks` table (latest-cache DB)."""

    __tablename__ = "RecentBlocks"

    id = Column(BigInteger, primary_key=True, autoincrement=True)
    blockNumber = Column(BigInteger, unique=True, nullable=False)
    blockHash = Column(Text, nullable=False)
|
||||
|
||||
|
||||
|
||||
281
parser_function_definitions.py
Normal file
281
parser_function_definitions.py
Normal file
@ -0,0 +1,281 @@
|
||||
"""
|
||||
|
||||
DEFINITIONS:
|
||||
|
||||
Special character words - A word followed by one of the special characters (#, *, @)
|
||||
#-word - Token name
|
||||
@-word - Smart Contract name
|
||||
*-word - Smart Contract type
|
||||
|
||||
"""
|
||||
|
||||
"""
|
||||
FIND RULES
|
||||
|
||||
1. Identify all Special character words in a text string >> and output as a list of those words
|
||||
2. Apply rule 1, but only before a marker or keyword like ":" and output as a list of those words
|
||||
3. Find a number in the string
|
||||
5. Check for an occurrence of an exact ordered pattern of special character words
|
||||
eg. for one-time-event smart contract( identified using *-word), the existence of #-word should be checked before the ':' and output the #-word
|
||||
for continuos-event smart contract( identified using *-word)(with subtype tokenswap), the #-words should be checked after the ':' and output two hash words
|
||||
6. Given a string of the type contract conditions, format and output an object string by removing = and by removing number references
|
||||
7. Identify all the special character words in a text string such that spaces are not taken into account, e.g. Input string => "contract-conditions :(2) accepting_token=rupee#(3) selling_token = bioscope# "
|
||||
Output string => ["rupee#","bioscope#"]
|
||||
"""
|
||||
|
||||
def findrule1(rawstring, special_character):
    """FIND RULE 1: collect every space-delimited word in *rawstring* that
    ends with *special_character*.

    A token that is only the special character itself (length 1) is ignored.
    Returns the matching words as a list, in order of appearance.
    """
    return [
        token
        for token in rawstring.split(' ')
        if token.endswith(special_character) and len(token) != 1
    ]
|
||||
|
||||
def findrule3(text):
    """FIND RULE 3: extract a single numeric amount from *text*.

    Understands plain numbers ("200"), a number followed by a scale word
    ("500 million") and fused forms ("2.5thousand"). Returns the value as a
    float only when exactly one number is found; zero or multiple numbers
    are ambiguous and yield None.

    Fixes over the original: the bare ``except:`` clauses are narrowed, and
    a number appearing as the last word is no longer silently dropped (the
    original's ``textList[idx + 1]`` raised IndexError there, which the bare
    except swallowed before the fused-form fallback failed to match).
    """
    base_units = {'thousand': 10 ** 3, 'million': 10 ** 6, 'billion': 10 ** 9, 'trillion': 10 ** 12}
    textList = text.split(' ')
    counter = 0
    value = None
    for idx, word in enumerate(textList):
        try:
            result = float(word)
        except ValueError:
            # Not a bare number: try fused "<number><unit>" forms like "2.5thousand".
            for unit in base_units:
                parts = word.split(unit)
                if len(parts) == 2 and parts[1] == '' and parts[0] != '':
                    try:
                        value = float(parts[0]) * base_units[unit]
                    except ValueError:
                        continue
                    counter = counter + 1
            continue
        # Bare number: scale it when the *next* word is a unit (bounds-checked).
        if idx + 1 < len(textList) and textList[idx + 1] in base_units:
            value = result * base_units[textList[idx + 1]]
        else:
            value = result
        counter = counter + 1

    # Exactly one number is required for an unambiguous answer.
    if counter == 1:
        return value
    else:
        return None
|
||||
|
||||
|
||||
|
||||
"""
|
||||
TRUE-FALSE RULES
|
||||
|
||||
1. Check if subtype = tokenswap exists in a given string,
|
||||
2. Find if any one of special word in list is present, ie. [start, create, incorporate] and any of the words in second list is not present like [send,transfer, give]
|
||||
|
||||
"""
|
||||
import re
|
||||
|
||||
def findWholeWord(w):
    """Return a search callable that matches *w* as a whole word,
    case-insensitively.

    Example: ``findWholeWord('seek')('those who seek')`` -> match object,
    while ``findWholeWord('word')('swordsmith')`` -> None.
    """
    pattern = re.compile(r'\b({0})\b'.format(w), flags=re.IGNORECASE)
    return pattern.search
|
||||
|
||||
'''
|
||||
findWholeWord('seek')('those who seek shall find') # -> <match object>
|
||||
findWholeWord('word')('swordsmith')
|
||||
'''
|
||||
|
||||
def truefalse_rule1(rawstring, string_tobe_checked):
    """TRUE-FALSE RULE 1: True when *string_tobe_checked* occurs in
    *rawstring*, ignoring case and all spaces on both sides.

    Used e.g. to detect "subtype = tokenswap" regardless of spacing.
    """
    haystack = rawstring.replace(" ", "").lower()
    needle = string_tobe_checked.replace(" ", "").lower()
    return needle in haystack
|
||||
|
||||
# Word lists consumed by truefalse_rule2; entries must stay lowercase
# because matching is done case-insensitively against these exact forms.
denied_list = ['transfer', 'send', 'give']
permitted_list = ['incorporate', 'create', 'start']
|
||||
|
||||
def truefalse_rule2(rawstring, permitted_list, denied_list):
    """TRUE-FALSE RULE 2: True when *rawstring* contains at least one whole
    word from *permitted_list* AND no whole word from *denied_list*.

    Matching is case-insensitive whole-word matching via findWholeWord.
    Fixes the original's syntax error ``foundPermitted in not None``
    (intended: ``is not None``).
    """
    foundPermitted = None
    foundDenied = None

    for word in permitted_list:
        if findWholeWord(word)(rawstring):
            foundPermitted = word
            break

    for word in denied_list:
        if findWholeWord(word)(rawstring):
            foundDenied = word
            break

    return (foundPermitted is not None) and (foundDenied is None)
|
||||
|
||||
|
||||
def selectCateogry(rawstring, wordlist, category1, category2):
    """Select between *category1* and *category2* for a raw string and its
    special-character word list.

    TODO: unimplemented drafting stub — the original had no body at all
    (a syntax error). The "Cateogry" typo in the name is kept because the
    name is this function's public interface.
    """
    pass
"""
|
||||
CLASSIFY RULES
|
||||
|
||||
1. Based on various combinations of the special character words and special words, create categorizations
|
||||
eg. 1.1 if there is only one #-word, then the flodata is related purely to token system
|
||||
1.2 if there is one #-word, one @-word .. then it is related to smart contract system, but cannot be a creation type since smart contract creaton needs to specify contract type with *-word
|
||||
1.3 if there is one
|
||||
2. Check if it is of the value 'one-time-event' or 'continuos-event'
|
||||
|
||||
"""
|
||||
|
||||
"""
|
||||
REJECT RULES
|
||||
|
||||
1. *-words have to be equal to 1 ie. You can specify only one contract type at once , otherwise noise
|
||||
2. *-word has to fall in the following type ['one-time-event*', 'continuous-event*'], otherwise noise
|
||||
3. @-word should exist only before the : , otherwise noise
|
||||
4. There should be only one @-word, otherwise noise
|
||||
5. for one-time-event smart contract( identified using one-time-event*), if there is a no #-word before : -> reject as noise
|
||||
6. for one-time-event smart contract( identified using one-time-event*) if there is more than one #-word before : -> reject as noise
|
||||
7. for one-time-event smart contract( identified using one-time-event*) if there is/are #-word(s) after colon -> reject as noise
|
||||
8. for continuos-event smart contract( identified using continuos-event*) if there is one or more #-word before : > reject as noise
|
||||
9. for continuos-event smart contract( identified using continuos-event*)( with subtype token-swap ) if there is one or more than two #-word after : > reject as noise
|
||||
10.
|
||||
|
||||
"""
|
||||
|
||||
def rejectrule9(rawtext, starword):
    """REJECT RULE 9 — stub, not implemented yet.

    Intended (per the REJECT RULES notes in this module): for a
    continuous-event token-swap contract, reject as noise when the #-word
    count after the colon is wrong.
    """
    pass
|
||||
|
||||
|
||||
# Example call left from drafting; the names used here (cleanstring,
# contracttype, blockinfo, hashList, extractContractConditions) are not
# defined in this module, so executing it raises NameError at import time.
# Kept as a comment for reference:
# extractContractConditions(cleanstring, contracttype, blocktime=blockinfo['time'], marker=hashList[0][:-1])
|
||||
|
||||
# Token incorporation operation
|
||||
## Existance of keyword
|
||||
|
||||
"""
|
||||
APPLY RULES
|
||||
|
||||
1. After application of apply rule1, a parser rule will either return a value or will classify the result as noise
|
||||
|
||||
"""
|
||||
|
||||
def apply_rule1(*argv):
    """APPLY RULE 1: run a parser rule and normalise its outcome.

    argv[0] is the rule callable; the remaining arguments are passed to it.
    Returns the string "noise" when the rule evaluates to False, otherwise
    the rule's return value (per the module notes: a parser rule "will
    either return a value or classify the result as noise").

    Fixes the original's syntax error ``elif a if True:`` (intended to pass
    the rule's value through).
    """
    result = argv[0](*argv[1:])
    if result is False:
        # A failed rule classifies the data as noise.
        return "noise"
    return result
|
||||
|
||||
# If any parser rule returns a value, queue it for further processing;
# otherwise send noise to the output engine.
# Example call left from drafting (rawstring / special_character are not
# defined here, and the original referenced the misspelled name
# "findrule_1"); kept as a comment for reference:
# apply_rule1(findrule1, rawstring, special_character)
|
||||
|
||||
def outputreturn(*argv):
    """Format parser results into the parsed_data dict consumed downstream.

    argv[0] is a category tag string; the remaining positional arguments
    fill the category-specific fields (see the inline comments carried over
    from the original call-site annotations). Returns the parsed_data dict,
    or None for an unrecognised tag.

    BUG FIX: the original built parsed_data but never returned it, so every
    call yielded None. Runtime strings (including the 'continuos-...' tag
    spellings) are preserved byte-for-byte — they are protocol values.
    """
    parsed_data = None  # unrecognised tags fall through to None
    if argv[0] == 'noise':
        parsed_data = {'type': 'noise'}
    elif argv[0] == 'token_incorporation':
        parsed_data = {
            'type': 'tokenIncorporation',
            'flodata': argv[1],  # string
            'tokenIdentification': argv[2],  # hashList[0][:-1]
            'tokenAmount': argv[3]  # initTokens
        }
    elif argv[0] == 'token_transfer':
        parsed_data = {
            'type': 'transfer',
            'transferType': 'token',
            'flodata': argv[1],  # string
            'tokenIdentification': argv[2],  # hashList[0][:-1]
            'tokenAmount': argv[3]  # amount
        }
    elif argv[0] == 'one-time-event-userchoice-smartcontract-incorporation':
        parsed_data = {
            'type': 'smartContractIncorporation',
            'contractType': 'one-time-event',
            'tokenIdentification': argv[1],  # hashList[0][:-1]
            'contractName': argv[2],  # atList[0][:-1]
            'contractAddress': argv[3],  # contractaddress[:-1]
            'flodata': argv[4],  # string
            'contractConditions': {
                'contractamount': argv[5],
                'minimumsubscriptionamount': argv[6],
                'maximumsubscriptionamount': argv[7],
                'payeeaddress': argv[8],
                'userchoice': argv[9],
                'expiryTime': argv[10]
            }
        }
    elif argv[0] == 'one-time-event-userchoice-smartcontract-participation':
        parsed_data = {
            'type': 'transfer',
            'transferType': 'smartContract',
            'flodata': argv[1],  # string
            'tokenIdentification': argv[2],  # hashList[0][:-1]
            'operation': 'transfer',
            'tokenAmount': argv[3],  # amount
            'contractName': argv[4],  # atList[0][:-1]
            'userChoice': argv[5]  # userChoice
        }
    elif argv[0] == 'one-time-event-userchoice-smartcontract-trigger':
        parsed_data = {
            'type': 'smartContractPays',
            'contractName': argv[1],  # atList[0][:-1]
            'triggerCondition': argv[2]  # triggerCondition.group().strip()[1:-1]
        }
    elif argv[0] == 'one-time-event-time-smartcontract-incorporation':
        parsed_data = {
            'type': 'smartContractIncorporation',
            'contractType': 'one-time-event',
            'tokenIdentification': argv[1],  # hashList[0][:-1]
            'contractName': argv[2],  # atList[0][:-1]
            'contractAddress': argv[3],  # contractaddress[:-1]
            'flodata': argv[4],  # string
            'contractConditions': {
                'contractamount': argv[5],
                'minimumsubscriptionamount': argv[6],
                'maximumsubscriptionamount': argv[7],
                'payeeaddress': argv[8],
                'expiryTime': argv[9]
            }
        }
    elif argv[0] == 'one-time-event-time-smartcontract-participation':
        parsed_data = {
            'type': 'transfer',
            'transferType': 'smartContract',
            'flodata': argv[1],  # string
            'tokenIdentification': argv[2],  # hashList[0][:-1]
            'operation': 'transfer',
            'tokenAmount': argv[3],  # amount
            'contractName': argv[4]  # atList[0][:-1]
        }
    elif argv[0] == 'continuos-event-token-swap-incorporation':
        parsed_data = {
            'type': 'smartContractIncorporation',
            'contractType': 'continuos-event',
            'tokenIdentification': argv[1],  # hashList[0][:-1]
            'contractName': argv[2],  # atList[0][:-1]
            'contractAddress': argv[3],  # contractaddress[:-1]
            'flodata': argv[4],  # string
            'contractConditions': {
                'subtype': argv[5],  # tokenswap
                'accepting_token': argv[6],
                'selling_token': argv[7],
                'pricetype': argv[8],
                'price': argv[9],
            }
        }
    elif argv[0] == 'continuos-event-token-swap-deposit':
        parsed_data = {
            'type': 'smartContractDeposit',
            'tokenIdentification': argv[1],  # hashList[0][:-1]
            'depositAmount': argv[2],  # depositAmount
            'contractName': argv[3],  # atList[0][:-1]
            'flodata': argv[4],  # string
            'depositConditions': {
                'expiryTime': argv[5]
            }
        }
    elif argv[0] == 'continuos-event-token-swap-participation':
        parsed_data = {
            'type': 'smartContractParticipation',
            'tokenIdentification': argv[1],  # hashList[0][:-1]
            'sendAmount': argv[2],  # sendAmount
            'receiveAmount': argv[3],  # receiveAmount
            'contractName': argv[4],  # atList[0][:-1]
            'flodata': argv[5]  # string
        }
    return parsed_data
|
||||
1534
parsing.py
1534
parsing.py
File diff suppressed because it is too large
Load Diff
296
planning.txt
Normal file
296
planning.txt
Normal file
@ -0,0 +1,296 @@
|
||||
'''
|
||||
TEMPLATE FOR SECOND STAGE AFTER INPUT CLASSIFIER
|
||||
|
||||
IF BLOCK If the output of input classifier is tokensystem-C,
|
||||
JUST LINEARLY START BUILDING IT
|
||||
|
||||
then first start building the known outputs
|
||||
|
||||
// outputreturn('token_incorporation',f"{flodata}", f"{tokenname}", f"{tokenamount}")
|
||||
|
||||
f"{flodata} = rawstring
|
||||
f"{tokenname}" = wordlist entry
|
||||
tokensystem-C-resolved = Output of second stage classification
|
||||
f"{tokenamount}" = find_number_function
|
||||
'''
|
||||
|
||||
'''
|
||||
The problem we are facing:
|
||||
|
||||
* Token transactions don't have * or @ symbols
|
||||
|
||||
* Smart Contract transactions have * , @ , # symbols
|
||||
|
||||
* Smart Contract transaction of the type one time event have 1 # before colon
|
||||
|
||||
* Smart Contract transaction of the type continuous event has 2 # after colon
|
||||
|
||||
* So we are checking for hashes based on the type of smart contract(identified by *)
|
||||
|
||||
* But the above check disregards checking hashes in token transactions
|
||||
'''
|
||||
|
||||
# Write down all the possible flodata( with all combinations possible) for
|
||||
'''
|
||||
Token creation
|
||||
create 500 million rmt#
|
||||
['#']
|
||||
|
||||
Token transfer
|
||||
transfer 200 rmt#
|
||||
['#']
|
||||
|
||||
One time event userchoice creation
|
||||
Create Smart Contract with the name India-elections-2019@ of the type one-time-event* using the asset rmt# at the address F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1$ with contract-conditions: (1) contractAmount=0.001rmt (2) userChoices=Narendra Modi wins| Narendra Modi loses (3) expiryTime= Wed May 22 2019 21:00:00 GMT+0530
|
||||
['@','*','#','$',':']
|
||||
['@','*','#','$',':','#']
|
||||
|
||||
One time event userchoice participation
|
||||
send 0.001 rmt# to india-elections-2019@ to FLO address F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1 with the userchoice:'narendra modi wins'
|
||||
['#','@',':']
|
||||
['#','@','$',':']
|
||||
|
||||
One time event userchoice trigger
|
||||
india-elections-2019@ winning-choice:'narendra modi wins'
|
||||
['@',':']
|
||||
|
||||
One time event timeevent creation
|
||||
Create Smart Contract with the name India-elections-2019@ of the type one-time-event* using the asset rmt# at the address F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1$ with contract-conditions: (1) contractAmount=0.001rmt (2) expiryTime= Wed May 22 2019 21:00:00 GMT+0530
|
||||
['@','*','#','$',':']
|
||||
['@','*','#','$',':','#']
|
||||
|
||||
One time event timeevent participation
|
||||
send 0.001 rmt# to india-elections-2019@ to FLO address F7osBpjDDV1mSSnMNrLudEQQ3cwDJ2dPR1
|
||||
['#','@']
|
||||
['#','@','$']
|
||||
|
||||
Continuos event token swap creation
|
||||
Create Smart Contract with the name swap-rupee-bioscope@ of the type continuous-event* at the address oRRCHWouTpMSPuL6yZRwFCuh87ZhuHoL78$ with contract-conditions :
|
||||
(1) subtype = tokenswap
|
||||
(2) accepting_token = rupee#
|
||||
(3) selling_token = bioscope#
|
||||
(4) price = '15'
|
||||
(5) priceType = ‘predetermined’
|
||||
(6) direction = oneway
|
||||
|
||||
['@','*','$',':','#','#']
|
||||
|
||||
Continuos event tokenswap deposit
|
||||
Deposit 15 bioscope# to swap-rupee-bioscope@ its FLO address being oRRCHWouTpMSPuL6yZRwFCuh87ZhuHoL78$ with deposit-conditions: (1) expiryTime= Wed Nov 17 2021 21:00:00 GMT+0530
|
||||
['#','@',':']
|
||||
['#','@','$',':']
|
||||
|
||||
Continuos event tokenswap participation
|
||||
Send 15 rupee# to swap-rupee-article@ its FLO address being FJXw6QGVVaZVvqpyF422Aj4FWQ6jm8p2dL$
|
||||
['#','@']
|
||||
['#','@','$']
|
||||
'''
|
||||
|
||||
'''
|
||||
|
||||
['#'] - Token creation
|
||||
|
||||
['#'] - Token particiation
|
||||
|
||||
|
||||
['@','*','#','$',':'] - Smart contract creation user-choice
|
||||
['@','*','#','$',':','#']
|
||||
|
||||
['#','@',':'] - Smart contract participation user-choice
|
||||
['#','@','$',':']
|
||||
|
||||
['@',':'] - Smart contract trigger user-choice
|
||||
|
||||
|
||||
['@','*','#','$',':'] - Smart contract creation - ote-timebased
|
||||
['@','*','#','$',':','#']
|
||||
|
||||
['#','@'] - Smart contract particiation - ote-timebased
|
||||
['#','@','$']
|
||||
|
||||
|
||||
['@','*','$',':','#','#'] - Smart contract creation - continuos event - tokenswap
|
||||
|
||||
['#','@',':'] - Smart contract deposit - continuos event - tokenswap
|
||||
['#','@','$',':']
|
||||
|
||||
['#','@'] - Smart contract participation - continuos event - tokenswap
|
||||
['#','@','$'] - Smart contract participation - continuos event - tokenswap
|
||||
|
||||
'''
|
||||
|
||||
'''
|
||||
|
||||
['#'] - Token creation
|
||||
|
||||
['#'] - Token particiation
|
||||
|
||||
|
||||
['@','*','#','$',':'] - Smart contract creation ote-userchoice
|
||||
['@','*','#','$',':','#']
|
||||
|
||||
['@','*','#','$',':'] - Smart contract creation - ote-timebased
|
||||
['@','*','#','$',':','#']
|
||||
|
||||
|
||||
['#','@',':'] - Smart contract participation user-choice
|
||||
['#','@','$',':']
|
||||
|
||||
['#','@',':'] - Smart contract deposit - continuos event - tokenswap
|
||||
['#','@','$',':']
|
||||
|
||||
|
||||
['@',':'] - Smart contract trigger user-choice
|
||||
|
||||
|
||||
['#','@'] - Smart contract particiation - ote-timebased
|
||||
['#','@','$']
|
||||
|
||||
['#','@'] - Smart contract participation - continuos event - tokenswap
|
||||
['#','@','$'] - Smart contract participation - continuos event - tokenswap
|
||||
|
||||
|
||||
['@','*','$',':','#','#'] - Smart contract creation - continuos event - tokenswap
|
||||
|
||||
'''
|
||||
|
||||
'''
|
||||
Conflicts -
|
||||
|
||||
1. Token creation | Token participation
|
||||
2. Smart contract CREATION of the type one-time-event-userchoice | one-time-event-timebased
|
||||
3. Smart contract PARTICIPATION user-choice | Smart contract DEPOSIT continuos-event token-swap
|
||||
4. Smart contract PARTICIPATION one-time-event-timebased | Smart contract participation - continuos event - tokenswap
|
||||
|
||||
'''
|
||||
|
||||
'''
|
||||
|
||||
Emerging parser design
|
||||
|
||||
Phase 1 - Input processing | Special character position based classification and noise detection (FINISHED)
|
||||
Phase 2 - Conflict recognition (FINISHED)
|
||||
Phase 3 - Category based keyword checks
|
||||
Phase 4 - Parser rules for finding data
|
||||
Phase 5 - Rules for applying parser rules
|
||||
Phase 6 - Category based data field extraction
|
||||
Phase 7 - Output formatting and return (FINISHED)
|
||||
|
||||
'''
|
||||
|
||||
'''
|
||||
Allowed formats of Smart Contract and token names
|
||||
|
||||
1. First character should always be an Alphabet, lower case or upper case
|
||||
2. The last character should always be an Alphabet, lower case or upper case
|
||||
3. The middle characters can be a - or _
|
||||
|
||||
Check for FLO Address
|
||||
|
||||
Write checks for conditions inside contract conditions
|
||||
Serious error handling for contract-conditions
|
||||
* 2222:00 gives error
|
||||
* contractAmount = 0.022rt gives error | check if space is allowed between 0.022 rt
|
||||
'''
|
||||
|
||||
|
||||
'''
|
||||
|
||||
What we need for NFT contract code
|
||||
|
||||
1. NFT-address mapping table in system.db
|
||||
2. New main transaction category class
|
||||
3. New sub-category for transfer category class ie. NFT transfer
|
||||
|
||||
|
||||
NFT Smart Contract end cases
|
||||
1. NFT against an address
|
||||
2. NFT against another NFT
|
||||
3.
|
||||
|
||||
flodata format for NFT
|
||||
Create 1000 NFT with bioscope# with nft-details: (1) name = 'bioscope' (2) hash =
|
||||
|
||||
Create 100 albumname# as NFT with 2CF24DBA5FB0A30E26E83B2AC5B9E29E1B161E5C1FA7425E73043362938B9824 as asset hash
|
||||
[#]
|
||||
|
||||
Rules
|
||||
-----
|
||||
DIFFERENT BETWEEN TOKEN AND NFT
|
||||
System.db will have a different entry
|
||||
in creation nft word will be extra
|
||||
NFT Hash must be present
|
||||
Creation and transfer amount .. only integer parts will be taken
|
||||
Keyword nft must be present in both creation and transfer
|
||||
|
||||
'''
|
||||
|
||||
'''
|
||||
|
||||
Need infinite tokens to create stable coins, so they can be created without worrying about the upper limit of the coins
|
||||
|
||||
'''
|
||||
|
||||
'''
|
||||
Create another table in system.db, it simply writes what is every database in one place
|
||||
|
||||
Database_name Database type
|
||||
|
||||
'''
|
||||
|
||||
'''
|
||||
IDEA FOR NEW ROLLBACK SYSTEM - 24 Jan 2022
|
||||
-------------------------------------------
|
||||
|
||||
245436
|
||||
[
|
||||
tx1 - rmt - 245436 - send 10 rmt#
|
||||
tx2 - rmt - 245436 - send 4 rmt#
|
||||
tx3 - rmt - 245436 - send 1 rmt#
|
||||
tx4 - rmt - 245436 - send 100 rmt#
|
||||
tx5 - rmt trigger(5) - 245436 - trigger
|
||||
]
|
||||
|
||||
banana - txhash
|
||||
orange - entries in activepid table
|
||||
mangoes - entries in transaction history table
|
||||
|
||||
CURRENT SYSTEM
|
||||
given a block , find out all the oranges in the block
|
||||
given a block, find out all the bananas in the block and
|
||||
for each banana, find corresponding databases( found through parsing of banana flodata and banana txdata)
|
||||
- if token database then rollback, if contractDatabase then delete entry
|
||||
|
||||
|
||||
NEW SYSTEM
|
||||
give a block , find out all the oranges in the block
|
||||
given a block, find out all the bananas in the block and their corresponding databases( found through parsing of banana flodata and banana txdata)
|
||||
- start opening all those databases one by one | if token database then rollback, if contractDatabase then delete entry
|
||||
|
||||
send transaction -> receive the databases associated with it
|
||||
|
||||
'''
|
||||
|
||||
'''
|
||||
Step 1
|
||||
The block that we are rolling back into is earlier than the database creation blockNumber, then delete the whole database without rolling back. Do this for both token databases and smart contract databases
|
||||
|
||||
Step 2
|
||||
If the rolling back block is later than database creation blockNumber, then invoke rollback a database function( rollback_database )
|
||||
|
||||
Step 3
|
||||
Create a list of databases to be opened, and creation date (creation date is block number). This will exclude the token and smart contract databases which are already deleted
|
||||
|
||||
Step 4
|
||||
For each of the database to be opened, rollback the database to rollback point
|
||||
rollback_database will take 2 inputs, a block number to which it has to rollback to and the name of the database
|
||||
|
||||
Step 5
|
||||
Create a delete function, which will delete from transactionHistory, latestCache and contractDatabase
|
||||
|
||||
To-do
|
||||
------
|
||||
* Integrate all the functions in the following order:
|
||||
1 , 2 , 3 , 4 , 5 | That will finish the operation of taking the block number as input and the roll back function will rollback upto the block number specified for all kinds of databases and all kinds of transactions
|
||||
|
||||
'''
|
||||
55
pyflosetup.sh
Normal file
55
pyflosetup.sh
Normal file
@ -0,0 +1,55 @@
|
||||
#!/bin/bash

# =====================
# Setup Script for PyFLO
# =====================
# Installs system dependencies, clones the PyFLO repository, installs the
# Python dependencies, and installs PyFLO itself.

# Exit on any error
set -e

# Step 1: Update package list
echo "Updating package list..."
sudo apt update

# Step 2: Install system dependencies
echo "Installing system dependencies..."
sudo apt install -y build-essential libssl-dev pkg-config python3.7-dev python3-setuptools git

echo "System dependencies installed successfully."

# Step 3: Clone the PyFLO repository (idempotent)
if [ ! -d "pyflo" ]; then
    echo "Cloning the PyFLO repository..."
    git clone https://github.com/ranchimall/pyflo
else
    echo "PyFLO repository already exists. Skipping clone."
fi

# Step 4: Install Python dependencies
echo "Installing Python dependencies..."
if [ ! -f "requirements.txt" ]; then
    # Generate a requirements.txt file if missing.
    # NOTE(review): "arduino" is an unusual dependency for this project -- confirm.
    echo "arduino" > requirements.txt
    # echo "pybtc" >> requirements.txt
    echo "config" >> requirements.txt
    echo "Generated requirements.txt with default dependencies."
else
    echo "requirements.txt file exists. Using it for installation."
fi

# Ensure pip is up-to-date
pip install --upgrade pip

# Install Python packages
pip install --use-pep517 -r requirements.txt

# Step 5: Install PyFLO
# FIX: run setup.py from inside the repository; "python3 pyflo/setup.py install"
# executes with the wrong working directory, so relative paths used by the
# build (package discovery, data files) do not resolve.
echo "Installing PyFLO..."
(cd pyflo && sudo python3 setup.py install)

# Inform the user
echo "Python dependencies and PyFLO installed successfully."

# Step 6: Final instructions
echo "Setup complete! You're ready to use PyFLO."
|
||||
4458
ranchimallflo_api.py
Normal file
4458
ranchimallflo_api.py
Normal file
File diff suppressed because it is too large
Load Diff
@ -1,16 +1,43 @@
|
||||
aiofiles
|
||||
aiohttp
|
||||
aiomysql
|
||||
apscheduler==3.9.1
|
||||
arrow==1.1.0
|
||||
arduino
|
||||
bidict==0.21.2
|
||||
certifi==2021.5.30
|
||||
cffi==1.14.5
|
||||
requests==2.25.0
|
||||
blinker==1.4
|
||||
cachetools
|
||||
certifi==2022.12.7
|
||||
cffi
|
||||
chardet==3.0.4
|
||||
Click==7.0
|
||||
cryptography
|
||||
DBUtils
|
||||
greenlet==1.1.0
|
||||
h11==0.9.0
|
||||
h2==3.1.1
|
||||
hpack==3.0.0
|
||||
Hypercorn==0.8.2
|
||||
hyperframe==5.2.0
|
||||
idna==2.10
|
||||
itsdangerous==1.1.0
|
||||
Jinja2==2.11.3
|
||||
MarkupSafe
|
||||
multidict==4.5.2
|
||||
priority==1.3.0
|
||||
pycparser==2.20
|
||||
pyflo-lib==2.0.9
|
||||
pymysql
|
||||
python-dateutil==2.8.1
|
||||
python-engineio==3.14.2
|
||||
python-socketio==4.6.1
|
||||
secp256k1==0.13.2
|
||||
Quart==0.10.0
|
||||
Quart-CORS==0.2.0
|
||||
requests==2.25.0
|
||||
six==1.16.0
|
||||
sortedcontainers==2.1.0
|
||||
SQLAlchemy==1.4.18
|
||||
toml==0.10.0
|
||||
typing-extensions==3.7.4
|
||||
urllib3==1.26.5
|
||||
websockets==11.0.3
|
||||
wsproto==0.15.0
|
||||
|
||||
119
setup.sh
Normal file
119
setup.sh
Normal file
@ -0,0 +1,119 @@
|
||||
#!/bin/bash

# =====================
# Setup Script for PyFLO, MySQL, Virtual Environment, and FLO Token Tracking
# =====================

# Exit on any error
set -e

# Step 1: Update Package List
echo "Updating package list..."
sudo apt update

# Step 2: Install System Dependencies
# software-properties-common provides add-apt-repository, used in Step 3;
# without it the PPA step fails on minimal installs.
echo "Installing system dependencies..."
sudo apt install -y build-essential libssl-dev pkg-config python3-setuptools git software-properties-common

# Inform the user
echo "System dependencies installed successfully."

# Step 3: Check and Install Python 3.7
echo "Checking for Python 3.7..."
if ! python3.7 --version &>/dev/null; then
    echo "Python 3.7 not found. Installing Python 3.7..."
    sudo add-apt-repository ppa:deadsnakes/ppa -y
    sudo apt update
    sudo apt install -y python3.7 python3.7-venv
    echo "Python 3.7 installed successfully."
else
    echo "Python 3.7 is already installed."
fi

# Step 4: Set Up Virtual Environment Using Python 3.7
VENV_NAME="myenv"
echo "Setting up virtual environment using Python 3.7..."
/usr/bin/python3.7 -m venv $VENV_NAME
echo "Virtual environment '$VENV_NAME' created successfully."

# Activate the virtual environment
source $VENV_NAME/bin/activate

# Inform the user
echo "Virtual environment activated. Using Python version:"
python --version

# Step 5: Check and Install MySQL
echo "Checking if MySQL is installed..."
if ! dpkg -l | grep -q mysql-server; then
    echo "MySQL is not installed. Installing MySQL server, client, and development libraries..."
    sudo apt install -y mysql-server mysql-client libmysqlclient-dev
    echo "MySQL installed successfully."
else
    echo "MySQL is already installed. Skipping installation."
fi

# Step 6: Check and Start MySQL Service
echo "Checking if MySQL service is running..."
if systemctl is-active --quiet mysql; then
    echo "MySQL is already running."
else
    echo "MySQL is not running. Starting MySQL..."
    sudo systemctl start mysql
    echo "MySQL service started."
fi

# Enable MySQL to start on boot
sudo systemctl enable mysql

# Step 7: Configure MySQL Default User and Privileges
# SECURITY NOTE(review): credentials are hard-coded here and will end up in
# version control and shell history. Consider reading them from the
# environment or a protected config file instead.
echo "Configuring MySQL user and privileges..."
MYSQL_USER="FUfB6cwSsGDbQpmA7Qs8zQJxU3HpwCdnjT"
MYSQL_PASSWORD="RAcifrTM2V75ipy5MeLYaDU3UNcUXtrit933TGM5o7Yj2fs8XdP5"

sudo mysql -e "CREATE USER IF NOT EXISTS '${MYSQL_USER}'@'localhost' IDENTIFIED BY '${MYSQL_PASSWORD}';"
sudo mysql -e "GRANT ALL PRIVILEGES ON rm_%_db.* TO '${MYSQL_USER}'@'localhost' WITH GRANT OPTION;"
sudo mysql -e "FLUSH PRIVILEGES;"

echo "MySQL user '${MYSQL_USER}' created and granted privileges on databases matching 'rm_%_db'."

# Step 8: Clone the FLO Token Tracking Repository only when setup.sh is not
# already running from inside the repository directory.
if [ ! -f "tracktokens-smartcontracts.py" ]; then
    echo "Cloning the FLO Token Tracking repository (mysql-migration branch)..."
    git clone --branch mysql-migration https://github.com/ranchimall/flo-token-tracking
    cd flo-token-tracking
else
    echo "Setup is already in the directory containing the repository. Skipping clone."
fi

# Step 9: Install Python Dependencies
echo "Installing Python dependencies..."
if [ ! -f "requirements.txt" ]; then
    # Generate a requirements.txt file if missing
    echo "arduino" > requirements.txt
    echo "pybtc" >> requirements.txt
    echo "config" >> requirements.txt
    echo "pymysql" >> requirements.txt
    echo "Generated requirements.txt with default dependencies."
else
    # BUGFIX: append pymysql only when it is not already listed; the
    # original appended unconditionally, so every rerun of this script grew
    # requirements.txt with a duplicate "pymysql" line.
    if ! grep -qxF "pymysql" requirements.txt; then
        echo "requirements.txt file exists. Adding pymysql to the list."
        echo "pymysql" >> requirements.txt
    else
        echo "requirements.txt already lists pymysql. Skipping append."
    fi
fi

# Ensure pip is up-to-date
pip install --upgrade pip

# Install Python packages
pip install --use-pep517 -r requirements.txt

# Step 10: Start the Python Application
echo "Starting the Python application 'tracktokens-smartcontracts.py'..."
python3.7 tracktokens-smartcontracts.py

# Final Instructions
# NOTE: these lines only run after the application above exits.
echo "========================================================"
echo "Setup is complete. MySQL server is installed and running."
echo "Virtual environment '$VENV_NAME' is set up and active."
echo "MySQL user '${MYSQL_USER}' has been created with privileges on databases matching 'rm_%_db'."
echo "The Python application has been started."
echo "========================================================"
|
||||
29
smart-contract-system-redesign.txt
Normal file
29
smart-contract-system-redesign.txt
Normal file
@ -0,0 +1,29 @@
|
||||
|
||||
DATABASES
|
||||
* Database operations have to be optimized
|
||||
- in terms of not repeating too often
|
||||
- Save changes only when all business logic is approved, since we are working with multiple databases currently
|
||||
* Too much repetition in database operations right now
|
||||
* Database model classes, for SQLAlchemy, have to be optimized, i.e. the base classes for tokenswap and one-time-event are totally different right now
|
||||
* Make all database operations to follow SQLAlchemy, no direct SQL commands
|
||||
* Remove all position based queries
|
||||
|
||||
PROGRAM STRUCTURE
|
||||
* Optimize overall program structure
|
||||
|
||||
NEW FEATURES
|
||||
* Rollback feature
|
||||
* When processing blocks from the websocket API, check the blockheight of the new block vs the latest block in the database | this is to make sure none of the transactions go missing
|
||||
|
||||
|
||||
-----
|
||||
processBlocks
|
||||
|
||||
* find the last scanned block in the database
|
||||
* find the latest block at the API
|
||||
* for loop for lastscannedblock to latestblock
|
||||
* processEach transaction based on business logic
|
||||
* Update system.db to reflect currently scanned block as the latest block
|
||||
|
||||
* Check for local smart contract triggers
|
||||
* Check if any token swap contract deposits have to be returned
|
||||
62
sqlite_to_mysql.sh
Normal file
62
sqlite_to_mysql.sh
Normal file
@ -0,0 +1,62 @@
|
||||
#!/bin/bash

## Description and Usage
# Converts a sqlite3 SQL dump file into a MySQL-compatible dump file.
#   sqlite3 latestCache.db .dump > latestCache_dump.sql   # create the input dump
#   chmod +x sqlite_to_mysql.sh                           # make this script executable
#   ./sqlite_to_mysql.sh latestCache_dump.sql             # writes latestCache_dump_mysql.sql

# Check if input file is provided
if [ "$#" -ne 1 ]; then
    echo "Usage: $0 <sqlite_sql_file>"
    exit 1
fi

# Input file
input_file=$1

# Output file
output_file="${input_file%.sql}_mysql.sql"

# Create a new (empty) output file
> "$output_file"

# Function to modify the SQL dump for MySQL compatibility
convert_sqlite_to_mysql() {
    awk '
    # Remove SQLite-specific PRAGMA and transaction statements
    /PRAGMA.*;/ { next }
    /BEGIN TRANSACTION;/ { next }
    /COMMIT;/ { next }

    # Replace AUTOINCREMENT with AUTO_INCREMENT
    { gsub(/AUTOINCREMENT/, "AUTO_INCREMENT") }

    # NOTE(review): this rewrites EVERY "INTEGER NOT NULL" column, not only
    # primary keys as the original comment claimed. MySQL allows a single
    # AUTO_INCREMENT column per table, so a table with several such columns
    # will fail to import -- confirm the schemas this is run against.
    { gsub(/INTEGER NOT NULL/, "INT NOT NULL AUTO_INCREMENT") }

    # Convert VARCHAR to TEXT
    { gsub(/VARCHAR/, "TEXT") }

    # Convert BOOLEAN to TINYINT for MySQL compatibility
    { gsub(/\bBOOLEAN\b/, "TINYINT(1)") }

    # Inside CREATE TABLE blocks, strip the double quotes around identifiers.
    # (A no-op gsub that replaced each quoted identifier with itself ("&")
    # was removed here; only the quote-stripping gsub has any effect.)
    /CREATE TABLE/ { in_create_table=1 }
    in_create_table && /;/ { in_create_table=0 }
    in_create_table { gsub(/"/, "") }

    # Print the (possibly modified) line
    { print }
    ' "$1" |
    # Replace '' with \' inside INSERT statements, except where '' denotes an
    # empty string value sitting directly between value delimiters.
    sed "/^INSERT INTO/ { s/\([^,(]\)''/\1\\\\'/g; s/''\([^,)]\)/\\\\'\1/g }" > "$output_file"
}

# Call the conversion function
convert_sqlite_to_mysql "$input_file"

# Print success message
echo "Conversion complete. MySQL-compatible file created: $output_file"
|
||||
87
statef_processing.py
Normal file
87
statef_processing.py
Normal file
@ -0,0 +1,87 @@
|
||||
import requests
|
||||
from operator import attrgetter
|
||||
import json
|
||||
import pdb
|
||||
|
||||
'''
|
||||
USD-INR
|
||||
https://api.exchangerate-api.com/v4/latest/usd
|
||||
|
||||
Parsed stateF
|
||||
"stateF":{
|
||||
"bitcoin_price_source":"bitpay",
|
||||
"usd_inr_exchange_source":"bitpay"
|
||||
}
|
||||
'''
|
||||
|
||||
'''
|
||||
stateF notes for amount split on contracts
|
||||
|
||||
stateF_object = {
|
||||
"floaddresses": "oPkHWcvqBHfCortTHScrVBjXLsZhWie99C-oPkHWcvqBHfCortTHScrVBjXLsZhWie99C-oPkHWcvqBHfCortTHScrVBjXLsZhWie99C",
|
||||
"splits": "10-20-30",
|
||||
}
|
||||
|
||||
'''
|
||||
|
||||
# Address on the FLO testnet whose newest transaction carries the stateF JSON.
stateF_address = 'oPkHWcvqBHfCortTHScrVBjXLsZhWie99C'

# Maps each stateF key to the named data source it should be resolved from.
stateF_object = {
    "bitcoin_price_source": "bitpay",
    "usd_inr_exchange_source": "bitpay",
}

# Per-source API descriptors: endpoint URL, the key path to walk through the
# JSON response, and the type the extracted value should be converted to.
# Key order (api, path, data_type) is significant: query_api unpacks .values().
flodata_object = {
    "bitpay": {
        "bitcoin_price_source": {
            "api": "https://bitpay.com/api/rates",
            "path": [2, "rate"],
            "data_type": "float",
        },
        "usd_inr_exchange_source": {
            "api": "https://api.exchangerate-api.com/v4/latest/usd",
            "path": ["rates", "INR"],
            "data_type": "float",
        },
    },
}
|
||||
|
||||
|
||||
def pull_stateF(floID, timeout=10):
    """Fetch the latest stateF dict published by *floID*.

    Reads the address's newest transaction from the flosight testnet API,
    parses its floData field as JSON and returns the 'stateF' entry.
    Returns None (after printing a notice) when the HTTP status is not 200.

    NOTE(review): assumes the newest transaction's floData is valid JSON
    containing a 'stateF' key -- a malformed transaction will raise.
    """
    response = requests.get(
        f"https://flosight-testnet.ranchimall.net/api/txs/?address={floID}",
        timeout=timeout,  # fail fast instead of hanging forever on a dead API
    )
    if response.status_code == 200:
        address_details = response.json()
        latest_stateF = address_details['txs'][0]['floData']
        latest_stateF = json.loads(latest_stateF)
        return latest_stateF['stateF']
    else:
        print('API response not valid')
|
||||
|
||||
def query_api(api_object, timeout=10):
    """Fetch the single external value described by *api_object*.

    *api_object* must provide 'api' (endpoint URL), 'path' (list of keys /
    indices to walk through the JSON response) and 'data_type' (currently
    only 'float' triggers conversion; any other value is returned as-is).
    Returns None (after printing a notice) when the HTTP status is not 200.
    """
    # Explicit key access instead of unpacking .values(), which silently
    # depended on the descriptor dict's insertion order.
    api = api_object['api']
    path = api_object['path']
    data_type = api_object['data_type']
    response = requests.get(api, timeout=timeout)
    if response.status_code == 200:
        # Walk the key path down to the target value
        value = response.json()
        for key in path:
            value = value[key]
        # BUGFIX: the original returned an unbound local (NameError) whenever
        # data_type was anything other than 'float'; now the raw value is
        # returned unconverted in that case.
        if data_type == 'float':
            value = float(value)
        return value
    else:
        print('API response not valid')
|
||||
|
||||
def process_stateF(stateF_object, stateF_address):
    """Resolve every stateF entry to its current external value.

    For each (key, source) pair in *stateF_object*, look up the matching API
    descriptor in the floData published at *stateF_address* and query it.
    Returns a dict mapping each stateF key to the fetched value.
    """
    flodata_object = pull_stateF(stateF_address)
    return {
        key: query_api(flodata_object[source][key])
        for key, source in stateF_object.items()
    }


if __name__ == '__main__':
    processed_statef = process_stateF(stateF_object, stateF_address)
    print(processed_statef)
|
||||
219
tests/test_parsing.py
Normal file
219
tests/test_parsing.py
Normal file
@ -0,0 +1,219 @@
|
||||
import unittest
|
||||
import sys
|
||||
sys.path.append("..")
|
||||
import parsing
|
||||
|
||||
class TestParsing(unittest.TestCase):
|
||||
|
||||
blockinfo_stub = {'time': 25634}
|
||||
|
||||
def test_token_creation(self):
|
||||
text = 'create 100 rmt#'
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'mainnet')
|
||||
expected_result = {
|
||||
'type': 'tokenIncorporation',
|
||||
'flodata': 'create 100 rmt#',
|
||||
'tokenIdentification': 'rmt',
|
||||
'tokenAmount': 100.0,
|
||||
'stateF': False
|
||||
}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
def test_token_transfer(self):
|
||||
text = 'transfer 10.340 rmt#'
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'mainnet')
|
||||
expected_result = {
|
||||
'type': 'transfer',
|
||||
'transferType': 'token',
|
||||
'flodata': 'transfer 10.340 rmt#',
|
||||
'tokenIdentification': 'rmt',
|
||||
'tokenAmount': 10.34,
|
||||
'stateF': False
|
||||
}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
def test_nft_creation(self):
|
||||
pass
|
||||
|
||||
def test_nft_transfer(self):
|
||||
pass
|
||||
|
||||
def test_infinite_token_incorporation(self):
|
||||
text = 'create usd# as infinite-token'
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'mainnet')
|
||||
expected_result = {
|
||||
'type': 'infiniteTokenIncorporation',
|
||||
'flodata': 'create usd# as infinite-token',
|
||||
'tokenIdentification': 'usd',
|
||||
'stateF': False
|
||||
}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
text = 'create usd# as infinite-token send'
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'mainnet')
|
||||
expected_result = {'type': 'noise'}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
def test_infinite_token_transfer(self):
|
||||
pass
|
||||
|
||||
def test_onetimeevent_timetrigger_creation(self):
|
||||
# contractamount
|
||||
text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) contractAmount=0.1 end-contract-conditions'''
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
|
||||
expected_result = {
|
||||
'type': 'smartContractIncorporation',
|
||||
'contractType': 'one-time-event',
|
||||
'subtype': 'time-trigger',
|
||||
'tokenIdentification': 'bioscope',
|
||||
'contractName': 'all-crowd-fund-1',
|
||||
'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz',
|
||||
'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) contractAmount=0.1 end-contract-conditions',
|
||||
'contractConditions': {
|
||||
'contractAmount': '0.1',
|
||||
'payeeAddress': {
|
||||
'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0
|
||||
},
|
||||
'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530',
|
||||
'unix_expiryTime': 1668387900.0
|
||||
}
|
||||
}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
# minimumsubscriptionamount
|
||||
text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 end-contract-conditions'''
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
|
||||
expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype':'time-trigger','tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 end-contract-conditions', 'contractConditions': {'minimumsubscriptionamount': '1.0', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
# maximumsubscriptionamount
|
||||
text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) maximumsubscriptionamount=10 end-contract-conditions'''
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
|
||||
expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger','tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) maximumsubscriptionamount=10 end-contract-conditions', 'contractConditions': {'maximumsubscriptionamount': '10.0', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
# minimumsubscriptionamount | contractamount
|
||||
text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1.600 (4) contractAmount=0.1 end-contract-conditions'''
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
|
||||
expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger', 'tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1.600 (4) contractAmount=0.1 end-contract-conditions', 'contractConditions': {'contractAmount': '0.1', 'minimumsubscriptionamount': '1.6', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
# maximumsubscriptionamount | contractamount
|
||||
text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) maximumsubscriptionamount=10 (4) contractAmount=0.1 end-contract-conditions'''
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
|
||||
expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger', 'tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) maximumsubscriptionamount=10 (4) contractAmount=0.1 end-contract-conditions', 'contractConditions': {'contractAmount': '0.1', 'maximumsubscriptionamount': '10.0', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
# minimumsubscriptionamount | maximumsubscriptionamount
|
||||
text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 (4) maximumsubscriptionamount=10 end-contract-conditions'''
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
|
||||
expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype':'time-trigger','tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 (4) maximumsubscriptionamount=10 end-contract-conditions', 'contractConditions': {'minimumsubscriptionamount': '1.0', 'maximumsubscriptionamount': '10.0', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
# minimumsubscriptionamount | maximumsubscriptionamount | contractamount
|
||||
text = '''Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions:(1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 (4) maximumsubscriptionamount=10 (5) contractAmount=0.1 end-contract-conditions'''
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
|
||||
expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger', 'tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 (4) maximumsubscriptionamount=10 (5) contractAmount=0.1 end-contract-conditions', 'contractConditions': {'contractAmount': '0.1', 'minimumsubscriptionamount': '1.0', 'maximumsubscriptionamount': '10.0', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
# With single payeeAddress with : format
|
||||
text = "Create a smart contract of the name album-fund@ of the type one-time-event* using asset bioscope# at the FLO address ocsiFSsjek3UXKdHpBWF79qrGN6qbpxeMt$ with contract-conditions: (1) expiryTime= Thu May 04 2023 18:57:00 GMT+0530 (India Standard Time) (2) payeeAddress= objfBRUX5zn4W56aHhRn4DgH6xqeRWk6Xc:100 end-contract-conditions"
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
|
||||
expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger', 'tokenIdentification': 'bioscope', 'contractName': 'album-fund', 'contractAddress': 'ocsiFSsjek3UXKdHpBWF79qrGN6qbpxeMt', 'flodata': 'Create a smart contract of the name album-fund@ of the type one-time-event* using asset bioscope# at the FLO address ocsiFSsjek3UXKdHpBWF79qrGN6qbpxeMt$ with contract-conditions: (1) expiryTime= Thu May 04 2023 18:57:00 GMT+0530 (India Standard Time) (2) payeeAddress= objfBRUX5zn4W56aHhRn4DgH6xqeRWk6Xc:100 end-contract-conditions', 'contractConditions': {'payeeAddress': {'objfBRUX5zn4W56aHhRn4DgH6xqeRWk6Xc': 100.0}, 'expiryTime': 'thu may 04 2023 18:57:00 gmt+0530 (india standard time)', 'unix_expiryTime': 1683246420.0}}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
# With single payeeAddress with normal format
|
||||
text = "Create a smart contract of the name album-fund@ of the type one-time-event* using asset bioscope# at the FLO address ocsiFSsjek3UXKdHpBWF79qrGN6qbpxeMt$ with contract-conditions: (1) expiryTime= Thu May 04 2023 18:57:00 GMT+0530 (India Standard Time) (2) payeeAddress= objfBRUX5zn4W56aHhRn4DgH6xqeRWk6Xc end-contract-conditions"
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
|
||||
expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger', 'tokenIdentification': 'bioscope', 'contractName': 'album-fund', 'contractAddress': 'ocsiFSsjek3UXKdHpBWF79qrGN6qbpxeMt', 'flodata': 'Create a smart contract of the name album-fund@ of the type one-time-event* using asset bioscope# at the FLO address ocsiFSsjek3UXKdHpBWF79qrGN6qbpxeMt$ with contract-conditions: (1) expiryTime= Thu May 04 2023 18:57:00 GMT+0530 (India Standard Time) (2) payeeAddress= objfBRUX5zn4W56aHhRn4DgH6xqeRWk6Xc end-contract-conditions', 'contractConditions': {'payeeAddress': {'objfBRUX5zn4W56aHhRn4DgH6xqeRWk6Xc': 100}, 'expiryTime': 'thu may 04 2023 18:57:00 gmt+0530 (india standard time)', 'unix_expiryTime': 1683246420.0}}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
# With multiple payeeAddress with : format
|
||||
text = "Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 (4) maximumsubscriptionamount=10 (5) contractAmount=0.1 end-contract-conditions"
|
||||
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
|
||||
expected_result = {'type': 'smartContractIncorporation', 'contractType': 'one-time-event', 'subtype': 'time-trigger', 'tokenIdentification': 'bioscope', 'contractName': 'all-crowd-fund-1', 'contractAddress': 'oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz', 'flodata': 'Create a smart contract of the name all-crowd-fund-1@ of the type one-time-event* using asset bioscope# at the FLO address oQkpZCBcAWc945viKqFmJVbVG4aKY4V3Gz$ with contract-conditions: (1) expiryTime= Sun Nov 13 2022 19:35:00 GMT+0530 (2) payeeAddress=oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7:10:oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij:20:oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5:30:oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ:40 (3) minimumsubscriptionamount=1 (4) maximumsubscriptionamount=10 (5) contractAmount=0.1 end-contract-conditions', 'contractConditions': {'contractAmount': '0.1', 'minimumsubscriptionamount': '1.0', 'maximumsubscriptionamount': '10.0', 'payeeAddress': {'oQotdnMBAP1wZ6Kiofx54S2jNjKGiFLYD7': 10.0, 'oMunmikKvxsMSTYzShm2X5tGrYDt9EYPij': 20.0, 'oRpvvGEVKwWiMnzZ528fPhiA2cZA3HgXY5': 30.0, 'oWpVCjPDGzaiVfEFHs6QVM56V1uY1HyCJJ': 40.0}, 'expiryTime': 'sun nov 13 2022 19:35:00 gmt+0530', 'unix_expiryTime': 1668387900.0}}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
|
||||
def test_onetimeevent_timetrigger_participation(self):
|
||||
text = '''send 2.2 bioscope# to all-crowd-fund@'''
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
|
||||
expected_result = {'type': 'transfer', 'transferType': 'smartContract', 'flodata': 'send 2.2 bioscope# to all-crowd-fund@', 'tokenIdentification': 'bioscope', 'tokenAmount': 2.2, 'contractName': 'all-crowd-fund'}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
text = 'transfer 6.20000 bioscope# to all-crowd-fund-7@'
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
|
||||
expected_result = {'type': 'transfer', 'transferType': 'smartContract', 'flodata': 'transfer 6.20000 bioscope# to all-crowd-fund-7@', 'tokenIdentification': 'bioscope', 'tokenAmount': 6.2, 'contractName': 'all-crowd-fund-7'}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
text = 'transfer 6.20000 bioscope# to all-crowd-fund-7@ 24'
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
|
||||
expected_result = {'type': 'noise'}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
text = 'transfer 6.20000 bioscope# to all-crowd-fund-7@ 24 '
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
|
||||
expected_result = {'type': 'noise'}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
text = '6.20.000 transfer bioscope# to all-crowd-fund-7@ 24'
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
|
||||
expected_result = {'type': 'transfer', 'transferType': 'smartContract', 'flodata': '6.20.000 transfer bioscope# to all-crowd-fund-7@ 24', 'tokenIdentification': 'bioscope', 'tokenAmount': 24.0, 'contractName': 'all-crowd-fund-7'}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
    def test_onetimeevent_externaltrigger_creation(self):
        """One-time-event incorporation with userchoices (instead of a payee)
        must be classified as subtype 'external-trigger'."""
        # contractamount
        # NOTE(review): the input has 'contract-conditions:(1)' without a space,
        # while the expected flodata contains 'contract-conditions: (1)' --
        # presumably the parser normalises this whitespace; confirm in parsing.py.
        text = '''Create a smart contract of the name twitter-survive@ of the type one-time-event* using asset bioscope# at the FLO address oVbebBNuERWbouDg65zLfdataWEMTnsL8r$ with contract-conditions:(1) expiryTime= Sun Nov 15 2022 14:55:00 GMT+0530 (2) userchoices= survives | dies (3) contractAmount=0.02 end-contract-conditions'''
        result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
        expected_result = {
            'type': 'smartContractIncorporation',
            'contractType': 'one-time-event',
            'subtype': 'external-trigger',
            'tokenIdentification': 'bioscope',
            'contractName': 'twitter-survive',
            'contractAddress': 'oVbebBNuERWbouDg65zLfdataWEMTnsL8r',
            'flodata': 'Create a smart contract of the name twitter-survive@ of the type one-time-event* using asset bioscope# at the FLO address oVbebBNuERWbouDg65zLfdataWEMTnsL8r$ with contract-conditions: (1) expiryTime= Sun Nov 15 2022 14:55:00 GMT+0530 (2) userchoices= survives | dies (3) contractAmount=0.02 end-contract-conditions',
            'contractConditions': {
                'contractAmount': '0.02',
                # userchoices is stored as the *string* repr of an index->choice dict
                'userchoices': "{0: 'survives', 1: 'dies'}",
                'expiryTime': 'sun nov 15 2022 14:55:00 gmt+0530',
                'unix_expiryTime': 1668543900.0
            }
        }
        self.assertEqual(result, expected_result)
|
||||
|
||||
def test_tokenswap_deposits(self):
|
||||
text = 'Deposit 1 bioscope# to swap-rupee-bioscope-1@ its FLO address being oTzrcpLPRXsejSdYQ3XN6V4besrAPuJQrk$ with deposit-conditions: (1) expiryTime= Thu Apr 13 2023 21:45:00 GMT+0530'
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
|
||||
expected_result = {
|
||||
'type': 'smartContractDeposit',
|
||||
'tokenIdentification': 'bioscope',
|
||||
'depositAmount': 1.0,
|
||||
'contractName': 'swap-rupee-bioscope-1',
|
||||
'flodata': 'Deposit 1 bioscope# to swap-rupee-bioscope-1@ its FLO address being oTzrcpLPRXsejSdYQ3XN6V4besrAPuJQrk$ with deposit-conditions: (1) expiryTime= Thu Apr 13 2023 21:45:00 GMT+0530',
|
||||
'depositConditions': {
|
||||
'expiryTime': 'thu apr 13 2023 21:45:00 gmt+0530'
|
||||
},
|
||||
'stateF': False}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
def test_contract_trigger(self):
|
||||
text = 'contract@ triggerCondition:"twitter-survives"'
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
|
||||
expected_result = {
|
||||
'type': 'smartContractPays',
|
||||
'contractName': 'contract',
|
||||
'triggerCondition': 'twitter-survives',
|
||||
'stateF': False}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
def test_deposit_invalid(self):
|
||||
text = 'Deposit 1 bioscope# to swap-rupee-bioscope-1@ its FLO address being oTzrcpLPRXsejSdYQ3XN6V4besrAPuJQrk$ with deposit-conditions: (1) expiryTime= Tue, 25 Apr 2023 13:40:00 GMT'
|
||||
result = parsing.parse_flodata(text, TestParsing.blockinfo_stub, 'testnet')
|
||||
expected_result = {'type': 'noise'}
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
|
||||
# Script entry point: run this module's unittest test cases.
if __name__ == '__main__':
    unittest.main()
|
||||
File diff suppressed because it is too large
Load Diff
3428
tracktokens_smartcontracts.py
Executable file
3428
tracktokens_smartcontracts.py
Executable file
File diff suppressed because it is too large
Load Diff
92
util_db_connect.py
Normal file
92
util_db_connect.py
Normal file
@ -0,0 +1,92 @@
|
||||
import argparse
|
||||
import configparser
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import pyflo
|
||||
import requests
|
||||
import socketio
|
||||
from sqlalchemy import create_engine, func
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
import time
|
||||
import arrow
|
||||
import parsing
|
||||
from datetime import datetime
|
||||
from ast import literal_eval
|
||||
from models import SystemData, TokenBase, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, TokenContractAssociation, RejectedTransactionHistory, ContractBase, ContractStructure, ContractParticipants, ContractTransactionHistory, ContractDeposits, ConsumedInfo, ContractWinners, ContinuosContractBase, ContractStructure2, ContractParticipants2, ContractDeposits2, ContractTransactionHistory2, SystemBase, ActiveContracts, SystemData, ContractAddressMapping, TokenAddressMapping, DatabaseTypeMapping, TimeActions, RejectedContractTransactionHistory, RejectedTransactionHistory, LatestCacheBase, LatestTransactions, LatestBlocks
|
||||
from statef_processing import process_stateF
|
||||
|
||||
|
||||
# Configuration of required variables
# Load config.ini; DATA_PATH from it locates the SQLite data directory and log file.
config = configparser.ConfigParser()
config.read('config.ini')

# Module logger: DEBUG overall; INFO and above persisted to tracking.log under
# DATA_PATH, and everything mirrored to the console via the stream handler.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

formatter = logging.Formatter('%(asctime)s:%(name)s:%(message)s')
file_handler = logging.FileHandler(os.path.join(config['DEFAULT']['DATA_PATH'],'tracking.log'))
file_handler.setLevel(logging.INFO)
file_handler.setFormatter(formatter)

stream_handler = logging.StreamHandler()
stream_handler.setFormatter(formatter)

logger.addHandler(file_handler)
logger.addHandler(stream_handler)
|
||||
|
||||
def create_database_connection(type, parameters):
    """Return a raw SQLAlchemy connection to one of the tracker's SQLite DBs.

    Args:
        type: one of 'token', 'smart_contract', 'system_dbs', 'latest_cache'.
            (Shadows the builtin ``type``; kept for caller compatibility.)
        parameters: dict carrying 'token_name', or 'contract_name' +
            'contract_address', depending on ``type``.

    Returns:
        An open SQLAlchemy Connection; the caller is responsible for closing it.

    Raises:
        ValueError: for an unsupported ``type`` (previously this fell through
        and crashed with NameError on ``engine``).
    """
    data_path = config['DEFAULT']['DATA_PATH']
    if type == 'token':
        path = os.path.join(data_path, 'tokens', f"{parameters['token_name']}.db")
        engine = create_engine(f"sqlite:///{path}", echo=True)
    elif type == 'smart_contract':
        path = os.path.join(data_path, 'smartContracts', f"{parameters['contract_name']}-{parameters['contract_address']}.db")
        engine = create_engine(f"sqlite:///{path}", echo=True)
    elif type == 'system_dbs':
        path = os.path.join(data_path, 'system.db')
        engine = create_engine(f"sqlite:///{path}", echo=False)
    elif type == 'latest_cache':
        path = os.path.join(data_path, 'latestCache.db')
        engine = create_engine(f"sqlite:///{path}", echo=False)
    else:
        raise ValueError(f"Unknown database type: {type}")

    connection = engine.connect()
    return connection
|
||||
|
||||
|
||||
def create_database_session_orm(type, parameters, base):
    """Create the schema (if missing) and return an ORM session for a tracker DB.

    Args:
        type: one of 'token', 'smart_contract', 'system_dbs'.
            (Shadows the builtin ``type``; kept for caller compatibility.)
        parameters: dict with 'token_name', or 'contract_name' +
            'contract_address', or 'db_name' depending on ``type``.
        base: the declarative Base whose tables are created on the engine.

    Returns:
        A new Session bound to the selected SQLite database.

    Raises:
        ValueError: for an unsupported ``type`` (previously this fell through
        and crashed with UnboundLocalError on ``session``).
    """
    data_path = config['DEFAULT']['DATA_PATH']
    if type == 'token':
        path = os.path.join(data_path, 'tokens', f"{parameters['token_name']}.db")
        engine = create_engine(f"sqlite:///{path}", echo=True)
    elif type == 'smart_contract':
        path = os.path.join(data_path, 'smartContracts', f"{parameters['contract_name']}-{parameters['contract_address']}.db")
        engine = create_engine(f"sqlite:///{path}", echo=True)
    elif type == 'system_dbs':
        path = os.path.join(data_path, f"{parameters['db_name']}.db")
        engine = create_engine(f"sqlite:///{path}", echo=False)
    else:
        raise ValueError(f"Unknown database type: {type}")

    # Identical tail for every branch, hoisted out of the if/elif chain.
    base.metadata.create_all(bind=engine)
    session = sessionmaker(bind=engine)()
    return session
|
||||
|
||||
|
||||
# Connect to system.db with a session
# Scratch/debug section. The commented block below shows how to list active
# contract-deposit TimeActions from the renamed system1.db.
'''session = create_database_session_orm('system_dbs', {'db_name':'system1'}, SystemBase)
subquery_filter = session.query(TimeActions.id).group_by(TimeActions.transactionHash).having(func.count(TimeActions.transactionHash)==1).subquery()
contract_deposits = session.query(TimeActions).filter(TimeActions.id.in_(subquery_filter), TimeActions.status=='active', TimeActions.activity=='contract-deposit').all()

for contract in contract_deposits:
    print(contract.transactionHash)'''

# BUGFIX: pdb.set_trace() was called below but pdb was never imported in this
# file, so the script crashed with NameError before the breakpoint.
import pdb

# Inspect token-address mappings interactively.
# NOTE(review): this filters tokenAddress against the *literal* string
# 'contractAddress' -- looks like a placeholder left in a debug query; confirm.
systemdb_session = create_database_session_orm('system_dbs', {'db_name':'system'}, SystemBase)
query = systemdb_session.query(TokenAddressMapping).filter(TokenAddressMapping.tokenAddress == 'contractAddress')
results = query.all()
pdb.set_trace()
print('Lets investigate this now')
|
||||
238
util_rebuild.py
Normal file
238
util_rebuild.py
Normal file
@ -0,0 +1,238 @@
|
||||
from sqlalchemy import create_engine, desc, func
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from models import SystemData, TokenBase, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, TokenContractAssociation, ContractBase, ContractStructure, ContractParticipants, ContractTransactionHistory, ContractDeposits, ConsumedInfo, ContractWinners, ContinuosContractBase, ContractStructure2, ContractParticipants2, ContractDeposits2, ContractTransactionHistory2, SystemBase, ActiveContracts, SystemData, ContractAddressMapping, TokenAddressMapping, DatabaseTypeMapping, TimeActions, RejectedContractTransactionHistory, RejectedTransactionHistory, LatestCacheBase, LatestTransactions, LatestBlocks
|
||||
import json
|
||||
from tracktokens_smartcontracts import processTransaction, checkLocal_expiry_trigger_deposit, newMultiRequest
|
||||
import os
|
||||
import logging
|
||||
import argparse
|
||||
import configparser
|
||||
import shutil
|
||||
import sys
|
||||
import pdb
|
||||
|
||||
|
||||
# helper functions
|
||||
def check_database_existence(type, parameters):
    """Return True if the SQLite DB file for the given entity exists on disk.

    Args:
        type: 'token' or 'smart_contract'. (Shadows the builtin ``type``;
            kept for caller compatibility.)
        parameters: dict with 'token_name', or 'contract_name' +
            'contract_address'.

    Raises:
        ValueError: for an unsupported ``type`` (previously this silently
        returned None, which reads as False downstream).
    """
    if type == 'token':
        return os.path.isfile(f"./tokens/{parameters['token_name']}.db")
    if type == 'smart_contract':
        return os.path.isfile(f"./smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db")
    raise ValueError(f"Unknown database type: {type}")
|
||||
|
||||
|
||||
def create_database_connection(type, parameters):
    """Return a raw SQLAlchemy connection to a tracker SQLite DB (CWD-relative).

    Args:
        type: 'token', 'smart_contract' or 'system_dbs'.
        parameters: dict with the name components for the chosen kind.

    Raises:
        ValueError: for an unsupported ``type`` (previously this fell through
        and crashed with NameError on ``engine``).
    """
    if type == 'token':
        engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True)
    elif type == 'smart_contract':
        engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True)
    elif type == 'system_dbs':
        engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False)
    else:
        raise ValueError(f"Unknown database type: {type}")

    connection = engine.connect()
    return connection
|
||||
|
||||
|
||||
def create_database_session_orm(type, parameters, base):
    """Create the schema (if missing) and return an ORM session (CWD-relative paths).

    Args:
        type: 'token', 'smart_contract' or 'system_dbs'.
        parameters: dict with the name components for the chosen kind.
        base: declarative Base whose tables are created on the engine.

    Raises:
        ValueError: for an unsupported ``type`` (previously this fell through
        and crashed with UnboundLocalError on ``session``).
    """
    if type == 'token':
        engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True)
    elif type == 'smart_contract':
        engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True)
    elif type == 'system_dbs':
        engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False)
    else:
        raise ValueError(f"Unknown database type: {type}")

    # Identical tail for every branch, hoisted out of the if/elif chain.
    base.metadata.create_all(bind=engine)
    session = sessionmaker(bind=engine)()
    return session
|
||||
|
||||
|
||||
# MAIN EXECUTION STARTS
# Configuration of required variables
# Module logger: DEBUG overall; INFO and above persisted to ./tracking.log,
# and everything mirrored to the console via the stream handler.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

formatter = logging.Formatter('%(asctime)s:%(name)s:%(message)s')

file_handler = logging.FileHandler('tracking.log')
file_handler.setLevel(logging.INFO)
file_handler.setFormatter(formatter)

stream_handler = logging.StreamHandler()
stream_handler.setFormatter(formatter)

logger.addHandler(file_handler)
logger.addHandler(stream_handler)
|
||||
|
||||
|
||||
# Rule 1 - Read command line arguments to reset the databases as blank
|
||||
# Rule 2 - Read config to set testnet/mainnet
|
||||
# Rule 3 - Set flo blockexplorer location depending on testnet or mainnet
|
||||
# Rule 4 - Set the local flo-cli path depending on testnet or mainnet ( removed this feature | Flosights are the only source )
|
||||
# Rule 5 - Set the block number to scan from
|
||||
|
||||
|
||||
# Read command line arguments
|
||||
parser = argparse.ArgumentParser(description='Script tracks RMT using FLO data on the FLO blockchain - https://flo.cash')
# BUGFIX: the option was spelled '--toblocknumer' while the code below read
# args.toblocknumber, so any invocation crashed with AttributeError.
parser.add_argument('-rb', '--toblocknumber', nargs='?', type=int, help='Forward to the specified block number')
parser.add_argument('-r', '--blockcount', nargs='?', type=int, help='Forward to the specified block count')
args = parser.parse_args()

# Determine forward_block: explicit target, relative count, or newest cached block.
if (args.blockcount and args.toblocknumber):
    print("You can only specify one of the options -b or -c")
    sys.exit(0)
elif args.blockcount:
    # BUGFIX: lastscannedblock was never defined (NameError). Read it from
    # system.db, where this script stores the 'lastblockscanned' attribute.
    system_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase)
    lastscannedblock = int(system_session.query(SystemData.value).filter(SystemData.attribute == 'lastblockscanned').first()[0])
    system_session.close()
    forward_block = lastscannedblock + args.blockcount
elif args.toblocknumber:
    forward_block = args.toblocknumber
else:
    latestCache_session = create_database_session_orm('system_dbs', {'db_name':'latestCache'}, LatestCacheBase)
    forward_block = int(latestCache_session.query(LatestBlocks.blockNumber).order_by(LatestBlocks.blockNumber.desc()).first()[0])
    latestCache_session.close()
# (a redundant second args = parser.parse_args() was removed)
|
||||
|
||||
# Ensure the working directories exist before renaming the current databases aside.
apppath = os.path.dirname(os.path.realpath(__file__))
dirpath = os.path.join(apppath, 'tokens')
if not os.path.isdir(dirpath):
    os.mkdir(dirpath)
dirpath = os.path.join(apppath, 'smartContracts')
if not os.path.isdir(dirpath):
    os.mkdir(dirpath)

# rename all the old databases
# system.db , latestCache.db, smartContracts, tokens
if os.path.isfile('./system.db'):
    os.rename('system.db', 'system1.db')
if os.path.isfile('./latestCache.db'):
    os.rename('latestCache.db', 'latestCache1.db')
# BUGFIX: smartContracts/ and tokens/ are directories, so os.path.isfile()
# was always False and they were never renamed; use os.path.isdir().
if os.path.isdir('./smartContracts'):
    os.rename('smartContracts', 'smartContracts1')
if os.path.isdir('./tokens'):
    os.rename('tokens', 'tokens1')
|
||||
|
||||
# Read configuration
|
||||
config = configparser.ConfigParser()
config.read('config.ini')

# todo - write all assertions to make sure default configs are right
# Abort early unless NET is exactly 'mainnet' or 'testnet'.
if (config['DEFAULT']['NET'] != 'mainnet') and (config['DEFAULT']['NET'] != 'testnet'):
    logger.error("NET parameter in config.ini invalid. Options are either 'mainnet' or 'testnet'. Script is exiting now")
    sys.exit(0)

# Specify mainnet and testnet server list for API calls and websocket calls
serverlist = None
if config['DEFAULT']['NET'] == 'mainnet':
    serverlist = config['DEFAULT']['MAINNET_FLOSIGHT_SERVER_LIST']
elif config['DEFAULT']['NET'] == 'testnet':
    serverlist = config['DEFAULT']['TESTNET_FLOSIGHT_SERVER_LIST']
# Comma-separated string in config -> list of server URLs.
serverlist = serverlist.split(',')
neturl = config['DEFAULT']['FLOSIGHT_NETURL']
tokenapi_sse_url = config['DEFAULT']['TOKENAPI_SSE_URL']

# Delete database and smartcontract directory if reset is set to 1
#if args.reset == 1:
# NOTE(review): the reset guard above is commented out, so this wipe runs
# unconditionally on every invocation of the rebuild script.
logger.info("Resetting the database. ")
apppath = os.path.dirname(os.path.realpath(__file__))
dirpath = os.path.join(apppath, 'tokens')
shutil.rmtree(dirpath)
os.mkdir(dirpath)
dirpath = os.path.join(apppath, 'smartContracts')
shutil.rmtree(dirpath)
os.mkdir(dirpath)
dirpath = os.path.join(apppath, 'system.db')
if os.path.exists(dirpath):
    os.remove(dirpath)
dirpath = os.path.join(apppath, 'latestCache.db')
if os.path.exists(dirpath):
    os.remove(dirpath)

# Read start block no
# Seed the fresh system.db so scanning state starts just before START_BLOCK.
startblock = int(config['DEFAULT']['START_BLOCK'])
session = create_database_session_orm('system_dbs', {'db_name': "system"}, SystemBase)
session.add(SystemData(attribute='lastblockscanned', value=startblock - 1))
session.commit()
session.close()

# Initialize latest cache DB
session = create_database_session_orm('system_dbs', {'db_name': "latestCache"}, LatestCacheBase)
session.commit()
session.close()
|
||||
|
||||
# get all blocks and transaction data
# Pull the replay source from the old cache (renamed to latestCache1.db),
# optionally capped at forward_block.
latestCache_session = create_database_session_orm('system_dbs', {'db_name':'latestCache1'}, LatestCacheBase)
if forward_block:
    lblocks = latestCache_session.query(LatestBlocks).filter(LatestBlocks.blockNumber <= forward_block).all()
    ltransactions = latestCache_session.query(LatestTransactions).filter(LatestTransactions.blockNumber <= forward_block).all()
else:
    lblocks = latestCache_session.query(LatestBlocks).all()
    ltransactions = latestCache_session.query(LatestTransactions).all()
latestCache_session.close()

# make a list of all internal tx block numbers
# Blocks where contracts became active in the old system DB; these are the
# points where time-based internal actions must be re-checked during replay.
systemDb_session = create_database_session_orm('system_dbs', {'db_name':'system1'}, SystemBase)
internal_action_blocks = systemDb_session.query(ActiveContracts.blockNumber).all()
internal_action_blocks = [block[0] for block in internal_action_blocks]
internal_action_blocks = sorted(internal_action_blocks)

# Index the old block rows by block number for O(1) lookup in the replay loop.
lblocks_dict = {}
for block in lblocks:
    block_dict = block.__dict__
    print(block_dict['blockNumber'])
    lblocks_dict[block_dict['blockNumber']] = {'blockHash':f"{block_dict['blockHash']}", 'jsonData':f"{block_dict['jsonData']}"}

# process and rebuild all transactions
prev_block = 0
|
||||
|
||||
# Replay every cached transaction in order, interleaving time-based internal
# actions (expiry / trigger / deposit housekeeping) at the correct blocks.
for transaction in ltransactions:
    transaction_dict = transaction.__dict__
    current_block = transaction_dict['blockNumber']

    # Check if any internal action block lies between prev_block and current_block
    for internal_block in internal_action_blocks:
        if prev_block < internal_block <= current_block:
            logger.info(f'Processing block {internal_block}')
            # Get block details
            response = newMultiRequest(f"block-index/{internal_block}")
            blockhash = response['blockHash']
            blockinfo = newMultiRequest(f"block/{blockhash}")
            # Run the expiry/trigger/deposit checks against this block
            checkLocal_expiry_trigger_deposit(blockinfo)

    transaction_data = json.loads(transaction_dict['jsonData'])
    parsed_flodata = json.loads(transaction_dict['parsedFloData'])
    try:
        block_info = json.loads(lblocks_dict[transaction_dict['blockNumber']]['jsonData'])
        processTransaction(transaction_data, parsed_flodata, block_info)
        prev_block = current_block
    # BUGFIX: a bare `except:` also swallowed SystemExit/KeyboardInterrupt and
    # hid every processing failure silently; catch Exception and log it.
    except Exception:
        logger.exception(f"Failed to process transaction {transaction_dict.get('transactionHash')} in block {current_block}")
        prev_block = current_block
        continue
|
||||
|
||||
# copy the old block data
# Copy the already-scanned block headers from the old cache into the new one.
old_latest_cache = create_database_connection('system_dbs', {'db_name':'latestCache1'})
old_latest_cache.execute("ATTACH DATABASE 'latestCache.db' AS new_db")
old_latest_cache.execute("INSERT INTO new_db.latestBlocks SELECT * FROM latestBlocks WHERE blockNumber <= ?", (forward_block,))
old_latest_cache.close()

# delete
# system.db , latestCache.db, smartContracts, tokens
if os.path.isfile('./system1.db'):
    os.remove('system1.db')
if os.path.isfile('./latestCache1.db'):
    os.remove('latestCache1.db')
# BUGFIX: these two are directories; os.path.isfile() was always False so the
# renamed old copies were never cleaned up. Use os.path.isdir().
if os.path.isdir('./smartContracts1'):
    shutil.rmtree('smartContracts1')
if os.path.isdir('./tokens1'):
    shutil.rmtree('tokens1')

# Update system.db's last scanned block
# Record the newest rebuilt block number as 'lastblockscanned' (int; safe to inline).
connection = create_database_connection('system_dbs', {'db_name': "system"})
connection.execute(f"UPDATE systemData SET value = {int(list(lblocks_dict.keys())[-1])} WHERE attribute = 'lastblockscanned';")
connection.close()
|
||||
247
util_rebuild_withAPI.py
Normal file
247
util_rebuild_withAPI.py
Normal file
@ -0,0 +1,247 @@
|
||||
from sqlalchemy import create_engine, desc, func
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from models import SystemData, TokenBase, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, TokenContractAssociation, ContractBase, ContractStructure, ContractParticipants, ContractTransactionHistory, ContractDeposits, ConsumedInfo, ContractWinners, ContinuosContractBase, ContractStructure2, ContractParticipants2, ContractDeposits2, ContractTransactionHistory2, SystemBase, ActiveContracts, SystemData, ContractAddressMapping, TokenAddressMapping, DatabaseTypeMapping, TimeActions, RejectedContractTransactionHistory, RejectedTransactionHistory, LatestCacheBase, LatestTransactions, LatestBlocks
|
||||
import json
|
||||
from tracktokens_smartcontracts import processTransaction, checkLocal_expiry_trigger_deposit, newMultiRequest
|
||||
import os
|
||||
import logging
|
||||
import argparse
|
||||
import configparser
|
||||
import shutil
|
||||
import sys
|
||||
import pdb
|
||||
|
||||
|
||||
# helper functions
|
||||
def check_database_existence(type, parameters):
    """Return True if the SQLite DB file for the given entity exists on disk.

    Args:
        type: 'token' or 'smart_contract'.
        parameters: dict with 'token_name', or 'contract_name' +
            'contract_address'.

    Raises:
        ValueError: for an unsupported ``type`` (previously this silently
        returned None).
    """
    if type == 'token':
        return os.path.isfile(f"./tokens/{parameters['token_name']}.db")
    if type == 'smart_contract':
        return os.path.isfile(f"./smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db")
    raise ValueError(f"Unknown database type: {type}")
|
||||
|
||||
|
||||
def create_database_connection(type, parameters):
    """Return a raw SQLAlchemy connection to a tracker SQLite DB (CWD-relative).

    Raises:
        ValueError: for an unsupported ``type`` (previously this fell through
        and crashed with NameError on ``engine``).
    """
    if type == 'token':
        engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True)
    elif type == 'smart_contract':
        engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True)
    elif type == 'system_dbs':
        engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False)
    else:
        raise ValueError(f"Unknown database type: {type}")

    connection = engine.connect()
    return connection
|
||||
|
||||
|
||||
def create_database_session_orm(type, parameters, base):
    """Create the schema (if missing) and return an ORM session (CWD-relative paths).

    Raises:
        ValueError: for an unsupported ``type`` (previously this fell through
        and crashed with UnboundLocalError on ``session``).
    """
    if type == 'token':
        engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True)
    elif type == 'smart_contract':
        engine = create_engine(f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db", echo=True)
    elif type == 'system_dbs':
        engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False)
    else:
        raise ValueError(f"Unknown database type: {type}")

    # Identical tail for every branch, hoisted out of the if/elif chain.
    base.metadata.create_all(bind=engine)
    session = sessionmaker(bind=engine)()
    return session
|
||||
|
||||
|
||||
# MAIN EXECUTION STARTS
# Configuration of required variables
# Module logger: DEBUG overall; INFO and above persisted to ./tracking.log,
# and everything mirrored to the console via the stream handler.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

formatter = logging.Formatter('%(asctime)s:%(name)s:%(message)s')

file_handler = logging.FileHandler('tracking.log')
file_handler.setLevel(logging.INFO)
file_handler.setFormatter(formatter)

stream_handler = logging.StreamHandler()
stream_handler.setFormatter(formatter)

logger.addHandler(file_handler)
logger.addHandler(stream_handler)
|
||||
|
||||
|
||||
# Rule 1 - Read command line arguments to reset the databases as blank
|
||||
# Rule 2 - Read config to set testnet/mainnet
|
||||
# Rule 3 - Set flo blockexplorer location depending on testnet or mainnet
|
||||
# Rule 4 - Set the local flo-cli path depending on testnet or mainnet ( removed this feature | Flosights are the only source )
|
||||
# Rule 5 - Set the block number to scan from
|
||||
|
||||
|
||||
# Read command line arguments
|
||||
parser = argparse.ArgumentParser(description='Script tracks RMT using FLO data on the FLO blockchain - https://flo.cash')
# BUGFIX: the option was spelled '--toblocknumer' while the code below read
# args.toblocknumber, so any invocation crashed with AttributeError.
parser.add_argument('-rb', '--toblocknumber', nargs='?', type=int, help='Forward to the specified block number')
parser.add_argument('-r', '--blockcount', nargs='?', type=int, help='Forward to the specified block count')
parser.add_argument('-to', '--to_blockNumber', nargs='?', type=int, help='Process until the specified block number')  # New argument
args = parser.parse_args()

# Determine forward_block: explicit target, relative count, or newest cached block.
if (args.blockcount and args.toblocknumber):
    print("You can only specify one of the options -b or -c")
    sys.exit(0)
elif args.blockcount:
    # BUGFIX: lastscannedblock was never defined (NameError). Read it from
    # system.db, where this script stores the 'lastblockscanned' attribute.
    system_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase)
    lastscannedblock = int(system_session.query(SystemData.value).filter(SystemData.attribute == 'lastblockscanned').first()[0])
    system_session.close()
    forward_block = lastscannedblock + args.blockcount
elif args.toblocknumber:
    forward_block = args.toblocknumber
else:
    latestCache_session = create_database_session_orm('system_dbs', {'db_name':'latestCache'}, LatestCacheBase)
    forward_block = int(latestCache_session.query(LatestBlocks.blockNumber).order_by(LatestBlocks.blockNumber.desc()).first()[0])
    latestCache_session.close()
# (a redundant second args = parser.parse_args() was removed)
|
||||
|
||||
# Ensure the working directories exist before renaming the current databases aside.
apppath = os.path.dirname(os.path.realpath(__file__))
dirpath = os.path.join(apppath, 'tokens')
if not os.path.isdir(dirpath):
    os.mkdir(dirpath)
dirpath = os.path.join(apppath, 'smartContracts')
if not os.path.isdir(dirpath):
    os.mkdir(dirpath)

# rename all the old databases
# system.db , latestCache.db, smartContracts, tokens
if os.path.isfile('./system.db'):
    os.rename('system.db', 'system1.db')
if os.path.isfile('./latestCache.db'):
    os.rename('latestCache.db', 'latestCache1.db')
# BUGFIX: smartContracts/ and tokens/ are directories, so os.path.isfile()
# was always False and they were never renamed; use os.path.isdir().
if os.path.isdir('./smartContracts'):
    os.rename('smartContracts', 'smartContracts1')
if os.path.isdir('./tokens'):
    os.rename('tokens', 'tokens1')
|
||||
|
||||
# Read configuration
|
||||
config = configparser.ConfigParser()
config.read('config.ini')

# todo - write all assertions to make sure default configs are right
# Abort early unless NET is exactly 'mainnet' or 'testnet'.
if (config['DEFAULT']['NET'] != 'mainnet') and (config['DEFAULT']['NET'] != 'testnet'):
    logger.error("NET parameter in config.ini invalid. Options are either 'mainnet' or 'testnet'. Script is exiting now")
    sys.exit(0)

# Specify mainnet and testnet server list for API calls and websocket calls
serverlist = None
if config['DEFAULT']['NET'] == 'mainnet':
    serverlist = config['DEFAULT']['MAINNET_FLOSIGHT_SERVER_LIST']
elif config['DEFAULT']['NET'] == 'testnet':
    serverlist = config['DEFAULT']['TESTNET_FLOSIGHT_SERVER_LIST']
# Comma-separated string in config -> list of server URLs.
serverlist = serverlist.split(',')
neturl = config['DEFAULT']['FLOSIGHT_NETURL']
tokenapi_sse_url = config['DEFAULT']['TOKENAPI_SSE_URL']

# Delete database and smartcontract directory if reset is set to 1
#if args.reset == 1:
# NOTE(review): the reset guard above is commented out, so this wipe runs
# unconditionally on every invocation of the rebuild script.
logger.info("Resetting the database. ")
apppath = os.path.dirname(os.path.realpath(__file__))
dirpath = os.path.join(apppath, 'tokens')
shutil.rmtree(dirpath)
os.mkdir(dirpath)
dirpath = os.path.join(apppath, 'smartContracts')
shutil.rmtree(dirpath)
os.mkdir(dirpath)
dirpath = os.path.join(apppath, 'system.db')
if os.path.exists(dirpath):
    os.remove(dirpath)
dirpath = os.path.join(apppath, 'latestCache.db')
if os.path.exists(dirpath):
    os.remove(dirpath)

# Read start block no
# Seed the fresh system.db so scanning state starts just before START_BLOCK.
startblock = int(config['DEFAULT']['START_BLOCK'])
session = create_database_session_orm('system_dbs', {'db_name': "system"}, SystemBase)
session.add(SystemData(attribute='lastblockscanned', value=startblock - 1))
session.commit()
session.close()

# Initialize latest cache DB
session = create_database_session_orm('system_dbs', {'db_name': "latestCache"}, LatestCacheBase)
session.commit()
session.close()
|
||||
|
||||
# get all blocks and transaction data
# Pull the replay source from the old cache (renamed to latestCache1.db),
# optionally capped at forward_block.
latestCache_session = create_database_session_orm('system_dbs', {'db_name':'latestCache1'}, LatestCacheBase)
if forward_block:
    lblocks = latestCache_session.query(LatestBlocks).filter(LatestBlocks.blockNumber <= forward_block).all()
    ltransactions = latestCache_session.query(LatestTransactions).filter(LatestTransactions.blockNumber <= forward_block).all()
else:
    lblocks = latestCache_session.query(LatestBlocks).all()
    ltransactions = latestCache_session.query(LatestTransactions).all()
latestCache_session.close()

# make a list of all internal tx block numbers
# Blocks where contracts became active in the old system DB; these are the
# points where time-based internal actions must be re-checked during replay.
systemDb_session = create_database_session_orm('system_dbs', {'db_name':'system1'}, SystemBase)
internal_action_blocks = systemDb_session.query(ActiveContracts.blockNumber).all()
internal_action_blocks = [block[0] for block in internal_action_blocks]
internal_action_blocks = sorted(internal_action_blocks)

# Index the old block rows by block number for O(1) lookup in the replay loop.
lblocks_dict = {}
for block in lblocks:
    block_dict = block.__dict__
    print(block_dict['blockNumber'])
    lblocks_dict[block_dict['blockNumber']] = {'blockHash':f"{block_dict['blockHash']}", 'jsonData':f"{block_dict['jsonData']}"}

# process and rebuild all transactions
prev_block = 0
|
||||
|
||||
# Replay every cached transaction in order, re-fetching full tx data from the
# Flosight API, and interleave time-based internal actions at the right blocks.
for transaction in ltransactions:
    transaction_dict = transaction.__dict__
    current_block = transaction_dict['blockNumber']

    # Check if any internal action block lies between prev_block and current_block
    for internal_block in internal_action_blocks:
        if prev_block < internal_block <= current_block:
            logger.info(f'Processing block {internal_block}')
            # Get block details
            response = newMultiRequest(f"block-index/{internal_block}")
            blockhash = response['blockHash']
            blockinfo = newMultiRequest(f"block/{blockhash}")
            # Run the expiry/trigger/deposit checks against this block
            checkLocal_expiry_trigger_deposit(blockinfo)

    # Unlike util_rebuild.py, the transaction JSON is re-fetched from the API.
    # (A dead `transaction_data = json.loads(...)` that was immediately
    # overwritten by this fetch has been removed.)
    transaction_data = newMultiRequest(f"tx/{transaction_dict['transactionHash']}")
    parsed_flodata = json.loads(transaction_dict['parsedFloData'])
    try:
        block_info = json.loads(lblocks_dict[transaction_dict['blockNumber']]['jsonData'])
        processTransaction(transaction_data, parsed_flodata, block_info)
        prev_block = current_block
    # BUGFIX: a bare `except:` also swallowed SystemExit/KeyboardInterrupt and
    # hid every processing failure silently; catch Exception and log it.
    except Exception:
        logger.exception(f"Failed to process transaction {transaction_dict.get('transactionHash')} in block {current_block}")
        prev_block = current_block
        continue

    # Check if the current block exceeds the specified "to_blockNumber"
    # BUGFIX: --to_blockNumber is optional; comparing an int against None raised
    # TypeError. Only stop early when the flag was actually supplied.
    if args.to_blockNumber is not None and current_block >= args.to_blockNumber:
        logger.info(f"Reached the specified block number {args.to_blockNumber}. Stopping processing.")
        break
|
||||
|
||||
# copy the old block data
# Copy the already-scanned block headers from the old cache into the new one.
old_latest_cache = create_database_connection('system_dbs', {'db_name':'latestCache1'})
old_latest_cache.execute("ATTACH DATABASE 'latestCache.db' AS new_db")
old_latest_cache.execute("INSERT INTO new_db.latestBlocks SELECT * FROM latestBlocks WHERE blockNumber <= ?", (forward_block,))
old_latest_cache.close()

# delete
# system.db , latestCache.db, smartContracts, tokens
if os.path.isfile('./system1.db'):
    os.remove('system1.db')
if os.path.isfile('./latestCache1.db'):
    os.remove('latestCache1.db')
# BUGFIX: these two are directories; os.path.isfile() was always False so the
# renamed old copies were never cleaned up. Use os.path.isdir().
if os.path.isdir('./smartContracts1'):
    shutil.rmtree('smartContracts1')
if os.path.isdir('./tokens1'):
    shutil.rmtree('tokens1')

# Update system.db's last scanned block
# Record the newest rebuilt block number as 'lastblockscanned' (int; safe to inline).
connection = create_database_connection('system_dbs', {'db_name': "system"})
connection.execute(f"UPDATE systemData SET value = {int(list(lblocks_dict.keys())[-1])} WHERE attribute = 'lastblockscanned';")
connection.close()
|
||||
111
util_reset_latest_block.py
Normal file
111
util_reset_latest_block.py
Normal file
@ -0,0 +1,111 @@
|
||||
from sqlalchemy import create_engine, desc, func
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from models import SystemData, TokenBase, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, TokenContractAssociation, ContractBase, ContractStructure, ContractParticipants, ContractTransactionHistory, ContractDeposits, ConsumedInfo, ContractWinners, ContinuosContractBase, ContractStructure2, ContractParticipants2, ContractDeposits2, ContractTransactionHistory2, SystemBase, ActiveContracts, SystemData, ContractAddressMapping, TokenAddressMapping, DatabaseTypeMapping, TimeActions, RejectedContractTransactionHistory, RejectedTransactionHistory, LatestCacheBase, LatestTransactions, LatestBlocks
|
||||
import json
|
||||
from tracktokens_smartcontracts import processTransaction, checkLocal_expiry_trigger_deposit, newMultiRequest
|
||||
import os
|
||||
import logging
|
||||
import argparse
|
||||
import configparser
|
||||
import shutil
|
||||
import sys
|
||||
import pdb
|
||||
|
||||
|
||||
# helper functions
|
||||
def check_database_existence(type, parameters):
    """Report whether the sqlite file backing a database already exists.

    type: 'token' or 'smart_contract'; any other value falls through and
        returns None.
    parameters: dict carrying 'token_name' (token) or 'contract_name' and
        'contract_address' (smart contract).
    """
    if type == 'token':
        path = f"./tokens/{parameters['token_name']}.db"
        return os.path.isfile(path)
    if type == 'smart_contract':
        path = f"./smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db"
        return os.path.isfile(path)
|
||||
|
||||
|
||||
def create_database_connection(type, parameters):
    """Open a raw SQLAlchemy connection to one of the project's sqlite files.

    type selects the file layout:
      'token'          -> tokens/<token_name>.db            (echo on)
      'smart_contract' -> smartContracts/<name>-<addr>.db   (echo on)
      'system_dbs'     -> <db_name>.db                      (echo off)
    """
    if type == 'token':
        url = f"sqlite:///tokens/{parameters['token_name']}.db"
        engine = create_engine(url, echo=True)
    elif type == 'smart_contract':
        url = f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db"
        engine = create_engine(url, echo=True)
    elif type == 'system_dbs':
        engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False)

    return engine.connect()
|
||||
|
||||
|
||||
def create_database_session_orm(type, parameters, base):
    """Create the schema for *base* (if missing) and return an ORM session.

    type selects the sqlite file exactly as in create_database_connection;
    base is the declarative Base whose tables are created on the engine.
    """
    if type == 'token':
        engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True)
    elif type == 'smart_contract':
        engine = create_engine(
            f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db",
            echo=True,
        )
    elif type == 'system_dbs':
        engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False)

    # Any unknown type leaves `engine` unbound and raises UnboundLocalError,
    # matching the original's behavior for unrecognized types.
    base.metadata.create_all(bind=engine)
    return sessionmaker(bind=engine)()
|
||||
|
||||
|
||||
# MAIN EXECUTION STARTS
# Configuration of required variables
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

formatter = logging.Formatter('%(asctime)s:%(name)s:%(message)s')

file_handler = logging.FileHandler('tracking.log')
file_handler.setLevel(logging.INFO)
file_handler.setFormatter(formatter)

stream_handler = logging.StreamHandler()
stream_handler.setFormatter(formatter)

logger.addHandler(file_handler)
logger.addHandler(stream_handler)


# Rule 1 - Read command line arguments to reset the databases as blank
# Rule 2 - Read config to set testnet/mainnet
# Rule 3 - Set flo blockexplorer location depending on testnet or mainnet
# Rule 4 - Set the local flo-cli path depending on testnet or mainnet ( removed this feature | Flosights are the only source )
# Rule 5 - Set the block number to scan from


# Read command line arguments
parser = argparse.ArgumentParser(description='Script tracks RMT using FLO data on the FLO blockchain - https://flo.cash')
parser.add_argument('-rb', '--resetblocknumer', nargs='?', type=int, help='Forward to the specified block number')
args = parser.parse_args()


# Read configuration
config = configparser.ConfigParser()
config.read('config.ini')

# todo - write all assertions to make sure default configs are right
if (config['DEFAULT']['NET'] != 'mainnet') and (config['DEFAULT']['NET'] != 'testnet'):
    logger.error("NET parameter in config.ini invalid. Options are either 'mainnet' or 'testnet'. Script is exiting now")
    sys.exit(0)

# Specify mainnet and testnet server list for API calls and websocket calls
serverlist = None
if config['DEFAULT']['NET'] == 'mainnet':
    serverlist = config['DEFAULT']['MAINNET_FLOSIGHT_SERVER_LIST']
elif config['DEFAULT']['NET'] == 'testnet':
    serverlist = config['DEFAULT']['TESTNET_FLOSIGHT_SERVER_LIST']
serverlist = serverlist.split(',')
neturl = config['DEFAULT']['FLOSIGHT_NETURL']
tokenapi_sse_url = config['DEFAULT']['TOKENAPI_SSE_URL']


# Update system.db's last scanned block
# BUGFIX: int(None) raised TypeError when -rb was not supplied; fail with a
# clear message instead.
if args.resetblocknumer is None:
    logger.error("Please specify the block number to reset to using -rb")
    sys.exit(0)

# BUGFIX: removed a leftover debug print of the SQL statement and a
# pdb.set_trace() that stopped the script at an interactive prompt.
connection = create_database_connection('system_dbs', {'db_name': "system"})
connection.execute(f"UPDATE systemData SET value = {int(args.resetblocknumer)} WHERE attribute = 'lastblockscanned';")
connection.close()
|
||||
476
util_rollback.py
Normal file
476
util_rollback.py
Normal file
@ -0,0 +1,476 @@
|
||||
import argparse
|
||||
from sqlalchemy import create_engine, func
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from models import SystemData, TokenBase, ActiveTable, ConsumedTable, TransferLogs, TransactionHistory, TokenContractAssociation, RejectedTransactionHistory, ContractBase, ContractStructure, ContractParticipants, ContractTransactionHistory, ContractDeposits, ConsumedInfo, ContractWinners, ContinuosContractBase, ContractStructure2, ContractParticipants2, ContractDeposits2, ContractTransactionHistory2, SystemBase, ActiveContracts, SystemData, ContractAddressMapping, TokenAddressMapping, DatabaseTypeMapping, TimeActions, RejectedContractTransactionHistory, RejectedTransactionHistory, LatestCacheBase, LatestTransactions, LatestBlocks
|
||||
from ast import literal_eval
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
from parsing import perform_decimal_operation
|
||||
|
||||
|
||||
apppath = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
# helper functions
|
||||
def check_database_existence(type, parameters):
    """Return True when the sqlite file for the requested database is on disk.

    type: 'token' or 'smart_contract'; anything else returns None.
    parameters: dict with 'token_name', or 'contract_name' plus
        'contract_address', depending on type.
    """
    if type == 'token':
        candidate = f"./tokens/{parameters['token_name']}.db"
        return os.path.isfile(candidate)
    if type == 'smart_contract':
        candidate = f"./smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db"
        return os.path.isfile(candidate)
|
||||
|
||||
|
||||
def create_database_connection(type, parameters):
    """Return a raw SQLAlchemy connection for one of the project sqlite files.

    'token' and 'smart_contract' engines echo SQL; 'system_dbs' does not.
    """
    if type == 'token':
        engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True)
    elif type == 'smart_contract':
        db_file = f"smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db"
        engine = create_engine(f"sqlite:///{db_file}", echo=True)
    elif type == 'system_dbs':
        engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False)

    connection = engine.connect()
    return connection
|
||||
|
||||
|
||||
def create_database_session_orm(type, parameters, base):
    """Create the schema for *base* (if missing) and return an ORM session.

    type: 'token', 'smart_contract' or 'system_dbs' — selects the sqlite file.
    parameters: dict of the names needed to build the file path.
    base: declarative Base whose tables are created on the engine.

    Raises:
        ValueError: for an unrecognized type.  (BUGFIX: the original hit
        `pdb.set_trace()` here, but `pdb` is not imported in this module, so
        that path crashed with NameError instead of giving any diagnostics.)
    """
    if type == 'token':
        engine = create_engine(f"sqlite:///tokens/{parameters['token_name']}.db", echo=True)
    elif type == 'smart_contract':
        engine = create_engine(
            f"sqlite:///smartContracts/{parameters['contract_name']}-{parameters['contract_address']}.db",
            echo=True,
        )
    elif type == 'system_dbs':
        engine = create_engine(f"sqlite:///{parameters['db_name']}.db", echo=False)
    else:
        raise ValueError(f"create_database_session_orm: unknown database type {type!r}")

    base.metadata.create_all(bind=engine)
    session = sessionmaker(bind=engine)()
    return session
|
||||
|
||||
|
||||
def inspect_parsed_flodata(parsed_flodata, inputAddress, outputAddress):
    """Classify a parsed floData dict into the database operation it implies.

    parsed_flodata: dict decoded from a transaction's parsedFloData.
    inputAddress / outputAddress: sender / receiver FLO addresses of the tx
        (outputAddress forms part of the contract db name).

    Returns a dict describing the operation and the db names it touches, or
    None for an unrecognized 'type'.
    """
    if parsed_flodata['type'] == 'transfer':
        if parsed_flodata['transferType'] == 'token':
            return {'type': 'tokentransfer',
                    'token_db': f"{parsed_flodata['tokenIdentification']}",
                    'token_amount': f"{parsed_flodata['tokenAmount']}"}
        if parsed_flodata['transferType'] == 'smartContract':
            # BUGFIX: the original read parsed_flodata[''] (empty-string key),
            # which can never exist and raised KeyError.  Mirror
            # getDatabase_from_parsedFloData and read the accepting token from
            # 'contract-conditions' when present.
            accepting = parsed_flodata.get('contract-conditions', {}).get('accepting_token')
            return {'type': 'smartContract',
                    'contract_db': f"{parsed_flodata['contractName']}-{outputAddress}",
                    'accepting_token_db': f"{accepting}",
                    'receiving_token_db': f"{parsed_flodata['tokenIdentification']}",
                    'token_amount': f"{parsed_flodata['tokenAmount']}"}
        if parsed_flodata['transferType'] == 'swapParticipation':
            # BUGFIX: same empty-string key defect as the smartContract branch.
            accepting = parsed_flodata.get('contract-conditions', {}).get('accepting_token')
            return {'type': 'swapParticipation',
                    'contract_db': f"{parsed_flodata['contractName']}-{outputAddress}",
                    'accepting_token_db': f"{accepting}",
                    'receiving_token_db': f"{parsed_flodata['tokenIdentification']}",
                    'token_amount': f"{parsed_flodata['tokenAmount']}"}
        if parsed_flodata['transferType'] == 'nft':
            return {'type': 'nfttransfer',
                    'nft_db': f"{parsed_flodata['tokenIdentification']}",
                    'token_amount': f"{parsed_flodata['tokenAmount']}"}
    if parsed_flodata['type'] == 'tokenIncorporation':
        return {'type': 'tokenIncorporation',
                'token_db': f"{parsed_flodata['tokenIdentification']}",
                'token_amount': f"{parsed_flodata['tokenAmount']}"}
    if parsed_flodata['type'] == 'smartContractPays':
        # The token backing the contract is recorded in the contract's own db.
        sc_session = create_database_session_orm('smart_contract', {'contract_name': f"{parsed_flodata['contractName']}", 'contract_address': f"{outputAddress}"}, ContractBase)
        token_db = sc_session.query(ContractStructure.value).filter(ContractStructure.attribute == 'tokenIdentification').first()[0]
        return {'type': 'smartContractPays',
                'token_db': f"{token_db}",
                'contract_db': f"{parsed_flodata['contractName']}-{outputAddress}",
                'triggerCondition': f"{parsed_flodata['triggerCondition']}"}
    if parsed_flodata['type'] == 'smartContractIncorporation':
        return {'type': 'smartContractIncorporation',
                'contract_db': f"{parsed_flodata['contractName']}-{outputAddress}",
                'triggerCondition': f"{parsed_flodata['triggerCondition']}"}
|
||||
|
||||
|
||||
def getDatabase_from_parsedFloData(parsed_flodata, inputAddress, outputAddress):
    """Collect the token and smart-contract database names a parsed floData
    transaction touches.

    Returns:
        (tokenlist, contractlist) — lists of token db names and of
        '<contractName>-<outputAddress>' contract db names.
    """
    tokens = []
    contracts = []
    flo_type = parsed_flodata['type']

    if flo_type == 'transfer':
        transfer_type = parsed_flodata['transferType']
        if transfer_type == 'token':
            tokens.append(parsed_flodata['tokenIdentification'])
        elif transfer_type == 'smartContract':
            tokens.append(parsed_flodata['tokenIdentification'])
            contracts.append(f"{parsed_flodata['contractName']}-{outputAddress}")
        elif transfer_type == 'swapParticipation':
            conditions = parsed_flodata['contract-conditions']
            tokens.append(conditions['accepting_token'])
            tokens.append(conditions['selling_token'])
            contracts.append(f"{parsed_flodata['contractName']}-{outputAddress}")
        elif transfer_type == 'nft':
            tokens.append(parsed_flodata['tokenIdentification'])
    elif flo_type == 'smartContractPays':
        # The contract's backing token is stored in the contract's own db.
        sc_session = create_database_session_orm(
            'smart_contract',
            {'contract_name': f"{parsed_flodata['contractName']}",
             'contract_address': f"{outputAddress}"},
            ContractBase)
        token_db = sc_session.query(ContractStructure.value).filter(
            ContractStructure.attribute == 'tokenIdentification').first()[0]
        tokens.append(token_db)
        contracts.append(f"{parsed_flodata['contractName']}-{outputAddress}")
    elif flo_type == 'smartContractIncorporation':
        contracts.append(f"{parsed_flodata['contractName']}-{outputAddress}")
    elif flo_type == 'tokenIncorporation':
        tokens.append(parsed_flodata['tokenIdentification'])

    return tokens, contracts
|
||||
|
||||
|
||||
def calc_pid_amount(transferBalance, consumedpid):
    """Return the part of *transferBalance* not accounted for by consumed pids.

    consumedpid maps pid -> consumed amount; the amounts are summed with the
    project's decimal-safe addition and subtracted from transferBalance.
    """
    consumed_total = 0
    for amount in consumedpid.values():
        consumed_total = perform_decimal_operation('addition', consumed_total, float(amount))
    return transferBalance - consumed_total
|
||||
|
||||
|
||||
def find_addressBalance_from_floAddress(database_session, floAddress):
    """Return the latest non-null addressBalance recorded for *floAddress*.

    Only one ActiveTable row per address carries a non-null addressBalance;
    when no such row exists the address has no balance, so 0 is returned.
    """
    row = database_session.query(ActiveTable).filter(
        ActiveTable.address == floAddress,
        ActiveTable.addressBalance != None).first()
    return 0 if row is None else row.addressBalance
|
||||
|
||||
|
||||
def rollback_address_balance_processing(db_session, senderAddress, receiverAddress, transferBalance):
    """Undo the addressBalance effect of one transfer in an ActiveTable db.

    Reverses a transfer of *transferBalance* from senderAddress to
    receiverAddress: the sender's balance goes up by the amount, the
    receiver's goes down.  Only the newest row per address carries the
    non-null addressBalance, so updates target the latest occurrences.
    """
    # Calculation phase
    current_receiverBalance = find_addressBalance_from_floAddress(db_session, receiverAddress)
    current_senderBalance = find_addressBalance_from_floAddress(db_session, senderAddress)
    new_receiverBalance = perform_decimal_operation('subtraction', current_receiverBalance, transferBalance)
    new_senderBalance = perform_decimal_operation('addition', current_senderBalance, transferBalance)

    # Insertion phase
    # Sender: always write the restored balance onto the sender's newest row.
    # BUGFIX/robustness: the original dereferenced the query result
    # unconditionally and raised AttributeError when the sender had no rows.
    sender_query = db_session.query(ActiveTable).filter(
        ActiveTable.address == senderAddress).order_by(ActiveTable.id.desc()).first()
    if sender_query is not None:
        sender_query.addressBalance = new_senderBalance

    # Receiver: only when a positive balance remains does the previous
    # occurrence of the receiver address get the new balance.  (The original
    # condition `!= 0 and > 0` reduces to `> 0`.)
    if new_receiverBalance > 0:
        receiver_rows = db_session.query(ActiveTable).filter(
            ActiveTable.address == receiverAddress).order_by(ActiveTable.id.desc()).limit(2).all()
        if len(receiver_rows) == 2:
            receiver_rows[1].addressBalance = new_receiverBalance
|
||||
|
||||
|
||||
def find_input_output_addresses(transaction_data):
    """Derive the (input_address, output_address) pair of a FLO transaction.

    transaction_data: decoded transaction dict with 'vin', 'vout', 'valueIn'
        and 'txid' keys in flosight format.

    Returns:
        (input_address, output_address) on success.  For a self-transfer
        (all outputs return to the input address) both elements are the
        input address.  Returns 0 when the transaction breaks the rules
        below — callers test for 0.
    """
    # Build the list of (address, value) inputs
    # (BUGFIX: removed unused locals `querylist` and `inputadd`.)
    vinlist = []
    for vin in transaction_data["vin"]:
        vinlist.append([vin["addresses"][0], float(vin["value"])])

    totalinputval = float(transaction_data["valueIn"])

    # Rule 41 - every address on the input side must be the same
    for idx, item in enumerate(vinlist):
        if idx == 0:
            temp = item[0]
            continue
        if item[0] != temp:
            print(f"System has found more than one address as part of vin. Transaction {transaction_data['txid']} is rejected")
            return 0

    inputlist = [vinlist[0][0], totalinputval]

    # Rule 42 - more than 2 vouts rejects the transaction
    if len(transaction_data["vout"]) > 2:
        print(f"System has found more than 2 address as part of vout. Transaction {transaction_data['txid']} is rejected")
        return 0

    # Rule 43 - an accepted transaction has two vouts: the receiver's FLO
    # address and the sender's change address.  If a vout address equals the
    # input address it is change; the other address is the receiver.
    outputlist = []
    addresscounter = 0
    inputcounter = 0
    for obj in transaction_data["vout"]:
        if obj["scriptPubKey"]["type"] == "pubkeyhash":
            addresscounter = addresscounter + 1
            if inputlist[0] == obj["scriptPubKey"]["addresses"][0]:
                inputcounter = inputcounter + 1
                continue
            outputlist.append([obj["scriptPubKey"]["addresses"][0], obj["value"]])

    if addresscounter == inputcounter:
        # Self-transfer: everything returned to the input address.
        outputlist = [inputlist[0]]
    elif len(outputlist) != 1:
        print(f"Transaction's change is not coming back to the input address. Transaction {transaction_data['txid']} is rejected")
        return 0
    else:
        outputlist = outputlist[0]

    return inputlist[0], outputlist[0]
|
||||
|
||||
|
||||
def rollback_database(blockNumber, dbtype, dbname):
    """Roll a token or smart-contract database back to *blockNumber*.

    For dbtype 'token' the newest ActiveTable rows are undone one at a time
    (restoring parent / consumed pids) until the newest remaining entry is at
    or below blockNumber; TransactionHistory and TransferLogs rows past the
    block are then deleted.  For dbtype 'smartcontract' (dbname is a dict
    with 'contract_name' and 'contract_address') all history tables are
    simply trimmed past blockNumber.
    """
    if dbtype == 'token':
        db_session = create_database_session_orm('token', {'token_name': dbname}, TokenBase)
        while True:
            # Newest ActiveTable row (scalar subquery on max id).
            subqry = db_session.query(func.max(ActiveTable.id))
            activeTable_entry = db_session.query(ActiveTable).filter(ActiveTable.id == subqry).first()
            if activeTable_entry.blockNumber <= blockNumber:
                break

            outputAddress = activeTable_entry.address
            transferAmount = activeTable_entry.transferBalance
            inputAddress = None

            # Consumed and partially-consumed parent pids of this entry.
            parentid = activeTable_entry.parentid
            orphaned_parentid = activeTable_entry.orphaned_parentid
            consumedpid = None
            if activeTable_entry.consumedpid is not None:
                consumedpid = literal_eval(activeTable_entry.consumedpid)

            if parentid is not None:
                # Return the partially-consumed amount to the parent row.
                activeTable_pid_entry = db_session.query(ActiveTable).filter(ActiveTable.id == parentid).all()[0]
                activeTable_pid_entry.transferBalance = activeTable_pid_entry.transferBalance + calc_pid_amount(activeTable_entry.transferBalance, consumedpid)
                inputAddress = activeTable_pid_entry.address

            if orphaned_parentid is not None:
                try:
                    orphaned_parentid_entry = db_session.query(ConsumedTable).filter(ConsumedTable.id == orphaned_parentid).all()[0]
                    inputAddress = orphaned_parentid_entry.address
                except:
                    # NOTE(review): leftover debug hook — `pdb` is not imported
                    # in this module, so this path raises NameError; confirm
                    # and replace with real error handling.
                    pdb.set_trace()

            if consumedpid != {}:
                # Each key of consumedpid was fully consumed; restore it from
                # the consumed table back into the active table, preserving
                # pid order (larger pids sit towards the end).
                for key in list(consumedpid.keys()):
                    consumedpid_entry = db_session.query(ConsumedTable).filter(ConsumedTable.id == key).all()[0]
                    newTransferBalance = consumedpid_entry.transferBalance + consumedpid[key]
                    db_session.add(ActiveTable(id=consumedpid_entry.id, address=consumedpid_entry.address, parentid=consumedpid_entry.parentid, consumedpid=consumedpid_entry.consumedpid, transferBalance=newTransferBalance, addressBalance=None, orphaned_parentid=consumedpid_entry.orphaned_parentid, blockNumber=consumedpid_entry.blockNumber))
                    inputAddress = consumedpid_entry.address
                    db_session.delete(consumedpid_entry)

                    # Re-attach rows that were orphaned onto this pid — first
                    # in ActiveTable, then in ConsumedTable.
                    for table in (ActiveTable, ConsumedTable):
                        for orphan_entry in db_session.query(table).filter(table.orphaned_parentid == key).all():
                            orphan_entry.parentid = orphan_entry.orphaned_parentid
                            orphan_entry.orphaned_parentid = None

            # Undo the addressBalance effects of this transfer.
            rollback_address_balance_processing(db_session, inputAddress, outputAddress, transferAmount)

            # Delete the rolled-back row itself.
            db_session.delete(activeTable_entry)

        db_session.query(TransactionHistory).filter(TransactionHistory.blockNumber > blockNumber).delete()
        db_session.query(TransferLogs).filter(TransferLogs.blockNumber > blockNumber).delete()
        db_session.commit()

    elif dbtype == 'smartcontract':
        db_session = create_database_session_orm('smart_contract', {'contract_name': f"{dbname['contract_name']}", 'contract_address': f"{dbname['contract_address']}"}, ContractBase)
        db_session.query(ContractTransactionHistory).filter(ContractTransactionHistory.blockNumber > blockNumber).delete()
        db_session.query(ContractParticipants).filter(ContractParticipants.blockNumber > blockNumber).delete()
        db_session.query(ContractDeposits).filter(ContractDeposits.blockNumber > blockNumber).delete()
        db_session.query(ConsumedInfo).filter(ConsumedInfo.blockNumber > blockNumber).delete()
        db_session.query(ContractWinners).filter(ContractWinners.blockNumber > blockNumber).delete()
        db_session.commit()
|
||||
|
||||
|
||||
def delete_database_old(blockNumber, dbname):
    """Delete the sqlite files of every database created after *blockNumber*.

    Legacy variant (see delete_database for the current one).  Queries the
    DatabaseTypeMapping for rows past blockNumber and removes each matching
    tokens/ or smartContracts/ file.

    Returns the tuple of matching db names (empty tuple when none matched).
    """
    db_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase)
    databases_to_delete = db_session.query(DatabaseTypeMapping.db_name, DatabaseTypeMapping.db_type).filter(DatabaseTypeMapping.blockNumber > blockNumber).all()

    # BUGFIX: zip(*[]) raised ValueError when nothing matched the query.
    if not databases_to_delete:
        return ()
    db_names, db_types = zip(*databases_to_delete)

    for entry_name, entry_type in databases_to_delete:
        # BUGFIX: the original built every path from the `dbname` argument
        # instead of the row's own name, so it deleted the same file over and
        # over rather than each matched database.  Also aligned the contract
        # directory name to 'smartContracts' as used everywhere else in this
        # module.
        if entry_type in ['token', 'infinite-token']:
            dirpath = os.path.join(apppath, 'tokens', f"{entry_name}.db")
            if os.path.exists(dirpath):
                os.remove(dirpath)
        elif entry_type in ['smartcontract']:
            dirpath = os.path.join(apppath, 'smartContracts', f"{entry_name}.db")
            if os.path.exists(dirpath):
                os.remove(dirpath)
    return db_names
|
||||
|
||||
|
||||
def delete_database(blockNumber, dbname):
    """Remove the on-disk sqlite file(s) recorded for *dbname*.

    Looks *dbname* up in DatabaseTypeMapping and deletes the corresponding
    file under tokens/ (for token / infinite-token / nft types) or
    smartContracts/ (for smartcontract type).  blockNumber is unused here but
    kept for interface compatibility with existing callers.

    Returns the tuple of matching db names (empty tuple when none matched).
    """
    db_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase)
    databases_to_delete = db_session.query(DatabaseTypeMapping.db_name, DatabaseTypeMapping.db_type).filter(DatabaseTypeMapping.db_name == dbname).all()

    # BUGFIX: zip(*[]) raises ValueError when no row matches dbname.
    if not databases_to_delete:
        return ()
    db_names, db_types = zip(*databases_to_delete)

    for entry_name, entry_type in databases_to_delete:
        # entry_name equals dbname here because the query filters on it.
        if entry_type in ['token', 'infinite-token', 'nft']:
            dirpath = os.path.join(apppath, 'tokens', f"{dbname}.db")
            if os.path.exists(dirpath):
                os.remove(dirpath)
        elif entry_type in ['smartcontract']:
            dirpath = os.path.join(apppath, 'smartContracts', f"{dbname}.db")
            if os.path.exists(dirpath):
                os.remove(dirpath)
    return db_names
|
||||
|
||||
|
||||
def system_database_deletions(blockNumber):
    """Purge every system / latestCache record newer than *blockNumber* and
    rewind lastblockscanned to it."""
    latestcache_session = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase)

    # latestCache: drop blocks and transactions past the rollback point
    latestcache_session.query(LatestBlocks).filter(LatestBlocks.blockNumber > blockNumber).delete()
    latestcache_session.query(LatestTransactions).filter(LatestTransactions.blockNumber > blockNumber).delete()

    # system db: drop activeContracts, contractAddressMapping,
    # databaseTypeMapping, rejected histories, tokenAddressMapping and
    # timeActions rows past the rollback point
    systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase)
    for model in (ActiveContracts, ContractAddressMapping, DatabaseTypeMapping,
                  RejectedContractTransactionHistory, RejectedTransactionHistory,
                  TokenAddressMapping, TimeActions):
        systemdb_session.query(model).filter(model.blockNumber > blockNumber).delete()

    # rewind the scan pointer
    systemdb_session.query(SystemData).filter(SystemData.attribute == 'lastblockscanned').update({SystemData.value: str(blockNumber)})

    latestcache_session.commit()
    systemdb_session.commit()
    latestcache_session.close()
    systemdb_session.close()
|
||||
|
||||
|
||||
# Take input from user reg how many blocks to go back in the blockchain
parser = argparse.ArgumentParser(description='Script tracks RMT using FLO data on the FLO blockchain - https://flo.cash')
parser.add_argument('-rb', '--toblocknumer', nargs='?', type=int, help='Rollback the script to the specified block number')
parser.add_argument('-r', '--blockcount', nargs='?', type=int, help='Rollback the script to the number of blocks specified')
args = parser.parse_args()

# Get the last scanned block from system.db
systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase)
lastscannedblock = systemdb_session.query(SystemData.value).filter(SystemData.attribute == 'lastblockscanned').first()
systemdb_session.close()
lastscannedblock = int(lastscannedblock.value)

# Work out the rollback target from whichever option was given.
# BUGFIX: the original tested args.toblocknumber, but the argparse dest is
# 'toblocknumer' (no second "b"), so running with -r crashed with
# AttributeError before the exclusivity check could fire.  The error message
# also named the wrong flags (-b / -c instead of -rb / -r).
if (args.blockcount and args.toblocknumer):
    print("You can only specify one of the options -rb or -r")
    sys.exit(0)
elif args.blockcount:
    rollback_block = lastscannedblock - args.blockcount
elif args.toblocknumer:
    rollback_block = args.toblocknumer
else:
    print("Please specify the number of blocks to rollback")
    sys.exit(0)
|
||||
|
||||
|
||||
def return_token_contract_set(rollback_block):
    """Scan latestCache for every block past *rollback_block* and collect the
    token and smart-contract database names touched by their transactions.

    Returns:
        (tokendb_set, smartcontractdb_set) — sets of db names.
    """
    latestcache_session = create_database_session_orm('system_dbs', {'db_name': 'latestCache'}, LatestCacheBase)
    latestBlocks = latestcache_session.query(LatestBlocks).filter(LatestBlocks.blockNumber > rollback_block).all()

    lblocks_dict = {}
    blocknumber_list = []
    for block in latestBlocks:
        block_dict = block.__dict__
        lblocks_dict[block_dict['blockNumber']] = {
            'blockHash': f"{block_dict['blockHash']}",
            'jsonData': f"{block_dict['jsonData']}",
        }
        # prepend so the list ends up in reverse query order
        blocknumber_list.insert(0, block_dict['blockNumber'])

    tokendb_set = set()
    smartcontractdb_set = set()

    for blockindex in blocknumber_list:
        # Find all the transactions that happened in this block.
        # NOTE(review): lblocks_dict is keyed with the raw blockNumber value,
        # but the lookup uses str(blockindex); if the ORM returns ints the
        # lookup always misses and every block is skipped — confirm the
        # column type of LatestBlocks.blockNumber.
        try:
            block_tx_hashes = json.loads(lblocks_dict[str(blockindex)]['jsonData'])['tx']
        except:
            print(f"Block {blockindex} is not found in latestCache. Skipping this block")
            continue

        for txhash in block_tx_hashes:
            # Pull the transaction details and work out which dbs it touched.
            transaction = latestcache_session.query(LatestTransactions).filter(LatestTransactions.transactionHash == txhash).first()
            transaction_data = json.loads(transaction.jsonData)
            inputAddress, outputAddress = find_input_output_addresses(transaction_data)
            parsed_flodata = literal_eval(transaction.parsedFloData)
            tokenlist, contractlist = getDatabase_from_parsedFloData(parsed_flodata, inputAddress, outputAddress)

            tokendb_set.update(tokenlist)
            smartcontractdb_set.update(contractlist)

    return tokendb_set, smartcontractdb_set
|
||||
|
||||
|
||||
def initiate_rollback_process():
    """Roll every known database back to the global ``rollback_block`` height.

    Walks the DatabaseTypeMapping table in the system database and, for each
    token/NFT/smart-contract database:
      * deletes it outright if it was created after ``rollback_block``;
      * otherwise rolls its contents back to ``rollback_block``.
    Then prunes system-level records and rewrites ``lastblockscanned``.

    Relies on the module-level ``rollback_block`` computed from the CLI
    arguments. Returns None; all effects are on the databases.
    """
    # Connect to system.db and walk every registered database.
    systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase)
    db_names = systemdb_session.query(DatabaseTypeMapping).all()
    for db in db_names:
        if db.db_type in ['token', 'nft', 'infinite-token']:
            if db.blockNumber > rollback_block:
                # Database was born after the rollback point: drop it entirely.
                delete_database(rollback_block, f"{db.db_name}")
            else:
                rollback_database(rollback_block, 'token', f"{db.db_name}")
        elif db.db_type in ['smartcontract']:
            if db.blockNumber > rollback_block:
                delete_database(rollback_block, f"{db.db_name}")
            else:
                # Smart-contract db names are '<contract_name>-<contract_address>';
                # rsplit on the last '-' so hyphenated contract names survive.
                db_split = db.db_name.rsplit('-', 1)
                db_name = {'contract_name': db_split[0], 'contract_address': db_split[1]}
                rollback_database(rollback_block, 'smartcontract', db_name)
    # The original never closed this session (leak).
    systemdb_session.close()

    system_database_deletions(rollback_block)

    # Update lastblockscanned in the system database. (The original also
    # queried latestCache for its highest block here, but never used the
    # result — that dead query has been removed.)
    systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase)
    lastblockscanned_query = systemdb_session.query(SystemData).filter(SystemData.attribute == 'lastblockscanned').first()
    lastblockscanned_query.value = rollback_block
    systemdb_session.commit()
    systemdb_session.close()
||||
if __name__ == "__main__":
    # Sanity check: refuse to "roll back" to a height above the current
    # scan tip — that would be a roll *forward*.
    systemdb_session = create_database_session_orm('system_dbs', {'db_name': 'system'}, SystemBase)
    lastblockscanned_query = systemdb_session.query(SystemData).filter(SystemData.attribute == 'lastblockscanned').first()
    lastscanned_value = int(lastblockscanned_query.value)
    # Close before branching; the original leaked this session on both paths.
    systemdb_session.close()
    if rollback_block > lastscanned_value:
        print('Rollback block is greater than the last scanned block\n Exiting ....')
        sys.exit(0)
    else:
        initiate_rollback_process()
Loading…
Reference in New Issue
Block a user