Minor Fixes

- Added tables to the SQL schema (nodeList, trustedList, sinkShares)
- Moved KBucket usage from main.js to backup/head.js
- The node KBucket and the ordered node list are now calculated in backup/head.js (see the sketch after this list)
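A minimal sketch of the new setter in backup/head.js, condensed from the diff below (floGlobals is the project-wide globals object assumed to be loaded before this module is used):

// Sketch: main.js now hands over the raw {floID: uri} map and backup/head.js
// derives the KBucket and the ordered node list itself.
const K_Bucket = require('./KBucket');

var nodeList, nodeURL, nodeKBucket; // populated via the setter below

module.exports = {
    set nodeList(list) {
        nodeURL = list; // {floID: uri} map read from the new nodeList table
        nodeKBucket = new K_Bucket(floGlobals.adminID, Object.keys(nodeURL));
        nodeList = nodeKBucket.order; // floIDs ordered by the KBucket
    }
};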
sairajzero 2022-01-14 04:52:41 +05:30
parent c32833e625
commit abb82b0b34
3 changed files with 52 additions and 43 deletions

View File

@@ -112,6 +112,17 @@ PRIMARY KEY(id),
FOREIGN KEY (floID) REFERENCES Users(floID)
);
+CREATE TABLE nodeList(
+floID CHAR(34) NOT NULL,
+uri TINYTEXT,
+PRIMARY KEY(floID)
+);
+CREATE TABLE trustedList(
+floID CHAR(34) NOT NULL,
+FOREIGN KEY (floID) REFERENCES Users(floID)
+);
CREATE TABLE TagList (
id INT NOT NULL AUTO_INCREMENT,
tag VARCHAR(50) NOT NULL,
@@ -133,27 +144,34 @@ FOREIGN KEY (tag) REFERENCES TagList(tag)
);
CREATE TABLE priceHistory (
rate FLOAT NOT NULL,
rec_time DATETIME DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE auditTransaction(
rec_time DATETIME DEFAULT CURRENT_TIMESTAMP,
unit_price FLOAT NOT NULL,
quantity FLOAT NOT NULL,
total_cost FLOAT NOT NULL,
sellerID CHAR(34) NOT NULL,
FLO_seller_old FLOAT NOT NULL,
FLO_seller_new FLOAT NOT NULL,
Rupee_seller_old FLOAT NOT NULL,
Rupee_seller_new FLOAT NOT NULL,
buyerID CHAR(34) NOT NULL,
FLO_buyer_old FLOAT NOT NULL,
FLO_buyer_new FLOAT NOT NULL,
Rupee_buyer_old FLOAT NOT NULL,
Rupee_buyer_new FLOAT NOT NULL,
FOREIGN KEY (sellerID) REFERENCES Users(floID),
FOREIGN KEY (buyerID) REFERENCES Users(floID)
);
+CREATE TABLE sinkShares(
+floID CHAR(34) NOT NULL,
+share TEXT,
+time_ DATETIME DEFAULT CURRENT_TIMESTAMP,
+PRIMARY KEY(floID, share)
+);
/* Backup feature (Table and Triggers) */
@@ -222,13 +240,6 @@ FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('outputRupee', NEW.id) ON
CREATE TRIGGER outputRupee_D AFTER DELETE ON outputRupee
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('outputRupee', OLD.id) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=DEFAULT;
-CREATE TRIGGER TagList_I AFTER INSERT ON TagList
-FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('TagList', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT;
-CREATE TRIGGER TagList_U AFTER UPDATE ON TagList
-FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('TagList', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT;
-CREATE TRIGGER TagList_D AFTER DELETE ON TagList
-FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('TagList', OLD.id) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=DEFAULT;
CREATE TRIGGER Tags_I AFTER INSERT ON Tags
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('Tags', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT;
CREATE TRIGGER Tags_U AFTER UPDATE ON Tags
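For reference, the new nodeList table is kept in sync from main.js (third file below); a minimal upsert sketch, assuming the project's promise-based DB.query(sql, values) wrapper and using MySQL's VALUES() form to reuse the inserted value on duplicate keys (saveNode is a hypothetical helper name, not part of this commit):

// Sketch: store or update one backup node in the new nodeList table.
function saveNode(DB, floID, uri) {
    return DB.query(
        "INSERT INTO nodeList (floID, uri) VALUES (?,?) ON DUPLICATE KEY UPDATE uri=VALUES(uri)",
        [floID, uri]
    );
}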

View File

@@ -1,9 +1,12 @@
'use strict';
+const K_Bucket = require('./KBucket');
const slave = require('./slave');
const WebSocket = require('ws');
const shareThreshold = 50 / 100;
var DB, app, wss; //Container for database and app
+var nodeList, nodeURL, nodeKBucket; //Container for (backup) node list
var nodeShares = null,
nodeSinkID = null,
connectedSlaves = {},
@@ -284,7 +287,6 @@ function slaveConnect(floID, ws) {
}
//Transmitter
-var nodeList; //Container for (backup) node list
function startBackupTransmitter(server) {
wss = new WebSocket.Server({
server
@@ -346,8 +348,10 @@ function initProcess(a) {
module.exports = {
init: initProcess,
-set nodeList(ids) {
-nodeList = ids;
+set nodeList(list) {
+nodeURL = list;
+nodeKBucket = new K_Bucket(floGlobals.adminID, Object.keys(nodeURL));
+nodeList = nodeKBucket.order;
},
set DB(db) {
DB = db;

View File

@@ -11,13 +11,10 @@ const Database = require("./database");
const App = require('./app');
const PORT = config['port'];
-const K_Bucket = require('./backup/KBucket');
-const transmit = require('./backup/head');
+const backup = require('./backup/head');
var DB, app;
-var nodeList, nodeURL, nodeKBucket;
function refreshData(startup = false) {
return new Promise((resolve, reject) => {
refreshDataFromBlockchain().then(result => {
@@ -47,10 +44,10 @@ function refreshDataFromBlockchain() {
nodes_change = true;
if (content.Nodes.remove)
for (let n of content.Nodes.remove)
promises.push(DB.query("DELETE FROM nodeURL WHERE floID=?", [n]));
promises.push(DB.query("DELETE FROM nodeList WHERE floID=?", [n]));
if (content.Nodes.add)
for (let n in content.Nodes.add)
promises.push(DB.query("INSERT INTO nodeURL (floID, url) VALUE (?,?) ON DUPLICATE KEY UPDATE url=NEW.url", [n, content.Nodes.add[n]]));
promises.push(DB.query("INSERT INTO nodeList (floID, uri) VALUE (?,?) ON DUPLICATE KEY UPDATE uri=NEW.uri", [n, content.Nodes.add[n]]));
}
//Trusted List
if (content.Trusted) {
@@ -106,16 +103,13 @@ function loadDataFromDB(changes, startup) {
loadDataFromDB.nodeList = function() {
return new Promise((resolve, reject) => {
DB.query("SELECT * FROM nodeList").then(result => {
DB.query("SELECT (floID, uri) FROM nodeList").then(result => {
let nodes = {}
for (let i in result)
-nodes[result[i].floID] = result[i];
-nodeURL = nodes;
-nodeKBucket = new K_Bucket(floGlobals.adminID, Object.keys(nodeURL));
-nodeList = nodeKBucket.order;
+nodes[result[i].floID] = result[i].uri;
//update dependents
-transmit.nodeList = nodeList;
-resolve(nodeList);
+backup.nodeList = nodes;
+resolve(nodes);
}).catch(error => reject(error))
})
}
@@ -135,7 +129,7 @@ loadDataFromDB.trustedIDs = function() {
function setDB(db) {
DB = db;
-transmit.DB = DB;
+backup.DB = DB;
}
module.exports = function startServer(public_dir) {
@@ -170,7 +164,7 @@ module.exports = function startServer(public_dir) {
refreshData(true).then(_ => {
app.start(PORT).then(result => {
console.log(result);
-transmit.init(app);
+backup.init(app);
}).catch(error => console.error(error))
}).catch(error => console.error(error))
}).catch(error => console.error(error));
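The KBucket module itself is not touched by this commit, so the exact ordering it produces is not shown here. Purely as an illustration of what an "ordered node list" means, a hypothetical Kademlia-style sort by XOR distance from the admin ID could look like this (not the project's actual ./backup/KBucket implementation):

// Hypothetical illustration only: order node floIDs by the XOR distance between
// SHA-256(floID) and SHA-256(adminID), the usual Kademlia-style metric.
const crypto = require('crypto');

function idToBigInt(floID) {
    // Hash the address so every ID maps to a fixed-width 256-bit number
    return BigInt('0x' + crypto.createHash('sha256').update(floID).digest('hex'));
}

function orderByXorDistance(adminID, floIDs) {
    const ref = idToBigInt(adminID);
    return floIDs
        .map(id => ({ id, dist: idToBigInt(id) ^ ref }))
        .sort((a, b) => (a.dist < b.dist ? -1 : a.dist > b.dist ? 1 : 0))
        .map(entry => entry.id);
}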