From a7bab7ed6e45865c1adb83928a999b559d12e573 Mon Sep 17 00:00:00 2001 From: sairajzero Date: Fri, 15 Oct 2021 22:54:15 +0530 Subject: [PATCH] Backup DB feature --- .gitignore | 3 +- args/schema.sql | 76 ++++++++++++++- src/backup/storage.js | 213 +++++++++++++++++++++++++++++++++++++++++ src/backup/transmit.js | 168 ++++++++++++++++++++++++++++++++ src/main.js | 14 ++- src/market.js | 18 ++-- src/request.js | 5 +- 7 files changed, 481 insertions(+), 16 deletions(-) create mode 100644 src/backup/storage.js create mode 100644 src/backup/transmit.js diff --git a/.gitignore b/.gitignore index 3a92ee4..4ff1827 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ /node_modules/ /package-lock.json -/args/config.json +/args/app-config.json +/args/backup-config.json /args/param.json /args/keys.json *test* \ No newline at end of file diff --git a/args/schema.sql b/args/schema.sql index 96e380f..fcd8f52 100644 --- a/args/schema.sql +++ b/args/schema.sql @@ -1,8 +1,9 @@ +-- Main Tables + CREATE TABLE Users ( floID CHAR(34) NOT NULL, pubKey CHAR(66) NOT NULL, created DATETIME DEFAULT CURRENT_TIMESTAMP, -rupeeBalance DECIMAL(12, 2) DEFAULT 0.00, PRIMARY KEY(floID) ); @@ -21,6 +22,13 @@ sign TEXT NOT NULL, request_time DATETIME DEFAULT CURRENT_TIMESTAMP ); +CREATE TABLE Cash ( +id INT NOT NULL AUTO_INCREMENT, +floID CHAR(34) NOT NULL UNIQUE, +rupeeBalance DECIMAL(12, 2) DEFAULT 0.00, +PRIMARY KEY(id) +); + CREATE TABLE Vault ( id INT NOT NULL AUTO_INCREMENT, floID CHAR(34) NOT NULL, @@ -91,3 +99,69 @@ amount FLOAT NOT NULL, status VARCHAR(50) NOT NULL, PRIMARY KEY(id) ); + +-- Backup feature (Table and Triggers) + +CREATE TABLE _backup ( +t_name VARCHAR(20), +id INT, +mode BOOLEAN DEFAULT TRUE, +timestamp DATETIME DEFAULT CURRENT_TIMESTAMP, +PRIMARY KEY(t_name, id) +); + +CREATE TRIGGER Cash_I AFTER INSERT ON Cash +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('Cash', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT; +CREATE TRIGGER Cash_U AFTER UPDATE ON 
Cash +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('Cash', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT; +CREATE TRIGGER Cash_D AFTER DELETE ON Cash +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('Cash', OLD.id) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=DEFAULT; + +CREATE TRIGGER Vault_I AFTER INSERT ON Vault +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('Vault', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT; +CREATE TRIGGER Vault_U AFTER UPDATE ON Vault +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('Vault', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT; +CREATE TRIGGER Vault_D AFTER DELETE ON Vault +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('Vault', OLD.id) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=DEFAULT; + +CREATE TRIGGER SellOrder_I AFTER INSERT ON SellOrder +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('SellOrder', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT; +CREATE TRIGGER SellOrder_U AFTER UPDATE ON SellOrder +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('SellOrder', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT; +CREATE TRIGGER SellOrder_D AFTER DELETE ON SellOrder +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('SellOrder', OLD.id) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=DEFAULT; + +CREATE TRIGGER BuyOrder_I AFTER INSERT ON BuyOrder +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('BuyOrder', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT; +CREATE TRIGGER BuyOrder_U AFTER UPDATE ON BuyOrder +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('BuyOrder', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT; +CREATE TRIGGER BuyOrder_D AFTER DELETE ON BuyOrder +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('BuyOrder', OLD.id) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=DEFAULT; + +CREATE TRIGGER inputFLO_I AFTER INSERT ON inputFLO +FOR EACH ROW INSERT INTO 
_backup (t_name, id) VALUES ('inputFLO', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT; +CREATE TRIGGER inputFLO_U AFTER UPDATE ON inputFLO +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('inputFLO', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT; +CREATE TRIGGER inputFLO_D AFTER DELETE ON inputFLO +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('inputFLO', OLD.id) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=DEFAULT; + +CREATE TRIGGER outputFLO_I AFTER INSERT ON outputFLO +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('outputFLO', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT; +CREATE TRIGGER outputFLO_U AFTER UPDATE ON outputFLO +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('outputFLO', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT; +CREATE TRIGGER outputFLO_D AFTER DELETE ON outputFLO +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('outputFLO', OLD.id) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=DEFAULT; + +CREATE TRIGGER inputRupee_I AFTER INSERT ON inputRupee +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('inputRupee', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT; +CREATE TRIGGER inputRupee_U AFTER UPDATE ON inputRupee +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('inputRupee', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT; +CREATE TRIGGER inputRupee_D AFTER DELETE ON inputRupee +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('inputRupee', OLD.id) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=DEFAULT; + +CREATE TRIGGER outputRupee_I AFTER INSERT ON outputRupee +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('outputRupee', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT; +CREATE TRIGGER outputRupee_U AFTER UPDATE ON outputRupee +FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('outputRupee', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT; +CREATE TRIGGER outputRupee_D AFTER 
DELETE ON outputRupee
+FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('outputRupee', OLD.id) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=DEFAULT;
\ No newline at end of file
diff --git a/src/backup/storage.js b/src/backup/storage.js
new file mode 100644
index 0000000..f3d8e9a
--- /dev/null
+++ b/src/backup/storage.js
@@ -0,0 +1,213 @@
+const config = require('../../args/backup-config.json');
+const floGlobals = require('../../public/floGlobals');
+require('../set_globals');
+require('../lib');
+require('../floCrypto');
+const WebSocket = require('ws');
+
+const WAIT_TIME = 5 * 60 * 1000,
+    BACKUP_INTERVAL = 5 * 60 * 1000;
+
+const myPrivKey = config.private_key,
+    myPubKey = floCrypto.getPubKeyHex(config.private_key),
+    myFloID = floCrypto.getFloID(config.private_key);
+
+const Database = require("../database");
+var DB; //Container for Database connection
+
+function startBackupStorage() {
+    console.log("Logged in as", myFloID);
+    Database(config["sql_user"], config["sql_pwd"], config["sql_db"], config["sql_host"]).then(db => {
+        DB = db;
+        IntervalFunction();
+        const BackupInterval = setInterval(IntervalFunction, BACKUP_INTERVAL);
+        console.log("Backup Storage Started");
+    })
+}
+
+function IntervalFunction() {
+    IntervalFunction.count += 1;
+    console.log(Date.now().toString(), "Instance #" + IntervalFunction.count);
+    requestInstance.open();
+}
+IntervalFunction.count = 0;
+
+function connectToWSServer(url) {
+    return new Promise((resolve, reject) => {
+        const ws = new WebSocket(url);
+        ws.on('open', _ => resolve(ws));
+        ws.on('error', error => reject(error));
+    })
+}
+
+function requestBackupSync(ws) {
+    return new Promise((resolve, reject) => {
+        const tables = {
+            Users: "created",
+            Request_Log: "request_time",
+            Transactions: "tx_time",
+            _backup: "timestamp"
+        };
+        let subs = [];
+        for (let t in tables)
+            subs.push(`SELECT MAX(${tables[t]}) as ts FROM ${t}`);
+        DB.query(`SELECT MAX(ts) as last_time FROM (${subs.join(' UNION ')}) AS Z`).then(result => {
let request = { + floID: myFloID, + pubKey: myPubKey, + type: "BACKUP_SYNC", + last_time: result[0].last_time, + req_time: Date.now() + }; + request.sign = floCrypto.signData(request.type + "|" + request.req_time, myPrivKey); + console.debug("REQUEST: ", request); + ws.send(JSON.stringify(request)); + resolve(request); + }).catch(error => reject(error)) + }) +} + +const requestInstance = { + ws: null, + delete_sync: null, + add_sync: null, + immutable_sync: null, + delete_data: null, + add_data: null, + total_add: null, + request: null, + last_response_time: null +}; + +requestInstance.open = function() { + const self = this; + //Check if there is an active request + if (self.request) { + console.log("A request is already active"); + if (self.last_response_time < Date.now() - WAIT_TIME) + self.close(); + else + return; + } + connectToWSServer(config["main_server_url"]).then(ws => { + requestBackupSync(ws).then(request => { + self.request = request; + self.ws = ws; + ws.on('message', processBackupData) + ws.onclose = _ => console.log("Connection was Interrupted"); + }).catch(error => console.error(error)) + }).catch(error => console.error(error)) +} + +requestInstance.close = function() { + const self = this; + self.ws.onclose = () => null; + self.ws.close(); + self.ws = null; + self.delete_sync = null; + self.add_sync = null; + self.immutable_sync = null; + self.delete_data = null; + self.add_data = null; + self.total_add = null; + self.request = null; + self.last_response_time = null; +} + +function processBackupData(message) { + const self = requestInstance; + self.last_response_time = Date.now(); + try { + const response = JSON.parse(message); + console.debug(response); + if (response.error) { + console.log(response.error); + self.close(); + return; + } + switch (response.mode) { + case "END": + if (response.status) { + if (self.total_add !== self.add_sync) + console.info(`Backup Sync Instance finished!, ${self.total_add - self.add_sync} packets not received.`); 
+ else + console.info("Backup Sync Instance finished successfully"); + updateBackupTable(self.add_data, self.delete_data) + } else + console.info("Backup Sync was not successful! Failed info: ", response.info); + self.close(); + break; + case "DELETE": + self.delete_data = response.delete_data; + self.delete_sync += 1; + deleteData(response.delete_data); + break; + case "ADD_UPDATE_HEADER": + self.add_data = response.add_data; + self.total_add = Object.keys(response.add_data).length; + break; + case "ADD_UPDATE": + self.add_sync += 1; + addUpdateData(response.table, response.data); + break; + case "ADD_IMMUTABLE": + self.immutable_sync += 1; + addImmutableData(response.table, response.data); + break; + } + } catch (error) { + console.error(error); + } +} + +function updateBackupTable(add_data, delete_data) { + //update _backup table for added data + DB.transaction(add_data.map(r => [ + "INSERT INTO _backup (t_name, id, mode, timestamp) VALUES (?, ?, TRUE, ?) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=?", + [r.t_name, r.id, validateValue(r.timestamp), validateValue(r.timestamp)] + ])).then(_ => null).catch(error => console.error(error)); + //update _backup table for deleted data + let del_queries = []; + delete_data.forEach(r => del_queries.push([])); + DB.transaction(delete_data.map(r => [ + "INSERT INTO _backup (t_name, id, mode, timestamp) VALUES (?, ?, NULL, ?) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=?", + [r.t_name, r.id, validateValue(r.timestamp), validateValue(r.timestamp)] + ])).then(_ => null).catch(error => console.error(error)); +} + +function deleteData(data) { + let delete_needed = {}; + data.forEach(r => r.t_name in delete_needed ? 
delete_needed[r.t_name].push(r.id) : delete_needed[r.t_name] = [r.id]);
+    let queries = [];
+    for (let table in delete_needed)
+        queries.push(`DELETE FROM ${table} WHERE id IN (${delete_needed[table]})`);
+    DB.transaction(queries).then(_ => null).catch(error => console.error(error));
+}
+
+function addUpdateData(table, data) {
+    let cols = Object.keys(data[0]),
+        _mark = "(" + Array(cols.length).fill('?') + ")",
+        values = data.map(r => cols.map(c => validateValue(r[c]))).flat();
+    let statement = `INSERT INTO ${table} (${cols}) VALUES ${Array(data.length).fill(_mark)} AS new` +
+        " ON DUPLICATE KEY UPDATE " + cols.filter(c => c != 'id').map(c => c + " = new." + c);
+    DB.query(statement, values).then(_ => null).catch(error => console.error(error));
+}
+
+function addImmutableData(table, data) {
+    if (!data.length)
+        return;
+    const primaryKeys = {
+        Users: "floID"
+    };
+    let cols = Object.keys(data[0]),
+        _mark = "(" + Array(cols.length).fill('?') + ")",
+        values = data.map(r => cols.map(c => validateValue(r[c]))).flat();
+    let statement = `INSERT INTO ${table} (${cols}) VALUES ${Array(data.length).fill(_mark)}`;
+    if (table in primaryKeys)
+        statement += ` ON DUPLICATE KEY UPDATE ${primaryKeys[table]}=${primaryKeys[table]}`;
+    DB.query(statement, values).then(_ => null).catch(error => console.error(error));
+}
+
+const validateValue = val => (typeof val === "string" && /\.\d{3}Z$/.test(val)) ?
val.substring(0, val.length - 1) : val; + +startBackupStorage(); \ No newline at end of file diff --git a/src/backup/transmit.js b/src/backup/transmit.js new file mode 100644 index 0000000..06621e5 --- /dev/null +++ b/src/backup/transmit.js @@ -0,0 +1,168 @@ +const WebSocket = require('ws'); + +var DB; //Container for database +function sendBackup(timestamp, ws) { + if (!timestamp) timestamp = 0; + else if (typeof timestamp === "string" && /\.\d{3}Z$/.test(timestamp)) + timestamp = timestamp.substring(0, timestamp.length - 1); + let promises = [ + send_dataSync(timestamp, ws), + send_deleteSync(timestamp, ws), + send_dataImmutable(timestamp, ws) + ]; + Promise.allSettled(promises).then(result => { + let failedSync = []; + result.forEach(r => r.status === "rejected" ? failedSync.push(r.reason) : null); + if (failedSync.length) { + console.info("Backup Sync Failed:", failedSync); + ws.send(JSON.stringify({ + mode: "END", + status: false, + info: failedSync + })); + } else { + console.info("Backup Sync completed"); + ws.send(JSON.stringify({ + mode: "END", + status: true + })); + } + }); +} + +function send_deleteSync(timestamp, ws) { + return new Promise((resolve, reject) => { + DB.query("SELECT * FROM _backup WHERE mode is NULL AND timestamp > ?", [timestamp]).then(result => { + ws.send(JSON.stringify({ + mode: "DELETE", + delete_data: result + })); + resolve("deleteSync"); + }).catch(error => { + console.error(error); + reject("deleteSync"); + }); + }) +} + +function send_dataSync(timestamp, ws) { + const sendTable = (table, id_list) => new Promise((res, rej) => { + DB.query(`SELECT * FROM ${table} WHERE id IN (${id_list})`) + .then(data => { + ws.send(JSON.stringify({ + table, + mode: "ADD_UPDATE", + data + })); + res(table); + }).catch(error => { + console.error(error); + rej(table); + }); + }); + return new Promise((resolve, reject) => { + DB.query("SELECT * FROM _backup WHERE mode=TRUE AND timestamp > ?", [timestamp]).then(result => { + let sync_needed = {}; + 
result.forEach(r => r.t_name in sync_needed ? sync_needed[r.t_name].push(r.id) : sync_needed[r.t_name] = [r.id]); + ws.send(JSON.stringify({ + mode: "ADD_UPDATE_HEADER", + add_data: result + })); + let promises = []; + for (let table in sync_needed) + promises.push(sendTable(table, sync_needed[table])); + Promise.allSettled(promises).then(result => { + let failedTables = []; + result.forEach(r => r.status === "rejected" ? failedTables.push(r.reason) : null); + if (failedTables.length) + reject(["dataSync", failedTables]); + else + resolve("dataSync"); + }); + }).catch(error => { + console.error(error); + reject("dataSync"); + }); + }); +} + +function send_dataImmutable(timestamp, ws) { + const immutable_tables = { + Users: "created", + Request_Log: "request_time", + Transactions: "tx_time" + }; + const sendTable = (table, timeCol) => new Promise((res, rej) => { + DB.query(`SELECT * FROM ${table} WHERE ${timeCol} > ?`, [timestamp]) + .then(data => { + ws.send(JSON.stringify({ + table, + mode: "ADD_IMMUTABLE", + data + })); + res(table); + }).catch(error => { + console.error(error); + rej(table); + }); + }); + + return new Promise((resolve, reject) => { + let promises = []; + for (let table in immutable_tables) + promises.push(sendTable(table, immutable_tables[table])); + Promise.allSettled(promises).then(result => { + let failedTables = []; + result.forEach(r => r.status === "rejected" ? 
failedTables.push(r.reason) : null); + if (failedTables.length) + reject(["dataImmutable", failedTables]); + else + resolve("dataImmutable"); + }); + }) +} + +function startBackupTransmitter(db, port, backupIDs) { + DB = db; + this.port = port; + this.backupIDs = backupIDs; + + const wss = this.wss = new WebSocket.Server({ + port: port + }); + this.close = () => wss.close(); + wss.on('connection', ws => { + ws.on('message', message => { + //verify if from a backup node + try { + let invalid = null, + request = JSON.parse(message); + console.debug(request); + if (!backupIDs.includes(request.floID)) + invalid = "FLO ID not approved for backup"; + else if (request.floID !== floCrypto.getFloID(request.pubKey)) + invalid = "Invalid pubKey"; + else if (!floCrypto.verifySign(request.type + "|" + request.req_time, request.sign, request.pubKey)) + invalid = "Invalid signature"; + else if (request.type !== "BACKUP_SYNC") + invalid = "Invalid Request Type"; + //TODO: check if request time is valid; + if (invalid) + ws.send(JSON.stringify({ + error: invalid + })); + else + sendBackup(request.last_time, ws); + } catch (error) { + console.error(error); + ws.send(JSON.stringify({ + error: 'Unable to process the request!' 
+ })); + } + }); + }); + + console.log("Backup Transmitter running in port", port); +} + +module.exports = startBackupTransmitter; \ No newline at end of file diff --git a/src/main.js b/src/main.js index 559d7d0..431357e 100644 --- a/src/main.js +++ b/src/main.js @@ -1,4 +1,4 @@ -const config = require('../args/config.json'); +const config = require('../args/app-config.json'); global.floGlobals = require('../public/floGlobals'); require('./set_globals'); require('./lib'); @@ -7,14 +7,17 @@ require('./floBlockchainAPI'); const Database = require("./database"); const App = require('./app'); -const floGlobals = require('../public/floGlobals'); const PORT = config['port']; module.exports = function startServer(public_dir) { try { var _tmp = require('../args/keys.json'); _tmp = floCrypto.retrieveShamirSecret(_tmp); - var _pass = process.env.password; + var _pass = process.env.PASSWORD; + if (!_pass) { + console.error('Password not entered!'); + process.exit(1); + } _tmp = Crypto.AES.decrypt(_tmp, _pass); if (floCrypto.verifyPrivKey(_tmp, floGlobals.adminID)) { global.myPrivKey = _tmp; @@ -35,5 +38,10 @@ module.exports = function startServer(public_dir) { Database(config["sql_user"], config["sql_pwd"], config["sql_db"], config["sql_host"]).then(DB => { const app = App(config['secret'], DB); app.listen(PORT, () => console.log(`Server Running at port ${PORT}`)); + //start backup + if (config["backup-port"] && config["backup-floIDs"].length) { + var backupTransmitter = require('./backup/transmit'); + backupTransmitter = new backupTransmitter(DB, config["backup-port"], config["backup-floIDs"]); + } }); }; \ No newline at end of file diff --git a/src/market.js b/src/market.js index e25a29b..da263a8 100644 --- a/src/market.js +++ b/src/market.js @@ -1,5 +1,3 @@ -const floGlobals = require("./floGlobals"); - var net_FLO_price; //container for FLO price (from API or by model) var DB; //container for database @@ -127,7 +125,7 @@ function addBuyOrder(floID, quantity, max_price) { 
return reject(INVALID(`Invalid quantity (${quantity})`)); else if (typeof max_price !== "number" || max_price <= 0) return reject(INVALID(`Invalid max_price (${max_price})`)); - DB.query("SELECT rupeeBalance FROM Users WHERE floID=?", [floID]).then(result => { + DB.query("SELECT rupeeBalance FROM Cash WHERE floID=?", [floID]).then(result => { if (result.length < 1) return reject(INVALID("FLO ID not registered")); let total = result.pop()["rupeeBalance"]; @@ -205,7 +203,7 @@ function getBestBuyer(cur_price, n = 0) { let buyOrder = result.shift(); if (!buyOrder) return reject("No valid buyers available"); - DB.query("SELECT rupeeBalance AS bal FROM Users WHERE floID=?", [buyOrder.floID]).then(result => { + DB.query("SELECT rupeeBalance AS bal FROM Cash WHERE floID=?", [buyOrder.floID]).then(result => { if (result[0].bal < cur_price * buyOrder.quantity) { //This should not happen unless a buy order is placed when user doesnt have enough rupee balance console.warn(`Buy order ${buyOrder.id} is active, but rupee# is insufficient`); @@ -293,8 +291,8 @@ function processBuyAndSellOrder(seller_best, buyer_best, txQueries) { function updateBalance(seller_best, buyer_best, txQueries, cur_price, quantity) { //Update rupee balance for seller and buyer let totalAmount = cur_price * quantity; - txQueries.push(["UPDATE Users SET rupeeBalance=rupeeBalance+? WHERE floID=?", [totalAmount, seller_best.floID]]); - txQueries.push(["UPDATE Users SET rupeeBalance=rupeeBalance-? WHERE floID=?", [totalAmount, buyer_best.floID]]); + txQueries.push(["UPDATE Cash SET rupeeBalance=rupeeBalance+? WHERE floID=?", [totalAmount, seller_best.floID]]); + txQueries.push(["UPDATE Cash SET rupeeBalance=rupeeBalance-? 
WHERE floID=?", [totalAmount, buyer_best.floID]]); //Add coins to Buyer txQueries.push(["INSERT INTO Vault(floID, base, quantity) VALUES (?, ?, ?)", [buyer_best.floID, cur_price, quantity]]) //Record transaction @@ -305,7 +303,7 @@ function updateBalance(seller_best, buyer_best, txQueries, cur_price, quantity) function getAccountDetails(floID) { return new Promise((resolve, reject) => { let select = []; - select.push(["rupeeBalance", "Users"]); + select.push(["rupeeBalance", "Cash"]); select.push(["base, quantity", "Vault"]); select.push(["id, quantity, minPrice, time_placed", "SellOrder"]); select.push(["id, quantity, maxPrice, time_placed", "BuyOrder"]); @@ -517,7 +515,7 @@ function confirmDepositRupee() { txQueries.push(["INSERT INTO inputFLO(txid, floID, amount, status) VALUES (?, ?, ?)", [req.txid, req.floID, amount_flo, "SUCCESS"]]); } txQueries.push(["UPDATE inputRupee SET status=? WHERE id=?", ["SUCCESS", req.id]]); - txQueries.push(["UPDATE Users SET rupeeBalance=rupeeBalance+? WHERE floID=?", [amount_rupee, req.floID]]); + txQueries.push(["UPDATE Cash SET rupeeBalance=rupeeBalance+? WHERE floID=?", [amount_rupee, req.floID]]); DB.transaction(txQueries) .then(result => console.debug("Rupee deposited for ", req.floID)) .catch(error => console.error(error)); @@ -571,7 +569,7 @@ function withdrawRupee(floID, amount) { let available = total - locked; if (available < required_flo) return reject(INVALID(`Insufficient FLO (Some FLO are locked in sell orders)! 
Required ${required_flo} FLO to withdraw tokens`)); - DB.query("SELECT rupeeBalance FROM Users WHERE floID=?", [floID]).then(result => { + DB.query("SELECT rupeeBalance FROM Cash WHERE floID=?", [floID]).then(result => { if (result.length < 1) return reject(INVALID(`FLO_ID: ${floID} not registered`)); if (result[0].rupeeBalance < amount) @@ -590,7 +588,7 @@ function withdrawRupee(floID, amount) { } if (rem > 0) //should not happen AS the total and net is checked already return reject(INVALID("Insufficient FLO")); - txQueries.push(["UPDATE Users SET rupeeBalance=rupeeBalance-? WHERE floID=?", [amount, floID]]); + txQueries.push(["UPDATE Cash SET rupeeBalance=rupeeBalance-? WHERE floID=?", [amount, floID]]); DB.transaction(txQueries).then(result => { //Send FLO to user via blockchain API diff --git a/src/request.js b/src/request.js index 56568c3..9acb65c 100644 --- a/src/request.js +++ b/src/request.js @@ -64,7 +64,10 @@ function SignUp(req, res) { }, data.sign, data.pubKey); if (req_str instanceof INVALID) return res.status(INVALID.e_code).send(req_str.message); - DB.query("INSERT INTO Users(floID, pubKey) VALUES (?, ?)", [data.floID, data.pubKey]).then(result => { + let txQueries = []; + txQueries.push(["INSERT INTO Users(floID, pubKey) VALUES (?, ?)", [data.floID, data.pubKey]]); + txQueries.push(["INSERT INTO Cash (floID) Values (?)", data.floID]); + DB.transaction(txQueries).then(_ => { storeRequest(data.floID, req_str, data.sign); res.send("Account Created"); }).catch(error => {