Merge branch 'sairajzero:main' into main

This commit is contained in:
Sai Raj 2021-10-17 18:52:22 +05:30 committed by GitHub
commit 0a2570ec7a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
16 changed files with 719 additions and 37 deletions

3
.gitignore vendored
View File

@ -1,6 +1,7 @@
/node_modules/
/package-lock.json
/args/config.json
/args/app-config.json
/args/backup-config.json
/args/param.json
/args/keys.json
*test*

View File

@ -1,6 +1,6 @@
MIT License
Copyright (c) 2021 sairajzero
Copyright (c) 2021 Ranchimall
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@ -1,8 +1,9 @@
-- Main Tables
CREATE TABLE Users (
floID CHAR(34) NOT NULL,
pubKey CHAR(66) NOT NULL,
created DATETIME DEFAULT CURRENT_TIMESTAMP,
rupeeBalance DECIMAL(12, 2) DEFAULT 0.00,
PRIMARY KEY(floID)
);
@ -21,6 +22,13 @@ sign TEXT NOT NULL,
request_time DATETIME DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE Cash (
id INT NOT NULL AUTO_INCREMENT,
floID CHAR(34) NOT NULL UNIQUE,
rupeeBalance DECIMAL(12, 2) DEFAULT 0.00,
PRIMARY KEY(id)
);
CREATE TABLE Vault (
id INT NOT NULL AUTO_INCREMENT,
floID CHAR(34) NOT NULL,
@ -91,3 +99,69 @@ amount FLOAT NOT NULL,
status VARCHAR(50) NOT NULL,
PRIMARY KEY(id)
);
-- Backup feature (Table and Triggers)
-- _backup is a per-row change log consumed by the backup sync
-- (src/backup/transmit.js): mode=TRUE marks an inserted/updated row,
-- mode=NULL marks a deleted row; timestamp is refreshed (via DEFAULT)
-- on every change so sync requests can filter by "changed since".
CREATE TABLE _backup (
    t_name VARCHAR(20),
    id INT,
    mode BOOLEAN DEFAULT TRUE,
    timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
    PRIMARY KEY(t_name, id)
);
-- One trigger triple per mirrored table: _I (insert) and _U (update) flag
-- the row with mode=TRUE; _D (delete) flags it with mode=NULL.
CREATE TRIGGER Cash_I AFTER INSERT ON Cash
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('Cash', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT;
CREATE TRIGGER Cash_U AFTER UPDATE ON Cash
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('Cash', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT;
CREATE TRIGGER Cash_D AFTER DELETE ON Cash
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('Cash', OLD.id) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=DEFAULT;
CREATE TRIGGER Vault_I AFTER INSERT ON Vault
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('Vault', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT;
CREATE TRIGGER Vault_U AFTER UPDATE ON Vault
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('Vault', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT;
CREATE TRIGGER Vault_D AFTER DELETE ON Vault
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('Vault', OLD.id) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=DEFAULT;
CREATE TRIGGER SellOrder_I AFTER INSERT ON SellOrder
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('SellOrder', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT;
CREATE TRIGGER SellOrder_U AFTER UPDATE ON SellOrder
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('SellOrder', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT;
CREATE TRIGGER SellOrder_D AFTER DELETE ON SellOrder
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('SellOrder', OLD.id) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=DEFAULT;
CREATE TRIGGER BuyOrder_I AFTER INSERT ON BuyOrder
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('BuyOrder', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT;
CREATE TRIGGER BuyOrder_U AFTER UPDATE ON BuyOrder
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('BuyOrder', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT;
CREATE TRIGGER BuyOrder_D AFTER DELETE ON BuyOrder
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('BuyOrder', OLD.id) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=DEFAULT;
CREATE TRIGGER inputFLO_I AFTER INSERT ON inputFLO
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('inputFLO', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT;
CREATE TRIGGER inputFLO_U AFTER UPDATE ON inputFLO
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('inputFLO', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT;
CREATE TRIGGER inputFLO_D AFTER DELETE ON inputFLO
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('inputFLO', OLD.id) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=DEFAULT;
CREATE TRIGGER outputFLO_I AFTER INSERT ON outputFLO
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('outputFLO', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT;
CREATE TRIGGER outputFLO_U AFTER UPDATE ON outputFLO
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('outputFLO', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT;
CREATE TRIGGER outputFLO_D AFTER DELETE ON outputFLO
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('outputFLO', OLD.id) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=DEFAULT;
CREATE TRIGGER inputRupee_I AFTER INSERT ON inputRupee
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('inputRupee', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT;
CREATE TRIGGER inputRupee_U AFTER UPDATE ON inputRupee
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('inputRupee', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT;
CREATE TRIGGER inputRupee_D AFTER DELETE ON inputRupee
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('inputRupee', OLD.id) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=DEFAULT;
CREATE TRIGGER outputRupee_I AFTER INSERT ON outputRupee
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('outputRupee', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT;
CREATE TRIGGER outputRupee_U AFTER UPDATE ON outputRupee
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('outputRupee', NEW.id) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=DEFAULT;
CREATE TRIGGER outputRupee_D AFTER DELETE ON outputRupee
FOR EACH ROW INSERT INTO _backup (t_name, id) VALUES ('outputRupee', OLD.id) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=DEFAULT;

43
package.json Normal file
View File

@ -0,0 +1,43 @@
{
"name": "flo-exchange-market",
"version": "1.0.0",
"description": "Central Exchange market for FLO",
"main": "src/main.js",
"dependencies": {
"cookie-parser": "^1.4.5",
"express": "^4.17.1",
"express-session": "^1.17.2",
"mysql": "^2.18.1",
"node-fetch": "^2.6.1",
"ws": "^8.2.3"
},
"devDependencies": {},
"scripts": {
"postinstall": "node setup/post-install.js",
"help": "node setup/help.js",
"setup": "node setup/post-install.js",
"configure": "node setup/configure-settings.js",
"reset-password": "node setup/reset-password.js",
"create-schema": "node setup/create-schema.js",
"configure-backup": "node setup/configure-backup.js",
"create-backup-schema":"node setup/create-backup-schema.js",
"backup":"node src/backup/storage.js",
"start": "node start.js"
},
"repository": {
"type": "git",
"url": "git+https://github.com/ranchimall/exchange-market.git"
},
"keywords": [
"exchange-market",
"FLO",
"blockchain",
"Ranchimall"
],
"author": "Sai Raj",
"license": "MIT",
"bugs": {
"url": "https://github.com/ranchimall/exchange-market/issues"
},
"homepage": "https://github.com/ranchimall/exchange-market#readme"
}

100
setup/configure-backup.js Normal file
View File

@ -0,0 +1,100 @@
const fs = require('fs');
const getInput = require('./getInput');
// Backup-node configuration: loaded from args/backup-config.json when it
// exists; otherwise initialised with defaults (first run).
var config, flag_new;
try {
    config = require('../args/backup-config.json');
    flag_new = false;
} catch (error) {
    // No saved configuration yet — start from defaults. flag_new makes the
    // "re-configure?" prompts auto-answer yes (see flaggedYesOrNo).
    config = {
        "sql_user": null,
        "sql_pwd": null,
        "sql_db": "exchange",
        "sql_host": "localhost",
        "main_server_url": null,
        "private_key": null
    };
    flag_new = true;
}
// Ask a yes/no question, except on a fresh configuration (flag_new) where
// every question is auto-answered "yes". Resolves with the boolean answer.
function flaggedYesOrNo(text) {
    // Fix: `reject` was used in the catch handler but never declared in the
    // executor, which threw a ReferenceError on the error path.
    return new Promise((resolve, reject) => {
        if (flag_new)
            resolve(true);
        else
            getInput.YesOrNo(text)
                .then(result => resolve(result))
                .catch(error => reject(error))
    })
}
// Prompt for the main server URL (current value offered as default) and
// persist it into the in-memory config; always resolves true.
function configureMainServerURL() {
    return new Promise(resolve => {
        const prompt = 'Enter URL of main server';
        getInput.Text(prompt, config["main_server_url"]).then(answer => {
            config["main_server_url"] = answer;
            resolve(true);
        });
    });
}
// Collect the MySQL connection settings (host, database, user, password),
// prompting sequentially with the current values as defaults.
// Resolves true when values were (re)entered, false when the user declined.
function configureSQL() {
    return new Promise(resolve => {
        flaggedYesOrNo('Do you want to re-configure mySQL connection').then(value => {
            if (!value)
                return resolve(false);
            console.log('Enter mySQL connection values: ')
            // Flat promise chain instead of nested callbacks; each step
            // stores its answer before asking the next question.
            getInput.Text('Host', config['sql_host'])
                .then(host => {
                    config['sql_host'] = host;
                    return getInput.Text('Database name', config['sql_db']);
                })
                .then(dbname => {
                    config['sql_db'] = dbname;
                    return getInput.Text('MySQL username', config['sql_user']);
                })
                .then(sql_user => {
                    config['sql_user'] = sql_user;
                    return getInput.Text('Mysql password', config['sql_pwd']);
                })
                .then(sql_pwd => {
                    config['sql_pwd'] = sql_pwd;
                    resolve(true);
                });
        })
    })
}
// Full backup-node configuration flow:
//  1. prompt for the main server URL,
//  2. optionally re-configure the MySQL connection,
//  3. persist the result to args/backup-config.json,
//  4. optionally create the database schema right away.
function configure() {
    return new Promise((resolve, reject) => {
        configureMainServerURL().then(port_result => {
            configureSQL().then(sql_result => {
                fs.writeFile(__dirname + '/../args/backup-config.json', JSON.stringify(config), 'utf8', (err) => {
                    if (err) {
                        console.error(err);
                        return reject(false);
                    }
                    console.log('Configuration successful!');
                    // Offer schema creation only when SQL settings were (re)entered.
                    if (sql_result) {
                        getInput.YesOrNo('Do you want to create schema in the database').then(value => {
                            if (value) {
                                const createSchema = require('./create-schema');
                                // false => create-schema loads backup-config.json
                                // instead of app-config.json (see setup/create-schema.js).
                                createSchema(false).then(result => resolve(result))
                                    .catch(error => {
                                        console.log('Retry using: \n' + 'npm run create-backup-schema');
                                        reject(error);
                                    });
                            } else {
                                console.log('To create schema, use: \n' + 'npm run create-backup-schema');
                                resolve(true);
                            }
                        });
                    } else
                        resolve(true);
                })
            })
        });
    })
}
// Run immediately when invoked directly (npm run configure-backup);
// export the function when required by another setup script.
if (!module.parent)
    configure().then(_ => null).catch(_ => null);
else
    module.exports = configure;

View File

@ -3,7 +3,7 @@ const getInput = require('./getInput');
var config, flag_new;
try {
config = require('./args/config.json');
config = require('../args/app-config.json');
flag_new = false;
} catch (error) {
config = {
@ -13,7 +13,10 @@ try {
"sql_user": null,
"sql_pwd": null,
"sql_db": "exchange",
"sql_host": "localhost"
"sql_host": "localhost",
"backup-port": "8081",
"backup-floIDs": []
};
flag_new = true;
}
@ -29,15 +32,67 @@ function flaggedYesOrNo(text) {
})
}
// Repeatedly prompt for floIDs to add until the user submits the default
// ("continue"); resolves with the de-duplicated accumulated list.
// NOTE(review): this calls getInput(...) directly while the rest of the file
// uses getInput.Text(...) / getInput.YesOrNo(...) — confirm the getInput
// module itself is callable, otherwise this throws at runtime.
function getBackupIDs(ids) {
    return new Promise((resolve, reject) => {
        getInput("", "continue").then(id => {
            if (id === "continue")
                resolve(Array.from(new Set(ids)));
            else {
                ids.push(id);
                // Recurse until the sentinel answer is given.
                getBackupIDs(ids)
                    .then(result => resolve(result))
                    .catch(error => reject(error));
            }
        })
    })
}
// Configure backup serving: the port on which backups are served and the
// list of floIDs permitted to request a backup sync. Always resolves true.
function configureBackup() {
    return new Promise(resolve => {
        getInput.Text('Enter backup port (N = No backup)', config["backup-port"]).then(backup_port => {
            // Fix: compared against an undeclared identifier N; the sentinel
            // is the literal string "N" (disables backup, stored as null).
            config["backup-port"] = backup_port === "N" ? null : backup_port;
            if (!config["backup-port"])
                return resolve(true);
            getInput.YesOrNo('Do you want to add/remove backup floIDs?').then(value => {
                if (value) {
                    console.log("Enter floIDs to add as backup: "); // fix: was console(...)
                    getBackupIDs(config["backup-floIDs"]).then(ids => {
                        // List the IDs (1-indexed) and offer deletion by number.
                        let tmp_obj = {};
                        for (let i in ids) {
                            // for...in yields string keys; coerce before the
                            // arithmetic (previously "0" + 1 === "01").
                            let n = Number(i) + 1;
                            console.log(n, ":", ids[i]);
                            tmp_obj[n] = ids[i];
                        }
                        getInput.Text("Enter numbers to delete (seperated by comma)", "continue").then(ri => {
                            if (ri === "continue")
                                config["backup-floIDs"] = ids;
                            else {
                                for (let i of ri.split(","))
                                    delete tmp_obj[parseInt(i)];
                                // Fix: plain objects are not iterable with
                                // for...of; collect the remaining values.
                                config["backup-floIDs"] = Object.values(tmp_obj);
                            }
                            resolve(true);
                        })
                    })
                } else
                    resolve(true);
            })
        })
    })
}
function configurePort() {
return new Promise(resolve => {
getInput.Text('Enter port', config["port"]).then(port => {
config["port"] = port;
resolve(true);
configureBackup()
.then(result => resolve(true))
})
})
};
}
function configureSQL() {
return new Promise(resolve => {
@ -61,7 +116,7 @@ function configureSQL() {
resolve(false);
})
})
};
}
function randomizeSessionSecret() {
return new Promise((resolve) => {
@ -72,7 +127,7 @@ function randomizeSessionSecret() {
var characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
for (var i = 0; i < N; i++)
secret += characters.charAt(Math.floor(Math.random() * characters.length));
config['secret'] = secret
config['secret'] = secret;
resolve(true);
} else
resolve(false);
@ -85,7 +140,7 @@ function configure() {
configurePort().then(port_result => {
randomizeSessionSecret().then(secret_result => {
configureSQL().then(sql_result => {
fs.writeFile(__dirname + '/../args/config.json', JSON.stringify(config), 'utf8', (err) => {
fs.writeFile(__dirname + '/../args/app-config.json', JSON.stringify(config), 'utf8', (err) => {
if (err) {
console.error(err);
return reject(false);

View File

@ -0,0 +1,2 @@
const createSchema = require('./create-schema');
createSchema(false);

View File

@ -1,8 +1,8 @@
const fs = require('fs');
const config = require('./args/config.json');
let Database = require('./src/database');
let Database = require('../src/database');
function createSchema() {
function createSchema(app = true) {
const config = require('../args/' + (app ? 'app' : 'backup') + "-config.json");
return new Promise((resolve, reject) => {
fs.readFile(__dirname + '/../args/schema.sql', 'utf8', (err, data) => {
if (err) {

View File

@ -1,11 +1,26 @@
let message = `
Exchange market
---------------
npm install - Install the app and node modules.
npm run setup - Finish the setup (configure and reset password).
npm run configure - Configure the app.
npm run reset-password - Reset the password (for private-key).
npm run create-schema - Create the schema in MySQL database.
npm run help - List all commands.
npm start - Start the application.
`;
npm install - Install the app and node modules.
npm run help - List all commands.
npm run setup - Finish the setup (configure and reset password).
npm run configure - Configure the app.
npm run reset-password - Reset the password (for private-key).
npm run create-schema - Create schema in MySQL database.
npm run configure-backup - Configure the backup.
npm run create-backup-schema - Create backup-schema in MySQL database.
npm run backup - Run the backup-node.
npm start - Start the application (main).
NOTE: env variable 'PASSWORD' required for 'npm start'.
WINDOWS:
$env:PASSWORD="<password>"; npm start
LINUX:
PASSWORD="<password"> npm start
`;
console.log(message);

View File

@ -28,6 +28,8 @@ getInput.YesOrNo('Do you want to finish the setup now').then(value => {
console.log('Reset the password later using:\n' + 'npm run reset-password');
})
})
} else
} else {
console.log('Finish the setup later using:\n' + 'npm run setup');
console.log('To configure for backup use:\n' + 'npm run configure-backup');
}
})

213
src/backup/storage.js Normal file
View File

@ -0,0 +1,213 @@
// Backup-node storage daemon: periodically connects to the main exchange
// server over WebSocket and mirrors its database into a local MySQL copy.
const config = require('../../args/backup-config.json');
const floGlobals = require('../../public/floGlobals');
require('../set_globals');
require('../lib');
require('../floCrypto'); // presumably registers the global floCrypto used below — TODO confirm
const WebSocket = require('ws');
// WAIT_TIME: how long an active sync instance may stay silent before it is
// considered stale; BACKUP_INTERVAL: how often a new sync is started.
const WAIT_TIME = 5 * 60 * 1000,
    BACKUP_INTERVAL = 5 * 60 * 1000;
// Identity of this backup node, derived from the configured private key.
const myPrivKey = config.private_key,
    myPubKey = floCrypto.getPubKeyHex(config.private_key),
    myFloID = floCrypto.getFloID(config.private_key);
const Database = require("../database");
var DB; //Container for Database connection
// Entry point: connect to MySQL, then run a backup-sync instance immediately
// and again on every BACKUP_INTERVAL tick.
function startBackupStorage() {
    console.log("Logged in as", myFloID);
    Database(config["sql_user"], config["sql_pwd"], config["sql_db"], config["sql_host"]).then(db => {
        DB = db;
        IntervalFunction();
        setInterval(IntervalFunction, BACKUP_INTERVAL); // removed unused local holding the handle
        console.log("Backup Storage Started");
    }).catch(error => {
        // Fix: the connection promise had no rejection handler; without a DB
        // connection the backup node cannot do anything useful.
        console.error(error);
        process.exit(1);
    })
}
// One sync instance per interval tick: log a sequence number, then open
// (or re-open, if the previous one went stale) a backup-sync request.
function IntervalFunction() {
    IntervalFunction.count += 1;
    console.log(Date.now().toString(), "Instance #" + IntervalFunction.count);
    requestInstance.open();
}
IntervalFunction.count = 0; // instance counter kept on the function object
// Resolve with an open WebSocket to `url`, or reject on connection failure.
function connectToWSServer(url) {
    return new Promise((resolve, reject) => {
        const ws = new WebSocket(url);
        ws.on('open', _ => resolve(ws));
        // Fix: the handler was `_ => reject(error)`, referencing an
        // undefined `error` — now rejects with the emitted error.
        ws.on('error', error => reject(error));
    })
}
// Build and send a signed BACKUP_SYNC request carrying the most recent
// timestamp present in the local mirror, so the server only sends newer rows.
// Resolves with the request object that was sent.
function requestBackupSync(ws) {
    return new Promise((resolve, reject) => {
        // Time columns whose latest entry defines "how far we are synced".
        const tables = {
            Users: "created",
            Request_Log: "request_time",
            Transactions: "tx_time",
            _backup: "timestamp"
        };
        let subs = [];
        for (let t in tables) // fix: bare `t` leaked an implicit global (throws in strict mode)
            subs.push(`SELECT MAX(${tables[t]}) as ts FROM ${t}`);
        DB.query(`SELECT MAX(ts) as last_time FROM (${subs.join(' UNION ')}) AS Z`).then(result => {
            let request = {
                floID: myFloID,
                pubKey: myPubKey,
                type: "BACKUP_SYNC",
                last_time: result[0].last_time,
                req_time: Date.now()
            };
            // Sign "type|req_time" so the server can authenticate this node.
            request.sign = floCrypto.signData(request.type + "|" + request.req_time, myPrivKey);
            console.debug("REQUEST: ", request);
            ws.send(JSON.stringify(request));
            resolve(request);
        }).catch(error => reject(error))
    })
}
// Singleton holding the state of the current backup-sync request/response cycle.
const requestInstance = {
    ws: null,                // active WebSocket to the main server
    delete_sync: null,       // count of DELETE packets processed
    add_sync: null,          // count of ADD_UPDATE packets processed
    immutable_sync: null,    // count of ADD_IMMUTABLE packets processed
    delete_data: null,       // rows reported deleted (from DELETE packet)
    add_data: null,          // rows expected to be added/updated (from header)
    total_add: null,         // expected number of ADD_UPDATE packets
    request: null,           // the signed request sent to the server
    last_response_time: null // time of the most recent message received
};
// Start a sync: refuse if one is already active (unless it has been silent
// for longer than WAIT_TIME, in which case it is torn down first).
requestInstance.open = function() {
    const self = this;
    //Check if there is an active request
    if (self.request) {
        console.log("A request is already active");
        if (self.last_response_time < Date.now() - WAIT_TIME)
            self.close();
        else
            return;
    }
    connectToWSServer(config["main_server_url"]).then(ws => {
        requestBackupSync(ws).then(request => {
            self.request = request;
            self.ws = ws;
            ws.on('message', processBackupData)
            // NOTE(review): mixes .on('message') with the onclose property —
            // confirm the installed ws version supports onclose.
            ws.onclose = _ => console.log("Connection was Interrupted");
        }).catch(error => console.error(error))
    }).catch(error => console.error(error))
}
// Tear down: silence the onclose log, close the socket, reset all sync state.
requestInstance.close = function() {
    const self = this;
    self.ws.onclose = () => null;
    self.ws.close();
    self.ws = null;
    self.delete_sync = null;
    self.add_sync = null;
    self.immutable_sync = null;
    self.delete_data = null;
    self.add_data = null;
    self.total_add = null;
    self.request = null;
    self.last_response_time = null;
}
// WebSocket message handler for an active backup-sync instance.
// Dispatches on response.mode:
//   DELETE            -> remove rows deleted on the main server
//   ADD_UPDATE_HEADER -> announces the rows that follow as ADD_UPDATE packets
//   ADD_UPDATE        -> upsert mutable rows of one table
//   ADD_IMMUTABLE     -> insert append-only rows of one table
//   END               -> sync finished; reconcile _backup and reset state
function processBackupData(message) {
    const self = requestInstance;
    self.last_response_time = Date.now();
    try {
        const response = JSON.parse(message);
        console.debug(response);
        if (response.error) {
            console.log(response.error);
            self.close();
            return;
        }
        switch (response.mode) {
            case "END":
                if (response.status) {
                    // Warn when fewer ADD_UPDATE packets arrived than announced.
                    if (self.total_add !== self.add_sync)
                        console.info(`Backup Sync Instance finished!, ${self.total_add - self.add_sync} packets not received.`);
                    else
                        console.info("Backup Sync Instance finished successfully");
                    updateBackupTable(self.add_data, self.delete_data)
                } else
                    console.info("Backup Sync was not successful! Failed info: ", response.info);
                self.close();
                break;
            case "DELETE":
                self.delete_data = response.delete_data;
                self.delete_sync += 1;
                deleteData(response.delete_data);
                break;
            case "ADD_UPDATE_HEADER":
                self.add_data = response.add_data;
                self.total_add = Object.keys(response.add_data).length;
                break;
            case "ADD_UPDATE":
                self.add_sync += 1;
                addUpdateData(response.table, response.data);
                break;
            case "ADD_IMMUTABLE":
                self.immutable_sync += 1;
                addImmutableData(response.table, response.data);
                break;
        }
    } catch (error) {
        console.error(error);
    }
}
// Mirror the server's _backup bookkeeping locally after a successful sync:
// added/updated rows get mode=TRUE, deleted rows mode=NULL, both with the
// server-reported timestamp.
function updateBackupTable(add_data, delete_data) {
    //update _backup table for added data
    DB.transaction(add_data.map(r => [
        "INSERT INTO _backup (t_name, id, mode, timestamp) VALUES (?, ?, TRUE, ?) ON DUPLICATE KEY UPDATE mode=TRUE, timestamp=?",
        [r.t_name, r.id, validateValue(r.timestamp), validateValue(r.timestamp)]
    ])).then(_ => null).catch(error => console.error(error));
    //update _backup table for deleted data
    // (removed unused del_queries scaffolding that pushed empty arrays)
    DB.transaction(delete_data.map(r => [
        "INSERT INTO _backup (t_name, id, mode, timestamp) VALUES (?, ?, NULL, ?) ON DUPLICATE KEY UPDATE mode=NULL, timestamp=?",
        [r.t_name, r.id, validateValue(r.timestamp), validateValue(r.timestamp)]
    ])).then(_ => null).catch(error => console.error(error));
}
// Remove rows that were deleted on the main server: group the received
// (t_name, id) pairs by table, then run one DELETE per table in a single
// transaction.
function deleteData(data) {
    const grouped = {};
    for (const row of data) {
        if (!(row.t_name in grouped))
            grouped[row.t_name] = [];
        grouped[row.t_name].push(row.id);
    }
    const statements = Object.keys(grouped)
        .map(table => `DELETE FROM ${table} WHERE id IN (${grouped[table]})`);
    DB.transaction(statements).then(_ => null).catch(error => console.error(error));
}
// Upsert mutable rows received from the main server into `table` with a
// single multi-row INSERT ... ON DUPLICATE KEY UPDATE (all non-id columns
// are overwritten from the incoming row).
function addUpdateData(table, data) {
    let cols = Object.keys(data[0]),
        _mark = "(" + Array(cols.length).fill('?') + ")";
    // Fix: `values` was assigned without a declaration, leaking an implicit
    // global (and throwing in strict mode).
    const values = data.map(r => cols.map(c => validateValue(r[c]))).flat();
    let statement = `INSERT INTO ${table} (${cols}) VALUES ${Array(data.length).fill(_mark)} AS new` +
        " ON DUPLICATE KEY UPDATE " + cols.filter(c => c != 'id').map(c => c + " = new." + c);
    DB.query(statement, values).then(_ => null).catch(error => console.error(error));
}
// Insert append-only rows received from the main server; empty batches are
// skipped. For tables whose primary key is not `id` (listed in primaryKeys),
// duplicates are turned into no-ops via a self-assignment ON DUPLICATE KEY.
function addImmutableData(table, data) {
    if (!data.length)
        return;
    const primaryKeys = {
        Users: "floID"
    };
    let cols = Object.keys(data[0]),
        _mark = "(" + Array(cols.length).fill('?') + ")";
    // Fix: `values` was assigned without a declaration, leaking an implicit
    // global (and throwing in strict mode).
    const values = data.map(r => cols.map(c => validateValue(r[c]))).flat();
    let statement = `INSERT INTO ${table} (${cols}) VALUES ${Array(data.length).fill(_mark)}`;
    if (table in primaryKeys)
        statement += ` ON DUPLICATE KEY UPDATE ${primaryKeys[table]}=${primaryKeys[table]}`;
    DB.query(statement, values).then(_ => null).catch(error => console.error(error));
}
// Strip the trailing 'Z' from ISO-style timestamps ("....123Z") so MySQL
// accepts them; every other value passes through unchanged.
const validateValue = (val) => {
    if (typeof val === "string" && /\.\d{3}Z$/.test(val))
        return val.substring(0, val.length - 1);
    return val;
};
startBackupStorage();

168
src/backup/transmit.js Normal file
View File

@ -0,0 +1,168 @@
const WebSocket = require('ws');
var DB; //Container for database
// Transmit everything changed after `timestamp` to the requesting backup
// node, then report the overall outcome with a single END packet.
function sendBackup(timestamp, ws) {
    // Normalise the incoming timestamp: missing -> 0, ISO-style strings
    // lose their trailing 'Z'.
    if (!timestamp)
        timestamp = 0;
    else if (typeof timestamp === "string" && /\.\d{3}Z$/.test(timestamp))
        timestamp = timestamp.substring(0, timestamp.length - 1);
    const syncTasks = [
        send_dataSync(timestamp, ws),
        send_deleteSync(timestamp, ws),
        send_dataImmutable(timestamp, ws)
    ];
    Promise.allSettled(syncTasks).then(outcome => {
        const failedSync = outcome
            .filter(r => r.status === "rejected")
            .map(r => r.reason);
        if (failedSync.length) {
            console.info("Backup Sync Failed:", failedSync);
            ws.send(JSON.stringify({
                mode: "END",
                status: false,
                info: failedSync
            }));
        } else {
            console.info("Backup Sync completed");
            ws.send(JSON.stringify({
                mode: "END",
                status: true
            }));
        }
    });
}
// Send the (t_name, id) rows deleted after `timestamp` — i.e. _backup rows
// with mode NULL — as a single DELETE packet. Resolves/rejects with the
// tag "deleteSync" for the caller's outcome report.
function send_deleteSync(timestamp, ws) {
    return new Promise((resolve, reject) => {
        DB.query("SELECT * FROM _backup WHERE mode is NULL AND timestamp > ?", [timestamp])
            .then(rows => {
                const packet = {
                    mode: "DELETE",
                    delete_data: rows
                };
                ws.send(JSON.stringify(packet));
                resolve("deleteSync");
            })
            .catch(error => {
                console.error(error);
                reject("deleteSync");
            });
    })
}
// Send all mutable rows flagged (mode=TRUE) in _backup since `timestamp`:
// first an ADD_UPDATE_HEADER listing every pending (t_name, id), then one
// ADD_UPDATE packet per affected table with the full row data.
function send_dataSync(timestamp, ws) {
    // Helper: send every row of `table` whose id is in id_list.
    const sendTable = (table, id_list) => new Promise((res, rej) => {
        DB.query(`SELECT * FROM ${table} WHERE id IN (${id_list})`)
            .then(data => {
                ws.send(JSON.stringify({
                    table,
                    mode: "ADD_UPDATE",
                    data
                }));
                res(table);
            }).catch(error => {
                console.error(error);
                rej(table);
            });
    });
    return new Promise((resolve, reject) => {
        DB.query("SELECT * FROM _backup WHERE mode=TRUE AND timestamp > ?", [timestamp]).then(result => {
            // Group pending row ids by table name.
            let sync_needed = {};
            result.forEach(r => r.t_name in sync_needed ? sync_needed[r.t_name].push(r.id) : sync_needed[r.t_name] = [r.id]);
            ws.send(JSON.stringify({
                mode: "ADD_UPDATE_HEADER",
                add_data: result
            }));
            let promises = [];
            for (let table in sync_needed)
                promises.push(sendTable(table, sync_needed[table]));
            // Reject with the list of tables that failed (if any).
            Promise.allSettled(promises).then(result => {
                let failedTables = [];
                result.forEach(r => r.status === "rejected" ? failedTables.push(r.reason) : null);
                if (failedTables.length)
                    reject(["dataSync", failedTables]);
                else
                    resolve("dataSync");
            });
        }).catch(error => {
            console.error(error);
            reject("dataSync");
        });
    });
}
// Send rows of the append-only tables (which have no _backup triggers)
// created after `timestamp`, one ADD_IMMUTABLE packet per table, using
// each table's own time column to filter.
function send_dataImmutable(timestamp, ws) {
    // Table -> time column used for the "newer than" filter.
    const immutable_tables = {
        Users: "created",
        Request_Log: "request_time",
        Transactions: "tx_time"
    };
    // Helper: send every row of `table` newer than `timestamp`.
    const sendTable = (table, timeCol) => new Promise((res, rej) => {
        DB.query(`SELECT * FROM ${table} WHERE ${timeCol} > ?`, [timestamp])
            .then(data => {
                ws.send(JSON.stringify({
                    table,
                    mode: "ADD_IMMUTABLE",
                    data
                }));
                res(table);
            }).catch(error => {
                console.error(error);
                rej(table);
            });
    });
    return new Promise((resolve, reject) => {
        let promises = [];
        for (let table in immutable_tables)
            promises.push(sendTable(table, immutable_tables[table]));
        // Reject with the list of tables that failed (if any).
        Promise.allSettled(promises).then(result => {
            let failedTables = [];
            result.forEach(r => r.status === "rejected" ? failedTables.push(r.reason) : null);
            if (failedTables.length)
                reject(["dataImmutable", failedTables]);
            else
                resolve("dataImmutable");
        });
    })
}
// Backup transmitter (runs on the main server): listens on `port` for
// WebSocket connections from approved backup nodes and streams them the
// data changed since their reported last_time.
// NOTE(review): relies on a global floCrypto being registered elsewhere
// (main.js requires ./floCrypto) — confirm before using this module standalone.
function startBackupTransmitter(db, port, backupIDs) {
    DB = db;
    this.port = port;
    this.backupIDs = backupIDs;
    const wss = this.wss = new WebSocket.Server({
        port: port
    });
    this.close = () => wss.close();
    wss.on('connection', ws => {
        ws.on('message', message => {
            //verify if from a backup node
            try {
                let invalid = null,
                    request = JSON.parse(message);
                console.debug(request);
                // Authentication: floID must be whitelisted, the pubKey must
                // derive to that floID, and the signature over
                // "type|req_time" must verify against the pubKey.
                if (!backupIDs.includes(request.floID))
                    invalid = "FLO ID not approved for backup";
                else if (request.floID !== floCrypto.getFloID(request.pubKey))
                    invalid = "Invalid pubKey";
                else if (!floCrypto.verifySign(request.type + "|" + request.req_time, request.sign, request.pubKey))
                    invalid = "Invalid signature";
                else if (request.type !== "BACKUP_SYNC")
                    invalid = "Invalid Request Type";
                //TODO: check if request time is valid;
                if (invalid)
                    ws.send(JSON.stringify({
                        error: invalid
                    }));
                else
                    sendBackup(request.last_time, ws);
            } catch (error) {
                console.error(error);
                ws.send(JSON.stringify({
                    error: 'Unable to process the request!'
                }));
            }
        });
    });
    console.log("Backup Transmitter running in port", port);
}
module.exports = startBackupTransmitter;

View File

@ -147,7 +147,7 @@
key.setCompressed(true);
var privateKeyArr = key.getBitcoinPrivateKeyByteArray();
privateKey = BigInteger.fromByteArrayUnsigned(privateKeyArr);
var privateKey = BigInteger.fromByteArrayUnsigned(privateKeyArr);
var messageHash = Crypto.SHA256(data);
var messageHashBigInteger = new BigInteger(messageHash);

View File

@ -1,4 +1,4 @@
const config = require('../args/config.json');
const config = require('../args/app-config.json');
global.floGlobals = require('../public/floGlobals');
require('./set_globals');
require('./lib');
@ -7,14 +7,17 @@ require('./floBlockchainAPI');
const Database = require("./database");
const App = require('./app');
const floGlobals = require('../public/floGlobals');
const PORT = config['port'];
module.exports = function startServer(public_dir) {
try {
var _tmp = require('../args/keys.json');
_tmp = floCrypto.retrieveShamirSecret(_tmp);
var _pass = process.env.password;
var _pass = process.env.PASSWORD;
if (!_pass) {
console.error('Password not entered!');
process.exit(1);
}
_tmp = Crypto.AES.decrypt(_tmp, _pass);
if (floCrypto.verifyPrivKey(_tmp, floGlobals.adminID)) {
global.myPrivKey = _tmp;
@ -35,5 +38,10 @@ module.exports = function startServer(public_dir) {
Database(config["sql_user"], config["sql_pwd"], config["sql_db"], config["sql_host"]).then(DB => {
const app = App(config['secret'], DB);
app.listen(PORT, () => console.log(`Server Running at port ${PORT}`));
//start backup
if (config["backup-port"] && config["backup-floIDs"].length) {
var backupTransmitter = require('./backup/transmit');
backupTransmitter = new backupTransmitter(DB, config["backup-port"], config["backup-floIDs"]);
}
});
};

View File

@ -1,5 +1,3 @@
const floGlobals = require("./floGlobals");
var net_FLO_price; //container for FLO price (from API or by model)
var DB; //container for database
@ -127,7 +125,7 @@ function addBuyOrder(floID, quantity, max_price) {
return reject(INVALID(`Invalid quantity (${quantity})`));
else if (typeof max_price !== "number" || max_price <= 0)
return reject(INVALID(`Invalid max_price (${max_price})`));
DB.query("SELECT rupeeBalance FROM Users WHERE floID=?", [floID]).then(result => {
DB.query("SELECT rupeeBalance FROM Cash WHERE floID=?", [floID]).then(result => {
if (result.length < 1)
return reject(INVALID("FLO ID not registered"));
let total = result.pop()["rupeeBalance"];
@ -205,7 +203,7 @@ function getBestBuyer(cur_price, n = 0) {
let buyOrder = result.shift();
if (!buyOrder)
return reject("No valid buyers available");
DB.query("SELECT rupeeBalance AS bal FROM Users WHERE floID=?", [buyOrder.floID]).then(result => {
DB.query("SELECT rupeeBalance AS bal FROM Cash WHERE floID=?", [buyOrder.floID]).then(result => {
if (result[0].bal < cur_price * buyOrder.quantity) {
//This should not happen unless a buy order is placed when user doesnt have enough rupee balance
console.warn(`Buy order ${buyOrder.id} is active, but rupee# is insufficient`);
@ -293,8 +291,8 @@ function processBuyAndSellOrder(seller_best, buyer_best, txQueries) {
function updateBalance(seller_best, buyer_best, txQueries, cur_price, quantity) {
//Update rupee balance for seller and buyer
let totalAmount = cur_price * quantity;
txQueries.push(["UPDATE Users SET rupeeBalance=rupeeBalance+? WHERE floID=?", [totalAmount, seller_best.floID]]);
txQueries.push(["UPDATE Users SET rupeeBalance=rupeeBalance-? WHERE floID=?", [totalAmount, buyer_best.floID]]);
txQueries.push(["UPDATE Cash SET rupeeBalance=rupeeBalance+? WHERE floID=?", [totalAmount, seller_best.floID]]);
txQueries.push(["UPDATE Cash SET rupeeBalance=rupeeBalance-? WHERE floID=?", [totalAmount, buyer_best.floID]]);
//Add coins to Buyer
txQueries.push(["INSERT INTO Vault(floID, base, quantity) VALUES (?, ?, ?)", [buyer_best.floID, cur_price, quantity]])
//Record transaction
@ -305,7 +303,7 @@ function updateBalance(seller_best, buyer_best, txQueries, cur_price, quantity)
function getAccountDetails(floID) {
return new Promise((resolve, reject) => {
let select = [];
select.push(["rupeeBalance", "Users"]);
select.push(["rupeeBalance", "Cash"]);
select.push(["base, quantity", "Vault"]);
select.push(["id, quantity, minPrice, time_placed", "SellOrder"]);
select.push(["id, quantity, maxPrice, time_placed", "BuyOrder"]);
@ -517,7 +515,7 @@ function confirmDepositRupee() {
txQueries.push(["INSERT INTO inputFLO(txid, floID, amount, status) VALUES (?, ?, ?)", [req.txid, req.floID, amount_flo, "SUCCESS"]]);
}
txQueries.push(["UPDATE inputRupee SET status=? WHERE id=?", ["SUCCESS", req.id]]);
txQueries.push(["UPDATE Users SET rupeeBalance=rupeeBalance+? WHERE floID=?", [amount_rupee, req.floID]]);
txQueries.push(["UPDATE Cash SET rupeeBalance=rupeeBalance+? WHERE floID=?", [amount_rupee, req.floID]]);
DB.transaction(txQueries)
.then(result => console.debug("Rupee deposited for ", req.floID))
.catch(error => console.error(error));
@ -571,7 +569,7 @@ function withdrawRupee(floID, amount) {
let available = total - locked;
if (available < required_flo)
return reject(INVALID(`Insufficient FLO (Some FLO are locked in sell orders)! Required ${required_flo} FLO to withdraw tokens`));
DB.query("SELECT rupeeBalance FROM Users WHERE floID=?", [floID]).then(result => {
DB.query("SELECT rupeeBalance FROM Cash WHERE floID=?", [floID]).then(result => {
if (result.length < 1)
return reject(INVALID(`FLO_ID: ${floID} not registered`));
if (result[0].rupeeBalance < amount)
@ -590,7 +588,7 @@ function withdrawRupee(floID, amount) {
}
if (rem > 0) //should not happen AS the total and net is checked already
return reject(INVALID("Insufficient FLO"));
txQueries.push(["UPDATE Users SET rupeeBalance=rupeeBalance-? WHERE floID=?", [amount, floID]]);
txQueries.push(["UPDATE Cash SET rupeeBalance=rupeeBalance-? WHERE floID=?", [amount, floID]]);
DB.transaction(txQueries).then(result => {
//Send FLO to user via blockchain API

View File

@ -64,7 +64,10 @@ function SignUp(req, res) {
}, data.sign, data.pubKey);
if (req_str instanceof INVALID)
return res.status(INVALID.e_code).send(req_str.message);
DB.query("INSERT INTO Users(floID, pubKey) VALUES (?, ?)", [data.floID, data.pubKey]).then(result => {
let txQueries = [];
txQueries.push(["INSERT INTO Users(floID, pubKey) VALUES (?, ?)", [data.floID, data.pubKey]]);
txQueries.push(["INSERT INTO Cash (floID) Values (?)", data.floID]);
DB.transaction(txQueries).then(_ => {
storeRequest(data.floID, req_str, data.sign);
res.send("Account Created");
}).catch(error => {