Restoration

This commit is contained in:
sairajzero 2021-12-30 06:33:10 +05:30
parent 4b9a5e656b
commit c4732f5831
2 changed files with 113 additions and 100 deletions

View File

@ -1,44 +1,29 @@
'use strict';
// Sync timing constants (ms): WAIT_TIME is the response-wait threshold
// (its use is elided in this view — TODO confirm against full file),
// BACKUP_INTERVAL is how often a sync round is started.
const WAIT_TIME = 30 * 60 * 1000,
    BACKUP_INTERVAL = 10 * 60 * 1000;
var DB; //Container for Database connection (injected via module.exports setter)
var masterWS; //Container for Master websocket connection
var intervalID = null; //Timer id of the periodic sync started by startIntervalSync
/**
 * Begins periodic backup synchronisation against the master node.
 * Registers the message handler on the given websocket, remembers it as
 * masterWS, cancels any sync already running, then kicks off an immediate
 * sync and schedules repeats every BACKUP_INTERVAL ms.
 * @param {WebSocket} ws - open websocket connection to the master node
 */
function startIntervalSync(ws) {
    //set masterWS
    ws.on('message', processDataFromMaster);
    masterWS = ws;
    //stop existing sync
    stopIntervalSync();
    //start sync
    requestInstance.open();
    intervalID = setInterval(requestInstance.open, BACKUP_INTERVAL);
}
/**
 * Stops the periodic backup synchronisation, if one is running.
 * Safe to call when no sync is active (intervalID is null).
 */
function stopIntervalSync() {
    if (intervalID !== null) {
        clearInterval(intervalID);
        intervalID = null;
    }
}
function requestBackupSync(ws) { function requestBackupSync(ws) {
@ -91,20 +76,16 @@ requestInstance.open = function() {
else else
return; return;
} }
connectToWSServer(config["main_server_url"]).then(ws => { if (!masterWS)
requestBackupSync(ws).then(request => { return console.warn("Not connected to master");
self.request = request; requestBackupSync(masterWS).then(request => {
self.ws = ws; self.request = request;
ws.on('message', processBackupData) self.ws = masterWS;
ws.onclose = _ => console.log("Connection was Interrupted");
}).catch(error => console.error(error))
}).catch(error => console.error(error)) }).catch(error => console.error(error))
} }
requestInstance.close = function() { requestInstance.close = function() {
const self = this; const self = this;
self.ws.onclose = () => null;
self.ws.close();
self.ws = null; self.ws = null;
self.delete_sync = null; self.delete_sync = null;
self.add_sync = null; self.add_sync = null;
@ -116,52 +97,56 @@ requestInstance.close = function() {
self.last_response_time = null; self.last_response_time = null;
} }
/**
 * Websocket 'message' handler for data arriving from the master node.
 * Parses the raw message as JSON and routes SYNC_* messages to
 * processBackupData; parse errors and messages without a usable
 * `mode` are caught and logged rather than crashing the handler.
 * @param {string|Buffer} message - raw websocket payload (JSON text)
 */
function processDataFromMaster(message) {
    try {
        message = JSON.parse(message);
        console.debug(message);
        if (message.mode.startsWith("SYNC"))
            processBackupData(message);
    } catch (error) {
        console.error(error);
    }
}
/**
 * Handles one parsed SYNC_* message of a backup-sync session, updating the
 * shared requestInstance bookkeeping and dispatching on response.mode.
 * NOTE(review): requestInstance, updateBackupTable, deleteData,
 * addUpdateData and addImmutableData are defined elsewhere in this module.
 * @param {Object} response - parsed message from the master node
 */
function processBackupData(response) {
    const inst = requestInstance;
    inst.last_response_time = Date.now();
    const mode = response.mode;
    if (mode === "SYNC_ERROR") {
        // Master reported a sync error: log it and tear down this session.
        console.log(response.error);
        inst.close();
    } else if (mode === "SYNC_END") {
        if (response.status) {
            if (inst.total_add !== inst.add_sync)
                console.info(`Backup Sync Instance finished!, ${inst.total_add - inst.add_sync} packets not received.`);
            else
                console.info("Backup Sync Instance finished successfully");
            updateBackupTable(inst.add_data, inst.delete_data)
        } else
            console.info("Backup Sync was not successful! Failed info: ", response.info);
        inst.close();
    } else if (mode === "SYNC_DELETE") {
        inst.delete_data = response.delete_data;
        inst.delete_sync += 1;
        deleteData(response.delete_data);
    } else if (mode === "SYNC_ADD_UPDATE_HEADER") {
        // Header announces how many ADD_UPDATE packets to expect.
        inst.add_data = response.add_data;
        inst.total_add = Object.keys(response.add_data).length;
    } else if (mode === "SYNC_ADD_UPDATE") {
        inst.add_sync += 1;
        addUpdateData(response.table, response.data);
    } else if (mode === "SYNC_ADD_IMMUTABLE") {
        inst.immutable_sync += 1;
        addImmutableData(response.table, response.data);
    }
}
function updateBackupTable(add_data, delete_data) { function updateBackupTable(add_data, delete_data) {
//update _backup table for added data //update _backup table for added data
DB.transaction(add_data.map(r => [ DB.transaction(add_data.map(r => [
@ -212,4 +197,10 @@ function addImmutableData(table, data) {
// Normalises MySQL-bound datetime strings: strips the trailing 'Z' from
// ISO timestamps ending in ".mmmZ"; any other value passes through unchanged.
const validateValue = val => (typeof val === "string" && /\.\d{3}Z$/.test(val)) ? val.substring(0, val.length - 1) : val;
startBackupStorage(); module.exports = {
set DB(db) {
DB = db;
},
start: startIntervalSync,
stop: stopIntervalSync
}

View File

@ -1,7 +1,9 @@
'use strict';
// Fraction of key shares needed to reconstruct a sink key (Shamir threshold).
const shareThreshold = 70 / 100;
var DB; //Container for database
//Backup Transfer
function sendBackup(timestamp, ws) { function sendBackup(timestamp, ws) {
if (!timestamp) timestamp = 0; if (!timestamp) timestamp = 0;
else if (typeof timestamp === "string" && /\.\d{3}Z$/.test(timestamp)) else if (typeof timestamp === "string" && /\.\d{3}Z$/.test(timestamp))
@ -17,14 +19,14 @@ function sendBackup(timestamp, ws) {
if (failedSync.length) { if (failedSync.length) {
console.info("Backup Sync Failed:", failedSync); console.info("Backup Sync Failed:", failedSync);
ws.send(JSON.stringify({ ws.send(JSON.stringify({
mode: "END", mode: "SYNC_END",
status: false, status: false,
info: failedSync info: failedSync
})); }));
} else { } else {
console.info("Backup Sync completed"); console.info("Backup Sync completed");
ws.send(JSON.stringify({ ws.send(JSON.stringify({
mode: "END", mode: "SYNC_END",
status: true status: true
})); }));
} }
@ -35,7 +37,7 @@ function send_deleteSync(timestamp, ws) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
DB.query("SELECT * FROM _backup WHERE mode is NULL AND timestamp > ?", [timestamp]).then(result => { DB.query("SELECT * FROM _backup WHERE mode is NULL AND timestamp > ?", [timestamp]).then(result => {
ws.send(JSON.stringify({ ws.send(JSON.stringify({
mode: "DELETE", mode: "SYNC_DELETE",
delete_data: result delete_data: result
})); }));
resolve("deleteSync"); resolve("deleteSync");
@ -52,7 +54,7 @@ function send_dataSync(timestamp, ws) {
.then(data => { .then(data => {
ws.send(JSON.stringify({ ws.send(JSON.stringify({
table, table,
mode: "ADD_UPDATE", mode: "SYNC_ADD_UPDATE",
data data
})); }));
res(table); res(table);
@ -66,7 +68,7 @@ function send_dataSync(timestamp, ws) {
let sync_needed = {}; let sync_needed = {};
result.forEach(r => r.t_name in sync_needed ? sync_needed[r.t_name].push(r.id) : sync_needed[r.t_name] = [r.id]); result.forEach(r => r.t_name in sync_needed ? sync_needed[r.t_name].push(r.id) : sync_needed[r.t_name] = [r.id]);
ws.send(JSON.stringify({ ws.send(JSON.stringify({
mode: "ADD_UPDATE_HEADER", mode: "SYNC_ADD_UPDATE_HEADER",
add_data: result add_data: result
})); }));
let promises = []; let promises = [];
@ -99,7 +101,7 @@ function send_dataImmutable(timestamp, ws) {
.then(data => { .then(data => {
ws.send(JSON.stringify({ ws.send(JSON.stringify({
table, table,
mode: "ADD_IMMUTABLE", mode: "SYNC_ADD_IMMUTABLE",
data data
})); }));
res(table); res(table);
@ -124,15 +126,28 @@ function send_dataImmutable(timestamp, ws) {
}) })
} }
//Shares
/**
 * Creates a fresh sink ID and splits its private key into Shamir secret
 * shares, one per next-node, requiring ceil(n * shareThreshold) shares
 * to reconstruct.
 * NOTE(review): floCrypto, KB and myFloID are presumed to be globals
 * provided elsewhere in this project — confirm against full file.
 * @returns {Object} sink with .shares mapping node floID -> key share
 */
function generateNewSink() {
    let sink = floCrypto.generateNewID();
    let nextNodes = KB.nextNode(myFloID, null);
    let shares = floCrypto.createShamirsSecretShares(sink.privKey, nextNodes.length, Math.ceil(nextNodes.length * shareThreshold));
    sink.shares = {};
    for (let i in nextNodes)
        sink.shares[nextNodes[i]] = shares[i];
    return sink;
}
/**
 * Sends one sink key share to a node over its websocket.
 * Fix: the original used shorthand `keyShare` in the object literal while
 * the parameter is named `share`, causing a ReferenceError at runtime;
 * the share is now sent explicitly as the `keyShare` field.
 * @param {WebSocket} ws - connection to the receiving node
 * @param {string} sinkID - ID of the sink the share belongs to
 * @param {string} share - this node's Shamir key share
 */
function sendShare(ws, sinkID, share) {
    ws.send(JSON.stringify({
        command: "SINK_SHARE",
        sinkID,
        keyShare: share
    }));
}
//Transmitter
var nodeList; //Container for (backup) node list
function startBackupTransmitter(wss) {
wss.on('connection', ws => { wss.on('connection', ws => {
ws.on('message', message => { ws.on('message', message => {
//verify if from a backup node //verify if from a backup node
@ -158,13 +173,20 @@ function startBackupTransmitter(db, port, backupIDs) {
} catch (error) { } catch (error) {
console.error(error); console.error(error);
ws.send(JSON.stringify({ ws.send(JSON.stringify({
mode: "SYNC_ERROR",
error: 'Unable to process the request!' error: 'Unable to process the request!'
})); }));
} }
}); });
}); });
console.log("Backup Transmitter running in port", port);
} }
module.exports = startBackupTransmitter; module.exports = {
init: startBackupTransmitter,
set nodeList(ids) {
nodeList = ids;
},
set DB(db) {
DB = db;
}
};