pre cleanup. Running entirely on mongo.

tenthirtyone 2017-08-19 02:57:09 -04:00
parent 5a89cf17d2
commit 697388f1ce
6 changed files with 62 additions and 21 deletions

View File

@@ -28,7 +28,7 @@ const config = {
ticker_url: 'https://www.bitstamp.net/api/ticker/',
ticker_prop: 'bitstamp',
max_blocks: 72,
max_txs: 10,
max_txs: 100,
request_ttl: 100000,
},
};
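Note: max_txs is read back as MAX_TXS = config.api.max_txs in the transaction routes and DB helpers further down, where it is passed as the page size to getTxByBlock and getTxByAddress. A minimal sketch of the skip/limit pagination that implies, assuming a zero-based page number; the pageQuery helper below is illustrative and not part of this commit:

// Illustrative sketch only: how a page size like MAX_TXS maps onto a mongo cursor.
const MAX_TXS = config.api.max_txs; // 100 after this change, so each page can hold up to 100 txs
function pageQuery(collection, params, pageNum, cb) {
  collection
    .find(params)
    .skip(pageNum * MAX_TXS) // assumed zero-based page number
    .limit(MAX_TXS)
    .toArray(cb);
}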

View File

@@ -51,7 +51,7 @@ module.exports = function BlockAPI(router) {
limit,
(err, blocks) => {
if (err) {
logger.log('err',
logger.log('error',
`/blocks: ${err}`);
return res.status(404).send();
}
@@ -80,7 +80,7 @@ module.exports = function BlockAPI(router) {
1,
(err, block) => {
if (err) {
logger.log('err',
logger.log('error',
`/rawblock/:blockHash: ${err}`);
return res.status(404).send();
}
@@ -97,7 +97,7 @@ module.exports = function BlockAPI(router) {
1,
(err, block) => {
if (err) {
logger.log('err',
logger.log('error',
`/block-index/:height: ${err}`);
return res.status(404).send();
}
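Note: the 'err' -> 'error' changes in this file and in the status/transaction routes below line up with the default npm log levels (error, warn, info, verbose, debug, silly) that winston-style loggers expect; 'err' is not one of them if the logger follows those levels. A small sketch of the assumed setup, purely illustrative since the logger module itself is not part of this diff:

// Illustrative only: a winston 2.x-style logger using the default npm levels.
const winston = require('winston');
const logger = new winston.Logger({
  transports: [new winston.transports.Console()],
});
logger.log('error', '/blocks: lookup failed'); // 'error' is a recognized level; 'err' is not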

View File

@@ -48,12 +48,12 @@ module.exports = function statusAPI(router) {
} else {
getStatus((err, status) => {
if (err) {
logger.log('err',
logger.log('error',
`/status getStatus: ${err}`);
return res.status(404).send(err);
}
if (!status) {
logger.log('err',
logger.log('error',
'/status getStatus: no Status');
return res.status(404).send();
}
@@ -79,12 +79,12 @@ module.exports = function statusAPI(router) {
router.get('/sync', (req, res) => {
getStatus((err, status) => {
if (err) {
logger.log('err',
logger.log('error',
`/sync: ${err}`);
return res.status(404).send(err);
}
if (!status) {
logger.log('err',
logger.log('error',
'/sync: no status');
return res.status(404).send();
}

View File

@@ -17,8 +17,8 @@ module.exports = function transactionAPI(router) {
db.txs.getTxById(txid, (err, transaction) => {
if (err) {
logger.log('err',
`getTxById: ${err}`);
logger.log('error',
`/tx/:tid getTxById: ${err.err}`);
return res.status(404).send();
}
@@ -63,7 +63,7 @@ module.exports = function transactionAPI(router) {
db.txs.getTxCountByBlock(req.query.block, (err, count) => {
if (err) {
logger.log('err',
logger.log('error',
`getTxByBlock ${err}`);
return res.status(404).send();
}
@@ -71,7 +71,7 @@ module.exports = function transactionAPI(router) {
return db.txs.getTxByBlock(req.query.block, pageNum, MAX_TXS, (error, txs) => {
if (error) {
logger.log('err',
logger.log('error',
`getTxByBlock ${error}`);
return res.status(404).send();
}
@@ -105,7 +105,7 @@ module.exports = function transactionAPI(router) {
db.txs.getTxCountByAddress(req.query.address, (err, count) => {
if (err) {
logger.log('err',
logger.log('error',
`getTxByBlock ${err}`);
return res.status(404).send();
}
@@ -113,7 +113,7 @@ module.exports = function transactionAPI(router) {
return db.txs.getTxByAddress(req.query.address, pageNum, MAX_TXS, (error, txs) => {
if (error) {
logger.log('err',
logger.log('error',
`getTxByBlock ${error}`);
return res.status(404).send();
}

View File

@@ -8,7 +8,7 @@ const MAX_TXS = config.api.max_txs;
function getTransactions(params, options, limit, skip, cb) {
// Do not return mongo ids
const defaultOptions = { _id: 0 };
const defaultOptions = { };
// Copy over mongo options
Object.assign(defaultOptions, options);
// Simple sanitizing
@@ -60,13 +60,13 @@ function getTransaction(params, options, limit, skip, cb) {
function getTxById(txid, cb) {
getTransaction(
{ hash: txid },
{},
{ },
1,
0,
(err, transaction) => {
if (err) {
logger.log('err',
`/rawblock/:blockHash: ${err}`);
logger.log('error',
`getTxById: ${txid} ${err.err}`);
return cb(err);
}
return cb(null, transaction);
@@ -120,7 +120,7 @@ function getTxCountByBlock(blockHash, cb) {
{ block: blockHash },
(err, count) => {
if (err) {
logger.log('err',
logger.log('error',
`getTxCountByBlock ${err}`);
return cb(err);
}
@@ -136,7 +136,7 @@ function getTxCountByAddress(address, cb) {
},
(err, count) => {
if (err) {
logger.log('err',
logger.log('error',
`getTxCountByAddress ${err}`);
return cb(err);
}
@@ -156,7 +156,7 @@ function updateInput(txid, inputid, value, address) {
},
(err, tx) => {
if (err) {
logger.log('err',
logger.log('error',
`updateInput: ${err}`);
}
},
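Note: reading each pair as old line first, new line second, the default projection in getTransactions changes from { _id: 0 } to { }, so mongo document ids come back unless a caller excludes them; that appears to be what the new findEmptyInputs() pass in the parser below relies on when it calls updateInput(inputTx._id, input._id, ...). A short, self-contained sketch of how the Object.assign merge behaves after this change (the mergeProjection name and example calls are illustrative):

// After this change callers get _id back by default; a caller's projection still wins.
function mergeProjection(options) {
  const defaultOptions = { };             // was { _id: 0 }
  Object.assign(defaultOptions, options); // caller-supplied fields override the default
  return defaultOptions;
}
mergeProjection({ hash: 1 }); // => { hash: 1 }; mongo keeps _id in the results
mergeProjection({ });         // => { }; all fields, including _id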

View File

@@ -12,7 +12,10 @@ const db = require('../db');
// the last 20 that hasn't saved.
// Aggregate stuff will replace all of this.
let counter = 0;
function parse(entry, txs) {
counter++;
txs.forEach((tx) => {
const txJSON = tx.toJSON();
const txRAW = tx.toRaw();
@@ -58,10 +61,48 @@ function parse(entry, txs) {
if (err) {
logger.log('error', err.message);
}
// As long as this modulo is divisible by 20 we should be OK for now.
// Closer to 20 = chattier at start but ideal later on
if (counter % 20 === 0) {
findEmptyInputs();
counter = 0;
}
});
});
}
function findEmptyInputs() {
db.txs.getTransactions(
{
'inputs.prevout.hash': { $ne: '0000000000000000000000000000000000000000000000000000000000000000' },
'inputs.address': '',
},
{},
100,
0,
(err, txs) => {
if (err) {
return logger.log('error',
`No Empty Inputs found: ${err.err}`);
}
// For each tx with unmarked inputs
return txs.forEach((inputTx) => {
inputTx.inputs.forEach((input) => {
const txHash = input.prevout.hash;
const outIdx = input.prevout.index;
return db.txs.getTxById(txHash, (err, tx) => {
if (err) {
return logger.log('error',
`No Tx found: ${txHash} ${err.err}`);
}
return db.txs.updateInput(inputTx._id, input._id, tx.outputs[outIdx].value, tx.outputs[outIdx].address);
});
});
});
});
}
module.exports = {
parse,
};
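For reference, the new findEmptyInputs() pass fires on roughly every 20th call to parse(), pulls up to 100 transactions whose inputs still carry an empty address (skipping the all-zeros prevout hash used by coinbase inputs, which have no previous output to copy from), and backfills each input's value and address from the referenced output via getTxById and updateInput. A rough, hedged sketch of the "Aggregate stuff" the comment above anticipates, assuming MongoDB 3.2+ for $lookup, the 2.x node driver's callback form, and that db.txs wraps a collection named 'txs' (none of this is part of the commit):

// Rough sketch only: the same backfill lookup expressed as one aggregation
// instead of a getTxById round trip per input. 'txs' is assumed to be the
// underlying mongo collection behind db.txs; field names come from the queries above.
const COINBASE = '0000000000000000000000000000000000000000000000000000000000000000';
function findEmptyInputsAggregate(txs, cb) {
  txs.aggregate([
    { $unwind: '$inputs' },
    { $match: { 'inputs.address': '', 'inputs.prevout.hash': { $ne: COINBASE } } },
    { $limit: 100 },
    { $lookup: {
      from: 'txs',                      // assumed collection name
      localField: 'inputs.prevout.hash',
      foreignField: 'hash',
      as: 'prevTx',
    } },
  ], (err, rows) => {                   // rows is an array of joined docs in the 2.x driver
    if (err) return cb(err);
    // Each row pairs one empty input with prevTx[0]; the missing value and address
    // live at prevTx[0].outputs[row.inputs.prevout.index].
    return cb(null, rows);
  });
}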