Compare commits
No commits in common. "master" and "bitcore-node-rpc" have entirely different histories.
master
...
bitcore-no
@ -1 +1 @@
|
||||
repo_token: DvrDb09a8vhPlVf6DT4cGBjcFOi6DfZN1
|
||||
repo_token: 9YjZe8icW8I0TrZ45hAcLAYr7tlNxok55
|
||||
|
||||
74
.gitignore
vendored
74
.gitignore
vendored
@ -1,36 +1,42 @@
|
||||
node_modules/
|
||||
node_modules/*
|
||||
coverage/*
|
||||
.lock-wscript
|
||||
*.swp
|
||||
*.Makefile
|
||||
*.target.gyp.mk
|
||||
*.node
|
||||
*.sln
|
||||
*.sdf
|
||||
*.vcxproj
|
||||
*.suo
|
||||
*.opensdf
|
||||
*.filters
|
||||
*.user
|
||||
*.project
|
||||
**/*.dylib
|
||||
**/*.so
|
||||
**/*.old
|
||||
**/*.files
|
||||
**/*.config
|
||||
**/*.creator
|
||||
# from https://github.com/github/gitignore/blob/master/Node.gitignore
|
||||
lib-cov
|
||||
*.seed
|
||||
*.log
|
||||
*.tmp
|
||||
*.tmp.*
|
||||
*.csv
|
||||
*.dat
|
||||
*.out
|
||||
*.pid
|
||||
*.gz
|
||||
*.swp
|
||||
tags
|
||||
pids
|
||||
logs
|
||||
results
|
||||
build
|
||||
|
||||
node_modules
|
||||
|
||||
# extras
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
.project
|
||||
peerdb.json
|
||||
|
||||
npm-debug.log
|
||||
.nodemonignore
|
||||
|
||||
.DS_Store
|
||||
bin/florincoin*
|
||||
bin/SHA256SUMS.asc
|
||||
regtest/data/node1/regtest
|
||||
regtest/data/node2/regtest
|
||||
regtest/data/node3/regtest
|
||||
flocore-node.json*
|
||||
*.bak
|
||||
*.orig
|
||||
lib/services/insight-api
|
||||
testnet/*
|
||||
db/txs/*
|
||||
db/txs
|
||||
db/testnet/txs/*
|
||||
db/testnet/txs
|
||||
db/blocks/*
|
||||
db/blocks
|
||||
db/testnet/blocks/*
|
||||
db/testnet/blocks
|
||||
|
||||
README.html
|
||||
public
|
||||
coverage
|
||||
report
|
||||
|
||||
83
.jshintrc
83
.jshintrc
@ -1,42 +1,45 @@
|
||||
{
|
||||
"bitwise": false,
|
||||
"browser": true,
|
||||
"camelcase": false,
|
||||
"curly": true,
|
||||
"devel": false,
|
||||
"eqeqeq": true,
|
||||
"esnext": true,
|
||||
"freeze": true,
|
||||
"immed": true,
|
||||
"indent": 2,
|
||||
"latedef": true,
|
||||
"newcap": false,
|
||||
"noarg": true,
|
||||
"node": true,
|
||||
"noempty": true,
|
||||
"nonew": true,
|
||||
"quotmark": "single",
|
||||
"regexp": true,
|
||||
"smarttabs": false,
|
||||
"strict": true,
|
||||
"trailing": true,
|
||||
"undef": true,
|
||||
"unused": true,
|
||||
"maxparams": 6,
|
||||
"maxstatements": 25,
|
||||
"maxcomplexity": 10,
|
||||
"maxdepth": 4,
|
||||
"maxlen": 140,
|
||||
"multistr": true,
|
||||
"predef": [
|
||||
"after",
|
||||
"afterEach",
|
||||
"before",
|
||||
"beforeEach",
|
||||
"describe",
|
||||
"exports",
|
||||
"it",
|
||||
"module",
|
||||
"require"
|
||||
]
|
||||
"bitwise": false, // Prohibit bitwise operators (&, |, ^, etc.).
|
||||
"browser": true, // Standard browser globals e.g. `window`, `document`.
|
||||
"camelcase": false, // Permit only camelcase for `var` and `object indexes`.
|
||||
"curly": true, // Require {} for every new block or scope.
|
||||
"devel": false, // Allow development statements e.g. `console.log();`.
|
||||
"eqeqeq": true, // Require triple equals i.e. `===`.
|
||||
"esnext": true, // Allow ES.next specific features such as `const` and `let`.
|
||||
"freeze": true, // Forbid overwriting prototypes of native objects such as Array, Date and so on.
|
||||
"immed": true, // Require immediate invocations to be wrapped in parens e.g. `( function(){}() );`
|
||||
"indent": 2, // Specify indentation spacing
|
||||
"latedef": true, // Prohibit variable use before definition.
|
||||
"newcap": false, // Require capitalization of all constructor functions e.g. `new F()`.
|
||||
"noarg": true, // Prohibit use of `arguments.caller` and `arguments.callee`.
|
||||
"node": true, // Enable globals available when code is running inside of the NodeJS runtime environment.
|
||||
"noempty": true, // Prohibit use of empty blocks.
|
||||
"nonew": true, // Prohibits the use of constructor functions for side-effects
|
||||
"quotmark": "single", // Define quotes to string values.
|
||||
"regexp": true, // Prohibit `.` and `[^...]` in regular expressions.
|
||||
"smarttabs": false, // Supress warnings about mixed tabs and spaces
|
||||
"strict": true, // Require `use strict` pragma in every file.
|
||||
"trailing": true, // Prohibit trailing whitespaces.
|
||||
"undef": true, // Require all non-global variables be declared before they are used.
|
||||
"unused": true, // Warn unused variables.
|
||||
|
||||
"maxparams": 4, // Maximum number of parameters for a function
|
||||
"maxstatements": 15, // Maximum number of statements in a function
|
||||
"maxcomplexity": 6, // Cyclomatic complexity (http://en.wikipedia.org/wiki/Cyclomatic_complexity)
|
||||
"maxdepth": 4, // Maximum depth of nested control structures
|
||||
"maxlen": 120, // Maximum number of cols in a line
|
||||
"multistr": true, // Allow use of multiline EOL escaping
|
||||
|
||||
"predef": [ // Extra globals.
|
||||
"after",
|
||||
"afterEach",
|
||||
"before",
|
||||
"beforeEach",
|
||||
"define",
|
||||
"describe",
|
||||
"exports",
|
||||
"it",
|
||||
"module",
|
||||
"require"
|
||||
]
|
||||
}
|
||||
|
||||
33
.npmignore
33
.npmignore
@ -1,33 +0,0 @@
|
||||
node_modules/
|
||||
node_modules/*
|
||||
coverage/*
|
||||
.lock-wscript
|
||||
*.swp
|
||||
*.Makefile
|
||||
*.target.gyp.mk
|
||||
*.node
|
||||
*.sln
|
||||
*.sdf
|
||||
*.vcxproj
|
||||
*.suo
|
||||
*.opensdf
|
||||
*.filters
|
||||
*.user
|
||||
*.project
|
||||
**/*.dylib
|
||||
**/*.so
|
||||
**/*.old
|
||||
**/*.files
|
||||
**/*.config
|
||||
**/*.creator
|
||||
*.log
|
||||
.DS_Store
|
||||
bin/florincoin*
|
||||
bin/SHA256SUMS.asc
|
||||
regtest/data/node1/regtest
|
||||
regtest/data/node2/regtest
|
||||
regtest/data/node3/regtest
|
||||
flocore-node.json*
|
||||
*.bak
|
||||
*.orig
|
||||
lib/services/insight-api
|
||||
@ -1,5 +1,7 @@
|
||||
dist: trusty
|
||||
sudo: false
|
||||
language: node_js
|
||||
node_js:
|
||||
- 8
|
||||
- '0.10'
|
||||
install:
|
||||
- npm install
|
||||
after_script:
|
||||
- gulp coveralls
|
||||
|
||||
131
CONTRIBUTING.md
Normal file
131
CONTRIBUTING.md
Normal file
@ -0,0 +1,131 @@
|
||||
Contributing to bitcore-node
|
||||
=======
|
||||
|
||||
## Quick Checklist
|
||||
|
||||
Make sure:
|
||||
|
||||
* `gulp lint` doesn't complain about your changes
|
||||
* `gulp test` passes all the tests
|
||||
* `gulp coverage` covers 100% of the branches of your code
|
||||
|
||||
## Design Guidelines
|
||||
|
||||
These are some global design goals in bitcore that any change must adhere.
|
||||
|
||||
### D1 - Naming Matters
|
||||
|
||||
We take our time with picking names. Code is going to be written once, and read hundreds of times.
|
||||
|
||||
We were inspired to name this rule first due to Uncle Bob's great work *Clean Code*, which has a whole chapter on this subject.
|
||||
|
||||
### D2 - Tests
|
||||
|
||||
Write a test for all your code. We encourage Test Driven Development so we know when our code is right. We have increased test coverage from 80% to around 95% and are targeting 100% as we move towards our 1.0 release.
|
||||
|
||||
## Style Guidelines
|
||||
|
||||
The design guidelines have quite a high abstraction level. These style guidelines are more concrete and easier to apply, and also more opinionated. The design guidelines mentioned above are the way we think about general software development and we believe they should be present in any software project.
|
||||
|
||||
### General
|
||||
|
||||
#### G0 - Default to Felixge's Style Guide
|
||||
|
||||
Follow this Node.js Style Guide: https://github.com/felixge/node-style-guide#nodejs-style-guide
|
||||
|
||||
#### G1 - No Magic Numbers
|
||||
|
||||
Avoid constants in the code as much as possible. Magic strings are also magic numbers.
|
||||
|
||||
#### G2 - Internal Objects Should be Instances
|
||||
|
||||
If a class has a `publicKey` member, for instance, that should be a `PublicKey` instance.
|
||||
|
||||
#### G3 - Internal Amounts Must be Integers Representing Satoshis
|
||||
|
||||
Avoid representation errors by always dealing with satoshis. For conversion for frontends, use the `Unit` class.
|
||||
|
||||
#### G4 - Internal Network References Must be Network Instances
|
||||
|
||||
A special case for [G2](#g2---general-internal-objects-should-be-instances) all network references must be `Network` instances (see `bitcore/lib/network.js`), but when returned to the user, its `.name` property should be used.
|
||||
|
||||
#### G5 - Objects Should Display Nicely in the Console
|
||||
|
||||
Write a `.inspect()` method so an instance can be easily debugged in the console.
|
||||
|
||||
#### G6 - Naming Utility Namespaces
|
||||
|
||||
Name them in CamelCase, as they are namespaces.
|
||||
|
||||
DO:
|
||||
```javascript
|
||||
var BufferUtil = require('./util/buffer');
|
||||
```
|
||||
DON'T:
|
||||
```javascript
|
||||
var bufferUtil = require('./util/buffer');
|
||||
```
|
||||
|
||||
### Interface
|
||||
|
||||
#### I1 - Code that Fails Early
|
||||
|
||||
In order to deal with JavaScript's weak typing and confusing errors, we ask our code to fail as soon as possible when an unexpected input was provided.
|
||||
|
||||
There's a module called `util/preconditions`, loosely based on `preconditions.js`, based on `guava`, that we use for state and argument checking. It should be trivial to use. We recommend using it on all methods, in order to improve robustness and consistency.
|
||||
|
||||
```javascript
|
||||
$.checkState(something === anotherthing, 'Expected something to be anotherthing');
|
||||
$.checkArgument(something < 100, 'something', 'must be less than 100');
|
||||
$.checkArgumentType(something, PrivateKey, 'something'); // The third argument is a helper to mention the name of the argument
|
||||
$.checkArgumentType(something, PrivateKey); // but it's optional (will show up as "(unknown argument)")
|
||||
```
|
||||
|
||||
### Testing
|
||||
|
||||
#### T1 - Tests Must be Written Elegantly
|
||||
|
||||
Style guidelines are not relaxed for tests. Tests are a good way to show how to use the library, and maintaining them is extremely necessary.
|
||||
|
||||
Don't write long tests, write helper functions to make them be as short and concise as possible (they should take just a few lines each), and use good variable names.
|
||||
|
||||
#### T2 - Tests Must not be Random
|
||||
|
||||
Inputs for tests should not be generated randomly. Also, the type and structure of outputs should be checked.
|
||||
|
||||
#### T3 - Data for Tests Included in a JSON File
|
||||
|
||||
If possible, data for tests should be included in a JSON file in the `test/data` directory. This improves interoperability with other libraries and keeps tests cleaner.
|
||||
|
||||
## Pull Request Workflow
|
||||
|
||||
Our workflow is based on GitHub's pull requests. We use feature branches, prepended with: `test`, `feature`, `fix`, `refactor`, or `remove` according to the change the branch introduces. Some examples for such branches are:
|
||||
```sh
|
||||
git checkout -b test/some-module
|
||||
git checkout -b feature/some-new-stuff
|
||||
git checkout -b fix/some-bug
|
||||
git checkout -b remove/some-file
|
||||
```
|
||||
|
||||
We expect pull requests to be rebased to the master branch before merging:
|
||||
```sh
|
||||
git remote add bitpay git@github.com:bitpay/bitcore-node.git
|
||||
git pull --rebase bitpay master
|
||||
```
|
||||
|
||||
Note that we require rebasing your branch instead of merging it, for commit readability reasons.
|
||||
|
||||
After that, you can push the changes to your fork, by doing:
|
||||
```sh
|
||||
git push origin your_branch_name
|
||||
git push origin feature/some-new-stuff
|
||||
git push origin fix/some-bug
|
||||
```
|
||||
Finally go to [github.com/bitpay/bitcore-node](https://github.com/bitpay/bitcore-node) in your web browser and issue a new pull request.
|
||||
|
||||
Main contributors will review your code and possibly ask for changes before your code is pulled in to the main repository. We'll check that all tests pass, review the coding style, and check for general code correctness. If everything is OK, we'll merge your pull request and your code will be part of bitcore.
|
||||
|
||||
If you have any questions feel free to post them to
|
||||
[github.com/bitpay/bitcore-node/issues](https://github.com/bitpay/bitcore-node/issues).
|
||||
|
||||
Thanks for your time and code!
|
||||
94
Gruntfile.js
Normal file
94
Gruntfile.js
Normal file
@ -0,0 +1,94 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function(grunt) {
|
||||
|
||||
//Load NPM tasks
|
||||
grunt.loadNpmTasks('grunt-contrib-watch');
|
||||
grunt.loadNpmTasks('grunt-mocha-test');
|
||||
grunt.loadNpmTasks('grunt-nodemon');
|
||||
grunt.loadNpmTasks('grunt-concurrent');
|
||||
grunt.loadNpmTasks('grunt-env');
|
||||
grunt.loadNpmTasks('grunt-markdown');
|
||||
|
||||
// Project Configuration
|
||||
grunt.initConfig({
|
||||
pkg: grunt.file.readJSON('package.json'),
|
||||
watch: {
|
||||
readme: {
|
||||
files: ['README.md'],
|
||||
tasks: ['markdown']
|
||||
},
|
||||
js: {
|
||||
files: ['Gruntfile.js', 'index.js', 'app/**/*.js'],
|
||||
options: {
|
||||
livereload: true,
|
||||
},
|
||||
},
|
||||
test: {
|
||||
files: ['test/**/*.js', 'test/*.js','app/**/*.js'],
|
||||
tasks: ['test'],
|
||||
}
|
||||
},
|
||||
jshint: {
|
||||
all: {
|
||||
src: ['Gruntfile.js', 'index.js', 'app/**/*.js', 'lib/*.js', 'config/*.js'],
|
||||
options: {
|
||||
jshintrc: true
|
||||
}
|
||||
}
|
||||
},
|
||||
mochaTest: {
|
||||
options: {
|
||||
reporter: 'spec',
|
||||
},
|
||||
src: ['test/**/*.js'],
|
||||
},
|
||||
nodemon: {
|
||||
dev: {
|
||||
script: 'index.js',
|
||||
options: {
|
||||
args: [],
|
||||
ignore: ['test/**/*', 'util/**/*', 'dev-util/**/*'],
|
||||
// nodeArgs: ['--debug'],
|
||||
delayTime: 1,
|
||||
env: {
|
||||
PORT: 3000
|
||||
},
|
||||
cwd: __dirname
|
||||
}
|
||||
}
|
||||
},
|
||||
concurrent: {
|
||||
tasks: ['nodemon', 'watch'],
|
||||
options: {
|
||||
logConcurrentOutput: true
|
||||
}
|
||||
},
|
||||
env: {
|
||||
test: {
|
||||
NODE_ENV: 'test'
|
||||
}
|
||||
},
|
||||
markdown: {
|
||||
all: {
|
||||
files: [
|
||||
{
|
||||
expand: true,
|
||||
src: 'README.md',
|
||||
dest: '.',
|
||||
ext: '.html'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
//Making grunt default to force in order not to break the project.
|
||||
grunt.option('force', true);
|
||||
|
||||
//Default task(s).
|
||||
grunt.registerTask('default', ['concurrent']);
|
||||
|
||||
//Test task.
|
||||
grunt.registerTask('test', ['env:test', 'mochaTest']);
|
||||
};
|
||||
5
LICENSE
5
LICENSE
@ -1,7 +1,4 @@
|
||||
Copyright (c) 2014-2015 BitPay, Inc.
|
||||
|
||||
Parts of this software are based on Florincoin Core
|
||||
Copyright (c) 2009-2015 The Florincoin Core developers
|
||||
Copyright (c) 2013-2015 BitPay, Inc.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
||||
458
README.md
458
README.md
@ -1,71 +1,427 @@
|
||||
Flocore Node
|
||||
============
|
||||
Bitcore Node
|
||||
=======
|
||||
|
||||
A Florincoin blockchain indexing and query service. Intended to be used with as a Florincoin full node or in conjunction with a Florincoin full node.
|
||||
|
||||
## Upgrading from previous versions of Flocore Node
|
||||
|
||||
There is no upgrade path from previous versions of Flocore Node due to the removal of the included Florincoin Core software. By installing this version, you must resynchronize the indexes from scratch.
|
||||
|
||||
## Install
|
||||
|
||||
```bash
|
||||
npm install
|
||||
./bin/flocore-node start
|
||||
```
|
||||
|
||||
Note: A default configuration file is placed in the flocore user's home directory (~/.flocore/flocore-node.json). Or, alternatively, you can copy the provided "flocore-node.json.sample" file to the project's root directory as flocore-node.json and edit it for your preferences. If you don't have a preferred block source (trusted peer), [Bcoin](https://github.com/bcoin-org/bcoin) will be started automatically and synchronized with the mainnet chain.
|
||||
[](https://www.npmjs.org/package/bitcore-node)
|
||||
[](https://travis-ci.org/bitpay/bitcore-node)
|
||||
[](https://coveralls.io/r/bitpay/bitcore-node)
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Node.js v8.2.0+
|
||||
- ~500GB of disk storage
|
||||
- ~4GB of RAM
|
||||
* **bitcoind** - Download and Install [Bitcoin](http://bitcoin.org/en/download)
|
||||
|
||||
`bitcore-node` needs a *trusted* bitcoind node to run. It will connect to the node
|
||||
through the RPC API, bitcoin peer-to-peer protocol, and will even read its raw block .dat files for syncing.
|
||||
|
||||
Configure bitcoind to listen to RPC calls and set `txindex` to true.
|
||||
The easiest way to do this is by copying `./etc/bitcoind/bitcoin.conf` to your
|
||||
bitcoin data directory (usually `~/.bitcoin` on Linux, `%appdata%\Bitcoin\` on Windows,
|
||||
or `~/Library/Application Support/Bitcoin` on Mac OS X).
|
||||
|
||||
bitcoind must be running and must have finished downloading the blockchain **before** running `bitcore-node`.
|
||||
|
||||
|
||||
* **Node.js v0.10.x** - Download and Install [Node.js](http://www.nodejs.org/download/).
|
||||
|
||||
* **NPM** - Node.js package manager, should be automatically installed when you get node.js.
|
||||
|
||||
## Quick Install
|
||||
Check the Prerequisites section above before installing.
|
||||
|
||||
To install `bitcore-node`, clone the main repository:
|
||||
|
||||
$ git clone https://github.com/bitpay/bitcore-node && cd bitcore-node
|
||||
|
||||
Install dependencies:
|
||||
|
||||
$ npm install
|
||||
|
||||
Run the main application:
|
||||
|
||||
$ node index.js
|
||||
|
||||
Then open a browser and go to:
|
||||
|
||||
http://localhost:3001
|
||||
|
||||
Please note that the app will need to sync its internal database
|
||||
with the blockchain state, which may take some time. You can check
|
||||
sync progress at http://localhost:3001/api/sync.
|
||||
|
||||
|
||||
## Configuration
|
||||
|
||||
The main configuration file is called "flocore-node.json". This file instructs flocore-node for the following options:
|
||||
All configuration is specified in the [config](config/) folder, particularly the [config.js](config/config.js) file. There you can specify your application name and database name. Certain configuration values are pulled from environment variables if they are defined:
|
||||
|
||||
- location of database files (datadir)
|
||||
- tcp port for web services, if configured (port)
|
||||
- florincoin network type (e.g. mainnet, testnet3, regtest), (network)
|
||||
- what services to include (services)
|
||||
- the services' configuration (servicesConfig)
|
||||
```
|
||||
BITCOIND_HOST # RPC bitcoind host
|
||||
BITCOIND_PORT # RPC bitcoind Port
|
||||
BITCOIND_P2P_HOST # P2P bitcoind Host (will default to BITCOIND_HOST, if specified)
|
||||
BITCOIND_P2P_PORT # P2P bitcoind Port
|
||||
BITCOIND_USER # RPC username
|
||||
BITCOIND_PASS # RPC password
|
||||
BITCOIND_DATADIR # bitcoind datadir. 'testnet3' will be appended automatically if testnet is used. NEED to finish with '/'. e.g: `/vol/data/`
|
||||
BLOCKCHAIN_API_NETWORK [= 'livenet' | 'testnet']
|
||||
BLOCKCHAIN_API_PORT # blockchain api port
|
||||
BLOCKCHAIN_API_DB # Path where to store bitcore-node's internal DB. (defaults to $HOME/.bitcore-node)
|
||||
BLOCKCHAIN_API_SAFE_CONFIRMATIONS=6 # Nr. of confirmation needed to start caching transaction information
|
||||
BLOCKCHAIN_API_IGNORE_CACHE # True to ignore cache of spents in transaction, with more than BLOCKCHAIN_API_SAFE_CONFIRMATIONS confirmations. This is useful for tracking double spents for old transactions.
|
||||
LOGGER_LEVEL # defaults to 'info', can be 'debug','verbose','error', etc.
|
||||
ENABLE_HTTPS # if "true" it will server using SSL/HTTPS
|
||||
```
|
||||
|
||||
## Add-on Services
|
||||
Make sure that bitcoind is configured to [accept incoming connections using 'rpcallowip'](https://en.bitcoin.it/wiki/Running_Bitcoin).
|
||||
|
||||
There are several add-on services available to extend the functionality of Flocore:
|
||||
In case the network is changed (testnet to livenet or vice versa) levelDB database needs to be deleted. This can be performed running:
|
||||
```util/sync.js -D``` and waiting for `bitcore-node` to synchronize again. Once the database is deleted, the sync.js process can be safely interrupted (CTRL+C) and continued from the synchronization process embedded in main app.
|
||||
|
||||
- [Insight API](https://github.com/bitpay/insight-api)
|
||||
- [Insight UI](https://github.com/bitpay/insight-ui)
|
||||
- [Flocore Wallet Service](https://github.com/bitpay/flocore-wallet-service)
|
||||
## Synchronization
|
||||
|
||||
## Documentation
|
||||
The initial synchronization process scans the blockchain from the paired bitcoind server to update addresses and balances. `bitcore-node` needs exactly one trusted bitcoind node to run. This node must have finished downloading the blockchain before running `bitcore-node`.
|
||||
|
||||
- [Services](docs/services.md)
|
||||
- [Fee](docs/services/fee.md) - Creates a service to handle fee queries
|
||||
- [Header](docs/services/header.md) - Creates a service to handle block headers
|
||||
- [Block](docs/services/block.md) - Creates a service to handle blocks
|
||||
- [Transaction](docs/services/transaction.md) - Creates a service to handle transactions
|
||||
- [Address](docs/services/address.md) - Creates a service to handle addresses
|
||||
- [Mempool](docs/services/mempool.md) - Creates a service to handle mempool
|
||||
- [Timestamp](docs/services/timestamp.md) - Creates a service to handle timestamp
|
||||
- [Db](docs/services/db.md) - Creates a service to handle the database
|
||||
- [p2p](docs/services/p2p.md) - Creates a service to handle the peer-to-peer network
|
||||
- [Web](docs/services/web.md) - Creates an express application over which services can expose their web/API content
|
||||
- [Development Environment](docs/development.md) - Guide for setting up a development environment
|
||||
- [Node](docs/node.md) - Details on the node constructor
|
||||
- [Bus](docs/bus.md) - Overview of the event bus constructor
|
||||
- [Release Process](docs/release.md) - Information about verifying a release and the release process.
|
||||
While `bitcore-node` is synchronizing the website can be accessed (the sync process is embedded in the webserver), but there may be missing data or incorrect balances for addresses. The 'sync' status is shown at the `/api/sync` endpoint.
|
||||
|
||||
## Contributing
|
||||
The blockchain can be read from bitcoind's raw `.dat` files or RPC interface.
|
||||
Reading the information from the `.dat` files is much faster so it's the
|
||||
recommended (and default) alternative. `.dat` files are scanned in the default
|
||||
location for each platform (for example, `~/.bitcoin` on Linux). In case a
|
||||
non-standard location is used, it needs to be defined (see the Configuration section).
|
||||
As of June 2014, using `.dat` files the sync process takes 9 hrs.
|
||||
for livenet and 30 mins. for testnet.
|
||||
|
||||
Please send pull requests for bug fixes, code optimization, and ideas for improvement. For more information on how to contribute, please refer to our [CONTRIBUTING](https://github.com/bitpay/flocore/blob/master/CONTRIBUTING.md) file.
|
||||
While synchronizing the blockchain, `bitcore-node` listens for new blocks and
|
||||
transactions relayed by the bitcoind node. Those are also stored on `bitcore-node`'s database.
|
||||
In case `bitcore-node` is shutdown for a period of time, restarting it will trigger
|
||||
a partial (historic) synchronization of the blockchain. Depending on the size of
|
||||
that synchronization task, a reverse RPC or forward `.dat` syncing strategy will be used.
|
||||
|
||||
If bitcoind is shutdown, `bitcore-node` needs to be stopped and restarted
|
||||
once bitcoind is restarted.
|
||||
|
||||
### Syncing old blockchain data manually
|
||||
|
||||
Old blockchain data can be manually synced issuing:
|
||||
|
||||
$ util/sync.js
|
||||
|
||||
Check util/sync.js --help for options, particulary -D to erase the current DB.
|
||||
|
||||
*NOTE*: there is no need to run this manually since the historic synchronization
|
||||
is built in into the web application. Running `bitcore-node` normally will trigger
|
||||
the historic sync automatically.
|
||||
|
||||
|
||||
### DB storage requirement
|
||||
|
||||
To store the blockchain and address related information, LevelDB is used.
|
||||
Two DBs are created: txs and blocks. By default these are stored on
|
||||
|
||||
``~/.bitcore-node/``
|
||||
|
||||
This can be changed at config/config.js. As of June 2014, storing the livenet blockchain takes ~35GB of disk space (2GB for the testnet).
|
||||
|
||||
## Development
|
||||
|
||||
To run `bitcore-node` locally for development with grunt:
|
||||
|
||||
```$ NODE_ENV=development grunt```
|
||||
|
||||
To run the tests
|
||||
|
||||
```$ grunt test```
|
||||
|
||||
|
||||
## Caching schema
|
||||
|
||||
Since v0.2 a new cache schema has been introduced. Only information from transactions with
|
||||
BLOCKCHAIN_API_SAFE_CONFIRMATIONS settings will be cached (by default SAFE_CONFIRMATIONS=6). There
|
||||
are 3 different caches:
|
||||
* Number of confirmations
|
||||
* Transaction output spent/unspent status
|
||||
* scriptPubKey for unspent transactions
|
||||
|
||||
Cache data is only populated on request, i.e., only after accessing the required data for
|
||||
the first time, the information is cached, there is not pre-caching procedure. To ignore
|
||||
cache by default, use BLOCKCHAIN_API_IGNORE_CACHE. Also, address related calls support `?noCache=1`
|
||||
to ignore the cache in a particular API request.
|
||||
|
||||
## API
|
||||
|
||||
By default, `bitcore-node` provides a REST API at `/api`, but this prefix is configurable from the var `apiPrefix` in the `config.js` file.
|
||||
|
||||
The end-points are:
|
||||
|
||||
|
||||
### Block
|
||||
```
|
||||
/api/block/[:hash]
|
||||
/api/block/00000000a967199a2fad0877433c93df785a8d8ce062e5f9b451cd1397bdbf62
|
||||
```
|
||||
### Transaction
|
||||
```
|
||||
/api/tx/[:txid]
|
||||
/api/tx/525de308971eabd941b139f46c7198b5af9479325c2395db7f2fb5ae8562556c
|
||||
```
|
||||
### Address
|
||||
```
|
||||
/api/addr/[:addr][?noTxList=1&noCache=1]
|
||||
/api/addr/mmvP3mTe53qxHdPqXEvdu8WdC7GfQ2vmx5?noTxList=1
|
||||
```
|
||||
### Address Properties
|
||||
```
|
||||
/api/addr/[:addr]/balance
|
||||
/api/addr/[:addr]/totalReceived
|
||||
/api/addr/[:addr]/totalSent
|
||||
/api/addr/[:addr]/unconfirmedBalance
|
||||
```
|
||||
The response contains the value in Satoshis.
|
||||
### Unspent Outputs
|
||||
```
|
||||
/api/addr/[:addr]/utxo[?noCache=1]
|
||||
```
|
||||
Sample return:
|
||||
``` json
|
||||
[
|
||||
{
|
||||
address: "n2PuaAguxZqLddRbTnAoAuwKYgN2w2hZk7",
|
||||
txid: "dbfdc2a0d22a8282c4e7be0452d595695f3a39173bed4f48e590877382b112fc",
|
||||
vout: 0,
|
||||
ts: 1401276201,
|
||||
scriptPubKey: "76a914e50575162795cd77366fb80d728e3216bd52deac88ac",
|
||||
amount: 0.001,
|
||||
confirmations: 3
|
||||
},
|
||||
{
|
||||
address: "n2PuaAguxZqLddRbTnAoAuwKYgN2w2hZk7",
|
||||
txid: "e2b82af55d64f12fd0dd075d0922ee7d6a300f58fe60a23cbb5831b31d1d58b4",
|
||||
vout: 0,
|
||||
ts: 1401226410,
|
||||
scriptPubKey: "76a914e50575162795cd77366fb80d728e3216bd52deac88ac",
|
||||
amount: 0.001,
|
||||
confirmation: 6
|
||||
confirmationsFromCache: true,
|
||||
}
|
||||
]
|
||||
```
|
||||
Please note that in case confirmations are cached (which happens by default when the number of confirmations is bigger that BLOCKCHAIN_API_SAFE_CONFIRMATIONS) the response will include the pair confirmationsFromCache:true, and confirmations will equal BLOCKCHAIN_API_SAFE_CONFIRMATIONS. See noCache and BLOCKCHAIN_API_IGNORE_CACHE options for details.
|
||||
|
||||
|
||||
|
||||
### Unspent Outputs for multiple addresses
|
||||
GET method:
|
||||
```
|
||||
/api/addrs/[:addrs]/utxo
|
||||
/api/addrs/2NF2baYuJAkCKo5onjUKEPdARQkZ6SYyKd5,2NAre8sX2povnjy4aeiHKeEh97Qhn97tB1f/utxo
|
||||
```
|
||||
|
||||
POST method:
|
||||
```
|
||||
/api/addrs/utxo
|
||||
```
|
||||
|
||||
POST params:
|
||||
```
|
||||
addrs: 2NF2baYuJAkCKo5onjUKEPdARQkZ6SYyKd5,2NAre8sX2povnjy4aeiHKeEh97Qhn97tB1f
|
||||
```
|
||||
|
||||
### Transactions by Block
|
||||
```
|
||||
/api/txs/?block=HASH
|
||||
/api/txs/?block=00000000fa6cf7367e50ad14eb0ca4737131f256fc4c5841fd3c3f140140e6b6
|
||||
```
|
||||
### Transactions by Address
|
||||
```
|
||||
/api/txs/?address=ADDR
|
||||
/api/txs/?address=mmhmMNfBiZZ37g1tgg2t8DDbNoEdqKVxAL
|
||||
```
|
||||
|
||||
### Transactions for multiple addresses
|
||||
GET method:
|
||||
```
|
||||
/api/addrs/[:addrs]/txs[?from=&to=]
|
||||
/api/addrs/2NF2baYuJAkCKo5onjUKEPdARQkZ6SYyKd5,2NAre8sX2povnjy4aeiHKeEh97Qhn97tB1f/txs?from=0&to=20
|
||||
```
|
||||
|
||||
POST method:
|
||||
```
|
||||
/api/addrs/txs
|
||||
```
|
||||
|
||||
POST params:
|
||||
```
|
||||
addrs: 2NF2baYuJAkCKo5onjUKEPdARQkZ6SYyKd5,2NAre8sX2povnjy4aeiHKeEh97Qhn97tB1f
|
||||
from (optional): 0
|
||||
to (optional): 20
|
||||
```
|
||||
|
||||
Sample output:
|
||||
```
|
||||
{ totalItems: 100,
|
||||
from: 0,
|
||||
to: 20,
|
||||
items:
|
||||
[ { txid: '3e81723d069b12983b2ef694c9782d32fca26cc978de744acbc32c3d3496e915',
|
||||
version: 1,
|
||||
locktime: 0,
|
||||
vin: [Object],
|
||||
vout: [Object],
|
||||
blockhash: '00000000011a135e5277f5493c52c66829792392632b8b65429cf07ad3c47a6c',
|
||||
confirmations: 109367,
|
||||
time: 1393659685,
|
||||
blocktime: 1393659685,
|
||||
valueOut: 0.3453,
|
||||
size: 225,
|
||||
firstSeenTs: undefined,
|
||||
valueIn: 0.3454,
|
||||
fees: 0.0001 },
|
||||
{ ... },
|
||||
{ ... },
|
||||
...
|
||||
{ ... }
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
Note: if pagination params are not specified, the result is an array of transactions.
|
||||
|
||||
|
||||
### Transaction broadcasting
|
||||
POST method:
|
||||
```
|
||||
/api/tx/send
|
||||
```
|
||||
POST params:
|
||||
```
|
||||
rawtx: "signed transaction as hex string"
|
||||
|
||||
eg
|
||||
|
||||
rawtx: 01000000017b1eabe0209b1fe794124575ef807057c77ada2138ae4fa8d6c4de0398a14f3f00000000494830450221008949f0cb400094ad2b5eb399d59d01c14d73d8fe6e96df1a7150deb388ab8935022079656090d7f6bac4c9a94e0aad311a4268e082a725f8aeae0573fb12ff866a5f01ffffffff01f0ca052a010000001976a914cbc20a7664f2f69e5355aa427045bc15e7c6c77288ac00000000
|
||||
|
||||
```
|
||||
POST response:
|
||||
```
|
||||
{
|
||||
txid: [:txid]
|
||||
}
|
||||
|
||||
eg
|
||||
|
||||
{
|
||||
txid: "c7736a0a0046d5a8cc61c8c3c2821d4d7517f5de2bc66a966011aaa79965ffba"
|
||||
}
|
||||
```
|
||||
|
||||
### Historic blockchain data sync status
|
||||
```
|
||||
/api/sync
|
||||
```
|
||||
|
||||
### Live network p2p data sync status
|
||||
```
|
||||
/api/peer
|
||||
```
|
||||
|
||||
### Status of the bitcoin network
|
||||
```
|
||||
/api/status?q=xxx
|
||||
```
|
||||
|
||||
Where "xxx" can be:
|
||||
|
||||
* getInfo
|
||||
* getDifficulty
|
||||
* getTxOutSetInfo
|
||||
* getBestBlockHash
|
||||
* getLastBlockHash
|
||||
|
||||
## Web Socket API
|
||||
The web socket API is served using [socket.io](http://socket.io).
|
||||
|
||||
The following are the events published:
|
||||
|
||||
'tx': new transaction received from network. This event is published in the 'inv' room. Data will be a app/models/Transaction object.
|
||||
Sample output:
|
||||
```
|
||||
{
|
||||
"txid":"00c1b1acb310b87085c7deaaeba478cef5dc9519fab87a4d943ecbb39bd5b053",
|
||||
"processed":false
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
'block': new block received from network. This event is published in the 'inv' room. Data will be a app/models/Block object.
|
||||
Sample output:
|
||||
```
|
||||
{
|
||||
"hash":"000000004a3d187c430cd6a5e988aca3b19e1f1d1727a50dead6c8ac26899b96",
|
||||
"time":1389789343,
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
'<bitcoinAddress>': new transaction concerning <bitcoinAddress> received from network. This event is published in the '<bitcoinAddress>' room.
|
||||
|
||||
'status': every 1% increment on the sync task, this event will be triggered. This event is published in the 'sync' room.
|
||||
|
||||
Sample output:
|
||||
```
|
||||
{
|
||||
blocksToSync: 164141,
|
||||
syncedBlocks: 475,
|
||||
upToExisting: true,
|
||||
scanningBackward: true,
|
||||
isEndGenesis: true,
|
||||
end: "000000000933ea01ad0ee984209779baaec3ced90fa3f408719526f8d77f4943",
|
||||
isStartGenesis: false,
|
||||
start: "000000009f929800556a8f3cfdbe57c187f2f679e351b12f7011bfc276c41b6d"
|
||||
}
|
||||
```
|
||||
|
||||
### Example Usage
|
||||
|
||||
The following html page connects to the socket.io API and listens for new transactions.
|
||||
|
||||
html
|
||||
```
|
||||
<html>
|
||||
<body>
|
||||
<script src="http://<bitcore-node-server>:<port>/socket.io/socket.io.js"></script>
|
||||
<script>
|
||||
eventToListenTo = 'tx'
|
||||
room = 'inv'
|
||||
|
||||
var socket = io("http://<bitcore-node-server>:<port>/");
|
||||
socket.on('connect', function() {
|
||||
// Join the room.
|
||||
socket.emit('subscribe', room);
|
||||
})
|
||||
socket.on(eventToListenTo, function(data) {
|
||||
console.log("New transaction received: " + data.txid)
|
||||
})
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
```
|
||||
|
||||
## License
|
||||
(The MIT License)
|
||||
|
||||
Code released under [the MIT license](https://github.com/bitpay/flocore-node/blob/master/LICENSE).
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
'Software'), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
Copyright 2013-2017 BitPay, Inc.
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
- florincoin: Copyright (c) 2009-2015 Florincoin Core Developers (MIT License)
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
206
app/controllers/addresses.js
Normal file
206
app/controllers/addresses.js
Normal file
@ -0,0 +1,206 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
var _ = require('lodash');
|
||||
var Address = require('../models/Address');
|
||||
var common = require('./common');
|
||||
var async = require('async');
|
||||
|
||||
var tDb = require('../../lib/TransactionDb').default();
|
||||
|
||||
// Parse the ':addr' route parameter into an Address model.
// On an invalid address, writes an error response and returns null.
var getAddr = function(req, res, next) {
  try {
    return new Address(req.param('addr'));
  } catch (e) {
    common.handleErrors({
      message: 'Invalid address:' + e.message,
      code: 1
    }, res, next);
    return null;
  }
};
|
||||
|
||||
// Parse the comma-separated ':addrs' route parameter into an array of
// Address models. On any invalid address, writes an error response and
// returns null.
var getAddrs = function(req, res, next) {
  try {
    var list = [];
    var parts = req.param('addrs').split(',');
    if (parts.length === 0) return list;
    parts.forEach(function(addrStr) {
      list.push(new Address(addrStr));
    });
    return list;
  } catch (e) {
    common.handleErrors({
      message: 'Invalid address:' + e.message,
      code: 1
    }, res, next);
    return null;
  }
};
|
||||
|
||||
exports.show = function(req, res, next) {
|
||||
var a = getAddr(req, res, next);
|
||||
|
||||
if (a) {
|
||||
a.update(function(err) {
|
||||
if (err) {
|
||||
return common.handleErrors(err, res);
|
||||
} else {
|
||||
return res.jsonp(a.getObj());
|
||||
}
|
||||
}, {txLimit: req.query.noTxList?0:-1, ignoreCache: req.param('noCache')});
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
|
||||
exports.utxo = function(req, res, next) {
|
||||
var a = getAddr(req, res, next);
|
||||
if (a) {
|
||||
a.update(function(err) {
|
||||
if (err)
|
||||
return common.handleErrors(err, res);
|
||||
else {
|
||||
return res.jsonp(a.unspent);
|
||||
}
|
||||
}, {onlyUnspent:1, ignoreCache: req.param('noCache')});
|
||||
}
|
||||
};
|
||||
|
||||
exports.multiutxo = function(req, res, next) {
|
||||
var as = getAddrs(req, res, next);
|
||||
if (as) {
|
||||
var utxos = [];
|
||||
async.each(as, function(a, callback) {
|
||||
a.update(function(err) {
|
||||
if (err) callback(err);
|
||||
utxos = utxos.concat(a.unspent);
|
||||
callback();
|
||||
}, {onlyUnspent:1, ignoreCache: req.param('noCache')});
|
||||
}, function(err) { // finished callback
|
||||
if (err) return common.handleErrors(err, res);
|
||||
res.jsonp(utxos);
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
exports.multitxs = function(req, res, next) {
|
||||
|
||||
function processTxs(txs, from, to, cb) {
|
||||
txs = _.uniq(_.flatten(txs), 'txid');
|
||||
var nbTxs = txs.length;
|
||||
var paginated = !_.isUndefined(from) || !_.isUndefined(to);
|
||||
|
||||
if (paginated) {
|
||||
txs.sort(function(a, b) {
|
||||
return (b.ts || b.ts) - (a.ts || a.ts);
|
||||
});
|
||||
var start = Math.max(from || 0, 0);
|
||||
var end = Math.min(to || txs.length, txs.length);
|
||||
txs = txs.slice(start, end);
|
||||
}
|
||||
|
||||
var txIndex = {};
|
||||
_.each(txs, function (tx) { txIndex[tx.txid] = tx; });
|
||||
|
||||
async.each(txs, function (tx, callback) {
|
||||
tDb.fromIdWithInfo(tx.txid, function(err, tx) {
|
||||
if (err) console.log(err);
|
||||
if (tx && tx.info) {
|
||||
txIndex[tx.txid].info = tx.info;
|
||||
}
|
||||
callback();
|
||||
});
|
||||
}, function (err) {
|
||||
if (err) return cb(err);
|
||||
|
||||
var transactions = _.pluck(txs, 'info');
|
||||
if (paginated) {
|
||||
transactions = {
|
||||
totalItems: nbTxs,
|
||||
from: +from,
|
||||
to: +to,
|
||||
items: transactions,
|
||||
};
|
||||
}
|
||||
return cb(null, transactions);
|
||||
});
|
||||
};
|
||||
|
||||
var from = req.param('from');
|
||||
var to = req.param('to');
|
||||
|
||||
var as = getAddrs(req, res, next);
|
||||
if (as) {
|
||||
var txs = [];
|
||||
async.eachLimit(as, 10, function(a, callback) {
|
||||
a.update(function(err) {
|
||||
if (err) callback(err);
|
||||
txs.push(a.transactions);
|
||||
callback();
|
||||
}, {ignoreCache: req.param('noCache'), includeTxInfo: true});
|
||||
}, function(err) { // finished callback
|
||||
if (err) return common.handleErrors(err, res);
|
||||
processTxs(txs, from, to, function (err, transactions) {
|
||||
if (err) return common.handleErrors(err, res);
|
||||
res.jsonp(transactions);
|
||||
});
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
exports.balance = function(req, res, next) {
|
||||
var a = getAddr(req, res, next);
|
||||
if (a)
|
||||
a.update(function(err) {
|
||||
if (err) {
|
||||
return common.handleErrors(err, res);
|
||||
} else {
|
||||
return res.jsonp(a.balanceSat);
|
||||
}
|
||||
}, {ignoreCache: req.param('noCache')});
|
||||
};
|
||||
|
||||
exports.totalReceived = function(req, res, next) {
|
||||
var a = getAddr(req, res, next);
|
||||
if (a)
|
||||
a.update(function(err) {
|
||||
if (err) {
|
||||
return common.handleErrors(err, res);
|
||||
} else {
|
||||
return res.jsonp(a.totalReceivedSat);
|
||||
}
|
||||
}, {ignoreCache: req.param('noCache')});
|
||||
};
|
||||
|
||||
exports.totalSent = function(req, res, next) {
|
||||
var a = getAddr(req, res, next);
|
||||
if (a)
|
||||
a.update(function(err) {
|
||||
if (err) {
|
||||
return common.handleErrors(err, res);
|
||||
} else {
|
||||
return res.jsonp(a.totalSentSat);
|
||||
}
|
||||
}, {ignoreCache: req.param('noCache')});
|
||||
};
|
||||
|
||||
exports.unconfirmedBalance = function(req, res, next) {
|
||||
var a = getAddr(req, res, next);
|
||||
if (a)
|
||||
a.update(function(err) {
|
||||
if (err) {
|
||||
return common.handleErrors(err, res);
|
||||
} else {
|
||||
return res.jsonp(a.unconfirmedBalanceSat);
|
||||
}
|
||||
}, {ignoreCache: req.param('noCache')});
|
||||
};
|
||||
176
app/controllers/blocks.js
Normal file
176
app/controllers/blocks.js
Normal file
@ -0,0 +1,176 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
var common = require('./common'),
|
||||
async = require('async'),
|
||||
BlockDb = require('../../lib/BlockDb'),
|
||||
TransactionDb = require('../../lib/TransactionDb');
|
||||
|
||||
var bdb = new BlockDb();
|
||||
var tdb = new TransactionDb();
|
||||
|
||||
/**
|
||||
* Find block by hash ...
|
||||
*/
|
||||
exports.block = function(req, res, next, hash) {
|
||||
bdb.fromHashWithInfo(hash, function(err, block) {
|
||||
if (err || !block)
|
||||
return common.handleErrors(err, res, next);
|
||||
else {
|
||||
tdb.getPoolInfo(block.info.tx[0], function(info) {
|
||||
block.info.poolInfo = info;
|
||||
req.block = block.info;
|
||||
return next();
|
||||
});
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Show block
|
||||
*/
|
||||
exports.show = function(req, res) {
|
||||
if (req.block) {
|
||||
res.jsonp(req.block);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Show block by Height
|
||||
*/
|
||||
exports.blockindex = function(req, res, next, height) {
|
||||
bdb.blockIndex(height, function(err, hashStr) {
|
||||
if (err) {
|
||||
console.log(err);
|
||||
res.status(400).send('Bad Request'); // TODO
|
||||
} else {
|
||||
res.jsonp(hashStr);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
// Fetch a block's info by hash and attach mining-pool info.
// Orphan (or otherwise info-less) blocks yield a minimal stub.
var getBlock = function(blockhash, cb) {
  bdb.fromHashWithInfo(blockhash, function(err, block) {
    if (err) {
      console.log(err);
      return cb(err);
    }

    if (!block.info) {
      console.log('Could not get %s from RPC. Orphan? Error?', blockhash); //TODO
      // Probably an orphan: return the stub directly. The original fell
      // through to getPoolInfo(block.info.tx[0]) with no tx array,
      // handing `undefined` to the pool lookup.
      return cb(null, {
        hash: blockhash,
        isOrphan: 1,
      });
    }

    tdb.getPoolInfo(block.info.tx[0], function(info) {
      block.info.poolInfo = info;
      return cb(err, block.info);
    });
  });
};
|
||||
|
||||
/**
|
||||
* List of blocks by date
|
||||
*/
|
||||
|
||||
var DFLT_LIMIT=200;
|
||||
// in testnet, this number is much bigger, we dont support
|
||||
// exploring blocks by date.
|
||||
|
||||
exports.list = function(req, res) {
|
||||
var isToday = false;
|
||||
|
||||
//helper to convert timestamps to yyyy-mm-dd format
|
||||
var formatTimestamp = function(date) {
|
||||
var yyyy = date.getUTCFullYear().toString();
|
||||
var mm = (date.getUTCMonth() + 1).toString(); // getMonth() is zero-based
|
||||
var dd = date.getUTCDate().toString();
|
||||
|
||||
return yyyy + '-' + (mm[1] ? mm : '0' + mm[0]) + '-' + (dd[1] ? dd : '0' + dd[0]); //padding
|
||||
};
|
||||
|
||||
var dateStr;
|
||||
var todayStr = formatTimestamp(new Date());
|
||||
|
||||
if (req.query.blockDate) {
|
||||
// TODO: Validate format yyyy-mm-dd
|
||||
dateStr = req.query.blockDate;
|
||||
isToday = dateStr === todayStr;
|
||||
} else {
|
||||
dateStr = todayStr;
|
||||
isToday = true;
|
||||
}
|
||||
var gte = Math.round((new Date(dateStr)).getTime() / 1000);
|
||||
|
||||
//pagination
|
||||
var lte = parseInt(req.query.startTimestamp) || gte + 86400;
|
||||
var prev = formatTimestamp(new Date((gte - 86400) * 1000));
|
||||
var next = lte ? formatTimestamp(new Date(lte * 1000)) :null;
|
||||
var limit = parseInt(req.query.limit || DFLT_LIMIT) + 1;
|
||||
var more;
|
||||
|
||||
bdb.getBlocksByDate(gte, lte, limit, function(err, blockList) {
|
||||
|
||||
if (err) {
|
||||
res.status(500).send(err);
|
||||
} else {
|
||||
var l = blockList.length;
|
||||
|
||||
if (l===limit) {
|
||||
more = true;
|
||||
blockList.pop;
|
||||
}
|
||||
|
||||
var moreTs=lte;
|
||||
async.mapSeries(blockList,
|
||||
function(b, cb) {
|
||||
getBlock(b.hash, function(err, info) {
|
||||
if (err) {
|
||||
console.log(err);
|
||||
return cb(err);
|
||||
}
|
||||
if (b.ts < moreTs) moreTs = b.ts;
|
||||
return cb(err, {
|
||||
height: info.height,
|
||||
size: info.size,
|
||||
hash: b.hash,
|
||||
time: b.ts || info.time,
|
||||
txlength: info.tx.length,
|
||||
poolInfo: info.poolInfo
|
||||
});
|
||||
});
|
||||
}, function(err, allblocks) {
|
||||
|
||||
// sort blocks by height
|
||||
allblocks.sort(
|
||||
function compare(a,b) {
|
||||
if (a.height < b.height) return 1;
|
||||
if (a.height > b.height) return -1;
|
||||
return 0;
|
||||
});
|
||||
|
||||
res.jsonp({
|
||||
blocks: allblocks,
|
||||
length: allblocks.length,
|
||||
pagination: {
|
||||
next: next,
|
||||
prev: prev,
|
||||
currentTs: lte - 1,
|
||||
current: dateStr,
|
||||
isToday: isToday,
|
||||
more: more,
|
||||
moreTs: moreTs,
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
};
|
||||
16
app/controllers/common.js
Normal file
16
app/controllers/common.js
Normal file
@ -0,0 +1,16 @@
|
||||
'use strict';
|
||||
|
||||
|
||||
exports.handleErrors = function (err, res) {
|
||||
if (err) {
|
||||
if (err.code) {
|
||||
res.status(400).send(err.message + '. Code:' + err.code);
|
||||
}
|
||||
else {
|
||||
res.status(503).send(err.message);
|
||||
}
|
||||
}
|
||||
else {
|
||||
res.status(404).send('Not found');
|
||||
}
|
||||
};
|
||||
60
app/controllers/currency.js
Normal file
60
app/controllers/currency.js
Normal file
@ -0,0 +1,60 @@
|
||||
'use strict';
|
||||
|
||||
var config = require('../../config/config');
|
||||
|
||||
// Set the initial vars
|
||||
var timestamp = +new Date(),
|
||||
delay = config.currencyRefresh * 60000,
|
||||
bitstampRate = 0;
|
||||
|
||||
exports.index = function(req, res) {
|
||||
|
||||
var _xhr = function() {
|
||||
if (typeof XMLHttpRequest !== 'undefined' && XMLHttpRequest !== null) {
|
||||
return new XMLHttpRequest();
|
||||
} else if (typeof require !== 'undefined' && require !== null) {
|
||||
var XMLhttprequest = require('xmlhttprequest').XMLHttpRequest;
|
||||
return new XMLhttprequest();
|
||||
}
|
||||
};
|
||||
|
||||
var _request = function(url, cb) {
|
||||
var request;
|
||||
request = _xhr();
|
||||
request.open('GET', url, true);
|
||||
request.onreadystatechange = function() {
|
||||
if (request.readyState === 4) {
|
||||
if (request.status === 200) {
|
||||
return cb(false, request.responseText);
|
||||
}
|
||||
|
||||
return cb(true, {
|
||||
status: request.status,
|
||||
message: 'Request error'
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
return request.send(null);
|
||||
};
|
||||
|
||||
// Init
|
||||
var currentTime = +new Date();
|
||||
if (bitstampRate === 0 || currentTime >= (timestamp + delay)) {
|
||||
timestamp = currentTime;
|
||||
|
||||
_request('https://www.bitstamp.net/api/ticker/', function(err, data) {
|
||||
if (!err) bitstampRate = parseFloat(JSON.parse(data).last);
|
||||
|
||||
res.jsonp({
|
||||
status: 200,
|
||||
data: { bitstamp: bitstampRate }
|
||||
});
|
||||
});
|
||||
} else {
|
||||
res.jsonp({
|
||||
status: 200,
|
||||
data: { bitstamp: bitstampRate }
|
||||
});
|
||||
}
|
||||
};
|
||||
26
app/controllers/index.js
Normal file
26
app/controllers/index.js
Normal file
@ -0,0 +1,26 @@
|
||||
'use strict';
|
||||
|
||||
var config = require('../../config/config');
|
||||
|
||||
var _getVersion = function() {
|
||||
var pjson = require('../../package.json');
|
||||
return pjson.version;
|
||||
};
|
||||
|
||||
exports.render = function(req, res) {
|
||||
|
||||
if (config.publicPath) {
|
||||
return res.sendfile(config.publicPath + '/index.html');
|
||||
}
|
||||
else {
|
||||
var version = _getVersion();
|
||||
res.send('bitcore-node API v' + version);
|
||||
}
|
||||
};
|
||||
|
||||
exports.version = function(req, res) {
|
||||
var version = _getVersion();
|
||||
res.json({
|
||||
version: version
|
||||
});
|
||||
};
|
||||
27
app/controllers/messages.js
Normal file
27
app/controllers/messages.js
Normal file
@ -0,0 +1,27 @@
|
||||
'use strict';
|
||||
|
||||
var common = require('./common');
|
||||
var Rpc = require('../../lib/Rpc');
|
||||
|
||||
|
||||
exports.verify = function(req, res) {
|
||||
var address = req.param('address'),
|
||||
signature = req.param('signature'),
|
||||
message = req.param('message');
|
||||
|
||||
if(typeof(address) == 'undefined'
|
||||
|| typeof(signature) == 'undefined'
|
||||
|| typeof(message) == 'undefined') {
|
||||
return common.handleErrors({
|
||||
message: 'Missing parameters (expected "address", "signature" and "message")',
|
||||
code: 1
|
||||
}, res);
|
||||
}
|
||||
|
||||
Rpc.verifyMessage(address, signature, message, function(err, result) {
|
||||
if (err) {
|
||||
return common.handleErrors(err, res);
|
||||
}
|
||||
res.json({'result' : result});
|
||||
});
|
||||
};
|
||||
73
app/controllers/socket.js
Normal file
73
app/controllers/socket.js
Normal file
@ -0,0 +1,73 @@
|
||||
'use strict';
|
||||
|
||||
// server-side socket behaviour
|
||||
var ios = null; // io is already taken in express
|
||||
var util = require('bitcore').util;
|
||||
var logger = require('../../lib/logger').logger;
|
||||
|
||||
module.exports.init = function(io_ext) {
|
||||
ios = io_ext;
|
||||
if (ios) {
|
||||
// when a new socket connects
|
||||
ios.sockets.on('connection', function(socket) {
|
||||
logger.verbose('New connection from ' + socket.id);
|
||||
// when it subscribes, make it join the according room
|
||||
socket.on('subscribe', function(topic) {
|
||||
logger.debug('subscribe to ' + topic);
|
||||
socket.join(topic);
|
||||
socket.emit('subscribed');
|
||||
});
|
||||
|
||||
// disconnect handler
|
||||
socket.on('disconnect', function() {
|
||||
logger.verbose('disconnected ' + socket.id);
|
||||
});
|
||||
|
||||
});
|
||||
}
|
||||
return ios;
|
||||
};
|
||||
|
||||
var simpleTx = function(tx) {
|
||||
return {
|
||||
txid: tx
|
||||
};
|
||||
};
|
||||
|
||||
var fullTx = function(tx) {
|
||||
var t = {
|
||||
txid: tx.txid,
|
||||
size: tx.size,
|
||||
};
|
||||
// Outputs
|
||||
var valueOut = 0;
|
||||
tx.vout.forEach(function(o) {
|
||||
valueOut += o.valueSat;
|
||||
});
|
||||
|
||||
t.valueOut = (valueOut.toFixed(8) / util.COIN);
|
||||
return t;
|
||||
};
|
||||
|
||||
module.exports.broadcastTx = function(tx) {
|
||||
if (ios) {
|
||||
var t = (typeof tx === 'string') ? simpleTx(tx) : fullTx(tx);
|
||||
ios.sockets.in('inv').emit('tx', t);
|
||||
}
|
||||
};
|
||||
|
||||
module.exports.broadcastBlock = function(block) {
|
||||
if (ios)
|
||||
ios.sockets.in('inv').emit('block', block);
|
||||
};
|
||||
|
||||
module.exports.broadcastAddressTx = function(txid, address) {
|
||||
if (ios) {
|
||||
ios.sockets.in(address).emit(address, txid);
|
||||
}
|
||||
};
|
||||
|
||||
module.exports.broadcastSyncInfo = function(historicSync) {
|
||||
if (ios)
|
||||
ios.sockets.in('sync').emit('status', historicSync);
|
||||
};
|
||||
62
app/controllers/status.js
Normal file
62
app/controllers/status.js
Normal file
@ -0,0 +1,62 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
var Status = require('../models/Status'),
|
||||
common = require('./common');
|
||||
|
||||
/**
|
||||
* Status
|
||||
*/
|
||||
exports.show = function(req, res) {
|
||||
|
||||
if (! req.query.q) {
|
||||
res.status(400).send('Bad Request');
|
||||
}
|
||||
else {
|
||||
var option = req.query.q;
|
||||
var statusObject = new Status();
|
||||
|
||||
var returnJsonp = function (err) {
|
||||
if (err || ! statusObject)
|
||||
return common.handleErrors(err, res);
|
||||
else {
|
||||
res.jsonp(statusObject);
|
||||
}
|
||||
};
|
||||
|
||||
switch(option) {
|
||||
case 'getInfo':
|
||||
statusObject.getInfo(returnJsonp);
|
||||
break;
|
||||
case 'getDifficulty':
|
||||
statusObject.getDifficulty(returnJsonp);
|
||||
break;
|
||||
case 'getTxOutSetInfo':
|
||||
statusObject.getTxOutSetInfo(returnJsonp);
|
||||
break;
|
||||
case 'getLastBlockHash':
|
||||
statusObject.getLastBlockHash(returnJsonp);
|
||||
break;
|
||||
case 'getBestBlockHash':
|
||||
statusObject.getBestBlockHash(returnJsonp);
|
||||
break;
|
||||
default:
|
||||
res.status(400).send('Bad Request');
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
exports.sync = function(req, res) {
|
||||
if (req.historicSync)
|
||||
res.jsonp(req.historicSync.info());
|
||||
};
|
||||
|
||||
exports.peer = function(req, res) {
|
||||
if (req.peerSync) {
|
||||
var info = req.peerSync.info();
|
||||
res.jsonp(info);
|
||||
}
|
||||
};
|
||||
166
app/controllers/transactions.js
Normal file
166
app/controllers/transactions.js
Normal file
@ -0,0 +1,166 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
var Address = require('../models/Address');
|
||||
var async = require('async');
|
||||
var common = require('./common');
|
||||
var util = require('util');
|
||||
|
||||
var Rpc = require('../../lib/Rpc');
|
||||
|
||||
var tDb = require('../../lib/TransactionDb').default();
|
||||
var bdb = require('../../lib/BlockDb').default();
|
||||
|
||||
exports.send = function(req, res) {
|
||||
Rpc.sendRawTransaction(req.body.rawtx, function(err, txid) {
|
||||
if (err) {
|
||||
var message;
|
||||
if(err.code == -25) {
|
||||
message = util.format(
|
||||
'Generic error %s (code %s)',
|
||||
err.message, err.code);
|
||||
} else if(err.code == -26) {
|
||||
message = util.format(
|
||||
'Transaction rejected by network (code %s). Reason: %s',
|
||||
err.code, err.message);
|
||||
} else {
|
||||
message = util.format('%s (code %s)', err.message, err.code);
|
||||
}
|
||||
return res.status(400).send(message);
|
||||
}
|
||||
res.json({'txid' : txid});
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Find transaction by hash ...
|
||||
*/
|
||||
exports.transaction = function(req, res, next, txid) {
|
||||
|
||||
tDb.fromIdWithInfo(txid, function(err, tx) {
|
||||
if (err || ! tx)
|
||||
return common.handleErrors(err, res);
|
||||
else {
|
||||
req.transaction = tx.info;
|
||||
return next();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Show transaction
|
||||
*/
|
||||
exports.show = function(req, res) {
|
||||
|
||||
if (req.transaction) {
|
||||
res.jsonp(req.transaction);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// Resolve a txid to its extended info; unknown txids resolve to a stub
// so callers in async.mapSeries always get a result.
var getTransaction = function(txid, cb) {
  tDb.fromIdWithInfo(txid, function(err, tx) {
    if (err) console.log(err);

    if (!tx || !tx.info) {
      console.log('[transactions.js.48]:: TXid %s not found in RPC. CHECK THIS.', txid);
      // The original `return ({ txid: txid })` never invoked `cb`,
      // hanging every async.mapSeries that was waiting on it.
      return cb(null, { txid: txid });
    }

    return cb(null, tx.info);
  });
};
|
||||
|
||||
|
||||
/**
|
||||
* List of transaction
|
||||
*/
|
||||
exports.list = function(req, res, next) {
|
||||
var bId = req.query.block;
|
||||
var addrStr = req.query.address;
|
||||
var page = req.query.pageNum;
|
||||
var pageLength = 10;
|
||||
var pagesTotal = 1;
|
||||
var txLength;
|
||||
var txs;
|
||||
|
||||
if (bId) {
|
||||
bdb.fromHashWithInfo(bId, function(err, block) {
|
||||
if (err) {
|
||||
console.log(err);
|
||||
return res.status(500).send('Internal Server Error');
|
||||
}
|
||||
|
||||
if (! block) {
|
||||
return res.status(404).send('Not found');
|
||||
}
|
||||
|
||||
txLength = block.info.tx.length;
|
||||
|
||||
if (page) {
|
||||
var spliceInit = page * pageLength;
|
||||
txs = block.info.tx.splice(spliceInit, pageLength);
|
||||
pagesTotal = Math.ceil(txLength / pageLength);
|
||||
}
|
||||
else {
|
||||
txs = block.info.tx;
|
||||
}
|
||||
|
||||
async.mapSeries(txs, getTransaction, function(err, results) {
|
||||
if (err) {
|
||||
console.log(err);
|
||||
res.status(404).send('TX not found');
|
||||
}
|
||||
|
||||
res.jsonp({
|
||||
pagesTotal: pagesTotal,
|
||||
txs: results
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
else if (addrStr) {
|
||||
var a = new Address(addrStr);
|
||||
|
||||
a.update(function(err) {
|
||||
if (err && !a.totalReceivedSat) {
|
||||
console.log(err);
|
||||
res.status(404).send('Invalid address');
|
||||
return next();
|
||||
}
|
||||
|
||||
txLength = a.transactions.length;
|
||||
|
||||
if (page) {
|
||||
var spliceInit = page * pageLength;
|
||||
txs = a.transactions.splice(spliceInit, pageLength);
|
||||
pagesTotal = Math.ceil(txLength / pageLength);
|
||||
}
|
||||
else {
|
||||
txs = a.transactions;
|
||||
}
|
||||
|
||||
async.mapSeries(txs, getTransaction, function(err, results) {
|
||||
if (err) {
|
||||
console.log(err);
|
||||
res.status(404).send('TX not found');
|
||||
}
|
||||
|
||||
res.jsonp({
|
||||
pagesTotal: pagesTotal,
|
||||
txs: results
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
else {
|
||||
res.jsonp({
|
||||
txs: []
|
||||
});
|
||||
}
|
||||
};
|
||||
211
app/models/Address.js
Normal file
211
app/models/Address.js
Normal file
@ -0,0 +1,211 @@
|
||||
'use strict';
|
||||
|
||||
var imports = require('soop').imports();
|
||||
var async = require('async');
|
||||
var bitcore = require('bitcore');
|
||||
var BitcoreAddress = bitcore.Address;
|
||||
var BitcoreTransaction = bitcore.Transaction;
|
||||
var BitcoreUtil = bitcore.util;
|
||||
var Parser = bitcore.BinaryParser;
|
||||
var Buffer = bitcore.Buffer;
|
||||
var TransactionDb = imports.TransactionDb || require('../../lib/TransactionDb').default();
|
||||
var BlockDb = imports.BlockDb || require('../../lib/BlockDb').default();
|
||||
var config = require('../../config/config');
|
||||
var CONCURRENCY = 5;
|
||||
|
||||
// Address model: accumulates balances / tx appearances for one address.
// Throws if `addrStr` is not a valid address.
function Address(addrStr) {
  this.balanceSat = 0;
  this.totalReceivedSat = 0;
  this.totalSentSat = 0;

  this.unconfirmedBalanceSat = 0;

  this.txApperances = 0;
  this.unconfirmedTxApperances = 0;
  this.seen = {};

  // TODO store only txids? +index? +all?
  this.transactions = [];
  this.unspent = [];

  var a = new BitcoreAddress(addrStr);
  a.validate();
  this.addrStr = addrStr;

  // BTC-denominated views over the satoshi backing fields.
  Object.defineProperty(this, 'totalSent', {
    get: function() {
      return parseFloat(this.totalSentSat) / parseFloat(BitcoreUtil.COIN);
    },
    set: function(i) {
      this.totalSentSat = i * BitcoreUtil.COIN;
    },
    enumerable: 1,
  });

  Object.defineProperty(this, 'balance', {
    get: function() {
      return parseFloat(this.balanceSat) / parseFloat(BitcoreUtil.COIN);
    },
    set: function(i) {
      // Fixed: the original assigned `this.balance`, re-entering this
      // setter in infinite recursion; the satoshi field is the target.
      this.balanceSat = i * BitcoreUtil.COIN;
    },
    enumerable: 1,
  });

  Object.defineProperty(this, 'totalReceived', {
    get: function() {
      return parseFloat(this.totalReceivedSat) / parseFloat(BitcoreUtil.COIN);
    },
    set: function(i) {
      // Fixed: same self-recursive setter bug as `balance` above.
      this.totalReceivedSat = i * BitcoreUtil.COIN;
    },
    enumerable: 1,
  });


  Object.defineProperty(this, 'unconfirmedBalance', {
    get: function() {
      return parseFloat(this.unconfirmedBalanceSat) / parseFloat(BitcoreUtil.COIN);
    },
    set: function(i) {
      this.unconfirmedBalanceSat = i * BitcoreUtil.COIN;
    },
    enumerable: 1,
  });

}
|
||||
|
||||
Address.prototype.getObj = function() {
|
||||
// Normalize json address
|
||||
return {
|
||||
'addrStr': this.addrStr,
|
||||
'balance': this.balance,
|
||||
'balanceSat': this.balanceSat,
|
||||
'totalReceived': this.totalReceived,
|
||||
'totalReceivedSat': this.totalReceivedSat,
|
||||
'totalSent': this.totalSent,
|
||||
'totalSentSat': this.totalSentSat,
|
||||
'unconfirmedBalance': this.unconfirmedBalance,
|
||||
'unconfirmedBalanceSat': this.unconfirmedBalanceSat,
|
||||
'unconfirmedTxApperances': this.unconfirmedTxApperances,
|
||||
'txApperances': this.txApperances,
|
||||
'transactions': this.transactions
|
||||
};
|
||||
};
|
||||
|
||||
Address.prototype._addTxItem = function(txItem, txList, includeInfo) {
|
||||
function addTx(data) {
|
||||
if (!txList) return;
|
||||
if (includeInfo) {
|
||||
txList.push(data);
|
||||
} else {
|
||||
txList.push(data.txid);
|
||||
}
|
||||
};
|
||||
|
||||
var add=0, addSpend=0;
|
||||
var v = txItem.value_sat;
|
||||
var seen = this.seen;
|
||||
|
||||
// Founding tx
|
||||
if (!seen[txItem.txid]) {
|
||||
seen[txItem.txid] = 1;
|
||||
add = 1;
|
||||
|
||||
addTx({ txid: txItem.txid, ts: txItem.ts });
|
||||
}
|
||||
|
||||
// Spent tx
|
||||
if (txItem.spentTxId && !seen[txItem.spentTxId] ) {
|
||||
addTx({ txid: txItem.spentTxId, ts: txItem.spentTs });
|
||||
seen[txItem.spentTxId]=1;
|
||||
addSpend=1;
|
||||
}
|
||||
if (txItem.isConfirmed) {
|
||||
this.txApperances += add;
|
||||
this.totalReceivedSat += v;
|
||||
if (! txItem.spentTxId ) {
|
||||
//unspent
|
||||
this.balanceSat += v;
|
||||
}
|
||||
else if(!txItem.spentIsConfirmed) {
|
||||
// unspent
|
||||
this.balanceSat += v;
|
||||
this.unconfirmedBalanceSat -= v;
|
||||
this.unconfirmedTxApperances += addSpend;
|
||||
}
|
||||
else {
|
||||
// spent
|
||||
this.totalSentSat += v;
|
||||
this.txApperances += addSpend;
|
||||
}
|
||||
}
|
||||
else {
|
||||
this.unconfirmedBalanceSat += v;
|
||||
this.unconfirmedTxApperances += add;
|
||||
}
|
||||
};
|
||||
|
||||
// opts are
|
||||
// .onlyUnspent
|
||||
// .txLimit (=0 -> no txs, => -1 no limit)
|
||||
// .includeTxInfo
|
||||
//
|
||||
Address.prototype.update = function(next, opts) {
|
||||
var self = this;
|
||||
if (!self.addrStr) return next();
|
||||
opts = opts || {};
|
||||
|
||||
if (! ('ignoreCache' in opts) )
|
||||
opts.ignoreCache = config.ignoreCache;
|
||||
|
||||
// should collect txList from address?
|
||||
var txList = opts.txLimit === 0 ? null: [];
|
||||
|
||||
var tDb = TransactionDb;
|
||||
var bDb = BlockDb;
|
||||
tDb.fromAddr(self.addrStr, opts, function(err,txOut){
|
||||
if (err) return next(err);
|
||||
|
||||
bDb.fillConfirmations(txOut, function(err) {
|
||||
if (err) return next(err);
|
||||
|
||||
tDb.cacheConfirmations(txOut, function(err) {
|
||||
// console.log('[Address.js.161:txOut:]',txOut); //TODO
|
||||
if (err) return next(err);
|
||||
if (opts.onlyUnspent) {
|
||||
txOut = txOut.filter(function(x){
|
||||
return !x.spentTxId;
|
||||
});
|
||||
tDb.fillScriptPubKey(txOut, function() {
|
||||
self.unspent = txOut.map(function(x){
|
||||
return {
|
||||
address: self.addrStr,
|
||||
txid: x.txid,
|
||||
vout: x.index,
|
||||
ts: x.ts,
|
||||
scriptPubKey: x.scriptPubKey,
|
||||
amount: x.value_sat / BitcoreUtil.COIN,
|
||||
confirmations: x.isConfirmedCached ? (config.safeConfirmations) : x.confirmations,
|
||||
confirmationsFromCache: !!x.isConfirmedCached,
|
||||
};
|
||||
});
|
||||
return next();
|
||||
});
|
||||
}
|
||||
else {
|
||||
txOut.forEach(function(txItem){
|
||||
self._addTxItem(txItem, txList, opts.includeTxInfo);
|
||||
});
|
||||
if (txList)
|
||||
self.transactions = txList;
|
||||
|
||||
return next();
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
module.exports = require('soop')(Address);
|
||||
|
||||
105
app/models/Status.js
Normal file
105
app/models/Status.js
Normal file
@ -0,0 +1,105 @@
|
||||
'use strict';
|
||||
//var imports = require('soop').imports();
|
||||
|
||||
var async = require('async');
|
||||
var bitcore = require('bitcore');
|
||||
var RpcClient = bitcore.RpcClient;
|
||||
var config = require('../../config/config');
|
||||
var rpc = new RpcClient(config.bitcoind);
|
||||
var bDb = require('../../lib/BlockDb').default();
|
||||
|
||||
function Status() {}
|
||||
|
||||
Status.prototype.getInfo = function(next) {
|
||||
var that = this;
|
||||
async.series([
|
||||
function (cb) {
|
||||
rpc.getInfo(function(err, info){
|
||||
if (err) return cb(err);
|
||||
|
||||
that.info = info.result;
|
||||
return cb();
|
||||
});
|
||||
},
|
||||
], function (err) {
|
||||
return next(err);
|
||||
});
|
||||
};
|
||||
|
||||
Status.prototype.getDifficulty = function(next) {
|
||||
var that = this;
|
||||
async.series([
|
||||
function (cb) {
|
||||
rpc.getDifficulty(function(err, df){
|
||||
if (err) return cb(err);
|
||||
|
||||
that.difficulty = df.result;
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
], function (err) {
|
||||
return next(err);
|
||||
});
|
||||
};
|
||||
|
||||
Status.prototype.getTxOutSetInfo = function(next) {
|
||||
var that = this;
|
||||
async.series([
|
||||
function (cb) {
|
||||
rpc.getTxOutSetInfo(function(err, txout){
|
||||
if (err) return cb(err);
|
||||
|
||||
that.txoutsetinfo = txout.result;
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
], function (err) {
|
||||
return next(err);
|
||||
});
|
||||
};
|
||||
|
||||
Status.prototype.getBestBlockHash = function(next) {
|
||||
var that = this;
|
||||
async.series([
|
||||
function (cb) {
|
||||
rpc.getBestBlockHash(function(err, bbh){
|
||||
if (err) return cb(err);
|
||||
|
||||
that.bestblockhash = bbh.result;
|
||||
return cb();
|
||||
});
|
||||
},
|
||||
|
||||
], function (err) {
|
||||
return next(err);
|
||||
});
|
||||
};
|
||||
|
||||
Status.prototype.getLastBlockHash = function(next) {
|
||||
var that = this;
|
||||
bDb.getTip(function(err,tip) {
|
||||
that.syncTipHash = tip;
|
||||
async.waterfall(
|
||||
[
|
||||
function(callback){
|
||||
rpc.getBlockCount(function(err, bc){
|
||||
if (err) return callback(err);
|
||||
callback(null, bc.result);
|
||||
});
|
||||
},
|
||||
function(bc, callback){
|
||||
rpc.getBlockHash(bc, function(err, bh){
|
||||
if (err) return callback(err);
|
||||
callback(null, bh.result);
|
||||
});
|
||||
}
|
||||
],
|
||||
function (err, result) {
|
||||
that.lastblockhash = result;
|
||||
return next();
|
||||
}
|
||||
);
|
||||
});
|
||||
};
|
||||
|
||||
module.exports = require('soop')(Status);
|
||||
@ -1,4 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
var flocore = require('../lib/cli/flocore');
|
||||
flocore();
|
||||
109
config/config.js
Normal file
109
config/config.js
Normal file
@ -0,0 +1,109 @@
|
||||
'use strict';
|
||||
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var mkdirp = require('mkdirp');
|
||||
|
||||
var rootPath = path.normalize(__dirname + '/..'),
|
||||
env,
|
||||
db,
|
||||
port,
|
||||
b_port,
|
||||
p2p_port;
|
||||
|
||||
var packageStr = fs.readFileSync(rootPath + '/package.json');
|
||||
var version = JSON.parse(packageStr).version;
|
||||
|
||||
|
||||
function getUserHome() {
|
||||
return process.env[(process.platform === 'win32') ? 'USERPROFILE' : 'HOME'];
|
||||
}
|
||||
|
||||
var home = process.env.BLOCKCHAIN_API_DB || (getUserHome() + '/.bitcore-node');
|
||||
|
||||
if (process.env.BLOCKCHAIN_API_NETWORK === 'livenet') {
|
||||
env = 'livenet';
|
||||
db = home;
|
||||
port = '3000';
|
||||
b_port = '8332';
|
||||
p2p_port = '8333';
|
||||
} else {
|
||||
env = 'testnet';
|
||||
db = home + '/testnet';
|
||||
port = '3001';
|
||||
b_port = '18332';
|
||||
p2p_port = '18333';
|
||||
}
|
||||
port = parseInt(process.env.BLOCKCHAIN_API_PORT) || port;
|
||||
|
||||
|
||||
switch (process.env.NODE_ENV) {
|
||||
case 'production':
|
||||
env += '';
|
||||
break;
|
||||
case 'test':
|
||||
env += ' - test environment';
|
||||
break;
|
||||
default:
|
||||
env += ' - development';
|
||||
break;
|
||||
}
|
||||
|
||||
var network = process.env.BLOCKCHAIN_API_NETWORK || 'testnet';
|
||||
|
||||
var dataDir = process.env.BITCOIND_DATADIR;
|
||||
var isWin = /^win/.test(process.platform);
|
||||
var isMac = /^darwin/.test(process.platform);
|
||||
var isLinux = /^linux/.test(process.platform);
|
||||
if (!dataDir) {
|
||||
if (isWin) dataDir = '%APPDATA%\\Bitcoin\\';
|
||||
if (isMac) dataDir = process.env.HOME + '/Library/Application Support/Bitcoin/';
|
||||
if (isLinux) dataDir = process.env.HOME + '/.bitcoin/';
|
||||
}
|
||||
dataDir += network === 'testnet' ? 'testnet3' : '';
|
||||
|
||||
var safeConfirmations = process.env.BLOCKCHAIN_API_SAFE_CONFIRMATIONS || 6;
|
||||
var ignoreCache = process.env.BLOCKCHAIN_API_IGNORE_CACHE || 0;
|
||||
|
||||
|
||||
var bitcoindConf = {
|
||||
protocol: process.env.BITCOIND_PROTO || 'http',
|
||||
user: process.env.BITCOIND_USER || 'user',
|
||||
pass: process.env.BITCOIND_PASS || 'pass',
|
||||
host: process.env.BITCOIND_HOST || '127.0.0.1',
|
||||
port: process.env.BITCOIND_PORT || b_port,
|
||||
p2pPort: process.env.BITCOIND_P2P_PORT || p2p_port,
|
||||
p2pHost: process.env.BITCOIND_P2P_HOST || process.env.BITCOIND_HOST || '127.0.0.1',
|
||||
dataDir: dataDir,
|
||||
// DO NOT CHANGE THIS!
|
||||
disableAgent: true
|
||||
};
|
||||
|
||||
var loggerLevel = process.env.LOGGER_LEVEL || 'info';
|
||||
var enableHTTPS = process.env.ENABLE_HTTPS === 'true';
|
||||
|
||||
if (!fs.existsSync(db)) {
|
||||
mkdirp.sync(db);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
loggerLevel: loggerLevel,
|
||||
enableHTTPS: enableHTTPS,
|
||||
version: version,
|
||||
root: rootPath,
|
||||
publicPath: process.env.BLOCKCHAIN_API_PUBLIC_PATH || false,
|
||||
apiPrefix: '/api',
|
||||
port: port,
|
||||
leveldb: db,
|
||||
bitcoind: bitcoindConf,
|
||||
network: network,
|
||||
disableP2pSync: false,
|
||||
disableHistoricSync: false,
|
||||
poolMatchFile: rootPath + '/etc/minersPoolStrings.json',
|
||||
|
||||
keys: {
|
||||
segmentio: process.env.BLOCKCHAIN_API_SEGMENTIO_KEY
|
||||
},
|
||||
safeConfirmations: safeConfirmations, // PLEASE NOTE THAT *FULL RESYNC* IS NEEDED TO CHANGE safeConfirmations
|
||||
ignoreCache: ignoreCache,
|
||||
};
|
||||
71
config/express.js
Normal file
71
config/express.js
Normal file
@ -0,0 +1,71 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
var express = require('express');
|
||||
var config = require('./config');
|
||||
var path = require('path');
|
||||
var logger = require('../lib/logger').logger;
|
||||
|
||||
module.exports = function(app, historicSync, peerSync) {
|
||||
|
||||
|
||||
//custom middleware
|
||||
var setHistoric = function(req, res, next) {
|
||||
req.historicSync = historicSync;
|
||||
next();
|
||||
};
|
||||
|
||||
var setPeer = function(req, res, next) {
|
||||
req.peerSync = peerSync;
|
||||
next();
|
||||
};
|
||||
|
||||
app.set('showStackError', true);
|
||||
app.set('json spaces', 0);
|
||||
|
||||
app.enable('jsonp callback');
|
||||
app.use(config.apiPrefix + '/sync', setHistoric);
|
||||
app.use(config.apiPrefix + '/peer', setPeer);
|
||||
app.use(express.logger('dev'));
|
||||
app.use(express.json());
|
||||
app.use(express.urlencoded());
|
||||
app.use(express.methodOverride());
|
||||
app.use(express.compress());
|
||||
|
||||
if (config.enableEmailstore) {
|
||||
var allowCopayCrossDomain = function(req, res, next) {
|
||||
if ('OPTIONS' == req.method) {
|
||||
res.send(200);
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
next();
|
||||
}
|
||||
app.use(allowCopayCrossDomain);
|
||||
}
|
||||
|
||||
if (config.publicPath) {
|
||||
var staticPath = path.normalize(config.rootPath + '/../' + config.publicPath);
|
||||
//IMPORTANT: for html5mode, this line must to be before app.router
|
||||
app.use(express.static(staticPath));
|
||||
}
|
||||
|
||||
app.use(function(req, res, next) {
|
||||
app.locals.config = config;
|
||||
next();
|
||||
});
|
||||
|
||||
//routes should be at the last
|
||||
app.use(app.router);
|
||||
|
||||
//Assume 404 since no middleware responded
|
||||
app.use(function(req, res) {
|
||||
res.status(404).jsonp({
|
||||
status: 404,
|
||||
url: req.originalUrl,
|
||||
error: 'Not found'
|
||||
});
|
||||
});
|
||||
};
|
||||
14
config/headers.js
Normal file
14
config/headers.js
Normal file
@ -0,0 +1,14 @@
|
||||
'use strict';
|
||||
|
||||
var logger = require('../lib/logger').logger;
|
||||
|
||||
module.exports = function(app) {
|
||||
|
||||
app.use(function(req, res, next) {
|
||||
res.setHeader('Access-Control-Allow-Origin', '*');
|
||||
res.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS, PUT, DELETE');
|
||||
res.setHeader('Access-Control-Allow-Headers', 'X-Requested-With,Content-Type,Authorization');
|
||||
res.setHeader('Access-Control-Expose-Headers', 'X-Email-Needs-Validation,X-Quota-Per-Item,X-Quota-Items-Limit,X-RateLimit-Limit,X-RateLimit-Remaining');
|
||||
next();
|
||||
});
|
||||
};
|
||||
65
config/routes.js
Normal file
65
config/routes.js
Normal file
@ -0,0 +1,65 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
var config = require('./config');
|
||||
|
||||
module.exports = function(app) {
|
||||
|
||||
var apiPrefix = config.apiPrefix;
|
||||
|
||||
//Block routes
|
||||
var blocks = require('../app/controllers/blocks');
|
||||
app.get(apiPrefix + '/blocks', blocks.list);
|
||||
|
||||
|
||||
app.get(apiPrefix + '/block/:blockHash', blocks.show);
|
||||
app.param('blockHash', blocks.block);
|
||||
|
||||
app.get(apiPrefix + '/block-index/:height', blocks.blockindex);
|
||||
app.param('height', blocks.blockindex);
|
||||
|
||||
// Transaction routes
|
||||
var transactions = require('../app/controllers/transactions');
|
||||
app.get(apiPrefix + '/tx/:txid', transactions.show);
|
||||
app.param('txid', transactions.transaction);
|
||||
app.get(apiPrefix + '/txs', transactions.list);
|
||||
app.post(apiPrefix + '/tx/send', transactions.send);
|
||||
|
||||
// Address routes
|
||||
var addresses = require('../app/controllers/addresses');
|
||||
app.get(apiPrefix + '/addr/:addr', addresses.show);
|
||||
app.get(apiPrefix + '/addr/:addr/utxo', addresses.utxo);
|
||||
app.get(apiPrefix + '/addrs/:addrs/utxo', addresses.multiutxo);
|
||||
app.post(apiPrefix + '/addrs/utxo', addresses.multiutxo);
|
||||
app.get(apiPrefix + '/addrs/:addrs/txs', addresses.multitxs);
|
||||
app.post(apiPrefix + '/addrs/txs', addresses.multitxs);
|
||||
|
||||
// Address property routes
|
||||
app.get(apiPrefix + '/addr/:addr/balance', addresses.balance);
|
||||
app.get(apiPrefix + '/addr/:addr/totalReceived', addresses.totalReceived);
|
||||
app.get(apiPrefix + '/addr/:addr/totalSent', addresses.totalSent);
|
||||
app.get(apiPrefix + '/addr/:addr/unconfirmedBalance', addresses.unconfirmedBalance);
|
||||
|
||||
// Status route
|
||||
var st = require('../app/controllers/status');
|
||||
app.get(apiPrefix + '/status', st.show);
|
||||
|
||||
app.get(apiPrefix + '/sync', st.sync);
|
||||
app.get(apiPrefix + '/peer', st.peer);
|
||||
|
||||
// Currency
|
||||
var currency = require('../app/controllers/currency');
|
||||
app.get(apiPrefix + '/currency', currency.index);
|
||||
|
||||
// Address routes
|
||||
var messages = require('../app/controllers/messages');
|
||||
app.get(apiPrefix + '/messages/verify', messages.verify);
|
||||
app.post(apiPrefix + '/messages/verify', messages.verify);
|
||||
|
||||
//Home route
|
||||
var index = require('../app/controllers/index');
|
||||
app.get(apiPrefix + '/version', index.version);
|
||||
app.get('*', index.render);
|
||||
};
|
||||
1
contrib/.gitignore
vendored
1
contrib/.gitignore
vendored
@ -1 +0,0 @@
|
||||
*.json
|
||||
@ -1,32 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
var request = require('request');
|
||||
var config = require('./config.json');
|
||||
|
||||
// each of those addresses has a large number of utxos
|
||||
|
||||
// we are going to act like this group of addresses is our wallet, this ought to be fun!
|
||||
|
||||
process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0';
|
||||
|
||||
var url = config.txs.new;
|
||||
|
||||
if (process.argv[2] === 'old') {
|
||||
url = config.txs.old;
|
||||
}
|
||||
|
||||
console.log(url);
|
||||
|
||||
var options = {
|
||||
url: url,
|
||||
method: 'POST',
|
||||
qs: { from: 0, to: 5, noAsm: 1, noScriptSig: 1, noSpent: 1 },
|
||||
json: { addrs: config.addrs }
|
||||
};
|
||||
|
||||
request(options, function(err, response, body) {
|
||||
console.log(body);
|
||||
});
|
||||
|
||||
|
||||
|
||||
@ -1,63 +0,0 @@
|
||||
'use strict';
|
||||
// pulls some rando utxos that can be used for testing
|
||||
|
||||
var levelup = require('levelup');
|
||||
var leveldown = require('leveldown');
|
||||
var Encoding = require('../lib/services/address/encoding');
|
||||
var fs = require('fs');
|
||||
var outputFile = '/tmp/large_amounts_utxos.json';
|
||||
var addresses = [];
|
||||
|
||||
var dbLocation = process.argv[2];
|
||||
|
||||
console.log('Using db location: ', dbLocation);
|
||||
|
||||
var addressPrefix = new Buffer('0006', 'hex');
|
||||
|
||||
var startAddress = new Array(35).join('0');
|
||||
var endAddress = new Array(35).join('f');
|
||||
|
||||
var store = levelup(leveldown(dbLocation), {
|
||||
keyEncoding: 'binary',
|
||||
valueEncoding: 'binary'
|
||||
});
|
||||
|
||||
var encoding = new Encoding(addressPrefix);
|
||||
|
||||
var start = encoding.encodeUtxoIndexKey(startAddress);
|
||||
var end = encoding.encodeUtxoIndexKey(endAddress);
|
||||
var res = {};
|
||||
var limit = 18000000;
|
||||
var count = 0;
|
||||
|
||||
var stream = store.createReadStream({
|
||||
gte: start,
|
||||
lte: end
|
||||
});
|
||||
|
||||
|
||||
stream.on('data', function(data) {
|
||||
count++;
|
||||
limit--;
|
||||
if (limit <= 0) {
|
||||
stream.emit('end');
|
||||
}
|
||||
var key = encoding.decodeUtxoIndexKey(data.key);
|
||||
if (res[key.address] >= 1) {
|
||||
res[key.address]++;
|
||||
} else {
|
||||
res[key.address] = 1;
|
||||
}
|
||||
});
|
||||
|
||||
stream.on('end', function() {
|
||||
Object.keys(res).map(function(key) {
|
||||
if (res[key] > 1000) {
|
||||
addresses.push(key);
|
||||
}
|
||||
});
|
||||
fs.writeFileSync(outputFile, JSON.stringify(addresses));
|
||||
console.log('total utxo count: ', count);
|
||||
console.log('done');
|
||||
});
|
||||
|
||||
@ -1,48 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
var levelup = require('levelup');
|
||||
var leveldown = require('leveldown');
|
||||
var Encoding = require('../lib/services/address/encoding');
|
||||
var dbPath = '/Users/chrisk/.bwdb/flocore-node.db';
|
||||
var flocore = require('flocore-lib');
|
||||
var db = levelup(dbPath, {keyEncoding: 'binary', valueEncoding: 'binary'});
|
||||
|
||||
var prefix = new Buffer('0002', 'hex');
|
||||
var encoding = new Encoding(prefix);
|
||||
var address = '1MfDRRVVKXUe5KNVZzu8CBzUZDHTTYZM94';
|
||||
var addressLength = new Buffer(1);
|
||||
addressLength.writeUInt8(address.length);
|
||||
|
||||
//var startBuffer = prefix;
|
||||
//var endBuffer = Buffer.concat([prefix, new Buffer('ff', 'hex')]);
|
||||
|
||||
//var startBuffer = Buffer.concat([prefix, addressLength, new Buffer(address, 'utf8'), new Buffer('00', 'hex')]);
|
||||
//var endBuffer = Buffer.concat([prefix, addressLength, new Buffer(address, 'utf8'), new Buffer('01', 'hex')]);
|
||||
var start = Buffer.concat([prefix, new Buffer('0437cd7f8525ceed2324359c2d0ba26006d92d856a9c20fa0241106ee5a597c9', 'hex')]);
|
||||
var end = Buffer.concat([prefix, new Buffer('0437cd7f8525ceed2324359c2d0ba26006d92d856a9c20fa0241106ee5a597c9', 'hex'), new Buffer('01', 'hex')]);
|
||||
var stream = db.createReadStream({
|
||||
gte: start,
|
||||
lt: end
|
||||
});
|
||||
stream.on('data', function(data) {
|
||||
var txkey = data.key.slice(2).toString('hex');
|
||||
var height = data.value.readUInt32BE();
|
||||
var timestamp = data.value.readDoubleBE(4);
|
||||
var inputValues = [];
|
||||
var inputValuesLength = data.value.readUInt16BE(12);
|
||||
for(var i = 0; i < inputValuesLength / 8; i++) {
|
||||
inputValues.push(buffer.readDoubleBE(i * 8 + 14));
|
||||
}
|
||||
var transaction = new flocore.Transaction(data.value.slice(inputValues.length * 8 + 14));
|
||||
transaction.__height = height;
|
||||
transaction.__inputValues = inputValues;
|
||||
transaction.__timestamp = timestamp;
|
||||
//console.log(txkey, transaction.toObject());
|
||||
console.log(data.value);
|
||||
console.log(transaction.__height, transaction.__inputValues, transaction.__timestamp);
|
||||
//console.log(data.key.toString('hex'), data.value.toString('hex'));
|
||||
});
|
||||
|
||||
stream.on('end', function() {
|
||||
console.log('end');
|
||||
});
|
||||
@ -1,11 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# helper script to run bwdb and/or restart it
|
||||
|
||||
# execute thie script and then simply tail /tmp/bwdb-out
|
||||
# e.g. ./contrib/restart_bwdb.sh && tail -f /tmp/bwdb-out
|
||||
|
||||
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
pkill -2 -x flocore
|
||||
wait
|
||||
exec $DIR/../bin/flocore-node start >> /tmp/bwdb-out 2>&1 &
|
||||
@ -1,149 +0,0 @@
|
||||
'use strict'
|
||||
var Readable = require('stream').Readable;
|
||||
var Writable = require('stream').Writable;
|
||||
var Transform = require('stream').Transform;
|
||||
var inherits = require('util').inherits;
|
||||
var async = require('async');
|
||||
|
||||
function main() {
|
||||
var blockStream = new BlockStream();
|
||||
var processConcurrent = new ProcessConcurrent();
|
||||
var processSerial = new ProcessSerial();
|
||||
var writeStreamFast = new WriteStreamFast();
|
||||
var writeStreamSlow = new WriteStreamSlow();
|
||||
|
||||
var start = Date.now();
|
||||
|
||||
writeStreamFast.on('finish', function() {
|
||||
var end = Date.now();
|
||||
console.log('Total time: ', (end - start) + ' ms');
|
||||
console.log('Concurrent write time: ', writeStreamSlow.writeTime + ' ms');
|
||||
console.log('Serial write time: ', writeStreamFast.writeTime + ' ms');
|
||||
});
|
||||
|
||||
blockStream
|
||||
.pipe(processConcurrent)
|
||||
.pipe(writeStreamSlow);
|
||||
|
||||
blockStream
|
||||
.pipe(processSerial)
|
||||
.pipe(writeStreamFast);
|
||||
}
|
||||
|
||||
function BlockStream() {
|
||||
Readable.call(this, {objectMode: true, highWaterMark: 10});
|
||||
this.height = 0;
|
||||
}
|
||||
|
||||
inherits(BlockStream, Readable);
|
||||
|
||||
BlockStream.prototype._read = function() {
|
||||
var self = this;
|
||||
console.log('_read');
|
||||
|
||||
setTimeout(function() {
|
||||
self.height++;
|
||||
if(self.height > 40) {
|
||||
self.push(null);
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('ReadStream block ', self.height);
|
||||
console.log(self.push({height: self.height}));
|
||||
}, 500);
|
||||
};
|
||||
|
||||
function ProcessSerial() {
|
||||
Transform.call(this, {objectMode: true, highWaterMark: 10});
|
||||
}
|
||||
|
||||
inherits(ProcessSerial, Transform);
|
||||
|
||||
ProcessSerial.prototype._transform = function(block, enc, callback) {
|
||||
var operations = [{index1: block.height}, {index2: block.height}];
|
||||
setTimeout(function() {
|
||||
var obj = {
|
||||
tipHeight: block.height,
|
||||
operations: operations
|
||||
};
|
||||
|
||||
callback(null, obj);
|
||||
}, 100);
|
||||
};
|
||||
|
||||
function ProcessConcurrent() {
|
||||
Transform.call(this, {objectMode: true, highWaterMark: 10});
|
||||
this.operations = [];
|
||||
this.lastHeight = 0;
|
||||
};
|
||||
|
||||
inherits(ProcessConcurrent, Transform);
|
||||
|
||||
ProcessConcurrent.prototype._transform = function(block, enc, callback) {
|
||||
var self = this;
|
||||
|
||||
self.lastHeight = block.height;
|
||||
|
||||
setTimeout(function() {
|
||||
self.operations = self.operations.concat([{index3: block.height}, {index4: block.height}]);
|
||||
|
||||
console.log(self.operations.length);
|
||||
if(self.operations.length >= 10) {
|
||||
var obj = {
|
||||
concurrentTipHeight: self.lastHeight,
|
||||
operations: self.operations
|
||||
};
|
||||
self.operations = [];
|
||||
|
||||
return callback(null, obj);
|
||||
}
|
||||
|
||||
callback();
|
||||
}, 100);
|
||||
};
|
||||
|
||||
ProcessConcurrent.prototype._flush = function(callback) {
|
||||
if(this.operations.length) {
|
||||
var obj = {
|
||||
concurrentTipHeight: this.lastHeight,
|
||||
operations: this.operations
|
||||
};
|
||||
|
||||
this.operations = [];
|
||||
return callback(null, operations);
|
||||
}
|
||||
};
|
||||
|
||||
function WriteStreamSlow() {
|
||||
Writable.call(this, {objectMode: true, highWaterMark: 10});
|
||||
this.writeTime = 0;
|
||||
}
|
||||
|
||||
inherits(WriteStreamSlow, Writable);
|
||||
|
||||
WriteStreamSlow.prototype._write = function(operations, enc, callback) {
|
||||
var self = this;
|
||||
setTimeout(function() {
|
||||
console.log('WriteStreamSlow block ', operations.concurrentTipHeight);
|
||||
self.writeTime += 2000;
|
||||
callback();
|
||||
}, 2000);
|
||||
};
|
||||
|
||||
function WriteStreamFast() {
|
||||
Writable.call(this, {objectMode: true, highWaterMark: 1});
|
||||
this.writeTime = 0;
|
||||
}
|
||||
|
||||
inherits(WriteStreamFast, Writable);
|
||||
|
||||
WriteStreamFast.prototype._write = function(operations, enc, callback) {
|
||||
var self = this;
|
||||
setTimeout(function() {
|
||||
console.log('WriteStreamFast block ', operations.tipHeight);
|
||||
self.writeTime += 1000;
|
||||
callback();
|
||||
}, 1000);
|
||||
};
|
||||
|
||||
main();
|
||||
@ -1,20 +0,0 @@
|
||||
[Unit]
|
||||
Description=BWDB
|
||||
Requires=network.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
WorkingDirectory=/usr/opt/flocore
|
||||
ExecStart=/usr/bin/bwdb
|
||||
ExecReload=/bin/kill -HUP $MAINPID
|
||||
Restart=on-failure
|
||||
RestartSec=15
|
||||
User=flocore
|
||||
ExecStartPre=/bin/mkdir -p /run/bwdb
|
||||
ExecStartPre=/bin/chown flocore:flocore /run/bwdb
|
||||
ExecStartPre=/bin/chmod 755 /run/bwdb
|
||||
PermissionsStartOnly=true
|
||||
TimeoutStopSec=300
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
File diff suppressed because one or more lines are too long
25
dev-util/block-level.js
Executable file
25
dev-util/block-level.js
Executable file
@ -0,0 +1,25 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
var
|
||||
config = require('../config/config'),
|
||||
levelup = require('levelup');
|
||||
|
||||
|
||||
db = levelup(config.leveldb + '/blocks');
|
||||
|
||||
db.createReadStream({start: 'b-'})
|
||||
.on('data', function (data) {
|
||||
console.log('[block-level.js.11:data:]',data); //TODO
|
||||
if (data==false) c++;
|
||||
})
|
||||
.on('error', function (err) {
|
||||
return cb(err);
|
||||
})
|
||||
.on('close', function () {
|
||||
return cb(null);
|
||||
})
|
||||
.on('end', function () {
|
||||
return cb(null);
|
||||
});
|
||||
|
||||
|
||||
26
dev-util/dbdump.js
Normal file
26
dev-util/dbdump.js
Normal file
@ -0,0 +1,26 @@
|
||||
#!/usr/bin/env node
|
||||
'use strict';
|
||||
|
||||
var levelup = require('levelup');
|
||||
|
||||
|
||||
|
||||
var dbPath = process.argv[2];
|
||||
var s = process.argv[3];
|
||||
console.log('DB: ',dbPath); //TODO
|
||||
|
||||
|
||||
|
||||
var db = levelup(dbPath );
|
||||
|
||||
|
||||
db.createReadStream({start: s, end: s+'~'})
|
||||
.on('data', function (data) {
|
||||
console.log(data.key + ' => ' + data.value); //TODO
|
||||
})
|
||||
.on('error', function () {
|
||||
})
|
||||
.on('end', function () {
|
||||
});
|
||||
|
||||
|
||||
41
dev-util/explode_tx.js
Executable file
41
dev-util/explode_tx.js
Executable file
@ -0,0 +1,41 @@
|
||||
#!/usr/bin/env node
|
||||
'use strict';
|
||||
|
||||
var util = require('util');
|
||||
var mongoose= require('mongoose'),
|
||||
config = require('../config/config');
|
||||
|
||||
process.env.NODE_ENV = process.env.NODE_ENV || 'development';
|
||||
|
||||
var T = require('../app/models/TransactionOut');
|
||||
|
||||
|
||||
// var hash = process.argv[2] || '0000000000b6288775bbd326bedf324ca8717a15191da58391535408205aada4';
|
||||
var hash = process.argv[2] || '6749762ae220c10705556799dcec9bb6a54a7b881eb4b961323a3363b00db518';
|
||||
|
||||
|
||||
|
||||
|
||||
mongoose.connect(config.db);
|
||||
|
||||
mongoose.connection.on('error', function(err) { console.log(err); });
|
||||
|
||||
|
||||
mongoose.connection.on('open', function() {
|
||||
|
||||
var b = new Buffer(hash,'hex');
|
||||
|
||||
T.createFromTxs([hash], function(err, ret) {
|
||||
|
||||
console.log('Err:');
|
||||
console.log(err);
|
||||
|
||||
|
||||
console.log('Ret:');
|
||||
console.log(util.inspect(ret,{depth:null}));
|
||||
mongoose.connection.close();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
|
||||
33
dev-util/find_ref.sh
Executable file
33
dev-util/find_ref.sh
Executable file
@ -0,0 +1,33 @@
|
||||
#!/bin/bash
|
||||
|
||||
FIND='find';
|
||||
|
||||
##if [[ "$OSTYPE" =~ "darwin" ]]
|
||||
##then
|
||||
## FIND='gfind'
|
||||
##fi
|
||||
|
||||
|
||||
if [ -z "$1" ]
|
||||
then
|
||||
echo "$0 : find functions references "
|
||||
echo "Usage $0 function_name "
|
||||
exit;
|
||||
fi
|
||||
|
||||
EXTRA=''
|
||||
|
||||
|
||||
CMD="grep -rnH"
|
||||
|
||||
if [ "$2" != '--nocolor' ]
|
||||
then
|
||||
CMD="$CMD --color=always"
|
||||
fi
|
||||
|
||||
|
||||
$FIND -L . -name \*.json -not -wholename \*node_modules\* -not -wholename \*public/lib\* -exec $CMD "$1" {} + \
|
||||
-o -name \*.html -not -wholename \*node_modules\* -not -wholename \*public/lib\* -exec $CMD "$1" {} + \
|
||||
-o -name \*.jade -not -wholename \*node_modules\* -not -wholename \*public/lib\* -exec $CMD "$1" {} + \
|
||||
-o -name \*.js -not -wholename \*node_modules\* -not -wholename \*public/lib\* -exec $CMD "$1" {} +
|
||||
|
||||
25
dev-util/getAddr.js
Executable file
25
dev-util/getAddr.js
Executable file
@ -0,0 +1,25 @@
|
||||
#!/usr/bin/env node
|
||||
'use strict';
|
||||
|
||||
var util = require('util'),
|
||||
config = require('../config/config');
|
||||
|
||||
process.env.NODE_ENV = process.env.NODE_ENV || 'development';
|
||||
|
||||
var A = require('../app/models/Address');
|
||||
|
||||
// var hash = process.argv[2] || '0000000000b6288775bbd326bedf324ca8717a15191da58391535408205aada4';
|
||||
var hash = process.argv[2] || 'mp3Rzxx9s1A21SY3sjJ3CQoa2Xjph7e5eS';
|
||||
|
||||
var a= new A(hash);
|
||||
a.update(function(err) {
|
||||
console.log('Err:');
|
||||
console.log(err);
|
||||
|
||||
console.log('Ret:');
|
||||
console.log(util.inspect(a,{depth:null}));
|
||||
|
||||
})
|
||||
|
||||
|
||||
|
||||
21
dev-util/getTx.js
Executable file
21
dev-util/getTx.js
Executable file
@ -0,0 +1,21 @@
|
||||
#!/usr/bin/env node
|
||||
'use strict';
|
||||
|
||||
var util = require('util'),
|
||||
config = require('../config/config');
|
||||
|
||||
process.env.NODE_ENV = process.env.NODE_ENV || 'development';
|
||||
|
||||
var TransactionDb = require('../lib/TransactionDb.js').default();
|
||||
var hash = process.argv[2] || '4286d6fc82a314348af4e9d3ce649f78ce4569937e9ad6613563755f0d14e3d1';
|
||||
|
||||
var t= TransactionDb.fromIdWithInfo(hash,function(err,tx) {
|
||||
console.log('Err:');
|
||||
console.log(err);
|
||||
|
||||
console.log('Ret:');
|
||||
console.log(util.inspect(tx,{depth:null}));
|
||||
});
|
||||
|
||||
|
||||
|
||||
31
dev-util/get_block.js
Executable file
31
dev-util/get_block.js
Executable file
@ -0,0 +1,31 @@
|
||||
#!/usr/bin/env node
|
||||
'use strict';
|
||||
|
||||
var util = require('util');
|
||||
|
||||
process.env.NODE_ENV = process.env.NODE_ENV || 'development';
|
||||
|
||||
var RpcClient = require('../node_modules/bitcore/RpcClient');
|
||||
|
||||
var config = require('../config/config');
|
||||
|
||||
|
||||
var hash = process.argv[2] || '0000000000b6288775bbd326bedf324ca8717a15191da58391535408205aada4';
|
||||
//var hash = process.argv[2] || 'f6c2901f39fd07f2f2e503183d76f73ecc1aee9ac9216fde58e867bc29ce674e';
|
||||
|
||||
//hash = 'e2253359458db3e732c82a43fc62f56979ff59928f25a2df34dfa443e9a41160';
|
||||
|
||||
var rpc = new RpcClient(config.bitcoind);
|
||||
|
||||
rpc.getBlock( hash, function(err, ret) {
|
||||
|
||||
console.log('Err:');
|
||||
console.log(err);
|
||||
|
||||
|
||||
console.log('Ret:');
|
||||
console.log(util.inspect(ret, { depth: 10} ));
|
||||
});
|
||||
|
||||
|
||||
|
||||
34
dev-util/level-put.js
Executable file
34
dev-util/level-put.js
Executable file
@ -0,0 +1,34 @@
|
||||
#!/usr/bin/env node
|
||||
'use strict';
|
||||
|
||||
var config = require('../config/config'),
|
||||
levelup = require('levelup');
|
||||
|
||||
|
||||
|
||||
var k = process.argv[2];
|
||||
var v = process.argv[3];
|
||||
var isBlock = process.argv[4] === '1';
|
||||
|
||||
|
||||
var dbPath = config.leveldb + (isBlock ? '/blocks' : '/txs');
|
||||
console.log('DB: ',dbPath); //TODO
|
||||
|
||||
|
||||
|
||||
var db = levelup(dbPath );
|
||||
|
||||
|
||||
if (v) {
|
||||
db.put(k,v,function(err) {
|
||||
console.log('[PUT done]',err); //TODO
|
||||
});
|
||||
}
|
||||
else {
|
||||
db.del(k,function(err) {
|
||||
console.log('[DEL done]',err); //TODO
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
|
||||
30
dev-util/level.js
Executable file
30
dev-util/level.js
Executable file
@ -0,0 +1,30 @@
|
||||
#!/usr/bin/env node
|
||||
'use strict';
|
||||
|
||||
var config = require('../config/config'),
|
||||
levelup = require('levelup');
|
||||
|
||||
|
||||
|
||||
var s = process.argv[2];
|
||||
var isBlock = process.argv[3] === '1';
|
||||
|
||||
|
||||
var dbPath = config.leveldb + (isBlock ? '/blocks' : '/txs');
|
||||
console.log('DB: ',dbPath); //TODO
|
||||
|
||||
|
||||
|
||||
var db = levelup(dbPath );
|
||||
|
||||
|
||||
db.createReadStream({start: s, end: s+'~'})
|
||||
.on('data', function (data) {
|
||||
console.log(data.key + ' => ' + data.value); //TODO
|
||||
})
|
||||
.on('error', function () {
|
||||
})
|
||||
.on('end', function () {
|
||||
});
|
||||
|
||||
|
||||
25
dev-util/read_block.js
Executable file
25
dev-util/read_block.js
Executable file
@ -0,0 +1,25 @@
|
||||
#!/usr/bin/env node
|
||||
'use strict';
|
||||
|
||||
process.env.NODE_ENV = process.env.NODE_ENV || 'development';
|
||||
|
||||
var assert = require('assert'),
|
||||
config = require('../config/config'),
|
||||
BlockExtractor = require('../lib/BlockExtractor').class(),
|
||||
networks = require('bitcore/networks'),
|
||||
util = require('bitcore/util/util');
|
||||
|
||||
var be = new BlockExtractor(config.bitcoind.dataDir, config.network);
|
||||
var network = config.network === 'testnet' ? networks.testnet: networks.livenet;
|
||||
// console.log('[read_block.js.13]', be.nextFile() );
|
||||
|
||||
var c=0;
|
||||
while (c++ < 100) {
|
||||
be.getNextBlock(function(err, b) {
|
||||
console.log('[read_block.js.14]',err, c, b?util.formatHashAlt(b.hash):''); //TODO
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
70
dev-util/stats
Normal file
70
dev-util/stats
Normal file
@ -0,0 +1,70 @@
|
||||
|
||||
|
||||
first 5%
|
||||
|
||||
=> with data + mongo + w/RPC for blocks: 48.8s
|
||||
=> with RPC + mongo: 2m26s
|
||||
=> with files + mongo + wo/RPC for blocks: 36.7s
|
||||
=> with files + mongo + wo/RPC for blocks + wo/mongoIndexes:
|
||||
|
||||
|
||||
first 10%
|
||||
|
||||
=> sin RPC, sin Tx, sin store block => 0.7s
|
||||
=> sin RPC, sin grabar, procesando TX => 8.5s
|
||||
=> sin RPC, sin TX processing, sin grabar => 12s28
|
||||
=> con RPC, TX processing, sin Grabar Tx, grabando bloques => 29s
|
||||
=> con RPC, sin TX processing, sin Grabar Tx, grabando bloques => 35s
|
||||
=> con RPC, TX processing, sin Grabar Tx, grabando bloques => 43s
|
||||
|
||||
=> TX processing, sin RPC, sin saves TX, y blocks => 11.6s
|
||||
=> TX processing, CON RPC, sin saves TX, y blocks => 35s
|
||||
=> con RPC, TX processing, sin saves TX => 45s
|
||||
=> con RPC, TX processing, Grabarndo todo => 78s
|
||||
=> con RPC, TX processing, Grabarndo todo => 78s
|
||||
(18k blocks, 36k txouts)
|
||||
|
||||
//LEVEL DB
|
||||
=> sin RPC, TX processing, todo en level => 14s
|
||||
=> con RPC, TX processing, todo en level => 39.7s
|
||||
=> con RPC, TX processing, tx mongo, blocks en level => 64s
|
||||
|
||||
|
||||
=> sin RPC, TX processing, todo en level, handling REORGs, more data => 28s
|
||||
=> sin RPC, TX processing, todo en level, handling REORGs, more data, tx ts => 34t s
|
||||
|
||||
|
||||
//FROM blk00002.dat (more txs), 5%
|
||||
|
||||
=> now total : 1m13s
|
||||
=> removing block writes => 1m8s
|
||||
=> sacando los contenidos adentro de getblock from file de => 4.5s!!
|
||||
|
||||
=> con base58 cpp => 21s
|
||||
=> toda la testnet => 17m !!
|
||||
|
||||
10% de blk2
|
||||
=> 50s con base58cpp
|
||||
=> 41s commentando todo addr
|
||||
=> 5s commentando todo get HistoricSync.prototype.getBlockFromFile = function(cb) {
|
||||
=> 15s commentando todo get HistoricSync.prototype.getBlockFromFile = function(cb) {
|
||||
|
||||
10% de blk 1
|
||||
=> 59s
|
||||
=> 15s comentando desde b.getStandardizedObject()
|
||||
=> 39s comentando dps b.getStandardizedObject()
|
||||
|
||||
|
||||
Mon Mar 10 11:59:25 ART 2014
|
||||
10% de blk 0 (testnet)
|
||||
=> 37s
|
||||
|
||||
Thu May 22 13:42:50 ART 2014 (base58check + toString opts + custom getStandardizedObject)
|
||||
10% testnet
|
||||
=> 29s
|
||||
|
||||
|
||||
100% testnet
|
||||
=> 17m10s
|
||||
|
||||
|
||||
20
dev-util/status_info.js
Executable file
20
dev-util/status_info.js
Executable file
@ -0,0 +1,20 @@
|
||||
#!/usr/bin/env node
|
||||
process.env.NODE_ENV = process.env.NODE_ENV || 'development';
|
||||
|
||||
var RpcClient = require('../node_modules/bitcore/RpcClient').class();
|
||||
|
||||
var config = require('../config/config');
|
||||
|
||||
var rpc = new RpcClient(config.bitcoind);
|
||||
|
||||
var block = rpc.getInfo(function(err, block) {
|
||||
if (err) {
|
||||
console.log("Err:");
|
||||
console.log(err);
|
||||
}
|
||||
|
||||
console.log("Block info:");
|
||||
console.log(block);
|
||||
});
|
||||
|
||||
|
||||
17
dev-util/sync-level.js
Normal file
17
dev-util/sync-level.js
Normal file
@ -0,0 +1,17 @@
|
||||
#!/usr/bin/env node
|
||||
'use strict';
|
||||
|
||||
var Sync = require('../lib/Sync').class();
|
||||
|
||||
|
||||
var s = new Sync();
|
||||
|
||||
|
||||
s.setOrphan(
|
||||
'0000000000c2b1e8dab92a72741289e5ef0d4f375fd1b26f729da2ba979c028a',
|
||||
'000000000228f9d02654459e09998c7557afa9082784c11226853f5feb805df9',
|
||||
function (err) {
|
||||
console.log('[sync-level.js.15]',err); //TODO
|
||||
});
|
||||
|
||||
|
||||
30
docs/bus.md
30
docs/bus.md
@ -1,30 +0,0 @@
|
||||
# Bus
|
||||
The bus provides a way to subscribe to events from any of the services running. It's implemented abstract from transport specific implementation. The primary use of the bus in Flocore Node is for subscribing to events via a web socket.
|
||||
|
||||
## Opening/Closing
|
||||
|
||||
```javascript
|
||||
|
||||
// a node is needed to be able to open a bus
|
||||
var node = new Node(configuration);
|
||||
|
||||
// will create a new bus that is ready to subscribe to events
|
||||
var bus = node.openBus();
|
||||
|
||||
// will remove all event listeners
|
||||
bus.close();
|
||||
```
|
||||
|
||||
## Subscribing/Unsubscribing
|
||||
|
||||
```javascript
|
||||
|
||||
// subscribe to all transaction events
|
||||
bus.subscribe('florincoind/rawtransaction');
|
||||
|
||||
// to subscribe to new block hashes
|
||||
bus.subscribe('florincoind/hashblock');
|
||||
|
||||
// unsubscribe
|
||||
bus.unsubscribe('florincoind/rawtransaction');
|
||||
```
|
||||
@ -1,162 +0,0 @@
|
||||
# Setting up Development Environment
|
||||
|
||||
## Install Node.js
|
||||
|
||||
Install Node.js by your favorite method, or use Node Version Manager by following directions at https://github.com/creationix/nvm
|
||||
|
||||
```bash
|
||||
nvm install v4
|
||||
```
|
||||
|
||||
## Fork and Download Repositories
|
||||
|
||||
To develop flocore-node:
|
||||
|
||||
```bash
|
||||
cd ~
|
||||
git clone git@github.com:<yourusername>/flocore-node.git
|
||||
git clone git@github.com:<yourusername>/flocore-lib.git
|
||||
```
|
||||
|
||||
To develop florincoin or to compile from source:
|
||||
|
||||
```bash
|
||||
git clone git@github.com:<yourusername>/florincoin.git
|
||||
git fetch origin <branchname>:<branchname>
|
||||
git checkout <branchname>
|
||||
```
|
||||
**Note**: See florincoin documentation for building florincoin on your platform.
|
||||
|
||||
|
||||
## Install Development Dependencies
|
||||
|
||||
For Ubuntu:
|
||||
```bash
|
||||
sudo apt-get install libzmq3-dev
|
||||
sudo apt-get install build-essential
|
||||
```
|
||||
**Note**: Make sure that libzmq-dev is not installed, it should be removed when installing libzmq3-dev.
|
||||
|
||||
|
||||
For Mac OS X:
|
||||
```bash
|
||||
brew install zeromq
|
||||
```
|
||||
|
||||
## Install and Symlink
|
||||
|
||||
```bash
|
||||
cd flocore-lib
|
||||
npm install
|
||||
cd ../flocore-node
|
||||
npm install
|
||||
```
|
||||
**Note**: If you get a message about not being able to download florincoin distribution, you'll need to compile florincoind from source, and setup your configuration to use that version.
|
||||
|
||||
|
||||
We now will setup symlinks in `flocore-node` *(repeat this for any other modules you're planning on developing)*:
|
||||
```bash
|
||||
cd node_modules
|
||||
rm -rf flocore-lib
|
||||
ln -s ~/flocore-lib
|
||||
rm -rf florincoind-rpc
|
||||
ln -s ~/florincoind-rpc
|
||||
```
|
||||
|
||||
And if you're compiling or developing florincoin:
|
||||
```bash
|
||||
cd ../bin
|
||||
ln -sf ~/florincoin/src/florincoind
|
||||
```
|
||||
|
||||
## Run Tests
|
||||
|
||||
If you do not already have mocha installed:
|
||||
```bash
|
||||
npm install mocha -g
|
||||
```
|
||||
|
||||
To run all test suites:
|
||||
```bash
|
||||
cd flocore-node
|
||||
npm run regtest
|
||||
npm run test
|
||||
```
|
||||
|
||||
To run a specific unit test in watch mode:
|
||||
```bash
|
||||
mocha -w -R spec test/services/florincoind.unit.js
|
||||
```
|
||||
|
||||
To run a specific regtest:
|
||||
```bash
|
||||
mocha -R spec regtest/florincoind.js
|
||||
```
|
||||
|
||||
## Running a Development Node
|
||||
|
||||
To test running the node, you can setup a configuration that will specify development versions of all of the services:
|
||||
|
||||
```bash
|
||||
cd ~
|
||||
mkdir devnode
|
||||
cd devnode
|
||||
mkdir node_modules
|
||||
touch flocore-node.json
|
||||
touch package.json
|
||||
```
|
||||
|
||||
Edit `flocore-node.json` with something similar to:
|
||||
```json
|
||||
{
|
||||
"network": "livenet",
|
||||
"port": 3001,
|
||||
"services": [
|
||||
"florincoind",
|
||||
"web",
|
||||
"insight-api",
|
||||
"insight-ui",
|
||||
"<additional_service>"
|
||||
],
|
||||
"servicesConfig": {
|
||||
"florincoind": {
|
||||
"spawn": {
|
||||
"datadir": "/home/<youruser>/.florincoin",
|
||||
"exec": "/home/<youruser>/florincoin/src/florincoind"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Note**: To install services [insight-api](https://github.com/bitpay/insight-api) and [insight-ui](https://github.com/bitpay/insight-ui) you'll need to clone the repositories locally.
|
||||
|
||||
Setup symlinks for all of the services and dependencies:
|
||||
|
||||
```bash
|
||||
cd node_modules
|
||||
ln -s ~/flocore-lib
|
||||
ln -s ~/flocore-node
|
||||
ln -s ~/insight-api
|
||||
ln -s ~/insight-ui
|
||||
```
|
||||
|
||||
Make sure that the `<datadir>/florincoin.conf` has the necessary settings, for example:
|
||||
```
|
||||
server=1
|
||||
whitelist=127.0.0.1
|
||||
txindex=1
|
||||
addressindex=1
|
||||
timestampindex=1
|
||||
spentindex=1
|
||||
zmqpubrawtx=tcp://127.0.0.1:28332
|
||||
zmqpubhashblock=tcp://127.0.0.1:28332
|
||||
rpcallowip=127.0.0.1
|
||||
rpcuser=florincoin
|
||||
rpcpassword=local321
|
||||
```
|
||||
|
||||
From within the `devnode` directory with the configuration file, start the node:
|
||||
```bash
|
||||
../flocore-node/bin/flocore-node start
|
||||
```
|
||||
@ -1 +0,0 @@
|
||||
../README.md
|
||||
51
docs/node.md
51
docs/node.md
@ -1,51 +0,0 @@
|
||||
# Node
|
||||
A node represents a collection of services that are loaded together. For more information about services, please see the [Services Documentation](services.md).
|
||||
|
||||
## API Documentation
|
||||
- `start()` - Will start the node's services in the correct order based on the dependencies of a service.
|
||||
- `stop()` - Will stop the node's services.
|
||||
- `openBus()` - Will create a new event bus to subscribe to events.
|
||||
- `getAllAPIMethods()` - Returns information about all of the API methods from the services.
|
||||
- `getAllPublishEvents()` - Returns information about publish events.
|
||||
- `getServiceOrder()` - Returns an array of service modules.
|
||||
- `services.<service-name>.<method>` - Additional API methods exposed by each service. The services for the node are defined when the node instance is constructed.
|
||||
|
||||
## Example Usage
|
||||
|
||||
```js
|
||||
|
||||
var index = require('flocore-node');
|
||||
var Florincoin = index.services.Florincoin;
|
||||
var Node = index.Node;
|
||||
|
||||
var configuration = {
|
||||
datadir: '/home/user/.florincoin',
|
||||
network: 'testnet',
|
||||
services: [
|
||||
{
|
||||
name: 'florincoind',
|
||||
module: Florincoin,
|
||||
config: {}
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
var node = new Node(configuration);
|
||||
|
||||
node.start(function() {
|
||||
//start the node so the node.on('ready') is actually called.
|
||||
});
|
||||
|
||||
node.on('ready', function() {
|
||||
console.log('Florincoin Node Ready');
|
||||
});
|
||||
|
||||
node.on('error', function(err) {
|
||||
console.error(err);
|
||||
});
|
||||
|
||||
// shutdown the node
|
||||
node.stop(function() {
|
||||
// the shutdown is complete
|
||||
});
|
||||
```
|
||||
@ -1,28 +0,0 @@
|
||||
# Release Process
|
||||
|
||||
Binaries for florincoind are distributed for convenience and built deterministically with Gitian, signatures for florincoind are located at the [gitian.sigs](https://github.com/bitpay/gitian.sigs) respository.
|
||||
|
||||
## How to Release
|
||||
|
||||
When publishing to npm, the .gitignore file is used to exclude files from the npm publishing process. Be sure that the flocore-node directory has only the directories and files that you would like to publish to npm. You might need to run the commands below on each platform that you intend to publish (e.g. Mac and Linux).
|
||||
|
||||
To make a release, bump the `version` of the `package.json`:
|
||||
|
||||
```bash
|
||||
git checkout master
|
||||
git pull upstream master
|
||||
npm install
|
||||
npm run test
|
||||
npm run regtest
|
||||
npm run jshint
|
||||
git commit -a -m "Bump package version to <version>"
|
||||
git push upstream master
|
||||
npm publish
|
||||
```
|
||||
|
||||
Create a release tag and push it to the BitPay Github repo:
|
||||
|
||||
```bash
|
||||
git tag -s v<version> -m 'v<version>'
|
||||
git push upstream v<version>
|
||||
```
|
||||
@ -1,20 +0,0 @@
|
||||
# Scaffold
|
||||
A collection of functions for creating, managing, starting, stopping and interacting with a Flocore node.
|
||||
|
||||
## Install
|
||||
This function will add a service to a node by installing the necessary dependencies and modifying the `flocore-node.json` configuration.
|
||||
|
||||
## Start
|
||||
This function will load a configuration file `flocore-node.json` and instantiate and start a node based on the configuration.
|
||||
|
||||
## Find Config
|
||||
This function will recursively find a configuration `flocore-node.json` file in parent directories and return the result.
|
||||
|
||||
## Default Config
|
||||
This function will return a default configuration with the default services based on environment variables, and will default to using the standard `/home/user/.florincoin` data directory.
|
||||
|
||||
## Uninstall
|
||||
This function will remove a service from a node by uninstalling the necessary dependencies and modifying the `flocore-node.json` configuration.
|
||||
|
||||
## Call Method
|
||||
This function will call an API method on a node via the JSON-RPC interface.
|
||||
@ -1,88 +0,0 @@
|
||||
# Services
|
||||
Flocore Node has a service module system that can start up additional services that can include additional:
|
||||
- Blockchain indexes (e.g. querying balances for addresses)
|
||||
- API methods
|
||||
- HTTP routes
|
||||
- Event types to publish and subscribe
|
||||
|
||||
The `flocore-node.json` file describes which services will load for a node:
|
||||
|
||||
```json
|
||||
{
|
||||
"services": [
|
||||
"florincoind", "web"
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
Services correspond with a Node.js module as described in 'package.json', for example:
|
||||
|
||||
```json
|
||||
{
|
||||
"dependencies": {
|
||||
"flocore-lib": "^0.13.7",
|
||||
"flocore-node": "^0.2.0",
|
||||
"insight-api": "^3.0.0"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
_Note:_ If you already have a flocore-node database, and you want to query data from previous blocks in the blockchain, you will need to reindex. Reindexing right now means deleting your flocore-node database and resyncing.
|
||||
|
||||
## Using Services Programmatically
|
||||
If, instead, you would like to run a custom node, you can include services by including them in your configuration object when initializing a new node.
|
||||
|
||||
```js
|
||||
//Require flocore
|
||||
var flocore = require('flocore-node');
|
||||
|
||||
//Services
|
||||
var Florincoin = flocore.services.Florincoin;
|
||||
var Web = flocore.services.Web;
|
||||
|
||||
var myNode = new flocore.Node({
|
||||
network: 'regtest'
|
||||
services: [
|
||||
{
|
||||
name: 'florincoind',
|
||||
module: Florincoin,
|
||||
config: {
|
||||
spawn: {
|
||||
datadir: '/home/<username>/.florincoin',
|
||||
exec: '/home/<username>/flocore-node/bin/florincoind'
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'web',
|
||||
module: Web,
|
||||
config: {
|
||||
port: 3001
|
||||
}
|
||||
}
|
||||
]
|
||||
});
|
||||
```
|
||||
|
||||
Now that you've loaded your services you can access them via `myNode.services.<service-name>.<method-name>`. For example if you wanted to check the balance of an address, you could access the address service like so.
|
||||
|
||||
```js
|
||||
myNode.services.florincoind.getAddressBalance('1HB5XMLmzFVj8ALj6mfBsbifRoD4miY36v', false, function(err, total) {
|
||||
console.log(total.balance); //Satoshi amount of this address
|
||||
});
|
||||
```
|
||||
|
||||
## Writing a Service
|
||||
A new service can be created by inheriting from `Node.Service` and implementing these methods and properties:
|
||||
- `Service.dependencies` - An array of services that are needed, this will determine the order that services are started on the node.
|
||||
- `Service.prototype.start()` - Called to start up the service.
|
||||
- `Service.prototype.stop()` - Called to stop the service.
|
||||
- `Service.prototype.blockHandler()` - Will be called when a block is added or removed from the chain, and is useful for updating a database view/index.
|
||||
- `Service.prototype.getAPIMethods()` - Describes which API methods that this service includes, these methods can then be called over the JSON-RPC API, as well as the command-line utility.
|
||||
- `Service.prototype.getPublishEvents()` - Describes which events can be subscribed to for this service, useful to subscribe to events over the included web socket API.
|
||||
- `Service.prototype.setupRoutes()` - A service can extend HTTP routes on an express application by implementing this method.
|
||||
|
||||
The `package.json` for the service module can either export the `Node.Service` directly, or specify a specific module to load by including `"flocoreNode": "lib/flocore-node.js"`.
|
||||
|
||||
Please take a look at some of the existing services for implementation specifics.
|
||||
|
||||
@ -1,23 +0,0 @@
|
||||
# Address Service
|
||||
|
||||
The address service provides an address index for the Florincoin blockchain. Specifically, it builds and maintains the following information about every address ever used on the Florincoin network:
|
||||
|
||||
- block heights the address appeared in
|
||||
- transaction ids and the index in the transaction
|
||||
- whether the address appeared in an input or output
|
||||
- the timestamp for the block
|
||||
|
||||
Additionally, the address index also maintains the unspent transaction output index for the Florincoin blockchain. Example queries for this type of data is provided by 'getAddressUnspentOutputs', 'getAddressSummary', and 'getAddressHistory'.
|
||||
|
||||
This service is generally used to support other services and is not used externally.
|
||||
|
||||
## Service Configuration
|
||||
|
||||
none
|
||||
|
||||
## Other services this service Depends on
|
||||
|
||||
- db
|
||||
- header
|
||||
- transaction
|
||||
- timestamp
|
||||
@ -1,20 +0,0 @@
|
||||
# Block Service
|
||||
|
||||
The block service provides a block index for the Florincoin blockchain. Specifically, there are two data points this service tracks:
|
||||
|
||||
- block hash
|
||||
- raw block
|
||||
|
||||
This service is generally used to support other services and is not used externally.
|
||||
|
||||
## Service Configuration
|
||||
|
||||
none
|
||||
|
||||
## Other services this service Depends on
|
||||
|
||||
- header
|
||||
- timestamp
|
||||
- p2p
|
||||
- db
|
||||
|
||||
@ -1,13 +0,0 @@
|
||||
# Db Service
|
||||
|
||||
The db service provides an abstraction over the underlying database used to store the indexes in flocore-node.
|
||||
|
||||
## Service Configuration
|
||||
|
||||
None
|
||||
|
||||
## Other services this service Depends on
|
||||
|
||||
None
|
||||
|
||||
|
||||
@ -1,22 +0,0 @@
|
||||
# Fee Service
|
||||
|
||||
The fee service is a requirement of the insight-api service (not a flocore-node built-in service). Its primary purpose is to query a florincoin full node for the most up-to-date miner fees for transactions. A florincoin full node such as [BTC1](https://github.com/btc1/florincoin) or [bcoin](https://github.com/bcoin-org/bcoin) with an available RPC interface is required.
|
||||
|
||||
## Service Configuration
|
||||
|
||||
```json
|
||||
"fee": {
|
||||
"rpc": {
|
||||
"user": "user",
|
||||
"pass": "pass",
|
||||
"host": "localhost",
|
||||
"protocol": "http",
|
||||
"port": 8332
|
||||
}
|
||||
}
|
||||
```
|
||||
## Usage Example
|
||||
|
||||
```bash
|
||||
curl http://localhost:3001/insight-api/estimateFee
|
||||
```
|
||||
@ -1,19 +0,0 @@
|
||||
# Header Service
|
||||
|
||||
The header service provides a header index for the Florincoin blockchain. Specifically, it builds and maintains the following information about every florincoin block header:
|
||||
|
||||
- block hash
|
||||
- block height
|
||||
- block header
|
||||
|
||||
This service is generally used to support other services and is not used externally.
|
||||
|
||||
## Service Configuration
|
||||
|
||||
none
|
||||
|
||||
## Other services this service Depends on
|
||||
|
||||
- db
|
||||
- p2p
|
||||
|
||||
@ -1,16 +0,0 @@
|
||||
# Mempool Service
|
||||
|
||||
The mempool service provides a mempool transaction index for the Florincoin blockchain. Specifically, it maintains a larger index of mempool transactions than a typical full node can manage on its own.
|
||||
|
||||
- transaction id
|
||||
- transaction
|
||||
|
||||
This service is generally used to support other services and is not used externally.
|
||||
|
||||
## Service Configuration
|
||||
|
||||
none
|
||||
|
||||
## Other services this service Depends on
|
||||
|
||||
- db
|
||||
@ -1,21 +0,0 @@
|
||||
# P2P Service
|
||||
|
||||
The p2p service provides a peer-to-peer interface for the Florincoin blockchain. This service abstracts the connection and commnuication interface between the Florincoin and the rest of flocore node.
|
||||
|
||||
|
||||
This service also provides the publisher interface on flocore-node bus architecture. The P2P service will publish header, block and transaction events.
|
||||
|
||||
## Service Configuration
|
||||
|
||||
```json
|
||||
"p2p": {
|
||||
"peers": [
|
||||
{ "ip": { "v4": "127.0.0.1" }, "port": 8333 }
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## Other services this service Depends on
|
||||
|
||||
None
|
||||
|
||||
@ -1,18 +0,0 @@
|
||||
# Timestamp Service
|
||||
|
||||
The timestamp service provides a block timestamp index for the Florincoin blockchain. The only reason this index needs to exist is to ensure that block timestamps are always strictly greater than all the previous block timestamps. In the native block timestamps, this is not always the case. Without this index, accounting systems that are based on time spans (pretty much all of them), there will be issues accounting for transactions accurately.
|
||||
|
||||
- block timestamp
|
||||
- block hash
|
||||
|
||||
This service is generally used to support other services and is not used externally.
|
||||
|
||||
## Service Configuration
|
||||
|
||||
none
|
||||
|
||||
## Other services this service Depends on
|
||||
|
||||
- db
|
||||
|
||||
|
||||
@ -1,21 +0,0 @@
|
||||
# Transaction Service
|
||||
|
||||
The transaction service provides a transaction index for the Florincoin blockchain. Specifically, it builds and maintains the following information about every transaction on the Florincoin network:
|
||||
|
||||
- transaction ids and transactions
|
||||
- input values for every transaction
|
||||
- the timestamp for the block that the transaction appears in
|
||||
- the block height for the block that the transaction appears in
|
||||
|
||||
This service is generally used to support other services and is not used externally.
|
||||
|
||||
## Service Configuration
|
||||
|
||||
none
|
||||
|
||||
## Other services this service Depends on
|
||||
|
||||
- p2p
|
||||
- db
|
||||
- timestamp
|
||||
- mempool
|
||||
@ -1,39 +0,0 @@
|
||||
# Web Service
|
||||
The web service creates an express app which can be used by services for setting up web routes for API's, static content, web applications, etc. This allows users to interact with various flocore node services over one http or https port.
|
||||
|
||||
In order for your service to add routes, it must implement the `setupRoutes()` and `getRoutePrefix()` methods.
|
||||
|
||||
## Example
|
||||
|
||||
```js
|
||||
MyService.prototype.setupRoutes = function(app, express) {
|
||||
// Set up routes
|
||||
app.get('/hello', function(req, res) {
|
||||
res.send('world');
|
||||
});
|
||||
|
||||
// Serve static content
|
||||
app.use('/static', express.static(__dirname + '/static'));
|
||||
};
|
||||
|
||||
MyService.prototype.getRoutePrefix = function() {
|
||||
return 'my-service'
|
||||
};
|
||||
```
|
||||
|
||||
## Configuring Web Service for HTTPS
|
||||
You can run the web service over https by editing your flocore node config, setting https to true and adding httpsOptions:
|
||||
|
||||
```json
|
||||
{
|
||||
"port": 3001,
|
||||
"https": true,
|
||||
"httpsOptions": {
|
||||
"key": "path-to-private-key",
|
||||
"cert": "path-to-certificate"
|
||||
},
|
||||
"services": [
|
||||
"web"
|
||||
]
|
||||
}
|
||||
```
|
||||
@ -1,77 +0,0 @@
|
||||
# Upgrade Notes
|
||||
|
||||
## From Flocore 3.0.0 to 4.0.0
|
||||
|
||||
`flocore-node@2.1.1` to `flocore-node@3.0.0`
|
||||
|
||||
This major upgrade includes changes to indexes, API methods and services. Please review below details before upgrading.
|
||||
|
||||
### Indexes
|
||||
|
||||
Indexes include *more information* and are now also *faster*. Because of this a **reindex will be necessary** when upgrading as the address and database indexes are now a part of florincoind with three new `florincoin.conf` options:
|
||||
- `-addressindex`
|
||||
- `-timestampindex`
|
||||
- `-spentindex`
|
||||
|
||||
To start reindexing add `reindex=1` during the **first startup only**.
|
||||
|
||||
### Configuration Options
|
||||
|
||||
- The `florincoin.conf` file in will need to be updated to include additional indexes *(see below)*.
|
||||
- The `datadir` option is now a part of `florincoind` spawn configuration, and there is a new option to connect to multiple florincoind processes (Please see [Florincoin Service Docs](services/florincoind.md) for more details). The services `db` and `address` are now a part of the `florincoind` service. Here is how to update `flocore-node.json` configuration options:
|
||||
|
||||
**Before**:
|
||||
```json
|
||||
{
|
||||
"datadir": "/home/<username>/.florincoin",
|
||||
"network": "livenet",
|
||||
"port": 3001,
|
||||
"services": [
|
||||
"address",
|
||||
"florincoind",
|
||||
"db",
|
||||
"web"
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**After**:
|
||||
```json
|
||||
{
|
||||
"network": "livenet",
|
||||
"port": 3001,
|
||||
"services": [
|
||||
"florincoind",
|
||||
"web"
|
||||
],
|
||||
"servicesConfig": {
|
||||
"florincoind": {
|
||||
"spawn": {
|
||||
"datadir": "/home/<username>/.florincoin",
|
||||
"exec": "/home/<username>/flocore-node/bin/florincoind"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
It will also be necessary to update `florincoin.conf` settings, to include these fields:
|
||||
```
|
||||
server=1
|
||||
whitelist=127.0.0.1
|
||||
txindex=1
|
||||
addressindex=1
|
||||
timestampindex=1
|
||||
spentindex=1
|
||||
zmqpubrawtx=tcp://127.0.0.1:<port>
|
||||
zmqpubhashblock=tcp://127.0.0.1:<port>
|
||||
rpcallowip=127.0.0.1
|
||||
rpcuser=<user>
|
||||
rpcpassword=<password>
|
||||
```
|
||||
|
||||
**Important**: Once changes have been made you'll also need to add the `reindex=1` option **only for the first startup** to regenerate the indexes. Once this is complete you should be able to remove the `flocore-node.db` directory with the old indexes.
|
||||
|
||||
### API and Service Changes
|
||||
- Many API methods that were a part of the `db` and `address` services are now a part of the `florincoind` service. Please see [Florincoin Service Docs](services/florincoind.md) for more details.
|
||||
- The `db` and `address` services are deprecated, most of the functionality still exists. Any services that were extending indexes with the `db` service, will need to manage chain state itself, or build the indexes within `florincoind`.
|
||||
10
etc/bitcoind/bitcoin-livenet.conf
Normal file
10
etc/bitcoind/bitcoin-livenet.conf
Normal file
@ -0,0 +1,10 @@
|
||||
rpcuser=user
|
||||
rpcpassword=pass
|
||||
server=1
|
||||
txindex=1
|
||||
|
||||
# Allow connections outsite localhost?
|
||||
rpcallowip=192.168.1.*
|
||||
|
||||
rpcport=8332
|
||||
|
||||
10
etc/bitcoind/bitcoin-testnet.conf
Normal file
10
etc/bitcoind/bitcoin-testnet.conf
Normal file
@ -0,0 +1,10 @@
|
||||
rpcuser=user
|
||||
rpcpassword=pass
|
||||
server=1
|
||||
txindex=1
|
||||
|
||||
# Allow connections outsite localhost?
|
||||
rpcallowip=192.168.1.*
|
||||
|
||||
rpcport=18332
|
||||
testnet=3
|
||||
227
etc/minersPoolStrings.json
Normal file
227
etc/minersPoolStrings.json
Normal file
@ -0,0 +1,227 @@
|
||||
[
|
||||
{
|
||||
"poolName":"50BTC",
|
||||
"url":"https://50btc.com/",
|
||||
"searchStrings":[
|
||||
"50BTC.com",
|
||||
"50btc.com"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"175btc",
|
||||
"url":"http://www.175btc.com/",
|
||||
"searchStrings":[
|
||||
"Mined By 175btc.com"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"ASICminer",
|
||||
"url":"https://bitcointalk.org/index.php?topic=99497.0",
|
||||
"searchStrings":[
|
||||
"Mined By ASICMiner"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"AntMiner",
|
||||
"url":"https://bitmaintech.com/",
|
||||
"searchStrings":[
|
||||
"AntPool"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"agentD",
|
||||
"url":"http://",
|
||||
"searchStrings":[
|
||||
"agentD"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"Bitfury",
|
||||
"url":"http://bitfury.org/",
|
||||
"searchStrings":[
|
||||
"2av0id51pct"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"BitMinter",
|
||||
"url":"https://bitminter.com/",
|
||||
"searchStrings":[
|
||||
"BitMinter"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"Bitparking",
|
||||
"url":"http://bitparking.com/",
|
||||
"searchStrings":[
|
||||
"bitparking"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"BTC Guild",
|
||||
"url":"https://www.btcguild.com/",
|
||||
"searchStrings":[
|
||||
"Mined by BTC Guild",
|
||||
"BTC Guild"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"bcpool.io",
|
||||
"url":"https://bcpool.io/",
|
||||
"searchStrings":[
|
||||
"bcpool"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"Discus Fish",
|
||||
"url":"http://f2pool.com/",
|
||||
"searchStrings":[
|
||||
"七彩神仙鱼",
|
||||
"Made in China",
|
||||
"Mined by user"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"Discus Fish Solo",
|
||||
"url":"http://f2pool.com/",
|
||||
"searchStrings":[
|
||||
"For Pierce and Paul"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"Cointerra",
|
||||
"url":"http://cointerra.com/",
|
||||
"searchStrings":[
|
||||
"cointerra"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"Eligius",
|
||||
"url":"http://eligius.st/",
|
||||
"searchStrings":[
|
||||
"Eligius"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"EclipseMC",
|
||||
"url":"https://eclipsemc.com/",
|
||||
"searchStrings":[
|
||||
"Josh Zerlan was here!",
|
||||
"EclipseMC",
|
||||
"Aluminum Falcon"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"GIVE-ME-COINS",
|
||||
"url":"https://give-me-coins.com/",
|
||||
"searchStrings":[
|
||||
"Mined at GIVE-ME-COINS.com"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"ghash.io",
|
||||
"url":"https://ghash.io/",
|
||||
"searchStrings":[
|
||||
"ghash.io",
|
||||
"GHash.IO"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"HHTT",
|
||||
"url":"http://hhtt.1209k.com/",
|
||||
"searchStrings":[
|
||||
"HHTT"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"KNCminer",
|
||||
"url":"https://www.kncminer.com/",
|
||||
"searchStrings":[
|
||||
"KnCMiner"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"Megabigpower",
|
||||
"url":"http://megabigpower.com/",
|
||||
"searchStrings":[
|
||||
"megabigpower.com"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"MultiCoin",
|
||||
"url":"https://multicoin.co/",
|
||||
"searchStrings":[
|
||||
"MultiCoin.co"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"Mt Red",
|
||||
"url":"https://mtred.com/",
|
||||
"searchStrings":[
|
||||
"/mtred/"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"MaxBTC",
|
||||
"url":"https://www.maxbtc.com",
|
||||
"searchStrings":[
|
||||
"MaxBTC"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"NMCbit",
|
||||
"url":"http://nmcbit.com/",
|
||||
"searchStrings":[
|
||||
"nmcbit.com"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"ozcoin",
|
||||
"url":"https://ozco.in/",
|
||||
"searchStrings":[
|
||||
"ozco.in",
|
||||
"ozcoin"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"Polmine.pl",
|
||||
"url":"https://polmine.pl/",
|
||||
"searchStrings":[
|
||||
"by polmine.pl"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"simplecoin",
|
||||
"url":"http://simplecoin.us/",
|
||||
"searchStrings":[
|
||||
"simplecoin.us ftw"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"Slush",
|
||||
"url":"https://mining.bitcoin.cz/",
|
||||
"searchStrings":[
|
||||
"slush"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"TripleMining",
|
||||
"url":"https://www.triplemining.com/",
|
||||
"searchStrings":[
|
||||
"Triplemining.com"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"wizkid057",
|
||||
"url":"http://wizkid057.com/btc",
|
||||
"searchStrings":[
|
||||
"wizkid057"
|
||||
]
|
||||
},
|
||||
{
|
||||
"poolName":"Yourbtc.net",
|
||||
"url":"http://yourbtc.net/",
|
||||
"searchStrings":[
|
||||
"yourbtc.net"
|
||||
]
|
||||
}
|
||||
]
|
||||
14
etc/test-cert.pem
Normal file
14
etc/test-cert.pem
Normal file
@ -0,0 +1,14 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIICMjCCAZugAwIBAgIJAK9dKmjfxq+BMA0GCSqGSIb3DQEBCwUAMDIxCzAJBgNV
|
||||
BAYTAkFSMRMwEQYDVQQIDApTb21lLVN0YXRlMQ4wDAYDVQQKDAVDb3BheTAeFw0x
|
||||
NDA4MjExNzQyMTBaFw0xNDA5MjAxNzQyMTBaMDIxCzAJBgNVBAYTAkFSMRMwEQYD
|
||||
VQQIDApTb21lLVN0YXRlMQ4wDAYDVQQKDAVDb3BheTCBnzANBgkqhkiG9w0BAQEF
|
||||
AAOBjQAwgYkCgYEA1BbMI6V06LKoBrcf5bJ8LH7EjwqbEacIOpiY7B+8W3sAM1bB
|
||||
6hA2IlPvKL3qTdhMMKFZGZMYypmlAQTI1N+VNSwJHNjyepFbtkdNytSC8qw8bhak
|
||||
yt4TByYEw1NMYx7I0OOdjh/DKsS+EOIgQDT9zSB+NgErKb0mKrginwgk5XkCAwEA
|
||||
AaNQME4wHQYDVR0OBBYEFM0G1agUfY4zRNfxJ+0sHV3EsoGKMB8GA1UdIwQYMBaA
|
||||
FM0G1agUfY4zRNfxJ+0sHV3EsoGKMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEL
|
||||
BQADgYEAOg7n1RCyB1BJ6TuF99i25H7kpGUSL57ajNyyCKDciTPmpxVJ5knAjPYa
|
||||
hbXX+dlq2B8QEnfkE5FMDLkO3RS3xU8YfekIDHofDuXR9boD/4rRlsN8md2Jmkr6
|
||||
MyRtYPtsPWVeoz0WmG5f1yobHmh7mYf17oN+uRJKX68s8G6b/SQ=
|
||||
-----END CERTIFICATE-----
|
||||
15
etc/test-key.pem
Normal file
15
etc/test-key.pem
Normal file
@ -0,0 +1,15 @@
|
||||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIICXgIBAAKBgQDUFswjpXTosqgGtx/lsnwsfsSPCpsRpwg6mJjsH7xbewAzVsHq
|
||||
EDYiU+8ovepN2EwwoVkZkxjKmaUBBMjU35U1LAkc2PJ6kVu2R03K1ILyrDxuFqTK
|
||||
3hMHJgTDU0xjHsjQ452OH8MqxL4Q4iBANP3NIH42ASspvSYquCKfCCTleQIDAQAB
|
||||
AoGAMUzDUx3o2RZ+XGFA9uHQX39wLVfnx+itzwEduvV9kT48Q7LNDJ2MF9qu4yeS
|
||||
SVoYC83Vqk45Gw8v/dag4GrAgdk1NHZZ56Z/G55m06Y45xS6ZarBdbe0N1jdZEab
|
||||
RG3FgxyPSUiZ5aLIMxMMtgt/DRv9BPpIeLNDMgyQRjVWlMkCQQDzlLwkp4bo+CAY
|
||||
UMcsSN+KGurEMsuF0qc/+TLqpKDoOaLtd1F+Ntn20tQqeH0YLWktFvzAgY7wYXrb
|
||||
lhMuAxa7AkEA3ucGEXNqwu1qVP4fXfEN1E0Y5X/euXMsfgNG8IK82hF3h83hnqNM
|
||||
3FcGFOyKnL7E5TfRlJfxhAGqUfCe+2zjWwJBAKA6CID8CkyZW1NjX4EL9q+8AQ5K
|
||||
c4J2DTqRzCJ5ZLcdosUeJecmYb5w9MtzMqaCyJq2clCXaNVK6iwjzj4IHh0CQQCY
|
||||
sgwvIjCtrfQcmyUjtoExwUrf1LPfuK1u+ZG8KuNyQ2rtxjTb9qQtgRPye4QNEoZR
|
||||
O+a/c0MImhdyIHLYa+RnAkEAwfLD4q+FDx4eX0ANO7/PI/XiJGqi6x1cYUwyRg9o
|
||||
2S6hN5RnUD/nf2HKHU0esp34UMY/UWMrodCRDZj/ijg4UA==
|
||||
-----END RSA PRIVATE KEY-----
|
||||
@ -1,34 +0,0 @@
|
||||
{
|
||||
"network": "testnet",
|
||||
"port": 3001,
|
||||
"datadir": "/tmp",
|
||||
"services": [
|
||||
"p2p",
|
||||
"db",
|
||||
"header",
|
||||
"block",
|
||||
"mempool",
|
||||
"address",
|
||||
"transaction",
|
||||
"timestamp",
|
||||
"fee",
|
||||
"insight-api",
|
||||
"web"
|
||||
],
|
||||
"servicesConfig": {
|
||||
"insight-api": {
|
||||
"routePrefix": "api",
|
||||
"disableRateLimiter": true,
|
||||
"enableCache": true
|
||||
},
|
||||
"fee": {
|
||||
"rpc": {
|
||||
"user": "local",
|
||||
"pass": "local",
|
||||
"host": "localhost",
|
||||
"protocol": "http",
|
||||
"port": 18332
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
148
index.js
Normal file → Executable file
148
index.js
Normal file → Executable file
@ -1,26 +1,134 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
'use strict';
|
||||
//Set the node enviornment variable if not set before
|
||||
process.env.NODE_ENV = process.env.NODE_ENV || 'development';
|
||||
|
||||
module.exports = require('./lib');
|
||||
module.exports.Node = require('./lib/node');
|
||||
module.exports.Service = require('./lib/service');
|
||||
module.exports.errors = require('./lib/errors');
|
||||
var fs = require('fs');
|
||||
var PeerSync = require('./lib/PeerSync');
|
||||
var HistoricSync = require('./lib/HistoricSync');
|
||||
|
||||
module.exports.services = {};
|
||||
module.exports.services.Web = require('./lib/services/web');
|
||||
var http = require('http');
|
||||
var https = require('https');
|
||||
var express = require('express');
|
||||
var program = require('commander');
|
||||
|
||||
module.exports.scaffold = {};
|
||||
module.exports.scaffold.create = require('./lib/scaffold/create');
|
||||
module.exports.scaffold.add = require('./lib/scaffold/add');
|
||||
module.exports.scaffold.remove = require('./lib/scaffold/remove');
|
||||
module.exports.scaffold.start = require('./lib/scaffold/start');
|
||||
module.exports.scaffold.callMethod = require('./lib/scaffold/call-method');
|
||||
module.exports.scaffold.findConfig = require('./lib/scaffold/find-config');
|
||||
module.exports.scaffold.defaultConfig = require('./lib/scaffold/default-config');
|
||||
var config = require('./config/config');
|
||||
var logger = require('./lib/logger').logger;
|
||||
program
|
||||
.version(config.version);
|
||||
|
||||
module.exports.cli = {};
|
||||
module.exports.cli.main = require('./lib/cli/main');
|
||||
module.exports.cli.daemon = require('./lib/cli/daemon');
|
||||
module.exports.cli.flocore = require('./lib/cli/flocore');
|
||||
module.exports.cli.flocored = require('./lib/cli/flocored');
|
||||
// text title
|
||||
console.log(
|
||||
'bitcore-node
|
||||
\n\t\t\t\t\t\tv%s\n', config.version);
|
||||
program.on('--help', function() {
|
||||
logger.info('\n# Configuration:\n\
|
||||
\tBLOCKCHAIN_API_NETWORK (Network): %s\n\
|
||||
\tBLOCKCHAIN_API_DB (Database Path): %s\n\
|
||||
\tBLOCKCHAIN_API_SAFE_CONFIRMATIONS (Safe Confirmations): %s\n\
|
||||
\tBLOCKCHAIN_API_IGNORE_CACHE (Ignore Cache): %s\n\
|
||||
# Bicoind Connection configuration:\n\
|
||||
\tRPC Username: %s\t\tBITCOIND_USER\n\
|
||||
\tRPC Password: %s\tBITCOIND_PASS\n\
|
||||
\tRPC Protocol: %s\t\tBITCOIND_PROTO\n\
|
||||
\tRPC Host: %s\t\tBITCOIND_HOST\n\
|
||||
\tRPC Port: %s\t\t\tBITCOIND_PORT\n\
|
||||
\tP2P Port: %s\t\t\tBITCOIND_P2P_PORT\n\
|
||||
\tBITCOIND_DATADIR: %s\n\
|
||||
\t%s\n\
|
||||
\nChange setting by assigning the enviroment variables above. Example:\n\
|
||||
$ BLOCKCHAIN_API_NETWORK="testnet" BITCOIND_HOST="123.123.123.123" ./index.js\
|
||||
\n\n',
|
||||
config.network, config.leveldb, config.safeConfirmations, config.ignoreCache ? 'yes' : 'no',
|
||||
config.bitcoind.user,
|
||||
config.bitcoind.pass ? 'Yes(hidden)' : 'No',
|
||||
config.bitcoind.protocol,
|
||||
config.bitcoind.host,
|
||||
config.bitcoind.port,
|
||||
config.bitcoind.p2pPort,
|
||||
config.bitcoind.dataDir + (config.network === 'testnet' ? '*' : ''), (config.network === 'testnet' ? '* (/testnet3 is added automatically)' : '')
|
||||
);
|
||||
});
|
||||
|
||||
module.exports.lib = require('flocore-lib');
|
||||
program.parse(process.argv);
|
||||
|
||||
// create express app
|
||||
var expressApp = express();
|
||||
|
||||
// setup headers
|
||||
require('./config/headers')(expressApp);
|
||||
|
||||
// setup http/https base server
|
||||
var server;
|
||||
if (config.enableHTTPS) {
|
||||
var serverOpts = {};
|
||||
serverOpts.key = fs.readFileSync('./etc/test-key.pem');
|
||||
serverOpts.cert = fs.readFileSync('./etc/test-cert.pem');
|
||||
server = https.createServer(serverOpts, expressApp);
|
||||
} else {
|
||||
server = http.createServer(expressApp);
|
||||
}
|
||||
|
||||
// Bootstrap models
|
||||
var models_path = __dirname + '/app/models';
|
||||
var walk = function(path) {
|
||||
fs.readdirSync(path).forEach(function(file) {
|
||||
var newPath = path + '/' + file;
|
||||
var stat = fs.statSync(newPath);
|
||||
if (stat.isFile()) {
|
||||
if (/(.*)\.(js$)/.test(file)) {
|
||||
require(newPath);
|
||||
}
|
||||
} else if (stat.isDirectory()) {
|
||||
walk(newPath);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
walk(models_path);
|
||||
|
||||
// p2pSync process
|
||||
var peerSync = new PeerSync({
|
||||
shouldBroadcast: true
|
||||
});
|
||||
|
||||
if (!config.disableP2pSync) {
|
||||
peerSync.run();
|
||||
}
|
||||
|
||||
// historic_sync process
|
||||
var historicSync = new HistoricSync({
|
||||
shouldBroadcastSync: true
|
||||
});
|
||||
peerSync.historicSync = historicSync;
|
||||
|
||||
if (!config.disableHistoricSync) {
|
||||
historicSync.start({}, function(err) {
|
||||
if (err) {
|
||||
var txt = 'ABORTED with error: ' + err.message;
|
||||
console.log('[historic_sync] ' + txt);
|
||||
}
|
||||
if (peerSync) peerSync.allowReorgs = true;
|
||||
});
|
||||
} else
|
||||
if (peerSync) peerSync.allowReorgs = true;
|
||||
|
||||
|
||||
|
||||
// socket.io
|
||||
var ios = require('socket.io')(server, config);
|
||||
require('./app/controllers/socket.js').init(ios);
|
||||
|
||||
// express settings
|
||||
require('./config/express')(expressApp, historicSync, peerSync);
|
||||
require('./config/routes')(expressApp);
|
||||
|
||||
|
||||
//Start the app by listening on <port>
|
||||
server.listen(config.port, function() {
|
||||
logger.info('bitcoin-node API listening on port %d in %s mode', server.address().port, process.env.NODE_ENV);
|
||||
});
|
||||
|
||||
//expose app
|
||||
exports = module.exports = expressApp;
|
||||
|
||||
466
lib/BlockDb.js
Normal file
466
lib/BlockDb.js
Normal file
@ -0,0 +1,466 @@
|
||||
'use strict';
|
||||
var imports = require('soop').imports();
|
||||
var TIMESTAMP_PREFIX = 'bts-'; // bts-<ts> => <hash>
|
||||
var PREV_PREFIX = 'bpr-'; // bpr-<hash> => <prev_hash>
|
||||
var NEXT_PREFIX = 'bne-'; // bne-<hash> => <next_hash>
|
||||
var MAIN_PREFIX = 'bma-'; // bma-<hash> => <height> (0 is unconnected)
|
||||
var TIP = 'bti-'; // bti = <hash>:<height> last block on the chain
|
||||
var LAST_FILE_INDEX = 'file-'; // last processed file index
|
||||
|
||||
// txid - blockhash mapping (only for confirmed txs, ONLY FOR BEST BRANCH CHAIN)
|
||||
var IN_BLK_PREFIX = 'btx-'; //btx-<txid> = <block>
|
||||
|
||||
|
||||
var MAX_OPEN_FILES = 500;
|
||||
var CONCURRENCY = 5;
|
||||
var DFLT_REQUIRED_CONFIRMATIONS = 1;
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
var levelup = require('levelup'),
|
||||
config = require('../config/config');
|
||||
var db = imports.db || levelup(config.leveldb + '/blocks',{maxOpenFiles: MAX_OPEN_FILES} );
|
||||
var Rpc = imports.rpc || require('./Rpc');
|
||||
var async = require('async');
|
||||
|
||||
|
||||
var logger = require('./logger').logger;
|
||||
var info = logger.info;
|
||||
|
||||
var BlockDb = function(opts) {
|
||||
this.txDb = require('./TransactionDb').default();
|
||||
this.safeConfirmations = config.safeConfirmations || DEFAULT_SAFE_CONFIRMATIONS;
|
||||
BlockDb.super(this, arguments);
|
||||
};
|
||||
|
||||
BlockDb.prototype.close = function(cb) {
|
||||
db.close(cb);
|
||||
};
|
||||
|
||||
BlockDb.prototype.drop = function(cb) {
|
||||
var path = config.leveldb + '/blocks';
|
||||
db.close(function() {
|
||||
require('leveldown').destroy(path, function () {
|
||||
db = levelup(path,{maxOpenFiles: MAX_OPEN_FILES} );
|
||||
return cb();
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
BlockDb.prototype._addBlockScript = function(b, height) {
|
||||
var time_key = TIMESTAMP_PREFIX +
|
||||
( b.time || Math.round(new Date().getTime() / 1000) );
|
||||
|
||||
return [
|
||||
{
|
||||
type: 'put',
|
||||
key: time_key,
|
||||
value: b.hash,
|
||||
},
|
||||
{
|
||||
type: 'put',
|
||||
key: MAIN_PREFIX + b.hash,
|
||||
value: height,
|
||||
},
|
||||
{
|
||||
type: 'put',
|
||||
key:PREV_PREFIX + b.hash,
|
||||
value: b.previousblockhash,
|
||||
},
|
||||
];
|
||||
};
|
||||
|
||||
BlockDb.prototype._delTxsScript = function(txs) {
|
||||
var dbScript =[];
|
||||
|
||||
for(var ii in txs){
|
||||
dbScript.push({
|
||||
type: 'del',
|
||||
key: IN_BLK_PREFIX + txs[ii],
|
||||
});
|
||||
}
|
||||
return dbScript;
|
||||
};
|
||||
|
||||
BlockDb.prototype._addTxsScript = function(txs, hash, height) {
|
||||
var dbScript =[];
|
||||
|
||||
for(var ii in txs){
|
||||
dbScript.push({
|
||||
type: 'put',
|
||||
key: IN_BLK_PREFIX + txs[ii],
|
||||
value: hash+':'+height,
|
||||
});
|
||||
}
|
||||
return dbScript;
|
||||
};
|
||||
|
||||
// Returns blockHash and height for a given txId (If the tx is on the MAIN chain).
|
||||
BlockDb.prototype.getBlockForTx = function(txId, cb) {
|
||||
db.get(IN_BLK_PREFIX + txId,function (err, val) {
|
||||
if (err && err.notFound) return cb();
|
||||
if (err) return cb(err);
|
||||
|
||||
var v = val.split(':');
|
||||
return cb(err,v[0],parseInt(v[1]));
|
||||
});
|
||||
};
|
||||
|
||||
BlockDb.prototype._changeBlockHeight = function(hash, height, cb) {
|
||||
var self = this;
|
||||
var dbScript1 = this._setHeightScript(hash,height);
|
||||
|
||||
logger.log('Getting TXS FROM %s to set it Main', hash);
|
||||
this.fromHashWithInfo(hash, function(err, bi) {
|
||||
if (!bi || !bi.info || !bi.info.tx)
|
||||
throw new Error('unable to get info for block:'+ hash);
|
||||
|
||||
var dbScript2;
|
||||
if (height>=0) {
|
||||
dbScript2 = self._addTxsScript(bi.info.tx, hash, height);
|
||||
logger.info('\t%s %d Txs', 'Confirming', bi.info.tx.length);
|
||||
} else {
|
||||
dbScript2 = self._delTxsScript(bi.info.tx);
|
||||
logger.info('\t%s %d Txs', 'Unconfirming', bi.info.tx.length);
|
||||
}
|
||||
db.batch(dbScript2.concat(dbScript1),cb);
|
||||
});
|
||||
};
|
||||
|
||||
BlockDb.prototype.setBlockMain = function(hash, height, cb) {
|
||||
this._changeBlockHeight(hash,height,cb);
|
||||
};
|
||||
|
||||
BlockDb.prototype.setBlockNotMain = function(hash, cb) {
|
||||
this._changeBlockHeight(hash,-1,cb);
|
||||
};
|
||||
|
||||
// adds a block (and its txs). Does not update Next pointer in
|
||||
// the block prev to the new block, nor TIP pointer
|
||||
//
|
||||
BlockDb.prototype.add = function(b, height, cb) {
|
||||
var txs = typeof b.tx[0] === 'string' ? b.tx : b.tx.map( function(o){ return o.txid; });
|
||||
|
||||
var dbScript = this._addBlockScript(b,height);
|
||||
dbScript = dbScript.concat(this._addTxsScript(txs, b.hash, height));
|
||||
this.txDb.addMany(b.tx, function(err) {
|
||||
if (err) return cb(err);
|
||||
db.batch(dbScript, cb);
|
||||
});
|
||||
};
|
||||
|
||||
BlockDb.prototype.getTip = function(cb) {
|
||||
|
||||
if (this.cachedTip){
|
||||
var v = this.cachedTip.split(':');
|
||||
return cb(null,v[0], parseInt(v[1]));
|
||||
}
|
||||
|
||||
var self = this;
|
||||
db.get(TIP, function(err, val) {
|
||||
if (!val) return cb();
|
||||
self.cachedTip = val;
|
||||
var v = val.split(':');
|
||||
return cb(err,v[0], parseInt(v[1]));
|
||||
});
|
||||
};
|
||||
|
||||
BlockDb.prototype.setTip = function(hash, height, cb) {
|
||||
this.cachedTip = hash + ':' + height;
|
||||
db.put(TIP, this.cachedTip, function(err) {
|
||||
return cb(err);
|
||||
});
|
||||
};
|
||||
|
||||
BlockDb.prototype.getDepth = function(hash, cb) {
|
||||
var v = this.cachedTip.split(':');
|
||||
if (!v) throw new Error('getDepth called with not cachedTip');
|
||||
this.getHeight(hash, function(err,h){
|
||||
return cb(err,parseInt(v[1]) - h);
|
||||
});
|
||||
};
|
||||
|
||||
//mainly for testing
|
||||
BlockDb.prototype.setPrev = function(hash, prevHash, cb) {
|
||||
db.put(PREV_PREFIX + hash, prevHash, function(err) {
|
||||
return cb(err);
|
||||
});
|
||||
};
|
||||
|
||||
BlockDb.prototype.getPrev = function(hash, cb) {
|
||||
db.get(PREV_PREFIX + hash, function(err,val) {
|
||||
if (err && err.notFound) { err = null; val = null;}
|
||||
return cb(err,val);
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
BlockDb.prototype.setLastFileIndex = function(idx, cb) {
|
||||
var self = this;
|
||||
if (this.lastFileIndexSaved === idx) return cb();
|
||||
|
||||
db.put(LAST_FILE_INDEX, idx, function(err) {
|
||||
self.lastFileIndexSaved = idx;
|
||||
return cb(err);
|
||||
});
|
||||
};
|
||||
|
||||
BlockDb.prototype.getLastFileIndex = function(cb) {
|
||||
db.get(LAST_FILE_INDEX, function(err,val) {
|
||||
if (err && err.notFound) { err = null; val = null;}
|
||||
return cb(err,val);
|
||||
});
|
||||
};
|
||||
|
||||
BlockDb.prototype.getNext = function(hash, cb) {
|
||||
db.get(NEXT_PREFIX + hash, function(err,val) {
|
||||
if (err && err.notFound) { err = null; val = null;}
|
||||
return cb(err,val);
|
||||
});
|
||||
};
|
||||
|
||||
BlockDb.prototype.getHeight = function(hash, cb) {
|
||||
db.get(MAIN_PREFIX + hash, function(err, val) {
|
||||
if (err && err.notFound) { err = null; val = 0;}
|
||||
return cb(err,parseInt(val));
|
||||
});
|
||||
};
|
||||
|
||||
BlockDb.prototype._setHeightScript = function(hash, height) {
|
||||
logger.log('setHeight: %s #%d', hash,height);
|
||||
return ([{
|
||||
type: 'put',
|
||||
key: MAIN_PREFIX + hash,
|
||||
value: height,
|
||||
}]);
|
||||
};
|
||||
|
||||
BlockDb.prototype.setNext = function(hash, nextHash, cb) {
|
||||
db.put(NEXT_PREFIX + hash, nextHash, function(err) {
|
||||
return cb(err);
|
||||
});
|
||||
};
|
||||
|
||||
// Unused
|
||||
BlockDb.prototype.countConnected = function(cb) {
|
||||
var c = 0;
|
||||
console.log('Counting connected blocks. This could take some minutes');
|
||||
db.createReadStream({start: MAIN_PREFIX, end: MAIN_PREFIX + '~' })
|
||||
.on('data', function (data) {
|
||||
if (data.value !== 0) c++;
|
||||
})
|
||||
.on('error', function (err) {
|
||||
return cb(err);
|
||||
})
|
||||
.on('end', function () {
|
||||
return cb(null, c);
|
||||
});
|
||||
};
|
||||
|
||||
// .has() return true orphans also
|
||||
BlockDb.prototype.has = function(hash, cb) {
|
||||
var k = PREV_PREFIX + hash;
|
||||
db.get(k, function (err) {
|
||||
var ret = true;
|
||||
if (err && err.notFound) {
|
||||
err = null;
|
||||
ret = false;
|
||||
}
|
||||
return cb(err, ret);
|
||||
});
|
||||
};
|
||||
|
||||
BlockDb.prototype.fromHashWithInfo = function(hash, cb) {
|
||||
var self = this;
|
||||
|
||||
Rpc.getBlock(hash, function(err, info) {
|
||||
if (err || !info) return cb(err);
|
||||
|
||||
//TODO can we get this from RPC .height?
|
||||
self.getHeight(hash, function(err, height) {
|
||||
if (err) return cb(err);
|
||||
|
||||
info.isMainChain = height>=0 ? true : false;
|
||||
|
||||
return cb(null, {
|
||||
hash: hash,
|
||||
info: info,
|
||||
});
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
BlockDb.prototype.getBlocksByDate = function(start_ts, end_ts, limit, cb) {
|
||||
var list = [];
|
||||
var opts = {
|
||||
start: TIMESTAMP_PREFIX + end_ts, //Inverted since list is reversed
|
||||
end: TIMESTAMP_PREFIX + start_ts,
|
||||
limit: limit,
|
||||
reverse: 1,
|
||||
};
|
||||
|
||||
db.createReadStream(opts)
|
||||
.on('data', function (data) {
|
||||
var k = data.key.split('-');
|
||||
list.push({
|
||||
ts: k[1],
|
||||
hash: data.value,
|
||||
});
|
||||
})
|
||||
.on('error', function (err) {
|
||||
return cb(err);
|
||||
})
|
||||
.on('end', function () {
|
||||
return cb(null, list.reverse());
|
||||
});
|
||||
};
|
||||
|
||||
BlockDb.prototype.blockIndex = function(height, cb) {
|
||||
return Rpc.blockIndex(height,cb);
|
||||
};
|
||||
|
||||
BlockDb.prototype._fillConfirmationsOneSpent = function(o, chainHeight, cb) {
|
||||
var self = this;
|
||||
if (!o.spentTxId) return cb();
|
||||
|
||||
if (o.multipleSpentAttempts) {
|
||||
async.eachLimit(o.multipleSpentAttempts, CONCURRENCY,
|
||||
function(oi, e_c) {
|
||||
// Only one will be confirmed
|
||||
self.getBlockForTx(oi.txid, function(err, hash, height) {
|
||||
if (err) return;
|
||||
if (height>=0) {
|
||||
o.spentTxId = oi.txid;
|
||||
o.index = oi.index;
|
||||
o.spentIsConfirmed = chainHeight >= height;
|
||||
o.spentConfirmations = chainHeight - height +1;
|
||||
}
|
||||
return e_c();
|
||||
});
|
||||
}, cb);
|
||||
} else {
|
||||
self.getBlockForTx(o.spentTxId, function(err, hash, height) {
|
||||
if (err) return cb(err);
|
||||
if (height >=0 ) {
|
||||
o.spentIsConfirmed = chainHeight >= height;
|
||||
o.spentConfirmations = chainHeight - height +1;
|
||||
}
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
BlockDb.prototype._fillConfirmationsOne = function(o, chainHeight, cb) {
|
||||
var self = this;
|
||||
self.getBlockForTx(o.txid, function(err, hash, height) {
|
||||
if (err) return cb(err);
|
||||
if (height>=0) {
|
||||
o.isConfirmed = chainHeight >= height;
|
||||
o.confirmations = chainHeight - height +1;
|
||||
return self._fillConfirmationsOneSpent(o,chainHeight,cb);
|
||||
}
|
||||
else return cb();
|
||||
});
|
||||
};
|
||||
|
||||
BlockDb.prototype.fillConfirmations = function(txouts, cb) {
|
||||
var self = this;
|
||||
this.getTip(function(err, hash, height){
|
||||
var txs = txouts.filter(function(x){
|
||||
return !x.spentIsConfirmedCached // not 100%cached
|
||||
&& !(x.isConfirmedCached && !x.spentTxId); // and not partial cached but not spent
|
||||
});
|
||||
//console.log('[BlockDb.js.373:txs:]',txs.length, txs.slice(0,5)); //TODO
|
||||
|
||||
async.eachLimit(txs, CONCURRENCY, function(txout, e_c) {
|
||||
if(txout.isConfirmedCached) {
|
||||
self._fillConfirmationsOneSpent(txout,height, e_c);
|
||||
} else {
|
||||
self._fillConfirmationsOne(txout,height, e_c);
|
||||
}
|
||||
|
||||
}, cb);
|
||||
});
|
||||
};
|
||||
|
||||
/* this is only for migration scripts */
|
||||
BlockDb.prototype._runScript = function(script, cb) {
|
||||
db.batch(script,cb);
|
||||
};
|
||||
|
||||
BlockDb.prototype.migrateV02 = function(cb) {
|
||||
var k = 'txb-';
|
||||
var dbScript = [];
|
||||
var c=0;
|
||||
var c2=0;
|
||||
var N=50000;
|
||||
this.txDb._db.createReadStream({
|
||||
start: k,
|
||||
end: k + '~'
|
||||
})
|
||||
.on('data', function(data) {
|
||||
var k = data.key.split('-');
|
||||
var v = data.value.split(':');
|
||||
dbScript.push({
|
||||
type: 'put',
|
||||
key: IN_BLK_PREFIX + k[1],
|
||||
value: data.value,
|
||||
});
|
||||
if (c++>N) {
|
||||
console.log('\t%dM txs processed', ((c2+=N)/1e6).toFixed(3));
|
||||
db.batch(dbScript,function () {
|
||||
c=0;
|
||||
dbScript=[];
|
||||
});
|
||||
}
|
||||
})
|
||||
.on('error', function(err) {
|
||||
return cb(err);
|
||||
})
|
||||
.on('end', function (){
|
||||
return cb();
|
||||
});
|
||||
|
||||
|
||||
};
|
||||
|
||||
BlockDb.prototype.migrateV02cleanup = function(cb) {
|
||||
var self = this;
|
||||
console.log('## deleting txb- from txs db'); //todo
|
||||
|
||||
var k = 'txb-';
|
||||
var d = this.txDb._db;
|
||||
d.createReadStream({
|
||||
start: k,
|
||||
end: k + '~'
|
||||
})
|
||||
.pipe(d.createWriteStream({type:'del'}))
|
||||
.on('close', function(err){
|
||||
if (err) return cb(err);
|
||||
console.log('## deleting tx- from txs db'); //todo
|
||||
|
||||
var k = 'tx-';
|
||||
var d = self.txDb._db;
|
||||
d.createReadStream({
|
||||
start: k,
|
||||
end: k + '~'
|
||||
})
|
||||
.pipe(d.createWriteStream({type:'del'}))
|
||||
.on('close', function(err){
|
||||
if (err) return cb(err);
|
||||
var k = 'txa-';
|
||||
var d = self.txDb._db;
|
||||
d.createReadStream({
|
||||
start: k,
|
||||
end: k + '~'
|
||||
})
|
||||
.pipe(d.createWriteStream({type:'del'}))
|
||||
.on('close', cb);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
module.exports = require('soop')(BlockDb);
|
||||
142
lib/BlockExtractor.js
Normal file
142
lib/BlockExtractor.js
Normal file
@ -0,0 +1,142 @@
|
||||
'use strict';
|
||||
var bitcore = require('bitcore'),
|
||||
Block = bitcore.Block,
|
||||
networks = bitcore.networks,
|
||||
Parser = bitcore.BinaryParser,
|
||||
fs = require('fs'),
|
||||
Buffer = bitcore.Buffer,
|
||||
glob = require('glob'),
|
||||
async = require('async');
|
||||
|
||||
function BlockExtractor(dataDir, network) {
|
||||
var path = dataDir + '/blocks/blk*.dat';
|
||||
|
||||
this.dataDir = dataDir;
|
||||
this.files = glob.sync(path);
|
||||
this.nfiles = this.files.length;
|
||||
|
||||
if (this.nfiles === 0)
|
||||
throw new Error('Could not find block files at: ' + path);
|
||||
|
||||
this.currentFileIndex = 0;
|
||||
this.isCurrentRead = false;
|
||||
this.currentBuffer = null;
|
||||
this.currentParser = null;
|
||||
this.network = network === 'testnet' ? networks.testnet: networks.livenet;
|
||||
this.magic = this.network.magic.toString('hex');
|
||||
}
|
||||
|
||||
BlockExtractor.prototype.currentFile = function() {
|
||||
return this.files[this.currentFileIndex];
|
||||
};
|
||||
|
||||
|
||||
BlockExtractor.prototype.nextFile = function() {
|
||||
if (this.currentFileIndex < 0) return false;
|
||||
|
||||
var ret = true;
|
||||
|
||||
this.isCurrentRead = false;
|
||||
this.currentBuffer = null;
|
||||
this.currentParser = null;
|
||||
|
||||
if (this.currentFileIndex < this.nfiles - 1) {
|
||||
this.currentFileIndex++;
|
||||
}
|
||||
else {
|
||||
this.currentFileIndex=-1;
|
||||
ret = false;
|
||||
}
|
||||
return ret;
|
||||
};
|
||||
|
||||
BlockExtractor.prototype.readCurrentFileSync = function() {
|
||||
if (this.currentFileIndex < 0 || this.isCurrentRead) return;
|
||||
|
||||
this.isCurrentRead = true;
|
||||
|
||||
var fname = this.currentFile();
|
||||
if (!fname) return;
|
||||
|
||||
|
||||
var stats = fs.statSync(fname);
|
||||
|
||||
var size = stats.size;
|
||||
|
||||
console.log('Reading Blockfile %s [%d MB]',
|
||||
fname, parseInt(size/1024/1024));
|
||||
|
||||
var fd = fs.openSync(fname, 'r');
|
||||
|
||||
var buffer = new Buffer(size);
|
||||
|
||||
fs.readSync(fd, buffer, 0, size, 0);
|
||||
|
||||
this.currentBuffer = buffer;
|
||||
this.currentParser = new Parser(buffer);
|
||||
};
|
||||
|
||||
|
||||
|
||||
|
||||
BlockExtractor.prototype._getMagic = function() {
|
||||
if (!this.currentParser)
|
||||
return null;
|
||||
|
||||
var byte0 = this.currentParser ? this.currentParser.buffer(1).toString('hex') : null;
|
||||
|
||||
|
||||
|
||||
// Grab 3 bytes from block without removing them
|
||||
var p = this.currentParser.pos;
|
||||
var bytes123 = this.currentParser.subject.toString('hex',p,p+3);
|
||||
var magic = byte0 + bytes123;
|
||||
|
||||
if (magic !=='00000000' && magic !== this.magic) {
|
||||
if(this.errorCount++ > 4)
|
||||
throw new Error('CRITICAL ERROR: Magic number mismatch: ' +
|
||||
magic + '!=' + this.magic);
|
||||
magic=null;
|
||||
}
|
||||
|
||||
if (magic==='00000000')
|
||||
magic =null;
|
||||
|
||||
return magic;
|
||||
};
|
||||
|
||||
BlockExtractor.prototype.getNextBlock = function(cb) {
|
||||
var b;
|
||||
var magic;
|
||||
var isFinished = 0;
|
||||
|
||||
while(!magic && !isFinished) {
|
||||
this.readCurrentFileSync();
|
||||
magic= this._getMagic();
|
||||
|
||||
if (!this.currentParser || this.currentParser.eof() ) {
|
||||
|
||||
if (this.nextFile()) {
|
||||
console.log('Moving forward to file:' + this.currentFile() );
|
||||
magic = null;
|
||||
} else {
|
||||
console.log('Finished all files');
|
||||
isFinished = 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (isFinished)
|
||||
return cb();
|
||||
|
||||
// Remove 3 bytes from magic and spacer
|
||||
this.currentParser.buffer(3+4);
|
||||
|
||||
b = new Block();
|
||||
b.parse(this.currentParser);
|
||||
b.getHash();
|
||||
this.errorCount=0;
|
||||
return cb(null,b);
|
||||
};
|
||||
|
||||
module.exports = require('soop')(BlockExtractor);
|
||||
|
||||
435
lib/HistoricSync.js
Normal file
435
lib/HistoricSync.js
Normal file
@ -0,0 +1,435 @@
|
||||
'use strict';
|
||||
|
||||
var imports = require('soop').imports();
|
||||
var util = require('util');
|
||||
var async = require('async');
|
||||
|
||||
var bitcore = require('bitcore');
|
||||
var networks = bitcore.networks;
|
||||
var config = imports.config || require('../config/config');
|
||||
var Sync = require('./Sync');
|
||||
var sockets = require('../app/controllers/socket.js');
|
||||
var BlockExtractor = require('./BlockExtractor.js');
|
||||
var buffertools = require('buffertools');
|
||||
var bitcoreUtil = bitcore.util;
|
||||
var logger = require('./logger').logger;
|
||||
var info = logger.info;
|
||||
var error = logger.error;
|
||||
var PERCENTAGE_TO_START_FROM_RPC = 0.96;
|
||||
|
||||
var BAD_GEN_ERROR = 'Bad genesis block. Network mismatch between bitcore-node and bitcoind? bitcore-node is configured for:';
|
||||
var BAD_GEN_ERROR_DB = 'Bad genesis block. Network mismatch between bitcore-node and levelDB? bitcore-node is configured for:';
|
||||
|
||||
function HistoricSync(opts) {
|
||||
opts = opts || {};
|
||||
this.shouldBroadcast = opts.shouldBroadcastSync;
|
||||
|
||||
this.network = config.network === 'testnet' ? networks.testnet: networks.livenet;
|
||||
|
||||
var genesisHashReversed = new Buffer(32);
|
||||
this.network.genesisBlock.hash.copy(genesisHashReversed);
|
||||
buffertools.reverse(genesisHashReversed);
|
||||
this.genesis = genesisHashReversed.toString('hex');
|
||||
|
||||
var bitcore = require('bitcore');
|
||||
var RpcClient = bitcore.RpcClient;
|
||||
|
||||
this.rpc = new RpcClient(config.bitcoind);
|
||||
this.sync = new Sync(opts);
|
||||
this.height =0;
|
||||
}
|
||||
|
||||
HistoricSync.prototype.showProgress = function() {
|
||||
var self = this;
|
||||
|
||||
if ( self.status ==='syncing' &&
|
||||
( self.height ) % self.step !== 1) return;
|
||||
|
||||
if (self.error)
|
||||
error(self.error);
|
||||
|
||||
else {
|
||||
self.updatePercentage();
|
||||
info(util.format('status: [%d%%]', self.syncPercentage));
|
||||
}
|
||||
if (self.shouldBroadcast) {
|
||||
sockets.broadcastSyncInfo(self.info());
|
||||
}
|
||||
//
|
||||
// if (self.syncPercentage > 10) {
|
||||
// process.exit(-1);
|
||||
// }
|
||||
};
|
||||
|
||||
|
||||
HistoricSync.prototype.setError = function(err) {
|
||||
var self = this;
|
||||
self.error = err.message?err.message:err.toString();
|
||||
self.status='error';
|
||||
self.showProgress();
|
||||
return err;
|
||||
};
|
||||
|
||||
|
||||
|
||||
HistoricSync.prototype.close = function() {
|
||||
this.sync.close();
|
||||
};
|
||||
|
||||
|
||||
HistoricSync.prototype.info = function() {
|
||||
this.updatePercentage();
|
||||
return {
|
||||
status: this.status,
|
||||
blockChainHeight: this.blockChainHeight,
|
||||
syncPercentage: this.syncPercentage,
|
||||
height: this.height,
|
||||
syncTipHash: this.sync.tip,
|
||||
error: this.error,
|
||||
type: this.type,
|
||||
startTs: this.startTs,
|
||||
endTs: this.endTs,
|
||||
};
|
||||
};
|
||||
|
||||
HistoricSync.prototype.updatePercentage = function() {
|
||||
var r = this.height / this.blockChainHeight;
|
||||
this.syncPercentage = parseFloat(100 * r).toFixed(3);
|
||||
if (this.syncPercentage > 100) this.syncPercentage = 100;
|
||||
};
|
||||
|
||||
HistoricSync.prototype.getBlockFromRPC = function(cb) {
|
||||
var self = this;
|
||||
|
||||
if (!self.currentRpcHash) return cb();
|
||||
|
||||
var blockInfo;
|
||||
self.rpc.getBlock(self.currentRpcHash, function(err, ret) {
|
||||
if (err) return cb(err);
|
||||
if (ret) {
|
||||
blockInfo = ret.result;
|
||||
// this is to match block retreived from file
|
||||
if (blockInfo.hash === self.genesis)
|
||||
blockInfo.previousblockhash =
|
||||
self.network.genesisBlock.prev_hash.toString('hex');
|
||||
|
||||
self.currentRpcHash = blockInfo.nextblockhash;
|
||||
}
|
||||
else {
|
||||
blockInfo = null;
|
||||
}
|
||||
return cb(null, blockInfo);
|
||||
});
|
||||
};
|
||||
|
||||
HistoricSync.prototype.getStandardizedBlock = function(b) {
|
||||
var self = this;
|
||||
|
||||
var block = {
|
||||
hash: bitcoreUtil.formatHashFull(b.getHash()),
|
||||
previousblockhash: bitcoreUtil.formatHashFull(b.prev_hash),
|
||||
time: b.timestamp,
|
||||
};
|
||||
var isCoinBase = 1;
|
||||
block.tx = b.txs.map(function(tx){
|
||||
var ret = self.sync.txDb.getStandardizedTx(tx, b.timestamp, isCoinBase);
|
||||
isCoinBase=0;
|
||||
return ret;
|
||||
});
|
||||
return block;
|
||||
};
|
||||
|
||||
HistoricSync.prototype.getBlockFromFile = function(cb) {
|
||||
var self = this;
|
||||
|
||||
var blockInfo;
|
||||
|
||||
//get Info
|
||||
self.blockExtractor.getNextBlock(function(err, b) {
|
||||
if (err || ! b) return cb(err);
|
||||
blockInfo = self.getStandardizedBlock(b);
|
||||
self.sync.bDb.setLastFileIndex(self.blockExtractor.currentFileIndex, function(err) {
|
||||
return cb(err,blockInfo);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
HistoricSync.prototype.updateBlockChainHeight = function(cb) {
|
||||
var self = this;
|
||||
|
||||
self.rpc.getBlockCount(function(err, res) {
|
||||
self.blockChainHeight = res.result;
|
||||
return cb(err);
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
HistoricSync.prototype.checkNetworkSettings = function(next) {
|
||||
var self = this;
|
||||
|
||||
self.hasGenesis = false;
|
||||
|
||||
// check network config
|
||||
self.rpc.getBlockHash(0, function(err, res){
|
||||
if (!err && ( res && res.result !== self.genesis)) {
|
||||
err = new Error(BAD_GEN_ERROR + config.network);
|
||||
}
|
||||
if (err) return next(err);
|
||||
self.sync.bDb.has(self.genesis, function(err, b) {
|
||||
if (!err && ( res && res.result !== self.genesis)) {
|
||||
err = new Error(BAD_GEN_ERROR_DB + config.network);
|
||||
}
|
||||
self.hasGenesis = b?true:false;
|
||||
return next(err);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
HistoricSync.prototype.updateStartBlock = function(opts, next) {
|
||||
var self = this;
|
||||
|
||||
self.startBlock = self.genesis;
|
||||
|
||||
if (opts.startAt) {
|
||||
self.sync.bDb.fromHashWithInfo(opts.startAt, function(err, bi) {
|
||||
var blockInfo = bi ? bi.info : {};
|
||||
if (blockInfo.height) {
|
||||
self.startBlock = opts.startAt;
|
||||
self.height = blockInfo.height;
|
||||
info('Resuming sync from block: %s #%d',opts.startAt, self.height);
|
||||
return next(err);
|
||||
}
|
||||
});
|
||||
}
|
||||
else {
|
||||
self.sync.bDb.getTip(function(err,tip, height) {
|
||||
if (!tip) return next();
|
||||
|
||||
var blockInfo;
|
||||
var oldtip;
|
||||
|
||||
//check that the tip is still on the mainchain
|
||||
async.doWhilst(
|
||||
function(cb) {
|
||||
self.sync.bDb.fromHashWithInfo(tip, function(err, bi) {
|
||||
blockInfo = bi ? bi.info : {};
|
||||
if (oldtip)
|
||||
self.sync.bDb.setBlockNotMain(oldtip, cb);
|
||||
else
|
||||
return cb();
|
||||
});
|
||||
},
|
||||
function(err) {
|
||||
if (err) return next(err);
|
||||
var ret = false;
|
||||
|
||||
var d = Math.abs(height-blockInfo.height);
|
||||
if (d>6) {
|
||||
error('Previous Tip block tip height differs by %d. Please delete and resync (-D)',d);
|
||||
process.exit(1);
|
||||
}
|
||||
if ( self.blockChainHeight === blockInfo.height ||
|
||||
blockInfo.confirmations > 0) {
|
||||
ret = false;
|
||||
}
|
||||
else {
|
||||
oldtip = tip;
|
||||
if (!tip)
|
||||
throw new Error('Previous blockchain tip was not found on bitcoind. Please reset bitcore-node DB. Tip was:'+tip)
|
||||
tip = blockInfo.previousblockhash;
|
||||
info('Previous TIP is now orphan. Back to:' + tip);
|
||||
ret = true;
|
||||
}
|
||||
return ret;
|
||||
},
|
||||
function(err) {
|
||||
self.startBlock = tip;
|
||||
self.height = height;
|
||||
info('Resuming sync from block: %s #%d',tip,height);
|
||||
return next(err);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
// Set up .dat-file based syncing when possible.
// Falls back to RPC sync (leaves blockExtractor unset) when: forced via
// opts.forceRPC, no bitcoind dataDir is configured, the DB is already past
// PERCENTAGE_TO_START_FROM_RPC of the chain, or the extractor cannot be
// created. Otherwise seeks the extractor forward until it reaches
// self.startBlock.
HistoricSync.prototype.prepareFileSync = function(opts, next) {
  var self = this;

  if ( opts.forceRPC || !config.bitcoind.dataDir ||
    self.height > self.blockChainHeight * PERCENTAGE_TO_START_FROM_RPC) return next();


  try {
    self.blockExtractor = new BlockExtractor(config.bitcoind.dataDir, config.network);
  } catch (e) {
    info(e.message + '. Disabling file sync.');
    return next();
  }

  self.getFn = self.getBlockFromFile;
  self.allowReorgs = true;
  self.sync.bDb.getLastFileIndex(function(err, idx) {

    // Skip ahead to a known .dat file index when one is available.
    if (opts.forceStartFile)
      self.blockExtractor.currentFileIndex = opts.forceStartFile;
    else if (idx) self.blockExtractor.currentFileIndex = idx;

    var h = self.genesis;

    info('Seeking file to:' + self.startBlock);
    //forward till startBlock
    async.whilst(
      function() {
        return h !== self.startBlock;
      },
      function (w_cb) {
        self.getBlockFromFile(function(err,b) {
          if (!b) return w_cb('Could not find block ' + self.startBlock);
          h=b.hash;
          // setImmediate keeps this potentially long seek loop from
          // starving the event loop / growing the stack.
          setImmediate(function(){
            return w_cb(err);
          });
        });
      }, function(err){
        console.log('\tFOUND Starting Block!');

        // TODO SET HEIGHT
        return next(err);
      });
  });
};
|
||||
|
||||
// Configure RPC-based block fetching, unless file-based extraction was
// already prepared (in which case this is a no-op besides calling next).
HistoricSync.prototype.prepareRpcSync = function(opts, next) {
  // File sync already set up; the RPC fallback is not needed.
  if (this.blockExtractor) return next();

  this.getFn = this.getBlockFromRPC;
  this.allowReorgs = true;
  this.currentRpcHash = this.startBlock;
  return next();
};
|
||||
|
||||
// Log how the historic sync will proceed: block counts, data source
// (.dat files vs RPC) and the starting block; then show initial progress.
HistoricSync.prototype.showSyncStartMessage = function() {
  info('Got ' + this.height +
    ' blocks in current DB, out of ' + this.blockChainHeight + ' block at bitcoind');

  if (!this.blockExtractor) {
    info('syncing from RPC (slow)');
  } else {
    info('bitcoind dataDir configured...importing blocks from .dat files');
    info('First file index: ' + this.blockExtractor.currentFileIndex);
  }

  info('Starting from: ', this.startBlock);
  this.showProgress();
};
|
||||
|
||||
|
||||
// Initialize the bookkeeping fields used while a sync run is in flight:
// progress step, source label, status flags and timestamps.
HistoricSync.prototype.setupSyncStatus = function() {
  var self = this;

  // Report progress roughly 1000 times over the remaining range, but never
  // more often than every 10 blocks. Math.floor replaces the original
  // parseInt-on-a-number (parseInt stringifies its argument and should
  // always be given a radix); any rounding difference on a negative
  // remainder is absorbed by the `< 10` clamp below.
  var step = Math.floor((self.blockChainHeight - self.height) / 1000);
  if (step < 10) step = 10;

  self.step = step;
  self.type = self.blockExtractor?'from .dat Files':'from RPC calls';
  self.status = 'syncing';
  self.startTs = Date.now();
  self.endTs = null;
  // Use `self` consistently (the original mixed `this` and `self`).
  self.error = null;
  self.syncPercentage = 0;
};
|
||||
|
||||
// Guard against pre-v0.2 on-disk databases: abort the process with resync
// instructions when the tx DB schema check fails; otherwise continue.
HistoricSync.prototype.checkDBVersion = function(cb) {
  this.sync.txDb.checkVersion02(function(isOk) {
    if (isOk) {
      // Add more test here in future changes.
      return cb();
    }
    console.log('\n#############################\n\n ## bitcore-node API DB is older that v0.2. Please resync using:\n $ util/sync.js -D\n More information at bitcore-node API\'s Readme.md');
    process.exit(1);
  });
};
|
||||
|
||||
|
||||
// Run all pre-sync steps in order (DB version check, network check,
// chain height, start block, file/RPC setup), then record sync status
// bookkeeping and call next. Errors short-circuit into setError.
HistoricSync.prototype.prepareToSync = function(opts, next) {
  var self = this;

  self.status = 'starting';
  async.series([
    self.checkDBVersion.bind(self),
    self.checkNetworkSettings.bind(self),
    self.updateBlockChainHeight.bind(self),
    self.updateStartBlock.bind(self, opts),
    self.prepareFileSync.bind(self, opts),
    self.prepareRpcSync.bind(self, opts),
  ], function(err) {
    if (err) return self.setError(err);

    self.showSyncStartMessage();
    self.setupSyncStatus();
    return next();
  });
};
|
||||
|
||||
|
||||
// Main entry point: prepare, then pull blocks one at a time via getFn
// (file or RPC) and store each as the new tip, until the chain is
// exhausted, opts.stopAt is reached, or an error flips status away from
// 'syncing'. Refuses to start if a sync is already in progress.
HistoricSync.prototype.start = function(opts, next) {
  var self = this;

  if (self.status==='starting' || self.status==='syncing') {
    error('## Wont start to sync while status is %s', self.status);
    return next();
  }

  self.prepareToSync(opts, function(err) {
    if (err) return next(self.setError(err));

    async.whilst(
      function() {
        self.showProgress();
        return self.status === 'syncing';
      },
      function (w_cb) {
        self.getFn(function(err,blockInfo) {
          if (err) return w_cb(self.setError(err));

          if (blockInfo && blockInfo.hash && (!opts.stopAt || opts.stopAt !== blockInfo.hash)) {
            self.sync.storeTipBlock(blockInfo, self.allowReorgs, function(err, height) {
              if (err) return w_cb(self.setError(err));
              if (height>=0) self.height=height;
              // Yield to the event loop between blocks.
              setImmediate(function(){
                return w_cb(err);
              });
            });
          }
          else {
            // No more blocks (or stopAt reached): mark the run finished.
            self.endTs = Date.now();
            self.status = 'finished';
            // NOTE: this local shadows the module-level `info` logger
            // within this branch.
            var info = self.info();
            logger.debug('Done Syncing blockchain', info.type, 'to height', info.height);
            return w_cb(err);
          }
        });
      }, next);
  });
};


module.exports = require('soop')(HistoricSync);
|
||||
182
lib/MessageDb.js
Normal file
182
lib/MessageDb.js
Normal file
@ -0,0 +1,182 @@
|
||||
'use strict';
var soop = require('soop');
var imports = soop.imports();
var levelup = require('levelup');
var config = require('../config/config');
var Rpc = imports.rpc || require('./Rpc');
var async = require('async');
var logger = require('./logger').logger;
var util = require('util');
var EventEmitter = require('events').EventEmitter;
var microtime = require('microtime');
var bitcore = require('bitcore');
var AuthMessage = bitcore.AuthMessage;
var preconditions = require('preconditions').singleton();

// Key layout for stored messages.
var MESSAGE_PREFIX = 'msg-'; // msg-<receiving_pubkey>-<ts> => <message>

// leveldb open-file cap for this store.
var MAX_OPEN_FILES = 500;
var CONCURRENCY = 5; // NOTE(review): unused in the code visible here.


// Module-level singleton db handle: the first MessageDb instance caches
// its levelup handle here, and later instances reuse it (see constructor).
var db;
|
||||
// LevelDB-backed store for signed, encrypted peer-to-peer messages.
// opts:
//   name - suffix appended to the on-disk path (separate stores)
//   db   - inject an existing levelup handle (e.g. for tests)
// Emits 'message' (via EventEmitter) whenever a message is stored.
// The first instance's handle is cached module-wide and shared.
var MessageDb = function(opts) {
  opts = opts || {};
  this.path = config.leveldb + '/messages' + (opts.name ? ('-' + opts.name) : '');
  this.db = opts.db || db || levelup(this.path, {
    maxOpenFiles: MAX_OPEN_FILES,
    valueEncoding: 'json'
  });
  this.initEvents();
  db = this.db; // cache as the module-level singleton
};
util.inherits(MessageDb, EventEmitter);
|
||||
|
||||
// Wire level(up) events to MessageDb events. Only the first instance
// attaches handlers: once the module-level singleton `db` is set, later
// instances share the handlers already installed.
MessageDb.prototype.initEvents = function() {
  if (db) return;

  var self = this;
  self.db.on('put', function(key, value) {
    // Re-emit each stored record as a parsed 'message' event.
    self.emit('message', MessageDb.fromStorage({ key: key, value: value }));
  });
  self.db.on('ready', function() {
    //console.log('Database ready!');
  });
};
|
||||
|
||||
// Close the underlying level(up) store; cb is invoked on completion.
MessageDb.prototype.close = function(cb) {
  this.db.close(cb);
};
|
||||
|
||||
|
||||
// Build the LevelDB key for a message addressed to pubkey `to` at
// microsecond timestamp `ts` (defaults to "now" when omitted/falsy).
// Key layout: msg-<recipient_pubkey>-<ts>. Throws (via preconditions)
// on a malformed recipient or timestamp.
var messageKey = function(to, ts) {
  preconditions.checkArgument(typeof to === 'string');
  preconditions.checkArgument(to.length === 66); // compressed pubkey hex
  preconditions.checkArgument(!ts || typeof ts === 'number');

  var stamp = ts || Math.round(microtime.now());
  return MESSAGE_PREFIX + to.toString() + '-' + stamp;
};
|
||||
|
||||
// Verify and persist message `m`, keyed by recipient + current timestamp.
// cb gets 'Authentication failed' when the signature check fails, or
// 'Bad message' when `m.to` is not a valid recipient pubkey.
MessageDb.prototype.addMessage = function(m, cb) {
  if (!this.authenticate(m)) {
    return cb(new Error('Authentication failed'));
  }

  var key;
  try {
    key = messageKey(m.to);
  } catch (e) {
    return cb(new Error('Bad message'));
  }

  this.db.put(key, m, cb);
};
|
||||
|
||||
// Check that `m.encrypted` is validly signed by `m.pubkey`.
// All three fields are required hex strings (enforced by preconditions).
// NOTE(review): uses the deprecated `new Buffer(...)` constructor; kept
// as-is since this codebase targets old Node versions — confirm before
// migrating to Buffer.from().
MessageDb.prototype.authenticate = function(m) {
  preconditions.checkArgument(m.pubkey);
  preconditions.checkArgument(m.sig);
  preconditions.checkArgument(m.encrypted);

  var frompubkey = new Buffer(m.pubkey, 'hex');
  var sig = new Buffer(m.sig, 'hex');
  var encrypted = new Buffer(m.encrypted, 'hex');
  return AuthMessage._verify(frompubkey, sig, encrypted);
};
|
||||
|
||||
// Decode a 'msg-<to>-<ts>' key into its recipient and numeric timestamp.
MessageDb.parseKey = function(key) {
  var parts = key.split('-');
  return {
    to: parts[1],
    ts: +parts[2],
  };
};
|
||||
|
||||
// Rebuild a message object from a LevelDB {key, value} record by folding
// the key-encoded recipient and timestamp back onto the stored value.
// Note: mutates and returns data.value.
MessageDb.fromStorage = function(data) {
  var meta = MessageDb.parseKey(data.key);
  data.value.ts = meta.ts;
  data.value.to = meta.to;
  return data.value;
};
|
||||
|
||||
// Stream messages for recipient `to` with timestamps in
// [lower_ts, upper_ts], returning them in key order via cb(err, list).
// lower_ts defaults to 1; when upper_ts is falsy, messageKey() substitutes
// the current time, so the range is effectively "up to now". A malformed
// recipient yields cb(Error('Bad message range')).
// Fix: reads from this instance's store (this.db) instead of the
// module-level singleton `db`, so an instance constructed with opts.db is
// actually the one queried.
MessageDb.prototype.getMessages = function(to, lower_ts, upper_ts, cb) {
  var list = [];
  var opts;
  lower_ts = lower_ts || 1;
  try {
    opts = {
      start: messageKey(to, lower_ts),
      end: messageKey(to, upper_ts),
      // limit: limit, TODO
      reverse: false,
    };
  } catch (e) {
    return cb(new Error('Bad message range'));
  }

  this.db.createReadStream(opts)
    .on('data', function(data) {
      list.push(MessageDb.fromStorage(data));
    })
    .on('error', function(err) {
      return cb(err);
    })
    .on('end', function() {
      return cb(null, list);
    });
};
|
||||
|
||||
// Return every stored message via cb(err, list).
// Fix: reads from this.db rather than the module-level singleton `db`,
// so an instance constructed with opts.db is the one scanned.
MessageDb.prototype.getAll = function(cb) {
  var list = [];
  this.db.createReadStream()
    .on('data', function(data) {
      list.push(MessageDb.fromStorage(data));
    })
    .on('error', function(err) {
      return cb(err);
    })
    .on('end', function() {
      return cb(null, list);
    });
};
|
||||
|
||||
// Delete all messages with a timestamp strictly older than `ts`.
// cb(err, count) reports how many keys were batch-deleted.
// Fix: operates on this.db rather than the module-level singleton `db`,
// so an instance constructed with opts.db is the one pruned.
MessageDb.prototype.removeUpTo = function(ts, cb) {
  preconditions.checkArgument(ts);
  preconditions.checkArgument(typeof ts === 'number');

  var self = this;
  var dels = [];
  self.db.createKeyStream({})
    .on('data', function(key) {
      var parsed = MessageDb.parseKey(key);
      if (parsed.ts < ts) {
        logger.verbose('Deleting message ' + key);
        dels.push({
          type: 'del',
          key: key
        });
      }
    })
    .on('error', function(err) {
      return cb(err);
    })
    .on('end', function() {
      self.db.batch(dels, function(err) {
        if (err) return cb(err);
        else cb(null, dels.length);
      });
    });
};

module.exports = soop(MessageDb);
|
||||
150
lib/PeerSync.js
Normal file
150
lib/PeerSync.js
Normal file
@ -0,0 +1,150 @@
|
||||
'use strict';
var fs = require('fs');
var bitcore = require('bitcore');
var bitcoreUtil = bitcore.util;
var Sync = require('./Sync');
var Peer = bitcore.Peer;
var PeerManager = bitcore.PeerManager;
var config = require('../config/config');
var networks = bitcore.networks;
var sockets = require('../app/controllers/socket.js');

// File the current peer list is dumped to (see load_peers).
var peerdb_fn = 'peerdb.json';
|
||||
|
||||
// Maintains a live p2p connection to bitcoind and feeds incoming txs and
// blocks into Sync. opts:
//   shouldBroadcast - re-emit stored txs/blocks over socket.io
//   verbose         - enable this.log output
function PeerSync(opts) {
  opts = opts|| {};
  this.shouldBroadcast = opts.shouldBroadcast;
  this.connected = false;
  this.peerdb = undefined;
  // p2p tip updates never trigger reorg handling (see handleBlock).
  this.allowReorgs = false;
  var pmConfig = {
    network: config.network
  };
  this.peerman = new PeerManager(pmConfig);
  this.load_peers();
  this.sync = new Sync(opts);
  this.verbose = opts.verbose || false;
}
|
||||
|
||||
// Verbose-mode logger: forwards the actual arguments to console.log.
// Fix: the original did `console.log(arguments)`, which prints the
// arguments object itself (`{ '0': ..., '1': ... }`) and defeats the
// printf-style calls used elsewhere (e.g. handleBlock's '%s' formats);
// apply spreads them properly.
PeerSync.prototype.log = function() {
  if (this.verbose) console.log.apply(console, arguments);
};
|
||||
|
||||
// Seed the peer list with the single bitcoind node from config and
// persist it to peerdb.json for inspection.
PeerSync.prototype.load_peers = function() {
  var seed = {
    ipv4: config.bitcoind.p2pHost,
    port: config.bitcoind.p2pPort
  };
  this.peerdb = [seed];
  fs.writeFileSync(peerdb_fn, JSON.stringify(this.peerdb));
};
|
||||
|
||||
// Snapshot of the p2p connection state for status endpoints.
PeerSync.prototype.info = function() {
  var seed = this.peerdb[0];
  return {
    connected: this.connected,
    host: seed.ipv4,
    port: seed.port
  };
};
|
||||
|
||||
// On 'inv', immediately request the announced items (blocks/txs).
PeerSync.prototype.handleInv = function(info) {
  info.conn.sendGetData(info.message.invs);
};
|
||||
|
||||
// Notify per-address socket subscribers about txid. `addrs` is keyed by
// address (presumably addr -> related info; only the keys are used here
// — confirm against Sync.storeTx's callback value).
PeerSync.prototype._broadcastAddr = function(txid, addrs) {
  if (!addrs) return;
  for (var addr in addrs) {
    sockets.broadcastAddressTx(txid, addr);
  }
};
|
||||
|
||||
|
||||
// p2p 'tx' handler: normalize, timestamp and store an unconfirmed tx,
// then (optionally) broadcast it and its related addresses over sockets.
PeerSync.prototype.handleTx = function(info) {
  var self =this;
  var tx = this.sync.txDb.getStandardizedTx(info.message.tx);
  self.log('[p2p_sync] Handle tx: ' + tx.txid);
  // Mempool txs carry no block time; fall back to "now" in seconds.
  tx.time = tx.time || Math.round(new Date().getTime() / 1000);

  this.sync.storeTx(tx, function(err, relatedAddrs) {
    if (err) {
      self.log('[p2p_sync] Error in handle TX: ' + JSON.stringify(err));
    }
    else if (self.shouldBroadcast) {
      sockets.broadcastTx(tx);
      self._broadcastAddr(tx.txid, relatedAddrs);
    }
  });
};
|
||||
|
||||
|
||||
// p2p 'block' handler: store the announced block as the new chain tip.
// If storage fails with NEED_SYNC (unknown parent block), a full historic
// resync over RPC is triggered through self.historicSync (when attached).
PeerSync.prototype.handleBlock = function(info) {
  var self = this;
  var block = info.message.block;
  var blockHash = bitcoreUtil.formatHashFull(block.calcHash());
  self.log('[p2p_sync] Handle block: %s (allowReorgs: %s)', blockHash, self.allowReorgs);

  var tx_hashes = block.txs.map(function(tx) {
    return bitcoreUtil.formatHashFull(tx.hash);
  });

  self.sync.storeTipBlock({
    'hash': blockHash,
    'tx': tx_hashes,
    'previousblockhash': bitcoreUtil.formatHashFull(block.prev_hash),
  }, self.allowReorgs, function(err, height) {
    if (err && err.message.match(/NEED_SYNC/) && self.historicSync) {
      self.log('[p2p_sync] Orphan block received. Triggering sync');
      self.historicSync.start({forceRPC:1}, function(){
        self.log('[p2p_sync] Done resync.');
      });
    }
    else if (err) {
      self.log('[p2p_sync] Error in handle Block: ', err);
    }
    else {
      if (self.shouldBroadcast) {
        sockets.broadcastBlock(blockHash);
        // broadcasting addresses here is a bad idea. listening to new
        // block should be enough
      }
    }
  });
};
|
||||
|
||||
// Log the current peer count whenever the peer manager connects.
PeerSync.prototype.handleConnected = function(data) {
  var n = data.pm.peers.length;
  var plural = (n !== 1) ? 's' : '';
  this.log('[p2p_sync] Connected to ' + n + ' peer' + plural);
};
|
||||
|
||||
// Connect to all known peers and hook the p2p message handlers.
PeerSync.prototype.run = function() {
  var self = this;

  self.peerdb.forEach(function(datum) {
    self.peerman.addPeer(new Peer(datum.ipv4, datum.port));
  });

  self.peerman.on('connection', function(conn) {
    self.connected = true;
    conn.on('inv', self.handleInv.bind(self));
    conn.on('block', self.handleBlock.bind(self));
    conn.on('tx', self.handleTx.bind(self));
  });
  self.peerman.on('connect', self.handleConnected.bind(self));
  self.peerman.on('netDisconnected', function() {
    self.connected = false;
  });

  self.peerman.start();
};
|
||||
|
||||
// Tear down: close the underlying Sync databases.
PeerSync.prototype.close = function() {
  this.sync.close();
};


module.exports = require('soop')(PeerSync);
|
||||
32
lib/PoolMatch.js
Normal file
32
lib/PoolMatch.js
Normal file
@ -0,0 +1,32 @@
|
||||
'use strict';

var imports = require('soop').imports();
var fs = require('fs');
var buffertools = require('buffertools');
// Pool definitions: [{poolName, url, searchStrings: [...]}, ...].
// Injectable via soop (imports.db) for tests; otherwise loaded from JSON.
var db = imports.db || JSON.parse( fs.readFileSync(imports.poolMatchFile || './poolMatchFile.json'));
|
||||
|
||||
// Flatten the pool db into a lookup table mapping each coinbase
// search-string to its {poolName, url}, so match() can scan keys directly.
var PoolMatch = function() {
  this.strings = {};
  var table = this.strings;

  db.forEach(function(pool) {
    pool.searchStrings.forEach(function(s) {
      table[s] = {
        poolName: pool.poolName,
        url: pool.url
      };
    });
  });
};
|
||||
|
||||
|
||||
// Scan `buffer` (typically a coinbase script) for any known pool marker.
// Returns {poolName, url} for the first matching key, undefined otherwise.
PoolMatch.prototype.match = function(buffer) {
  var table = this.strings;
  for (var key in table) {
    if (buffertools.indexOf(buffer, key) >= 0) {
      return table[key];
    }
  }
};

module.exports = require('soop')(PoolMatch);
|
||||
119
lib/Rpc.js
Normal file
119
lib/Rpc.js
Normal file
@ -0,0 +1,119 @@
|
||||
'use strict';

var imports = require('soop').imports();

var bitcore = require('bitcore'),
  RpcClient = bitcore.RpcClient,
  BitcoreBlock = bitcore.Block,
  util = require('util'),
  config = require('../config/config');

// Shared JSON-RPC client for bitcoind (injectable via soop for tests).
var bitcoreRpc = imports.bitcoreRpc || new RpcClient(config.bitcoind);
|
||||
|
||||
// Namespace constructor; all methods are attached statically below.
function Rpc() {
}
|
||||
|
||||
// Normalize a verbose `getrawtransaction` result:
//  - drop the raw hex (recording its byte length as `size`)
//  - number inputs and flag coinbase txs
//  - format each output value to 8 decimals and total them as `valueOut`
Rpc._parseTxResult = function(info) {
  var b = new Buffer(info.hex,'hex');

  // remove fields we dont need, to speed and adapt the information
  delete info.hex;

  // Inputs => add index + coinBase flag
  var n =0;
  info.vin.forEach(function(i) {
    i.n = n++;
    if (i.coinbase) info.isCoinBase = true;
  });

  // Outputs => add total.
  // Round each output to whole satoshis before summing: BTC values come
  // back as decimal fractions and `value * COIN` is not exact in floating
  // point (e.g. 0.1 * 1e8 === 10000000.000000002), so summing the raw
  // products can drift by a satoshi on many-output transactions.
  var valueOutSat = 0;
  info.vout.forEach( function(o) {
    o.value = o.value.toFixed(8);
    valueOutSat += Math.round(o.value * bitcore.util.COIN);
  });
  info.valueOut = valueOutSat.toFixed(0) / bitcore.util.COIN;
  info.size = b.length;

  return info;
};
|
||||
|
||||
|
||||
// Decorate an RPC error's message with connection details for debugging.
Rpc.errMsg = function(err) {
  var suffix = util.format(' [Host: %s:%d User:%s Using password:%s]',
    bitcoreRpc.host,
    bitcoreRpc.port,
    bitcoreRpc.user,
    bitcoreRpc.pass ? 'yes' : 'no'
  );
  err.message += suffix;
  return err;
};
|
||||
|
||||
// Fetch a transaction via RPC. `doNotParse` is optional; when omitted (or
// false) the result is normalized through _parseTxResult. A "not found"
// RPC error (-5) yields cb() with no arguments instead of an error.
Rpc.getTxInfo = function(txid, doNotParse, cb) {
  var self = this;

  if (typeof doNotParse === 'function') {
    cb = doNotParse;
    doNotParse = false;
  }

  bitcoreRpc.getRawTransaction(txid, 1, function(err, txInfo) {
    if (err) {
      // -5: not found => report "no result" rather than an error.
      if (err.code === -5) return cb();
      return cb(self.errMsg(err));
    }

    if (doNotParse) return cb(null, txInfo.result);
    return cb(null, self._parseTxResult(txInfo.result));
  });
};
|
||||
|
||||
|
||||
// Map a block height to its hash; cb(err, {blockHash}).
Rpc.blockIndex = function(height, cb) {
  var self = this;

  bitcoreRpc.getBlockHash(height, function(err, bh) {
    if (err) return cb(self.errMsg(err));
    return cb(null, { blockHash: bh.result });
  });
};
|
||||
|
||||
// Fetch block `hash` via RPC, augmenting the result with the block reward
// (in BTC) when a height is present. -5 (not found) yields cb().
Rpc.getBlock = function(hash, cb) {
  var self = this;

  bitcoreRpc.getBlock(hash, function(err, info) {
    if (err) {
      // Not found?
      if (err.code === -5) return cb();
      return cb(self.errMsg(err));
    }

    var block = info.result;
    if (block.height)
      block.reward = BitcoreBlock.getBlockValue(block.height) / bitcore.util.COIN ;

    return cb(err, block);
  });
};
|
||||
|
||||
// Broadcast a raw (hex-encoded) transaction; cb(err, txid).
Rpc.sendRawTransaction = function(rawtx, cb) {
  bitcoreRpc.sendRawTransaction(rawtx, function(err, txid) {
    if (err) return cb(err);
    return cb(null, txid.result);
  });
};
|
||||
|
||||
// Verify a signed message through bitcoind. Client-side errors (-3 =
// invalid address, -5 = malformed base64 / etc.) are passed through
// untouched; other errors get decorated with connection info.
Rpc.verifyMessage = function(address, signature, message, cb) {
  var self = this;
  bitcoreRpc.verifyMessage(address, signature, message, function(err, res) {
    if (err) {
      var isClientError = (err.code === -3 || err.code === -5);
      return cb(isClientError ? err : self.errMsg(err));
    }
    return cb(err, res.result);
  });
};

module.exports = require('soop')(Rpc);
|
||||
|
||||
|
||||
300
lib/Sync.js
Normal file
300
lib/Sync.js
Normal file
@ -0,0 +1,300 @@
|
||||
'use strict';

var imports = require('soop').imports();

var config = imports.config || require('../config/config');
var bitcore = require('bitcore');
var networks = bitcore.networks;
var async = require('async');

var logger = require('./logger').logger;
var d = logger.log;
var info = logger.info;



// Monotonic id so concurrent Sync instances can be told apart.
var syncId = 0;
|
||||
|
||||
// Coordinates writes to the block and transaction DBs, including tip
// updates and chain-reorg handling (see storeTipBlock / processReorg).
function Sync(opts) {
  this.id = syncId++;
  this.opts = opts || {};
  this.bDb = require('./BlockDb').default();
  this.txDb = require('./TransactionDb').default();
  this.network = config.network === 'testnet' ? networks.testnet : networks.livenet;
  // Last stored tip hash; lets storeTipBlock skip a prev-block lookup.
  this.cachedLastHash = null;
}
|
||||
|
||||
// Close both databases (transactions first, then blocks); cb when done.
Sync.prototype.close = function(cb) {
  var self = this;
  self.txDb.close(function() {
    self.bDb.close(cb);
  });
};
|
||||
|
||||
|
||||
// Drop both databases (block db, then tx db); next(err) on completion.
Sync.prototype.destroy = function(next) {
  var self = this;
  async.series([
    function(cb) { self.bDb.drop(cb); },
    function(cb) { self.txDb.drop(cb); },
  ], next);
};
|
||||
|
||||
/*
 * A NEW block arrives and becomes the new TIP.
 *
 * Case 0) Simple case
 * A-B-C-D-E(TIP)-NEW
 *
 * Case 1)
 * A-B-C-D-E(TIP)
 *        \
 *         NEW
 *
 * 1) Declare D-E orphans (and possibly invalidate TXs on them)
 *
 * Case 2)
 * A-B-C-D-E(TIP)
 *    \
 *     F-G-NEW
 * 1) Set F-G as connected (mark TXs as valid)
 * 2) Set new heights in F-G-NEW
 * 3) Declare D-E orphans (and possibly invalidate TXs on them)
 *
 *
 * Case 3)
 *
 * A-B-C-D-E(TIP) ... NEW
 *
 * NEW is ignored (if allowReorgs is false)
 *
 *
 */
|
||||
|
||||
// Store block `b` as the new chain tip (see the case analysis above).
// Serialized via self.storingBlock: a concurrent call is retried after 1s.
// When allowReorgs is set, a tip/parent mismatch triggers processReorg;
// an unknown parent raises a NEED_SYNC error. cb(err, height) reports the
// height assigned to the new tip (-1 if not computed).
Sync.prototype.storeTipBlock = function(b, allowReorgs, cb) {

  if (typeof allowReorgs === 'function') {
    cb = allowReorgs;
    allowReorgs = true;
  }
  if (!b) return cb();
  var self = this;

  if ( self.storingBlock ) {
    logger.debug('Storing a block already. Delaying storeTipBlock with:' +
      b.hash);
    return setTimeout( function() {
      logger.debug('Retrying storeTipBlock with: ' + b.hash);
      self.storeTipBlock(b,allowReorgs,cb);
    }, 1000);
  }

  self.storingBlock=1;
  var oldTip, oldNext, oldHeight, needReorg = false, height = -1;
  var newPrev = b.previousblockhash;
  async.series([

    // This seems unnecessary.
    // function(c) {
    //   // TODO? remove this check?
    //   self.bDb.has(b.hash, function(err, val) {
    //     return c(err ||
    //       (val ? new Error('WARN: Ignoring already existing block:' + b.hash) : null));
    //   });
    // },
    // Reject blocks whose parent we do not have (genesis excepted).
    function(c) {
      if (!allowReorgs || newPrev === self.cachedLastHash) return c();
      self.bDb.has(newPrev, function(err, val) {
        // Genesis? no problem
        if (!val && newPrev.match(/^0+$/)) return c();
        return c(err ||
          (!val ? new Error('NEED_SYNC Ignoring block with non existing prev:' + b.hash) : null));
      });
    },
    // Compare the incoming block's parent against the stored tip.
    function(c) {
      if (!allowReorgs) return c();
      self.bDb.getTip(function(err, hash, h) {
        oldTip = hash;
        oldHeight = hash ? (h || 0) : -1
        if (oldTip && newPrev !== oldTip) {
          needReorg = true;
          logger.debug('REORG Triggered, tip mismatch');
        }
        return c();
      });
    },

    // Remember the fork point's old `next` pointer (start of the branch
    // that will be orphanized).
    function(c) {
      if (!needReorg) return c();
      self.bDb.getNext(newPrev, function(err, val) {
        if (err) return c(err);
        oldNext = val;
        return c();
      });
    },
    // Either run the reorg or simply extend the chain by one block.
    function(c) {
      if (!allowReorgs) return c();
      if (needReorg) {
        info('NEW TIP: %s NEED REORG (old tip: %s #%d)', b.hash, oldTip, oldHeight);
        self.processReorg(oldTip, oldNext, newPrev, oldHeight, function(err, h) {
          // NOTE(review): a reorg failure is thrown (crashing the process)
          // rather than passed to cb — confirm this is intentional.
          if (err) throw err;

          height = h;
          return c();
        });
      }
      else {
        height = oldHeight + 1;
        return c();
      }
    },
    function(c) {
      self.cachedLastHash = b.hash; // just for speed up.
      self.bDb.add(b, height, c);
    },
    function(c) {
      if (!allowReorgs) return c();
      self.bDb.setTip(b.hash, height, function(err) {
        return c(err);
      });
    },
    function(c) {
      self.bDb.setNext(newPrev, b.hash, function(err) {
        return c(err);
      });
    }

  ],
  function(err) {
    // 'WARN'-prefixed errors are informational only; swallow them.
    if (err && err.toString().match(/WARN/)) {
      err = null;
    }
    self.storingBlock=0;
    return cb(err, height);
  });
};
|
||||
|
||||
// Handle a chain reorganization.
// Case 1: newPrev is already on the main chain -> orphanize from oldNext.
// Case 2: newPrev sits on a side branch -> reconnect that branch backwards
//         to the fork point, then orphanize the old main branch.
// cb(err, newHeight) where newHeight is the height for the incoming tip.
Sync.prototype.processReorg = function(oldTip, oldNext, newPrev, oldHeight, cb) {
  var self = this;

  var orphanizeFrom, newHeight;

  async.series([

    function(c) {
      self.bDb.getHeight(newPrev, function(err, height) {
        if (!height) {
          // Case 3 + allowReorgs = true
          return c(new Error('Could not found block:' + newPrev));
        }
        // Negative height => parent is on a side branch (Case 2).
        if (height<0) return c();

        newHeight = height + 1;
        info('Reorg Case 1) OldNext: %s NewHeight: %d', oldNext, newHeight);
        orphanizeFrom = oldNext;
        return c(err);
      });
    },
    function(c) {
      if (orphanizeFrom) return c();

      info('Reorg Case 2)');
      self.setBranchConnectedBackwards(newPrev, function(err, yHash, newYHashNext, height) {
        if (err) return c(err);
        newHeight = height;
        self.bDb.getNext(yHash, function(err, yHashNext) {
          // Connect the new branch, and orphanize the old one.
          orphanizeFrom = yHashNext;
          self.bDb.setNext(yHash, newYHashNext, function(err) {
            return c(err);
          });
        });
      });
    },
    function(c) {
      if (!orphanizeFrom) return c();
      self._setBranchOrphan(orphanizeFrom, function(err) {
        return c(err);
      });
    },
  ],
  function(err) {
    return cb(err, newHeight);
  });
};
|
||||
|
||||
// Walk the `next` pointers starting at fromHash and mark every block on
// that branch as not-main (orphan). cb(err) once the chain is exhausted.
Sync.prototype._setBranchOrphan = function(fromHash, cb) {
  var self = this;
  var cursor = fromHash;

  async.whilst(
    function() {
      return cursor;
    },
    function(done) {
      self.bDb.setBlockNotMain(cursor, function(err) {
        if (err) return cb(err);
        self.bDb.getNext(cursor, function(err, nextHash) {
          cursor = nextHash;
          return done(err);
        });
      });
    }, cb);
};
|
||||
|
||||
// Walk backwards from fromHash until a block already on the main chain
// (height > 0) is found, collecting the side branch along the way; then
// mark every collected block as main with consecutive heights.
// cb(err, yHash, firstBranchHash, nextHeight): yHash is the fork point,
// firstBranchHash the first block of the reconnected branch, nextHeight
// the height following the branch tip.
// NOTE(review): the async.doWhilst completion callback here takes no err
// argument, so an error from getPrev would be silently dropped — confirm
// against the async version in use.
Sync.prototype.setBranchConnectedBackwards = function(fromHash, cb) {
  //console.log('[Sync.js.219:setBranchConnectedBackwards:]',fromHash); //TODO
  var self = this,
    hashInterator = fromHash,
    lastHash = fromHash,
    yHeight,
    branch = [];

  async.doWhilst(
    // Body: prepend the current hash to the branch and step to its parent.
    function(c) {
      branch.unshift(hashInterator);

      self.bDb.getPrev(hashInterator, function(err, val) {
        if (err) return c(err);
        lastHash = hashInterator;
        hashInterator = val;
        self.bDb.getHeight(hashInterator, function(err, height) {
          yHeight = height;
          return c();
        });
      });
    },
    // Test: keep walking while a parent exists and is not on main chain.
    function() {
      return hashInterator && yHeight<=0;
    },
    // Done: re-mark the collected branch as main, heights ascending from
    // the fork point.
    function() {
      info('\tFound yBlock: %s #%d', hashInterator, yHeight);
      var heightIter = yHeight + 1;
      var hashIter;
      async.whilst(
        function() {
          hashIter = branch.shift();
          return hashIter;
        },
        function(c) {
          self.bDb.setBlockMain(hashIter, heightIter++, c);
        },
        function(err) {
          return cb(err, hashInterator, lastHash, heightIter);
        });
    });
};
|
||||
|
||||
|
||||
//Store unconfirmed TXs
// Thin wrapper over TransactionDb.add; callers (e.g. PeerSync.handleTx)
// receive cb(err, relatedAddrs).
Sync.prototype.storeTx = function(tx, cb) {
  this.txDb.add(tx, cb);
};


module.exports = require('soop')(Sync);
|
||||
749
lib/TransactionDb.js
Normal file
749
lib/TransactionDb.js
Normal file
@ -0,0 +1,749 @@
|
||||
'use strict';

var imports = require('soop').imports();



// Key prefixes for the leveldb tx store.
// to show tx outs
var OUTS_PREFIX = 'txo-'; //txo-<txid>-<n> => [addr, btc_sat]
var SPENT_PREFIX = 'txs-'; //txs-<txid(out)>-<n(out)>-<txid(in)>-<n(in)> = ts

// to sum up addr balance (only outs, spents are gotten later)
var ADDR_PREFIX = 'txa2-'; //txa-<addr>-<tsr>-<txid>-<n>
// tsr = 1e13-js_timestamp
// => + btc_sat [:isConfirmed:[scriptPubKey|isSpendConfirmed:SpentTxid:SpentVout:SpentTs]
// |balance:txApperances


// TODO: use bitcore networks module
var genesisTXID = '4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b';
var CONCURRENCY = 10;
var DEFAULT_SAFE_CONFIRMATIONS = 6;

var MAX_OPEN_FILES = 500;
// Sentinel "reversed timestamp" base used by the txa2- key scheme above.
var END_OF_WORLD_TS = 1e13;
// var CONFIRMATION_NR_TO_NOT_CHECK = 10; //Spend
/**
 * Module dependencies.
 */

var bitcore = require('bitcore'),
  Rpc = imports.rpc || require('./Rpc'),
  util = bitcore.util,
  networks = bitcore.networks,
  levelup = require('levelup'),
  async = require('async'),
  config = require('../config/config'),
  assert = require('assert'),
  Script = bitcore.Script,
  bitcoreUtil = bitcore.util,
  buffertools = require('buffertools');

var logger = require('./logger').logger;

// Module-level leveldb handle (injectable via soop for tests).
var db = imports.db || levelup(config.leveldb + '/txs', {
  maxOpenFiles: MAX_OPEN_FILES
});
var PoolMatch = imports.poolMatch || require('soop').load('./PoolMatch', config);
// This is 0.1.2 = > c++ version of base58-native
var base58 = require('base58-native').base58Check;
var encodedData = require('soop').load('bitcore/util/EncodedData', {
  base58: base58
});
var versionedData = require('soop').load('bitcore/util/VersionedData', {
  parent: encodedData
});

var Address = require('soop').load('bitcore/lib/Address', {
  parent: versionedData
});
||||
// Key-value store for transactions, their outputs, spend records and
// per-address indexes (see the key-prefix comments above).
var TransactionDb = function() {
  TransactionDb.super(this, arguments);
  this.network = config.network === 'testnet' ? networks.testnet : networks.livenet;
  this.poolMatch = new PoolMatch();
  this.safeConfirmations = config.safeConfirmations || DEFAULT_SAFE_CONFIRMATIONS;

  this._db = db; // this is only exposed for migration script
};
|
||||
|
||||
// Close the shared leveldb handle; cb(err) when done.
TransactionDb.prototype.close = function(cb) {
  db.close(cb);
};
|
||||
|
||||
// Destroy the on-disk tx database and recreate an empty one in place.
// Fix: the recreated store now uses MAX_OPEN_FILES (previously a
// hard-coded 500), keeping it consistent with the module-level
// configuration used at startup.
TransactionDb.prototype.drop = function(cb) {
  var path = config.leveldb + '/txs';
  db.close(function() {
    require('leveldown').destroy(path, function() {
      db = levelup(path, {
        maxOpenFiles: MAX_OPEN_FILES
      });
      return cb();
    });
  });
};
|
||||
|
||||
// Record on output-info object `r` that it was spent by input `index` of
// tx `txid` at time `ts`. If `r` already has a spender, the competing
// spends are preserved in r.multipleSpentAttempts (double-spend attempt)
// instead of overwriting the first one.
// Fix: the seed entry of multipleSpentAttempts recorded `r.index`, a
// field never set on spent info — the stored spend index is
// `r.spentIndex`. Also pass an explicit radix to parseInt.
TransactionDb.prototype._addSpentInfo = function(r, txid, index, ts) {
  if (r.spentTxId) {
    if (!r.multipleSpentAttempts) {
      // Seed the list with the originally recorded spend.
      r.multipleSpentAttempts = [{
        txid: r.spentTxId,
        index: r.spentIndex,
      }];
    }
    r.multipleSpentAttempts.push({
      txid: txid,
      index: parseInt(index, 10),
    });
  } else {
    r.spentTxId = txid;
    r.spentIndex = parseInt(index, 10);
    r.spentTs = parseInt(ts, 10);
  }
};
|
||||
|
||||
|
||||
// This is not used now
// Load the outputs of `txid` (addr, value_sat, index) from the txo-
// records, then attach spend info from the txs- records.
// cb(err, outputsArray).
TransactionDb.prototype.fromTxId = function(txid, cb) {
  var self = this;
  var k = OUTS_PREFIX + txid;
  var ret = [];
  var idx = {}; // output index -> position in ret
  var i = 0;

  // outs.
  db.createReadStream({
    start: k,
    end: k + '~'
  })
    .on('data', function(data) {
      var k = data.key.split('-');
      var v = data.value.split(':');
      ret.push({
        addr: v[0],
        value_sat: parseInt(v[1]),
        index: parseInt(k[2]),
      });
      idx[parseInt(k[2])] = i++;
    })
    .on('error', function(err) {
      return cb(err);
    })
    .on('end', function() {

      // Second pass: attach spend info to the matching outputs.
      var k = SPENT_PREFIX + txid + '-';
      db.createReadStream({
        start: k,
        end: k + '~'
      })
        .on('data', function(data) {
          var k = data.key.split('-');
          var j = idx[parseInt(k[2])];

          assert(typeof j !== 'undefined', 'Spent could not be stored: tx ' + txid +
            'spent in TX:' + k[1] + ',' + k[2] + ' j:' + j);

          self._addSpentInfo(ret[j], k[3], k[4], data.value);
        })
        .on('error', function(err) {
          return cb(err);
        })
        .on('end', function(err) {
          return cb(err, ret);
        });
    });
};
|
||||
|
||||
|
||||
// Overlays spend info onto info.vout[n] entries by scanning the
// SPENT_PREFIX records for info.txid. No-op when `info` is falsy.
TransactionDb.prototype._fillSpent = function(info, cb) {
  var self = this;

  if (!info) return cb();

  var k = SPENT_PREFIX + info.txid + '-';
  db.createReadStream({
    start: k,
    end: k + '~'
  })
    .on('data', function(data) {
      // Key layout (split on '-'): [prefix, txid, outIndex, spendTxid, spendIndex]
      var k = data.key.split('-');
      self._addSpentInfo(info.vout[k[2]], k[3], k[4], data.value);
    })
    .on('error', function(err) {
      return cb(err);
    })
    .on('end', function(err) {
      return cb(err);
    });
};
|
||||
|
||||
|
||||
// Resolves each input's outpoint (address, value) via fromTxIdN, accumulates
// valueIn/fees on txInfo, and flags double-spend attempts. Coinbase txs have
// no resolvable inputs and are skipped.
TransactionDb.prototype._fillOutpoints = function(txInfo, cb) {
  var self = this;

  if (!txInfo || txInfo.isCoinBase) return cb();

  var valueIn = 0;
  var incompleteInputs = 0;

  async.eachLimit(txInfo.vin, CONCURRENCY, function(i, c_in) {
    self.fromTxIdN(i.txid, i.vout, function(err, ret) {
      if (!ret || !ret.addr || !ret.valueSat) {
        logger.info('Could not get TXouts in %s,%d from %s ', i.txid, i.vout, txInfo.txid);
        if (ret) i.unconfirmedInput = ret.unconfirmedInput;
        incompleteInputs = 1;
        return c_in(); // error not scalated
      }

      // NOTE(review): fromTxIdN does not set ret.ts, so this is likely
      // always undefined — confirm against the intended data flow.
      txInfo.firstSeenTs = ret.ts;
      // NOTE(review): self-assignment — probably meant
      // i.unconfirmedInput = ret.unconfirmedInput; confirm.
      i.unconfirmedInput = i.unconfirmedInput;
      i.addr = ret.addr;
      i.valueSat = ret.valueSat;
      i.value = ret.valueSat / util.COIN;
      valueIn += i.valueSat;

      // NOTE(review): 'multipleSpentAttempt' (no trailing 's') is never set
      // anywhere — _addSpentInfo sets 'multipleSpentAttempts' — so this
      // first term is always undefined; confirm whether the typo matters.
      if (ret.multipleSpentAttempt || !ret.spentTxId ||
        (ret.spentTxId && ret.spentTxId !== txInfo.txid)
      ) {
        if (ret.multipleSpentAttempts) {
          ret.multipleSpentAttempts.forEach(function(mul) {
            // NOTE(review): attempt entries carry {txid, index}, not
            // spentTxId — this comparison looks off; verify.
            if (mul.spentTxId !== txInfo.txid) {

              i.doubleSpentTxID = ret.spentTxId;
              i.doubleSpentIndex = ret.spentIndex;
            }
          });
        } else if (!ret.spentTxId) {
          i.dbError = 'Input spent not registered';
        } else {
          // This outpoint was spent by a different tx: double spend.
          i.doubleSpentTxID = ret.spentTxId;
          i.doubleSpentIndex = ret.spentIndex;
        }
      } else {
        i.doubleSpentTxID = null;
      }
      return c_in();
    });
  },
  function() {
    if (!incompleteInputs) {
      txInfo.valueIn = valueIn / util.COIN;
      // Fees = inputs - outputs, rounded to whole satoshis first.
      txInfo.fees = (valueIn - (txInfo.valueOut * util.COIN)).toFixed(0) / util.COIN;
    } else {
      // Some inputs could not be resolved; mark instead of guessing fees.
      txInfo.incompleteInputs = 1;
    }
    return cb();
  });
};
|
||||
|
||||
// Fetches tx info over RPC, then enriches it with outpoint and spend data.
TransactionDb.prototype._getInfo = function(txid, next) {
  var self = this;

  Rpc.getTxInfo(txid, function(err, info) {
    if (err) {
      return next(err);
    }

    // Chain the two enrichment steps, then hand the finished object back.
    self._fillOutpoints(info, function() {
      self._fillSpent(info, function() {
        next(null, info);
      });
    });
  });
};
|
||||
|
||||
|
||||
// Simplified / faster Info version: No spent / outpoints info.
|
||||
// Fast-path tx info lookup: raw RPC result, no spent/outpoint enrichment.
TransactionDb.prototype.fromIdInfoSimple = function(txid, cb) {
  Rpc.getTxInfo(txid, true, function(err, info) {
    if (err) {
      return cb(err);
    }
    return info ? cb(null, info) : cb();
  });
};
|
||||
|
||||
// Looks up a transaction and wraps the enriched result as {txid, info}.
// cb() with no arguments when the tx is unknown.
TransactionDb.prototype.fromIdWithInfo = function(txid, cb) {
  var self = this;

  self._getInfo(txid, function(err, info) {
    if (err) {
      return cb(err);
    }
    if (!info) {
      return cb();
    }
    return cb(null, {
      txid: txid,
      info: info
    });
  });
};
|
||||
|
||||
// Gets address info from an outpoint
|
||||
// Gets address info from an outpoint (txid, output index n).
// Yields {addr, valueSat} when the output is in the db, or
// {unconfirmedInput: 1} when it is not yet known; spend records are merged
// in via _addSpentInfo before cb(null, ret) fires.
TransactionDb.prototype.fromTxIdN = function(txid, n, cb) {
  var self = this;
  var k = OUTS_PREFIX + txid + '-' + n;

  db.get(k, function(err, val) {
    var ret;

    if (!val || (err && err.notFound)) {
      // Missing output is not an error: treat as an unconfirmed input.
      err = null;
      ret = {
        unconfirmedInput: 1
      };
    } else {
      var a = val.split(':');
      ret = {
        addr: a[0],
        valueSat: parseInt(a[1], 10), // fix: explicit radix
      };
    }

    // spent?
    var k = SPENT_PREFIX + txid + '-' + n + '-';
    db.createReadStream({
      start: k,
      end: k + '~'
    })
      .on('data', function(data) {
        var k = data.key.split('-');
        self._addSpentInfo(ret, k[3], k[4], data.value);
      })
      .on('error', function(error) {
        return cb(error);
      })
      .on('end', function() {
        return cb(null, ret);
      });
  });
};
|
||||
|
||||
|
||||
// Strips cached confirmation/scriptPubKey data for every index entry of
// `addr`, rewriting each record back to its bare first field.
TransactionDb.prototype.deleteCacheForAddress = function(addr, cb) {
  var prefix = ADDR_PREFIX + addr + '-';
  var ops = [];
  db.createReadStream({
    start: prefix,
    end: prefix + '~'
  })
    .on('data', function(entry) {
      var fields = entry.value.split(':');
      ops.push({
        type: 'put',
        key: entry.key,
        value: fields[0],
      });
    })
    .on('error', function(err) {
      return cb(err);
    })
    .on('end', function() {
      db.batch(ops, cb);
    });
};
|
||||
|
||||
// Persists confirmation state for txouts that crossed safeConfirmations.
// Record value layout: value_sat:isConfirmed[:spentConfirmed:spentTxId:spentIndex:spentTs]
// — once the spend itself is deeply confirmed, any cached scriptPubKey in
// that slot is intentionally overwritten (no longer needed).
TransactionDb.prototype.cacheConfirmations = function(txouts, cb) {
  var self = this;

  var dbScript = [];
  for (var ii in txouts) {
    var txout = txouts[ii];

    //everything already cached?
    if (txout.spentIsConfirmedCached) {
      continue;
    }

    var infoToCache = [];
    if (txout.confirmations >= self.safeConfirmations) {

      if (txout.spentConfirmations >= self.safeConfirmations) {
        // if spent, we overwrite scriptPubKey cache (not needed anymore)
        // First 1 = txout.isConfirmedCached (must be equal to 1 at this point)
        infoToCache = [1, 1, txout.spentTxId, txout.spentIndex, txout.spentTs];
      } else {
        if (!txout.isConfirmedCached) {
          // Only the "output confirmed" flag is new; mark it pending so
          // cacheScriptPubKey (run later) preserves it.
          infoToCache.push(1);
          txout.confirmedWillBeCached = 1;
        }
      }
      if (infoToCache.length) {

        // Prepend the satoshi value to complete the record layout.
        infoToCache.unshift(txout.value_sat);
        dbScript.push({
          type: 'put',
          key: txout.key,
          value: infoToCache.join(':'),
        });
      }
    }
  }

  db.batch(dbScript, cb);
};
|
||||
|
||||
|
||||
// Persists scriptPubKeys for unspent outputs that are not cached yet.
// Record layout written: value_sat:confirmedFlag:scriptPubKey.
TransactionDb.prototype.cacheScriptPubKey = function(txouts, cb) {
  var ops = [];
  for (var ii in txouts) {
    var txout = txouts[ii];
    // Already cached, or spent (cache would be useless): skip.
    if (txout.scriptPubKeyCached || txout.spentTxId) {
      continue;
    }

    if (txout.scriptPubKey) {
      var confirmedFlag = (txout.isConfirmedCached || txout.confirmedWillBeCached) ? 1 : 0;
      var fields = [txout.value_sat, confirmedFlag, txout.scriptPubKey];
      ops.push({
        type: 'put',
        key: txout.key,
        value: fields.join(':'),
      });
    }
  }
  db.batch(ops, cb);
};
|
||||
|
||||
|
||||
|
||||
|
||||
// Decodes one address-index record into a txout item.
// `k` is the key already split on '-': [prefix, addr, tsRev, txid, n].
// The timestamp is stored reversed (END_OF_WORLD_TS - ts) so newest-first
// ordering falls out of LevelDB's lexicographic key order.
TransactionDb.prototype._parseAddrData = function(k, data, ignoreCache) {
  var v = data.value.split(':');
  var item = {
    key: data.key,
    ts: END_OF_WORLD_TS - parseInt(k[2], 10), // fix: explicit radix
    txid: k[3],
    index: parseInt(k[4], 10),
    value_sat: parseInt(v[0], 10),
  };

  if (ignoreCache)
    return item;

  // Cache:
  // v[1] === '1' -> isConfirmedCached
  // v[2] === '1' -> spend is cached -> v[3]=spentTxId v[4]=spentIndex v[5]=spentTs
  //                 (fix: original comment said v[4..6], code reads v[3..5])
  // v[2] !== '1' -> v[2] is the cached scriptPubKey
  if (v[1] === '1') {
    item.isConfirmed = 1;
    item.isConfirmedCached = 1;
    // Sent, confirmed
    if (v[2] === '1') {
      item.spentIsConfirmed = 1;
      item.spentIsConfirmedCached = 1;
      item.spentTxId = v[3];
      item.spentIndex = parseInt(v[4], 10);
      item.spentTs = parseInt(v[5], 10);
    }
    // Scriptpubkey cached
    else if (v[2]) {
      item.scriptPubKey = v[2];
      item.scriptPubKeyCached = 1;
    }
  }
  return item;
};
|
||||
|
||||
// Lists txouts for an address, newest first (see _parseAddrData's reversed
// timestamp keys). opts.txLimit caps the scan; opts.ignoreCache skips the
// cached-state decoding. Entries whose spend is not yet confirmed-cached get
// fresh spend info merged in from the SPENT_PREFIX records.
TransactionDb.prototype.fromAddr = function(addr, opts, cb) {
  opts = opts || {};
  var self = this;
  var k = ADDR_PREFIX + addr + '-';
  var ret = [];
  var unique = {}; // de-dupes txid+index pairs seen in the index
  db.createReadStream({
    start: k,
    end: k + '~',
    limit: opts.txLimit > 0 ? opts.txLimit : -1, // -1 means not limit
  })
    .on('data', function(data) {
      var k = data.key.split('-');
      var index = k[3] + k[4];
      if (!unique[index]) {
        unique[index] = 1;
        ret.push(self._parseAddrData(k, data, opts.ignoreCache));
      }
    })
    .on('error', cb)
    .on('end', function() {
      // Only entries without a confirmed-cached spend need a live lookup.
      async.eachLimit(ret.filter(function(x) {
        return !x.spentIsConfirmed;
      }), CONCURRENCY, function(o, e_c) {
        var k = SPENT_PREFIX + o.txid + '-' + o.index + '-';
        db.createReadStream({
          start: k,
          end: k + '~'
        })
          .on('data', function(data) {
            var k = data.key.split('-');
            self._addSpentInfo(o, k[3], k[4], data.value);
          })
          .on('error', e_c)
          .on('end', e_c);
      },
      function(err) {
        return cb(err, ret);
      });
    });
};
|
||||
|
||||
// Interprets `buf` as a little-endian unsigned integer.
// NOTE: buffertools.reverse flips the buffer in place.
TransactionDb.prototype._fromBuffer = function(buf) {
  var hex = buffertools.reverse(buf).toString('hex');
  return parseInt(hex, 16);
};
|
||||
|
||||
// Converts a raw bitcore tx object into the RPC-like shape the rest of this
// module expects (txid, vin[{txid,vout,n}], vout[{valueSat,scriptPubKey,n}],
// time). Mutates and returns `tx`.
TransactionDb.prototype.getStandardizedTx = function(tx, time, isCoinBase) {
  var self = this;
  tx.txid = bitcoreUtil.formatHashFull(tx.getHash());
  var ti = 0;

  tx.vin = tx.ins.map(function(txin) {
    var ret = {
      n: ti++
    };
    if (isCoinBase) {
      ret.isCoinBase = true;
    } else {
      // Outpoint hash is little-endian on the wire; reverse to display order.
      ret.txid = buffertools.reverse(new Buffer(txin.getOutpointHash())).toString('hex');
      ret.vout = txin.getOutpointIndex();
    }
    return ret;
  });

  var to = 0;
  tx.vout = tx.outs.map(function(txout) {
    var val;
    if (txout.s) {
      var s = new Script(txout.s);
      // NOTE(review): `new` on what looks like a static factory — works in
      // JS but is suspicious; confirm Address.fromScriptPubKey's contract.
      var addrs = new Address.fromScriptPubKey(s, config.network);
      // support only for p2pubkey p2pubkeyhash and p2sh
      if (addrs && addrs.length === 1) {
        val = {
          addresses: [addrs[0].toString()]
        };
      }
    }
    return {
      valueSat: self._fromBuffer(txout.v),
      scriptPubKey: val,
      n: to++,
    };
  });
  tx.time = time;
  return tx;
};
|
||||
|
||||
|
||||
// Completes each txout with its scriptPubKey hex (fetched via RPC), then
// persists the results through cacheScriptPubKey.
TransactionDb.prototype.fillScriptPubKey = function(txouts, cb) {
  var self = this;
  // Complete utxo info
  async.eachLimit(txouts, CONCURRENCY, function(txout, done) {
    self.fromIdInfoSimple(txout.txid, function(err, info) {
      if (!info || !info.vout) {
        return done(err);
      }
      txout.scriptPubKey = info.vout[txout.index].scriptPubKey.hex;
      return done();
    });
  }, function() {
    self.cacheScriptPubKey(txouts, cb);
  });
};
|
||||
|
||||
// Deletes all OUTS_PREFIX and SPENT_PREFIX records for `txid` by piping key
// range scans into delete write-streams (legacy levelup streaming API).
TransactionDb.prototype.removeFromTxId = function(txid, cb) {
  async.series([

    function(c) {
      db.createReadStream({
        start: OUTS_PREFIX + txid + '-',
        end: OUTS_PREFIX + txid + '~',
      }).pipe(
        db.createWriteStream({
          type: 'del'
        })
      ).on('close', c);
    },
    function(c) {
      db.createReadStream({
        start: SPENT_PREFIX + txid + '-',
        end: SPENT_PREFIX + txid + '~'
      })
        .pipe(
          db.createWriteStream({
            type: 'del'
          })
        ).on('close', c);
    }
  ],
  function(err) {
    cb(err);
  });

};
|
||||
|
||||
|
||||
// relatedAddrs is an optional hash, to collect related addresses in the transaction
|
||||
// relatedAddrs is an optional hash, to collect related addresses in the transaction
// Builds (without executing) the LevelDB batch ops for one transaction:
//   SPENT_PREFIX <prevTxid>-<prevVout>-<txid>-<inN>        -> ts
//   OUTS_PREFIX  <txid>-<n>                                -> addr:sat
//   ADDR_PREFIX  <addr>-<tsRev>-<txid>-<n>                 -> sat
// tsRev = END_OF_WORLD_TS - ts so newest entries sort first.
TransactionDb.prototype._addScript = function(tx, relatedAddrs) {
  var dbScript = [];
  var ts = tx.time;
  var txid = tx.txid || tx.hash;
  // Input Outpoints (mark them as spent)
  for (var ii in tx.vin) {
    var i = tx.vin[ii];
    if (i.txid) { // coinbase inputs have no txid
      var k = SPENT_PREFIX + i.txid + '-' + i.vout + '-' + txid + '-' + i.n;
      dbScript.push({
        type: 'put',
        key: k,
        value: ts || 0,
      });
    }
  }

  // NOTE: `var ii` is redeclared here — harmless with var hoisting.
  for (var ii in tx.vout) {
    var o = tx.vout[ii];
    if (o.scriptPubKey && o.scriptPubKey.addresses &&
      o.scriptPubKey.addresses[0] && !o.scriptPubKey.addresses[1] // TODO : not supported=> standard multisig
    ) {
      var addr = o.scriptPubKey.addresses[0];
      // Prefer an explicit satoshi amount; else convert the BTC-style value.
      var sat = o.valueSat || ((o.value || 0) * util.COIN).toFixed(0);

      if (relatedAddrs) relatedAddrs[addr] = 1;
      var k = OUTS_PREFIX + txid + '-' + o.n;
      var tsr = END_OF_WORLD_TS - ts;
      dbScript.push({
        type: 'put',
        key: k,
        value: addr + ':' + sat,
      }, {
        type: 'put',
        key: ADDR_PREFIX + addr + '-' + tsr + '-' + txid + '-' + o.n,
        value: sat,
      });
    }
  }
  return dbScript;
};
|
||||
|
||||
// adds an unconfimed TX
|
||||
// Stores an unconfirmed transaction; cb(err, relatedAddrs) reports the
// addresses it touched.
TransactionDb.prototype.add = function(tx, cb) {
  var relatedAddrs = {};
  var ops = this._addScript(tx, relatedAddrs);
  db.batch(ops, function(err) {
    cb(err, relatedAddrs);
  });
};
|
||||
|
||||
// Batches the db operations for an array of already-parsed tx objects.
TransactionDb.prototype._addManyFromObjs = function(txs, next) {
  var ops = [];
  for (var ii in txs) {
    ops = ops.concat(this._addScript(txs[ii]));
  }
  db.batch(ops, next);
};
|
||||
|
||||
// Resolves each tx hash via RPC and batches the resulting db operations.
// The genesis coinbase is skipped (its outputs are not retrievable).
TransactionDb.prototype._addManyFromHashes = function(txs, next) {
  var self = this;
  var ops = [];
  async.eachLimit(txs, CONCURRENCY, function(hash, done) {
    if (hash === genesisTXID) {
      return done();
    }

    Rpc.getTxInfo(hash, function(err, info) {
      if (!info) {
        return done(err);
      }
      ops = ops.concat(self._addScript(info));
      return done();
    });
  },
  function(err) {
    if (err) {
      return next(err);
    }
    db.batch(ops, next);
  });
};
|
||||
|
||||
|
||||
// Dispatches to the hash-based or object-based bulk loader depending on
// whether `txs` holds txid strings or parsed tx objects.
TransactionDb.prototype.addMany = function(txs, next) {
  if (!txs) {
    return next();
  }

  var loader = (typeof txs[0] === 'string') ?
    this._addManyFromHashes : this._addManyFromObjs;

  return loader.call(this, txs, next);
};
|
||||
|
||||
|
||||
// Identifies the mining pool (if any) from a coinbase tx's input script.
// NOTE(review): unconventional callback contract — cb(false) on RPC error,
// cb(undefined) for non-coinbase/unmatched, cb(poolInfo) on a match.
TransactionDb.prototype.getPoolInfo = function(txid, cb) {
  var self = this;

  Rpc.getTxInfo(txid, function(err, txInfo) {
    if (err) return cb(false);
    var ret;

    if (txInfo && txInfo.isCoinBase)
      ret = self.poolMatch.match(new Buffer(txInfo.vin[0].coinbase, 'hex'));

    return cb(ret);
  });
};
|
||||
|
||||
|
||||
// Detects whether the on-disk schema is already v0.2: any surviving legacy
// 'txa-' record means migration is still needed. cb(1) = new schema, cb(0) =
// legacy records present.
TransactionDb.prototype.checkVersion02 = function(cb) {
  var k = 'txa-';
  var isV2 = 1;
  var done = false;
  // Guard so cb fires exactly once even if both 'error' and 'end' arrive.
  function finish() {
    if (!done) {
      done = true;
      cb(isV2);
    }
  }
  db.createReadStream({
    start: k,
    end: k + '~',
    limit: 1,
  })
    .on('data', function(data) {
      isV2 = 0;
    })
    .on('error', finish) // fix: without this, a stream error left cb uncalled
    .on('end', finish);
};
|
||||
|
||||
// Migrates legacy 'txa-' records into the v0.2 ADDR_PREFIX layout
// (addr-<address>-<tsRev>-<txid>-<n> -> sat). Writes are flushed in batches
// of N records to bound memory.
TransactionDb.prototype.migrateV02 = function(cb) {
  var k = 'txa-';
  var dbScript = [];
  var c = 0;
  var c2 = 0;
  var N = 50000;
  db.createReadStream({
    start: k,
    end: k + '~'
  })
    .on('data', function(data) {
      var key = data.key.split('-');
      var v = data.value.split(':');
      dbScript.push({
        type: 'put',
        key: ADDR_PREFIX + key[1] + '-' + (END_OF_WORLD_TS - parseInt(v[1], 10)) + '-' + key[2] + '-' + key[3],
        value: v[0],
      });
      if (c++ > N) {
        console.log('\t%dM txs outs processed', ((c2 += N) / 1e6).toFixed(3));
        // Fix: swap the batch out synchronously BEFORE the async write.
        // The original reset dbScript inside db.batch's callback, so any
        // records streamed while the batch was in flight were discarded.
        var ops = dbScript;
        dbScript = [];
        c = 0;
        db.batch(ops, function() {});
      }
    })
    .on('error', function(err) {
      return cb(err);
    })
    .on('end', function() {
      // Fix: flush the final partial batch; the original dropped it.
      db.batch(dbScript, cb);
    });
};
|
||||
|
||||
|
||||
|
||||
module.exports = require('soop')(TransactionDb);
|
||||
85
lib/bus.js
85
lib/bus.js
@ -1,85 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
var events = require('events');
|
||||
var util = require('util');
|
||||
|
||||
/**
|
||||
* The bus represents a connection to node, decoupled from the transport layer, that can
|
||||
* listen and subscribe to any events that are exposed by available services. Services
|
||||
* can expose events that can be subscribed to by implementing a `getPublishEvents` method.
|
||||
* @param {Object} params
|
||||
* @param {Node} params.node - A reference to the node
|
||||
*/
|
||||
// Constructs a bus bound to `params.node`; `params.remoteAddress` tags the
// remote peer this bus serves (may be undefined for local buses).
function Bus(params) {
  events.EventEmitter.call(this);
  this.node = params.node;
  this.remoteAddress = params.remoteAddress;
}

util.inherits(Bus, events.EventEmitter);
|
||||
|
||||
/**
|
||||
* This function will find the service that exposes the event by name and
|
||||
* call the associated subscribe method with the arguments excluding the
|
||||
* first argument of this function.
|
||||
* @param {String} name - The name of the event
|
||||
*/
|
||||
Bus.prototype.subscribe = function(name) {
  // Collect every event published by every service.
  var events = [];
  for (var i in this.node.services) {
    events = events.concat(this.node.services[i].getPublishEvents());
  }

  // Fix: hoisted out of the loop — the (bus + extra args) list does not
  // change per event; the original rebuilt it for every event.
  var params = Array.prototype.slice.call(arguments).slice(1);
  params.unshift(this);

  for (var j = 0; j < events.length; j++) {
    var event = events[j];
    if (name === event.name) {
      event.subscribe.apply(event.scope, params);
    }
  }
};
|
||||
|
||||
/**
|
||||
* The inverse of the subscribe method.
|
||||
* @param {String} name - The name of the event
|
||||
*/
|
||||
Bus.prototype.unsubscribe = function(name) {
  // Collect every event published by every service.
  var events = [];
  for (var i in this.node.services) {
    events = events.concat(this.node.services[i].getPublishEvents());
  }

  // Fix: hoisted out of the loop — the argument list is loop-invariant;
  // the original rebuilt it for every event.
  var params = Array.prototype.slice.call(arguments).slice(1);
  params.unshift(this);

  for (var j = 0; j < events.length; j++) {
    var event = events[j];
    if (name === event.name) {
      event.unsubscribe.apply(event.scope, params);
    }
  }
};
|
||||
|
||||
/**
|
||||
* This function will unsubscribe all events.
|
||||
*/
|
||||
Bus.prototype.close = function() {
  var events = [];

  for (var i in this.node.services) {
    events = events.concat(this.node.services[i].getPublishEvents());
  }

  // Unsubscribe this bus from every published event.
  events.forEach(function(event) {
    event.unsubscribe.call(event.scope, this);
  }, this);
};
|
||||
|
||||
module.exports = Bus;
|
||||
@ -1,37 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
var program = require('commander');
|
||||
var path = require('path');
|
||||
var flocore = require('..');
|
||||
|
||||
// Entry point for the start command: resolves the node configuration
// (explicit --config dir, cwd search, or built-in defaults) and boots it.
function main(servicesPath, additionalServices) {
  /* jshint maxstatements: 100 */

  var start = flocore.scaffold.start;
  var findConfig = flocore.scaffold.findConfig;
  var defaultConfig = flocore.scaffold.defaultConfig;

  program
    .version(flocore.version)
    .description('Start the current node')
    .option('-c, --config <dir>', 'Specify the directory with Flocore Node configuration');

  program.parse(process.argv);

  if (program.config) {
    program.config = path.resolve(process.cwd(), program.config);
  }

  var configInfo = findConfig(program.config || process.cwd());
  if (!configInfo) {
    configInfo = defaultConfig({
      additionalServices: additionalServices
    });
  }
  if (servicesPath) {
    configInfo.servicesPath = servicesPath;
  }
  start(configInfo);
}
|
||||
|
||||
module.exports = main;
|
||||
@ -1,47 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
var Liftoff = require('liftoff');
|
||||
|
||||
// Liftoff launcher for the `flocore` CLI: prefers a locally installed
// flocore-node when one is found next to a config file, otherwise falls
// back to this package. Also wires SIGTERM for a graceful shutdown.
function main(parentServicesPath, additionalServices) {

  function onRequire(name) {
    console.log('Loading:', name);
  }

  function onRequireFail(name, err) {
    console.log('Unable to load:', name, err);
  }

  function onRespawn(flags, child) {
    console.log('Detected node flags:', flags);
    console.log('Respawned to PID:', child.pid);
  }

  var liftoff = new Liftoff({
    name: 'flocore',
    moduleName: 'flocore-node',
    configName: 'flocore-node',
    processTitle: 'flocore'
  })
    .on('require', onRequire)
    .on('requireFail', onRequireFail)
    .on('respawn', onRespawn);

  liftoff.launch({
    cwd: process.cwd()
  }, function(env) {

    var node;
    if (env.configPath && env.modulePath) {
      node = require(env.modulePath);
      node.cli.main();
    } else {
      node = require('../../');
      node.cli.main(parentServicesPath, additionalServices);
    }

    // Gracefully Shut Down
    process.on('SIGTERM', function() {
      console.log("Shutting down flocore-node");
      node.stop(function() {
        console.log("flocore-node successfully stopped!");
        process.exit(0);
      });
    });

  });

}
|
||||
|
||||
module.exports = main;
|
||||
@ -1,39 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
var Liftoff = require('liftoff');
|
||||
|
||||
// Liftoff launcher for the `flocored` daemon: prefers a locally installed
// flocore-node when one is found, otherwise falls back to this package.
function main(parentServicesPath, additionalServices) {

  function onRequire(name) {
    console.log('Loading:', name);
  }

  function onRequireFail(name, err) {
    console.log('Unable to load:', name, err);
  }

  function onRespawn(flags, child) {
    console.log('Detected node flags:', flags);
    console.log('Respawned to PID:', child.pid);
  }

  var liftoff = new Liftoff({
    name: 'flocored',
    moduleName: 'flocore-node',
    configName: 'flocore-node',
    processTitle: 'flocored'
  })
    .on('require', onRequire)
    .on('requireFail', onRequireFail)
    .on('respawn', onRespawn);

  liftoff.launch({
    cwd: process.cwd()
  }, function(env) {

    var node;

    if (env.configPath && env.modulePath) {
      node = require(env.modulePath);
      node.cli.daemon();
    } else {
      node = require('../../');
      node.cli.daemon(parentServicesPath, additionalServices);
    }

  });

}
|
||||
|
||||
module.exports = main;
|
||||
151
lib/cli/main.js
151
lib/cli/main.js
@ -1,151 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
var program = require('commander');
|
||||
var path = require('path');
|
||||
var flocorenode = require('..');
|
||||
var utils = require('../utils');
|
||||
|
||||
// Full CLI entry point: registers the create/start/install/uninstall/call
// commands on commander and dispatches on process.argv. Command
// registration order is user-visible in --help output.
function main(servicesPath, additionalServices) {
  /* jshint maxstatements: 100 */

  var version = flocorenode.version;
  var create = flocorenode.scaffold.create;
  var add = flocorenode.scaffold.add;
  var start = flocorenode.scaffold.start;
  var remove = flocorenode.scaffold.remove;
  var callMethod = flocorenode.scaffold.callMethod;
  var findConfig = flocorenode.scaffold.findConfig;
  var defaultConfig = flocorenode.scaffold.defaultConfig;

  program
    .version(version);

  // `create <directory>`: scaffold a new node directory.
  program
    .command('create <directory>')
    .description('Create a new node')
    .option('-d, --datadir <dir>', 'Specify the florincoin database directory')
    .option('-t, --testnet', 'Enable testnet as the network')
    .action(function(dirname, cmd){
      if (cmd.datadir) {
        cmd.datadir = path.resolve(process.cwd(), cmd.datadir);
      }
      var opts = {
        cwd: process.cwd(),
        dirname: dirname,
        datadir: cmd.datadir || './data',
        isGlobal: false
      };
      if (cmd.testnet) {
        opts.network = 'testnet';
      }
      create(opts, function(err) {
        if (err) {
          throw err;
        }
        console.log('Successfully created node in directory: ', dirname);
      });
    });

  // `start`: boot a node from found/default configuration.
  program
    .command('start')
    .description('Start the current node')
    .option('-c, --config <dir>', 'Specify the directory with Flocore Node configuration')
    .action(function(cmd){
      if (cmd.config) {
        cmd.config = path.resolve(process.cwd(), cmd.config);
      }
      var configInfo = findConfig(cmd.config || process.cwd());
      if (!configInfo) {
        configInfo = defaultConfig({
          additionalServices: additionalServices
        });
      }
      if (servicesPath) {
        configInfo.servicesPath = servicesPath;
      }
      start(configInfo);
    });

  // `install <services...>`: add services to the current node's config.
  program
    .command('install <services...>')
    .description('Install a service for the current node')
    .action(function(services){
      var configInfo = findConfig(process.cwd());
      if (!configInfo) {
        throw new Error('Could not find configuration, see `flocore-node create --help`');
      }
      var opts = {
        path: configInfo.path,
        services: services
      };
      add(opts, function(err) {
        if (err) {
          throw err;
        }
        console.log('Successfully added services(s):', services.join(', '));
      });
    }).on('--help', function() {
      console.log(' Examples:');
      console.log();
      console.log(' $ flocore-node add wallet-service');
      console.log(' $ flocore-node add insight-api');
      console.log();
    });

  // `uninstall <services...>`: remove services from the node's config.
  program
    .command('uninstall <services...>')
    .description('Uninstall a service for the current node')
    .action(function(services){
      var configInfo = findConfig(process.cwd());
      if (!configInfo) {
        throw new Error('Could not find configuration, see `flocore-node create --help`');
      }
      var opts = {
        path: configInfo.path,
        services: services
      };
      remove(opts, function(err) {
        if (err) {
          throw err;
        }
        console.log('Successfully removed services(s):', services.join(', '));
      });
    }).on('--help', function() {
      console.log(' Examples:');
      console.log();
      console.log(' $ flocore-node remove wallet-service');
      console.log(' $ flocore-node remove insight-api');
      console.log();
    });

  // `call <method> [params...]`: invoke an API method over HTTP.
  program
    .command('call <method> [params...]')
    .description('Call an API method')
    .action(function(method, paramsArg) {
      var params = utils.parseParamsWithJSON(paramsArg);
      var configInfo = findConfig(process.cwd());
      if (!configInfo) {
        configInfo = defaultConfig();
      }
      var options = {
        protocol: 'http',
        host: 'localhost',
        port: configInfo.config.port
      };
      callMethod(options, method, params, function(err, data) {
        if (err) {
          throw err;
        }
        console.log(JSON.stringify(data, null, 2));
      });
    });

  program.parse(process.argv);

  // No subcommand given: show usage.
  if (process.argv.length === 2) {
    program.help();
  }

}
|
||||
|
||||
module.exports = main;
|
||||
@ -1,13 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
|
||||
// Chain-level constants. Genesis hashes are per-network; presumably used to
// verify the connected daemon's chain — confirm against callers.
module.exports = {
  BITCOIN_GENESIS_HASH: {
    livenet: '09c7781c9df90708e278c35d38ea5c9041d7ecfcdd1c56ba67274b7cff3e1cea',
    regtest: 'ec42fa26ca6dcb1103b59a1d24b161935ea4566f8d5736db8917d5b9a8dee0d7',
    testnet: '9b7bc86236c34b5e3a39367c036b7fe8807a966c22a7a1f0da2a198a27e03731', //this is testnet3
    testnet5: '000000000933ea01ad0ee984209779baaec3ced90fa3f408719526f8d77f4943' //this is testnet5
  },
  // Key prefix for this module's records in the shared database.
  DB_PREFIX: new Buffer('ffff', 'hex')
};
|
||||
|
||||
@ -1,12 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
var createError = require('errno').create;

// Base error type for this package; RPCError subclasses it so callers can
// catch either the specific RPC failure or any package error.
var FlocoreNodeError = createError('FlocoreNodeError');

var RPCError = createError('RPCError', FlocoreNodeError);

module.exports = {
  Error: FlocoreNodeError,
  RPCError: RPCError
};
|
||||
@ -1,3 +0,0 @@
|
||||
var Logger = require('./logger');
|
||||
module.exports.errors = require('./errors');
|
||||
module.exports.log = new Logger();
|
||||
@ -1,75 +1,13 @@
|
||||
'use strict';
|
||||
var winston = require('winston');
var config = require('../config/config');

var flocore = require('flocore-lib');
var _ = flocore.deps._;
var colors = require('colors/safe');
// Module-level winston logger: a single console transport whose level
// defaults to 'error' and is then overridden from config.loggerLevel.
var logger = new winston.Logger({
  transports: [
    new winston.transports.Console({
      level: 'error'
    }),
  ]
});
logger.transports.console.level = config.loggerLevel;
|
||||
|
||||
/**
|
||||
* Wraps console.log with some special magic
|
||||
* @constructor
|
||||
*/
|
||||
function Logger(options) {
  if (!options) {
    options = {};
  }
  // `=== undefined` has the same semantics as lodash's _.isUndefined but
  // drops this block's dependency on the flocore-lib lodash re-export.
  // Explicit false/0 are respected; only a missing value gets the default.
  this.formatting = options.formatting === undefined ? Logger.DEFAULT_FORMATTING : options.formatting;
}

// Default for the timestamp/level prefix when no option is given.
Logger.DEFAULT_FORMATTING = true;
|
||||
|
||||
/**
|
||||
* Prints an info message
|
||||
* #info
|
||||
*/
|
||||
// Logs at info level (blue prefix) to stdout.
Logger.prototype.info = function() {
  var args = Array.prototype.slice.call(arguments);
  this._log.apply(this, ['blue', 'info'].concat(args));
};
|
||||
|
||||
/**
|
||||
* Prints an error message
|
||||
* #error
|
||||
*/
|
||||
// Logs at error level (red prefix) to stderr.
Logger.prototype.error = function() {
  var args = Array.prototype.slice.call(arguments);
  this._log.apply(this, ['red', 'error'].concat(args));
};
|
||||
|
||||
/**
|
||||
* Prints an debug message
|
||||
* #debug
|
||||
*/
|
||||
// Logs at debug level (green prefix), but only when BITCORE_ENV=debug.
Logger.prototype.debug = function() {
  if (process.env.BITCORE_ENV !== 'debug') {
    return;
  }
  var args = Array.prototype.slice.call(arguments);
  this._log.apply(this, ['green', 'debug'].concat(args));
};
|
||||
|
||||
/**
|
||||
* Prints an warn message
|
||||
* #warn
|
||||
*/
|
||||
// Logs at warn level (yellow prefix) to stdout.
Logger.prototype.warn = function() {
  var args = Array.prototype.slice.call(arguments);
  this._log.apply(this, ['yellow', 'warn'].concat(args));
};
|
||||
|
||||
/**
|
||||
* Proxies console.log with color and arg parsing magic
|
||||
* #_log
|
||||
*/
|
||||
// Shared log sink: optionally prefixes "[ISO date] level:" (colorized via
// `colors`), then forwards to console.log — or console.error for 'error'.
Logger.prototype._log = function(color) {
  var args = Array.prototype.slice.call(arguments, 1);
  var level = args.shift();

  if (this.formatting) {
    var stamp = '[' + new Date().toISOString() + ']';
    var typeString = colors[color].italic(level + ':');
    args[0] = stamp + ' ' + typeString + ' ' + args[0];
  }

  var fn = level === 'error' ? console.error : console.log;
  fn.apply(console, args);
};
|
||||
|
||||
// Export the Logger constructor, plus the shared winston-backed `logger`
// instance configured above.
module.exports = Logger;
module.exports.logger = logger;
|
||||
|
||||
232
lib/node.js
232
lib/node.js
@ -1,232 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
var util = require('util');
|
||||
var EventEmitter = require('events').EventEmitter;
|
||||
var async = require('async');
|
||||
var assert = require('assert');
|
||||
var flocore = require('flocore-lib');
|
||||
var _ = flocore.deps._;
|
||||
var index = require('./');
|
||||
var log = index.log;
|
||||
var Bus = require('./bus');
|
||||
var errors = require('./errors');
|
||||
|
||||
/**
 * A flocore node that loads and manages a collection of services.
 * @constructor
 * @param {Object} config
 * @param {Boolean} [config.formatLogs] - Toggle timestamp/level log formatting
 * @param {Array} [config.services] - Service definitions to load on start()
 */
function Node(config) {
  if (!(this instanceof Node)) {
    return new Node(config);
  }

  this._init(config);

  if (!_.isUndefined(config.formatLogs)) {
    this.log.formatting = Boolean(config.formatLogs);
  }

  if (config.services) {
    this._unloadedServices = config.services;
  }
}

util.inherits(Node, EventEmitter);
|
||||
|
||||
/**
 * Initializes instance state from the configuration object.
 * @param {Object} config - Node configuration (path, datadir, port, https, network)
 */
Node.prototype._init = function(config) {
  this.configPath = config.path;
  this.errors = errors;
  this.log = log;

  this.datadir = config.datadir;
  // Populated by _setNetwork below.
  this.network = null;
  // Services started so far, keyed by name; definitions awaiting start.
  this.services = {};
  this._unloadedServices = [];

  this.port = config.port;
  this.https = config.https;
  this.httpsOptions = config.httpsOptions;
  this._setNetwork(config);
};

/**
 * Records which network this node runs on (taken verbatim from config).
 * @param {Object} config
 */
Node.prototype._setNetwork = function(config) {
  this.network = config.network;
};
|
||||
|
||||
/**
 * Opens a new event bus bound to this node.
 * @param {Object} [options]
 * @param {String} [options.remoteAddress] - Address of the remote subscriber
 * @returns {Bus}
 */
Node.prototype.openBus = function(options) {
  var opts = options || {};
  return new Bus({node: this, remoteAddress: opts.remoteAddress});
};
|
||||
|
||||
/**
 * Collects the API method descriptors exposed by every loaded service.
 * @returns {Array} concatenated results of each service's getAPIMethods()
 */
Node.prototype.getAllAPIMethods = function() {
  var services = this.services;
  return Object.keys(services).reduce(function(methods, name) {
    var service = services[name];
    return service.getAPIMethods ? methods.concat(service.getAPIMethods()) : methods;
  }, []);
};
|
||||
|
||||
/**
 * Collects the publish-event descriptors exposed by every loaded service.
 * @returns {Array} concatenated results of each service's getPublishEvents()
 */
Node.prototype.getAllPublishEvents = function() {
  var services = this.services;
  return Object.keys(services).reduce(function(events, name) {
    var service = services[name];
    return service.getPublishEvents ? events.concat(service.getPublishEvents()) : events;
  }, []);
};
|
||||
|
||||
/**
 * Orders services so that every service appears after its dependencies
 * (depth-first resolution). Throws via assert when a dependency has no
 * matching service definition.
 * @param {Array} services - Service definitions ({name, module, ...})
 * @returns {Array} the services in safe start order
 */
Node.prototype._getServiceOrder = function(services) {
  var byName = {};
  var names = services.map(function(service) {
    byName[service.name] = service;
    return service.name;
  });

  var placed = {};
  var ordered = [];

  function visit(list) {
    list.forEach(function(name) {
      var service = byName[name];
      assert(service, 'Required dependency "' + name + '" not available.');

      // A service's dependencies must be placed before the service itself.
      visit(service.module.dependencies);

      if (!placed[name]) {
        ordered.push(service);
        placed[name] = true;
      }
    });
  }

  visit(names);

  return ordered;
};
|
||||
|
||||
/**
 * Constructs, registers and starts a single service, then proxies the
 * service's API methods onto this node instance.
 * @param {Object} serviceInfo
 * @param {String} serviceInfo.name - The service's name
 * @param {Function} serviceInfo.module - The service constructor
 * @param {Object} [serviceInfo.config] - Service-specific configuration
 * @param {Function} callback - Called with an Error when start fails or an
 *   exposed API method name conflicts with an existing node property
 */
Node.prototype._startService = function(serviceInfo, callback) {
  var self = this;

  log.info('Starting ' + serviceInfo.name);

  // The supplied config must not already carry `node`/`name`; those keys
  // are injected just below.
  var config;
  if (serviceInfo.config) {
    assert(_.isObject(serviceInfo.config));
    assert(!serviceInfo.config.node);
    assert(!serviceInfo.config.name);
    config = serviceInfo.config;
  } else {
    config = {};
  }

  config.node = this;
  config.name = serviceInfo.name;
  var service = new serviceInfo.module(config);

  // Register before starting so the service is discoverable by name
  // while it boots.
  self.services[serviceInfo.name] = service;

  service.start(function(err) {
    if (err) {
      return callback(err);
    }

    // Each API descriptor is a [name, instance, method] triple; expose it
    // as node.<name>(...). Conflicting names are collected (never
    // overwritten) and reported as a single error.
    if (service.getAPIMethods) {
      var methodData = service.getAPIMethods();
      var methodNameConflicts = [];
      methodData.forEach(function(data) {
        var name = data[0];
        var instance = data[1];
        var method = data[2];

        if (self[name]) {
          methodNameConflicts.push(name);
        } else {
          self[name] = function() {
            return method.apply(instance, arguments);
          };
        }
      });

      if (methodNameConflicts.length > 0) {
        return callback(new Error('Existing API method(s) exists: ' + methodNameConflicts.join(', ')));
      }
    }

    callback();

  });

};
|
||||
|
||||
/**
 * Logs the configuration path and network on startup, when the node was
 * configured from a file.
 */
Node.prototype._logTitle = function() {
  if (!this.configPath) {
    return;
  }
  log.info('Using config:', this.configPath);
  log.info('Using network:', this.network);
};
|
||||
|
||||
/**
 * Starts every configured service in dependency order, then emits "ready".
 * @param {Function} callback - Called once all services start, or with the
 *   first error encountered
 */
Node.prototype.start = function(callback) {
  var self = this;
  var ordered = this._getServiceOrder(this._unloadedServices);

  self._logTitle();

  async.eachSeries(ordered, function(service, next) {
    self._startService(service, next);
  }, function(err) {
    if (err) {
      return callback(err);
    }
    self.emit('ready');
    callback();
  });
};
|
||||
|
||||
/**
 * Stops all services in reverse dependency order, emitting "stopping"
 * before the shutdown begins.
 * @param {Function} [callback] - Called when every service has stopped
 */
Node.prototype.stop = function(callback) {
  log.info('Beginning shutdown');
  var self = this;
  var reversed = this._getServiceOrder(this._unloadedServices).reverse();

  this.stopping = true;
  this.emit('stopping');

  async.eachSeries(reversed, function(service, next) {
    var running = self.services[service.name];
    if (!running) {
      // Defined but never started (e.g. startup aborted early).
      log.info('Stopping ' + service.name + ' (not started)');
      return setImmediate(next);
    }
    log.info('Stopping ' + service.name);
    running.stop(next);
  }, function() {
    if (callback) {
      callback();
    }
  });
};

module.exports = Node;
|
||||
@ -1,118 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
var async = require('async');
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
var spawn = require('child_process').spawn;
|
||||
var flocore = require('flocore-lib');
|
||||
var utils = require('../utils');
|
||||
var $ = flocore.util.preconditions;
|
||||
var _ = flocore.deps._;
|
||||
|
||||
/**
 * Adds a service name to the `services` array of a flocore-node.json file.
 * @param {String} configFilePath - The absolute path to the configuration file
 * @param {String} service - The name of the service
 * @param {Function} done - Node-style callback
 */
function addConfig(configFilePath, service, done) {
  $.checkState(utils.isAbsolutePath(configFilePath), 'An absolute path is expected');
  fs.readFile(configFilePath, function(err, data) {
    if (err) {
      return done(err);
    }
    var config;
    try {
      config = JSON.parse(data);
    } catch (e) {
      // Surface malformed JSON through the callback instead of throwing
      // inside the fs callback (which would crash the process).
      return done(e);
    }
    $.checkState(
      Array.isArray(config.services),
      'Configuration file is expected to have a services array.'
    );
    config.services.push(service);
    config.services = _.unique(config.services);
    // A sort comparator must return negative/zero/positive; the previous
    // comparator returned a boolean (0 or 1 after coercion), which is not
    // a valid comparator and can leave the array mis-sorted.
    config.services.sort(function(a, b) {
      if (a < b) {
        return -1;
      }
      return a > b ? 1 : 0;
    });
    fs.writeFile(configFilePath, JSON.stringify(config, null, 2), done);
  });
}
|
||||
|
||||
/**
 * Installs a service via "npm install <service> --save" inside the
 * configuration directory, streaming npm's output to this process.
 * @param {String} configDir - The absolute configuration directory path
 * @param {String} service - The name of the service
 * @param {Function} done - Called with an Error when npm exits non-zero
 */
function addService(configDir, service, done) {
  $.checkState(utils.isAbsolutePath(configDir), 'An absolute path is expected');
  var npm = spawn('npm', ['install', service, '--save'], {cwd: configDir});

  npm.stdout.on('data', function(data) {
    process.stdout.write(data);
  });

  npm.stderr.on('data', function(data) {
    process.stderr.write(data);
  });

  npm.on('close', function(code) {
    if (code === 0) {
      return done();
    }
    return done(new Error('There was an error installing service: ' + service));
  });
}
|
||||
|
||||
/**
 * Installs one or more services with npm and records each installed
 * package name in flocore-node.json.
 * @param {String} options.path - The absolute flocore-node configuration directory
 * @param {Array} options.services - An array of strings of service names
 * @param {Function} done - A callback function called when finished
 */
function add(options, done) {
  $.checkArgument(_.isObject(options));
  $.checkArgument(_.isFunction(done));
  $.checkArgument(
    _.isString(options.path) && utils.isAbsolutePath(options.path),
    'An absolute path is expected'
  );
  $.checkArgument(Array.isArray(options.services));

  var configPath = options.path;
  var services = options.services;

  var flocoreConfigPath = path.resolve(configPath, 'flocore-node.json');
  var packagePath = path.resolve(configPath, 'package.json');

  if (!fs.existsSync(flocoreConfigPath) || !fs.existsSync(packagePath)) {
    return done(
      new Error('Directory does not have a flocore-node.json and/or package.json file.')
    );
  }

  // Snapshot of package.json dependencies; updated after each install so
  // the diff below isolates the single package that install added.
  var oldPackage = JSON.parse(fs.readFileSync(packagePath));

  async.eachSeries(
    services,
    function(service, next) {
      // npm install <service_name> --save
      addService(configPath, service, function(err) {
        if (err) {
          return next(err);
        }

        // get the name of the service from package.json: the one key in
        // dependencies that wasn't there before this install.
        // NOTE(review): assumes each install adds exactly one new
        // dependency; re-installing an already-present service would
        // trip this checkState — confirm intended.
        var updatedPackage = JSON.parse(fs.readFileSync(packagePath));
        var newDependencies = _.difference(
          Object.keys(updatedPackage.dependencies),
          Object.keys(oldPackage.dependencies)
        );
        $.checkState(newDependencies.length === 1);
        oldPackage = updatedPackage;
        var serviceName = newDependencies[0];

        // add service to flocore-node.json
        addConfig(flocoreConfigPath, serviceName, next);
      });
    }, done
  );
}

module.exports = add;
|
||||
@ -1,43 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
var socketClient = require('socket.io-client');
|
||||
|
||||
/**
 * Calls a remote node with a method and params over a socket.io
 * connection, closing the socket after a successful response.
 * @param {Object} options
 * @param {String} options.protocol
 * @param {String} options.host
 * @param {Number} options.port
 * @param {String} method - The name of the method to call
 * @param {Array} params - An array of the params for the method
 * @param {Function} done - The callback function
 * @returns {Object} the socket.io client socket
 */
function callMethod(options, method, params, done) {
  var url = options.protocol + '://' + options.host + ':' + options.port;
  var socket = socketClient(url, {
    reconnection: false,
    connect_timeout: 5000
  });

  socket.on('connect', function() {
    socket.send({
      method: method,
      params: params,
    }, function(response) {
      if (response.error) {
        return done(new Error(response.error.message));
      }
      socket.close();
      done(null, response.result);
    });
  });

  socket.on('connect_error', done);

  return socket;
}

module.exports = callMethod;
|
||||
@ -1,154 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
var spawn = require('child_process').spawn;
|
||||
var flocore = require('flocore-lib');
|
||||
var async = require('async');
|
||||
var $ = flocore.util.preconditions;
|
||||
var _ = flocore.deps._;
|
||||
var path = require('path');
|
||||
var packageFile = require('../../package.json');
|
||||
var mkdirp = require('mkdirp');
|
||||
var fs = require('fs');
|
||||
var defaultConfig = require('./default-config');
|
||||
|
||||
// Caret range pinning scaffolded projects to this package's version line.
var version = '^' + packageFile.version;

// Template package.json written into newly scaffolded node directories.
var BASE_PACKAGE = {
  description: 'A full Florincoin node build with Flocore',
  repository: 'https://github.com/user/project',
  license: 'MIT',
  readme: 'README.md',
  dependencies: {
    'flocore-lib': '^' + flocore.version,
    'flocore-node': version
  }
};
|
||||
|
||||
/**
 * Will create a directory for the Florincoin data.
 * @param {String} datadir - The absolute path
 * @param {Function} done - The callback function called when finished
 */
function createFlorincoinDirectory(datadir, done) {
  mkdirp(datadir, function(err) {
    if (err) {
      // Report failure through the callback rather than throwing inside
      // an async callback, which would crash the process and bypass the
      // caller's error handling.
      return done(err);
    }

    done();

    // Don't create the configuration yet
  });
}
|
||||
|
||||
/**
 * Will create a base Flocore Node configuration directory and files.
 * @param {Object} options
 * @param {String} options.network - "testnet" or "livenet"
 * @param {String} options.datadir - The florincoin database directory
 * @param {String} configDir - The absolute path
 * @param {Boolean} isGlobal - If the configuration depends on globally installed node services.
 * @param {Function} done - The callback function called when finished
 */
function createConfigDirectory(options, configDir, isGlobal, done) {
  mkdirp(configDir, function(err) {
    if (err) {
      // Propagate the error through the callback instead of throwing
      // inside an async callback (which would crash the process).
      return done(err);
    }
    var configInfo = defaultConfig(options);
    var config = configInfo.config;

    var configJSON = JSON.stringify(config, null, 2);
    var packageJSON = JSON.stringify(BASE_PACKAGE, null, 2);
    try {
      fs.writeFileSync(configDir + '/flocore-node.json', configJSON);
      if (!isGlobal) {
        fs.writeFileSync(configDir + '/package.json', packageJSON);
      }
    } catch(e) {
      // `return` here fixes a double-callback bug: the original fell
      // through and invoked done() a second time after done(e).
      return done(e);
    }
    done();
  });
}
|
||||
|
||||
/**
 * Will setup a directory with a Flocore Node directory, configuration file,
 * florincoin configuration, and will install all necessary dependencies.
 *
 * @param {Object} options
 * @param {String} options.cwd - The current working directory
 * @param {String} options.dirname - The name of the flocore node configuration directory
 * @param {String} options.datadir - The path to the florincoin datadir
 * @param {Boolean} options.isGlobal - Whether services resolve from globally installed modules
 * @param {String} [options.network] - "testnet" or "livenet"
 * @param {Function} done - A callback function called when finished
 */
function create(options, done) {
  /* jshint maxstatements:20 */

  $.checkArgument(_.isObject(options));
  $.checkArgument(_.isFunction(done));
  $.checkArgument(_.isString(options.cwd));
  $.checkArgument(_.isString(options.dirname));
  $.checkArgument(_.isBoolean(options.isGlobal));
  $.checkArgument(_.isString(options.datadir));

  var cwd = options.cwd;
  var dirname = options.dirname;
  var datadir = options.datadir;
  var isGlobal = options.isGlobal;

  // datadir is resolved relative to the new configuration directory.
  var absConfigDir = path.resolve(cwd, dirname);
  var absDataDir = path.resolve(absConfigDir, datadir);

  async.series([
    function(next) {
      // Setup the the flocore-node directory and configuration
      if (!fs.existsSync(absConfigDir)) {
        var createOptions = {
          network: options.network,
          datadir: datadir
        };
        createConfigDirectory(createOptions, absConfigDir, isGlobal, next);
      } else {
        next(new Error('Directory "' + absConfigDir+ '" already exists.'));
      }
    },
    function(next) {
      // Setup the florincoin directory and configuration
      if (!fs.existsSync(absDataDir)) {
        createFlorincoinDirectory(absDataDir, next);
      } else {
        next();
      }
    },
    function(next) {
      // Install all of the necessary dependencies; a global install
      // resolves services from globally installed modules instead.
      if (!isGlobal) {
        var npm = spawn('npm', ['install'], {cwd: absConfigDir});

        npm.stdout.on('data', function (data) {
          process.stdout.write(data);
        });

        npm.stderr.on('data', function (data) {
          process.stderr.write(data);
        });

        npm.on('close', function (code) {
          if (code !== 0) {
            return next(new Error('There was an error installing dependencies.'));
          } else {
            return next();
          }
        });

      } else {
        next();
      }
    }
  ], done);

}

module.exports = create;
|
||||
@ -1,100 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
var path = require('path');
|
||||
var mkdirp = require('mkdirp');
|
||||
var fs = require('fs');
|
||||
var packageJson = require('../../package');
|
||||
|
||||
/**
 * Extracts the major version number from a semver-style version string.
 * @param {String} versionString - e.g. "4.2.1"
 * @returns {Number} the major version, e.g. 4
 */
function getMajorVersion(versionString) {
  // Always pass the radix to parseInt so the base is never inferred.
  return parseInt(versionString.split('.')[0], 10);
}
|
||||
|
||||
/**
 * Will return the path and default flocore-node configuration. It will search for the
 * configuration file in the "~/.flocore" directory, and if it doesn't exist, it will create one
 * based on default settings.
 * @param {Object} [options]
 * @param {Array} [options.additionalServices] - An optional array of services.
 * @returns {{path: String, config: Object}}
 */
function getDefaultConfig(options) {
  /* jshint maxstatements: 40 */
  if (!options) {
    options = {};
  }

  // NOTE(review): relies on process.env.HOME, which is not set on
  // Windows — confirm whether Windows support is required.
  var defaultPath = path.resolve(process.env.HOME, './.flocore');
  var defaultConfigFile = path.resolve(defaultPath, './flocore-node.json');

  if (!fs.existsSync(defaultPath)) {
    mkdirp.sync(defaultPath);
  }

  if (fs.existsSync(defaultConfigFile)) {
    var currentConfig = require(defaultConfigFile);

    // config must have a `version` field with major equal to package major version
    if(currentConfig.version && getMajorVersion(packageJson.version) === getMajorVersion(currentConfig.version)) {
      return {
        path: defaultPath,
        config: currentConfig
      };
    }

    console.log(`The configuration file at '${defaultConfigFile}' is incompatible with this version of Flocore.`);

    var now = new Date();
    // flocore-node.YYYY-MM-DD.UnixTimestamp.json
    // getUTCMonth() is zero-based, so add 1 for the calendar month (the
    // previous code produced e.g. "2018-0-15" for January).
    var backupFileName = `flocore-node.${now.getUTCFullYear()}-${now.getUTCMonth() + 1}-${now.getUTCDate()}.${now.getTime()}.json`;
    var backupFile = path.resolve(defaultPath, backupFileName);
    fs.renameSync(defaultConfigFile, backupFile);
    console.log(`The previous configuration file has been moved to: ${backupFile}.`);
  }

  console.log(`Creating a new configuration file at: ${defaultConfigFile}.`);

  var defaultServices = [
    'address',
    'block',
    'db',
    'fee',
    'header',
    'mempool',
    'p2p',
    'timestamp',
    'transaction',
    'web'
  ];

  var defaultDataDir = path.resolve(defaultPath, './data');

  if (!fs.existsSync(defaultDataDir)) {
    mkdirp.sync(defaultDataDir);
  }

  var defaultConfig = {
    version: packageJson.version,
    network: 'livenet',
    port: 3001,
    services: options.additionalServices ? defaultServices.concat(options.additionalServices) : defaultServices,
    datadir: defaultDataDir,
    servicesConfig: {
      'flosight-api': {
        cwdRequirePath: 'node_modules/flosight-api'
      },
      'flosight-ui': {
        cwdRequirePath: 'node_modules/flosight-ui'
      }
    }
  };
  fs.writeFileSync(defaultConfigFile, JSON.stringify(defaultConfig, null, 2));

  // Read back what was written so callers always receive the on-disk form.
  var config = JSON.parse(fs.readFileSync(defaultConfigFile, 'utf-8'));

  return {
    path: defaultPath,
    config: config
  };

}

module.exports = getDefaultConfig;
|
||||
@ -1,30 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
var flocore = require('flocore-lib');
|
||||
var $ = flocore.util.preconditions;
|
||||
var _ = flocore.deps._;
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var utils = require('../utils');
|
||||
|
||||
/**
 * Will return the path and flocore-node configuration, walking up from
 * `cwd` until a directory containing "flocore-node.json" is found.
 * @param {String} cwd - The absolute path to the current working directory
 * @returns {{path: String, config: Object}|false} false when no configuration file exists
 */
function findConfig(cwd) {
  $.checkArgument(_.isString(cwd), 'Argument should be a string');
  $.checkArgument(utils.isAbsolutePath(cwd), 'Argument should be an absolute path');
  var directory = String(cwd);
  while (!fs.existsSync(path.resolve(directory, 'flocore-node.json'))) {
    var parent = path.resolve(directory, '../');
    // Stop at the filesystem root: resolving "../" from the root yields
    // the root itself. The previous check compared against the literal
    // '/', which never matches on Windows (infinite loop) and skipped
    // checking the root directory itself.
    if (parent === directory) {
      return false;
    }
    directory = parent;
  }
  return {
    path: directory,
    config: require(path.resolve(directory, 'flocore-node.json'))
  };
}

module.exports = findConfig;
|
||||
@ -1,127 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
var async = require('async');
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
var spawn = require('child_process').spawn;
|
||||
var flocore = require('flocore-lib');
|
||||
var $ = flocore.util.preconditions;
|
||||
var _ = flocore.deps._;
|
||||
var utils = require('../utils');
|
||||
|
||||
/**
 * Will remove a service from flocore-node.json
 * @param {String} configFilePath - The absolute path to the configuration file
 * @param {String} service - The name of the module
 * @param {Function} done
 */
function removeConfig(configFilePath, service, done) {
  $.checkArgument(utils.isAbsolutePath(configFilePath), 'An absolute path is expected');
  fs.readFile(configFilePath, function(err, data) {
    if (err) {
      return done(err);
    }
    var config;
    try {
      config = JSON.parse(data);
    } catch (e) {
      // Report malformed JSON through the callback instead of throwing
      // inside the fs callback (which would crash the process).
      return done(e);
    }
    $.checkState(
      Array.isArray(config.services),
      'Configuration file is expected to have a services array.'
    );
    // Remove the service from the configuration. Filtering avoids the
    // previous splice-while-iterating bug, which skipped one of two
    // adjacent duplicate entries.
    config.services = config.services.filter(function(name) {
      return name !== service;
    });
    config.services = _.unique(config.services);
    // A sort comparator must return negative/zero/positive; the previous
    // comparator returned a boolean, which is not a valid comparator.
    config.services.sort(function(a, b) {
      if (a < b) {
        return -1;
      }
      return a > b ? 1 : 0;
    });
    fs.writeFile(configFilePath, JSON.stringify(config, null, 2), done);
  });
}
|
||||
|
||||
/**
 * Will uninstall a Node.js service via "npm uninstall <service> --save",
 * streaming npm's output to this process.
 * @param {String} configDir - The absolute configuration directory path
 * @param {String} service - The name of the service
 * @param {Function} done - Called with an Error when npm exits non-zero
 */
function uninstallService(configDir, service, done) {
  $.checkArgument(utils.isAbsolutePath(configDir), 'An absolute path is expected');
  $.checkArgument(_.isString(service), 'A string is expected for the service argument');

  var child = spawn('npm', ['uninstall', service, '--save'], {cwd: configDir});

  child.stdout.on('data', function(data) {
    process.stdout.write(data);
  });

  child.stderr.on('data', function(data) {
    process.stderr.write(data);
  });

  child.on('close', function(code) {
    if (code === 0) {
      return done();
    }
    return done(new Error('There was an error uninstalling service(s): ' + service));
  });
}

/**
 * Will remove a Node.js service if it is installed.
 * @param {String} configDir - The absolute configuration directory path
 * @param {String} service - The name of the service
 * @param {Function} done
 */
function removeService(configDir, service, done) {
  $.checkArgument(utils.isAbsolutePath(configDir), 'An absolute path is expected');
  $.checkArgument(_.isString(service), 'A string is expected for the service argument');
  uninstallService(configDir, service, done);
}
|
||||
|
||||
/**
 * Will remove the Node.js service and from the flocore-node configuration.
 * @param {String} options.path - The absolute flocore-node configuration directory
 * @param {Array} options.services - An array of strings of service names
 * @param {Function} done - A callback function called when finished
 */
function remove(options, done) {
  $.checkArgument(_.isObject(options));
  $.checkArgument(_.isFunction(done));
  $.checkArgument(
    _.isString(options.path) && utils.isAbsolutePath(options.path),
    'An absolute path is expected'
  );
  $.checkArgument(Array.isArray(options.services));

  var configPath = options.path;
  var services = options.services;

  var flocoreConfigPath = path.resolve(configPath, 'flocore-node.json');
  var packagePath = path.resolve(configPath, 'package.json');

  // Both files must exist for this to be a valid flocore-node directory.
  if (!fs.existsSync(flocoreConfigPath) || !fs.existsSync(packagePath)) {
    return done(
      new Error('Directory does not have a flocore-node.json and/or package.json file.')
    );
  }

  async.eachSeries(
    services,
    function(service, next) {
      // if the service is installed remove it
      removeService(configPath, service, function(err) {
        if (err) {
          return next(err);
        }
        // remove service to flocore-node.json
        removeConfig(flocoreConfigPath, service, next);
      });
    }, done
  );
}

module.exports = remove;
|
||||
@ -1,241 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
var path = require('path');
|
||||
var FlocoreNode = require('../node');
|
||||
var index = require('../');
|
||||
var flocore = require('flocore-lib');
|
||||
var _ = flocore.deps._;
|
||||
var log = index.log;
|
||||
var shuttingDown = false;
|
||||
var fs = require('fs');
|
||||
|
||||
/**
 * Builds and starts a FlocoreNode from a configuration object, wiring
 * process-level exit handlers and ready/error logging.
 * @param {Object} options
 * @param {String} options.path - Directory containing flocore-node.json
 * @param {String} [options.servicesPath] - Where to resolve service modules (defaults to options.path)
 * @param {Object} options.config - Parsed node configuration
 * @returns {Node} the started node instance
 */
function start(options) {

  var fullConfig = _.clone(options.config);

  var servicesPath;
  if (options.servicesPath) {
    servicesPath = options.servicesPath;
  } else {
    servicesPath = options.path;
  }

  fullConfig.path = path.resolve(options.path, './flocore-node.json');

  // Resolve each configured service name to its loaded module.
  fullConfig.services = start.setupServices(require, servicesPath, options.config);

  var node = new FlocoreNode(fullConfig);

  // setup handlers for uncaught exceptions and ctrl+c
  start.registerExitHandlers(process, node);

  node.on('ready', function() {
    log.info('Flocore Node ready');
  });

  node.on('error', function(err) {
    log.error(err);
  });

  node.start(function(err) {
    if(err) {
      // A failed startup shuts the whole process down.
      log.error('Failed to start services');
      if (err.stack) {
        log.error(err.stack);
      }
      start.cleanShutdown(process, node);
    }
  });

  return node;

}
|
||||
|
||||
/**
 * Checks a service for the expected methods and properties; throws when
 * the module cannot be used as a flocore-node service.
 * @param {Object} service
 * @throws {Error} when the module lacks a prototype, a dependencies
 *   list, or prototype start/stop methods
 */
function checkService(service) {
  var mod = service.module;
  var usable = mod.prototype &&
    mod.dependencies &&
    mod.prototype.start &&
    mod.prototype.stop;
  if (!usable) {
    throw new Error(
      'Could not load service "' +
      service.name +
      '" as it does not support necessary methods and properties.');
  }
}
|
||||
|
||||
/**
 * Attempts to load a service from the explicit `requirePath` in its
 * configuration; returns undefined when unset or when loading fails.
 * @param {Function} req - The require function to use
 * @param {Object} service - The service descriptor ({name, config})
 */
function lookInRequirePathConfig(req, service) {
  if (!service.config.requirePath) {
    return;
  }

  try {
    if (fs.statSync(service.config.requirePath).isDirectory()) {
      return req(service.config.requirePath);
    }
    // Strip only a literal ".js" extension. The previous regex /.js$/
    // left the dot unescaped, so any character before "js" matched
    // (e.g. "hajs" became "h").
    var serviceFile = service.config.requirePath.replace(/\.js$/, '');
    return req(serviceFile);
  } catch(e) {
    log.info('Checked the service\'s requirePath value, ' +
      'but could not find the service, checking elsewhere. ' +
      'Error caught: ' + e.message);
  }
}
|
||||
|
||||
/**
 * Attempts to load a service from the current working directory, at
 * either its configured `cwdRequirePath` or its name.
 * @param {Function} req - The require function to use
 * @param {Object} service - The service descriptor ({name, config})
 */
function lookInCwd(req, service) {
  var location = service.config.cwdRequirePath || service.name;
  try {
    return req(process.cwd() + '/' + location);
  } catch(e) {
    // MODULE_NOT_FOUND is expected here; anything else is worth logging.
    if (e.code !== 'MODULE_NOT_FOUND') {
      log.error(e);
    }
    log.info('Checked the current working directory for service: ' + location);
  }
}
|
||||
|
||||
/**
 * Attempts to load a service from the built-in lib/services directory.
 * @param {Function} req - The require function to use
 * @param {Object} service - The service descriptor ({name, config})
 */
function lookInBuiltInPath(req, service) {
  try {
    return req(path.resolve(__dirname, '../services/' + service.name));
  } catch (e) {
    // MODULE_NOT_FOUND is expected here; anything else is worth logging.
    if (e.code !== 'MODULE_NOT_FOUND') {
      log.error(e);
    }
    log.info('Checked the built-in path: lib/services, for service: ' + service.name);
  }
}
|
||||
|
||||
/**
 * Attempts to resolve a service via the `flocoreNode` field of the
 * installed module's package.json manifest.
 * @param {Function} req - The require function to use
 * @param {Object} service - The service descriptor ({name, config})
 */
function lookInModuleManifest(req, service) {
  try {
    var manifest = req(service.name + '/package.json');
    if (manifest.flocoreNode) {
      return req(service.name + '/' + manifest.flocoreNode);
    }
  } catch(e) {
    log.info('Checked the module\'s package.json for service: ' + service.name);
  }
}
|
||||
|
||||
/**
 * Resolves a service's implementation by trying, in order: the explicit
 * requirePath in the service config, the current working directory, the
 * built-in lib/services directory, and finally the flocoreNode field of
 * the module's package.json. Assigns the result to service.module, or
 * throws when every location fails.
 * @param {Function} req - The require function to use
 * @param {Object} service - The service descriptor ({name, config})
 */
function loadModule(req, service) {
  // Each lookup returns undefined on a miss, so || walks the fallbacks
  // in the same order (and with the same short-circuiting) as before.
  var serviceCode =
    lookInRequirePathConfig(req, service) ||
    lookInCwd(req, service) ||
    lookInBuiltInPath(req, service) ||
    lookInModuleManifest(req, service);

  if (!serviceCode) {
    throw new Error('Attempted to load the ' + service.name + ' service from: ' +
      'the requirePath in the services\' config, then "' +
      process.cwd() + '" then from: "' + __dirname + '/../lib/services' + '" finally from: "' +
      process.cwd() + '/package.json" - flocoreNode field. All paths failed to find valid nodeJS code.');
  }

  service.module = serviceCode;
}
|
||||
|
||||
/**
 * This function will loop over the configuration for services and require the
 * specified modules, and assemble an array in this format:
 * [
 *   {
 *     name: 'florincoind',
 *     config: {},
 *     module: FlorincoinService
 *   }
 * ]
 * @param {Function} req - The require function to use
 * @param {Array} servicesPath - The local path (for requiring services)
 * @param {Object} config
 * @param {Array} config.services - An array of strings of service names.
 * @returns {Array}
 */
function setupServices(req, servicesPath, config) {
  // Let require() resolve modules from the configuration directory too.
  module.paths.push(path.resolve(servicesPath, './node_modules'));

  if (!config.services) {
    return [];
  }

  return config.services.map(function(name) {
    var servicesConfig = config.servicesConfig;
    var service = {
      name: name,
      config: (servicesConfig && servicesConfig[name]) || {}
    };

    loadModule(req, service);
    checkService(service);

    return service;
  });
}
|
||||
|
||||
/**
 * Stop all of the node's services, then exit the process.
 * @param {Object} _process - The process object to exit (injected so tests can stub it)
 * @param {Node} node - The node whose services should be stopped
 */
function cleanShutdown(_process, node) {
  node.stop(function(err) {
    if(err) {
      log.error('Failed to stop services: ' + err);
      return _process.exit(1);
    }
    log.info('Halted');
    // Fix: use the injected process object. The original called the global
    // `process.exit(0)` here, which bypassed `_process` on the success path
    // while the error path above correctly used `_process.exit(1)`.
    _process.exit(0);
  });
}
|
||||
|
||||
/**
 * Shared handler for process-exit events (uncaught exceptions and SIGINT).
 * @param {Object} options
 * @param {Boolean} [options.exit] - stop services and exit(-1) after logging an error
 * @param {Boolean} [options.sigint] - the handler was triggered by SIGINT (Ctrl+C)
 * @param {Object} _process - The process object to exit
 * @param {Node} node - The node to shut down
 * @param {Error|String} [err] - The error that triggered the handler, if any
 */
function exitHandler(options, _process, node, err) {
  // Log anything that is a genuine error; a "SIGINT" value is not an error
  // and is handled by the sigint branch below.
  if (err && err !== "SIGINT") {
    log.error('uncaught exception:', err);
    if (err.stack) {
      log.error(err.stack);
    }
    if (options.exit) {
      node.stop(function(stopError) {
        if (stopError) {
          log.error('Failed to stop services: ' + stopError);
        }
        _process.exit(-1);
      });
    }
  }

  // Ctrl+C: begin a clean shutdown exactly once, guarded by the
  // module-level `shuttingDown` flag.
  if (options.sigint && !shuttingDown) {
    shuttingDown = true;
    start.cleanShutdown(_process, node);
  }
}
|
||||
|
||||
/**
 * Attach the shared exit handler to the given process object for both
 * uncaught exceptions and SIGINT.
 * @param {Object} _process - The process object to listen on
 * @param {Node} node - The node to shut down on exit
 */
function registerExitHandlers(_process, node) {
  var onUncaught = exitHandler.bind(null, { exit: false }, _process, node);
  var onSigint = exitHandler.bind(null, { sigint: true }, _process, node);

  _process.on('uncaughtException', onUncaught);
  _process.on('SIGINT', onSigint);
}
|
||||
|
||||
// `start` is the primary export; the setup/shutdown helpers are attached as
// properties on it (NOTE(review): presumably so they can be invoked and
// stubbed individually by callers/tests — confirm against consumers).
module.exports = start;
module.exports.registerExitHandlers = registerExitHandlers;
module.exports.exitHandler = exitHandler;
module.exports.setupServices = setupServices;
module.exports.cleanShutdown = cleanShutdown;
|
||||
@ -1,93 +0,0 @@
|
||||
'use strict';
|
||||
/* exported LRU, assert, constants */
|
||||
|
||||
var util = require('util');
|
||||
var EventEmitter = require('events').EventEmitter;
|
||||
var LRU = require('lru-cache');
|
||||
var assert = require('assert');
|
||||
var constants = require('./constants');
|
||||
|
||||
/**
 * Base class for flocore-node services. Concrete services inherit from this
 * class and override the lifecycle, event, and API hooks below.
 * @param {Object} options
 * @param {Node} options.node - The node this service is attached to
 * @param {String} options.name - The service's name
 */
function Service(options) {
  EventEmitter.call(this);

  this.node = options.node;
  this.name = options.name;
}

util.inherits(Service, EventEmitter);

/**
 * Describes the dependencies that should be loaded before this service.
 */
Service.dependencies = [];

/**
 * blockHandler
 * @param {Block} block - the block being added or removed from the chain
 * @param {Boolean} add - whether the block is being added or removed
 * @param {Function} callback - call with the leveldb database operations to perform
 */
Service.prototype.blockHandler = function(block, add, callback) {
  // Default: no database operations; subclasses implement real indexing.
  setImmediate(function() {
    callback(null, []);
  });
};

/**
 * The bus events available for subscription.
 * Example override:
 *   return [
 *     ['eventname', this, this.subscribeEvent, this.unsubscribeEvent],
 *   ];
 * @return {Array} an array of event info
 */
Service.prototype.getPublishEvents = function() {
  return [];
};

/**
 * The API methods to expose.
 * Example override:
 *   return [
 *     ['getData', this, this.getData, 1]
 *   ];
 * @return {Array} return array of methods
 */
Service.prototype.getAPIMethods = function() {
  return [];
};

/**
 * Function which is called when the module is first initialized.
 * @param {Function} done - invoked (asynchronously) when startup completes
 */
Service.prototype.start = function(done) {
  setImmediate(done);
};

/**
 * Function to be called when flocore-node is stopped.
 * @param {Function} done - invoked (asynchronously) when shutdown completes
 */
Service.prototype.stop = function(done) {
  setImmediate(done);
};

/**
 * Setup express routes.
 * @param {Express} app
 */
Service.prototype.setupRoutes = function() {
  // No routes by default; override to register express handlers.
};

/**
 * @return {String} the route prefix for this service (its name)
 */
Service.prototype.getRoutePrefix = function() {
  return this.name;
};
|
||||
|
||||
// Export the base class for concrete services to inherit from.
module.exports = Service;
|
||||
@ -1,167 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
/**
 * Key/value codec for the address service's leveldb records.
 *
 * Every key begins with `servicePrefix` followed by a one-byte record type:
 *   0x00 - address index records
 *   0x01 - UTXO index records
 *   0xfe - address summary cache records
 *
 * Note: the decoders read fixed offsets that assume a 2-byte servicePrefix.
 *
 * @param {Buffer} servicePrefix - prefix that namespaces this service's keys
 */
function Encoding(servicePrefix) {
  this.servicePrefix = servicePrefix;
  // Buffer.from/Buffer.alloc replace the deprecated `new Buffer()` (DEP0005).
  this.addressIndex = Buffer.from('00', 'hex');
  this.utxoIndex = Buffer.from('01', 'hex');
  this.addressCache = Buffer.from('fe', 'hex');
}

// 64 hex zeros: the 32-byte all-zero txid used when no txid is supplied.
var ZERO_TXID = '0'.repeat(64);

/**
 * Build an address-index key:
 * prefix | 0x00 | addrLen(1) | addr | height(4 BE) | txid(32) | index(4 BE) | input(1) | timestamp(4 BE)
 * @param {String} address
 * @param {Number} [height=0]
 * @param {String} [txid] - hex txid; defaults to all zeros
 * @param {Number} [index=0]
 * @param {Number} [input=0] - 1 if the address appears in an input, 0 for an output
 * @param {Number} [timestamp=0]
 * @returns {Buffer}
 */
Encoding.prototype.encodeAddressIndexKey = function(address, height, txid, index, input, timestamp) {
  var addressSizeBuffer = Buffer.alloc(1);
  addressSizeBuffer.writeUInt8(address.length);

  var heightBuffer = Buffer.alloc(4);
  heightBuffer.writeUInt32BE(height || 0);

  var indexBuffer = Buffer.alloc(4);
  indexBuffer.writeUInt32BE(index || 0);

  // Whether the address appears in an input (1) or an output (0).
  var inputBuffer = Buffer.alloc(1);
  inputBuffer.writeUInt8(input || 0);

  var timestampBuffer = Buffer.alloc(4);
  timestampBuffer.writeUInt32BE(timestamp || 0);

  return Buffer.concat([
    this.servicePrefix,
    this.addressIndex,
    addressSizeBuffer,
    Buffer.from(address, 'utf8'),
    heightBuffer,
    Buffer.from(txid || ZERO_TXID, 'hex'),
    indexBuffer,
    inputBuffer,
    timestampBuffer
  ]);
};

/**
 * Decode a key produced by encodeAddressIndexKey.
 * @param {Buffer} buffer
 * @returns {Object} { address, height, txid, index, input, timestamp }
 */
Encoding.prototype.decodeAddressIndexKey = function(buffer) {
  // Offset 3 = 2-byte servicePrefix + 1-byte record type.
  var addressSize = buffer.readUInt8(3);
  var address = buffer.slice(4, addressSize + 4).toString('utf8');
  var height = buffer.readUInt32BE(addressSize + 4);
  var txid = buffer.slice(addressSize + 8, addressSize + 40).toString('hex');
  var index = buffer.readUInt32BE(addressSize + 40);
  var input = buffer.readUInt8(addressSize + 44);
  var timestamp = buffer.readUInt32BE(addressSize + 45);
  return {
    address: address,
    height: height,
    txid: txid,
    index: index,
    input: input,
    timestamp: timestamp
  };
};

/**
 * Build a UTXO-index key:
 * prefix | 0x01 | addrLen(1) | addr | txid(32) | outputIndex(4 BE)
 * @param {String} address
 * @param {String} [txid] - hex txid; defaults to all zeros
 * @param {Number} [outputIndex=0]
 * @returns {Buffer}
 */
Encoding.prototype.encodeUtxoIndexKey = function(address, txid, outputIndex) {
  var addressSizeBuffer = Buffer.alloc(1);
  addressSizeBuffer.writeUInt8(address.length);

  var outputIndexBuffer = Buffer.alloc(4);
  outputIndexBuffer.writeUInt32BE(outputIndex || 0);

  return Buffer.concat([
    this.servicePrefix,
    this.utxoIndex,
    addressSizeBuffer,
    Buffer.from(address, 'utf8'),
    Buffer.from(txid || ZERO_TXID, 'hex'),
    outputIndexBuffer
  ]);
};

/**
 * Decode a key produced by encodeUtxoIndexKey.
 * @param {Buffer} buffer
 * @returns {Object} { address, txid, outputIndex }
 */
Encoding.prototype.decodeUtxoIndexKey = function(buffer) {
  var addressSize = buffer.readUInt8(3);
  var address = buffer.slice(4, addressSize + 4).toString('utf8');
  var txid = buffer.slice(addressSize + 4, addressSize + 36).toString('hex');
  var outputIndex = buffer.readUInt32BE(addressSize + 36);

  return {
    address: address,
    txid: txid,
    outputIndex: outputIndex
  };
};

/**
 * Encode a UTXO value: height(4 BE) | satoshis(8, double BE) | timestamp(4 BE) | script
 * @param {Number} height
 * @param {Number} satoshis
 * @param {Number} [timestamp=0]
 * @param {Buffer} scriptBuffer
 * @returns {Buffer}
 */
Encoding.prototype.encodeUtxoIndexValue = function(height, satoshis, timestamp, scriptBuffer) {
  var heightBuffer = Buffer.alloc(4);
  heightBuffer.writeUInt32BE(height);
  // NOTE: satoshis are stored as an IEEE-754 double (8 bytes); values above
  // 2^53 would lose precision. Unchanged from the original on-disk format.
  var satoshisBuffer = Buffer.alloc(8);
  satoshisBuffer.writeDoubleBE(satoshis);
  var timestampBuffer = Buffer.alloc(4);
  timestampBuffer.writeUInt32BE(timestamp || 0);
  return Buffer.concat([heightBuffer, satoshisBuffer, timestampBuffer, scriptBuffer]);
};

/**
 * Decode a value produced by encodeUtxoIndexValue.
 * @param {Buffer} buffer
 * @returns {Object} { height, satoshis, timestamp, script }
 */
Encoding.prototype.decodeUtxoIndexValue = function(buffer) {
  var height = buffer.readUInt32BE(0);
  var satoshis = buffer.readDoubleBE(4);
  var timestamp = buffer.readUInt32BE(12);
  var scriptBuffer = buffer.slice(16);
  return {
    height: height,
    satoshis: satoshis,
    timestamp: timestamp,
    script: scriptBuffer
  };
};

/**
 * Build an address-cache key: prefix | 0xfe | addr
 * @param {String} address
 * @returns {Buffer}
 */
Encoding.prototype.encodeAddressCacheKey = function(address) {
  return Buffer.concat([this.servicePrefix, this.addressCache, Buffer.from(address, 'utf8')]);
};

/**
 * Decode a key produced by encodeAddressCacheKey.
 * @param {Buffer} buffer
 * @returns {String} the address
 */
Encoding.prototype.decodeAddressCacheKey = function(buffer) {
  return buffer.slice(3).toString('utf8');
};

/**
 * Encode the cached summary for an address:
 * balance(8 BE) | received(8 BE) | sent(8 BE) | txApperances(4 BE) | lastTx(32) | lastBlock(32)
 * @param {String} lastTx - hex txid of the most recent transaction
 * @param {String} lastBlock - hex hash of the most recent block
 * @param {Number} balance
 * @param {Number} received
 * @param {Number} sent
 * @param {Number} txApperances
 * @returns {Buffer}
 */
Encoding.prototype.encodeAddressCacheValue = function(lastTx, lastBlock, balance, received, sent, txApperances) {
  var balanceBuffer = Buffer.alloc(8);
  balanceBuffer.writeBigUInt64BE(BigInt(balance));

  var receivedBuffer = Buffer.alloc(8);
  receivedBuffer.writeBigUInt64BE(BigInt(received));

  var sentBuffer = Buffer.alloc(8);
  sentBuffer.writeBigUInt64BE(BigInt(sent));

  var txApperancesBuffer = Buffer.alloc(4);
  txApperancesBuffer.writeUInt32BE(txApperances);

  return Buffer.concat([
    balanceBuffer,
    receivedBuffer,
    sentBuffer,
    txApperancesBuffer,
    Buffer.from(lastTx, 'hex'),
    Buffer.from(lastBlock, 'hex')
  ]);
};

/**
 * Decode a value produced by encodeAddressCacheValue.
 * @param {Buffer} buffer
 * @returns {Object} { lastTx, lastBlock, balance, received, sent, txApperances }
 */
Encoding.prototype.decodeAddressCacheValue = function(buffer) {
  // Number() is exact below 2^53; larger amounts would lose precision (same
  // as the original parseInt-on-BigInt behavior, but without stringifying).
  var balance = Number(buffer.readBigUInt64BE(0));
  var received = Number(buffer.readBigUInt64BE(8));
  var sent = Number(buffer.readBigUInt64BE(16));
  var txApperances = buffer.readUInt32BE(24);
  var lastTx = buffer.slice(28, 60).toString('hex'); // 28 + 32 (tx hash length) = 60
  var lastBlock = buffer.slice(60).toString('hex');
  return { lastTx, lastBlock, balance, received, sent, txApperances };
};
|
||||
|
||||
// Export the Encoding codec.
module.exports = Encoding;
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user