Compare commits

...

318 Commits

Author SHA1 Message Date
sairajzero
8121a2ba96 Bug fix
- Fixed: mempool (unconfirmed tx) getting repeated in chain query when using the `before` option
2023-04-27 04:15:43 +05:30
sairajzero
31344d770e Adding 'before' option to address APIs
- Similar to the 'after' option but its inverse, i.e. if a before-txid is passed, the list/values are queried before the given txid (see the sketch after this entry)
- The 'before' and 'after' options can be used in combination or individually
- The cache system for the address summary API won't be used when the 'after' and/or 'before' option is used
2023-04-27 03:48:09 +05:30
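As a rough illustration of the 'before'/'after' pagination described in the entry above, a client call might look like the following minimal sketch in the style of the contrib scripts; the route, port, and parameter handling are assumptions for illustration and are not taken from this changeset:

var request = require('request');

// addr: address to query; beforeTxid: only return txs before this txid (optional)
function queryAddressTxs(addr, beforeTxid, callback) {
  var qs = {};
  if (beforeTxid) { qs.before = beforeTxid; }   // inverse of the 'after' option
  request({
    url: 'http://localhost:3001/api/addr/' + addr + '/txs',  // route name is an assumption
    qs: qs,
    json: true
  }, function(err, response, body) {
    callback(err, body);
  });
}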
sairajzero
96677310c2 Bug fixes 2023-04-27 01:41:22 +05:30
sairajzero
e180672583 Adding reverse option to address history query
- Setting the `reverse` option to true will query the latest 1000 (max value) txs instead of the first 1000 txs.
2023-04-23 02:55:49 +05:30
sairajzero
158d5aefc2 Decrease MAX_TX_QUERY_LIMIT_SUMMARY to 500
- Summary cache is triggered for addresses with more than 500 tx
2023-04-22 23:55:11 +05:30
sairajzero
e7248320a6 bug fix: txid & blockhash not cached correctly 2023-04-22 22:19:23 +05:30
sairajzero
60289a644b Fix: Incorrect values in cache during reorg
- Store the blockhash of the lastTx in the cache value
- If the last cached tx's block was removed (during a reorg), delete the cache and recalculate the values (see the sketch after this entry).
2023-04-21 22:51:45 +05:30
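The entry above describes keeping the blockhash of the last cached tx so that a reorg can be detected. A minimal sketch of that check, assuming a hypothetical cache layout and a getBlockHash helper (neither is shown in this changeset):

// cache.lastTxBlockHash and cache.lastTxHeight are assumed fields stored with the cached values
function validateAddressCache(cache, getBlockHash, callback) {
  getBlockHash(cache.lastTxHeight, function(err, currentHash) {
    if (err) { return callback(err); }
    if (currentHash !== cache.lastTxBlockHash) {
      // the block holding the last cached tx was reorged out:
      // drop the cache so the values are recalculated from scratch
      return callback(null, { valid: false });
    }
    callback(null, { valid: true });
  });
}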
sairajzero
d42e569008 update address-cache key prefix 2023-04-19 03:03:21 +05:30
sairajzero
6beb2ecd06 Adding function to delete cache for a given addr
Adding _deleteCache: deletes the cache for a given address (useful for dev purposes or to recalculate caches)
2023-04-19 02:31:04 +05:30
sairajzero
4d023760ad Adding del (delete) function in db service 2023-04-19 02:28:44 +05:30
sairajzero
10d9459f26 Bug fix: unconfirmed-tx values corrupting cache 2023-04-19 01:24:37 +05:30
sairajzero
8b07a1e4a5 separate MAX_TX_QUERY_LIMIT for each query type 2023-04-19 01:03:45 +05:30
sairajzero
4afb0dfaaa Bug fix: cache summary not working properly 2023-04-19 00:45:44 +05:30
sairajzero
f8260541ef Fix: Cache value encoding
- Using BigInt (uInt64) for the cache values (balance, sent, received) as they are too large for uInt32 (see the sketch after this entry)
- Reflect the BigInt (uInt64) changes in cache value decoding
- Removed default values in cache encoding; if a value is invalid it must throw an error and not write to the db
2023-04-19 00:42:47 +05:30
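A rough sketch of what unsigned 64-bit encoding of the cached values could look like; the exact buffer layout and field order are not shown in this changeset, so the layout below is an assumption (and it relies on Node's Buffer BigInt methods):

// Encode balance/sent/received as unsigned 64-bit big-endian values.
// BigInt() throws on invalid input, so nothing is written to the db with default values.
function encodeCacheValue(balance, sent, received) {
  var buf = Buffer.alloc(24);
  buf.writeBigUInt64BE(BigInt(balance), 0);
  buf.writeBigUInt64BE(BigInt(sent), 8);
  buf.writeBigUInt64BE(BigInt(received), 16);
  return buf;
}

function decodeCacheValue(buf) {
  return {
    balance: buf.readBigUInt64BE(0),
    sent: buf.readBigUInt64BE(8),
    received: buf.readBigUInt64BE(16)
  };
}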
sairajzero
6b794aa9a3 Fixes: cache storing unconfirmed values
- Removed unconfirmed values from cache store and get
- Do not update the lastItem value for unconfirmed txs
2023-04-18 04:37:01 +05:30
sairajzero
adb81616aa Run cache summary
Run cache storage for the address summary in the background when the queried result is incomplete
2023-04-18 04:23:41 +05:30
sairajzero
3982807b32 Cache address-summary
Store a cache of the address summary in the db when the address has more than MAX_TX_QUERY_LIMIT txs (see the sketch after this entry)
2023-04-17 20:13:29 +05:30
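A minimal sketch combining the idea in the two entries above: only "large" addresses get a stored summary, and the cache fill runs in the background so the API response is not delayed. The 500 threshold matches the MAX_TX_QUERY_LIMIT_SUMMARY entry further up; combining the two conditions and the function names below are assumptions:

var MAX_TX_QUERY_LIMIT_SUMMARY = 500;    // threshold, per the entry further up in this list

// result: the summary computed for this request; storeSummary: assumed db helper
function maybeCacheSummary(address, result, storeSummary) {
  if (result.txCount > MAX_TX_QUERY_LIMIT_SUMMARY && result.incomplete) {
    setImmediate(function() {            // run in the background, off the response path
      storeSummary(address, result, function(err) {
        if (err) { console.error('summary cache failed for', address, err); }
      });
    });
  }
}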
sairajzero
0a3a1b5ea6 changing typeof to equivalent lodash 2023-04-17 20:07:12 +05:30
sairajzero
df7710ded1 Adding option mempoolOnly in querying
- The mempoolOnly option makes the db query read from the mempool only (i.e. unconfirmed txs only; see the sketch after this entry)
2023-04-13 03:52:02 +05:30
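As a small illustration, a query-options object for the behaviour described above might look like this; only the mempoolOnly flag comes from the entry, the rest is assumed:

// Query only unconfirmed (mempool) txs for an address.
var options = { mempoolOnly: true };          // skip the confirmed-tx index entirely
// e.g. getAddressHistory(address, options, callback);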
sairajzero
27f3993884 Fixed: inconsistent addrs API results on multiple addresses
- Fixed: the addrs API on multiple addresses giving an inconsistent tx list when the total tx count exceeds 1000 (MAX_TX_QUERY_LIMIT), due to ordering getting mixed up in the parallel query
2023-04-13 02:26:33 +05:30
sairajzero
71bcafb243 Limited data responses identification
- Address-query response data has the `incomplete` property set to `true` when the address has more than 1000 txs (MAX_TX_QUERY_LIMIT)
2023-04-11 04:04:19 +05:30
sairajzero
1b6352573f Limit all address query to MAX_TX_QUERY_LIMIT
- Any address data API will now max out at 1000 txs (MAX_TX_QUERY_LIMIT)
- For addresses with more than 1000 txs, use chained API queries to get the complete data (balance, txids, etc.), as sketched after this entry

- Set the parallel queue limit to 1: preserves consistency in API queries (and prevents incorrect data)
2023-04-11 03:49:21 +05:30
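A hedged sketch of the chained querying pattern mentioned above: keep requesting pages of at most MAX_TX_QUERY_LIMIT txs, passing the last txid of each page as `after`, until the response is no longer marked incomplete. The route and response field names are assumptions:

var request = require('request');

function fetchAllTxs(addr, afterTxid, acc, callback) {
  var qs = afterTxid ? { after: afterTxid } : {};
  request({
    url: 'http://localhost:3001/api/addr/' + addr + '/txs',  // route is an assumption
    qs: qs,
    json: true
  }, function(err, response, body) {
    if (err) { return callback(err); }
    acc = acc.concat(body.txs || []);
    if (body.incomplete && body.txs && body.txs.length) {
      // chain the next query from the last txid received in this page
      return fetchAllTxs(addr, body.txs[body.txs.length - 1].txid, acc, callback);
    }
    callback(null, acc);
  });
}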
sairajzero
7f86e488e4 bug fix
Fixed bug: the API on an address not giving a response (timeout) when it has only 1 tx
2023-04-06 22:25:03 +05:30
sairajzero
37e08b0801 Bug fix
- Fixed: Incorrect ordering of tx list with unconfirmed tx
2023-02-06 02:02:52 +05:30
Sai Raj
16bed1b811
Merge pull request #9 from ranchimall/api-improvements
API Improvements
2023-02-06 00:18:20 +05:30
sairajzero
bca4fe4f97 Bug fix
- Fixed: data inconsistency and loss of continuity in chained querying of tx details
- Fixed: No response when the query has no txs (i.e. either the address has no txs, or the most recent tx is used as the key in the `after` option)
2023-02-05 23:01:06 +05:30
sairajzero
7409dbb77d Bug fixes
- Fixed: incorrect data returned via the `from` and `to` options
- Fixed: Missing data due to unordered items in getAddressHistory
- Fixed: callback invoked multiple times in _streamAddressSummary due to the queue parallel limit
- Fixed: Queue drain being invoked before mempool txs are pushed into the queue
2023-02-05 19:03:11 +05:30
sairajzero
774d830fff Improvements to API query options
- Stop the request-stream process when the stop flag is on
- Changed: the db read order to forward (so that continuity is preserved with the `after` option and ws API calls), plus the changes required for the same
- Deprecated the `from` and `to` options in calls where they are not supported
- Added: temporary support for the `from` and `to` options in getAddressHistory
2023-02-05 03:12:04 +05:30
sairajzero
3a75002efc API Query options
- Fixed: the `start` option to query from a block height
- Added the `after` option: pass it in the API to get the list after the given txid
Note: If both `start` and `after` are given, the greater height will be used (see the sketch after this entry)
Note: an invalid or unconfirmed txid cannot be used in the `after` option and will be ignored
2023-02-04 20:29:41 +05:30
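The precedence rule noted above (the greater height wins when both `start` and `after` are given, and an invalid or unconfirmed `after` txid is ignored) could be expressed roughly as below; the helper shape is an assumption:

// startHeight: from the `start` option; afterTx: tx looked up from the `after` txid (or undefined)
function resolveStartHeight(startHeight, afterTx) {
  if (!afterTx || afterTx.__height === undefined) {
    return startHeight || 0;                 // invalid or unconfirmed `after` txid is ignored
  }
  return Math.max(startHeight || 0, afterTx.__height);
}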
sairajzero
d9579853ad Improve handling of duplicate tx query
- temporarily store txids to ignore duplication
- removed the queue pause() and resume() in _streamAddressSummary
2023-02-02 19:14:29 +05:30
sairajzero
3fbcbbe7bc Fixed: APIs giving incorrect data
- Fixed: addr API giving decimals in satoshi values
- Fixed: Incorrect balance, totalSent, totalReceived values returned in API calls (issue: duplication)
- Fixed: incorrect totalCount value in addr API and duplication of tx list
2023-01-28 21:55:58 +05:30
sairajzero
dbfe39991f Fixed: Address-summary request not responding 2023-01-28 02:04:40 +05:30
sairajzero
6c164993bf Update package.json 2023-01-27 22:33:30 +05:30
sairajzero
2145fdb056 pass 'express-ws' module to service setupRoutes
- flosight-api uses 'express-ws' module for ws api calls
2023-01-27 18:07:38 +05:30
sairajzero
4472ed8394 Changing fns to use _streamAddressSummary
Functions updated:
- getAddressHistory
- getAddressSummary

(old fns are kept as-is and renamed to __getAddressHistory and __getAddressSummary respectively)
2023-01-27 17:38:37 +05:30
sairajzero
e13bd5e3e6 Adding _streamAddressSummary
- The fn uses a streamer to process data, so it doesn't store the entire list of txids or tx details (see the sketch after this entry)
- The streamer fn can process the tx data as required
2023-01-27 17:36:39 +05:30
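A minimal sketch of the streaming idea described above: instead of collecting every tx into one array, each tx is handed to a streamer callback as it is read, so memory stays bounded. The signature and the readAddressTxStream helper are assumptions:

// streamer(tx, summary) is called once per tx; summary state is accumulated incrementally.
function streamAddressSummary(address, streamer, done) {
  var summary = { txCount: 0 };
  readAddressTxStream(address)               // assumed helper returning a readable stream of txs
    .on('data', function(tx) {
      summary.txCount++;
      streamer(tx, summary);                 // caller processes the tx data as required
    })
    .on('error', done)
    .on('end', function() { done(null, summary); });
}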
sairajzero
0283be05db Limit max response size 2023-01-27 17:31:02 +05:30
Sai Raj
25eb992cf1
Merge pull request #8 from ranchimall/block-subscribe-stuck-fix
Fix: Block subscribe getting stuck
2023-01-18 00:24:09 +05:30
sairajzero
69e9465b93 block service: set MAX_IGNORED_BLOCK to 16 2023-01-18 00:19:20 +05:30
sairajzero
700abe0500 hotfix: block subscription getting stuck
- Issue: Block subscription getting stuck when a (missing) block wasn't received by the block service.
- Solution: Re-trigger the sync process when too many blocks are ignored (sketched after this entry)

- Other: fixed a typo in the _reportInterval property
2023-01-17 21:54:48 +05:30
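The re-trigger logic described above, sketched with the MAX_IGNORED_BLOCK value from the entry just above it; the function shape and method name are assumptions:

var MAX_IGNORED_BLOCK = 16;
var ignoredBlocks = 0;

function onBlockIgnored(blockService) {
  if (++ignoredBlocks > MAX_IGNORED_BLOCK) {
    ignoredBlocks = 0;
    blockService._startSync();               // assumed method; re-trigger the sync process
  }
}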
sairajzero
0572ee6b35 header: clear _syncCheckInterval upon synced
Header service: set _syncCheckInterval to null when clearing the interval after the sync has completed
2023-01-17 16:44:42 +05:30
Sai Raj
e6826c7dfc
Merge pull request #6 from ranchimall/startup-sync-fix
Startup sync fix
2023-01-15 12:33:50 +05:30
sairajzero
ece347c825 hotfix: best header not updating for prev fix
- Updated the header service's best header from the fcoin node directly.
- Set the interval check only if fcoin is started by flocore (otherwise the best header height will not get updated and the sync will complete incorrectly)
2023-01-15 00:02:25 +05:30
sairajzero
b831cbce7e Update .gitignore 2023-01-14 23:56:18 +05:30
sairajzero
f9e2ed304b hotfix for unresponsive header sync 2023-01-11 03:38:38 +05:30
00515c5378
Update index.js
Added a missing return in HeaderService.prototype._handleError
2023-01-09 05:12:39 +05:30
Sai Raj
e3f5de4df5
Merge pull request #3 from ranchimall/uncaught-no-shut
hotfix: Do not shutdown on unhandled exceptions
2023-01-08 17:36:33 +05:30
sairajzero
26b65d63a8 hotfix: Do not shutdown on unhandled exceptions 2023-01-08 17:13:30 +05:30
4113d9cfd0
Update index.js 2022-01-25 11:23:06 +05:30
7fa2f096df
Update reorg.js 2022-01-25 11:18:46 +05:30
fcecf08ac0
Update reorg.js 2022-01-25 11:18:13 +05:30
af6048de93
Update index.js 2022-01-25 11:13:27 +05:30
d47b6047e5
Update index.js 2022-01-25 11:00:27 +05:30
c525516a95
Update reorg.js 2022-01-25 10:57:40 +05:30
0eaa4b6fd2
Update index.js 2022-01-25 10:54:54 +05:30
2921e389df
Update block_handler.js 2022-01-25 00:31:31 +05:30
522d00bd52
Update index.js 2022-01-25 00:25:25 +05:30
821aae706d
Update index.js 2022-01-25 00:22:55 +05:30
18d1a16b0c
Update reorg.js 2022-01-25 00:07:57 +05:30
9c0ec67ccf
Update index.js 2022-01-25 00:03:52 +05:30
f0768027c9
Update index.js 2022-01-24 18:06:15 +05:30
80f22f731f
Update index.js 2022-01-24 17:57:48 +05:30
81a3d5f8ff
Update reorg.js 2022-01-24 17:56:37 +05:30
69fc6790ae
Update reorg.js 2022-01-24 17:55:39 +05:30
5db68b6bb7
Update reorg.js 2022-01-24 17:53:30 +05:30
091d7aa863
Update reorg.js 2022-01-24 17:52:14 +05:30
582bdd698a
Update reorg.js 2022-01-24 17:50:10 +05:30
726156843e
Update index.js 2022-01-24 17:45:10 +05:30
846f85e2f8
Update block_handler.js 2022-01-24 17:40:20 +05:30
ca47013c5a
Update block_handler.js 2022-01-24 15:17:50 +05:30
82357f2ecc
Update reorg.js 2022-01-24 15:17:02 +05:30
c3cc5f7465
Update index.js 2022-01-24 15:16:15 +05:30
1888b4a4ae
Throwing error removed
Throwing a new Error was stopping the running instance of flosight. Substituted with log.info.
2022-01-24 15:15:11 +05:30
1edc88f14b
Update node.js 2021-05-26 19:54:53 +05:30
0966ec124b
Stop the stoppage of service 2021-05-25 14:58:20 +05:30
05c6cd7739
Fixed header existence conditions 2021-05-25 11:07:56 +05:30
2c37e05ff7
Added test to check header.height 2021-05-25 11:01:31 +05:30
sairajzero
0cfb80a164 reverting log.info to log.debug 2021-05-24 17:56:13 +05:30
11b0a58351
Fixed syntax issue 2021-05-24 16:54:44 +05:30
869a7c21b4
Converted _sync log.debug to log.info 2021-05-24 16:29:15 +05:30
17dd83de10
db index.js asserts removed for resilience 2021-05-20 09:40:40 +05:30
bad1fb2552
Updated definition of index.log 2021-05-18 11:02:41 +05:30
5344d9cd5b
Fixed the conditional operator 2021-05-17 16:14:54 +05:30
89c43cd9a7
Fixing the conditional operator 2021-05-17 16:11:11 +05:30
0d4a7e3e42
Modification of condition operator 2021-05-17 16:06:06 +05:30
d689045002
Updated comparison operator 2021-05-17 16:00:27 +05:30
Vivek Teega
1efed08a39 1.0.9 Removing assertions in transaction and p2p service 2021-05-17 14:02:31 +05:30
Vivek Teega
56e5cb1f25 1.0.8 Commented out assertion stops 2021-05-17 13:37:19 +05:30
Vivek Teega
453d11c64c 1.0.7 Commented out assertion stops 2021-05-17 12:59:36 +05:30
Vivek Teega
3598c92e0f 1.0.6 Commented out more node.stop() calls
- Commented out all node.stop() calls to prevent the node from exiting under various error conditions
2021-05-13 18:46:20 +05:30
Vivek Teega
0061285204 1.0.5 "return" was missing from lib/services/block/index.js
- Reverted back the previous 2 commits
- Added return to lib/services/block/index.js
2021-05-11 13:21:45 +05:30
Vivek Teega
3b8d992504 1.0.4 FLOsight Error Resolution
Keeping the return as, return next();
2021-05-11 13:11:51 +05:30
Vivek Teega
79fff4051b 1.0.3 FLOsight Error Resolution
Commenting out the part which makes the node
2021-05-11 13:09:50 +05:30
Vivek Teega
c3d32b7ffc FLO Crash Error Resolution 2021-05-10 12:39:02 +05:30
Vivek Teega
861e908080 Change in reorg operation
Reorg operation previously used to nuke 2000 block headers and redownload them from peers upon detecting a fork in the blockchain. Flosight has been crashing a lot because of this. We have updated the number of block headers to nuke to 20000
2021-04-12 15:04:07 +05:30
Sky Young
452596702a v5.0.8 2020-06-04 18:08:38 -06:00
Sky Young
0293a7acb2 Bump fcoin to v1.1.4
This fixes an issue on Regtest sync where blocks would stop at 4949
2020-06-04 18:07:33 -06:00
Sky Young
5ca1e5132b
Merge pull request #9 from oipwg/regtest-paramaters
Fix Regtest Support!
2020-01-09 12:09:21 -07:00
Sky Young
a7c8cd4563 Revert changes to header service 2020-01-09 12:02:17 -07:00
Sky Young
80912cc83f bump fcoin version 2020-01-09 11:52:03 -07:00
Sky Young
1eb2dbf29a v5.0.7 2020-01-09 09:11:32 -07:00
Sky Young
d8da495b38 Fix Regtest Support! 2020-01-08 16:14:59 -07:00
Sky Young
8e8e2310a0
Merge pull request #8 from oipwg/fcoin-1-1-2
Bump fcoin version to 1.1.2
2019-10-28 14:06:15 -06:00
Sky Young
45abcd6fa0 Bump fcoin version 2019-10-28 14:03:36 -06:00
Sky Young
c5e5d5a35e
Merge pull request #7 from oipwg/update-flosight-api-5.0.0-beta.75
Update `flosight-api` to 5.0.0 beta.75
2019-09-10 10:23:08 -06:00
Sky Young
9c19c84822 Update Mocha to fix security issue 2019-09-10 09:54:32 -06:00
Sky Young
9e3850b5c7 Update fcoin and flosight-api to latest 2019-09-10 09:52:28 -06:00
Sky Young
cbec5630d3
Merge pull request #6 from oipwg/fix-testnet-addresses
Pass network to Address.toString()
2019-08-16 10:19:38 -06:00
Sky Young
02cc97b0a1 Pass network to Address.toString() 2019-08-16 09:24:20 -06:00
Sky Young
6cd5093d4f
Merge pull request #5 from oipwg/fix-testnet-addresses
Update flosight-api to 5.0.0-beta.74
2019-08-15 17:11:49 -06:00
Sky Young
edf4b2a4f5 Update flosight-api to 5.0.0-beta.74 2019-08-15 17:10:19 -06:00
Sky Young
6e8d452174
Merge pull request #4 from oipwg/update-fcoin
Update fcoin to 1.1.0
2019-07-25 10:53:23 -06:00
Sky Young
cfc422600b Update to published npm versions 2019-07-25 10:50:34 -06:00
Sky Young
978679cad7 Pass fcoin Block template 2019-07-24 17:46:08 -06:00
Sky Young
b13b4f667b Update flocore-lib dependencies 2019-07-23 11:28:43 -06:00
Sky Young
ecf9bbaeea Properly import TX and Block from fcoin 2019-07-22 17:26:54 -06:00
Sky Young
4118f2f140 Update flocore-lib version 2019-07-22 17:25:29 -06:00
Sky Young
59d6cc0867 Handle shutdown of fcoin 2019-07-19 17:33:19 -06:00
Sky Young
53511b021d Gracefully handle SIGTERM 2019-07-19 15:58:38 -06:00
Sky Young
3a2f8d19de Update fcoin 2019-07-19 15:19:59 -06:00
Sky Young
10b25628ba
Merge pull request #3 from oipwg/5.0.1-ancestor-limit-increase
Update fcoin to use new utxo ancestor limit
2019-04-23 16:01:16 -06:00
Sky Young
416c804d8d Update fcoin to use new utxo ancestor limit 2019-04-23 16:00:25 -06:00
Sky Young
885b366422 Bump fcoin one last time :) 2018-10-18 19:26:32 -06:00
Sky Young
4dc88e2bc7 Update fcoin 2018-10-18 18:58:22 -06:00
Sky Young
be533c026f Update fcoin to fix testnet activation period 2018-10-18 18:23:51 -06:00
Sky Young
dad804eb64 Update fcoin dep again 2018-10-16 15:00:32 -06:00
Sky Young
66f2eba199 Update fcoin version 2018-10-16 14:54:40 -06:00
Sky Young
bab703a102 up dep 2018-10-04 13:48:01 -06:00
Sky Young
02006ed18c Set the network version of fcoin before importing wallet
https://github.com/oipwg/fcoin/blob/master/docs/Examples/fullnode-and-wallet.js#L2
2018-10-04 12:37:34 -06:00
Sky Young
b139c33726 use self._bcoin 2018-10-04 12:05:27 -06:00
Sky Young
1268b47d85 Startup a bcoin wallet as well 2018-10-04 12:00:57 -06:00
Sky Young
2b0815df6c plugin id is zmq not bzmq 2018-10-04 11:12:33 -06:00
Sky Young
6c6e0ad446 Add bzmq plugin to allow zmq access 2018-10-04 10:30:00 -06:00
Sky Young
06bb34123a Add config: true to fcoin config to allow use of fcoin.conf file in datadir
This is mainly to allow the setting and changing of API keys in a separate file, in case people want to access
2018-10-03 13:04:34 -06:00
Sky Young
375ee80b5d Make sure it is the initial sync to log Sync Complete
this prevents logging of Sync Complete every time a new block is added
2018-08-27 12:13:00 -07:00
Sky Young
8715aa5071 Small formatting changes 2018-08-27 12:10:09 -07:00
Sky Young
f488a02d2d Fix syncComplete calculations
The _lastHeaderCount would always be 2000 behind upon startup, even if it was synced to the last header, causing no more headers to be synced. This fixes the issue that occasionally prevented syncing after a restart (see the sketch after this entry).
2018-08-27 12:10:02 -07:00
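A rough reading of the fix described above: sync completion should be decided from the size of the header batch just received rather than from a stale _lastHeaderCount. The sketch below is a guess at the shape of that check, not the actual code:

// headers: the batch just received from a peer (at most 2000 per getheaders round)
function checkHeadersSynced(headers, requestNextBatch, onSynced) {
  if (headers.length < 2000) {
    onSynced();                              // fewer than a full batch: reached the peer's tip
  } else {
    requestNextBatch();                      // otherwise keep requesting headers
  }
}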
Sky Young
db11257a43 Update p2p http RPC port 2018-08-06 15:48:13 -07:00
Sky Young
f37a3556e3 Merge branch 'bitpay-master' 2018-08-06 11:14:03 -07:00
Sky Young
e7ef0db2cb Merge branch 'master' of https://github.com/bitpay/bitcore-node into bitpay-master 2018-08-06 11:12:20 -07:00
Sky Young
ce85a3bede Add flosight-api and flosight-ui as default packages 2018-07-28 16:19:12 -07:00
Sky Young
b081c0a5f8 Only add tx to cache if it has at least 6 transactions 2018-06-08 15:48:39 -07:00
Sky Young
224733811c Update fcoin 2018-05-21 13:35:09 -07:00
Sky Young
c75c1fad61 Update fcoin 2018-05-21 13:12:33 -07:00
Sky Young
383f9f8b12 Update packages 2018-05-18 17:55:44 -07:00
Sky Young
b04ac5466f Update version 2018-05-17 09:58:59 -07:00
Sky Young
b1f828bea9 Revert "Update P2P Ports"
This reverts commit 57bc11a592.
2018-05-17 09:53:52 -07:00
Sky Young
dc7eacb112 Update Genesis Info 2018-05-17 09:47:23 -07:00
Sky Young
d45f6aecac Update protocol version 2018-05-17 09:38:20 -07:00
Sky Young
210b2fbcd7 Replace ts with time because fcoin is based on a higher version of lcoin 2018-05-17 09:36:43 -07:00
Sky Young
799675b234 Update printed version to 5.0.0 2018-05-17 09:35:44 -07:00
Sky Young
b9e4c8a6a5 move fcoin to bcoin 2018-05-17 09:35:30 -07:00
Sky Young
57bc11a592 Update P2P Ports 2018-05-17 09:35:18 -07:00
Sky Young
7ac4d51db1 Update Genesis Hashes to Flo 2018-05-17 08:59:20 -07:00
Sky Young
6358953553 Add Flo Ports 2018-05-16 11:03:32 -07:00
Sky Young
53b970cda7 Update default config path 2018-05-16 10:42:37 -07:00
Sky Young
01ebc7a521 update insight ref to flosight 2018-05-15 15:05:27 -07:00
Sky Young
fe2e29d74a Rename bcoin to fcoin 2018-05-15 15:04:33 -07:00
Sky Young
0588a5f69d Update version number 2018-05-15 14:25:59 -07:00
Sky Young
118280090c Update fcoin package version 2018-05-15 14:25:31 -07:00
Justin Langston
8e9ecff905
Merge branch 'feature/rpc-broadcast-transaction' 2018-05-11 00:12:49 -04:00
Micah Riggan
eb637d3125
Adding rpc for broadcastRawTransaction 2018-05-10 16:32:08 -04:00
Sky Young
2a2f661529 Dependencies: Update fcoin 2018-05-08 13:57:16 -07:00
Sky Young
17a4e0f0cc Use fcoin instead of bcoin 2018-05-08 13:48:43 -07:00
Sky Young
e0e8db1d43 Rename sample json 2018-05-07 16:39:01 -07:00
Sky Young
2fed6b4768 Rename to flocore 2018-05-07 16:30:08 -07:00
Sky Young
9f7587bd71 Update Package 2018-04-30 12:13:06 -07:00
Sky Young
7c8f7f4548 Update Packages 2018-04-30 11:00:14 -07:00
Justin Langston
e65689ab5d
Merge pull request #555 from bitpay/revert-553-revert-548-opt/txlist-cache
add txIdList cache
2018-04-12 14:57:54 -04:00
Matias Alejo Garcia
34f31ac57d
Revert "Revert "add txIdList cache"" 2018-04-12 14:41:18 -03:00
Matias Alejo Garcia
76c81c1dab
Merge pull request #553 from bitpay/revert-548-opt/txlist-cache
Revert "add txIdList cache"
2018-04-12 12:30:41 -03:00
Matias Alejo Garcia
bf8f25d9eb
Revert "add txIdList cache" 2018-04-12 12:30:09 -03:00
Matias Alejo Garcia
20a3f6e9fd
Merge pull request #548 from bitpay/opt/txlist-cache
add txIdList cache
2018-04-12 12:28:31 -03:00
Matias Alejo Garcia
ddda913ccd add txIdList cache 2018-02-12 00:48:30 -03:00
Justin Langston
11612e0877
Merge pull request #547 from bitpay/bug/sort-by-hash
fix history sorting when paging results
2018-02-11 18:55:04 -05:00
Matias Alejo Garcia
0c24271833 change order to 'desc' 2018-02-11 19:47:52 -03:00
Matias Alejo Garcia
01a3df31c8 fix history sorting when paging results 2018-02-11 15:44:39 -03:00
OstlerDev
4a9658184a btc -> flo 2018-01-15 16:43:07 -08:00
Chris Kleeschulte
9db5f2bb34
Bumped version. 2017-11-22 16:02:20 -05:00
Chris Kleeschulte
0e421de897
Fixed spent status from getAddressHistory. 2017-11-22 15:56:36 -05:00
Chris Kleeschulte
6e20b78b12
Bumped version. 2017-11-13 14:22:15 -05:00
Chris Kleeschulte
1dd17c92f2
Bumped version. 2017-11-13 09:54:02 -05:00
Chris Kleeschulte
aa44f2e18c
Bumped version. 2017-11-09 21:59:07 -05:00
Chris Kleeschulte
18a52c214d
Added a reorg to block. 2017-11-09 21:53:37 -05:00
Chris Kleeschulte
262cf8b850
Bumped version. 2017-11-09 17:14:13 -05:00
Chris Kleeschulte
bde062e744
wip on changing subscriptions endpoints. 2017-11-09 11:45:00 -05:00
Chris Kleeschulte
b138a558ae
Merge branch 'master' of github.com:kleetus/bitcore-node 2017-11-08 19:04:10 -05:00
Chris Kleeschulte
9bdd2aa86d
Fixed sorting and filtering problem. 2017-11-08 19:01:13 -05:00
Chris Kleeschulte
ab2163ffdd
Bumped version. 2017-11-08 09:38:58 -05:00
Chris Kleeschulte
af23a8c6e2
Removed console.log 2017-11-08 09:37:25 -05:00
Chris Kleeschulte
749b962085
Bumped version. 2017-11-07 21:40:24 -05:00
Chris Kleeschulte
9a00622de4
Fixed getAddressHistory. 2017-11-07 21:37:54 -05:00
Chris Kleeschulte
b701ab31cf
Bumped version. 2017-11-07 15:39:52 -05:00
Chris Kleeschulte
9a1f1db523
Fixed ordering of txs. 2017-11-07 15:36:38 -05:00
Chris Kleeschulte
b9b819691a
Bumped version. 2017-11-07 13:24:52 -05:00
Chris Kleeschulte
6b8129fdb3
Fixed getAddressHistory. 2017-11-07 00:30:26 -05:00
Chris Kleeschulte
a808573a9d
Bumped version. 2017-11-06 19:34:49 -05:00
Chris Kleeschulte
c2117146c1
Fixed where next is being called. 2017-11-06 19:32:00 -05:00
Chris Kleeschulte
7db4263bf2
Bumped version. 2017-11-06 19:15:58 -05:00
Chris Kleeschulte
b1fe37c260
Fixed missed call to next. 2017-11-06 19:08:35 -05:00
Chris Kleeschulte
5d0923a9d1
Bumped version. 2017-11-05 18:33:40 -05:00
Chris Kleeschulte
74d3c0212b
Improved getAddressHistory. 2017-11-05 18:14:19 -05:00
Chris Kleeschulte
9ee2c16178
Improved getAddressHistory 2017-11-05 17:32:17 -05:00
Chris Kleeschulte
d55986be57
Fixed tests. 2017-11-05 07:57:27 -05:00
Chris Kleeschulte
00a5ea65b4
Removed unneeded indexes. 2017-11-04 17:47:29 -04:00
Chris Kleeschulte
765b7288a7
wip on fix for txs list. 2017-11-03 18:23:42 -04:00
Chris Kleeschulte
cdec5b4596
Bumped version. 2017-11-02 17:44:01 -04:00
Chris Kleeschulte
a63761d0b2
Fixed call to map series. 2017-11-02 17:41:50 -04:00
Chris Kleeschulte
c1056dba47
Bumped version. 2017-11-02 13:49:25 -04:00
Chris Kleeschulte
b9887492bf
Removed retrieving the mempool right after sync. Need to re-think this. 2017-11-02 13:46:26 -04:00
Chris Kleeschulte
e292697a24
Bumped version. 2017-11-02 12:03:00 -04:00
Chris Kleeschulte
725b58cd15
Fixed finding txs after sending. 2017-11-02 11:59:06 -04:00
Chris Kleeschulte
f604e4ca94
Bumped version. 2017-11-01 16:03:08 -04:00
Chris Kleeschulte
baf75b2d60
Fixed confirmations. 2017-11-01 16:00:39 -04:00
Chris Kleeschulte
8abb9fe4cf
Bumped version. 2017-10-30 15:40:47 -04:00
Chris Kleeschulte
bc8dee5810
Fixed waiting for blocks to finish processing before processing a reorg. 2017-10-30 15:30:01 -04:00
Chris Kleeschulte
6a2c6c8d4f
Bumped version. 2017-10-26 17:53:21 -04:00
Chris Kleeschulte
5e1e67d4e6
Fixed variable initialization. 2017-10-26 17:50:41 -04:00
Chris Kleeschulte
f1356bf5aa
Bumped version. 2017-10-26 17:12:46 -04:00
Chris Kleeschulte
0a4e0dd9fd
Fixed tests and repaired reorg logic. 2017-10-26 15:35:01 -04:00
Chris Kleeschulte
299b905d5b
wip on reorg changes. 2017-10-26 09:59:39 -04:00
Chris Kleeschulte
e9d1e7d1a9
Fixed test. 2017-10-25 15:19:42 -04:00
Chris Kleeschulte
2478795213
Disable mempool until system is synchronized. 2017-10-25 15:17:11 -04:00
Chris Kleeschulte
f6bbfa8b07
Bumped version. 2017-10-24 17:49:06 -04:00
Chris Kleeschulte
ad650c383e
Added a block cache to speed up syncing. 2017-10-24 17:46:34 -04:00
Chris Kleeschulte
cb06d8a3cb
Bumped version. 2017-10-24 14:28:23 -04:00
Chris Kleeschulte
99adf07290
Chnaged reorg to use cache hashes. 2017-10-24 14:25:34 -04:00
Chris Kleeschulte
497d6e6bc9
Bump version. 2017-10-20 13:18:42 -04:00
Chris Kleeschulte
4f46947516
Fixed tx api call. 2017-10-20 10:04:33 -04:00
Chris Kleeschulte
e1305c7496
Fixed tests. 2017-10-20 09:25:51 -04:00
Chris Kleeschulte
9f4ebfb1f9
Fixed mempool search by address. 2017-10-20 07:56:48 -04:00
Chris Kleeschulte
b129bc1048
WIP 2017-10-19 19:32:20 -04:00
Chris Kleeschulte
f90e0d2ed5
Removed header interval. 2017-10-19 15:05:38 -04:00
Chris Kleeschulte
007f69d908
Bumped version. 2017-10-18 19:15:54 -04:00
Chris Kleeschulte
98be272925
Added new index for spent txs. 2017-10-18 19:03:04 -04:00
Chris Kleeschulte
5a5dbb624c
Adding in spent status. 2017-10-17 19:39:14 -04:00
Chris Kleeschulte
880f98e669
Fixed pagination for getAddressHistory. 2017-10-17 17:46:11 -04:00
Chris Kleeschulte
0dc69d87af
Added noTxList suppert for getAddressSumaary. 2017-10-17 14:41:54 -04:00
Chris Kleeschulte
22678e3838
Added tx cache. 2017-10-17 14:24:12 -04:00
Chris Kleeschulte
18bb501547
Bumped version. 2017-10-12 19:09:19 -04:00
Chris Kleeschulte
19019e8156
Bumped version. 2017-10-12 18:00:52 -04:00
Chris Kleeschulte
e15695cfa6
Fixed get utxos to retrieve mempool txs. 2017-10-12 17:53:24 -04:00
Chris Kleeschulte
929041b9b6
Bumped version. 2017-10-12 15:08:33 -04:00
Chris Kleeschulte
1af9c07bfa
Fixed issue with query mempool option. 2017-10-12 15:06:13 -04:00
Chris Kleeschulte
8b1099986b
Bumped version and set explicit versions of levelup and leveldown. 2017-10-11 17:22:44 -04:00
Chris Kleeschulte
cecc0592a8
Bumped version. 2017-10-11 16:39:48 -04:00
Chris Kleeschulte
93db6790e2
Repairs to getAddressSummary. 2017-10-11 16:38:28 -04:00
Chris Kleeschulte
7da189acf9
Added warning about re-querying for sync blocks. 2017-10-10 14:10:21 -04:00
Chris Kleeschulte
316ce66c87
Fixing reorg. 2017-10-10 10:55:20 -04:00
Chris Kleeschulte
5007104a4f
Fixed mempool gathering. 2017-10-09 16:24:25 -04:00
Chris Kleeschulte
fba71ee1aa
Fixed get raw tx calls that failed when input values were also in
mempool.
2017-10-09 13:37:10 -04:00
Chris Kleeschulte
a6a123a62d
Bumped version. 2017-10-08 15:44:01 -04:00
Chris Kleeschulte
ce653b5a12
Fixed getting txs from memory pool from calls to getAddressHistory. 2017-10-08 14:15:01 -04:00
Chris Kleeschulte
f10106f9a0
Added sort options to get address history. 2017-10-07 15:48:11 -04:00
Chris Kleeschulte
1d7c998468
Part deux. 2017-10-07 15:36:44 -04:00
Chris Kleeschulte
0007848c07
Fixed logging timing issue. 2017-10-07 15:22:28 -04:00
Chris Kleeschulte
aa6570d23a
Fixed issue with missing header on log entry. 2017-10-07 14:18:14 -04:00
Chris Kleeschulte
d7fb9e9c27
Fixed tests. 2017-10-06 15:22:14 -04:00
Chris Kleeschulte
7c392e9c94
Added locating double spend txs. 2017-10-06 10:30:26 -04:00
Chris Kleeschulte
1e04e08411
Added search memory pool for txs with certain addresses. 2017-10-06 10:02:38 -04:00
Chris Kleeschulte
c7c268f00a
Adding search mempool by address. 2017-10-05 16:18:16 -04:00
Chris Kleeschulte
67ce58d698
Fixed logging issues. 2017-10-04 18:30:22 -04:00
Chris Kleeschulte
1f4c5e5e1f
Fixing recently sent items. 2017-10-04 08:34:52 -04:00
Chris Kleeschulte
d81c1b9966
Bumped version. 2017-10-02 16:37:36 -04:00
Chris Kleeschulte
b8bc017136
Fixed reorg where we don't have all the previous blocks. 2017-10-02 10:13:33 -04:00
Chris Kleeschulte
ffa63fc146
Added reorg fixes. 2017-10-01 19:15:20 -04:00
Chris Kleeschulte
8360336ad2
Fixed start up of block service. 2017-09-28 22:01:14 -04:00
Chris Kleeschulte
f47b43754c
Fixed reorg. 2017-09-28 19:25:58 -04:00
Chris Kleeschulte
09b365772c
more reorg stuff. 2017-09-28 11:01:57 -04:00
Chris Kleeschulte
67c2c07ae5
Bumped version, 2017-09-27 14:08:14 -04:00
Chris Kleeschulte
74542a3e7f
Changed to a more traditional reorg detection. 2017-09-27 14:06:36 -04:00
Chris Kleeschulte
0c75879084 Fixed for getDetailedTransaction. 2017-09-27 10:21:52 -04:00
Chris Kleeschulte
2a1af1e93f Merge pull request #523 from nitsujlangston/gentlerDbClose
wait for db to gracefully close before stopping db service
2017-09-27 10:08:56 -04:00
Justin Langston
2dc0764950
stop db service if/when closed 2017-09-26 20:54:36 -04:00
Chris Kleeschulte
2726c7eb86
Fixed a few routes. 2017-09-26 19:40:25 -04:00
Chris Kleeschulte
3c4aff4027
Bumped version 2017-09-26 12:25:14 -04:00
Chris Kleeschulte
ee97cb5b12
Added fixes for reorg and added time since last block. 2017-09-26 11:05:24 -04:00
Chris Kleeschulte
f5ad8b89fb
Added next hash support in header service. 2017-09-25 21:21:05 -04:00
Chris Kleeschulte
b8e73ae238
Fixed sanity check when block height is zero. 2017-09-25 15:45:15 -04:00
Chris Kleeschulte
3383771b10
Added better reporting when new blocks come in. 2017-09-25 14:51:33 -04:00
Chris Kleeschulte
dcf1426221
Added a reset tip routine. 2017-09-25 14:31:07 -04:00
Chris Kleeschulte
d299856fdc
Bumped version. 2017-09-24 12:35:56 -04:00
Chris Kleeschulte
a5f9d1a6d0
Fixed edge case reorg issues. 2017-09-24 12:34:27 -04:00
Chris Kleeschulte
b160814706
Clean up. 2017-09-22 14:16:02 -04:00
Chris Kleeschulte
a1db879fdc
Fixed tests. 2017-09-22 13:31:52 -04:00
Chris Kleeschulte
ada997c8bd
Added reorg test. 2017-09-21 17:05:09 -04:00
Chris Kleeschulte
d5e5904329
Added more checks. 2017-09-19 11:03:39 -04:00
Chris Kleeschulte
66e82a3fe7
Fixed edge case where new peer has unexpectedly low number of blocks. 2017-09-19 08:48:49 -04:00
Chris Kleeschulte
de163ad4c9
Fixed issues with reorg again. 2017-09-14 10:10:07 -04:00
Chris Kleeschulte
7350fb3bf6
Fixed reorg edge case scenario. 2017-09-13 11:02:48 -04:00
Chris Kleeschulte
6a18c1e46e
Fixed issue with reorg. 2017-09-11 15:41:27 -04:00
Chris Kleeschulte
daa89f3086
Bumped version. 2017-09-08 14:21:04 -04:00
Chris Kleeschulte
82dd1dfe3f
Fixed reorging. 2017-09-08 14:18:55 -04:00
Chris Kleeschulte
02ff6c680c
Fixed sync resume (memory issues).
Input values on tx index are no longer lazy loaded.
2017-09-07 18:55:33 -04:00
Chris Kleeschulte
55ebc03602 Bumped version. 2017-09-01 16:34:22 -04:00
Chris Kleeschulte
3dd9aea3dd
Fixed issue where new blocks were not indexed by header service. 2017-09-01 15:57:03 -04:00
Chris Kleeschulte
99d8a6f7ae
Usability upgrades:
- The node will start services before bcoin is fully synchronized
- Resume after peer disconnection and reconnection
- Error message when using unsupported versions of Node.js
2017-08-31 15:26:59 -04:00
Chris Kleeschulte
511c0e2b54
Bumped the version. 2017-08-30 18:59:34 -04:00
Chris Kleeschulte
a0031c7e00
Upgrades
- Updated to the latest bcoin
- Gave the db a bit more time to shut down
- Fixed resume functionality if a peer disconnects and reconnects later
2017-08-30 18:21:51 -04:00
Chris Kleeschulte
4e94a374c1
Cleanup on aisle package.json. 2017-08-29 17:10:50 -04:00
Chris Kleeschulte
ef03d05ad2
Bumped version. 2017-08-29 16:22:00 -04:00
Chris Kleeschulte
4bd3a06edc
Fixed block sync where blocks come in all at once. 2017-08-29 16:01:11 -04:00
Chris Kleeschulte
7a8bae64a4
wip 2017-08-28 20:05:00 -04:00
Chris Kleeschulte
4ebb7ac569
Added 'listenAddr' to p2p options. 2017-08-28 15:14:52 -04:00
Chris Kleeschulte
678b694a58
Removed regtests; they will be added to insight-api. 2017-08-28 11:19:36 -04:00
Chris Kleeschulte
7c34a160a9
Added status tests. 2017-08-25 18:03:27 -04:00
Chris Kleeschulte
69f239a58b
Added regtests for block, tx. 2017-08-25 17:34:20 -04:00
Chris Kleeschulte
f0360a78a0
Fixed a problem with address balances. 2017-08-25 10:36:00 -04:00
Chris Kleeschulte
678cb83d61 Fixed tests for altered getAddressHistory and getAddressSummary calls. 2017-08-24 09:50:29 -04:00
Chris Kleeschulte
a4cffe3bae
Removed global install directions. 2017-08-21 16:21:55 -04:00
Chris Kleeschulte
d336918dc7
Fixed README. 2017-08-21 16:10:23 -04:00
Chris Kleeschulte
0ee70e7565
Removed the link about upgrading from previous versions of bitcore node.
Moved it to a section at the top.
2017-08-21 13:21:24 -04:00
Chris Kleeschulte
fc6ee8e193 Fixed README for this release. 2017-08-21 11:28:45 -04:00
Jason Dreyzehner
3a867824d6 chore(release): 5.0.0-beta.2 2017-08-18 19:42:19 -04:00
Jason Dreyzehner
e87f97f809 fix(package): add bn.js to deps 2017-08-18 19:39:17 -04:00
Jason Dreyzehner
abd21a5b08 fix(start): remove stray log (which looks like an error) 2017-08-18 19:38:01 -04:00
Jason Dreyzehner
7e34c411f6 chore(release): 5.0.0-beta.1 2017-08-18 19:17:10 -04:00
Chris Kleeschulte
b89864dc4b
Merge branch 'blocks' 2017-08-18 18:30:52 -04:00
90 changed files with 8335 additions and 4327 deletions

7
.gitignore vendored
View File

@ -21,13 +21,16 @@ coverage/*
**/*.config
**/*.creator
*.log
*.tmp
*.tmp.*
.DS_Store
bin/bitcoin*
bin/florincoin*
bin/SHA256SUMS.asc
regtest/data/node1/regtest
regtest/data/node2/regtest
regtest/data/node3/regtest
bitcore-node.json*
flocore-node.json*
*.bak
*.orig
lib/services/insight-api
testnet/*

33
.npmignore Normal file
View File

@ -0,0 +1,33 @@
node_modules/
node_modules/*
coverage/*
.lock-wscript
*.swp
*.Makefile
*.target.gyp.mk
*.node
*.sln
*.sdf
*.vcxproj
*.suo
*.opensdf
*.filters
*.user
*.project
**/*.dylib
**/*.so
**/*.old
**/*.files
**/*.config
**/*.creator
*.log
.DS_Store
bin/florincoin*
bin/SHA256SUMS.asc
regtest/data/node1/regtest
regtest/data/node2/regtest
regtest/data/node3/regtest
flocore-node.json*
*.bak
*.orig
lib/services/insight-api

View File

@ -3,8 +3,3 @@ sudo: false
language: node_js
node_js:
- 8
script:
- npm run coverage
- npm run jshint
after_success:
- npm run coveralls

View File

@ -1,7 +1,7 @@
Copyright (c) 2014-2015 BitPay, Inc.
Parts of this software are based on Bitcoin Core
Copyright (c) 2009-2015 The Bitcoin Core developers
Parts of this software are based on Florincoin Core
Copyright (c) 2009-2015 The Florincoin Core developers
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@ -1,51 +1,57 @@
Bitcore Node
Flocore Node
============
A Bitcoin full node for building applications and services with Node.js. A node is extensible and can be configured to run additional services. At the minimum a node has an interface to [Bitcoin Core with additional indexing](https://github.com/bitpay/bitcoin/tree/0.12.1-bitcore) for more advanced address queries. Additional services can be enabled to make a node more useful such as exposing new APIs, running a block explorer and wallet service.
A Florincoin blockchain indexing and query service. Intended to be used as a Florincoin full node or in conjunction with a Florincoin full node.
## Upgrading from previous versions of Flocore Node
There is no upgrade path from previous versions of Flocore Node due to the removal of the included Florincoin Core software. By installing this version, you must resynchronize the indexes from scratch.
## Install
```bash
npm install -g bitcore-node
bitcore-node start
npm install
./bin/flocore-node start
```
Note: For your convenience, we distribute bitcoind binaries for x86_64 Linux and x86_64 Mac OS X. Upon npm install, the binaries for your platform will be downloaded. For more detailed installation instructions, or if you want to compile the project yourself, then please see the Bitcore branch of [Bitcoin Core with additional indexing](https://github.com/bitpay/bitcoin/tree/0.12.1-bitcore).
Note: A default configuration file is placed in the flocore user's home directory (~/.flocore/flocore-node.json). Or, alternatively, you can copy the provided "flocore-node.json.sample" file to the project's root directory as flocore-node.json and edit it for your preferences. If you don't have a preferred block source (trusted peer), [Bcoin](https://github.com/bcoin-org/bcoin) will be started automatically and synchronized with the mainnet chain.
## Prerequisites
- GNU/Linux x86_32/x86_64, or OSX 64bit *(for bitcoind distributed binaries)*
- Node.js v0.10, v0.12 or v4
- ZeroMQ *(libzmq3-dev for Ubuntu/Debian or zeromq on OSX)*
- ~200GB of disk storage
- ~8GB of RAM
- Node.js v8.2.0+
- ~500GB of disk storage
- ~4GB of RAM
## Configuration
Bitcore includes a Command Line Interface (CLI) for managing, configuring and interfacing with your Bitcore Node.
The main configuration file is called "flocore-node.json". This file instructs flocore-node for the following options:
```bash
bitcore-node create -d <bitcoin-data-dir> mynode
cd mynode
bitcore-node install <service>
bitcore-node install https://github.com/yourname/helloworld
```
This will create a directory with configuration files for your node and install the necessary dependencies. For more information about (and developing) services, please see the [Service Documentation](docs/services.md).
- location of database files (datadir)
- tcp port for web services, if configured (port)
- florincoin network type (e.g. mainnet, testnet3, regtest), (network)
- what services to include (services)
- the services' configuration (servicesConfig)
## Add-on Services
There are several add-on services available to extend the functionality of Bitcore:
There are several add-on services available to extend the functionality of Flocore:
- [Insight API](https://github.com/bitpay/insight-api)
- [Insight UI](https://github.com/bitpay/insight-ui)
- [Bitcore Wallet Service](https://github.com/bitpay/bitcore-wallet-service)
- [Flocore Wallet Service](https://github.com/bitpay/flocore-wallet-service)
## Documentation
- [Upgrade Notes](docs/upgrade.md)
- [Services](docs/services.md)
- [Bitcoind](docs/services/bitcoind.md) - Interface to Bitcoin Core
- [Fee](docs/services/fee.md) - Creates a service to handle fee queries
- [Header](docs/services/header.md) - Creates a service to handle block headers
- [Block](docs/services/block.md) - Creates a service to handle blocks
- [Transaction](docs/services/transaction.md) - Creates a service to handle transactions
- [Address](docs/services/address.md) - Creates a service to handle addresses
- [Mempool](docs/services/mempool.md) - Creates a service to handle mempool
- [Timestamp](docs/services/timestamp.md) - Creates a service to handle timestamp
- [Db](docs/services/db.md) - Creates a service to handle the database
- [p2p](docs/services/p2p.md) - Creates a service to handle the peer-to-peer network
- [Web](docs/services/web.md) - Creates an express application over which services can expose their web/API content
- [Development Environment](docs/development.md) - Guide for setting up a development environment
- [Node](docs/node.md) - Details on the node constructor
@ -54,12 +60,12 @@ There are several add-on services available to extend the functionality of Bitco
## Contributing
Please send pull requests for bug fixes, code optimization, and ideas for improvement. For more information on how to contribute, please refer to our [CONTRIBUTING](https://github.com/bitpay/bitcore/blob/master/CONTRIBUTING.md) file.
Please send pull requests for bug fixes, code optimization, and ideas for improvement. For more information on how to contribute, please refer to our [CONTRIBUTING](https://github.com/bitpay/flocore/blob/master/CONTRIBUTING.md) file.
## License
Code released under [the MIT license](https://github.com/bitpay/bitcore-node/blob/master/LICENSE).
Code released under [the MIT license](https://github.com/bitpay/flocore-node/blob/master/LICENSE).
Copyright 2013-2015 BitPay, Inc.
Copyright 2013-2017 BitPay, Inc.
- bitcoin: Copyright (c) 2009-2015 Bitcoin Core Developers (MIT License)
- florincoin: Copyright (c) 2009-2015 Florincoin Core Developers (MIT License)

View File

@ -1,4 +0,0 @@
#!/usr/bin/env node
var bitcore = require('../lib/cli/bitcore');
bitcore();

4
bin/flocore-node Executable file
View File

@ -0,0 +1,4 @@
#!/usr/bin/env node
var flocore = require('../lib/cli/flocore');
flocore();

View File

@ -1,22 +0,0 @@
{
"network": "livenet",
"port": 3001,
"datadir": "/tmp",
"services": [
"p2p",
"db",
"header",
"block",
"transaction",
"timestamp",
"mempool",
"address"
],
"servicesConfig": {
"p2p": {
"peers": [
{ "ip": { "v4": "<some trusted full node>" } }
]
}
}
}

1
contrib/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
*.json

View File

@ -0,0 +1,32 @@
'use strict';
var request = require('request');
var config = require('./config.json');
// each of those addresses has a large number of utxos
// we are going to act like this group of addresses is our wallet, this ought to be fun!
process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0';
var url = config.txs.new;
if (process.argv[2] === 'old') {
url = config.txs.old;
}
console.log(url);
var options = {
url: url,
method: 'POST',
qs: { from: 0, to: 5, noAsm: 1, noScriptSig: 1, noSpent: 1 },
json: { addrs: config.addrs }
};
request(options, function(err, response, body) {
console.log(body);
});

63
contrib/getUtxos.js Normal file
View File

@ -0,0 +1,63 @@
'use strict';
// pulls some rando utxos that can be used for testing
var levelup = require('levelup');
var leveldown = require('leveldown');
var Encoding = require('../lib/services/address/encoding');
var fs = require('fs');
var outputFile = '/tmp/large_amounts_utxos.json';
var addresses = [];
var dbLocation = process.argv[2];
console.log('Using db location: ', dbLocation);
var addressPrefix = new Buffer('0006', 'hex');
var startAddress = new Array(35).join('0');
var endAddress = new Array(35).join('f');
var store = levelup(leveldown(dbLocation), {
keyEncoding: 'binary',
valueEncoding: 'binary'
});
var encoding = new Encoding(addressPrefix);
var start = encoding.encodeUtxoIndexKey(startAddress);
var end = encoding.encodeUtxoIndexKey(endAddress);
var res = {};
var limit = 18000000;
var count = 0;
var stream = store.createReadStream({
gte: start,
lte: end
});
stream.on('data', function(data) {
count++;
limit--;
if (limit <= 0) {
stream.emit('end');
}
var key = encoding.decodeUtxoIndexKey(data.key);
if (res[key.address] >= 1) {
res[key.address]++;
} else {
res[key.address] = 1;
}
});
stream.on('end', function() {
Object.keys(res).map(function(key) {
if (res[key] > 1000) {
addresses.push(key);
}
});
fs.writeFileSync(outputFile, JSON.stringify(addresses));
console.log('total utxo count: ', count);
console.log('done');
});

View File

@ -3,8 +3,8 @@
var levelup = require('levelup');
var leveldown = require('leveldown');
var Encoding = require('../lib/services/address/encoding');
var dbPath = '/Users/chrisk/.bwdb/bitcore-node.db';
var bitcore = require('bitcore-lib');
var dbPath = '/Users/chrisk/.bwdb/flocore-node.db';
var flocore = require('flocore-lib');
var db = levelup(dbPath, {keyEncoding: 'binary', valueEncoding: 'binary'});
var prefix = new Buffer('0002', 'hex');
@ -33,7 +33,7 @@ stream.on('data', function(data) {
for(var i = 0; i < inputValuesLength / 8; i++) {
inputValues.push(buffer.readDoubleBE(i * 8 + 14));
}
var transaction = new bitcore.Transaction(data.value.slice(inputValues.length * 8 + 14));
var transaction = new flocore.Transaction(data.value.slice(inputValues.length * 8 + 14));
transaction.__height = height;
transaction.__inputValues = inputValues;
transaction.__timestamp = timestamp;

View File

@ -6,6 +6,6 @@
# e.g. ./contrib/restart_bwdb.sh && tail -f /tmp/bwdb-out
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
pkill -2 -x bitcore
pkill -2 -x flocore
wait
exec $DIR/../bin/bitcore-node start >> /tmp/bwdb-out 2>&1 &
exec $DIR/../bin/flocore-node start >> /tmp/bwdb-out 2>&1 &

View File

@ -4,14 +4,14 @@ Requires=network.target
[Service]
Type=simple
WorkingDirectory=/usr/opt/bitcore
WorkingDirectory=/usr/opt/flocore
ExecStart=/usr/bin/bwdb
ExecReload=/bin/kill -HUP $MAINPID
Restart=on-failure
RestartSec=15
User=bitcore
User=flocore
ExecStartPre=/bin/mkdir -p /run/bwdb
ExecStartPre=/bin/chown bitcore:bitcore /run/bwdb
ExecStartPre=/bin/chown flocore:flocore /run/bwdb
ExecStartPre=/bin/chmod 755 /run/bwdb
PermissionsStartOnly=true
TimeoutStopSec=300

View File

@ -1,5 +1,5 @@
# Bus
The bus provides a way to subscribe to events from any of the services running. It's implemented abstract from transport specific implementation. The primary use of the bus in Bitcore Node is for subscribing to events via a web socket.
The bus provides a way to subscribe to events from any of the services running. It's implemented abstract from transport specific implementation. The primary use of the bus in Flocore Node is for subscribing to events via a web socket.
## Opening/Closing
@ -20,11 +20,11 @@ bus.close();
```javascript
// subscribe to all transaction events
bus.subscribe('bitcoind/rawtransaction');
bus.subscribe('florincoind/rawtransaction');
// to subscribe to new block hashes
bus.subscribe('bitcoind/hashblock');
bus.subscribe('florincoind/hashblock');
// unsubscribe
bus.unsubscribe('bitcoind/rawtransaction');
bus.unsubscribe('florincoind/rawtransaction');
```

View File

@ -10,22 +10,22 @@ nvm install v4
## Fork and Download Repositories
To develop bitcore-node:
To develop flocore-node:
```bash
cd ~
git clone git@github.com:<yourusername>/bitcore-node.git
git clone git@github.com:<yourusername>/bitcore-lib.git
git clone git@github.com:<yourusername>/flocore-node.git
git clone git@github.com:<yourusername>/flocore-lib.git
```
To develop bitcoin or to compile from source:
To develop florincoin or to compile from source:
```bash
git clone git@github.com:<yourusername>/bitcoin.git
git clone git@github.com:<yourusername>/florincoin.git
git fetch origin <branchname>:<branchname>
git checkout <branchname>
```
**Note**: See bitcoin documentation for building bitcoin on your platform.
**Note**: See florincoin documentation for building florincoin on your platform.
## Install Development Dependencies
@ -46,27 +46,27 @@ brew install zeromq
## Install and Symlink
```bash
cd bitcore-lib
cd flocore-lib
npm install
cd ../bitcore-node
cd ../flocore-node
npm install
```
**Note**: If you get a message about not being able to download bitcoin distribution, you'll need to compile bitcoind from source, and setup your configuration to use that version.
**Note**: If you get a message about not being able to download florincoin distribution, you'll need to compile florincoind from source, and setup your configuration to use that version.
We now will setup symlinks in `bitcore-node` *(repeat this for any other modules you're planning on developing)*:
We now will setup symlinks in `flocore-node` *(repeat this for any other modules you're planning on developing)*:
```bash
cd node_modules
rm -rf bitcore-lib
ln -s ~/bitcore-lib
rm -rf bitcoind-rpc
ln -s ~/bitcoind-rpc
rm -rf flocore-lib
ln -s ~/flocore-lib
rm -rf florincoind-rpc
ln -s ~/florincoind-rpc
```
And if you're compiling or developing bitcoin:
And if you're compiling or developing florincoin:
```bash
cd ../bin
ln -sf ~/bitcoin/src/bitcoind
ln -sf ~/florincoin/src/florincoind
```
## Run Tests
@ -78,19 +78,19 @@ npm install mocha -g
To run all test suites:
```bash
cd bitcore-node
cd flocore-node
npm run regtest
npm run test
```
To run a specific unit test in watch mode:
```bash
mocha -w -R spec test/services/bitcoind.unit.js
mocha -w -R spec test/services/florincoind.unit.js
```
To run a specific regtest:
```bash
mocha -R spec regtest/bitcoind.js
mocha -R spec regtest/florincoind.js
```
## Running a Development Node
@ -102,27 +102,27 @@ cd ~
mkdir devnode
cd devnode
mkdir node_modules
touch bitcore-node.json
touch flocore-node.json
touch package.json
```
Edit `bitcore-node.json` with something similar to:
Edit `flocore-node.json` with something similar to:
```json
{
"network": "livenet",
"port": 3001,
"services": [
"bitcoind",
"florincoind",
"web",
"insight-api",
"insight-ui",
"<additional_service>"
],
"servicesConfig": {
"bitcoind": {
"florincoind": {
"spawn": {
"datadir": "/home/<youruser>/.bitcoin",
"exec": "/home/<youruser>/bitcoin/src/bitcoind"
"datadir": "/home/<youruser>/.florincoin",
"exec": "/home/<youruser>/florincoin/src/florincoind"
}
}
}
@ -135,13 +135,13 @@ Setup symlinks for all of the services and dependencies:
```bash
cd node_modules
ln -s ~/bitcore-lib
ln -s ~/bitcore-node
ln -s ~/flocore-lib
ln -s ~/flocore-node
ln -s ~/insight-api
ln -s ~/insight-ui
```
Make sure that the `<datadir>/bitcoin.conf` has the necessary settings, for example:
Make sure that the `<datadir>/florincoin.conf` has the necessary settings, for example:
```
server=1
whitelist=127.0.0.1
@ -152,11 +152,11 @@ spentindex=1
zmqpubrawtx=tcp://127.0.0.1:28332
zmqpubhashblock=tcp://127.0.0.1:28332
rpcallowip=127.0.0.1
rpcuser=bitcoin
rpcuser=florincoin
rpcpassword=local321
```
From within the `devnode` directory with the configuration file, start the node:
```bash
../bitcore-node/bin/bitcore-node start
../flocore-node/bin/flocore-node start
```

View File

@ -14,17 +14,17 @@ A node represents a collection of services that are loaded together. For more in
```js
var index = require('bitcore-node');
var Bitcoin = index.services.Bitcoin;
var index = require('flocore-node');
var Florincoin = index.services.Florincoin;
var Node = index.Node;
var configuration = {
datadir: '/home/user/.bitcoin',
datadir: '/home/user/.florincoin',
network: 'testnet',
services: [
{
name: 'bitcoind',
module: Bitcoin,
name: 'florincoind',
module: Florincoin,
config: {}
}
]
@ -37,7 +37,7 @@ node.start(function() {
});
node.on('ready', function() {
console.log('Bitcoin Node Ready');
console.log('Florincoin Node Ready');
});
node.on('error', function(err) {

View File

@ -1,10 +1,10 @@
# Release Process
Binaries for bitcoind are distributed for convenience and built deterministically with Gitian, signatures for bitcoind are located at the [gitian.sigs](https://github.com/bitpay/gitian.sigs) respository.
Binaries for florincoind are distributed for convenience and built deterministically with Gitian, signatures for florincoind are located at the [gitian.sigs](https://github.com/bitpay/gitian.sigs) respository.
## How to Release
When publishing to npm, the .gitignore file is used to exclude files from the npm publishing process. Be sure that the bitcore-node directory has only the directories and files that you would like to publish to npm. You might need to run the commands below on each platform that you intend to publish (e.g. Mac and Linux).
When publishing to npm, the .gitignore file is used to exclude files from the npm publishing process. Be sure that the flocore-node directory has only the directories and files that you would like to publish to npm. You might need to run the commands below on each platform that you intend to publish (e.g. Mac and Linux).
To make a release, bump the `version` of the `package.json`:

View File

@ -1,20 +1,20 @@
# Scaffold
A collection of functions for creating, managing, starting, stopping and interacting with a Bitcore node.
A collection of functions for creating, managing, starting, stopping and interacting with a Flocore node.
## Install
This function will add a service to a node by installing the necessary dependencies and modifying the `bitcore-node.json` configuration.
This function will add a service to a node by installing the necessary dependencies and modifying the `flocore-node.json` configuration.
## Start
This function will load a configuration file `bitcore-node.json` and instantiate and start a node based on the configuration.
This function will load a configuration file `flocore-node.json` and instantiate and start a node based on the configuration.
## Find Config
This function will recursively find a configuration `bitcore-node.json` file in parent directories and return the result.
This function will recursively find a configuration `flocore-node.json` file in parent directories and return the result.
## Default Config
This function will return a default configuration with the default services based on environment variables, and will default to using the standard `/home/user/.bitcoin` data directory.
This function will return a default configuration with the default services based on environment variables, and will default to using the standard `/home/user/.florincoin` data directory.
## Uninstall
This function will remove a service from a node by uninstalling the necessary dependencies and modifying the `bitcore-node.json` configuration.
This function will remove a service from a node by uninstalling the necessary dependencies and modifying the `flocore-node.json` configuration.
## Call Method
This function will call an API method on a node via the JSON-RPC interface.

View File

@ -1,16 +1,16 @@
# Services
Bitcore Node has a service module system that can start up additional services that can include additional:
Flocore Node has a service module system that can start up additional services that can include additional:
- Blockchain indexes (e.g. querying balances for addresses)
- API methods
- HTTP routes
- Event types to publish and subscribe
The `bitcore-node.json` file describes which services will load for a node:
The `flocore-node.json` file describes which services will load for a node:
```json
{
"services": [
"bitcoind", "web"
"florincoind", "web"
]
}
```
@ -20,36 +20,36 @@ Services correspond with a Node.js module as described in 'package.json', for ex
```json
{
"dependencies": {
"bitcore-lib": "^0.13.7",
"bitcore-node": "^0.2.0",
"flocore-lib": "^0.13.7",
"flocore-node": "^0.2.0",
"insight-api": "^3.0.0"
}
}
```
_Note:_ If you already have a bitcore-node database, and you want to query data from previous blocks in the blockchain, you will need to reindex. Reindexing right now means deleting your bitcore-node database and resyncing.
_Note:_ If you already have a flocore-node database, and you want to query data from previous blocks in the blockchain, you will need to reindex. Reindexing right now means deleting your flocore-node database and resyncing.
## Using Services Programmatically
If, instead, you would like to run a custom node, you can include services by including them in your configuration object when initializing a new node.
```js
//Require bitcore
var bitcore = require('bitcore-node');
//Require flocore
var flocore = require('flocore-node');
//Services
var Bitcoin = bitcore.services.Bitcoin;
var Web = bitcore.services.Web;
var Florincoin = flocore.services.Florincoin;
var Web = flocore.services.Web;
var myNode = new bitcore.Node({
var myNode = new flocore.Node({
network: 'regtest'
services: [
{
name: 'bitcoind',
module: Bitcoin,
name: 'florincoind',
module: Florincoin,
config: {
spawn: {
datadir: '/home/<username>/.bitcoin',
exec: '/home/<username>/bitcore-node/bin/bitcoind'
datadir: '/home/<username>/.florincoin',
exec: '/home/<username>/flocore-node/bin/florincoind'
}
}
},
@ -67,7 +67,7 @@ var myNode = new bitcore.Node({
Now that you've loaded your services you can access them via `myNode.services.<service-name>.<method-name>`. For example if you wanted to check the balance of an address, you could access the address service like so.
```js
myNode.services.bitcoind.getAddressBalance('1HB5XMLmzFVj8ALj6mfBsbifRoD4miY36v', false, function(err, total) {
myNode.services.florincoind.getAddressBalance('1HB5XMLmzFVj8ALj6mfBsbifRoD4miY36v', false, function(err, total) {
console.log(total.balance); //Satoshi amount of this address
});
```
@ -82,7 +82,7 @@ A new service can be created by inheriting from `Node.Service` and implementing
- `Service.prototype.getPublishEvents()` - Describes which events can be subscribed to for this service, useful to subscribe to events over the included web socket API.
- `Service.prototype.setupRoutes()` - A service can extend HTTP routes on an express application by implementing this method.
The `package.json` for the service module can either export the `Node.Service` directly, or specify a specific module to load by including `"bitcoreNode": "lib/bitcore-node.js"`.
The `package.json` for the service module can either export the `Node.Service` directly, or specify a specific module to load by including `"flocoreNode": "lib/flocore-node.js"`.
Please take a look at some of the existing services for implementation specifics.

23
docs/services/address.md Normal file
View File

@ -0,0 +1,23 @@
# Address Service
The address service provides an address index for the Florincoin blockchain. Specifically, it builds and maintains the following information about every address ever used on the Florincoin network:
- block heights the address appeared in
- transaction ids and the index in the transaction
- whether the address appeared in an input or output
- the timestamp for the block
Additionally, the address index also maintains the unspent transaction output index for the Florincoin blockchain. Example queries for this type of data is provided by 'getAddressUnspentOutputs', 'getAddressSummary', and 'getAddressHistory'.
This service is generally used to support other services and is not used externally.
## Service Configuration
none
## Other services this service Depends on
- db
- header
- transaction
- timestamp

View File

@ -1,333 +0,0 @@
# Bitcoin Service
The Bitcoin Service is a Node.js interface to [Bitcoin Core](https://github.com/bitcoin/bitcoin) for querying information about the bitcoin block chain. It will manage starting and stopping `bitcoind`, or connect to several running `bitcoind` processes. It uses a [branch of Bitcoin Core](https://github.com/bitpay/bitcoin/tree/0.12.1-bitcore) with additional indexes for querying information about addresses and blocks. Results are cached for performance and there are several additional API methods added for common queries.
## Configuration
The default configuration will include a "spawn" configuration in "bitcoind". This defines the location of the block chain database and the location of the `bitcoind` daemon executable. The below configuration points to a local clone of `bitcoin`, and will start `bitcoind` automatically with your Node.js application.
```json
"servicesConfig": {
"bitcoind": {
"spawn": {
"datadir": "/home/bitcore/.bitcoin",
"exec": "/home/bitcore/bitcoin/src/bitcoind"
}
}
}
```
It's also possible to connect to separately managed `bitcoind` processes with round-robin querying, for example:
```json
"servicesConfig": {
"bitcoind": {
"connect": [
{
"rpchost": "127.0.0.1",
"rpcport": 30521,
"rpcuser": "bitcoin",
"rpcpassword": "local321",
"zmqpubrawtx": "tcp://127.0.0.1:30611"
},
{
"rpchost": "127.0.0.1",
"rpcport": 30522,
"rpcuser": "bitcoin",
"rpcpassword": "local321",
"zmqpubrawtx": "tcp://127.0.0.1:30622"
},
{
"rpchost": "127.0.0.1",
"rpcport": 30523,
"rpcuser": "bitcoin",
"rpcpassword": "local321",
"zmqpubrawtx": "tcp://127.0.0.1:30633"
}
]
}
}
```
**Note**: For detailed example configuration see [`regtest/cluster.js`](regtest/cluster.js)
## API Documentation
Methods are available by directly interfacing with the service:
```js
node.services.bitcoind.<methodName>
```
### Chain
**Getting Latest Blocks**
```js
// gives the block hashes sorted from low to high within a range of timestamps
var high = 1460393372; // Mon Apr 11 2016 12:49:25 GMT-0400 (EDT)
var low = 1460306965; // Mon Apr 10 2016 12:49:25 GMT-0400 (EDT)
node.services.bitcoind.getBlockHashesByTimestamp(high, low, function(err, blockHashes) {
//...
});
// get the current tip of the chain
node.services.bitcoind.getBestBlockHash(function(err, blockHash) {
//...
})
```
**Getting Synchronization and Node Status**
```js
// gives a boolean if the daemon is fully synced (not the initial block download)
node.services.bitcoind.isSynced(function(err, synced) {
//...
})
// gives the current estimate of blockchain download as a percentage
node.services.bitcoind.syncPercentage(function(err, percent) {
//...
});
// gives information about the chain including total number of blocks
node.services.bitcoind.getInfo(function(err, info) {
//...
});
```
**Generate Blocks**
```js
// will generate blocks for the "regtest" network (development purposes)
var numberOfBlocks = 10;
node.services.bitcoind.generateBlock(numberOfBlocks, function(err, blockHashes) {
//...
});
```
### Blocks and Transactions
**Getting Block Information**
It's possible to query blocks by both block hash and by height. Blocks are given as Node.js Buffers and can be parsed via Bitcore:
```js
var blockHeight = 0;
node.services.bitcoind.getRawBlock(blockHeight, function(err, blockBuffer) {
if (err) {
throw err;
}
var block = bitcore.Block.fromBuffer(blockBuffer);
console.log(block);
});
// get a bitcore object of the block (as above)
node.services.bitcoind.getBlock(blockHash, function(err, block) {
//...
});
// get only the block header and index (including chain work, height, and previous hash)
node.services.bitcoind.getBlockHeader(blockHeight, function(err, blockHeader) {
//...
});
// get the block with a list of txids
node.services.bitcoind.getBlockOverview(blockHash, function(err, blockOverview) {
//...
});
```
**Retrieving and Sending Transactions**
Get a transaction asynchronously by reading it from disk:
```js
var txid = '7426c707d0e9705bdd8158e60983e37d0f5d63529086d6672b07d9238d5aa623';
node.services.bitcoind.getRawTransaction(txid, function(err, transactionBuffer) {
if (err) {
throw err;
}
var transaction = bitcore.Transaction().fromBuffer(transactionBuffer);
});
// get a bitcore object of the transaction (as above)
node.services.bitcoind.getTransaction(txid, function(err, transaction) {
//...
});
// retrieve the transaction with input values, fees, spent and block info
node.services.bitcoind.getDetailedTransaction(txid, function(err, transaction) {
//...
});
```
Send a transaction to the network:
```js
var numberOfBlocks = 3;
node.services.bitcoind.estimateFee(numberOfBlocks, function(err, feesPerKilobyte) {
//...
});
node.services.bitcoind.sendTransaction(transaction.serialize(), function(err, hash) {
//...
});
```
### Addresses
**Get Unspent Outputs**
One of the most common uses is to retrieve the unspent outputs necessary to create a transaction; here is how to get the unspent outputs for an address (a sketch of spending them follows the output format below):
```js
var address = 'mgY65WSfEmsyYaYPQaXhmXMeBhwp4EcsQW';
node.services.bitcoind.getAddressUnspentOutputs(address, options, function(err, unspentOutputs) {
// see below
});
```
The `unspentOutputs` will have the format:
```js
[
{
address: 'mgY65WSfEmsyYaYPQaXhmXMeBhwp4EcsQW',
txid: '9d956c5d324a1c2b12133f3242deff264a9b9f61be701311373998681b8c1769',
outputIndex: 1,
height: 150,
satoshis: 1000000000,
script: '76a9140b2f0a0c31bfe0406b0ccc1381fdbe311946dadc88ac',
confirmations: 3
}
]
```
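A hedged sketch of spending those outputs, assuming `flocore-lib` keeps `bitcore-lib`'s `Transaction` builder API (`from`, `to`, `change`, `sign`); the destination, amount, and private key are placeholders.
```js
var flocore = require('flocore-lib');

// Build and broadcast a transaction from the unspent outputs returned above (sketch only).
var tx = new flocore.Transaction()
  .from(unspentOutputs)                              // UTXOs from getAddressUnspentOutputs
  .to('mgY65WSfEmsyYaYPQaXhmXMeBhwp4EcsQW', 50000)   // placeholder destination and amount (satoshis)
  .change('mgY65WSfEmsyYaYPQaXhmXMeBhwp4EcsQW')      // placeholder change address
  .sign(privateKey);                                 // placeholder private key

node.services.bitcoind.sendTransaction(tx.serialize(), function(err, hash) {
  //...
});
```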
**View Balances**
```js
var address = 'mgY65WSfEmsyYaYPQaXhmXMeBhwp4EcsQW';
node.services.bitcoind.getAddressBalance(address, options, function(err, balance) {
// balance will be in satoshis with "received" and "balance"
});
```
**View Address History**
This method will give the history of an address limited by a range of block heights by using the "start" and "end" arguments. The "start" value is the more recent, and greater, block height. The "end" value is the older, and lesser, block height. This feature is most useful for synchronization as previous history can be omitted. Furthermore, for large ranges of block heights, results can be paginated by using the "from" and "to" arguments (a pagination sketch follows the history format below).
If "queryMempool" is set as true (it is true by default), it will show unconfirmed transactions from the bitcoin mempool. However, if you specify "start" and "end", "queryMempool" is ignored and is always false.
If "queryMempoolOnly" is set as true (it is false by default), it will show *only* unconfirmed transactions from mempool.
```js
var addresses = ['mgY65WSfEmsyYaYPQaXhmXMeBhwp4EcsQW'];
var options = {
start: 345000,
end: 344000,
queryMempool: true // since we provided a range, queryMempool will be ignored
};
node.services.bitcoind.getAddressHistory(addresses, options, function(err, history) {
// see below
});
```
The history format will be:
```js
{
totalCount: 1, // The total number of items within "start" and "end"
items: [
{
addresses: {
'mgY65WSfEmsyYaYPQaXhmXMeBhwp4EcsQW': {
inputIndexes: [],
outputIndexes: [0]
}
},
satoshis: 1000000000,
tx: <detailed_transaction> // the same format as getDetailedTransaction
}
]
}
```
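For addresses with many transactions, the "from" and "to" arguments mentioned above page through these results; a short hedged sketch (index values are illustrative):
```js
var addresses = ['mgY65WSfEmsyYaYPQaXhmXMeBhwp4EcsQW'];
var options = {
  from: 0, // index of the first item to return
  to: 50   // index at which to stop
};
node.services.bitcoind.getAddressHistory(addresses, options, function(err, history) {
  // history.totalCount is the full count; history.items holds only this page
});
```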
**View Address Summary**
```js
var address = 'mgY65WSfEmsyYaYPQaXhmXMeBhwp4EcsQW';
var options = {
noTxList: false
};
node.services.bitcoind.getAddressSummary(address, options, function(err, summary) {
// see below
});
```
The `summary` will have the format (values are in satoshis):
```js
{
totalReceived: 1000000000,
totalSpent: 0,
balance: 1000000000,
unconfirmedBalance: 1000000000,
appearances: 1,
unconfirmedAppearances: 0,
txids: [
'3f7d13efe12e82f873f4d41f7e63bb64708fc4c942eb8c6822fa5bd7606adb00'
]
}
```
**Notes**:
- `totalReceived` does not exclude change *(the amount of satoshis originating from the same address)*
- `unconfirmedBalance` is the delta that the unconfirmed transactions have on the total balance *(can be both positive and negative)*
- `unconfirmedAppearances` is the total number of unconfirmed transactions
- `appearances` is the total number of confirmed transactions
- `txids` are sorted in block order with the most recent at the beginning. A maximum of 1000 *(default)* will be returned; the `from` and `to` options can be used to get further values *(see the sketch below)*.
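A hedged sketch of both knobs described in the notes: skipping the txid list entirely, or paging through it with `from` and `to` (values are illustrative):
```js
// Skip the (potentially long) txid list when only balances are needed.
node.services.bitcoind.getAddressSummary(address, { noTxList: true }, function(err, summary) {
  //...
});

// Or page through txids beyond the first 1000.
node.services.bitcoind.getAddressSummary(address, { from: 1000, to: 2000 }, function(err, summary) {
  //...
});
```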
## Events
The Bitcoin Service exposes several events via the Bus, and there are a few events that can be directly registered:
```js
node.services.bitcoind.on('tip', function(blockHash) {
// a new block tip has been added, if there is a rapid update (within a second) this will not emit every tip update
});
node.services.bitcoind.on('tx', function(transactionBuffer) {
// a new transaction has entered the mempool
});
node.services.bitcoind.on('block', function(blockHash) {
// a new block has been added
});
```
For details on instantiating a bus for a node, see the [Bus Documentation](../bus.md).
- Name: `bitcoind/rawtransaction`
- Name: `bitcoind/hashblock`
- Name: `bitcoind/addresstxid`, Arguments: [address, address...]
**Examples:**
```js
bus.subscribe('bitcoind/rawtransaction');
bus.subscribe('bitcoind/hashblock');
bus.subscribe('bitcoind/addresstxid', ['13FMwCYz3hUhwPcaWuD2M1U2KzfTtvLM89']);
bus.on('bitcoind/rawtransaction', function(transactionHex) {
//...
});
bus.on('bitcoind/hashblock', function(blockhashHex) {
//...
});
bus.on('bitcoind/addresstxid', function(data) {
// data.address;
// data.txid;
});
```

docs/services/block.md Normal file

@ -0,0 +1,20 @@
# Block Service
The block service provides a block index for the Florincoin blockchain. Specifically, there are two data points this service tracks:
- block hash
- raw block
This service is generally used to support other services and is not used externally.
## Service Configuration
none
## Other services this service Depends on
- header
- timestamp
- p2p
- db

docs/services/db.md Normal file

@ -0,0 +1,13 @@
# Db Service
The db service provides an abstraction over the underlying database used to store the indexes in flocore-node.
## Service Configuration
None
## Other services this service Depends on
None
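A hedged sketch of how other services use it, assuming `get` and `put` take Buffer keys and values as the db service implementation later in this diff suggests; the key and value here are illustrative.
```js
// Illustrative key/value; real services derive their keys from a service prefix.
var key = new Buffer('example-key', 'utf8');
var value = new Buffer('example-value', 'utf8');

node.services.db.put(key, value, function(err) {
  if (err) {
    throw err;
  }
  node.services.db.get(key, function(err, buf) {
    console.log(buf.toString('utf8')); // 'example-value'
  });
});
```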

docs/services/fee.md Normal file

@ -0,0 +1,22 @@
# Fee Service
The fee service is a requirement of the insight-api service (not a flocore-node built-in service). Its primary purpose is to query a florincoin full node for the most up-to-date miner fees for transactions. A florincoin full node such as [BTC1](https://github.com/btc1/florincoin) or [bcoin](https://github.com/bcoin-org/bcoin) with an available RPC interface is required.
## Service Configuration
```json
"fee": {
"rpc": {
"user": "user",
"pass": "pass",
"host": "localhost",
"protocol": "http",
"port": 8332
}
}
```
## Usage Example
```bash
curl http://localhost:3001/insight-api/estimateFee
```

docs/services/header.md Normal file

@ -0,0 +1,19 @@
# Header Service
The header service provides a header index for the Florincoin blockchain. Specifically, it builds and maintains the following information about every florincoin block header:
- block hash
- block height
- block header
This service is generally used to support other services and is not used externally.
## Service Configuration
none
## Other services this service Depends on
- db
- p2p

docs/services/mempool.md Normal file

@ -0,0 +1,16 @@
# Mempool Service
The mempool service provides a mempool transaction index for the Florincoin blockchain. Specifically, it maintains a larger index of mempool transactions than a typical full node can manage on its own, tracking:
- transaction id
- transaction
This service is generally used to support other services and is not used externally.
## Service Configuration
none
## Other services this service Depends on
- db
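A hedged usage sketch, assuming `getMempoolTransaction` is exposed as listed in the mempool service's API methods later in this diff; the txid is the example value used elsewhere in these docs.
```js
var txid = '7426c707d0e9705bdd8158e60983e37d0f5d63529086d6672b07d9238d5aa623'; // example txid
node.services.mempool.getMempoolTransaction(txid, function(err, tx) {
  // tx is the unconfirmed transaction if it is currently in the mempool
});
```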

docs/services/p2p.md Normal file

@ -0,0 +1,21 @@
# P2P Service
The p2p service provides a peer-to-peer interface for the Florincoin blockchain. This service abstracts the connection and communication interface between the Florincoin network and the rest of flocore-node.
This service also provides the publisher interface on the flocore-node bus architecture. The P2P service will publish header, block and transaction events.
## Service Configuration
```json
"p2p": {
"peers": [
{ "ip": { "v4": "127.0.0.1" }, "port": 8333 }
]
}
```
## Other services this service Depends on
None

View File

@ -0,0 +1,18 @@
# Timestamp Service
The timestamp service provides a block timestamp index for the Florincoin blockchain. The only reason this index needs to exist is to ensure that block timestamps are always strictly greater than all previous block timestamps; with the native block timestamps, this is not always the case. Without this index, accounting systems that are based on time spans (pretty much all of them) would have trouble accounting for transactions accurately. The index stores:
- block timestamp
- block hash
This service is generally used to support other services and is not used externally.
## Service Configuration
none
## Other services this service Depends on
- db
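A hedged sketch of the invariant this index enforces (not the service's actual code): each stored timestamp is forced to be strictly greater than the previous block's stored timestamp, even when the raw block timestamps go backwards.
```js
// Sketch only: illustrates the strictly-increasing adjustment, not real service code.
function adjustedTimestamp(rawBlockTimestamp, previousAdjustedTimestamp) {
  return Math.max(rawBlockTimestamp, previousAdjustedTimestamp + 1);
}

// e.g. raw block timestamps 1000, 990, 1005 are stored as 1000, 1001, 1005
```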

View File

@ -0,0 +1,21 @@
# Transaction Service
The transaction service provides a transaction index for the Florincoin blockchain. Specifically, it builds and maintains the following information about every transaction on the Florincoin network:
- transaction ids and transactions
- input values for every transaction
- the timestamp for the block that the transaction appears in
- the block height for the block that the transaction appears in
This service is generally used to support other services and is not used externally.
## Service Configuration
none
## Other services this service Depends on
- p2p
- db
- timestamp
- mempool

View File

@ -1,5 +1,5 @@
# Web Service
The web service creates an express app which can be used by services for setting up web routes for API's, static content, web applications, etc. This allows users to interact with various bitcore node services over one http or https port.
The web service creates an express app which can be used by services for setting up web routes for APIs, static content, web applications, etc. This allows users to interact with various flocore node services over one http or https port.
In order for your service to add routes, it must implement the `setupRoutes()` and `getRoutePrefix()` methods.
@ -22,7 +22,7 @@ MyService.prototype.getRoutePrefix = function() {
```
## Configuring Web Service for HTTPS
You can run the web service over https by editing your bitcore node config, setting https to true and adding httpsOptions:
You can run the web service over https by editing your flocore node config, setting https to true and adding httpsOptions:
```json
{

View File

@ -1,14 +1,14 @@
# Upgrade Notes
## From Bitcore 3.0.0 to 4.0.0
## From Flocore 3.0.0 to 4.0.0
`bitcore-node@2.1.1` to `bitcore-node@3.0.0`
`flocore-node@2.1.1` to `flocore-node@3.0.0`
This major upgrade includes changes to indexes, API methods and services. Please review below details before upgrading.
### Indexes
Indexes include *more information* and are now also *faster*. Because of this a **reindex will be necessary** when upgrading as the address and database indexes are now a part of bitcoind with three new `bitcoin.conf` options:
Indexes include *more information* and are now also *faster*. Because of this a **reindex will be necessary** when upgrading as the address and database indexes are now a part of florincoind with three new `florincoin.conf` options:
- `-addressindex`
- `-timestampindex`
- `-spentindex`
@ -17,18 +17,18 @@ To start reindexing add `reindex=1` during the **first startup only**.
### Configuration Options
- The `bitcoin.conf` file in will need to be updated to include additional indexes *(see below)*.
- The `datadir` option is now a part of `bitcoind` spawn configuration, and there is a new option to connect to multiple bitcoind processes (Please see [Bitcoin Service Docs](services/bitcoind.md) for more details). The services `db` and `address` are now a part of the `bitcoind` service. Here is how to update `bitcore-node.json` configuration options:
- The `florincoin.conf` file will need to be updated to include additional indexes *(see below)*.
- The `datadir` option is now a part of `florincoind` spawn configuration, and there is a new option to connect to multiple florincoind processes (Please see [Florincoin Service Docs](services/florincoind.md) for more details). The services `db` and `address` are now a part of the `florincoind` service. Here is how to update `flocore-node.json` configuration options:
**Before**:
```json
{
"datadir": "/home/<username>/.bitcoin",
"datadir": "/home/<username>/.florincoin",
"network": "livenet",
"port": 3001,
"services": [
"address",
"bitcoind",
"florincoind",
"db",
"web"
]
@ -41,21 +41,21 @@ To start reindexing add `reindex=1` during the **first startup only**.
"network": "livenet",
"port": 3001,
"services": [
"bitcoind",
"florincoind",
"web"
],
"servicesConfig": {
"bitcoind": {
"florincoind": {
"spawn": {
"datadir": "/home/<username>/.bitcoin",
"exec": "/home/<username>/bitcore-node/bin/bitcoind"
"datadir": "/home/<username>/.florincoin",
"exec": "/home/<username>/flocore-node/bin/florincoind"
}
}
}
}
```
It will also be necessary to update `bitcoin.conf` settings, to include these fields:
It will also be necessary to update `florincoin.conf` settings, to include these fields:
```
server=1
whitelist=127.0.0.1
@ -70,8 +70,8 @@ rpcuser=<user>
rpcpassword=<password>
```
**Important**: Once changes have been made you'll also need to add the `reindex=1` option **only for the first startup** to regenerate the indexes. Once this is complete you should be able to remove the `bitcore-node.db` directory with the old indexes.
**Important**: Once changes have been made you'll also need to add the `reindex=1` option **only for the first startup** to regenerate the indexes. Once this is complete you should be able to remove the `flocore-node.db` directory with the old indexes.
### API and Service Changes
- Many API methods that were a part of the `db` and `address` services are now a part of the `bitcoind` service. Please see [Bitcoin Service Docs](services/bitcoind.md) for more details.
- The `db` and `address` services are deprecated, most of the functionality still exists. Any services that were extending indexes with the `db` service, will need to manage chain state itself, or build the indexes within `bitcoind`.
- Many API methods that were a part of the `db` and `address` services are now a part of the `florincoind` service. Please see [Florincoin Service Docs](services/florincoind.md) for more details.
- The `db` and `address` services are deprecated; most of the functionality still exists. Any services that were extending indexes with the `db` service will need to manage chain state themselves, or build the indexes within `florincoind`.

flocore-node.json.sample Normal file

@ -0,0 +1,34 @@
{
"network": "testnet",
"port": 3001,
"datadir": "/tmp",
"services": [
"p2p",
"db",
"header",
"block",
"mempool",
"address",
"transaction",
"timestamp",
"fee",
"insight-api",
"web"
],
"servicesConfig": {
"insight-api": {
"routePrefix": "api",
"disableRateLimiter": true,
"enableCache": true
},
"fee": {
"rpc": {
"user": "local",
"pass": "local",
"host": "localhost",
"protocol": "http",
"port": 18332
}
}
}
}

View File

@ -20,7 +20,7 @@ module.exports.scaffold.defaultConfig = require('./lib/scaffold/default-config')
module.exports.cli = {};
module.exports.cli.main = require('./lib/cli/main');
module.exports.cli.daemon = require('./lib/cli/daemon');
module.exports.cli.bitcore = require('./lib/cli/bitcore');
module.exports.cli.bitcored = require('./lib/cli/bitcored');
module.exports.cli.flocore = require('./lib/cli/flocore');
module.exports.cli.flocored = require('./lib/cli/flocored');
module.exports.lib = require('bitcore-lib');
module.exports.lib = require('flocore-lib');

View File

@ -2,20 +2,20 @@
var program = require('commander');
var path = require('path');
var bitcore = require('..');
var flocore = require('..');
function main(servicesPath, additionalServices) {
/* jshint maxstatements: 100 */
var version = bitcore.version;
var start = bitcore.scaffold.start;
var findConfig = bitcore.scaffold.findConfig;
var defaultConfig = bitcore.scaffold.defaultConfig;
var version = flocore.version;
var start = flocore.scaffold.start;
var findConfig = flocore.scaffold.findConfig;
var defaultConfig = flocore.scaffold.defaultConfig;
program
.version(version)
.description('Start the current node')
.option('-c, --config <dir>', 'Specify the directory with Bitcore Node configuration');
.option('-c, --config <dir>', 'Specify the directory with Flocore Node configuration');
program.parse(process.argv);

View File

@ -5,10 +5,10 @@ var Liftoff = require('liftoff');
function main(parentServicesPath, additionalServices) {
var liftoff = new Liftoff({
name: 'bitcore',
moduleName: 'bitcore-node',
configName: 'bitcore-node',
processTitle: 'bitcore'
name: 'flocore',
moduleName: 'flocore-node',
configName: 'flocore-node',
processTitle: 'flocore'
}).on('require', function (name) {
console.log('Loading:', name);
}).on('requireFail', function (name, err) {
@ -31,6 +31,15 @@ function main(parentServicesPath, additionalServices) {
node.cli.main(parentServicesPath, additionalServices);
}
// Gracefully Shut Down
process.on('SIGTERM', function () {
console.log("Shutting down flocore-node")
node.stop(function() {
console.log("flocore-node successfully stopped!")
process.exit(0)
})
})
});
}

View File

@ -5,10 +5,10 @@ var Liftoff = require('liftoff');
function main(parentServicesPath, additionalServices) {
var liftoff = new Liftoff({
name: 'bitcored',
moduleName: 'bitcore-node',
configName: 'bitcore-node',
processTitle: 'bitcored'
name: 'flocored',
moduleName: 'flocore-node',
configName: 'flocore-node',
processTitle: 'flocored'
}).on('require', function (name) {
console.log('Loading:', name);
}).on('requireFail', function (name, err) {

View File

@ -2,20 +2,20 @@
var program = require('commander');
var path = require('path');
var bitcorenode = require('..');
var flocorenode = require('..');
var utils = require('../utils');
function main(servicesPath, additionalServices) {
/* jshint maxstatements: 100 */
var version = bitcorenode.version;
var create = bitcorenode.scaffold.create;
var add = bitcorenode.scaffold.add;
var start = bitcorenode.scaffold.start;
var remove = bitcorenode.scaffold.remove;
var callMethod = bitcorenode.scaffold.callMethod;
var findConfig = bitcorenode.scaffold.findConfig;
var defaultConfig = bitcorenode.scaffold.defaultConfig;
var version = flocorenode.version;
var create = flocorenode.scaffold.create;
var add = flocorenode.scaffold.add;
var start = flocorenode.scaffold.start;
var remove = flocorenode.scaffold.remove;
var callMethod = flocorenode.scaffold.callMethod;
var findConfig = flocorenode.scaffold.findConfig;
var defaultConfig = flocorenode.scaffold.defaultConfig;
program
.version(version);
@ -23,7 +23,7 @@ function main(servicesPath, additionalServices) {
program
.command('create <directory>')
.description('Create a new node')
.option('-d, --datadir <dir>', 'Specify the bitcoin database directory')
.option('-d, --datadir <dir>', 'Specify the florincoin database directory')
.option('-t, --testnet', 'Enable testnet as the network')
.action(function(dirname, cmd){
if (cmd.datadir) {
@ -49,7 +49,7 @@ function main(servicesPath, additionalServices) {
program
.command('start')
.description('Start the current node')
.option('-c, --config <dir>', 'Specify the directory with Bitcore Node configuration')
.option('-c, --config <dir>', 'Specify the directory with Flocore Node configuration')
.action(function(cmd){
if (cmd.config) {
cmd.config = path.resolve(process.cwd(), cmd.config);
@ -72,7 +72,7 @@ function main(servicesPath, additionalServices) {
.action(function(services){
var configInfo = findConfig(process.cwd());
if (!configInfo) {
throw new Error('Could not find configuration, see `bitcore-node create --help`');
throw new Error('Could not find configuration, see `flocore-node create --help`');
}
var opts = {
path: configInfo.path,
@ -87,8 +87,8 @@ function main(servicesPath, additionalServices) {
}).on('--help', function() {
console.log(' Examples:');
console.log();
console.log(' $ bitcore-node add wallet-service');
console.log(' $ bitcore-node add insight-api');
console.log(' $ flocore-node add wallet-service');
console.log(' $ flocore-node add insight-api');
console.log();
});
@ -98,7 +98,7 @@ function main(servicesPath, additionalServices) {
.action(function(services){
var configInfo = findConfig(process.cwd());
if (!configInfo) {
throw new Error('Could not find configuration, see `bitcore-node create --help`');
throw new Error('Could not find configuration, see `flocore-node create --help`');
}
var opts = {
path: configInfo.path,
@ -113,8 +113,8 @@ function main(servicesPath, additionalServices) {
}).on('--help', function() {
console.log(' Examples:');
console.log();
console.log(' $ bitcore-node remove wallet-service');
console.log(' $ bitcore-node remove insight-api');
console.log(' $ flocore-node remove wallet-service');
console.log(' $ flocore-node remove insight-api');
console.log();
});

View File

@ -3,9 +3,9 @@
module.exports = {
BITCOIN_GENESIS_HASH: {
livenet: '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f',
regtest: '0f9188f13cb7b2c71f2a335e3a4fc328bf5beb436012afca590b1a11466e2206',
testnet: '000000000933ea01ad0ee984209779baaec3ced90fa3f408719526f8d77f4943', //this is testnet3
livenet: '09c7781c9df90708e278c35d38ea5c9041d7ecfcdd1c56ba67274b7cff3e1cea',
regtest: 'ec42fa26ca6dcb1103b59a1d24b161935ea4566f8d5736db8917d5b9a8dee0d7',
testnet: '9b7bc86236c34b5e3a39367c036b7fe8807a966c22a7a1f0da2a198a27e03731', //this is testnet3
testnet5: '000000000933ea01ad0ee984209779baaec3ced90fa3f408719526f8d77f4943' //this is testnet5
},
DB_PREFIX: new Buffer('ffff', 'hex')

View File

@ -2,11 +2,11 @@
var createError = require('errno').create;
var BitcoreNodeError = createError('BitcoreNodeError');
var FlocoreNodeError = createError('FlocoreNodeError');
var RPCError = createError('RPCError', BitcoreNodeError);
var RPCError = createError('RPCError', FlocoreNodeError);
module.exports = {
Error: BitcoreNodeError,
Error: FlocoreNodeError,
RPCError: RPCError
};

View File

@ -1,7 +1,7 @@
'use strict';
var bitcore = require('bitcore-lib');
var _ = bitcore.deps._;
var flocore = require('flocore-lib');
var _ = flocore.deps._;
var colors = require('colors/safe');
/**

View File

@ -4,8 +4,8 @@ var util = require('util');
var EventEmitter = require('events').EventEmitter;
var async = require('async');
var assert = require('assert');
var bitcore = require('bitcore-lib');
var _ = bitcore.deps._;
var flocore = require('flocore-lib');
var _ = flocore.deps._;
var index = require('./');
var log = index.log;
var Bus = require('./bus');
@ -201,14 +201,6 @@ Node.prototype.start = function(callback) {
);
};
Node.prototype.getNetworkName = function() {
var network = this.network.name;
if (this.network.regtestEnabled) {
network = 'regtest';
}
return network;
};
Node.prototype.stop = function(callback) {
log.info('Beginning shutdown');
@ -234,7 +226,7 @@ Node.prototype.stop = function(callback) {
if (callback) {
callback();
}
});
});
};
module.exports = Node;

View File

@ -4,10 +4,10 @@ var async = require('async');
var fs = require('fs');
var path = require('path');
var spawn = require('child_process').spawn;
var bitcore = require('bitcore-lib');
var flocore = require('flocore-lib');
var utils = require('../utils');
var $ = bitcore.util.preconditions;
var _ = bitcore.deps._;
var $ = flocore.util.preconditions;
var _ = flocore.deps._;
/**
* @param {String} configFilePath - The absolute path to the configuration file
@ -62,7 +62,7 @@ function addService(configDir, service, done) {
/**
* @param {String} options.cwd - The current working directory
* @param {String} options.dirname - The bitcore-node configuration directory
* @param {String} options.dirname - The flocore-node configuration directory
* @param {Array} options.services - An array of strings of service names
* @param {Function} done - A callback function called when finished
*/
@ -78,12 +78,12 @@ function add(options, done) {
var configPath = options.path;
var services = options.services;
var bitcoreConfigPath = path.resolve(configPath, 'bitcore-node.json');
var flocoreConfigPath = path.resolve(configPath, 'flocore-node.json');
var packagePath = path.resolve(configPath, 'package.json');
if (!fs.existsSync(bitcoreConfigPath) || !fs.existsSync(packagePath)) {
if (!fs.existsSync(flocoreConfigPath) || !fs.existsSync(packagePath)) {
return done(
new Error('Directory does not have a bitcore-node.json and/or package.json file.')
new Error('Directory does not have a flocore-node.json and/or package.json file.')
);
}
@ -108,8 +108,8 @@ function add(options, done) {
oldPackage = updatedPackage;
var serviceName = newDependencies[0];
// add service to bitcore-node.json
addConfig(bitcoreConfigPath, serviceName, next);
// add service to flocore-node.json
addConfig(flocoreConfigPath, serviceName, next);
});
}, done
);

View File

@ -1,10 +1,10 @@
'use strict';
var spawn = require('child_process').spawn;
var bitcore = require('bitcore-lib');
var flocore = require('flocore-lib');
var async = require('async');
var $ = bitcore.util.preconditions;
var _ = bitcore.deps._;
var $ = flocore.util.preconditions;
var _ = flocore.deps._;
var path = require('path');
var packageFile = require('../../package.json');
var mkdirp = require('mkdirp');
@ -14,22 +14,22 @@ var defaultConfig = require('./default-config');
var version = '^' + packageFile.version;
var BASE_PACKAGE = {
description: 'A full Bitcoin node build with Bitcore',
description: 'A full Florincoin node build with Flocore',
repository: 'https://github.com/user/project',
license: 'MIT',
readme: 'README.md',
dependencies: {
'bitcore-lib': '^' + bitcore.version,
'bitcore-node': version
'flocore-lib': '^' + flocore.version,
'flocore-node': version
}
};
/**
* Will create a directory and bitcoin.conf file for Bitcoin.
* Will create a directory and florincoin.conf file for Florincoin.
* @param {String} dataDir - The absolute path
* @param {Function} done - The callback function called when finished
*/
function createBitcoinDirectory(datadir, done) {
function createFlorincoinDirectory(datadir, done) {
mkdirp(datadir, function(err) {
if (err) {
throw err;
@ -42,10 +42,10 @@ function createBitcoinDirectory(datadir, done) {
}
/**
* Will create a base Bitcore Node configuration directory and files.
* Will create a base Flocore Node configuration directory and files.
* @param {Object} options
* @param {String} options.network - "testnet" or "livenet"
* @param {String} options.datadir - The bitcoin database directory
* @param {String} options.datadir - The florincoin database directory
* @param {String} configDir - The absolute path
* @param {Boolean} isGlobal - If the configuration depends on globally installed node services.
* @param {Function} done - The callback function called when finished
@ -61,7 +61,7 @@ function createConfigDirectory(options, configDir, isGlobal, done) {
var configJSON = JSON.stringify(config, null, 2);
var packageJSON = JSON.stringify(BASE_PACKAGE, null, 2);
try {
fs.writeFileSync(configDir + '/bitcore-node.json', configJSON);
fs.writeFileSync(configDir + '/flocore-node.json', configJSON);
if (!isGlobal) {
fs.writeFileSync(configDir + '/package.json', packageJSON);
}
@ -74,13 +74,13 @@ function createConfigDirectory(options, configDir, isGlobal, done) {
}
/**
* Will setup a directory with a Bitcore Node directory, configuration file,
* bitcoin configuration, and will install all necessary dependencies.
* Will setup a directory with a Flocore Node directory, configuration file,
* florincoin configuration, and will install all necessary dependencies.
*
* @param {Object} options
* @param {String} options.cwd - The current working directory
* @param {String} options.dirname - The name of the bitcore node configuration directory
* @param {String} options.datadir - The path to the bitcoin datadir
* @param {String} options.dirname - The name of the flocore node configuration directory
* @param {String} options.datadir - The path to the florincoin datadir
* @param {Function} done - A callback function called when finished
*/
function create(options, done) {
@ -103,7 +103,7 @@ function create(options, done) {
async.series([
function(next) {
// Setup the the bitcore-node directory and configuration
// Setup the the flocore-node directory and configuration
if (!fs.existsSync(absConfigDir)) {
var createOptions = {
network: options.network,
@ -115,9 +115,9 @@ function create(options, done) {
}
},
function(next) {
// Setup the bitcoin directory and configuration
// Setup the florincoin directory and configuration
if (!fs.existsSync(absDataDir)) {
createBitcoinDirectory(absDataDir, next);
createFlorincoinDirectory(absDataDir, next);
} else {
next();
}

View File

@ -10,8 +10,8 @@ function getMajorVersion(versionString) {
}
/**
* Will return the path and default bitcore-node configuration. It will search for the
* configuration file in the "~/.bitcore" directory, and if it doesn't exist, it will create one
* Will return the path and default flocore-node configuration. It will search for the
* configuration file in the "~/.flocore" directory, and if it doesn't exist, it will create one
* based on default settings.
* @param {Object} [options]
* @param {Array} [options.additionalServices] - An optional array of services.
@ -22,8 +22,8 @@ function getDefaultConfig(options) {
options = {};
}
var defaultPath = path.resolve(process.env.HOME, './.bitcore');
var defaultConfigFile = path.resolve(defaultPath, './bitcore-node.json');
var defaultPath = path.resolve(process.env.HOME, './.flocore');
var defaultConfigFile = path.resolve(defaultPath, './flocore-node.json');
if (!fs.existsSync(defaultPath)) {
mkdirp.sync(defaultPath);
@ -40,11 +40,11 @@ function getDefaultConfig(options) {
};
}
console.log(`The configuration file at '${defaultConfigFile}' is incompatible with this version of Bitcore.`);
console.log(`The configuration file at '${defaultConfigFile}' is incompatible with this version of Flocore.`);
var now = new Date();
// bitcore-node.YYYY-MM-DD.UnixTimestamp.json
var backupFileName = `bitcore-node.${now.getUTCFullYear()}-${now.getUTCMonth()}-${now.getUTCDate()}.${now.getTime()}.json`;
// flocore-node.YYYY-MM-DD.UnixTimestamp.json
var backupFileName = `flocore-node.${now.getUTCFullYear()}-${now.getUTCMonth()}-${now.getUTCDate()}.${now.getTime()}.json`;
var backupFile = path.resolve(defaultPath, backupFileName);
fs.renameSync(defaultConfigFile, backupFile);
console.log(`The previous configuration file has been moved to: ${backupFile}.`);
@ -78,11 +78,11 @@ function getDefaultConfig(options) {
services: options.additionalServices ? defaultServices.concat(options.additionalServices) : defaultServices,
datadir: defaultDataDir,
servicesConfig: {
'insight-api': {
cwdRequirePath: 'node_modules/insight-api'
'flosight-api': {
cwdRequirePath: 'node_modules/flosight-api'
},
'insight-ui': {
cwdRequirePath: 'node_modules/insight-ui'
'flosight-ui': {
cwdRequirePath: 'node_modules/flosight-ui'
}
}
};

View File

@ -1,21 +1,21 @@
'use strict';
var bitcore = require('bitcore-lib');
var $ = bitcore.util.preconditions;
var _ = bitcore.deps._;
var flocore = require('flocore-lib');
var $ = flocore.util.preconditions;
var _ = flocore.deps._;
var path = require('path');
var fs = require('fs');
var utils = require('../utils');
/**
* Will return the path and bitcore-node configuration
* Will return the path and flocore-node configuration
* @param {String} cwd - The absolute path to the current working directory
*/
function findConfig(cwd) {
$.checkArgument(_.isString(cwd), 'Argument should be a string');
$.checkArgument(utils.isAbsolutePath(cwd), 'Argument should be an absolute path');
var directory = String(cwd);
while (!fs.existsSync(path.resolve(directory, 'bitcore-node.json'))) {
while (!fs.existsSync(path.resolve(directory, 'flocore-node.json'))) {
directory = path.resolve(directory, '../');
if (directory === '/') {
return false;
@ -23,7 +23,7 @@ function findConfig(cwd) {
}
return {
path: directory,
config: require(path.resolve(directory, 'bitcore-node.json'))
config: require(path.resolve(directory, 'flocore-node.json'))
};
}

View File

@ -4,13 +4,13 @@ var async = require('async');
var fs = require('fs');
var path = require('path');
var spawn = require('child_process').spawn;
var bitcore = require('bitcore-lib');
var $ = bitcore.util.preconditions;
var _ = bitcore.deps._;
var flocore = require('flocore-lib');
var $ = flocore.util.preconditions;
var _ = flocore.deps._;
var utils = require('../utils');
/**
* Will remove a service from bitcore-node.json
* Will remove a service from flocore-node.json
* @param {String} configFilePath - The absolute path to the configuration file
* @param {String} service - The name of the module
* @param {Function} done
@ -82,9 +82,9 @@ function removeService(configDir, service, done) {
}
/**
* Will remove the Node.js service and from the bitcore-node configuration.
* Will remove the Node.js service and from the flocore-node configuration.
* @param {String} options.cwd - The current working directory
* @param {String} options.dirname - The bitcore-node configuration directory
* @param {String} options.dirname - The flocore-node configuration directory
* @param {Array} options.services - An array of strings of service names
* @param {Function} done - A callback function called when finished
*/
@ -100,12 +100,12 @@ function remove(options, done) {
var configPath = options.path;
var services = options.services;
var bitcoreConfigPath = path.resolve(configPath, 'bitcore-node.json');
var flocoreConfigPath = path.resolve(configPath, 'flocore-node.json');
var packagePath = path.resolve(configPath, 'package.json');
if (!fs.existsSync(bitcoreConfigPath) || !fs.existsSync(packagePath)) {
if (!fs.existsSync(flocoreConfigPath) || !fs.existsSync(packagePath)) {
return done(
new Error('Directory does not have a bitcore-node.json and/or package.json file.')
new Error('Directory does not have a flocore-node.json and/or package.json file.')
);
}
@ -117,8 +117,8 @@ function remove(options, done) {
if (err) {
return next(err);
}
// remove service to bitcore-node.json
removeConfig(bitcoreConfigPath, service, next);
// remove service to flocore-node.json
removeConfig(flocoreConfigPath, service, next);
});
}, done
);

View File

@ -1,10 +1,10 @@
'use strict';
var path = require('path');
var BitcoreNode = require('../node');
var FlocoreNode = require('../node');
var index = require('../');
var bitcore = require('bitcore-lib');
var _ = bitcore.deps._;
var flocore = require('flocore-lib');
var _ = flocore.deps._;
var log = index.log;
var shuttingDown = false;
var fs = require('fs');
@ -20,17 +20,17 @@ function start(options) {
servicesPath = options.path;
}
fullConfig.path = path.resolve(options.path, './bitcore-node.json');
fullConfig.path = path.resolve(options.path, './flocore-node.json');
fullConfig.services = start.setupServices(require, servicesPath, options.config);
var node = new BitcoreNode(fullConfig);
var node = new FlocoreNode(fullConfig);
// setup handlers for uncaught exceptions and ctrl+c
start.registerExitHandlers(process, node);
node.on('ready', function() {
log.info('Bitcore Node ready');
log.info('Flocore Node ready');
});
node.on('error', function(err) {
@ -101,9 +101,8 @@ function lookInBuiltInPath(req, service) {
try {
var serviceFile = path.resolve(__dirname, '../services/' + service.name);
return req(serviceFile);
} catch(e) {
console.log(e);
if(e.code !== 'MODULE_NOT_FOUND') {
} catch (e) {
if (e.code !== 'MODULE_NOT_FOUND') {
log.error(e);
}
log.info('Checked the built-in path: lib/services, for service: ' + service.name);
@ -114,8 +113,8 @@ function lookInModuleManifest(req, service) {
try {
var servicePackage = req(service.name + '/package.json');
var serviceModule = service.name;
if (servicePackage.bitcoreNode) {
serviceModule = serviceModule + '/' + servicePackage.bitcoreNode;
if (servicePackage.flocoreNode) {
serviceModule = serviceModule + '/' + servicePackage.flocoreNode;
return req(serviceModule);
}
} catch(e) {
@ -140,7 +139,7 @@ function loadModule(req, service) {
}
//fourth, see if there is directory in our module search path that has a
//package.json file, if so, then see if there is a bitcoreNode field, if so
//package.json file, if so, then see if there is a flocoreNode field, if so
//use this as the path to the service module
if(!serviceCode) {
serviceCode = lookInModuleManifest(req, service);
@ -150,7 +149,7 @@ function loadModule(req, service) {
throw new Error('Attempted to load the ' + service.name + ' service from: ' +
'the requirePath in the services\' config, then "' +
process.cwd() + '" then from: "' + __dirname + '/../lib/services' + '" finally from: "' +
process.cwd() + '/package.json" - bitcoreNode field. All paths failed to find valid nodeJS code.');
process.cwd() + '/package.json" - flocoreNode field. All paths failed to find valid nodeJS code.');
}
service.module = serviceCode;
@ -161,9 +160,9 @@ function loadModule(req, service) {
* specified modules, and assemble an array in this format:
* [
* {
* name: 'bitcoind',
* name: 'florincoind',
* config: {},
* module: BitcoinService
* module: FlorincoinService
* }
* ]
* @param {Function} req - The require function to use
@ -202,23 +201,26 @@ function cleanShutdown(_process, node) {
return _process.exit(1);
}
log.info('Halted');
_process.exit(0);
process.exit(0);
});
}
function exitHandler(options, _process, node, err) {
if (err) {
// Handle and log errors other than SIGINT shutdown
if (err && err !== "SIGINT") {
log.error('uncaught exception:', err);
if(err.stack) {
log.error(err.stack);
}
node.stop(function(err) {
if(err) {
log.error('Failed to stop services: ' + err);
}
_process.exit(-1);
});
if(options.exit)
node.stop(function(err) {
if(err) {
log.error('Failed to stop services: ' + err);
}
_process.exit(-1);
});
}
// Handle SIGINT (Ctrl+C)
if (options.sigint) {
if (!shuttingDown) {
shuttingDown = true;
@ -228,7 +230,7 @@ function exitHandler(options, _process, node, err) {
}
function registerExitHandlers(_process, node) {
_process.on('uncaughtException', exitHandler.bind(null, {exit:true}, _process, node));
_process.on('uncaughtException', exitHandler.bind(null, {exit:false}, _process, node));
_process.on('SIGINT', exitHandler.bind(null, {sigint:true}, _process, node));
}

View File

@ -72,7 +72,7 @@ Service.prototype.start = function(done) {
};
/**
* Function to be called when bitcore-node is stopped
* Function to be called when flocore-node is stopped
*/
Service.prototype.stop = function(done) {
setImmediate(done);

View File

@ -2,11 +2,13 @@
function Encoding(servicePrefix) {
this.servicePrefix = servicePrefix;
this.addressIndex = new Buffer('00', 'hex');
this.utxoIndex = new Buffer('01', 'hex');
this.addressCache = new Buffer('fe', 'hex');
}
Encoding.prototype.encodeAddressIndexKey = function(address, height, txid, index, input, timestamp) {
var prefix = new Buffer('00', 'hex');
var buffers = [this.servicePrefix, prefix];
var buffers = [this.servicePrefix, this.addressIndex];
var addressSizeBuffer = new Buffer(1);
addressSizeBuffer.writeUInt8(address.length);
@ -58,8 +60,7 @@ Encoding.prototype.decodeAddressIndexKey = function(buffer) {
};
Encoding.prototype.encodeUtxoIndexKey = function(address, txid, outputIndex) {
var prefix = new Buffer('01', 'hex');
var buffers = [this.servicePrefix, prefix];
var buffers = [this.servicePrefix, this.utxoIndex];
var addressSizeBuffer = new Buffer(1);
addressSizeBuffer.writeUInt8(address.length);
@ -114,5 +115,53 @@ Encoding.prototype.decodeUtxoIndexValue = function(buffer) {
};
};
Encoding.prototype.encodeAddressCacheKey = function(address) {
return Buffer.concat([this.servicePrefix, this.addressCache, new Buffer(address, 'utf8')]);
}
Encoding.prototype.decodeAddressCacheKey = function(buffer) {
return buffer.slice(3).toString('utf8');
}
Encoding.prototype.encodeAddressCacheValue = function(lastTx, lastBlock, balance, received, sent, txApperances) {
var buffer = [];
var balanceBuffer = new Buffer(8);
balanceBuffer.writeBigUInt64BE(BigInt(balance));
buffer.push(balanceBuffer);
var receivedBuffer = new Buffer(8);
receivedBuffer.writeBigUInt64BE(BigInt(received));
buffer.push(receivedBuffer);
var sentBuffer = new Buffer(8);
sentBuffer.writeBigUInt64BE(BigInt(sent));
buffer.push(sentBuffer);
var txApperancesBuffer = new Buffer(4);
txApperancesBuffer.writeUInt32BE(txApperances);
buffer.push(txApperancesBuffer);
var txidBuffer = new Buffer(lastTx, 'hex');
buffer.push(txidBuffer);
var blkBuffer = new Buffer(lastBlock, 'hex');
buffer.push(blkBuffer);
return Buffer.concat(buffer);
}
Encoding.prototype.decodeAddressCacheValue = function(buffer) {
var balance = parseInt(buffer.readBigUInt64BE(0));
var received = parseInt(buffer.readBigUInt64BE(8));
var sent = parseInt(buffer.readBigUInt64BE(16));
var txApperances = buffer.readUInt32BE(24);
var lastTx = buffer.slice(28, 60).toString('hex'); //28 + 32 (tx hash buffer length) = 60
var lastBlock = buffer.slice(60).toString('hex');
return { lastTx, lastBlock, balance, received, sent, txApperances };
}
module.exports = Encoding;

File diff suppressed because it is too large

View File

@ -242,7 +242,7 @@ ProcessSerial.prototype._write = function(block, enc, callback) {
self.block.once('concurrentaddblock', function() {
if(!check()) {
var err = new Error('Concurrent block ' + self.block.concurrentTip.__height + ' is less than ' + block.__height);
var err = 'Concurrent block ' + self.block.concurrentTip.__height + ' is less than ' + block.__height;
return self.emit('error', err);
}
self._process(block, callback);

View File

@ -1,6 +1,6 @@
'use strict';
var Block = require('bcoin').block;
var Block = require('fcoin').Block;
// stores -- block header as key, block itself as value (optionally)
function Encoding(servicePrefix) {

File diff suppressed because it is too large

View File

@ -1,6 +1,6 @@
'use strict';
var bitcore = require('bitcore-lib');
var BufferUtil = bitcore.util.buffer;
var flocore = require('flocore-lib');
var BufferUtil = flocore.util.buffer;
var async = require('async');
function Reorg(node, block) {
@ -315,7 +315,7 @@ Reorg.prototype.findCommonAncestorAndNewHashes = function(oldTipHash, newTipHash
}
if(!mainPosition && !forkPosition) {
return next(new Error('Unknown common ancestor'));
return next('Unknown common ancestor');
}
next();

View File

@ -3,6 +3,7 @@
var util = require('util');
var fs = require('fs');
var async = require('async');
var _ = require('lodash');
var levelup = require('levelup');
var leveldown = require('leveldown');
var mkdirp = require('mkdirp');
@ -49,18 +50,19 @@ DB.dependencies = [];
DB.prototype._onError = function(err) {
if (!this._stopping) {
log.error('Db Service: error: ' + err);
this.node.stop();
//FLO Crash Error Resolution by RanchiMall 10th May 2021
//this.node.stop();
}
};
DB.prototype._setDataPath = function() {
assert(fs.existsSync(this.node.datadir), 'Node is expected to have a "datadir" property');
if (this.node.network === 'livenet' || this.node.network === 'mainnet') {
this.dataPath = this.node.datadir + '/bitcorenode.db';
this.dataPath = this.node.datadir + '/flocorenode.db';
} else if (this.node.network === 'regtest') {
this.dataPath = this.node.datadir + '/regtest/bitcorenode.db';
this.dataPath = this.node.datadir + '/regtest/flocorenode.db';
} else if (this.node.network === 'testnet') {
this.dataPath = this.node.datadir + '/testnet/bitcorenode.db';
this.dataPath = this.node.datadir + '/testnet/flocorenode.db';
} else {
throw new Error('Unknown network: ' + this.network);
}
@ -78,7 +80,12 @@ DB.prototype.start = function(callback) {
mkdirp.sync(this.dataPath);
}
this._store = levelup(this.dataPath, { db: this.levelupStore, keyEncoding: 'binary', valueEncoding: 'binary'});
this._store = levelup(this.levelupStore(this.dataPath), {
keyEncoding: 'binary',
valueEncoding: 'binary',
writeBufferSize: 8 * 1024 * 1024,
cacheSize: 1024 * 1024 * 1024 // 1 GB of memory for cache.
});
setImmediate(callback);
@ -89,7 +96,7 @@ DB.prototype.get = function(key, options, callback) {
var cb = callback;
var opts = options;
if (typeof callback !== 'function') {
if (!_.isFunction(callback)) {
cb = options;
opts = {};
}
@ -112,63 +119,95 @@ DB.prototype.get = function(key, options, callback) {
} else {
cb(new Error('Shutdown sequence underway, not able to complete the query'));
// FLOSight Error Correction from RanchiMall 20th May 2021.
//cb(new Error('Shutdown sequence underway, not able to complete the query'));
log.error('Shutdown sequence underway, not able to complete the query');
}
};
DB.prototype.put = function(key, value, callback) {
assert(Buffer.isBuffer(key), 'key NOT a buffer as expected.');
if (value) {
assert(Buffer.isBuffer(value), 'value exists but NOT a buffer as expected.');
}
var self = this;
if (self._stopping) {
if (this._stopping) {
callback();
}
self._store.put(key, value, callback);
// FLOSight Error Correction from RanchiMall 20th May 2021. removed the unhandled assert and replaced by logging of error
if (Buffer.isBuffer(key) == false) {
log.error('key NOT a buffer as expected.');
}
// assert(Buffer.isBuffer(key), 'key NOT a buffer as expected.');
if (value) {
// FLOSight Error Correction from RanchiMall 20th May 2021. removed the unhandled assert and replaced by logging of error
if (Buffer.isBuffer(value) == false) {
log.error('value exists but NOT a buffer as expected.');
}
//assert(Buffer.isBuffer(value), 'value exists but NOT a buffer as expected.');
}
this._store.put(key, value, callback);
};
DB.prototype.del = function(key, callback) {
if (this._stopping) {
callback();
}
// FLOSight Error Correction from RanchiMall 20th May 2021. removed the unhandled assert and replaced by logging of error
if (Buffer.isBuffer(key) == false) {
log.error('key NOT a buffer as expected.');
}
// assert(Buffer.isBuffer(key), 'key NOT a buffer as expected.');
this._store.del(key, callback);
}
DB.prototype.batch = function(ops, callback) {
var self = this;
if (self._stopping) {
if (this._stopping) {
return callback();
}
for(var i = 0; i < ops.length; i++) {
assert(Buffer.isBuffer(ops[i].key), 'key NOT a buffer as expected.');
// FLOSight Error Correction from RanchiMall 20th May 2021. removed the unhandled assert and replaced by logging of error
if (Buffer.isBuffer(ops[i].key) == false) {
log.error('key NOT a buffer as expected.');
}
//assert(Buffer.isBuffer(ops[i].key), 'key NOT a buffer as expected.');
if (ops[i].value) {
assert(Buffer.isBuffer(ops[i].value), 'value exists but NOT a buffer as expected.');
// FLOSight Error Correction from RanchiMall 20th May 2021. removed the unhandled assert and replaced by logging of error
if (Buffer.isBuffer(ops[i].value) == false) {
log.error('value exists but NOT a buffer as expected.');
}
//assert(Buffer.isBuffer(ops[i].value), 'value exists but NOT a buffer as expected.');
}
}
self._store.batch(ops, callback);
this._store.batch(ops, callback);
};
DB.prototype.createReadStream = function(op) {
if (this._stopping) {
return;
}
var stream = this._store.createReadStream(op);
stream.on('error', this._onError.bind(this));
return stream;
};
DB.prototype.createKeyStream = function(op) {
if (this._stopping) {
return;
}
@ -183,11 +222,10 @@ DB.prototype.stop = function(callback) {
};
DB.prototype.close = function(callback) {
if (this._store && this._store.isOpen()) {
this._store.close(callback);
return;
if(!this._store || this._store.isClosed()){
return callback();
}
setImmediate(callback);
this._store.close(callback);
};
DB.prototype.getAPIMethods = function() {
@ -236,8 +274,8 @@ DB.prototype.getServiceTip = function(serviceName, callback) {
DB.prototype.getPrefix = function(service, callback) {
var self = this;
var keyBuf = Buffer.concat([ self._dbPrefix, new Buffer('prefix-', 'utf8'), new Buffer(service, 'utf8') ]);
var unusedBuf = Buffer.concat([ self._dbPrefix, new Buffer('nextUnused', 'utf8') ]);
var keyBuf = Buffer.concat([ self._dbPrefix, new Buffer('prefix-', 'utf8'), new Buffer(service, 'utf8') ]);
var unusedBuf = Buffer.concat([ self._dbPrefix, new Buffer('nextUnused', 'utf8') ]);
function getPrefix(next) {
@ -249,6 +287,7 @@ DB.prototype.getPrefix = function(service, callback) {
if (!buf) {
return next();
}
log.info('Db Service: service prefix for: ' + service + ' is: ' + buf.toString('hex'));
callback(null, buf);
});
@ -308,8 +347,13 @@ DB.prototype.getPrefix = function(service, callback) {
putPrefix,
putUnused
],
callback
);
function(err, prefix) {
if (err) {
return callback(err);
}
log.info('Db Service: service prefix for: ' + service + ' is: ' + prefix.toString('hex'));
callback(null, prefix);
});
};
module.exports = DB;

View File

@ -2,18 +2,18 @@
var BaseService = require('../../service');
var inherits = require('util').inherits;
var BitcoreRPC = require('bitcoind-rpc');
var FlocoreRPC = require('florincoind-rpc');
var FeeService = function(options) {
this._config = options.rpc || {
user: 'bitcoin',
user: 'florincoin',
pass: 'local321',
host: 'localhost',
protocol: 'http',
port: 8332
port: 7312
};
BaseService.call(this, options);
this._client = new BitcoreRPC(this._config);
this._client = new FlocoreRPC(this._config);
};
inherits(FeeService, BaseService);

View File

@ -43,7 +43,19 @@ Encoding.prototype.encodeHeaderValue = function(header) {
var heightBuf = new Buffer(4);
heightBuf.writeUInt32BE(header.height);
var chainworkBuf = new Buffer(header.chainwork, 'hex');
return Buffer.concat([hashBuf, versionBuf, prevHash, merkleRoot, tsBuf, bitsBuf, nonceBuf, heightBuf, chainworkBuf ]);
var nextHash = new Buffer(header.nextHash || new Array(65).join('0'), 'hex');
return Buffer.concat([
hashBuf,
versionBuf,
prevHash,
merkleRoot,
tsBuf,
bitsBuf,
nonceBuf,
heightBuf,
chainworkBuf,
nextHash
]);
};
Encoding.prototype.decodeHeaderValue = function(buffer) {
@ -55,7 +67,8 @@ Encoding.prototype.decodeHeaderValue = function(buffer) {
var bits = buffer.readUInt32BE(104);
var nonce = buffer.readUInt32BE(108);
var height = buffer.readUInt32BE(112);
var chainwork = buffer.slice(116).toString('hex');
var chainwork = buffer.slice(116, 116 + 32).toString('hex');
var nextHash = buffer.slice(116 + 32).toString('hex');
return {
hash: hash,
version: version,
@ -65,7 +78,8 @@ Encoding.prototype.decodeHeaderValue = function(buffer) {
bits: bits,
nonce: nonce,
height: height,
chainwork: chainwork
chainwork: chainwork,
nextHash: nextHash
};
};

File diff suppressed because it is too large

View File

@ -1,20 +1,22 @@
'use strict';
var tx = require('bcoin').tx;
var tx = require('fcoin').TX;
function Encoding(servicePrefix) {
this.servicePrefix = servicePrefix;
this.txPrefix = new Buffer('00', 'hex');
this.addressPrefix = new Buffer('01', 'hex');
}
Encoding.prototype.encodeMempoolTransactionKey = function(txid) {
var buffers = [this.servicePrefix];
var buffers = [this.servicePrefix, this.txPrefix];
var txidBuffer = new Buffer(txid, 'hex');
buffers.push(txidBuffer);
return Buffer.concat(buffers);
};
Encoding.prototype.decodeMempoolTransactionKey = function(buffer) {
return buffer.slice(2).toString('hex');
return buffer.slice(3).toString('hex');
};
Encoding.prototype.encodeMempoolTransactionValue = function(transaction) {
@ -25,5 +27,51 @@ Encoding.prototype.decodeMempoolTransactionValue = function(buffer) {
return tx.fromRaw(buffer);
};
Encoding.prototype.encodeMempoolAddressKey = function(address, txid, index, input) {
var buffers = [this.servicePrefix, this.addressPrefix];
var addressSizeBuffer = new Buffer(1);
addressSizeBuffer.writeUInt8(address.length);
var addressBuffer = new Buffer(address, 'utf8');
buffers.push(addressSizeBuffer);
buffers.push(addressBuffer);
var txidBuffer = new Buffer(txid || Array(65).join('0'), 'hex');
buffers.push(txidBuffer);
var indexBuffer = new Buffer(4);
indexBuffer.writeUInt32BE(index || 0);
buffers.push(indexBuffer);
// this is whether the address appears in an input (1) or output (0)
var inputBuffer = new Buffer(1);
inputBuffer.writeUInt8(input || 0);
buffers.push(inputBuffer);
return Buffer.concat(buffers);
};
Encoding.prototype.decodeMempoolAddressKey = function(buffer) {
var addressSize = buffer.readUInt8(3);
var address = buffer.slice(4, addressSize + 4).toString('utf8');
var txid = buffer.slice(addressSize + 4, addressSize + 36).toString('hex');
var index = buffer.readUInt32BE(addressSize + 36);
var input = buffer.readUInt8(addressSize + 40);
return {
address: address,
txid: txid,
index: index,
input: input
};
};
module.exports = Encoding;

View File

@ -1,31 +1,56 @@
'use strict';
var BaseService = require('../../service');
var util = require('util');
var utils = require('../../utils');
var Encoding = require('./encoding');
var index = require('../../');
var log = index.log;
var log = require('../..').log;
var utils = require('../../utils');
var MempoolService = function(options) {
BaseService.call(this, options);
this._subscriptions = {};
this._subscriptions.transaction = [];
this._db = this.node.services.db;
this._p2p = this.node.services.p2p;
this._network = this.node.network;
this._flush = options.flush;
this._enabled = false;
if (this._network === 'livenet') {
this._network = 'main';
}
if (this._network === 'regtest') {
this._network = 'testnet';
}
};
util.inherits(MempoolService, BaseService);
MempoolService.dependencies = ['db', 'block'];
MempoolService.dependencies = ['db'];
MempoolService.prototype.getAPIMethods = function() {
MempoolService.prototype.subscribe = function(name, emitter) {
this._subscriptions[name].push(emitter);
log.info(emitter.remoteAddress, 'subscribe:', 'mempool/' + name, 'total:', this._subscriptions[name].length);
};
MempoolService.prototype.unsubscribe = function(name, emitter) {
var index = this._subscriptions[name].indexOf(emitter);
if (index > -1) {
this._subscriptions[name].splice(index, 1);
}
log.info(emitter.remoteAddress, 'unsubscribe:', 'mempool/' + name, 'total:', this._subscriptions[name].length);
};
var methods = [
['getMempoolTransaction', this, this.getMempoolTransaction, 1]
['getMempoolTransaction', this, this.getMempoolTransaction, 1],
['getTxidsByAddress', this, this.getTxidsByAddress, 2],
];
return methods;
};
MempoolService.prototype.getPublishEvents = function() {
return [
{
name: 'mempool/transaction',
@ -34,26 +59,6 @@ MempoolService.prototype.getPublishEvents = function() {
unsubscribe: this.unsubscribe.bind(this, 'transaction')
}
];
};
MempoolService.prototype.subscribe = function(name, emitter) {
this._subscriptions[name].push(emitter);
log.info(emitter.remoteAddress, 'subscribe:', 'mempool/' + name, 'total:', this._subscriptions[name].length);
};
MempoolService.prototype.unsubscribe = function(name, emitter) {
var index = this._subscriptions[name].indexOf(emitter);
if (index > -1) {
this._subscriptions[name].splice(index, 1);
}
log.info(emitter.remoteAddress, 'unsubscribe:', 'mempool/' + name, 'total:', this._subscriptions[name].length);
};
MempoolService.prototype.start = function(callback) {
@ -64,17 +69,59 @@ MempoolService.prototype.start = function(callback) {
return callback(err);
}
self._encoding = new Encoding(prefix);
self._startSubscriptions();
if (self._flush) {
return self._flushMempool(callback);
}
log.info('Mempool Service: mempool disabled until full sync.');
callback();
});
};
MempoolService.prototype._flushMempool = function(callback) {
var self = this;
var totalCount = 0;
log.warn('Mempool Service: flushing mempool, this could take a minute.');
var criteria = {
gte: self._encoding.encodeMempoolTransactionKey(new Array(65).join('0')),
lte: self._encoding.encodeMempoolTransactionKey(new Array(65).join('f'))
};
var timer = setInterval(function() {
log.info('Mempool Service: removed: ' + totalCount + ' records during mempool flush.');
}, 5000);
timer.unref();
var stream = self._db.createReadStream(criteria);
stream.on('data', function(data) {
var ops = self._getAddressOperations(self._encoding.decodeMempoolTransactionValue(data.value));
ops.push({
type: 'del',
key: data.key
});
totalCount += ops.length;
self._db.batch(ops);
});
stream.on('end', function() {
clearInterval(timer);
log.info('Mempool Service: completed flushing: ' + totalCount + ' tx mempool records.');
callback();
});
};
MempoolService.prototype.onReorg = function(args, callback) {
var oldBlockList = args[1];
var removalOps = [];
var oldBlockList = args[1];
for(var i = 0; i < oldBlockList.length; i++) {
var block = oldBlockList[i];
@ -91,44 +138,124 @@ MempoolService.prototype.onReorg = function(args, callback) {
value: value
});
removalOps = removalOps.concat(this._getAddressOperations(tx, true));
}
}
callback(null, removalOps);
setImmediate(function() {
callback(null, removalOps);
});
};
MempoolService.prototype._startSubscriptions = function() {
if (this._subscribed) {
var self = this;
if (self._subscribed) {
return;
}
this._subscribed = true;
if (!this._bus) {
this._bus = this.node.openBus({remoteAddress: 'localhost-mempool'});
self._subscribed = true;
if (!self._bus) {
self._bus = self.node.openBus({remoteAddress: 'localhost-mempool'});
}
this._bus.on('p2p/transaction', this._onTransaction.bind(this));
this._bus.subscribe('p2p/transaction');
self._bus.on('p2p/transaction', self._onTransaction.bind(self));
self._bus.subscribe('p2p/transaction');
};
MempoolService.prototype.enable = function() {
log.info('Mempool Service: Mempool enabled.');
this._startSubscriptions();
this._enabled = true;
};
MempoolService.prototype.onBlock = function(block, callback) {
// remove this block's txs from mempool
var self = this;
var ops = block.txs.map(function(tx) {
return {
var ops = [];
for(var i = 0; i < block.txs.length; i++) {
var tx = block.txs[i];
// tx index
ops.push({
type: 'del',
key: self._encoding.encodeMempoolTransactionKey(tx.txid())
};
});
});
// address index
ops = ops.concat(self._getAddressOperations(tx));
}
callback(null, ops);
};
MempoolService.prototype._getAddressOperations = function(tx, reverse) {
var ops = [];
var address;
var action = reverse ? 'put' : 'del';
for(var i = 0; i < tx.outputs.length; i++) {
var output = tx.outputs[i];
address = utils.getAddress(output, this._network);
if (!address) {
continue;
}
ops.push({
type: action,
key: this._encoding.encodeMempoolAddressKey(address, tx.txid(), i, 0)
});
}
for(i = 0; i < tx.inputs.length; i++) {
var input = tx.inputs[i];
address = utils.getAddress(input, this._network);
if (!address) {
continue;
}
ops.push({
type: action,
key: this._encoding.encodeMempoolAddressKey(address, tx.txid(), i, 1)
});
}
return ops;
};
MempoolService.prototype._onTransaction = function(tx) {
this._db.put(this._encoding.encodeMempoolTransactionKey(tx.txid()),
this._encoding.encodeMempoolTransactionValue(tx));
var self = this;
var ops = [{
type: 'put',
key: self._encoding.encodeMempoolTransactionKey(tx.txid()),
value: self._encoding.encodeMempoolTransactionValue(tx)
}];
ops = ops.concat(self._getAddressOperations(tx, true));
self._db.batch(ops, function(err) {
if(err) {
log.error(err);
// self.node.stop();
}
for (var i = 0; i < self._subscriptions.transaction.length; i++) {
self._subscriptions.transaction[i].emit('mempool/transaction', tx);
}
});
};
MempoolService.prototype.getMempoolTransaction = function(txid, callback) {
@ -151,6 +278,42 @@ MempoolService.prototype.getMempoolTransaction = function(txid, callback) {
};
MempoolService.prototype.getTxidsByAddress = function(address, type, callback) {
var self = this;
var results = [];
var start = self._encoding.encodeMempoolAddressKey(address);
var end = Buffer.concat([ start.slice(0, -37), new Buffer(new Array(75).join('f'), 'hex') ]);
var criteria = {
gte: start,
lte: end
};
var stream = self._db.createKeyStream(criteria);
stream.on('error', function(err) {
return callback(err);
});
stream.on('end', function() {
callback(null, results);
});
stream.on('data', function(key) {
var addressInfo = self._encoding.decodeMempoolAddressKey(key);
if (type === 'input') {
type = 1;
} else if (type === 'output') {
type = 0;
}
if (type === 'both' || type === addressInfo.input) {
results.push({ txid: addressInfo.txid, height: 0xffffffff });
}
});
};
MempoolService.prototype.stop = function(callback) {
callback();
};
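The address scan in getTxidsByAddress above works because the start key encodes the address with an all-zero txid/index/input and the end key replaces those trailing 37 bytes (32 txid + 4 index + 1 input flag) with 0xff bytes, bracketing every entry for that address. A small standalone sketch of the idea; the prefix bytes and address are placeholders:

'use strict';
var prefix = Buffer.from('aa01', 'hex');                  // service + address prefix (placeholder)
var address = Buffer.from('FExampleAddr1234567', 'utf8');
var lenBuf = Buffer.from([address.length]);
var start = Buffer.concat([prefix, lenBuf, address, Buffer.alloc(37, 0x00)]);
var end = Buffer.concat([start.slice(0, -37), Buffer.alloc(37, 0xff)]);
console.log(start.length === end.length);                 // true: same key width
console.log(Buffer.compare(start, end) < 0);              // true: start sorts before end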

View File

@ -2,53 +2,51 @@
var index = require('../../');
var log = index.log;
var bcoin = require('bcoin');
var EE = require('events').EventEmitter;
var bcoin = require('fcoin');
// var bzmq = require('bzmq');
var Bcoin = function(options) {
this._config = this._getConfig(options);
this.emitter = new EE();
};
Bcoin.prototype.start = function(done) {
var self = this;
self._bcoin = bcoin.fullnode(self._config);
Bcoin.prototype.start = function(callback) {
this._bcoin = new bcoin.FullNode(this._config);
log.info('Starting Bcoin full node...');
log.info('Starting fcoin FullNode...');
self._bcoin.open().then(function() {
self._bcoin.connect().then(function() {
log.info('Waiting for Bcoin to sync');
self._bcoin.startSync();
if (self._bcoin.chain.synced){
return done();
}
self._bcoin.chain.once('full', function() {
done();
});
this._bcoin.open().then(() => {
this._bcoin.connect().then(() => {
this._bcoin.startSync();
callback();
});
});
};
Bcoin.prototype.stop = function() {
this._bcoin.stopSync();
this._bcoin.disconnect();
this._bcoin.close();
Bcoin.prototype.stop = function(callback) {
this._bcoin.close().then(() => {
log.info("fcoin shutdown")
callback()
});
};
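A hedged sketch of the same fcoin FullNode lifecycle used above (open, connect, startSync), with the error handling that the promise chain in this hunk omits; the option names are taken from the config below, and memory: true is used here only so the sketch does not touch disk:

'use strict';
var bcoin = require('fcoin');

// Sketch only: log startup failures instead of leaving an unhandled rejection.
var node = new bcoin.FullNode({ network: 'main', memory: true, workers: false, listen: false });
node.open()
  .then(function() { return node.connect(); })
  .then(function() {
    node.startSync();
    console.log('fcoin full node syncing');
  })
  .catch(function(err) {
    console.error('fcoin failed to start:', err);
  });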
// --- privates
Bcoin.prototype._getConfig = function(options) {
var config = {
db: 'leveldb',
checkpoints: true,
network: options.network || 'main',
listen: true,
port: options.port,
logFile: true,
logConsole: true,
logLevel: 'info',
port: options.port,
persistent: true,
workers: true
// indexTx: true,
// indexAddress: true,
checkpoints: true,
memory: false,
workers: true,
listen: true
};
if (options.prefix) {
config.prefix = options.prefix;

View File

@ -1,6 +1,6 @@
'use strict';
var p2p = require('bitcore-p2p');
var p2p = require('flocore-p2p');
var LRU = require('lru-cache');
var util = require('util');
var index = require('../../');
@ -8,7 +8,11 @@ var log = index.log;
var BaseService = require('../../service');
var assert = require('assert');
var Bcoin = require('./bcoin');
var Networks = require('bitcore-lib').Networks;
var BcoinBlock = require('fcoin').Block;
var BcoinTx = require('fcoin').TX;
var Networks = require('flocore-lib').Networks;
var BitcoreRPC = require('bitcoind-rpc');
var LRU = require('lru-cache');
var P2P = function(options) {
@ -19,11 +23,17 @@ var P2P = function(options) {
BaseService.call(this, options);
this._options = options;
this._initRPC(options);
this._initP2P();
this._initPubSub();
this._bcoin = null;
this._currentBestHeight = null;
this._latestBits = 0x1d00ffff;
this._outgoingTxs = LRU(100); // these are outgoing txs that are awaiting getdata messages
this._blockCache = options.blockCacheCount || LRU({
max: 10,
maxAge: 1000 * 60 * 5}); // keep this modest to avoid memory dumps [hash -> block]
};
util.inherits(P2P, BaseService);
@ -37,7 +47,7 @@ P2P.prototype.clearInventoryCache = function() {
P2P.prototype.getAPIMethods = function() {
var methods = [
['clearInventoryCache', this, this.clearInventoryCache, 0],
['getBlocks', this, this.getBlocks, 1],
['getP2PBlock', this, this.getP2PBlock, 1],
['getHeaders', this, this.getHeaders, 1],
['getMempool', this, this.getMempool, 0],
['sendTransaction', this, this.sendTransaction, 1]
@ -49,12 +59,38 @@ P2P.prototype.getNumberOfPeers = function() {
return this._pool.numberConnected;
};
P2P.prototype.getBlocks = function(filter) {
P2P.prototype.getP2PBlock = function(opts, callback) {
var peer = this._getPeer();
var blockFilter = this._setResourceFilter(filter, 'blocks');
peer.sendMessage(this.messages.GetBlocks(blockFilter));
// opts is { filter: {<start and end hashes>}, blockHash: block hash we want }
var self = this;
// do we already have this block in our cache?
var block = self._blockCache.get(opts.blockHash);
if (block) {
return callback(block);
}
var peer = self._getPeer();
var blockFilter = self._setResourceFilter(opts.filter, 'blocks');
// there is a possibility that the main chain has reorganized after we last
// computed our expected block and before our peer computes what block to
// send us in response.
// In this case, we want to abandon this block and remove its listener.
// Our caller should also reset its expectations and re-compute its expected
// block and call us again.
// If we are wrong about the reorg, then the peer is just really slow and we ought
// to use this peer anyway.
setTimeout(function() {
self.removeListener(opts.blockHash, callback);
}, 5000);
self.once(opts.blockHash, callback);
peer.sendMessage(self.messages.GetBlocks(blockFilter, { Block: BcoinBlock }));
};
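The request above relies on a wait-with-timeout pattern: listen once for an event keyed by the block hash, and drop the listener after five seconds if the peer never answers (for example because of the reorg case the comment describes). A generic, self-contained sketch of that pattern, using a plain EventEmitter to stand in for the service:

'use strict';
var EventEmitter = require('events').EventEmitter;

var emitter = new EventEmitter();

function waitForBlock(blockHash, callback) {
  emitter.once(blockHash, callback);
  setTimeout(function() {
    // no-op if the block already arrived and once() removed the listener
    emitter.removeListener(blockHash, callback);
  }, 5000).unref();
}

var hash = '22'.repeat(32);
waitForBlock(hash, function(block) {
  console.log('got block at height', block.height);
});
emitter.emit(hash, { height: 100 }); // simulates the peerblock handler firing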
P2P.prototype.getHeaders = function(filter) {
@ -65,14 +101,9 @@ P2P.prototype.getHeaders = function(filter) {
};
P2P.prototype.getMempool = function(filter) {
P2P.prototype.getMempool = function() {
var peer = this._getPeer();
this._setResourceFilter(filter, 'mempool');
peer.sendMessage(this.messages.MemPool());
};
P2P.prototype.getPublishEvents = function() {
@ -99,8 +130,8 @@ P2P.prototype.getPublishEvents = function() {
};
P2P.prototype.sendTransaction = function(tx) {
p2p.sendMessage(this.messages.Inventory(tx));
P2P.prototype.sendTransaction = function(tx, callback) {
return this._client.sendRawTransaction(tx, callback);
};
@ -122,9 +153,11 @@ P2P.prototype._disconnectPool = function() {
};
P2P.prototype.stop = function(callback) {
if (this._bcoin){
return this._bcoin.stop(callback);
}
setImmediate(callback);
};
@ -166,16 +199,27 @@ P2P.prototype._broadcast = function(subscribers, name, entity) {
}
};
P2P.prototype._connect = function() {
P2P.prototype._setRetryInterval = function() {
var self = this;
if (!self._retryInterval && !self.node.stopping) {
self._retryInterval = setInterval(function() {
log.info('Retrying connection to p2p network.');
self._pool.connect();
}, 5000);
}
};
P2P.prototype._connect = function() {
var self = this;
log.info('Connecting to p2p network.');
self._pool.connect();
var retryInterval = setInterval(function() {
self._pool.connect();
}, 5000);
self._pool.once('peerready', function() {
clearInterval(retryInterval);
});
self._setRetryInterval();
};
P2P.prototype._getBestHeight = function() {
@ -209,6 +253,23 @@ P2P.prototype._initCache = function() {
this._inv = LRU(1000);
};
P2P.prototype._initRPC = function (options) {
var port = 7313;
if (this.node.network === 'testnet') {
port = 17313;
}
this._config = options.rpc || {
user: 'flocore',
pass: 'flocorepassw123',
host: 'localhost',
protocol: 'http',
port: port
};
this._client = new BitcoreRPC(this._config);
}
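The defaults above (user flocore, port 7313 on mainnet, 17313 on testnet) can be overridden by supplying an rpc object in the p2p service options. A hedged sketch of the corresponding flocore-node.json fragment, expressed as a JS object; the servicesConfig.p2p key path is an assumption based on how the test fixture later in this diff wires its p2p options:

'use strict';
// Sketch: these values would reach P2P.prototype._initRPC as options.rpc (path assumed).
var servicesConfig = {
  p2p: {
    rpc: {
      user: 'flocore',
      pass: 'flocorepassw123',
      host: 'localhost',
      protocol: 'http',
      port: 7313
    }
  }
};
console.log(JSON.stringify(servicesConfig, null, 2));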
P2P.prototype._initP2P = function() {
this._maxPeers = this._options.maxPeers || 60;
this._minPeers = this._options.minPeers || 0;
@ -217,7 +278,7 @@ P2P.prototype._initP2P = function() {
if (this.node.network === 'regtest') {
Networks.enableRegtest();
}
this.messages = new p2p.Messages({ network: Networks.get(this.node.network) });
this.messages = new p2p.Messages({ network: Networks.get(this.node.network), Transaction: BcoinTx, Block: BcoinBlock });
this._peerHeights = [];
this._peers = [];
this._peerIndex = 0;
@ -230,8 +291,10 @@ P2P.prototype._initPool = function() {
opts.addrs = this._configPeers;
}
opts.dnsSeed = false;
opts.listenAddr = false;
opts.maxPeers = this._maxPeers;
opts.network = this.node.network;
p2p.Pool.RetrySeconds = 3;
this._pool = new p2p.Pool(opts);
};
@ -243,18 +306,30 @@ P2P.prototype._initPubSub = function() {
};
P2P.prototype._onPeerBlock = function(peer, message) {
this._blockCache.set(message.block.rhash(), message.block);
this.emit(message.block.rhash(), message.block);
this._broadcast(this.subscriptions.block, 'p2p/block', message.block);
};
P2P.prototype._onPeerDisconnect = function(peer, addr) {
if (!this.node.stopping) {
this._connect();
return;
this._removePeer(peer);
if (this._peers.length < 1) {
this._setRetryInterval();
}
this._removePeer(peer);
log.info('Disconnected from peer: ' + addr.ip.v4);
};
P2P.prototype._onPeerGetData = function(peer, message) {
// we can only respond to tx messages
var txId = message.inventory[0].hash.reverse().toString('hex');
var tx = this._outgoingTxs.get(txId);
if (tx) {
peer.sendMessage(this.messages.Transaction(tx, { Transaction: BcoinTx }));
}
};
P2P.prototype._onPeerHeaders = function(peer, message) {
@ -282,10 +357,38 @@ P2P.prototype._onPeerInventory = function(peer, message) {
}
};
P2P.prototype._matchNetwork = function(network) {
if (this.node.network !== network.name &&
this.node.network !== network.alias) {
log.error('Configured network: "' + this.node.network +
'" does not match our peer\'s reported network: "' +
network.name + '".');
// return this.node.stop();
return ;
}
return this.node.network === network.name ? network.name : network.alias;
};
P2P.prototype._onPeerReady = function(peer, addr) {
// clear any interval timers that we previously set
if (this._retryInterval) {
clearInterval(this._retryInterval);
this._retryInterval = null;
}
// want to make sure the peer we are connecting to matches our network config.
var network = this._matchNetwork(peer.network);
if (!network) {
return;
}
log.info('Connected to peer: ' + addr.ip.v4 + ', network: ' +
peer.network.alias + ', version: ' + peer.version + ', subversion: ' +
network + ', version: ' + peer.version + ', subversion: ' +
peer.subversion + ', status: ' + peer.status + ', port: ' +
peer.port + ', best height: ' + peer.bestHeight);
@ -300,10 +403,7 @@ P2P.prototype._onPeerReady = function(peer, addr) {
P2P.prototype._onPeerTx = function(peer, message) {
var filteredMessage = this._applyMempoolFilter(message);
if (filteredMessage) {
this._broadcast(this.subscriptions.transaction, 'p2p/transaction', message.transaction);
}
this._broadcast(this.subscriptions.transaction, 'p2p/transaction', message.transaction);
};
P2P.prototype._removePeer = function(peer) {
@ -319,41 +419,53 @@ P2P.prototype._setListeners = function() {
self._pool.on('peertx', self._onPeerTx.bind(self));
self._pool.on('peerblock', self._onPeerBlock.bind(self));
self._pool.on('peerheaders', self._onPeerHeaders.bind(self));
self._pool.on('peergetdata', self._onPeerGetData.bind(self));
self.node.on('ready', self._connect.bind(self));
};
P2P.prototype._setResourceFilter = function(filter, resource) {
P2P.prototype._setResourceFilter = function(filter) {
if (resource === 'headers' || resource === 'blocks') {
assert(filter && filter.startHash, 'A "startHash" field is required to retrieve headers or blocks');
if (!filter.endHash) {
filter.endHash = 0;
// FLOSight Error Correction from RanchiMall 17th May 2021: removed the unhandled assert and replaced it with logging of the error
if (!filter || !filter.startHash) {
log.error('A "startHash" field is required to retrieve headers or blocks');
}
return { starts: [filter.startHash], stop: filter.endHash };
}
if (resource === 'mempool') {
this._mempoolFilter = filter;
return;
// assert(filter && filter.startHash, 'A "startHash" field is required to retrieve headers or blocks');
if (!filter.endHash) {
filter.endHash = 0;
}
return { starts: [filter.startHash], stop: filter.endHash };
};
P2P.prototype._startBcoin = function(callback) {
var self = this;
const network = ['livenet', 'live', 'main', 'mainnet'].indexOf(this.node.network) !== -1? 'main' : 'testnet';
self._bcoin = new Bcoin({
var network;
var port;
if (['livenet', 'live', 'main', 'mainnet'].indexOf(this.node.network) !== -1) {
network = 'main';
port = this._configPeers[0].port || 7312;
} else if (this.node.network !== 'regtest') {
network = 'testnet';
port = this._configPeers[0].port || 17312;
} else {
network = this.node.network;
port = this._configPeers[0].port || 17412;
}
this._bcoin = new Bcoin({
network: network,
prefix: self.node.datadir,
port: 48333
prefix: this.node.datadir,
port: port
});
self._bcoin.start(callback);
this._bcoin.start(callback);
};
P2P.prototype._startBcoinIfNecessary = function(callback) {
if (!this._hasPeers()) {
log.info('Peers not explicitly configured, starting a local bcoin node.');
this._configPeers = [{ip: {v4: '127.0.0.1'}, port: 48333}];
this._configPeers = [{ ip: { v4: '127.0.0.1'} }];
return this._startBcoin(callback);
}
setImmediate(callback);

View File

@ -91,7 +91,7 @@ TimestampService.prototype.onBlock = function(block, callback) {
var operations = [];
var ts = block.ts;
var ts = block.time;
var hash = block.rhash();
if (ts <= this._lastBlockTimestamp) {
@ -121,7 +121,7 @@ TimestampService.prototype.onBlock = function(block, callback) {
TimestampService.prototype.onReorg = function(args, callback) {
var self = this;
var commonAncestorHeader = args[0];
var commonAncestorHash = args[0];
var oldBlockList = args[1];
var removalOps = [];
@ -131,7 +131,7 @@ TimestampService.prototype.onReorg = function(args, callback) {
removalOps.concat([
{
type: 'del',
key: self._encoding.encodeTimestampBlockKey(block.ts),
key: self._encoding.encodeTimestampBlockKey(block.__ts),
},
{
type: 'del',
@ -141,7 +141,7 @@ TimestampService.prototype.onReorg = function(args, callback) {
});
// look up the adjusted timestamp from our own database and set the lastTimestamp to it
self.getTimestamp(commonAncestorHeader.hash, function(err, timestamp) {
self.getTimestamp(commonAncestorHash, function(err, timestamp) {
if (err) {
return callback(err);
@ -159,12 +159,17 @@ TimestampService.prototype.getTimestampSync = function(hash) {
TimestampService.prototype.getTimestamp = function(hash, callback) {
var self = this;
self._db.get(self._encoding.encodeBlockTimestampKey(hash), function(err, data) {
if (err) {
return callback(err);
}
if (!data) {
return callback();
}
callback(null, self._encoding.decodeBlockTimestampValue(data));
});
};
TimestampService.prototype.getHash = function(timestamp, callback) {

View File

@ -1,23 +1,28 @@
'use strict';
var Tx = require('bcoin').tx;
var Tx = require('fcoin').TX;
function Encoding(servicePrefix) {
this.servicePrefix = servicePrefix;
this.txIndex = new Buffer('00', 'hex');
this.spentIndex = new Buffer('01', 'hex');
this.doubleSpentIndex = new Buffer('02', 'hex');
}
Encoding.prototype.encodeTransactionKey = function(txid) {
return Buffer.concat([this.servicePrefix, new Buffer(txid, 'hex')]);
return Buffer.concat([this.servicePrefix, this.txIndex, new Buffer(txid, 'hex')]);
};
Encoding.prototype.decodeTransactionKey = function(buffer) {
return buffer.slice(2).toString('hex');
return buffer.slice(3).toString('hex');
};
Encoding.prototype.encodeTransactionValue = function(transaction) {
var heightBuffer = new Buffer(4);
heightBuffer.writeUInt32BE(transaction.__height);
var hashBuffer = new Buffer(transaction.__blockhash, 'hex');
var timestampBuffer = new Buffer(4);
timestampBuffer.writeUInt32BE(transaction.__timestamp);
@ -30,28 +35,108 @@ Encoding.prototype.encodeTransactionValue = function(transaction) {
var inputValuesLengthBuffer = new Buffer(2);
inputValuesLengthBuffer.writeUInt16BE(inputValues.length);
return new Buffer.concat([heightBuffer, timestampBuffer,
return new Buffer.concat([heightBuffer, hashBuffer, timestampBuffer,
inputValuesLengthBuffer, inputValuesBuffer, transaction.toRaw()]);
};
Encoding.prototype.decodeTransactionValue = function(buffer) {
var height = buffer.readUInt32BE();
var timestamp = buffer.readUInt32BE(4);
var inputValuesLength = buffer.readUInt16BE(8);
var blockhash = buffer.slice(4, 36).toString('hex');
var timestamp = buffer.readUInt32BE(36);
var inputValuesLength = buffer.readUInt16BE(40);
var inputValues = [];
for(var i = 0; i < inputValuesLength; i++) {
inputValues.push(buffer.readDoubleBE(i * 8 + 10));
inputValues.push(buffer.readDoubleBE(i * 8 + 42));
}
var txBuf = buffer.slice(inputValues.length * 8 + 10);
var txBuf = buffer.slice(inputValues.length * 8 + 42);
var transaction = Tx.fromRaw(txBuf);
transaction.__height = height;
transaction.__blockhash = blockhash;
transaction.__inputValues = inputValues;
transaction.__timestamp = timestamp;
return transaction;
};
// for every input we receive, we make an entry for what output it spends
Encoding.prototype.encodeSpentKey = function(txid, outputIndex) {
var outputIndexBuffer = new Buffer(4);
outputIndexBuffer.writeUInt32BE(outputIndex);
return Buffer.concat([this.servicePrefix, this.spentIndex, new Buffer(txid, 'hex'), outputIndexBuffer]);
};
Encoding.prototype.decodeSpentKey = function(buffer) {
var txid = buffer.slice(3, 35).toString('hex');
var outputIndex = buffer.readUInt32BE(35);
return {
txid: txid,
outputIndex: outputIndex
};
};
Encoding.prototype.encodeSpentValue = function(txid, inputIndex, blockHeight, blockHash) {
var inputIndexBuffer = new Buffer(4);
inputIndexBuffer.writeUInt32BE(inputIndex);
var blockHeightBuffer = new Buffer(4);
blockHeightBuffer.writeUInt32BE(blockHeight);
var blockHashBuffer = new Buffer(blockHash, 'hex');
return Buffer.concat([new Buffer(txid, 'hex'), inputIndexBuffer, blockHeightBuffer, blockHashBuffer]);
};
Encoding.prototype.decodeSpentValue = function(buffer) {
var txid = buffer.slice(0, 32).toString('hex');
var inputIndex = buffer.readUInt32BE(32);
var blockHeight = buffer.readUInt32BE(36, 40);
var blockHash = buffer.slice(40).toString('hex');
return {
txid: txid,
inputIndex: inputIndex,
blockHeight: blockHeight,
blockHash: blockHash
};
};
Encoding.prototype.encodeDoubleSpentKey = function(txid, outputIndex) {
var outputIndexBuffer = new Buffer(4);
outputIndexBuffer.writeUInt32BE(outputIndex);
return Buffer.concat([this.servicePrefix, this.doubleSpentIndex, new Buffer(txid, 'hex'), outputIndexBuffer]);
};
Encoding.prototype.decodeDoubleSpentKey = function(buffer) {
var txid = buffer.slice(3, 35).toString('hex');
var outputIndex = buffer.readUInt32BE(35);
return {
txid: txid,
outputIndex: outputIndex
};
};
Encoding.prototype.encodeDoubleSpentValue = function(txid, inputIndex, blockHeight, blockHash) {
var inputIndexBuffer = new Buffer(4);
inputIndexBuffer.writeUInt32BE(inputIndex);
var blockHeightBuffer = new Buffer(4);
blockHeightBuffer.writeUInt32BE(blockHeight);
var blockHashBuffer = new Buffer(blockHash, 'hex');
return Buffer.concat([new Buffer(txid, 'hex'), inputIndexBuffer, blockHeightBuffer, blockHashBuffer]);
};
Encoding.prototype.decodeDoubleSpentValue = function(buffer) {
var txid = buffer.slice(0, 32).toString('hex');
var inputIndex = buffer.readUInt32BE(32, 36);
var blockHeight = buffer.readUInt32BE(36, 40);
var blockHash = buffer.slice(40).toString('hex');
return {
txid: txid,
inputIndex: inputIndex,
blockHeight: blockHeight,
blockHash: blockHash
};
};
module.exports = Encoding;
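A standalone round-trip sketch of the spent-index value added above (spending txid, input index, block height, block hash packed as 32 + 4 + 4 + 32 bytes); this is the record the transaction service below decodes to fill in spentTxId, spentIndex, spentHeight and spentBlockHash:

'use strict';
function encodeSpentValue(txid, inputIndex, blockHeight, blockHash) {
  var indexBuf = Buffer.alloc(4);
  indexBuf.writeUInt32BE(inputIndex);
  var heightBuf = Buffer.alloc(4);
  heightBuf.writeUInt32BE(blockHeight);
  return Buffer.concat([Buffer.from(txid, 'hex'), indexBuf, heightBuf, Buffer.from(blockHash, 'hex')]);
}
var value = encodeSpentValue('11'.repeat(32), 0, 4200000, '22'.repeat(32));
console.log(value.slice(0, 32).toString('hex')); // spending txid
console.log(value.readUInt32BE(32));             // input index: 0
console.log(value.readUInt32BE(36));             // block height: 4200000
console.log(value.slice(40).toString('hex'));    // block hash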

View File

@ -3,11 +3,11 @@
var BaseService = require('../../service');
var inherits = require('util').inherits;
var Encoding = require('./encoding');
var utils = require('../../utils');
var _ = require('lodash');
var log = require('../../index').log;
var async = require('async');
var assert = require('assert');
var LRU = require('lru-cache');
var log = require('../../index').log;
function TransactionService(options) {
BaseService.call(this, options);
@ -17,6 +17,17 @@ function TransactionService(options) {
this._header = this.node.services.header;
this._p2p = this.node.services.p2p;
this._timestamp = this.node.services.timestamp;
this._network = this.node.network;
if (this._network === 'livenet') {
this._network = 'main';
}
if (this._network === 'regtest') {
this._network = 'testnet';
}
// caches
this._cacheTx = LRU(1000);
}
inherits(TransactionService, BaseService);
@ -24,9 +35,10 @@ inherits(TransactionService, BaseService);
TransactionService.dependencies = [
'p2p',
'db',
'block',
'timestamp',
'mempool'
'mempool',
'block',
'header'
];
// ---- start public function prototypes
@ -35,68 +47,171 @@ TransactionService.prototype.getAPIMethods = function() {
['getRawTransaction', this, this.getRawTransaction, 1],
['getTransaction', this, this.getTransaction, 1],
['getDetailedTransaction', this, this.getDetailedTransaction, 1],
['getInputValues', this, this.getInputValues, 1]
['setTxMetaInfo', this, this.setTxMetaInfo, 2]
];
};
TransactionService.prototype.getDetailedTransaction = function(txid, options, callback) {
this.getTransaction(txid, options, callback);
var self = this;
self.getTransaction(txid, options, function(err, tx) {
if (err) {
return callback(err);
}
if (!tx) {
return callback();
}
// get the spentTxId, spentHeight, spentIndex, spendBlockHash
async.parallel([
function(next) {
async.eachOfLimit(tx.outputs, 4, function(output, index, next) {
self._db.get(self._encoding.encodeSpentKey(txid, index), function(err, value) {
if (err) {
return next(err);
}
if (!value) {
return next();
}
var spentIndex = self._encoding.decodeSpentValue(value);
tx.outputs[index].spentTxId = spentIndex.txid;
tx.outputs[index].spentIndex = spentIndex.inputIndex;
tx.outputs[index].spentHeight = spentIndex.blockHeight;
tx.outputs[index].spentBlockHash = spentIndex.blockHash;
next();
});
}, next);
},
function(next) {
async.eachOfLimit(tx.inputs, 4, function(input, index, next) {
self._db.get(self._encoding.encodeDoubleSpentKey(input.prevout.txid(), index), function(err, value) {
if (err) {
return next(err);
}
if (!value) {
return next();
}
var doubleSpendInfo = self._encoding.decodeDoubleSpentValue(value);
tx.inputs[index].doubleSpentTxID = doubleSpendInfo.txid;
next();
});
}, next);
}
], function(err) {
if (err) {
return callback(err);
}
callback(null, tx);
});
});
};
TransactionService.prototype.getTransaction = function(txid, options, callback) {
var self = this;
if (typeof callback !== 'function') {
if (!_.isFunction(callback)) {
callback = options;
}
var cacheTx = self._cacheTx.get(txid);
if (cacheTx) {
return callback(null, cacheTx);
}
async.waterfall([
function(next) {
self._getTransaction(txid, options, next);
},
self._getTransaction.bind(self, txid, options),
self._getMempoolTransaction.bind(self),
self.getInputValues.bind(self),
self._setMetaInfo.bind(self)
], callback);
self.setTxMetaInfo.bind(self)
], function(err, tx) {
if (err) {
return callback(err);
}
if (tx && tx.confirmations >= 6) {
self._cacheTx.set(txid, tx);
}
callback(err, tx);
});
};
TransactionService.prototype._setMetaInfo = function(tx, options, callback) {
TransactionService.prototype.setTxMetaInfo = function(tx, options, callback) {
var self = this;
if (!tx) {
return callback();
}
// output values
var outputSatoshis = 0;
tx.outputs.forEach(function(output) {
outputSatoshis += output.value;
});
tx.outputSatoshis = outputSatoshis;
//input values
if (!tx.inputs[0].isCoinbase()) {
var inputSatoshis = 0;
tx.__inputValues.forEach(function(val) {
if (val >+ 0) {
inputSatoshis += val;
async.waterfall([
function(next) {
if (tx.__inputValues) {
return next(null, tx);
}
});
var feeSatoshis = inputSatoshis - outputSatoshis;
tx.inputSatoshis = inputSatoshis;
tx.feeSatoshis = feeSatoshis;
// the txs that contain these input values could themselves be unconfirmed
// we are also assuming that this tx is from the mempool
self._getInputValues(tx, options, function(err, inputValues) {
}
if (err) {
return callback(err);
}
callback(null, tx);
tx.__inputValues = inputValues;
tx.confirmations = 0;
tx.blockHash = null;
tx.__blockHash = null;
next(null, tx);
});
},
function(tx, next) {
// output values
var outputSatoshis = 0;
tx.outputs.forEach(function(output) {
outputSatoshis += output.value;
});
tx.outputSatoshis = outputSatoshis;
//input values
if (!tx.inputs[0].isCoinbase()) {
var inputSatoshis = 0;
// FLOSight Error Correction from RanchiMall 17th May 2021: removed the unhandled assert and replaced it with logging of the error
if (tx.__inputValues.length != tx.inputs.length) {
log.error('Transaction Service: input values length is not the same as the number of inputs.');
}
// assert(tx.__inputValues.length === tx.inputs.length, 'Transaction Service: input values length is not the same as the number of inputs.');
tx.__inputValues.forEach(function(val) {
if (val > 0) {
inputSatoshis += val;
}
});
var feeSatoshis = inputSatoshis - outputSatoshis;
tx.inputSatoshis = inputSatoshis;
tx.feeSatoshis = feeSatoshis;
}
next(null, tx);
}
], function(err, tx) {
if (err) {
return callback(err);
}
callback(null, tx);
});
};
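The satoshi math in setTxMetaInfo reduces to: sum the output values, sum the resolved __inputValues for a non-coinbase tx, and take the difference as the fee. A tiny worked sketch with made-up values:

'use strict';
var inputValues = [5000000, 2500000];                  // resolved prev-output values (satoshis)
var outputs = [{ value: 7000000 }, { value: 400000 }];
var inputSatoshis = inputValues.reduce(function(sum, val) {
  return val > 0 ? sum + val : sum;
}, 0);
var outputSatoshis = outputs.reduce(function(sum, output) {
  return sum + output.value;
}, 0);
console.log(inputSatoshis, outputSatoshis, inputSatoshis - outputSatoshis);
// 7500000 7400000 100000 (feeSatoshis)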
@ -119,9 +234,7 @@ TransactionService.prototype._getMempoolTransaction = function(txid, tx, options
return callback(null, tx, options);
}
tx.confirmations = 0;
callback(null, tx, options);
});
};
@ -130,8 +243,19 @@ TransactionService.prototype._getTransaction = function(txid, options, callback)
var self = this;
var key = self._encoding.encodeTransactionKey(txid);
// txs will either be in the index, in the current block at LOWER tx indexes,
// or they don't exist for the purposes of this function
// inputValues will be on the tx already by this point.
var currentBlockTx = options && options.processedTxs &&
options.processedTxs[txid] ? options.processedTxs[txid] : null;
if (currentBlockTx) {
return setImmediate(function() {
callback(null, txid, currentBlockTx, options);
});
}
var key = self._encoding.encodeTransactionKey(txid);
self._db.get(key, function(err, tx) {
if (err) {
@ -143,85 +267,74 @@ TransactionService.prototype._getTransaction = function(txid, options, callback)
}
tx = self._encoding.decodeTransactionValue(tx);
tx.confirmations = self._header.getBestHeight() - tx.__height;
tx.confirmations = self._block.getTip().height - tx.__height + 1;
tx.__confirmations = self._block.getTip().height - tx.__height + 1;
tx.height = tx.__height;
tx.blockhash = tx.__blockhash;
self._header.getBlockHeader(tx.__height, function(err, header) {
if (err) {
return callback(err);
}
if (header) {
tx.blockHash = header.hash;
}
callback(null, txid, tx, options);
});
callback(null, txid, tx, options);
});
};
TransactionService.prototype.getInputValues = function(tx, options, callback) {
TransactionService.prototype._getInputValues = function(tx, options, callback) {
var self = this;
if (!tx) {
return callback(null, tx, options);
}
var _tx = tx;
async.eachOfLimit(tx.inputs, 4, function(input, index, next) {
async.mapLimit(tx.inputs, 4, function(input, next) {
if (!tx.__inputValues) {
tx.__inputValues = [];
}
var inputSatoshis = tx.__inputValues[index];
if (inputSatoshis >= 0 || input.isCoinbase()) {
return next();
if (input.isCoinbase()) {
return next(null, 0);
}
var outputIndex = input.prevout.index;
self._getTransaction(input.prevout.txid(), options, function(err, txid, _tx) {
async.waterfall([
// check tx index first, most likely place
function(next) {
self._getTransaction(input.prevout.txid(), options, next);
},
// if not there, then check mempool
function(txid, tx, options, next) {
if (tx) {
return next(null, txid, tx);
}
self._mempool.getMempoolTransaction(input.prevout.txid(), function(err, memTx) {
if (err) {
return next(err);
}
next(null, txid, memTx);
});
},
// if not in mempool or tx index, we just don't have it, yet?
function(txid, tx, next) {
if (!tx) {
return next(log.error('Transaction Service: prev transaction: (' + input.prevout.txid() + ') for tx: ' +
_tx.txid() + ' at input index: ' + outputIndex + ' is missing from the index or not in the memory pool. It could be' +
' that the parent tx has not yet been relayed to us, but will be relayed in the near future.'));
}
if (err || !_tx) {
return next(err || new Error('tx not found for tx id: ' + input.prevout.txid()));
var output = tx.outputs[outputIndex];
// FLOSight Error Correction from RanchiMall 17th May 2021: removed the unhandled assert and replaced it with logging of the error
if (!output) {
log.error('Expected an output, but did not get one for tx: ' + tx.txid() + ' outputIndex: ' + outputIndex);
}
// assert(output, 'Expected an output, but did not get one for tx: ' + tx.txid() + ' outputIndex: ' + outputIndex);
next(null, output.value);
}
var output = _tx.outputs[outputIndex];
assert(output, 'Expected an output, but did not get one for tx: ' + _tx.txid() + ' outputIndex: ' + outputIndex);
tx.__inputValues[index] = output.value;
next();
});
}, function(err) {
if (err) {
return callback(err);
}
var key = self._encoding.encodeTransactionKey(tx.txid());
var value = self._encoding.encodeTransactionValue(tx);
self._db.put(key, value, function(err) {
], function(err, val) {
if (err) {
return callback(err);
return next(err);
}
callback(null, tx, options);
next(null, val);
});
}, callback);
});
};
TransactionService.prototype.sendTransaction = function(tx, callback) {
this._p2p.sendTransaction(tx, callback);
};
TransactionService.prototype.start = function(callback) {
@ -253,16 +366,31 @@ TransactionService.prototype._getBlockTimestamp = function(hash) {
TransactionService.prototype.onBlock = function(block, callback) {
var self = this;
var processedTxs = {};
if (self.node.stopping) {
return callback();
}
var operations = block.txs.map(function(tx) {
return self._processTransaction(tx, { block: block });
});
async.mapSeries(block.txs, function(tx, next) {
callback(null, operations);
processedTxs[tx.txid()] = tx;
self._processTransaction(tx, { block: block, processedTxs: processedTxs }, next);
}, function(err, operations) {
if (err) {
return callback(err);
}
// FLOSight Error Correction from RanchiMall 17th May 2021: removed the unhandled assert and replaced it with logging of the error
if (block.txs.length != operations.length) {
log.error('It seems we are not indexing the correct number of transactions.');
}
// assert(block.txs.length === operations.length, 'It seems we are not indexing the correct number of transactions.');
callback(null, _.flattenDeep(operations));
});
};
@ -274,6 +402,8 @@ TransactionService.prototype.onReorg = function(args, callback) {
var removalOps = [];
// remove the txid -> tx entries
// remove the prevTxid, outputIndex -> txid, inputIndex
for(var i = 0; i < oldBlockList.length; i++) {
var block = oldBlockList[i];
@ -287,34 +417,126 @@ TransactionService.prototype.onReorg = function(args, callback) {
key: self._encoding.encodeTransactionKey(tx.txid())
});
// remove all the spent index information
for(var k = 0; k < tx.inputs.length; k++) {
var input = tx.inputs[k];
removalOps.push({
type: 'del',
key: self._encoding.encodeSpentKey(input.prevout.txid(), input.prevout.index)
});
}
}
}
callback(null, removalOps);
setImmediate(function() {
callback(null, removalOps);
});
};
TransactionService.prototype._processTransaction = function(tx, opts) {
TransactionService.prototype._getSpentInfo = function(input, callback) {
if (!this.node.stopping) {
return this._db.get(this._encoding.encodeSpentKey(input.prevout.txid(), input.prevout.index), callback);
}
callback();
};
// this index is very simple txid -> tx, but we also need to find each
// input's prev output value, the adjusted timestamp for the block and
// the tx's block height
TransactionService.prototype._getSpentTxOperations = function(tx, callback) {
var self = this;
// input values
tx.__inputValues = []; // these are lazy-loaded on the first access of the tx
var ops = [];
// if any of this tx's inputs are double spending, then make an entry into this index.
async.eachOfLimit(tx.inputs, 4, function(input, index, next) {
// timestamp
tx.__timestamp = this._getBlockTimestamp(opts.block.rhash());
assert(tx.__timestamp, 'Timestamp is required when saving a transaction.');
self._getSpentInfo(input, function(err, info) {
// height
tx.__height = opts.block.height;
assert(tx.__height, 'Block height is required when saving a transaction.');
if (err) {
return callback(err);
}
return {
key: this._encoding.encodeTransactionKey(tx.txid()),
value: this._encoding.encodeTransactionValue(tx)
};
if (info) {
ops.push({
key: self._encoding.encodeDoubleSpentKey(input.prevout.txid(), input.prevout.index),
value: self._encoding.encodeDoubleSpentValue(tx.txid(), index, tx.__height, tx.__blockhash)
});
return next();
}
ops.push({
key: self._encoding.encodeSpentKey(input.prevout.txid(), input.prevout.index),
value: self._encoding.encodeSpentValue(tx.txid(), index, tx.__height, tx.__blockhash)
});
next();
});
}, function(err) {
if (err) {
return callback(err);
}
callback(null, ops);
});
};
TransactionService.prototype._processTransaction = function(tx, opts, callback) {
var self = this;
self._getInputValues(tx, opts, function(err, inputValues) {
if (err) {
return callback(err);
}
// FLOSight Error Correction from RanchiMall 17th May 2021: removed the unhandled assert and replaced it with logging of the error
if (!inputValues || inputValues.length != tx.inputs.length) {
log.error('Input values missing from tx.');
}
// assert(inputValues && inputValues.length === tx.inputs.length, 'Input values missing from tx.');
// inputValues
tx.__inputValues = inputValues;
// timestamp
tx.__timestamp = self._getBlockTimestamp(opts.block.rhash());
// FLOSight Error Correction from RanchiMall 17th May 2021: removed the unhandled assert and replaced it with logging of the error
if (!tx.__timestamp) {
log.error('Timestamp is required when saving a transaction.');
}
// assert(tx.__timestamp, 'Timestamp is required when saving a transaction.');
// height
tx.__height = opts.block.__height;
// FLOSight Error Correction from RanchiMall 17th May 2021: removed the unhandled assert and replaced it with logging of the error
if (!tx.__height) {
log.error('Block height is required when saving a transaction.');
}
//assert(tx.__height, 'Block height is required when saving a trasnaction.');
// block hash
tx.__blockhash = opts.block.rhash();
var operations = [{
key: self._encoding.encodeTransactionKey(tx.txid()),
value: self._encoding.encodeTransactionValue(tx)
}];
// spent key and value
// for each input in this tx, it spends some tx's prev outs, so index those
// this also accounts for double spend operations
self._getSpentTxOperations(tx, function(err, ops) {
if (err) {
return callback(err);
}
operations = operations.concat(ops);
callback(null, operations);
});
});
};

View File

@ -4,13 +4,14 @@ var fs = require('fs');
var http = require('http');
var https = require('https');
var express = require('express');
var express_ws = require('express-ws');
var bodyParser = require('body-parser');
var socketio = require('socket.io');
var inherits = require('util').inherits;
var BaseService = require('../../service');
var bitcore = require('bitcore-lib');
var _ = bitcore.deps._;
var flocore = require('flocore-lib');
var _ = flocore.deps._;
var index = require('../../');
var log = index.log;
@ -105,7 +106,7 @@ WebService.prototype.setupAllRoutes = function() {
if(service.getRoutePrefix && service.setupRoutes) {
this.app.use('/' + this.node.services[key].getRoutePrefix(), subApp);
this.node.services[key].setupRoutes(subApp, express);
this.node.services[key].setupRoutes(subApp, express, express_ws);
} else {
log.debug('No routes defined for: ' + key);
}

View File

@ -7,6 +7,9 @@ var BN = require('bn.js');
var utils = {};
utils.isHeight = function(blockArg) {
if (!blockArg && blockArg !== 0) {
return false;
}
return _.isNumber(blockArg) || (blockArg.length < 40 && /^[0-9]+$/.test(blockArg));
};
@ -103,4 +106,76 @@ utils.SimpleMap = function SimpleMap() {
};
};
utils.IndeterminateProgressBar = function IndeterminateProgressBar() {
var states = ['|', '/', '-', '\\'];
this.state = 0;
this.tick = function() {
process.stdout.clearLine();
process.stdout.cursorTo(0);
process.stdout.write(states[this.state++ % states.length]);
};
};
utils.convertMillisecondsToHumanReadable = function(ms) {
var ret = '';
var minutes;
var seconds;
if (!ms && ms !== 0) {
return 'invalid number of ms.';
}
if (ms >= 60000) {
minutes = Math.floor(ms / 60000);
ms = ms % 60000;
}
if (ms >= 1000) {
seconds = Math.floor(ms / 1000);
ms = ms % 1000;
}
if (minutes) {
ret = minutes + ' minute(s). ';
}
if (seconds) {
ret += seconds + ' second(s). ';
}
ret += ms + ' millisecond(s).';
return ret;
};
utils.dedupByTxid = function(list) {
var used = [];
return _.compact(_.flattenDeep(list)).filter(function(item) {
var pass = used.indexOf(item.txid()) === -1;
used.push(item.txid());
return pass;
});
};
utils.orderByConfirmations = function(list) {
// newly confirmed first
return _.sortBy(list, function(item) {
return item.confirmations;
});
};
// item is an output or an input
utils.getAddress = function(item, network) {
var address = item.getAddress();
if (!address) {
return;
}
address.network = network;
return address.toString(network);
};
module.exports = utils;
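A quick usage sketch for the helpers added above; the require path assumes the caller sits at the project root, and the txid stubs are placeholders:

'use strict';
var utils = require('./lib/utils'); // path assumed

console.log(utils.convertMillisecondsToHumanReadable(754321));
// -> '12 minute(s). 34 second(s). 321 millisecond(s).'

var fakeTxs = [
  { txid: function() { return 'aa'; } },
  { txid: function() { return 'aa'; } },
  { txid: function() { return 'bb'; } }
];
console.log(utils.dedupByTxid(fakeTxs).length); // 2 (duplicate 'aa' dropped)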

5312
package-lock.json generated

File diff suppressed because it is too large.

View File

@ -1,73 +1,59 @@
{
"name": "bitcore-node",
"description": "Full node with extended capabilities using Bitcore and Bitcoin Core",
"name": "flocore-node",
"description": "Full node with extended capabilities using Flocore and Florincoin Core",
"engines": {
"node": ">=8.2.0"
"node": ">=8.0.0"
},
"author": "BitPay <dev@bitpay.com>",
"version": "5.0.0",
"version": "5.0.9-beta-rm",
"main": "./index.js",
"repository": "git://github.com/bitpay/bitcore-node.git",
"homepage": "https://github.com/bitpay/bitcore-node",
"repository": "git://github.com/ranchimall/flocore-node.git",
"homepage": "https://github.com/ranchimall/flocore-node",
"bugs": {
"url": "https://github.com/bitpay/bitcore-node/issues"
"url": "https://github.com/ranchimall/flocore-node/issues"
},
"contributors": [
{
"name": "Christopher Jeffrey"
},
{
"name": "Braydon Fuller",
"email": "braydon@bitpay.com"
},
{
"name": "Chris Kleeschulte",
"email": "chrisk@bitpay.com"
},
{
"name": "Patrick Nagurny",
"email": "patrick@bitpay.com"
}
],
"bin": {
"bitcore-node": "./bin/bitcore-node"
"flocore-node": "./bin/flocore-node"
},
"scripts": {
"test": "mocha -R spec --recursive",
"jshint": "jshint --reporter=node_modules/jshint-stylish ./lib",
"coverage": "istanbul cover _mocha -- --recursive",
"coveralls": "./node_modules/.bin/istanbul cover ./node_modules/mocha/bin/_mocha --report lcovonly -- --recursive -R spec && cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js"
"test": "NODE_ENV=test mocha -R spec --recursive test"
},
"tags": [
"bitcoin",
"bitcoind",
"florincoin",
"florincoind",
"bcoin",
"bitcoin full node",
"bitcoin index",
"florincoin full node",
"florincoin index",
"block explorer",
"wallet backend"
],
"dependencies": {
"async": "^2.5.0",
"bcoin": "bcoin-org/bcoin#886008a1822ce1da7fa8395ee7db4bcc1750a28a",
"bitcoind-rpc": "^0.6.0",
"bitcore-lib": "bitpay/bitcore-lib#transitional",
"bitcore-p2p": "bitpay/bitcore-p2p#bcoin",
"bitcoind-rpc": "^0.7.2",
"bn.js": "^4.11.8",
"body-parser": "^1.13.3",
"colors": "^1.1.2",
"commander": "^2.8.1",
"errno": "^0.1.4",
"express": "^4.13.3",
"leveldown": "",
"levelup": "",
"express-ws": "^5.0.2",
"fcoin": "^1.1.4",
"flocore-lib": "^0.15.2",
"flocore-message": "^1.0.7",
"flocore-p2p": "^5.0.0-beta.8",
"florincoind-rpc": "0.7.1",
"flosight-api": "github:ranchimall/flosight-api",
"flosight-ui": "github:ranchimall/flosight-ui",
"leveldown": "^2.0.0",
"levelup": "^2.0.0",
"liftoff": "^2.2.0",
"lodash": "^4.17.4",
"lru-cache": "^4.0.2",
"memwatch-next": "^0.3.0",
"lodash": "^4.17.12",
"lru-cache": "^4.1.1",
"mkdirp": "0.5.0",
"path-is-absolute": "^1.0.0",
"socket.io": "^1.4.5",
"socket.io-client": "^1.4.5"
"socket.io-client": "^1.4.5",
"xxhash": "^0.2.4"
},
"devDependencies": {
"chai": "^3.5.0",
@ -75,7 +61,7 @@
"istanbul": "^0.4.3",
"jshint": "^2.9.2",
"jshint-stylish": "^2.1.0",
"mocha": "",
"mocha": "^6.2.0",
"proxyquire": "^1.3.1",
"rimraf": "^2.4.2",
"sinon": "^1.15.4"

View File

@ -3,10 +3,10 @@
var should = require('chai').should();
describe('Index Exports', function() {
it('will export bitcore-lib', function() {
var bitcore = require('../');
should.exist(bitcore.lib);
should.exist(bitcore.lib.Transaction);
should.exist(bitcore.lib.Block);
it('will export flocore-lib', function() {
var flocore = require('../');
should.exist(flocore.lib);
should.exist(flocore.lib.Transaction);
should.exist(flocore.lib.Block);
});
});

View File

@ -1,30 +0,0 @@
[2017-08-16T13:44:43.245Z] info: Connecting to p2p network.
client sending: magic:: 0b110907 command:: 76657273696f6e0000000000 length:: 65000000 checksum:: 735475bc message:: 7111010001000000000000004b4c945900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006e2b8235df4c158a0f2f626974636f72653a312e312e322f0000000001
server sending: magic:: 0b110907 command:: 76657273696f6e0000000000 length:: 67000000 checksum:: 46b5c9ae
client sending: magic:: 0b110907 command:: 76657261636b000000000000 length:: 00000000 checksum:: 5df6e0e2 message::
server sending: magic:: 0b110907 command:: 76657261636b000000000000 length:: 00000000 checksum:: 5df6e0e2
[2017-08-16T13:44:43.261Z] info: Connected to peer: 192.168.3.5, network: regtest, version: 70015, subversion: /Satoshi:0.14.99/, status: ready, port: 18333, best height: 1178711
[2017-08-16T13:44:43.262Z] info: Header Service: Gathering: 2001 header(s) from the peer-to-peer network.
[2017-08-16T13:44:43.262Z] info: Header Service: download progress: 1176710/1178711 (99.83%)
client sending: magic:: 0b110907 command:: 676574686561646572730000 length:: 45000000 checksum:: 857caf8b message:: 7111010001145ed5b8587723d506f208c0aaf9c4d628bcba4bacd1d30f90270000000000000000000000000000000000000000000000000000000000000000000000000000
server sending: magic:: command:: length:: checksum::
server sending: magic:: 0b110907 command:: 616c65727400000000000000 length:: a8000000 checksum:: 1bf9aaea
server sending: magic:: 0b110907 command:: 70696e670000000000000000 length:: 08000000 checksum:: 7c640b03
client sending: magic:: 0b110907 command:: 706f6e670000000000000000 length:: 08000000 checksum:: 7c640b03 message:: e79ac440be90a476
server sending: magic:: 0b110907 command:: 676574686561646572730000 length:: 25040000 checksum:: 552dc886
server sending: magic:: command:: length:: checksum::
server sending: magic:: 0b110907 command:: 686561646572730000000000 length:: d3780200 checksum:: 29213586
server sending: magic:: 0b110907 command:: 686561646572730000000000 length:: d3780200 checksum:: 29213586
server sending: magic:: 0b110907 command:: 686561646572730000000000 length:: d3780200 checksum:: 29213586
server sending: magic:: 0b110907 command:: 686561646572730000000000 length:: d3780200 checksum:: 29213586
server sending: magic:: command:: length:: checksum::
[2017-08-16T13:44:43.411Z] info: Header Service: download progress: 1178710/1178711 (100.00%)
client sending: magic:: 0b110907 command:: 676574686561646572730000 length:: 45000000 checksum:: cd33b9da message:: 71110100019f1309c60de611c5cdec7e0b24fb00da0d16fb706f1ae21a500f0000000000000000000000000000000000000000000000000000000000000000000000000000
server sending: magic:: 0b110907 command:: 686561646572730000000000 length:: 52000000 checksum:: a4022af1
server sending: magic:: 0b110907 command:: 686561646572730000000000 length:: 52000000 checksum:: a4022af1
server sending: magic:: command:: length:: checksum::
[2017-08-16T13:44:43.419Z] info: localhost-header subscribe: p2p/block total: 1
[2017-08-16T13:44:43.419Z] info: Header Service: emitting headers to block service.
[2017-08-16T13:44:43.419Z] info: Block Service: Gathering: 0 block(s) from the peer-to-peer network.
[2017-08-16T13:44:43.419Z] info: Block Service: The best block hash is: 00000000000004842ea914123b8010541a41174a11ba62b244d0aec19840467c at height: 1178711

View File

@ -1,4 +0,0 @@
[
"000000201a3c951a20b5d603144ce060c86e95fed1869524e66acfc46bdf08d96f664209b4b1c32ec485f4ad27c5402a1b16a0b1135364b7c9b0dcf4276f9fa3fd215d1b08cc9559ffff7f20000000000102000000010000000000000000000000000000000000000000000000000000000000000000ffffffff03570101ffffffff0200f2052a01000000232102a5566542d1f0f202541d98755628a41dcd4416b50db820e2b04d5ecb0bd02b73ac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf900000000"
]

View File

@ -1,276 +0,0 @@
'use strict';
var expect = require('chai').expect;
var net = require('net');
var spawn = require('child_process').spawn;
var path = require('path');
var rimraf = require('rimraf');
var mkdirp = require('mkdirp');
var fs = require('fs');
var p2p = require('bitcore-p2p');
var bitcore = require('bitcore-lib');
var Networks = bitcore.Networks;
var Header = bitcore.BlockHeader;
var Block = bitcore.Block;
var BcoinBlock = require('bcoin').block;
var http = require('http');
Networks.enableRegtest();
var messages = new p2p.Messages({ network: Networks.get('regtest'), Block: BcoinBlock });
var server;
var rawBlocks = require('./data/blocks.json');
var rawReorgBlocks = require('./data/blocks_reorg.json')[0];
var reorgBlock = BcoinBlock.fromRaw(rawReorgBlocks, 'hex');
var blocks = rawBlocks.map(function(rawBlock) {
return new Block(new Buffer(rawBlock, 'hex'));
});
var headers = blocks.map(function(block) {
return block.header;
});
var debug = true;
var bitcoreDataDir = '/tmp/bitcore';
var bitcore = {
configFile: {
file: bitcoreDataDir + '/bitcore-node.json',
conf: {
network: 'regtest',
port: 53001,
datadir: bitcoreDataDir,
services: [
'p2p',
'db',
'header',
'block',
'address',
'transaction',
'mempool',
'web',
'insight-api',
'fee',
'timestamp'
],
servicesConfig: {
'p2p': {
'peers': [
{ 'ip': { 'v4': '127.0.0.1' }, port: 18444 }
]
},
'insight-api': {
'routePrefix': 'api'
}
}
}
},
httpOpts: {
protocol: 'http:',
hostname: 'localhost',
port: 53001,
},
opts: { cwd: bitcoreDataDir },
datadir: bitcoreDataDir,
exec: path.resolve(__dirname, '../../bin/bitcore-node'),
args: ['start'],
process: null
};
var blockIndex = 0;
var tcpSocket;
var startFakeNode = function() {
server = net.createServer(function(socket) {
tcpSocket = socket;
socket.on('end', function() {
console.log('bitcore-node has ended the connection');
});
socket.on('data', function(data) {
var command = data.slice(4, 16).toString('hex');
var message;
if (command === '76657273696f6e0000000000') { //version
message = messages.Version();
}
if (command === '76657261636b000000000000') { //verack
message = messages.VerAck();
}
if (command === '676574686561646572730000') { //getheaders
message = messages.Headers(headers, { BlockHeader: Header });
}
if (command === '676574626c6f636b73000000') { //getblocks
var block = blocks[blockIndex];
if (!block) {
return;
}
var blockHash = block.hash;
var inv = p2p.Inventory.forBlock(blockHash);
message = messages.Inventory([inv]);
}
if (command === '676574646174610000000000') { //getdata
var raw = rawBlocks[blockIndex++];
var blk = BcoinBlock.fromRaw(raw, 'hex');
message = messages.Block(blk, { Block: BcoinBlock });
}
if (message) {
socket.write(message.toBuffer());
}
});
socket.pipe(socket);
});
server.listen(18444, '127.0.0.1');
};
var shutdownFakeNode = function() {
server.close();
};
var shutdownBitcore = function(callback) {
if (bitcore.process) {
bitcore.process.kill();
}
callback();
};
var startBitcore = function(callback) {
rimraf(bitcoreDataDir, function(err) {
if(err) {
return callback(err);
}
mkdirp(bitcoreDataDir, function(err) {
if(err) {
return callback(err);
}
fs.writeFileSync(bitcore.configFile.file, JSON.stringify(bitcore.configFile.conf));
var args = bitcore.args;
bitcore.process = spawn(bitcore.exec, args, bitcore.opts);
bitcore.process.stdout.on('data', function(data) {
if (debug) {
process.stdout.write(data.toString());
}
});
bitcore.process.stderr.on('data', function(data) {
if (debug) {
process.stderr.write(data.toString());
}
});
callback();
});
});
};
describe('Reorg', function() {
// 1. spin up bitcore-node and have it connect to our custom tcp socket
// 2. feed it a few headers
// 3. feed it a few blocks
// 4. feed it a block that reorgs
this.timeout(60000);
before(function(done) {
startFakeNode();
startBitcore(done);
});
after(function(done) {
shutdownFakeNode();
shutdownBitcore(done);
});
it('should reorg correctly when already synced', function(done) {
// at this point we have a fully synced chain at height 7....
// we now want to send a new block number 7 whose prev hash is block 6 (it should be block 7)
// we then should reorg back to block 6 then back up to the new block 7
setTimeout(function() {
console.log('From Test: reorging to block: ' + reorgBlock.rhash());
// send the reorg block
rawBlocks.push(rawReorgBlocks);
var blockHash = reorgBlock.rhash();
var inv = p2p.Inventory.forBlock(blockHash);
var msg = messages.Inventory([inv]);
tcpSocket.write(msg.toBuffer());
// wait 2 secs until the reorg happens, if it takes any longer the test ought to fail anyway
setTimeout(function() {
var error;
var request = http.request('http://localhost:53001/api/block/' + reorgBlock.rhash(), function(res) {
if (res.statusCode !== 200 && res.statusCode !== 201) {
if (error) {
return;
}
return done('Error from bitcore-node webserver: ' + res.statusCode);
}
var resError;
var resData = '';
res.on('error', function(e) {
resError = e;
});
res.on('data', function(data) {
resData += data;
});
res.on('end', function() {
if (error) {
return;
}
var data = JSON.parse(resData);
expect(data.height).to.equal(7);
expect(data.hash).to.equal(reorgBlock.rhash());
done(resError, resData);
});
});
request.on('error', function(e) {
error = e;
done(error);
});
request.write('');
request.end();
}, 2000);
}, 2000);
});
});

View File

@ -25,7 +25,7 @@ describe('#add', function() {
throw err;
}
fs.writeFile(
testDir + '/s0/s1/bitcore-node.json',
testDir + '/s0/s1/flocore-node.json',
JSON.stringify(startConfig),
function(err) {
if (err) {
@ -90,12 +90,12 @@ describe('#add', function() {
});
});
it('will update bitcore-node.json services', function(done) {
it('will update flocore-node.json services', function(done) {
var callCount = 0;
var oldPackage = {
dependencies: {
'bitcore-lib': '^v0.13.7',
'bitcore-node': '^v0.2.0'
'flocore-lib': '^v0.13.7',
'flocore-node': '^v0.2.0'
}
};
var spawn = sinon.stub().returns({
@ -130,7 +130,7 @@ describe('#add', function() {
services: ['a', 'b', 'c']
}, function(err) {
should.not.exist(err);
var configPath = path.resolve(testDir, 's0/s1/bitcore-node.json');
var configPath = path.resolve(testDir, 's0/s1/flocore-node.json');
var config = JSON.parse(fs.readFileSync(configPath));
config.services.should.deep.equal(['a','b','c']);
done();


@@ -33,7 +33,7 @@ describe('#create', function() {
if (err) {
throw err;
}
mkdirp(testDir + '/.bitcoin', function(err) {
mkdirp(testDir + '/.florincoin', function(err) {
if (err) {
throw err;
}
@@ -64,14 +64,14 @@ describe('#create', function() {
throw err;
}
var configPath = testDir + '/mynode/bitcore-node.json';
var configPath = testDir + '/mynode/flocore-node.json';
var packagePath = testDir + '/mynode/package.json';
should.equal(fs.existsSync(configPath), true);
should.equal(fs.existsSync(packagePath), true);
var config = JSON.parse(fs.readFileSync(configPath));
config.services.should.deep.equal(['bitcoind', 'db', 'address', 'web']);
config.services.should.deep.equal(['florincoind', 'db', 'address', 'web']);
config.datadir.should.equal('./data');
config.network.should.equal('livenet');
@@ -104,7 +104,7 @@ describe('#create', function() {
dirname: 'mynode3',
name: 'My Node 3',
isGlobal: true,
datadir: '../.bitcoin'
datadir: '../.florincoin'
}, function(err) {
if (err) {
throw err;
@@ -139,7 +139,7 @@ describe('#create', function() {
dirname: 'mynode4',
name: 'My Node 4',
isGlobal: false,
datadir: '../.bitcoin'
datadir: '../.florincoin'
}, function(err) {
should.exist(err);
err.message.should.equal('There was an error installing dependencies.');


@@ -25,7 +25,7 @@ describe('#remove', function() {
throw err;
}
fs.writeFile(
testDir + '/s0/s1/bitcore-node.json',
testDir + '/s0/s1/flocore-node.json',
JSON.stringify(startConfig),
function(err) {
if (err) {
@@ -64,7 +64,7 @@ describe('#remove', function() {
});
});
it('will update bitcore-node.json services', function(done) {
it('will update flocore-node.json services', function(done) {
var spawn = sinon.stub().returns({
stdout: {
on: sinon.stub()
@@ -92,7 +92,7 @@ describe('#remove', function() {
services: ['b']
}, function(err) {
should.not.exist(err);
var configPath = path.resolve(testDir, 's0/s1/bitcore-node.json');
var configPath = path.resolve(testDir, 's0/s1/flocore-node.json');
var config = JSON.parse(fs.readFileSync(configPath));
config.services.should.deep.equal(['a', 'c']);
done();


@@ -1,6 +1,6 @@
'use strict';
var bitcore = require('bitcore-lib');
var flocore = require('flocore-lib');
var should = require('chai').should();
var Encoding = require('../../../lib/services/address/encoding');
@@ -38,7 +38,7 @@ describe('Address service encoding', function() {
new Buffer(txid, 'hex'),
new Buffer('00000005', 'hex')]);
var txHex = '0100000001cc3ffe0638792c8b39328bb490caaefe2cf418f2ce0144956e0c22515f29724d010000006a473044022030ce9fa68d1a32abf0cd4adecf90fb998375b64fe887c6987278452b068ae74c022036a7d00d1c8af19e298e04f14294c807ebda51a20389ad751b4ff3c032cf8990012103acfcb348abb526526a9f63214639d79183871311c05b2eebc727adfdd016514fffffffff02f6ae7d04000000001976a9144455183e407ee4d3423858c8a3275918aedcd18e88aca99b9b08010000001976a9140beceae2c29bfde08d2b6d80b33067451c5887be88ac00000000';
var tx = new bitcore.Transaction(txHex);
var tx = new flocore.Transaction(txHex);
var sats = tx.outputs[0].satoshis;
var satsBuf = new Buffer(8);
satsBuf.writeDoubleBE(sats);


@@ -8,11 +8,12 @@ var Encoding = require('../../../lib/services/address/encoding');
var Readable = require('stream').Readable;
var EventEmitter = require('events').EventEmitter;
var bcoin = require('bcoin');
var lodash = require('lodash');
describe('Address Service', function() {
var tx = Tx.fromRaw( '0100000004de9b4bb17f627096a9ee0b4528e4eae17df5b5c69edc29704c2e84a7371db29f010000006b483045022100f5b1a0d33b7be291c3953c25f8ae39d98601aa7099a8674daf638a08b86c7173022006ce372da5ad088a1cc6e5c49c2760a1b6f085eb1b51b502211b6bc9508661f9012102ec5e3731e54475dd2902326f43602a03ae3d62753324139163f81f20e787514cffffffff7a1d4e5fc2b8177ec738cd723a16cf2bf493791e55573445fc0df630fe5e2d64010000006b483045022100cf97f6cb8f126703e9768545dfb20ffb10ba78ae3d101aa46775f5a239b075fc02203150c4a89a11eaf5e404f4f96b62efa4455e9525765a025525c7105a7e47b6db012102c01e11b1d331f999bbdb83e8831de503cd52a01e3834a95ccafd615c67703d77ffffffff9e52447116415ca0d0567418a1a4ef8f27be3ff5a96bf87c922f3723d7db5d7c000000006b483045022100f6c117e536701be41a6b0b544d7c3b1091301e4e64a6265b6eb167b15d16959d022076916de4b115e700964194ce36a24cb9105f86482f4abbc63110c3f537cd5770012102ddf84cc7bee2d6a82ac09628a8ad4a26cd449fc528b81e7e6cc615707b8169dfffffffff5815d9750eb3572e30d6fd9df7afb4dbd76e042f3aa4988ac763b3fdf8397f80010000006a473044022028f4402b736066d93d2a32b28ccd3b7a21d84bb58fcd07fe392a611db94cdec5022018902ee0bf2c3c840c1b81ead4e6c87c88c48b2005bf5eea796464e561a620a8012102b6cdd1a6cd129ef796faeedb0b840fcd0ca00c57e16e38e46ee7028d59812ae7ffffffff0220a10700000000001976a914c342bcd1a7784d9842f7386b8b3b8a3d4171a06e88ac59611100000000001976a91449f8c749a9960dc29b5cbe7d2397cea7d26611bb88ac00000000', 'hex');
var blocks = require('../../regtest/data/blocks.json');
var blocks = require('../../data/blocks.json');
var addressService;
var sandbox;
@@ -20,7 +21,6 @@ describe('Address Service', function() {
sandbox = sinon.sandbox.create();
addressService = new AddressService({
node: {
getNetworkName: function() { return 'regtest'; },
services: []
}
});
@@ -54,9 +54,12 @@ describe('Address Service', function() {
describe('#getAddressHistory', function() {
it('should get the address history', function(done) {
sandbox.stub(addressService, '_getAddressHistory').callsArgWith(2, null, {});
it('should get the address history (null case)', function(done) {
sandbox.stub(addressService, '_getAddressTxidHistory').callsArgWith(2, null, null);
sandbox.stub(addressService, '_getAddressTxHistory').callsArgWith(1, null, []);
addressService.getAddressHistory(['a', 'b', 'c'], { from: 12, to: 14 }, function(err, res) {
if (err) {
@@ -64,82 +67,276 @@ describe('Address Service', function() {
}
expect(res).to.be.deep.equal({
totalItems: 3,
from: 12,
to: 14,
items: [ {}, {}, {} ]
totalCount: 0,
items: []
});
done();
});
});
});
it('should get the sorted address history', function(done) {
describe('#_getAddressHistory', function() {
it('should get the address history', function(done) {
var encoding = new Encoding(new Buffer('0001', 'hex'));
addressService._encoding = encoding;
var address = 'a';
var opts = { from: 12, to: 14 };
var txid = '1c6ea4a55a3edaac0a05e93b52908f607376a8fdc5387c492042f8baa6c05085';
var data = [ null, encoding.encodeAddressIndexKey(address, 123, txid, 1, 1) ];
var getTransaction = sandbox.stub().callsArgWith(2, null, {});
addressService._tx = { getTransaction: getTransaction };
var old_getAddressTxidHistory = addressService._getAddressTxidHistory;
addressService._getAddressTxidHistory = function(addr, options, cb) {
options.txIdList = [
{
txid: "d",
height: 10,
},
{
txid: "c",
height: 10,
},
{
txid: "a",
height: 101,
},
{
txid: "b",
height: 100,
},
];
return cb();
};
var txidStream = new Readable();
txidStream._read = function() {
txidStream.push(data.pop());
}
var old_getAddressTxHistory = addressService._getAddressTxHistory;
addressService._getAddressTxHistory = function(options, cb) {
return cb(null, options.txIdList);
};
var createReadStream = sandbox.stub().returns(txidStream);
addressService._db = { createKeyStream: createReadStream };
addressService.getAddressHistory(['a', 'b', 'c'], { from: 12, to: 14 }, function(err, res) {
addressService._getAddressHistory(address, opts, function(err, res) {
if (err) {
return done(err);
}
expect(getTransaction.calledOnce).to.be.true;
expect(res).to.deep.equal([{}]);
expect(res.totalCount).equal(4);
expect(lodash.map(res.items,'txid')).to.be.deep.equal(['a','b','c','d']);
addressService._getAddressTxidHistory = old_getAddressTxidHistory;
addressService._getAddressTxHistory = old_getAddressTxHistory;
done();
});
});
it('should remove duplicated items in history', function(done) {
var old_getAddressTxidHistory = addressService._getAddressTxidHistory;
addressService._getAddressTxidHistory = function(addr, options, cb) {
options.txIdList = [
{
txid: "b",
height: 10,
},
{
txid: "b",
height: 10,
},
{
txid: "d",
height: 101,
},
{
txid: "c",
height: 100,
},
{
txid: "d",
height: 101,
},
];
return cb();
};
var old_getAddressTxHistory = addressService._getAddressTxHistory;
addressService._getAddressTxHistory = function(options, cb) {
return cb(null, options.txIdList);
};
addressService.getAddressHistory(['a', 'b', 'c'], { from: 12, to: 14 }, function(err, res) {
if (err) {
return done(err);
}
expect(res.totalCount).equal(3);
expect(lodash.map(res.items,'txid')).to.be.deep.equal(['d','c','b']);
addressService._getAddressTxidHistory = old_getAddressTxidHistory;
addressService._getAddressTxHistory = old_getAddressTxHistory;
done();
});
});
describe('TxIdList cache', function() {
var list, old_getAddressTxidHistory, old_getAddressTxHistory;
beforeEach(function(done){
this.clock = sinon.useFakeTimers();
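// fake timers so the cache-expiry test below can advance time deterministically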
list = [];
for(let i=1000; i>0; i--) {
list.push({
txid: "txid" + i,
height: 1000 + i,
});
};
old_getAddressTxidHistory = addressService._getAddressTxidHistory;
// Note that this stub DOES NOT respect options.from/to the way the real function does
addressService._getAddressTxidHistory = function(addr, options, cb) {
options.txIdList = lodash.clone(list);
return cb();
};
old_getAddressTxHistory = addressService._getAddressTxHistory;
addressService._getAddressTxHistory = function(options, cb) {
return cb(null, options.txIdList);
};
addressService.getAddressHistory(['a', 'b', 'c'], { from: 0, to: 10 }, function(err, res, cacheUsed) {
if (err) {
return done(err);
}
expect(res.totalCount).equal(1000);
expect(res.items,'txid').to.be.deep.equal(list);
expect(cacheUsed).equal(false);
done();
});
});
afterEach(function(done){
this.clock.restore();
addressService._getAddressTxidHistory = old_getAddressTxidHistory;
addressService._getAddressTxHistory = old_getAddressTxHistory;
done();
});
it('should not cache the address txlist history when from = 0', function(done) {
addressService.getAddressHistory(['a', 'b', 'c'], { from: 0, to: 10 }, function(err, res, cacheUsed) {
if (err) {
return done(err);
}
expect(res.totalCount).equal(1000);
expect(res.items,'txid').to.be.deep.equal(list);
expect(cacheUsed).equal(false);
done();
});
});
it('should cache the address txlist history', function(done) {
addressService.getAddressHistory(['a', 'b', 'c'], { from: 1, to: 10 }, function(err, res, cacheUsed) {
if (err) {
return done(err);
}
expect(cacheUsed).equal(true);
expect(res.totalCount).equal(1000);
expect(res.items,'txid').to.be.deep.equal(list);
done();
});
});
it('should retrieve cached list using cacheKey', function(done) {
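// the hard-coded cacheKey is presumably the key the service generated for ['a', 'b', 'c'] in the earlier query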
addressService.getAddressHistory([], { from: 1, to: 10, cacheKey: 977282097 }, function(err, res, cacheUsed) {
if (err) {
return done(err);
}
expect(cacheUsed).equal(true);
expect(res.totalCount).equal(1000);
expect(res.items,'txid').to.be.deep.equal(list);
done();
});
});
it('should expire cache', function(done) {
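// tick past the txid-list cache TTL (assumed here to be under 35 seconds) so the next query must rebuild the list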
this.clock.tick(35*1000);
addressService.getAddressHistory(['a', 'b', 'c'], { from: 1, to: 10 }, function(err, res, cacheUsed) {
if (err) {
return done(err);
}
expect(cacheUsed).equal(false);
expect(res.totalCount).equal(1000);
expect(res.items,'txid').to.be.deep.equal(list);
done();
});
});
it('should cache using the address as key', function(done) {
addressService.getAddressHistory(['a', 'b', 'c', 'd'], { from: 1, to: 10 }, function(err, res, cacheUsed) {
if (err) {
return done(err);
}
expect(cacheUsed).equal(false);
expect(res.totalCount).equal(1000);
expect(res.items,'txid').to.be.deep.equal(list);
addressService.getAddressHistory(['a', 'b', 'c', 'd'], { from: 1, to: 10 }, function(err, res, cacheUsed) {
if (err) {
return done(err);
}
expect(cacheUsed).equal(true);
expect(res.totalCount).equal(1000);
expect(res.items,'txid').to.be.deep.equal(list);
done();
});
});
});
});
});
describe('#_getAddressTxidHistory', function() {
it('should get the address txid history', function(done) {
addressService._mempool = { getTxidsByAddress: sinon.stub().callsArgWith(2, null, []) };
var txidStream = new Readable();
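// stand-in for the db key stream returned by _getTxidStream; an encoded address-index key is pushed below, then null to end the stream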
sandbox.stub(addressService, '_getTxidStream').returns(txidStream);
var addressInfoBuf = addressService._encoding.encodeAddressIndexKey('a', 10, tx.txid(), 1, 1, 1234567);
var options = {txIdList: []};
addressService._getAddressTxidHistory('a', options, function(err) {
if (err) {
return done(err);
}
expect(options.txIdList).to.deep.equal([{txid: tx.txid(), height: 10}]);
done();
});
txidStream.push(addressInfoBuf);
txidStream.push(null);
});
});
describe('#AddressSummary', function() {
it('should get the address summary', function(done) {
var encoding = new Encoding(new Buffer('0001', 'hex'));
addressService._encoding = encoding;
var address = 'a';
var txid = tx.txid();
var data = [ null, encoding.encodeAddressIndexKey(address, 123, txid, 1, 0) ];
var inputValues = [120, 0, 120, 120];
tx.__inputValues = inputValues;
var getTransaction = sandbox.stub().callsArgWith(2, null, tx);
addressService._tx = { getTransaction: getTransaction };
addressService._header = { getBestHeight: function() { return 150; } };
it('should get the address summary, incoming', function(done) {
var txidStream = new Readable();
var _tx = tx;
_tx.__inputValues = [ 0, 0, 0, 0 ];
var results = { items: [_tx] };
txidStream._read = function() {
txidStream.push(data.pop());
}
var createReadStream = sandbox.stub().returns(txidStream);
addressService._db = { createKeyStream: createReadStream };
addressService.getAddressSummary(address, {}, function(err, res) {
sandbox.stub(addressService, 'getAddressHistory').callsArgWith(2, null, results);
addressService.getAddressSummary('1JoSiR4dBcSrGs2AZBP2gCHqCCsgzccsGb', {}, function(err, res) {
if (err) {
return done(err);
}
expect(getTransaction.calledOnce).to.be.true;
expect(res).to.deep.equal({ addrStr: 'a',
balance: 0.01139033,
balanceSat: 1139033,
totalReceived: 0.01139033,
totalReceivedSat: 1139033,
expect(res).to.deep.equal({ addrStr: '1JoSiR4dBcSrGs2AZBP2gCHqCCsgzccsGb',
balance: 0.005,
balanceSat: 500000,
totalReceived: 0.005,
totalReceivedSat: 500000,
totalSent: 0,
totalSentSat: 0,
unconfirmedBalance: 0,
@@ -154,6 +351,7 @@ describe('Address Service', function() {
});
});
describe('#getAddressUnspentOutputs', function() {
it('should get address utxos', function(done) {
@@ -169,10 +367,11 @@ describe('Address Service', function() {
value: encoding.encodeUtxoIndexValue(123, 120000, ts, tx.outputs[1].script.raw)
};
addressService._header = { getBestHeight: function() { return 150; } };
addressService._block = { getTip: function() { return { height: 150 }; } };
var txidStream = new EventEmitter();
addressService._mempool = { getTxidsByAddress: sinon.stub().callsArgWith(2, null, []) };
var createReadStream = sandbox.stub().returns(txidStream);
addressService._db = { createReadStream: createReadStream };
@@ -181,14 +380,14 @@ describe('Address Service', function() {
return done(err);
}
expect(res[0]).to.deep.equal({
address: "a",
address: 'a',
amount: 0.0012,
confirmations: 27,
confirmationsFromCache: true,
height: 123,
confirmations: 28,
satoshis: 120000,
scriptPubKey: "76a91449f8c749a9960dc29b5cbe7d2397cea7d26611bb88ac",
scriptPubKey: '76a91449f8c749a9960dc29b5cbe7d2397cea7d26611bb88ac',
ts: 1546300800,
txid: "25e28f9fb0ada5353b7d98d85af5524b2f8df5b0b0e2d188f05968bceca603eb",
txid: '25e28f9fb0ada5353b7d98d85af5524b2f8df5b0b0e2d188f05968bceca603eb',
vout: 1
});
done();
@@ -203,22 +402,23 @@ describe('Address Service', function() {
describe('#onReorg', function() {
it('should reorg', function(done ) {
it('should reorg when there is nothing to reorg', function(done ) {
var commonAncestorHeader = bcoin.block.fromRaw(blocks[5], 'hex').toHeaders().toJSON();
var oldBlocks = [bcoin.block.fromRaw(blocks[6], 'hex')];
var block = bcoin.block.fromRaw(blocks[6], 'hex');
block.__ts = 55555;
block.__height = 999;
var oldBlocks = [block];
addressService.onReorg([commonAncestorHeader, oldBlocks], function(err, ops) {
expect(ops.length).to.equal(1);
expect(ops[0].type).to.equal('del');
expect(ops.length).to.equal(2);
done();
});
});
});
});


@@ -6,11 +6,12 @@ var sinon = require('sinon');
var bcoin = require('bcoin');
var Block = bcoin.block;
var Encoding = require('../../../lib/services/block/encoding');
var utils = require('../../../lib/utils');
describe('Block Service', function() {
var blockService;
var blocks = require('../../regtest/data/blocks.json');
var blocks = require('../../data/blocks.json');
var block1 = Block.fromRaw('010000006a39821735ec18a366d95b391a7ff10dee181a198f1789b0550e0d00000000002b0c80fa52b669022c344c3e09e6bb9698ab90707bb4bb412af3fbf31cfd2163a601514c5a0c011c572aef0f0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff08045a0c011c022003ffffffff0100f2052a01000000434104c5b694d72e601091fd733c6b18b94795c13e2db6b1474747e7be914b407854cad37cee3058f85373b9f9dbb0014e541c45851d5f85e83a1fd7c45e54423718f3ac00000000', 'hex');
var block2 = Block.fromRaw('01000000fb3c5deea3902d5e6e0222435688795152ae0f737715b0bed6a88b00000000008ec0f92d33b05617cb3c3b4372aa0c2ae3aeb8aa7f34fe587db8e55b578cfac6b601514c5a0c011c98a831000101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff08045a0c011c027f01ffffffff0100f2052a0100000043410495fee5189566db550919ad2b4e5f9111dbdc2cb60b5c71ea4c0fdad59a961c42eb289e5b9fdc4cb3f3fec6dd866172720bae3e3b881fc203fcaf98bf902c53f1ac00000000', 'hex');
@@ -19,7 +20,6 @@ describe('Block Service', function() {
sandbox = sinon.sandbox.create();
blockService = new BlockService({
node: {
getNetworkName: function() { return 'regtest'; },
services: []
}
});
@@ -44,28 +44,25 @@ describe('Block Service', function() {
});
});
describe('#_findCommonAncestor', function() {
describe('#_findLatestValidBlockHeader', function() {
it('should find the common ancestor between the current chain and the new chain', function(done) {
it('should find the latest valid block header whose hash is also in our block index', function(done) {
blockService._tip = { hash: block2.rhash(), height: 70901 };
blockService._tip = { hash: 'aa', height: 2 };
var encodedData = blockService._encoding.encodeBlockValue(block2);
blockService._header = { getBlockHeader: sandbox.stub().callsArgWith(1, null, { hash: 'aa', height: 2 }) };
blockService._findLatestValidBlockHeader(function(err, header) {
var get = sandbox.stub().callsArgWith(1, null, encodedData);
var headers = { get: sandbox.stub().returns({ prevHash: block1.rhash() }) };
blockService._db = { get: get };
blockService._findCommonAncestor('aa', headers, function(err, common, oldBlocks) {
if (err) {
if(err) {
return done(err);
}
expect(common).to.equal('aa');
expect(oldBlocks).to.deep.equal([]);
expect(header).to.deep.equal({ hash: 'aa', height: 2 });
done();
});
});
});
describe('#getBestBlockHash', function() {
@@ -98,64 +95,53 @@ describe('Block Service', function() {
describe('#_onBlock', function() {
it('should process blocks', function() {
it('should process blocks', function(done) {
var getBlock = sandbox.stub(blockService, '_getBlock').callsArgWith(1, null, null);
var processBlock = sandbox.stub(blockService, '_processBlock').callsArgWith(1, null);
blockService._onBlock(block2, function(err) {
if(err) {
return done(err);
}
expect(processBlock.calledOnce).to.be.true;
expect(getBlock.calledOnce).to.be.true;
done();
});
});
it('should not process blocks', function(done) {
var getBlock = sandbox.stub(blockService, '_getBlock').callsArgWith(1, null, block2);
var processBlock = sandbox.stub(blockService, '_processBlock');
blockService._tip = { hash: block1.rhash(), height: 1 };
blockService._onBlock(block2);
expect(processBlock.calledOnce).to.be.true;
blockService._onBlock(block2, function(err) {
if(err) {
return done(err);
}
expect(getBlock.calledOnce).to.be.true;
expect(processBlock.called).to.be.false;
done();
});
});
it('should not process blocks', function() {
var processBlock = sandbox.stub(blockService, '_processBlock');
blockService._tip = { hash: block2.rhash(), height: 1 };
blockService._onBlock(block1);
expect(processBlock.calledOnce).to.be.false;
});
});
describe('#_setListeners', function() {
it('should set listeners for headers, reorg', function() {
var on = sandbox.stub();
var once = sandbox.stub();
blockService._header = { on: on, once: once };
blockService._setListeners();
expect(on.calledOnce).to.be.true;
expect(once.calledOnce).to.be.true;
});
});
describe('#_setTip', function() {
it('should set the tip if given a block', function() {
blockService._db = {};
it('should set the tip if given a block', function(done) {
var saveTip = sandbox.stub(blockService, '_saveTip').callsArgWith(1, null);
blockService._tip = { height: 99, hash: '00' };
blockService._setTip({ height: 100, hash: 'aa' });
expect(blockService._tip).to.deep.equal({ height: 100, hash: 'aa' });
blockService._setTip({ height: 100, hash: 'aa' }, function(err) {
if(err) {
return done(err);
}
expect(blockService._tip).to.deep.equal({ height: 100, hash: 'aa' });
done();
});
});
});
describe('#_startSubscriptions', function() {
it('should start the subscriptions if not already subscribed', function() {
var on = sinon.stub();
var subscribe = sinon.stub();
var openBus = sinon.stub().returns({ on: on, subscribe: subscribe });
blockService.node = { openBus: openBus };
blockService._startSubscriptions();
expect(blockService._subscribed).to.be.true;
expect(openBus.calledOnce).to.be.true;
expect(on.calledOnce).to.be.true;
expect(subscribe.calledOnce).to.be.true;
});
});
describe('#_startSync', function() {
it('should start the sync of blocks if type set', function() {
blockService._header = { getLastHeader: sinon.stub.returns({ height: 100 }) };
it('should start the sync of blocks', function() {
blockService._header = { getLastHeader: sinon.stub().returns({ height: 100 }) };
blockService._tip = { height: 98 };
var sync = sandbox.stub(blockService, '_sync');
blockService._startSync();
@@ -166,18 +152,19 @@ describe('Block Service', function() {
describe('#start', function() {
it('should get the prefix', function(done) {
it('should get the service started', function(done) {
var getPrefix = sandbox.stub().callsArgWith(1, null, blockService._encoding);
var getServiceTip = sandbox.stub().callsArgWith(1, null, { height: 1, hash: 'aa' });
var setListeners = sandbox.stub(blockService, '_setListeners');
var startSub = sandbox.stub(blockService, '_startSubscriptions');
var setTip = sandbox.stub(blockService, '_setTip');
var performSanityCheck = sandbox.stub(blockService, '_performSanityCheck').callsArgWith(1, null, { hash: 'aa', height: 123 });
var loadRecentBlockHashes = sandbox.stub(blockService, '_loadRecentBlockHashes').callsArgWith(0, null, new utils.SimpleMap());
var setTip = sandbox.stub(blockService, '_setTip').callsArgWith(1, null);
blockService.node = { openBus: sandbox.stub() };
blockService._db = { getPrefix: getPrefix, getServiceTip: getServiceTip };
blockService._header = { on: sinon.stub() };
blockService.start(function() {
expect(blockService._encoding).to.be.an.instanceof(Encoding);
expect(getServiceTip.calledOnce).to.be.true;
expect(getPrefix.calledOnce).to.be.true;
expect(startSub.calledOnce).to.be.true;
expect(setTip.calledOnce).to.be.true;
done();
});


@@ -34,6 +34,9 @@ describe('DB', function() {
describe('#start', function() {
it('should start the db service by creating a db dir, ' +
' if necessary, and setting the store', function(done) {
dbService._setDataPath();
dbService.start(function() {
dbService._store.should.be.instanceOf(Levelup);
done();
@@ -74,7 +77,7 @@ describe('DB', function() {
it('should set the data path', function() {
dbService._setDataPath();
dbService.dataPath.should.equal('/tmp/regtest/bitcorenode.db');
dbService.dataPath.should.equal('/tmp/regtest/flocorenode.db');
});
});
@@ -233,10 +236,11 @@ describe('DB', function() {
});
describe('#close', function() {
this.timeout(3000);
it('should close the store if there is a store and it is open', function(done) {
var close = sandbox.stub().callsArgWith(0, null);
dbService._store = { isOpen: sinon.stub().returns(true), close: close };
dbService._store = { isClosed: sinon.stub().returns(false), close: close };
dbService.close(function(err) {
if(err) {
@@ -246,6 +250,7 @@ describe('DB', function() {
done();
});
});
this.timeout(2000);
});
describe('#getServiceTip', function() {


@@ -13,7 +13,7 @@ describe('#Fee Service', function() {
sandbox = sinon.sandbox.create();
feeService = new FeeService({
rpc: {
user: 'bitcoin',
user: 'florincoin',
pass: 'local321',
host: 'localhost',
protocol: 'http',


@@ -22,10 +22,12 @@ describe('Header service encoding', function() {
bits: 400000,
nonce: 123456,
height: 123,
chainwork: '0000000000000000000000000000000000000000000000000000000200020002'
chainwork: '0000000000000000000000000000000000000000000000000000000200020002',
nextHash: '91b58f19b6eecba94ed0f6e463e8e334ec0bcda7880e2985c82a8f32e4d03ade'
};
var versionBuf = new Buffer(4);
var prevHashBuf = new Buffer(header.prevHash, 'hex');
var nextHashBuf = new Buffer(header.nextHash, 'hex');
var merkleRootBuf = new Buffer(header.merkleRoot, 'hex');
var tsBuf = new Buffer(4);
var bitsBuf = new Buffer(4);
@@ -67,7 +69,8 @@ describe('Header service encoding', function() {
bitsBuf,
nonceBuf,
heightBuf,
chainBuf
chainBuf,
nextHashBuf
]));
});
@@ -81,7 +84,8 @@ describe('Header service encoding', function() {
bitsBuf,
nonceBuf,
heightBuf,
chainBuf
chainBuf,
nextHashBuf
])).should.deep.equal(header);
});
});


@@ -7,9 +7,10 @@ var assert = chai.assert;
var expect = chai.expect;
var Encoding = require('../../../lib/services/header/encoding');
var utils = require('../../../lib/utils');
var Block = require('bitcore-lib').Block;
var Block = require('flocore-lib').Block;
var BN = require('bn.js');
var Emitter = require('events').EventEmitter;
var bcoin = require('bcoin');
describe('Header Service', function() {
@@ -22,7 +23,6 @@ describe('Header Service', function() {
sandbox = sinon.sandbox.create();
headerService = new HeaderService({
node: {
getNetworkName: function() { return 'regtest'; },
services: []
}
});
@@ -41,7 +41,9 @@ describe('Header Service', function() {
var getServiceTip = sandbox.stub().callsArgWith(1, null, { height: 123, hash: 'a' });
var setListeners = sandbox.stub(headerService, '_setListeners');
var getPrefix = sandbox.stub().callsArgWith(1, null, new Buffer('ffee', 'hex'));
var getLastHeader = sandbox.stub(headerService, '_getLastHeader').callsArgWith(0, null);
var adjustHeadersForCheckPointTip = sandbox.stub(headerService, '_adjustHeadersForCheckPointTip').callsArgWith(0, null);
var setGenesisBlock = sandbox.stub(headerService, '_setGenesisBlock').callsArgWith(0, null);
headerService.GENESIS_HASH = '00';
var openBus = sandbox.stub();
headerService.node = { openBus: openBus };
var _startHeaderSubscription = sandbox.stub(headerService, '_startHeaderSubscription');
@@ -49,10 +51,10 @@ describe('Header Service', function() {
headerService._db = { getPrefix: getPrefix, getServiceTip: getServiceTip, batch: sinon.stub() };
headerService.start(function() {
expect(_startHeaderSubscription.calledOnce).to.be.true;
expect(getLastHeader.calledOnce).to.be.true;
expect(setGenesisBlock.calledOnce).to.be.true;
expect(adjustHeadersForCheckPointTip.calledOnce).to.be.true;
expect(setListeners.calledOnce).to.be.true;
expect(headerService._tip).to.be.deep.equal({ height: 123, hash: 'a' });
expect(headerService._tip).to.be.deep.equal({ height: 0, hash: '00' });
expect(headerService._encoding).to.be.instanceOf(Encoding);
done();
});
@@ -98,25 +100,27 @@ describe('Header Service', function() {
describe('#_startSync', function() {
it('should start the sync process', function() {
headerService._bestHeight = 123;
headerService._tip = { height: 120 };
var getHeaders = sandbox.stub();
headerService._p2p = { getHeaders: getHeaders };
var removeAllSubs = sandbox.stub(headerService, '_removeAllSubscriptions');
headerService._blockProcessor = { length: sinon.stub().returns(0) };
headerService._bestHeight = 100;
headerService._tip = { height: 98 };
var sync = sandbox.stub(headerService, '_sync');
headerService._startSync();
expect(getHeaders.calledOnce).to.be.true;
expect(removeAllSubs.calledOnce).to.be.true;
expect(sync.calledOnce).to.be.true;
});
});
describe('#_sync', function() {
it('should sync header', function() {
headerService._numNeeded = 1000;
headerService._tip = { height: 121, hash: 'a' };
var getHeaders = sandbox.stub();
headerService._p2p = { getHeaders: getHeaders };
it('should sync headers', function() {
var startHeaderSub = sandbox.stub(headerService, '_startHeaderSubscription');
var getP2PHeaders = sandbox.stub(headerService, '_getP2PHeaders');
headerService._tip = { hash: 'aa' };
headerService._sync();
expect(getHeaders.calledOnce).to.be.true;
expect(getP2PHeaders.calledOnce).to.be.true;
expect(startHeaderSub.calledOnce).to.be.true;
});
});
@@ -129,7 +133,12 @@ describe('Header Service', function() {
var onHeader = sandbox.stub(headerService, '_onHeader');
var saveHeaders = sandbox.stub(headerService, '_saveHeaders');
headerService._tip = { height: 123, hash: 'aa' };
var lastHeader = Object.assign({ height: 1, chainwork: new Array(65).join('0') }, prevHeader);
headerService._lastHeader = lastHeader;
headerService._onHeaders(headers);
expect(onHeader.calledOnce).to.be.true;
expect(saveHeaders.calledOnce).to.be.true;
@@ -159,4 +168,28 @@ describe('Header Service', function() {
});
describe('#_adjustHeadersForCheckPointTip', function() {
it('should get the last header from which to start synchronizing more headers', function(done) {
var stream = new Emitter();
var header = Object.assign({ chainwork: '00', height: 2 }, prevHeader );
var headerBuf = headerService._encoding.encodeHeaderValue(header);
headerService._tip = { height: 2, hash: 'aa' };
headerService._db = {
createReadStream: sandbox.stub().returns(stream),
batch: sandbox.stub().callsArgWith(1, null)
};
headerService._adjustHeadersForCheckPointTip(function(err) {
if(err) {
return done(err);
}
expect(headerService._tip.hash).to.equal(header.hash);
done();
});
stream.emit('data', { value: headerBuf });
stream.emit('end');
});
});
});


@@ -8,19 +8,25 @@ var Encoding = require('../../../lib/services/mempool/encoding');
describe('Block service encoding', function() {
var servicePrefix = new Buffer('0000', 'hex');
var txPrefix = new Buffer('00', 'hex');
var addressPrefix = new Buffer('01', 'hex');
var encoding = new Encoding(servicePrefix);
var hash = '25e28f9fb0ada5353b7d98d85af5524b2f8df5b0b0e2d188f05968bceca603eb';
var txString = '0100000004de9b4bb17f627096a9ee0b4528e4eae17df5b5c69edc29704c2e84a7371db29f010000006b483045022100f5b1a0d33b7be291c3953c25f8ae39d98601aa7099a8674daf638a08b86c7173022006ce372da5ad088a1cc6e5c49c2760a1b6f085eb1b51b502211b6bc9508661f9012102ec5e3731e54475dd2902326f43602a03ae3d62753324139163f81f20e787514cffffffff7a1d4e5fc2b8177ec738cd723a16cf2bf493791e55573445fc0df630fe5e2d64010000006b483045022100cf97f6cb8f126703e9768545dfb20ffb10ba78ae3d101aa46775f5a239b075fc02203150c4a89a11eaf5e404f4f96b62efa4455e9525765a025525c7105a7e47b6db012102c01e11b1d331f999bbdb83e8831de503cd52a01e3834a95ccafd615c67703d77ffffffff9e52447116415ca0d0567418a1a4ef8f27be3ff5a96bf87c922f3723d7db5d7c000000006b483045022100f6c117e536701be41a6b0b544d7c3b1091301e4e64a6265b6eb167b15d16959d022076916de4b115e700964194ce36a24cb9105f86482f4abbc63110c3f537cd5770012102ddf84cc7bee2d6a82ac09628a8ad4a26cd449fc528b81e7e6cc615707b8169dfffffffff5815d9750eb3572e30d6fd9df7afb4dbd76e042f3aa4988ac763b3fdf8397f80010000006a473044022028f4402b736066d93d2a32b28ccd3b7a21d84bb58fcd07fe392a611db94cdec5022018902ee0bf2c3c840c1b81ead4e6c87c88c48b2005bf5eea796464e561a620a8012102b6cdd1a6cd129ef796faeedb0b840fcd0ca00c57e16e38e46ee7028d59812ae7ffffffff0220a10700000000001976a914c342bcd1a7784d9842f7386b8b3b8a3d4171a06e88ac59611100000000001976a91449f8c749a9960dc29b5cbe7d2397cea7d26611bb88ac00000000'
var txString = '0100000004de9b4bb17f627096a9ee0b4528e4eae17df5b5c69edc29704c2e84a7371db29f010000006b483045022100f5b1a0d33b7be291c3953c25f8ae39d98601aa7099a8674daf638a08b86c7173022006ce372da5ad088a1cc6e5c49c2760a1b6f085eb1b51b502211b6bc9508661f9012102ec5e3731e54475dd2902326f43602a03ae3d62753324139163f81f20e787514cffffffff7a1d4e5fc2b8177ec738cd723a16cf2bf493791e55573445fc0df630fe5e2d64010000006b483045022100cf97f6cb8f126703e9768545dfb20ffb10ba78ae3d101aa46775f5a239b075fc02203150c4a89a11eaf5e404f4f96b62efa4455e9525765a025525c7105a7e47b6db012102c01e11b1d331f999bbdb83e8831de503cd52a01e3834a95ccafd615c67703d77ffffffff9e52447116415ca0d0567418a1a4ef8f27be3ff5a96bf87c922f3723d7db5d7c000000006b483045022100f6c117e536701be41a6b0b544d7c3b1091301e4e64a6265b6eb167b15d16959d022076916de4b115e700964194ce36a24cb9105f86482f4abbc63110c3f537cd5770012102ddf84cc7bee2d6a82ac09628a8ad4a26cd449fc528b81e7e6cc615707b8169dfffffffff5815d9750eb3572e30d6fd9df7afb4dbd76e042f3aa4988ac763b3fdf8397f80010000006a473044022028f4402b736066d93d2a32b28ccd3b7a21d84bb58fcd07fe392a611db94cdec5022018902ee0bf2c3c840c1b81ead4e6c87c88c48b2005bf5eea796464e561a620a8012102b6cdd1a6cd129ef796faeedb0b840fcd0ca00c57e16e38e46ee7028d59812ae7ffffffff0220a10700000000001976a914c342bcd1a7784d9842f7386b8b3b8a3d4171a06e88ac59611100000000001976a91449f8c749a9960dc29b5cbe7d2397cea7d26611bb88ac00000000';
var address = '1234567';
var now = Math.floor(Date.now() / 1000);
var nowBuf = new Buffer(4);
nowBuf.writeUInt32BE(now);
describe('Mempool', function() {
it('should encode mempool transaction key', function() {
encoding.encodeMempoolTransactionKey(hash).should.deep.equal(Buffer.concat([ servicePrefix, new Buffer(hash, 'hex') ]));
encoding.encodeMempoolTransactionKey(hash).should.deep.equal(Buffer.concat([ servicePrefix, txPrefix, new Buffer(hash, 'hex') ]));
});
it('should decode mempool transaction key', function() {
encoding.decodeMempoolTransactionKey(Buffer.concat([ servicePrefix, new Buffer(hash, 'hex') ])).should.deep.equal(hash);
encoding.decodeMempoolTransactionKey(Buffer.concat([ servicePrefix, txPrefix, new Buffer(hash, 'hex') ])).should.deep.equal(hash);
});
it('should encode mempool transaction value', function() {
@@ -34,6 +40,36 @@ describe('Block service encoding', function() {
mytx.should.deep.equal(tx.fromRaw(txString, 'hex'));
});
it('should encode mempool address key', function() {
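// expected key layout: service prefix | address prefix | address length (0x07 for '1234567') | address | txid | index (uint32) | input flag (1 byte)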
encoding.encodeMempoolAddressKey(address, hash, 0, 1)
.should.deep.equal(Buffer.concat([
servicePrefix,
addressPrefix,
new Buffer('07', 'hex'),
new Buffer(address),
new Buffer(hash, 'hex'),
new Buffer('00000000', 'hex'),
new Buffer('01', 'hex')
]));
});
it('should decode mempool address key', function() {
encoding.decodeMempoolAddressKey(Buffer.concat([
servicePrefix,
addressPrefix,
new Buffer('07', 'hex'),
new Buffer(address),
new Buffer(hash, 'hex'),
new Buffer('00000000', 'hex'),
new Buffer('01', 'hex') ])).should.deep.equal({
address: address,
txid: hash,
index: 0,
input: 1,
});
});
});
});


@@ -18,7 +18,6 @@ describe('Mempool Service', function() {
sandbox = sinon.sandbox.create();
mempoolService = new MempoolService({
node: {
getNetworkName: function() { return 'regtest'; },
services: []
}
});
@@ -33,12 +32,10 @@ describe('Mempool Service', function() {
it('should get the db prefix', function(done) {
var getPrefix = sandbox.stub().callsArgWith(1, null, new Buffer('0001', 'hex'));
var startSubs = sandbox.stub(mempoolService, '_startSubscriptions');
mempoolService._db = { getPrefix: getPrefix };
mempoolService.start(function() {
expect(getPrefix.calledOnce).to.be.true;
expect(startSubs.calledOnce).to.be.true;
done();
});
});
@@ -74,18 +71,22 @@ describe('Mempool Service', function() {
describe('#_onTransaction', function() {
it('should add the transaction to the database', function() {
var put = sandbox.stub();
mempoolService._db = { put: put };
var batch = sandbox.stub();
mempoolService._db = { batch: batch };
mempoolService._onTransaction(tx);
expect(put.calledOnce).to.be.true;
expect(batch.calledOnce).to.be.true;
});
});
describe('#_onBlock', function() {
it('should remove block\'s txs from database', function(done) {
mempoolService.node = { openBus: sinon.stub() };
mempoolService._p2p = { getMempool: sinon.stub() };
sandbox.stub(mempoolService, '_startSubscriptions');
mempoolService.enable();
mempoolService.onBlock(block, function(err, ops) {
expect(ops[0].type).to.deep.equal('del');
expect(ops[0].key.toString('hex')).to.deep.equal('00006321fd1cf3fbf32a41bbb47b7090ab9896bbe6093e4c342c0269b652fa800c2b');
expect(ops[0].key.toString('hex')).to.deep.equal('0000006321fd1cf3fbf32a41bbb47b7090ab9896bbe6093e4c342c0269b652fa800c2b');
done();
});
});


@@ -1,5 +1,4 @@
'use strict';
var should = require('chai').should();
var Tx = require('bcoin').tx;
@@ -8,24 +7,29 @@ var Encoding = require('../../../lib/services/transaction/encoding');
describe('Transaction service encoding', function() {
var servicePrefix = new Buffer('0000', 'hex');
var encoding = new Encoding(servicePrefix);
var txid = '91b58f19b6eecba94ed0f6e463e8e334ec0bcda7880e2985c82a8f32e4d03add';
var blockHash = txid;
var txHex = '0100000001cc3ffe0638792c8b39328bb490caaefe2cf418f2ce0144956e0c22515f29724d010000006a473044022030ce9fa68d1a32abf0cd4adecf90fb998375b64fe887c6987278452b068ae74c022036a7d00d1c8af19e298e04f14294c807ebda51a20389ad751b4ff3c032cf8990012103acfcb348abb526526a9f63214639d79183871311c05b2eebc727adfdd016514fffffffff02f6ae7d04000000001976a9144455183e407ee4d3423858c8a3275918aedcd18e88aca99b9b08010000001976a9140beceae2c29bfde08d2b6d80b33067451c5887be88ac00000000';
var tx = Tx.fromRaw(txHex, 'hex');
var txEncoded = Buffer.concat([new Buffer('00000002', 'hex'), new Buffer('00000001', 'hex'), new Buffer('0002', 'hex'), new Buffer('40000000000000004008000000000000', 'hex'), tx.toRaw()]);
var txEncoded = Buffer.concat([new Buffer('00000002', 'hex'), new Buffer(blockHash, 'hex'), new Buffer('00000001', 'hex'), new Buffer('0002', 'hex'), new Buffer('40000000000000004008000000000000', 'hex'), tx.toRaw()]);
var indexBuf = new Buffer(4);
indexBuf.writeUInt32BE(3);
it('should encode transaction key' , function() {
var txBuf = new Buffer(txid, 'hex');
encoding.encodeTransactionKey(txid).should.deep.equal(Buffer.concat([servicePrefix, txBuf]));
encoding.encodeTransactionKey(txid).should.deep.equal(Buffer.concat([servicePrefix, new Buffer('00', 'hex'), txBuf]));
});
it('should decode transaction key', function() {
encoding.decodeTransactionKey(Buffer.concat([servicePrefix, new Buffer(txid, 'hex')]))
encoding.decodeTransactionKey(Buffer.concat([servicePrefix, new Buffer('00', 'hex'), new Buffer(txid, 'hex')]))
.should.equal(txid);
});
it('should encode transaction value', function() {
tx.__height = 2;
tx.__blockhash = blockHash;
tx.__timestamp = 1;
tx.__inputValues = [ 2, 3 ];
@@ -35,8 +39,29 @@ describe('Transaction service encoding', function() {
it('should decode transaction value', function() {
var tx = encoding.decodeTransactionValue(txEncoded);
tx.__height.should.equal(2);
tx.__timestamp.should.equal(1);
tx.__inputValues.should.deep.equal([2,3]);
tx.toRaw().toString('hex').should.equal(txHex);
});
it('should encode spent key', function() {
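// spent keys use a 0x01 prefix byte after the service prefix, distinguishing them from transaction keys (0x00 above)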
encoding.encodeSpentKey(txid, 3).should.deep.equal(Buffer.concat([servicePrefix,
new Buffer('01', 'hex'), new Buffer(txid, 'hex'), indexBuf]));
});
it('should decode spent key', function() {
encoding.decodeSpentKey(Buffer.concat([servicePrefix,
new Buffer('01', 'hex'), new Buffer(txid, 'hex'), indexBuf])).should.deep.equal({ txid: txid, outputIndex: 3 });
});
it('should encode spent value', function() {
encoding.encodeSpentValue(txid, 3, 3, txid).should.deep.equal(Buffer.concat([new Buffer(txid, 'hex'), indexBuf, indexBuf, new Buffer(blockHash, 'hex')]));
});
it('should decode spent value', function() {
encoding.decodeSpentValue(Buffer.concat([new Buffer(txid, 'hex'), indexBuf,
indexBuf, new Buffer(blockHash, 'hex')]))
.should.deep.equal({ txid: txid, inputIndex: 3, blockHeight: 3, blockHash: blockHash });
});
});


@@ -18,7 +18,6 @@ describe('Transaction Service', function() {
sandbox = sinon.sandbox.create();
txService = new TxService({
node: {
getNetworkName: function() { return 'regtest'; },
services: []
}
});
@@ -47,19 +46,6 @@ describe('Transaction Service', function() {
});
});
describe('#sendTransaction', function() {
it('should send a raw transaction', function(done) {
var sendTransaction = sandbox.stub().callsArg(0);
txService._p2p = { sendTransaction: sendTransaction };
txService.sendTransaction(function(err) {
if (err) {
return done(err);
}
done();
});
});
});
describe('#_getBlockTimestamp', function() {
it('should get the block\'s timestamp', function() {
var getTimestamp = sandbox.stub().returns(1);
@@ -73,7 +59,7 @@ describe('Transaction Service', function() {
it('should process new blocks that come in from the block service', function(done) {
var _processTransaction = sandbox.stub(txService, '_processTransaction');
var _processTransaction = sandbox.stub(txService, '_processTransaction').callsArgWith(2, null, {});
txService.onBlock(block, function(err, ops) {
if (err) {
@@ -101,26 +87,45 @@ describe('Transaction Service', function() {
});
describe('#getInputValues', function() {
describe('#_getInputValues', function() {
it('should add missing input values on a tx', function(done) {
it('should get input values', function(done) {
var put = sandbox.stub().callsArgWith(2, null);
txService._db = { put: put };
sandbox.stub(txService, '_getTransaction').callsArgWith(2, null, tx.txid(), tx);
sandbox.stub(txService, '_getTransaction').callsArgWith(2, null, tx.txid(), tx, {});
tx.__inputValues = [];
txService.getInputValues(tx, {}, function(err, tx) {
txService._getInputValues(tx, {}, function(err, values) {
if (err) {
return done(err);
}
tx.__inputValues.should.deep.equal([1139033, 1139033, 500000, 1139033]);
values.should.deep.equal([1139033, 1139033, 500000, 1139033]);
done();
});
});
});
describe('#setTxMetaInfo', function() {
it('should set the appropriate meta data on a tx.', function(done) {
sandbox.stub(txService, '_getInputValues').callsArgWith(2, null, [2]);
var tx = { outputs: [ { value: 1 } ], inputs: [ { value: 2, isCoinbase: sinon.stub().returns(false) } ] };
txService.setTxMetaInfo(tx, {}, function(err, _tx) {
if (err) {
return done(err);
}
_tx.__inputValues.should.deep.equal([2]);
_tx.confirmations.should.equal(0);
_tx.inputSatoshis.should.equal(2);
done();
});
});
});
});


@@ -122,4 +122,15 @@ describe('Utils', function() {
map.getLastIndex().should.equal('last value');
});
});
describe('#convertMillisecondsToHumanReadable', function() {
it('should convert a number of milliseconds to human readable format', function() {
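// 164532 ms = 2 min (120000 ms) + 44 s (44000 ms) + 532 ms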
var actual1 = utils.convertMillisecondsToHumanReadable(164532);
actual1.should.equal('2 minute(s). 44 second(s). 532 millisecond(s).');
var actual2 = utils.convertMillisecondsToHumanReadable(1);
actual2.should.equal('1 millisecond(s).');
var actual3 = utils.convertMillisecondsToHumanReadable(null);
actual3.should.equal('invalid number of ms.');
});
});
});