From 8435a116f117d1289df0b4c26b899799ebff0012 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Tue, 19 Feb 2019 17:41:17 -0800 Subject: [PATCH 01/31] blockstore: add file block storage --- lib/blockstore/abstract.js | 100 ++++++ lib/blockstore/file.js | 371 ++++++++++++++++++++++ lib/blockstore/index.js | 15 + lib/blockstore/layout.js | 30 ++ lib/blockstore/level.js | 128 ++++++++ lib/blockstore/records.js | 149 +++++++++ test/blockstore-test.js | 634 +++++++++++++++++++++++++++++++++++++ test/util/common.js | 8 + 8 files changed, 1435 insertions(+) create mode 100644 lib/blockstore/abstract.js create mode 100644 lib/blockstore/file.js create mode 100644 lib/blockstore/index.js create mode 100644 lib/blockstore/layout.js create mode 100644 lib/blockstore/level.js create mode 100644 lib/blockstore/records.js create mode 100644 test/blockstore-test.js diff --git a/lib/blockstore/abstract.js b/lib/blockstore/abstract.js new file mode 100644 index 00000000..df7172ae --- /dev/null +++ b/lib/blockstore/abstract.js @@ -0,0 +1,100 @@ +/*! + * blockstore/abstract.js - abstract block store for bcoin + * Copyright (c) 2019, Braydon Fuller (MIT License). + * https://github.com/bcoin-org/bcoin + */ + +'use strict'; + +const Logger = require('blgr'); + +/** + * Abstract Block Store + * + * @alias module:blockstore.AbstractBlockStore + * @abstract + */ + +class AbstractBlockStore { + /** + * Create an abstract blockstore. + * @constructor + */ + + constructor(options) { + this.options = options || {}; + + if (this.options.logger != null) + this.logger = this.options.logger.context('blockstore'); + else + this.logger = Logger.global.context('blockstore'); + } + + /** + * This method opens any necessary resources and + * initializes the store to be ready to be queried. + * @returns {Promise} + */ + + async open() { + throw new Error('Abstract method.'); + } + + /** + * This method closes resources and prepares + * store to be closed. 
+ * @returns {Promise} + */ + + async close() { + throw new Error('Abstract method.'); + } + + /** + * This method stores block data. The action should be idempotent. + * If the data is already stored, the behavior will be the same. Any + * concurrent requests to store the same data will produce the same + * result, and will not conflict with each other. + * @returns {Promise} + */ + + async write(hash, data) { + throw new Error('Abstract method.'); + } + + /** + * This method will retrieve block data. Smaller portions of + * the block can be read by using the offset and size arguments. + * @returns {Promise} + */ + + async read(hash, offset, size) { + throw new Error('Abstract method.'); + } + + /** + * This will free resources for storing the block data. This + * may not mean that the block is deleted, but that it should + * no longer consume any local storage resources. + * @returns {Promise} + */ + + async prune(hash) { + throw new Error('Abstract method.'); + } + + /** + * This will check if a block has been stored and is available. + * @returns {Promise} + */ + + async has(hash) { + throw new Error('Abstract method.'); + } +} + +/* + * Expose + */ + +module.exports = AbstractBlockStore; diff --git a/lib/blockstore/file.js b/lib/blockstore/file.js new file mode 100644 index 00000000..e91da649 --- /dev/null +++ b/lib/blockstore/file.js @@ -0,0 +1,371 @@ +/*! + * blockstore/file.js - file block store for bcoin + * Copyright (c) 2019, Braydon Fuller (MIT License). 
+ * https://github.com/bcoin-org/bcoin + */ + +'use strict'; + +const {isAbsolute, resolve, join} = require('path'); +const bdb = require('bdb'); +const assert = require('bsert'); +const fs = require('bfile'); +const bio = require('bufio'); +const Network = require('../protocol/network'); +const Block = require('../primitives/block'); +const AbstractBlockStore = require('./abstract'); +const {BlockRecord, FileRecord} = require('./records'); +const layout = require('./layout'); + +/** + * File Block Store + * + * @alias module:blockstore:FileBlockStore + * @abstract + */ + +class FileBlockStore extends AbstractBlockStore { + /** + * Create a blockstore that stores blocks in files. + * @constructor + */ + + constructor(options) { + super(); + + assert(isAbsolute(options.location), 'Location not absolute.'); + + this.location = options.location; + this.db = bdb.create({ + location: resolve(this.location, './index') + }); + this.maxFileLength = options.maxFileLength || 128 * 1024 * 1024; + + this.network = Network.primary; + + if (options.network != null) + this.network = Network.get(options.network); + } + + /** + * Compares the number of files in the directory + * with the recorded number of files. If there are any + * inconsistencies it will reindex all blocks. 
+ * @private + * @returns {Promise} + */ + + async index() { + const regexp = /^blk(\d{5})\.dat$/; + const all = await fs.readdir(this.location); + const dats = all.filter(f => regexp.test(f)); + const filenos = dats.map(f => parseInt(f.match(regexp)[1])); + + let missing = false; + + for (const fileno of filenos) { + const rec = await this.db.get(layout.f.encode(fileno)); + if (!rec) { + missing = true; + break; + } + } + + if (!missing) + return; + + this.logger.info('Indexing FileBlockStore...'); + + for (const fileno of filenos) { + const b = this.db.batch(); + const filepath = this.filepath(fileno); + const data = await fs.readFile(filepath); + const reader = bio.read(data); + let magic = null; + let blocks = 0; + + while (reader.left() >= 4) { + magic = reader.readU32(); + if (magic !== this.network.magic) { + reader.seek(4); + continue; + } + + const length = reader.readU32(); + const position = reader.offset; + + const block = Block.fromReader(reader); + const hash = block.hash(); + + const blockrecord = new BlockRecord({ + file: fileno, + position: position, + length: length + }); + + blocks += 1; + b.put(layout.b.encode(hash), blockrecord.toRaw()); + } + + const filerecord = new FileRecord({ + blocks: blocks, + used: reader.offset, + length: this.maxFileLength + }); + + b.put(layout.f.encode(fileno), filerecord.toRaw()); + + await b.write(); + + this.logger.info(`Indexed ${blocks} blocks from ${filepath}...`); + } + } + + /** + * Opens the file block store. It will regenerate necessary block + * indexing if the index is missing or inconsistent. + * @returns {Promise} + */ + + async open() { + this.logger.info('Opening FileBlockStore...'); + + await this.db.open(); + await this.db.verify(layout.V.encode(), 'fileblockstore', 0); + + await this.index(); + } + + /** + * This closes the file block store and underlying + * databases for indexing. 
+ */ + + async close() { + this.logger.info('Closing FileBlockStore...'); + + await this.db.close(); + } + + /** + * This method will determine the file path based on the file number + * and the current block data location. + * @param {Number} fileno - The number of the file. + * @returns {Promise} + */ + + filepath(fileno) { + const pad = 5; + + let num = fileno.toString(10); + + if (num.length > pad) + throw new Error('File number too large.'); + + while (num.length < pad) + num = `0${num}`; + + return join(this.location, `blk${num}.dat`); + } + + /** + * This method will select and potentially allocate a file to + * write a block based on the size. + * @param {Number} length - The number of bytes of the data to be written. + * @returns {Promise} + */ + + async allocate(length) { + if (length > this.maxFileLength) + throw new Error('Block length above max file length.'); + + let fileno = 0; + let filerecord = null; + let filepath = null; + + const last = await this.db.get(layout.R.encode()); + if (last) + fileno = bio.read(last).readU32(); + + filepath = this.filepath(fileno); + + const rec = await this.db.get(layout.f.encode(fileno)); + + if (rec) { + filerecord = FileRecord.fromRaw(rec); + } else { + filerecord = new FileRecord({ + blocks: 0, + used: 0, + length: this.maxFileLength + }); + } + + if (filerecord.used + length > filerecord.length) { + fileno += 1; + filepath = this.filepath(fileno); + filerecord = new FileRecord({ + blocks: 0, + used: 0, + length: this.maxFileLength + }); + } + + return {fileno, filerecord, filepath}; + } + + /** + * This method stores block data in files. 
+ * @param {Buffer} hash - The block hash + * @param {Buffer} data - The block data + * @returns {Promise} + */ + + async write(hash, data) { + const mlength = 8; + const blength = data.length; + const length = data.length + mlength; + + const { + fileno, + filerecord, + filepath + } = await this.allocate(length); + + const mposition = filerecord.used; + const bposition = filerecord.used + mlength; + + const bwm = bio.write(mlength); + bwm.writeU32(this.network.magic); + bwm.writeU32(blength); + const magic = bwm.render(); + + const fd = await fs.open(filepath, 'a'); + + const mwritten = await fs.write(fd, magic, 0, mlength, mposition); + const bwritten = await fs.write(fd, data, 0, blength, bposition); + + await fs.close(fd); + + if (mwritten !== mlength) + throw new Error('Could not write block magic.'); + + if (bwritten !== blength) + throw new Error('Could not write block.'); + + filerecord.blocks += 1; + filerecord.used += length; + + const b = this.db.batch(); + + const blockrecord = new BlockRecord({ + file: fileno, + position: bposition, + length: blength + }); + + b.put(layout.b.encode(hash), blockrecord.toRaw()); + b.put(layout.f.encode(fileno), filerecord.toRaw()); + + const bw = bio.write(4); + b.put(layout.R.encode(), bw.writeU32(fileno).render()); + + await b.write(); + } + + /** + * This method will retrieve block data. Smaller portions of the + * block (e.g. transactions) can be read by using the offset and + * length arguments. 
+ * @param {Buffer} hash - The block hash + * @param {Number} offset - The offset within the block + * @param {Number} length - The number of bytes of the data + * @returns {Promise} + */ + + async read(hash, offset, length) { + const raw = await this.db.get(layout.b.encode(hash)); + if (!raw) + return null; + + const blockrecord = BlockRecord.fromRaw(raw); + + const filepath = this.filepath(blockrecord.file); + + let position = blockrecord.position; + + if (offset) + position += offset; + + if (!length) + length = blockrecord.length; + + if (offset + length > blockrecord.length) + throw new Error('Out-of-bounds read.'); + + const data = Buffer.alloc(length); + + const fd = await fs.open(filepath, 'r'); + await fs.read(fd, data, 0, length, position); + await fs.close(fd); + + return data; + } + + /** + * This will free resources for storing the block data. The block + * data may not be deleted from disk immediately, the index for + * the block is removed and will not be able to be read. The underlying + * file is unlinked when all blocks in a file have been pruned. + * @param {Buffer} hash - The block hash + * @returns {Promise} + */ + + async prune(hash) { + const braw = await this.db.get(layout.b.encode(hash)); + if (!braw) + return false; + + const blockrecord = BlockRecord.fromRaw(braw); + + const fraw = await this.db.get(layout.f.encode(blockrecord.file)); + if (!fraw) + return false; + + const filerecord = FileRecord.fromRaw(fraw); + + filerecord.blocks -= 1; + + const b = this.db.batch(); + + if (filerecord.blocks === 0) + b.del(layout.f.encode(blockrecord.file)); + else + b.put(layout.f.encode(blockrecord.file), filerecord.toRaw()); + + b.del(layout.b.encode(hash)); + + await b.write(); + + if (filerecord.blocks === 0) + await fs.unlink(this.filepath(blockrecord.file)); + + return true; + } + + /** + * This will check if a block has been stored and is available. 
+ * @param {Buffer} hash - The block hash + * @returns {Promise} + */ + + async has(hash) { + return await this.db.has(layout.b.encode(hash)); + } +} + +/* + * Expose + */ + +module.exports = FileBlockStore; diff --git a/lib/blockstore/index.js b/lib/blockstore/index.js new file mode 100644 index 00000000..77bf9715 --- /dev/null +++ b/lib/blockstore/index.js @@ -0,0 +1,15 @@ +/*! + * blockstore/index.js - bitcoin blockstore for bcoin + * Copyright (c) 2019, Braydon Fuller (MIT License). + * https://github.com/bcoin-org/bcoin + */ + +'use strict'; + +/** + * @module blockstore + */ + +exports.AbstractBlockStore = require('./abstract'); +exports.FileBlockStore = require('./file'); +exports.LevelBlockStore = require('./level'); diff --git a/lib/blockstore/layout.js b/lib/blockstore/layout.js new file mode 100644 index 00000000..5b4d6e39 --- /dev/null +++ b/lib/blockstore/layout.js @@ -0,0 +1,30 @@ +/*! + * blockstore/layout.js - file block store data layout for bcoin + * Copyright (c) 2019, Braydon Fuller (MIT License). + * https://github.com/bcoin-org/bcoin + */ + +'use strict'; + +const bdb = require('bdb'); + +/* + * Database Layout: + * V -> db version + * R -> last file entry + * f[uint32] -> file entry + * b[hash] -> block entry + */ + +const layout = { + V: bdb.key('V'), + R: bdb.key('R'), + f: bdb.key('f', ['uint32']), + b: bdb.key('b', ['hash256']) +}; + +/* + * Expose + */ + +module.exports = layout; diff --git a/lib/blockstore/level.js b/lib/blockstore/level.js new file mode 100644 index 00000000..42764c62 --- /dev/null +++ b/lib/blockstore/level.js @@ -0,0 +1,128 @@ +/*! + * blockstore/level.js - leveldb block store for bcoin + * Copyright (c) 2019, Braydon Fuller (MIT License). 
+ * https://github.com/bcoin-org/bcoin + */ + +'use strict'; + +const {isAbsolute, resolve} = require('path'); +const bdb = require('bdb'); +const assert = require('bsert'); +const AbstractBlockStore = require('./abstract'); +const layout = require('./layout'); + +/** + * LevelDB Block Store + * + * @alias module:blockstore:LevelBlockStore + * @abstract + */ + +class LevelBlockStore extends AbstractBlockStore { + /** + * Create a blockstore that stores blocks in LevelDB. + * @constructor + */ + + constructor(options) { + super(); + + assert(isAbsolute(options.location), 'Location not absolute.'); + + this.location = options.location; + this.db = bdb.create({ + location: resolve(this.location, './index') + }); + } + + /** + * Opens the block storage. + * @returns {Promise} + */ + + async open() { + this.logger.info('Opening LevelBlockStore...'); + + await this.db.open(); + await this.db.verify(layout.V.encode(), 'levelblockstore', 0); + } + + /** + * Closes the block storage. + */ + + async close() { + this.logger.info('Closing LevelBlockStore...'); + + await this.db.close(); + } + + /** + * This method stores block data in LevelDB. + * @param {Buffer} hash - The block hash + * @param {Buffer} data - The block data + * @returns {Promise} + */ + + async write(hash, data) { + this.db.put(layout.b.encode(hash), data); + } + + /** + * This method will retrieve block data. Smaller portions of the + * block (e.g. transactions) can be returned using the offset and + * length arguments. However, the entire block will be read as the + * data is stored in a key/value database. 
+ * @param {Buffer} hash - The block hash + * @param {Number} offset - The offset within the block + * @param {Number} length - The number of bytes of the data + * @returns {Promise} + */ + + async read(hash, offset, length) { + let raw = await this.db.get(layout.b.encode(hash)); + + if (offset) { + if (offset + length > raw.length) + throw new Error('Out-of-bounds read.'); + + raw = raw.slice(offset, offset + length); + } + + return raw; + } + + /** + * This will free resources for storing the block data. The block + * data may not be immediately removed from disk, and will be reclaimed + * during LevelDB compaction. + * @param {Buffer} hash - The block hash + * @returns {Promise} + */ + + async prune(hash) { + if (!await this.has(hash)) + return false; + + await this.db.del(layout.b.encode(hash)); + + return true; + } + + /** + * This will check if a block has been stored and is available. + * @param {Buffer} hash - The block hash + * @returns {Promise} + */ + + async has(hash) { + return this.db.has(layout.b.encode(hash)); + } +} + +/* + * Expose + */ + +module.exports = LevelBlockStore; diff --git a/lib/blockstore/records.js b/lib/blockstore/records.js new file mode 100644 index 00000000..1f75ce00 --- /dev/null +++ b/lib/blockstore/records.js @@ -0,0 +1,149 @@ +/*! + * blockstore/records.js - block store records + * Copyright (c) 2019, Braydon Fuller (MIT License). + * https://github.com/bcoin-org/bcoin + */ + +'use strict'; + +const assert = require('bsert'); +const bio = require('bufio'); + +/** + * @module blockstore/records + */ + +/** + * Block Record + */ + +class BlockRecord { + /** + * Create a block record. 
+ * @constructor + */ + + constructor(options = {}) { + this.file = options.file || 0; + this.position = options.position || 0; + this.length = options.length || 0; + + assert((this.file >>> 0) === this.file); + assert((this.position >>> 0) === this.position); + assert((this.length >>> 0) === this.length); + } + + /** + * Inject properties from serialized data. + * @private + * @param {Buffer} data + */ + + fromRaw(data) { + const br = bio.read(data); + + this.file = br.readU32(); + this.position = br.readU32(); + this.length = br.readU32(); + + return this; + } + + /** + * Instantiate block record from serialized data. + * @param {Hash} hash + * @param {Buffer} data + * @returns {BlockRecord} + */ + + static fromRaw(data) { + return new this().fromRaw(data); + } + + /** + * Serialize the block record. + * @returns {Buffer} + */ + + toRaw() { + const bw = bio.write(12); + + bw.writeU32(this.file); + bw.writeU32(this.position); + bw.writeU32(this.length); + + return bw.render(); + } +} + +/** + * File Record + */ + +class FileRecord { + /** + * Create a chain state. + * @constructor + */ + + constructor(options = {}) { + this.blocks = options.blocks || 0; + this.used = options.used || 0; + this.length = options.length || 0; + + assert((this.blocks >>> 0) === this.blocks); + assert((this.used >>> 0) === this.used); + assert((this.length >>> 0) === this.length); + } + + /** + * Inject properties from serialized data. + * @private + * @param {Buffer} data + */ + + fromRaw(data) { + const br = bio.read(data); + + this.blocks = br.readU32(); + this.used = br.readU32(); + this.length = br.readU32(); + + return this; + } + + /** + * Instantiate file record from serialized data. + * @param {Hash} hash + * @param {Buffer} data + * @returns {ChainState} + */ + + static fromRaw(data) { + return new this().fromRaw(data); + } + + /** + * Serialize the file record. 
+ * @returns {Buffer} + */ + + toRaw() { + const bw = bio.write(12); + + bw.writeU32(this.blocks); + bw.writeU32(this.used); + bw.writeU32(this.length); + + return bw.render(); + } +} + +/* + * Expose + */ + +exports.BlockRecord = BlockRecord; +exports.FileRecord = FileRecord; + +module.exports = exports; diff --git a/test/blockstore-test.js b/test/blockstore-test.js new file mode 100644 index 00000000..2dead60e --- /dev/null +++ b/test/blockstore-test.js @@ -0,0 +1,634 @@ +/* eslint-env mocha */ +/* eslint prefer-arrow-callback: "off" */ + +'use strict'; + +const Logger = require('blgr'); +const assert = require('./util/assert'); +const common = require('./util/common'); +const {resolve} = require('path'); +const fs = require('bfile'); +const {rimraf} = require('./util/common'); +const {mkdirp} = require('bfile'); +const random = require('bcrypto/lib/random'); + +const vectors = [ + common.readBlock('block300025'), + common.readBlock('block426884'), + common.readBlock('block898352') +]; + +const { + AbstractBlockStore, + FileBlockStore, + LevelBlockStore +} = require('../lib/blockstore'); + +const layout = require('../lib/blockstore/layout'); + +const { + BlockRecord, + FileRecord +} = require('../lib/blockstore/records'); + +describe('BlockStore', function() { + describe('Abstract', function() { + let logger = null; + + function context(ctx) { + return {info: () => ctx}; + } + + beforeEach(() => { + logger = Logger.global; + Logger.global = {context}; + }); + + afterEach(() => { + Logger.global = logger; + }); + + it('construct with custom logger', async () => { + const store = new AbstractBlockStore({logger: {context}}); + assert(store.logger); + assert(store.logger.info); + assert.equal(store.logger.info(), 'blockstore'); + }); + + it('construct with default logger', async () => { + const store = new AbstractBlockStore(); + assert(store.logger); + assert(store.logger.info); + assert.equal(store.logger.info(), 'blockstore'); + }); + + it('has unimplemented base 
methods', async () => { + const methods = ['open', 'close', 'write', 'read', + 'prune', 'has']; + + const store = new AbstractBlockStore(); + + for (const method of methods) { + assert(store[method]); + + let err = null; + try { + await store[method](); + } catch (e) { + err = e; + } + assert(err, `Expected unimplemented method ${method}.`); + assert.equal(err.message, 'Abstract method.'); + } + }); + }); + + describe('Records', function() { + describe('BlockRecord', function() { + function constructError(options) { + let err = null; + + try { + new BlockRecord({ + file: options.file, + position: options.position, + length: options.length + }); + } catch (e) { + err = e; + } + + assert(err); + } + + function toAndFromRaw(options) { + const rec1 = new BlockRecord(options); + assert.equal(rec1.file, options.file); + assert.equal(rec1.position, options.position); + assert.equal(rec1.length, options.length); + + const raw = rec1.toRaw(); + const rec2 = BlockRecord.fromRaw(raw); + assert.equal(rec2.file, options.file); + assert.equal(rec2.position, options.position); + assert.equal(rec2.length, options.length); + } + + it('construct with correct options', () => { + const rec = new BlockRecord({ + file: 12, + position: 23392, + length: 4194304 + }); + assert.equal(rec.file, 12); + assert.equal(rec.position, 23392); + assert.equal(rec.length, 4194304); + }); + + it('construct null record', () => { + const rec = new BlockRecord(); + assert.equal(rec.file, 0); + assert.equal(rec.position, 0); + assert.equal(rec.length, 0); + }); + + it('fail with signed number (file)', () => { + constructError({file: -1, position: 1, length: 1}); + }); + + it('fail with signed number (position)', () => { + constructError({file: 1, position: -1, length: 1}); + }); + + it('fail with signed number (length)', () => { + constructError({file: 1, position: 1, length: -1}); + }); + + it('fail with non-32-bit number (file)', () => { + constructError({file: Math.pow(2, 32), position: 1, length: 1}); 
+ }); + + it('fail with non-32-bit number (position)', () => { + constructError({file: 1, position: Math.pow(2, 32), length: 1}); + }); + + it('fail with non-32-bit number (length)', () => { + constructError({file: 1, position: 1, length: Math.pow(2, 32)}); + }); + + it('construct with max 32-bit numbers', () => { + const max = Math.pow(2, 32) - 1; + + const rec = new BlockRecord({ + file: max, + position: max, + length: max + }); + + assert(rec); + assert.equal(rec.file, max); + assert.equal(rec.position, max); + assert.equal(rec.length, max); + }); + + it('serialize/deserialize file record (min)', () => { + toAndFromRaw({file: 0, position: 0, length: 0}); + }); + + it('serialize/deserialize file record', () => { + toAndFromRaw({file: 12, position: 23392, length: 4194304}); + }); + + it('serialize/deserialize file record (max)', () => { + const max = Math.pow(2, 32) - 1; + toAndFromRaw({file: max, position: max, length: max}); + }); + }); + + describe('FileRecord', function() { + function constructError(options) { + let err = null; + + try { + new FileRecord({ + blocks: options.blocks, + used: options.used, + length: options.length + }); + } catch (e) { + err = e; + } + + assert(err); + } + + function toAndFromRaw(options) { + const rec1 = new FileRecord(options); + assert.equal(rec1.blocks, options.blocks); + assert.equal(rec1.used, options.used); + assert.equal(rec1.length, options.length); + + const raw = rec1.toRaw(); + const rec2 = FileRecord.fromRaw(raw); + assert.equal(rec2.blocks, options.blocks); + assert.equal(rec2.used, options.used); + assert.equal(rec2.length, options.length); + } + + it('construct with correct options', () => { + const rec = new FileRecord({ + blocks: 1, + used: 4194304, + length: 20971520 + }); + assert.equal(rec.blocks, 1); + assert.equal(rec.used, 4194304); + assert.equal(rec.length, 20971520); + }); + + it('fail to with signed number (blocks)', () => { + constructError({blocks: -1, used: 1, length: 1}); + }); + + it('fail to with 
signed number (used)', () => { + constructError({blocks: 1, used: -1, length: 1}); + }); + + it('fail to with signed number (length)', () => { + constructError({blocks: 1, used: 1, length: -1}); + }); + + it('fail to with non-32-bit number (blocks)', () => { + constructError({blocks: Math.pow(2, 32), used: 1, length: 1}); + }); + + it('fail to with non-32-bit number (used)', () => { + constructError({blocks: 1, used: Math.pow(2, 32), length: 1}); + }); + + it('fail to with non-32-bit number (length)', () => { + constructError({blocks: 1, used: 1, length: Math.pow(2, 32)}); + }); + + it('serialize/deserialize block record (min)', () => { + toAndFromRaw({blocks: 0, used: 0, length: 0}); + }); + + it('serialize/deserialize block record', () => { + toAndFromRaw({blocks: 10, used: 4194304, length: 20971520}); + }); + + it('serialize/deserialize block record (max)', () => { + const max = Math.pow(2, 32) - 1; + toAndFromRaw({blocks: max, used: max, length: max}); + }); + }); + }); + + describe('FileBlockStore (Unit)', function() { + const location = '/tmp/.bcoin/blocks'; + let store = null; + + before(() => { + store = new FileBlockStore({ + location: location, + maxFileLength: 1024 + }); + }); + + describe('allocate', function() { + it('will fail with length above file max', async () => { + let err = null; + try { + await store.allocate(1025); + } catch (e) { + err = e; + } + assert(err); + assert.equal(err.message, 'Block length above max file length.'); + }); + }); + + describe('filepath', function() { + it('will give correct path (0)', () => { + const filepath = store.filepath(0); + assert.equal(filepath, '/tmp/.bcoin/blocks/blk00000.dat'); + }); + + it('will give correct path (1)', () => { + const filepath = store.filepath(7); + assert.equal(filepath, '/tmp/.bcoin/blocks/blk00007.dat'); + }); + + it('will give correct path (2)', () => { + const filepath = store.filepath(23); + assert.equal(filepath, '/tmp/.bcoin/blocks/blk00023.dat'); + }); + + it('will give correct 
path (3)', () => { + const filepath = store.filepath(456); + assert.equal(filepath, '/tmp/.bcoin/blocks/blk00456.dat'); + }); + + it('will give correct path (4)', () => { + const filepath = store.filepath(8999); + assert.equal(filepath, '/tmp/.bcoin/blocks/blk08999.dat'); + }); + + it('will give correct path (5)', () => { + const filepath = store.filepath(99999); + assert.equal(filepath, '/tmp/.bcoin/blocks/blk99999.dat'); + }); + + it('will fail over max size', () => { + let err = null; + try { + store.filepath(100000); + } catch (e) { + err = e; + } + + assert(err); + assert.equal(err.message, 'File number too large.'); + }); + }); + }); + + describe('FileBlockStore (Integration 1)', function() { + const location = '/tmp/bcoin-blockstore-test'; + let store = null; + + beforeEach(async () => { + await rimraf(location); + await mkdirp(location); + + store = new FileBlockStore({ + location: location, + maxFileLength: 1024 + }); + + await store.open(); + }); + + afterEach(async () => { + await store.close(); + }); + + it('will write and read a block', async () => { + const block1 = random.randomBytes(128); + const hash = random.randomBytes(32); + + await store.write(hash, block1); + + const block2 = await store.read(hash); + + assert.bufferEqual(block1, block2); + }); + + it('will read a block w/ offset and length', async () => { + const block1 = random.randomBytes(128); + const hash = random.randomBytes(32); + + await store.write(hash, block1); + + const offset = 79; + const size = 15; + + const block2 = await store.read(hash, offset, size); + + assert.bufferEqual(block1.slice(offset, offset + size), block2); + }); + + it('will fail to read w/ out-of-bounds length', async () => { + const block1 = random.randomBytes(128); + const hash = random.randomBytes(32); + + await store.write(hash, block1); + + const offset = 79; + const size = 50; + + let err = null; + try { + await store.read(hash, offset, size); + } catch (e) { + err = e; + } + + assert(err); + 
assert.equal(err.message, 'Out-of-bounds read.'); + }); + + it('will allocate new files', async () => { + const blocks = []; + + for (let i = 0; i < 16; i++) { + const block = random.randomBytes(128); + const hash = random.randomBytes(32); + blocks.push({hash, block}); + await store.write(hash, block); + const block2 = await store.read(hash); + assert.bufferEqual(block2, block); + } + + const first = await fs.stat(store.filepath(0)); + const second = await fs.stat(store.filepath(1)); + const third = await fs.stat(store.filepath(2)); + assert.equal(first.size, 952); + assert.equal(second.size, 952); + assert.equal(third.size, 272); + + const len = first.size + second.size + third.size - (8 * 16); + assert.equal(len, 128 * 16); + + for (let i = 0; i < 16; i++) { + const expect = blocks[i]; + const block = await store.read(expect.hash); + assert.bufferEqual(block, expect.block); + } + }); + + it('will return null if block not found', async () => { + const hash = random.randomBytes(32); + const block = await store.read(hash); + assert.strictEqual(block, null); + }); + + it('will check if block exists (false)', async () => { + const hash = random.randomBytes(32); + const exists = await store.has(hash); + assert.strictEqual(exists, false); + }); + + it('will check if block exists (true)', async () => { + const block = random.randomBytes(128); + const hash = random.randomBytes(32); + await store.write(hash, block); + const exists = await store.has(hash); + assert.strictEqual(exists, true); + }); + + it('will prune blocks', async () => { + const hashes = []; + for (let i = 0; i < 16; i++) { + const block = random.randomBytes(128); + const hash = random.randomBytes(32); + hashes.push(hash); + await store.write(hash, block); + } + + const first = await fs.stat(store.filepath(0)); + const second = await fs.stat(store.filepath(1)); + const third = await fs.stat(store.filepath(2)); + + const len = first.size + second.size + third.size - (8 * 16); + assert.equal(len, 128 * 16); 
+ + for (let i = 0; i < 16; i++) { + const pruned = await store.prune(hashes[i]); + assert.strictEqual(pruned, true); + } + + assert.equal(await fs.exists(store.filepath(0)), false); + assert.equal(await fs.exists(store.filepath(1)), false); + assert.equal(await fs.exists(store.filepath(2)), false); + + for (let i = 0; i < 16; i++) { + const exists = await store.has(hashes[i]); + assert.strictEqual(exists, false); + } + + const exists = await store.db.has(layout.f.encode(0)); + assert.strictEqual(exists, false); + }); + }); + + describe('FileBlockStore (Integration 2)', function() { + const location = '/tmp/bcoin-blockstore-test'; + let store = null; + + beforeEach(async () => { + await rimraf(location); + await mkdirp(location); + + store = new FileBlockStore({ + location: location, + maxFileLength: 1024 * 1024 + }); + + await store.open(); + }); + + afterEach(async () => { + await store.close(); + }); + + it('will import from files (e.g. db corruption)', async () => { + const blocks = []; + + for (let i = 0; i < vectors.length; i++) { + const [block] = vectors[i].getBlock(); + const hash = block.hash(); + const raw = block.toRaw(); + + blocks.push({hash, block: raw}); + await store.write(hash, raw); + } + + await store.close(); + + await rimraf(resolve(location, './index')); + + store = new FileBlockStore({ + location: location, + maxFileLength: 1024 + }); + + await store.open(); + + for (let i = 0; i < vectors.length; i++) { + const expect = blocks[i]; + const block = await store.read(expect.hash); + assert.equal(block.length, expect.block.length); + assert.bufferEqual(block, expect.block); + } + }); + }); + + describe('LevelBlockStore', function() { + const location = '/tmp/bcoin-blockstore-test'; + let store = null; + + beforeEach(async () => { + await rimraf(location); + await mkdirp(location); + + store = new LevelBlockStore({ + location: location + }); + + await store.open(); + }); + + afterEach(async () => { + await store.close(); + }); + + it('will write 
and read a block', async () => { + const block1 = random.randomBytes(128); + const hash = random.randomBytes(32); + + await store.write(hash, block1); + + const block2 = await store.read(hash); + + assert.bufferEqual(block1, block2); + }); + + it('will read a block w/ offset and length', async () => { + const block1 = random.randomBytes(128); + const hash = random.randomBytes(32); + + await store.write(hash, block1); + + const offset = 79; + const size = 15; + + const block2 = await store.read(hash, offset, size); + + assert.bufferEqual(block1.slice(offset, offset + size), block2); + }); + + it('will fail to read w/ out-of-bounds length', async () => { + const block1 = random.randomBytes(128); + const hash = random.randomBytes(32); + + await store.write(hash, block1); + + const offset = 79; + const size = 50; + + let err = null; + try { + await store.read(hash, offset, size); + } catch (e) { + err = e; + } + + assert(err); + assert.equal(err.message, 'Out-of-bounds read.'); + }); + + it('will check if block exists (false)', async () => { + const hash = random.randomBytes(32); + const exists = await store.has(hash); + assert.strictEqual(exists, false); + }); + + it('will check if block exists (true)', async () => { + const block = random.randomBytes(128); + const hash = random.randomBytes(32); + await store.write(hash, block); + const exists = await store.has(hash); + assert.strictEqual(exists, true); + }); + + it('will prune blocks (true)', async () => { + const block = random.randomBytes(128); + const hash = random.randomBytes(32); + await store.write(hash, block); + const pruned = await store.prune(hash); + assert.strictEqual(pruned, true); + const block2 = await store.read(hash); + assert.strictEqual(block2, null); + }); + + it('will prune blocks (false)', async () => { + const hash = random.randomBytes(32); + const exists = await store.has(hash); + assert.strictEqual(exists, false); + const pruned = await store.prune(hash); + assert.strictEqual(pruned, false); 
+ }); + }); +}); diff --git a/test/util/common.js b/test/util/common.js index 9928eb45..bdaa8620 100644 --- a/test/util/common.js +++ b/test/util/common.js @@ -85,6 +85,14 @@ common.writeTX = function writeTX(name, tx, view) { common.writeFile(`${name}-undo.raw`, undoRaw); }; +common.rimraf = async function(p) { + const allowed = new RegExp('^\/tmp\/(.*)$'); + if (!allowed.test(p)) + throw new Error(`Path not allowed: ${p}.`); + + return await fs.rimraf(p); +}; + function parseUndo(data) { const br = bio.read(data); const items = []; From 0609ce72fc76893563ffc21c60d9606c82c00498 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Wed, 20 Feb 2019 18:37:37 -0800 Subject: [PATCH 02/31] bench: add benchmarks for blockstore --- bench/blockstore.js | 404 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 404 insertions(+) create mode 100644 bench/blockstore.js diff --git a/bench/blockstore.js b/bench/blockstore.js new file mode 100644 index 00000000..a068cc9e --- /dev/null +++ b/bench/blockstore.js @@ -0,0 +1,404 @@ +/*! + * bench/blockstore.js - benchmark block store for bcoin + * + * This can be run to benchmark the performance of the blockstore + * module for writing, reading and pruning block data. Results are + * written to stdout as JSON or formated bench results. + * + * Usage: + * node ./blockstore.js [--maxfile=] [--total=] + [--location=] [--store=] [--unsafe] + * + * Options: + * - `maxfile` The maximum file size (applies to "file" store). + * - `total` The total number of block bytes to write. + * - `location` The location to store block data. + * - `store` This can be "file" or "level". + * - `output` This can be "json" or "bench". + * - `unsafe` This will allocate block data directly from memory + * instead of random, it is faster. + * + * Copyright (c) 2019, Braydon Fuller (MIT License). 
+ * https://github.com/bcoin-org/bcoin + */ + +'use strict'; + +process.title = 'blockstore-bench'; + +const {isAbsolute} = require('path'); +const {mkdirp} = require('bfile'); +const random = require('bcrypto/lib/random'); +const {BufferMap} = require('buffer-map'); + +const { + FileBlockStore, + LevelBlockStore +} = require('../lib/blockstore'); + +const config = { + 'maxfile': { + value: true, + parse: a => parseInt(a), + valid: a => Number.isSafeInteger(a), + fallback: 128 * 1024 * 1024 + }, + 'total': { + value: true, + parse: a => parseInt(a), + valid: a => Number.isSafeInteger(a), + fallback: 3 * 1024 * 1024 * 1024 + }, + 'location': { + value: true, + valid: a => isAbsolute(a), + fallback: '/tmp/bcoin-bench-blockstore' + }, + 'store': { + value: true, + valid: a => (a === 'file' || a === 'level'), + fallback: 'file' + }, + 'output': { + value: true, + valid: a => (a === 'json' || a === 'bench'), + fallback: 'bench' + }, + 'unsafe': { + value: false, + valid: a => (a === true || a === false), + fallback: false + } +}; + +/** + * These block sizes were generated from bitcoin mainnet blocks by putting + * sizes into bins of 256 ^ (2 * n) as the upper bound and calculating + * the percentage of each and then distributing to roughly match the + * percentage of the following: + * + * |-------------|------------| + * | percentage | bytes | + * |-------------|------------| + * | 23.4055 | 1048576 | + * | 15.5338 | 256 | + * | 12.2182 | 262144 | + * | 8.4079 | 524288 | + * | 7.1289 | 131072 | + * | 6.9197 | 65536 | + * | 6.7073 | 2097152 | + * | 4.6753 | 32768 | + * | 3.9695 | 4096 | + * | 3.3885 | 16384 | + * | 2.6526 | 8192 | + * | 2.0048 | 512 | + * | 1.587 | 1024 | + * | 1.3976 | 2048 | + * | 0.0032 | 4194304 | + * |-------------|------------| + */ + +const distribution = [ + 1048576, 256, 256, 524288, 262144, 256, 131072, 256, 524288, 256, 131072, + 1048576, 262144, 1048576, 2097152, 256, 1048576, 65536, 256, 262144, 8192, + 32768, 32768, 256, 1048576, 524288, 
2097152, 1024, 1048576, 1048576, 131072, + 131072, 262144, 512, 1048576, 1048576, 1024, 1048576, 1048576, 262144, 2048, + 262144, 256, 1048576, 131072, 4096, 524288, 65536, 4096, 65536, 131072, + 2097152, 2097152, 2097152, 256, 524288, 4096, 262144, 65536, 65536, 262144, + 16384, 1048576, 32768, 262144, 1048576, 256, 131072, 1048576, 1048576, + 1048576, 8192, 1048576, 256, 16384, 1048576, 256, 256, 524288, 256, 32768, + 16384, 32768, 1048576, 512, 4096, 1048576, 1048576, 524288, 65536, 2097152, + 512, 262144, 8192, 524288, 131072, 65536, 16384, 2048, 262144, 1048576, + 1048576, 256, 524288, 262144, 4194304, 262144, 2097152 +]; + +(async () => { + let settings = null; + try { + settings = processArgs(process.argv, config); + } catch (err) { + console.log(err.message); + process.exit(1); + } + + await mkdirp(settings.location); + + let store = null; + let output = null; + + if (settings.store === 'file') { + store = new FileBlockStore({ + location: settings.location, + maxFileLength: settings.maxfile + }); + } else if (settings.store === 'level') { + store = new LevelBlockStore({ + location: settings.location + }); + } + + if (settings.output === 'bench') { + output = new BenchOutput(); + } else if (settings.output === 'json') { + output = new JSONOutput(); + } + + await store.open(); + + const hashes = []; + const lengths = new BufferMap(); + + output.start(); + + // 1. 
Write data to the block store + let written = 0; + + async function write() { + for (const length of distribution) { + const hash = random.randomBytes(32); + let raw = null; + if (settings.unsafe) { + raw = Buffer.allocUnsafe(length); + } else { + raw = random.randomBytes(length); + } + + const start = process.hrtime(); + await store.write(hash, raw); + const elapsed = process.hrtime(start); + + hashes.push(hash); + lengths.set(hash, length); + written += length; + + output.result('write', start, elapsed, length); + + if (written >= settings.total) + break; + } + } + + while (written < settings.total) + await write(); + + // 2. Read data from the block store + for (const hash of hashes) { + const start = process.hrtime(); + const raw = await store.read(hash); + const elapsed = process.hrtime(start); + + output.result('read', start, elapsed, raw.length); + } + + // 3. Read data not in the order it was written (random) + for (let i = 0; i < hashes.length; i++) { + const rand = random.randomInt() / 0xffffffff * (hashes.length - 1) | 0; + const hash = hashes[rand]; + + const start = process.hrtime(); + const raw = await store.read(hash); + const elapsed = process.hrtime(start); + + output.result('randomread', start, elapsed, raw.length); + } + + // 4. 
Prune data from the block store + for (const hash of hashes) { + const start = process.hrtime(); + await store.prune(hash); + const elapsed = process.hrtime(start); + const length = lengths.get(hash); + + output.result('prune', start, elapsed, length); + } + + output.end(); + + await store.close(); +})().catch((err) => { + console.error(err); + process.exit(1); +}); + +class JSONOutput { + constructor() { + this.time = process.hrtime(); + this.index = 0; + } + + start() { + process.stdout.write('['); + } + + result(type, start, elapsed, length) { + if (this.index > 0) + process.stdout.write(','); + + const since = [start[0] - this.time[0], start[1] - this.time[1]]; + const smicro = (since[0] * 1000000) + (since[1] / 1000); + const emicro = (elapsed[0] * 1000000) + (elapsed[1] / 1000); + + process.stdout.write(`{"type":"${type}","start":${smicro},`); + process.stdout.write(`"elapsed":${emicro},"length":${length},`); + process.stdout.write(`"index":${this.index}}`); + + this.index += 1; + } + + end() { + process.stdout.write(']'); + } +} + +class BenchOutput { + constructor() { + this.time = process.hrtime(); + this.index = 0; + this.results = {}; + this.interval = null; + this.stdout = process.stdout; + } + + start() { + this.stdout.write('Starting benchmark...\n'); + this.interval = setInterval(() => { + this.stdout.write(`Operation count=${this.index}\n`); + }, 5000); + } + + result(type, start, elapsed, length) { + const micro = (elapsed[0] * 1000000) + (elapsed[1] / 1000); + + if (!this.results[type]) + this.results[type] = {}; + + if (!this.results[type][length]) + this.results[type][length] = []; + + this.results[type][length].push(micro); + + this.index += 1; + } + + end() { + clearInterval(this.interval); + + this.stdout.write('Benchmark finished.\n'); + + function format(value) { + if (typeof value === 'number') + value = value.toFixed(2); + + if (typeof value !== 'string') + value = value.toString(); + + while (value.length < 15) + value = `${value} `; + + 
return value; + } + + function title(value) { + if (typeof value !== 'string') + value = value.toString(); + + while (value.length < 85) + value = ` ${value} `; + + if (value.length > 85) + value = value.slice(0, 85); + + return value; + } + + for (const type in this.results) { + this.stdout.write('\n'); + this.stdout.write(`${title(type)}\n`); + this.stdout.write(`${'='.repeat(85)}\n`); + this.stdout.write(`${format('length')}`); + this.stdout.write(`${format('operations')}`); + this.stdout.write(`${format('min')}`); + this.stdout.write(`${format('max')}`); + this.stdout.write(`${format('average')}`); + this.stdout.write(`${format('median')}`); + this.stdout.write('\n'); + this.stdout.write(`${'-'.repeat(85)}\n`); + + for (const length in this.results[type]) { + const times = this.results[type][length]; + + times.sort((a, b) => a - b); + + let min = Infinity; + let max = 0; + + let total = 0; + + for (const micro of times) { + if (micro < min) + min = micro; + + if (micro > max) + max = micro; + + total += micro; + } + + const average = total / times.length; + const median = times[times.length / 2 | 0]; + + this.stdout.write(`${format(length)}`); + this.stdout.write(`${format(times.length.toString())}`); + this.stdout.write(`${format(min)}`); + this.stdout.write(`${format(max)}`); + this.stdout.write(`${format(average)}`); + this.stdout.write(`${format(median)}`); + this.stdout.write('\n'); + } + this.stdout.write('\n'); + } + this.stdout.write('\n'); + } +} + +function processArgs(argv, config) { + const args = {}; + + for (const key in config) + args[key] = config[key].fallback; + + for (let i = 2; i < process.argv.length; i++) { + const arg = process.argv[i]; + const match = arg.match(/^(\-){1,2}([a-z]+)(\=)?(.*)?$/); + + if (!match) { + throw new Error(`Unexpected argument: ${arg}.`); + } else { + const key = match[2]; + let value = match[4]; + + if (!config[key]) + throw new Error(`Invalid argument: ${arg}.`); + + if (config[key].value && !value) { + value = 
process.argv[i + 1]; + i++; + } else if (!config[key].value && !value) { + value = true; + } else if (!config[key].value && value) { + throw new Error(`Unexpected value: ${key}=${value}`); + } + + if (config[key].parse) + value = config[key].parse(value); + + if (value) + args[key] = value; + + if (!config[key].valid(args[key])) + throw new Error(`Invalid value: ${key}=${value}`); + } + } + + return args; +} From 620f21d6f509bd76d91e12d5dff499e6cb5976da Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Wed, 27 Feb 2019 16:14:28 -0800 Subject: [PATCH 03/31] blockstore: await for level block store writes --- lib/blockstore/level.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/blockstore/level.js b/lib/blockstore/level.js index 42764c62..eee7bb2a 100644 --- a/lib/blockstore/level.js +++ b/lib/blockstore/level.js @@ -66,7 +66,7 @@ class LevelBlockStore extends AbstractBlockStore { */ async write(hash, data) { - this.db.put(layout.b.encode(hash), data); + return this.db.put(layout.b.encode(hash), data); } /** From 64fb7c1d8826894516ea6e0768ca85a0bc58dc15 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Wed, 27 Feb 2019 16:18:12 -0800 Subject: [PATCH 04/31] bench: add bench json output for blockstore --- bench/blockstore.js | 131 ++++++++++++++++++++++++++++++++++---------- 1 file changed, 101 insertions(+), 30 deletions(-) diff --git a/bench/blockstore.js b/bench/blockstore.js index a068cc9e..362869ea 100644 --- a/bench/blockstore.js +++ b/bench/blockstore.js @@ -61,7 +61,7 @@ const config = { }, 'output': { value: true, - valid: a => (a === 'json' || a === 'bench'), + valid: a => (a === 'json' || a === 'bench' || a === 'benchjson'), fallback: 'bench' }, 'unsafe': { @@ -139,6 +139,8 @@ const distribution = [ if (settings.output === 'bench') { output = new BenchOutput(); + } else if (settings.output === 'benchjson') { + output = new BenchJSONOutput(); } else if (settings.output === 'json') { output = new JSONOutput(); } @@ -235,8 +237,8 @@ 
class JSONOutput { process.stdout.write(','); const since = [start[0] - this.time[0], start[1] - this.time[1]]; - const smicro = (since[0] * 1000000) + (since[1] / 1000); - const emicro = (elapsed[0] * 1000000) + (elapsed[1] / 1000); + const smicro = hrToMicro(since); + const emicro = hrToMicro(elapsed); process.stdout.write(`{"type":"${type}","start":${smicro},`); process.stdout.write(`"elapsed":${emicro},"length":${length},`); @@ -267,7 +269,7 @@ class BenchOutput { } result(type, start, elapsed, length) { - const micro = (elapsed[0] * 1000000) + (elapsed[1] / 1000); + const micro = hrToMicro(elapsed); if (!this.results[type]) this.results[type] = {}; @@ -325,34 +327,14 @@ class BenchOutput { this.stdout.write(`${'-'.repeat(85)}\n`); for (const length in this.results[type]) { - const times = this.results[type][length]; - - times.sort((a, b) => a - b); - - let min = Infinity; - let max = 0; - - let total = 0; - - for (const micro of times) { - if (micro < min) - min = micro; - - if (micro > max) - max = micro; - - total += micro; - } - - const average = total / times.length; - const median = times[times.length / 2 | 0]; + const cal = calculate(this.results[type][length]); this.stdout.write(`${format(length)}`); - this.stdout.write(`${format(times.length.toString())}`); - this.stdout.write(`${format(min)}`); - this.stdout.write(`${format(max)}`); - this.stdout.write(`${format(average)}`); - this.stdout.write(`${format(median)}`); + this.stdout.write(`${format(cal.operations.toString())}`); + this.stdout.write(`${format(cal.min)}`); + this.stdout.write(`${format(cal.max)}`); + this.stdout.write(`${format(cal.average)}`); + this.stdout.write(`${format(cal.median)}`); this.stdout.write('\n'); } this.stdout.write('\n'); @@ -361,6 +343,95 @@ class BenchOutput { } } +class BenchJSONOutput { + constructor() { + this.time = null; + this.results = {}; + this.stdout = process.stdout; + } + + start() { + this.time = process.hrtime(); + } + + result(type, start, elapsed, 
length) { + const micro = hrToMicro(elapsed); + + if (!this.results[type]) + this.results[type] = {}; + + if (!this.results[type][length]) + this.results[type][length] = []; + + this.results[type][length].push(micro); + } + + end() { + const report = { + summary: [], + time: hrToMicro(process.hrtime(this.time)), + elapsed: 0 + }; + + for (const type in this.results) { + for (const length in this.results[type]) { + const cal = calculate(this.results[type][length]); + + report.elapsed += cal.total; + + report.summary.push({ + type: type, + length: length, + operations: cal.operations, + min: cal.min, + max: cal.max, + average: cal.average, + median: cal.median + }); + } + } + + this.stdout.write(JSON.stringify(report, null, 2)); + this.stdout.write('\n'); + } +} + +function hrToMicro(time) { + return (time[0] * 1000000) + (time[1] / 1000); +} + +function calculate(times) { + times.sort((a, b) => a - b); + + let min = Infinity; + let max = 0; + + let total = 0; + + for (const micro of times) { + if (micro < min) + min = micro; + + if (micro > max) + max = micro; + + total += micro; + } + + const average = total / times.length; + const median = times[times.length / 2 | 0]; + const operations = times.length; + + return { + total, + operations, + min, + max, + average, + median + }; +} + function processArgs(argv, config) { const args = {}; From 2d08b296f769c6beae1a7ffacc7d8144d08df696 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Thu, 28 Feb 2019 11:53:42 -0800 Subject: [PATCH 05/31] blockstore: recover from block write interrupt --- lib/blockstore/file.js | 11 ++++++++- test/blockstore-test.js | 49 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 59 insertions(+), 1 deletion(-) diff --git a/lib/blockstore/file.js b/lib/blockstore/file.js index e91da649..231dc573 100644 --- a/lib/blockstore/file.js +++ b/lib/blockstore/file.js @@ -191,9 +191,12 @@ class FileBlockStore extends AbstractBlockStore { const rec = await this.db.get(layout.f.encode(fileno)); + 
let touch = false; + if (rec) { filerecord = FileRecord.fromRaw(rec); } else { + touch = true; filerecord = new FileRecord({ blocks: 0, used: 0, @@ -204,6 +207,7 @@ class FileBlockStore extends AbstractBlockStore { if (filerecord.used + length > filerecord.length) { fileno += 1; filepath = this.filepath(fileno); + touch = true; filerecord = new FileRecord({ blocks: 0, used: 0, @@ -211,6 +215,11 @@ class FileBlockStore extends AbstractBlockStore { }); } + if (touch) { + const fd = await fs.open(filepath, 'w'); + await fs.close(fd); + } + return {fileno, filerecord, filepath}; } @@ -240,7 +249,7 @@ class FileBlockStore extends AbstractBlockStore { bwm.writeU32(blength); const magic = bwm.render(); - const fd = await fs.open(filepath, 'a'); + const fd = await fs.open(filepath, 'r+'); const mwritten = await fs.write(fd, magic, 0, mlength, mposition); const bwritten = await fs.write(fd, data, 0, blength, bposition); diff --git a/test/blockstore-test.js b/test/blockstore-test.js index 2dead60e..53043516 100644 --- a/test/blockstore-test.js +++ b/test/blockstore-test.js @@ -4,6 +4,7 @@ 'use strict'; const Logger = require('blgr'); +const bio = require('bufio'); const assert = require('./util/assert'); const common = require('./util/common'); const {resolve} = require('path'); @@ -428,6 +429,54 @@ describe('BlockStore', function() { } }); + it('will recover from interrupt during block write', async () => { + { + const block = random.randomBytes(128); + const hash = random.randomBytes(32); + await store.write(hash, block); + + const block2 = await store.read(hash); + assert.bufferEqual(block2, block); + } + + // Manually insert a partially written block to the + // end of file as would be the case of an untimely + // interrupted write of a block. The file record + // would not be updated to include the used bytes and + // thus this data should be overwritten. 
+ { + const filepath = store.filepath(0); + + const fd = await fs.open(filepath, 'a'); + + const bw = bio.write(8); + bw.writeU32(store.network.magic); + bw.writeU32(73); + const magic = bw.render(); + + const failblock = random.randomBytes(73); + + const mwritten = await fs.write(fd, magic, 0, 8); + const bwritten = await fs.write(fd, failblock, 0, 73); + + await fs.close(fd); + + assert.equal(mwritten, 8); + assert.equal(bwritten, 73); + } + + // Now check that this block has the correct position + // in the file and that it can be read correctly. + { + const block = random.randomBytes(128); + const hash = random.randomBytes(32); + await store.write(hash, block); + + const block2 = await store.read(hash); + assert.bufferEqual(block2, block); + } + }); + it('will return null if block not found', async () => { const hash = random.randomBytes(32); const block = await store.read(hash); From abd2ae4b5d8128ed7aae5b697ea14242512d20f4 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Thu, 28 Feb 2019 11:04:46 -0800 Subject: [PATCH 06/31] blockstore: prevent blocks writes at the same position --- lib/blockstore/file.js | 9 +++++++++ test/blockstore-test.js | 25 +++++++++++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/lib/blockstore/file.js b/lib/blockstore/file.js index 231dc573..e735b442 100644 --- a/lib/blockstore/file.js +++ b/lib/blockstore/file.js @@ -45,6 +45,8 @@ class FileBlockStore extends AbstractBlockStore { if (options.network != null) this.network = Network.get(options.network); + + this.writing = false; } /** @@ -231,6 +233,11 @@ class FileBlockStore extends AbstractBlockStore { */ async write(hash, data) { + if (this.writing) + throw new Error('Already writing.'); + + this.writing = true; + const mlength = 8; const blength = data.length; const length = data.length + mlength; @@ -280,6 +287,8 @@ class FileBlockStore extends AbstractBlockStore { b.put(layout.R.encode(), bw.writeU32(fileno).render()); await b.write(); + + this.writing = 
false; } /** diff --git a/test/blockstore-test.js b/test/blockstore-test.js index 53043516..a7bea129 100644 --- a/test/blockstore-test.js +++ b/test/blockstore-test.js @@ -477,6 +477,31 @@ describe('BlockStore', function() { } }); + it('will not write blocks at the same position', (done) => { + let err = null; + let finished = 0; + + for (let i = 0; i < 16; i++) { + const block = random.randomBytes(128); + const hash = random.randomBytes(32); + + // Accidently don't use `await` and attempt to + // write multiple blocks in parallel and at the + // same file position. + const promise = store.write(hash, block); + promise.catch((e) => { + err = e; + }).finally(() => { + finished += 1; + if (finished >= 16) { + assert(err); + assert(err.message, 'Already writing.'); + done(); + } + }); + } + }); + it('will return null if block not found', async () => { const hash = random.randomBytes(32); const block = await store.read(hash); From 3cec13ef5eb0f417270a1cb074a8fc84d1c31740 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Wed, 6 Mar 2019 11:27:41 -0800 Subject: [PATCH 07/31] blockstore: add block data types with an undo type --- lib/blockstore/abstract.js | 46 ++++++++-- lib/blockstore/common.js | 31 +++++++ lib/blockstore/file.js | 152 ++++++++++++++++++++++++++------ lib/blockstore/layout.js | 12 +-- lib/blockstore/level.js | 58 +++++++++++- test/blockstore-test.js | 175 +++++++++++++++++++++++++++++++++---- 6 files changed, 409 insertions(+), 65 deletions(-) create mode 100644 lib/blockstore/common.js diff --git a/lib/blockstore/abstract.js b/lib/blockstore/abstract.js index df7172ae..69113e73 100644 --- a/lib/blockstore/abstract.js +++ b/lib/blockstore/abstract.js @@ -51,10 +51,16 @@ class AbstractBlockStore { } /** - * This method stores block data. The action should be idempotent. - * If the data is already stored, the behavior will be the same. 
Any - * concurrent requests to store the same data will produce the same - * result, and will not conflict with each other. + * This method stores block undo coin data. + * @returns {Promise} + */ + + async writeUndo(hash, data) { + throw new Error('Abstract method.'); + } + + /** + * This method stores block data. * @returns {Promise} */ @@ -62,6 +68,15 @@ class AbstractBlockStore { throw new Error('Abstract method.'); } + /** + * This method will retrieve block undo coin data. + * @returns {Promise} + */ + + async readUndo(hash) { + throw new Error('Abstract method.'); + } + /** * This method will retrieve block data. Smaller portions of * the block can be read by using the offset and size arguments. @@ -73,9 +88,16 @@ class AbstractBlockStore { } /** - * This will free resources for storing the block data. This - * may not mean that the block is deleted, but that it should - * no longer consume any local storage resources. + * This will free resources for storing the block undo coin data. + * @returns {Promise} + */ + + async pruneUndo(hash) { + throw new Error('Abstract method.'); + } + + /** + * This will free resources for storing the block data. * @returns {Promise} */ @@ -83,6 +105,16 @@ class AbstractBlockStore { throw new Error('Abstract method.'); } + /** + * This will check if a block undo coin data has been stored + * and is available. + * @returns {Promise} + */ + + async hasUndo(hash) { + throw new Error('Abstract method.'); + } + /** * This will check if a block has been stored and is available. * @returns {Promise} diff --git a/lib/blockstore/common.js b/lib/blockstore/common.js new file mode 100644 index 00000000..3bfed645 --- /dev/null +++ b/lib/blockstore/common.js @@ -0,0 +1,31 @@ +/*! + * common.js - block store constants for bcoin + * Copyright (c) 2019, Braydon Fuller (MIT License). + * https://github.com/bcoin-org/bcoin + */ + +'use strict'; + +/** + * @module blockstore/common + */ + +/** + * Data types. 
+ * @enum {Number} + */ + +exports.types = { + BLOCK: 1, + UNDO: 2 +}; + +/** + * File prefixes for data types. + * @enum {String} + */ + +exports.prefixes = { + 1: 'blk', + 2: 'blu' +}; diff --git a/lib/blockstore/file.js b/lib/blockstore/file.js index e735b442..1db803d2 100644 --- a/lib/blockstore/file.js +++ b/lib/blockstore/file.js @@ -16,6 +16,7 @@ const Block = require('../primitives/block'); const AbstractBlockStore = require('./abstract'); const {BlockRecord, FileRecord} = require('./records'); const layout = require('./layout'); +const {types, prefixes} = require('./common'); /** * File Block Store @@ -66,7 +67,7 @@ class FileBlockStore extends AbstractBlockStore { let missing = false; for (const fileno of filenos) { - const rec = await this.db.get(layout.f.encode(fileno)); + const rec = await this.db.get(layout.f.encode(types.BLOCK, fileno)); if (!rec) { missing = true; break; @@ -80,7 +81,7 @@ class FileBlockStore extends AbstractBlockStore { for (const fileno of filenos) { const b = this.db.batch(); - const filepath = this.filepath(fileno); + const filepath = this.filepath(types.BLOCK, fileno); const data = await fs.readFile(filepath); const reader = bio.read(data); let magic = null; @@ -106,7 +107,7 @@ class FileBlockStore extends AbstractBlockStore { }); blocks += 1; - b.put(layout.b.encode(hash), blockrecord.toRaw()); + b.put(layout.b.encode(types.BLOCK, hash), blockrecord.toRaw()); } const filerecord = new FileRecord({ @@ -115,7 +116,7 @@ class FileBlockStore extends AbstractBlockStore { length: this.maxFileLength }); - b.put(layout.f.encode(fileno), filerecord.toRaw()); + b.put(layout.f.encode(types.BLOCK, fileno), filerecord.toRaw()); await b.write(); @@ -152,11 +153,12 @@ class FileBlockStore extends AbstractBlockStore { /** * This method will determine the file path based on the file number * and the current block data location. + * @private * @param {Number} fileno - The number of the file. 
* @returns {Promise} */ - filepath(fileno) { + filepath(type, fileno) { const pad = 5; let num = fileno.toString(10); @@ -167,17 +169,27 @@ class FileBlockStore extends AbstractBlockStore { while (num.length < pad) num = `0${num}`; - return join(this.location, `blk${num}.dat`); + let filepath = null; + + const prefix = prefixes[type]; + + if (!prefix) + throw new Error('Unknown file prefix.'); + + filepath = join(this.location, `${prefix}${num}.dat`); + + return filepath; } /** * This method will select and potentially allocate a file to * write a block based on the size. + * @private * @param {Number} length - The number of bytes of the data to be written. * @returns {Promise} */ - async allocate(length) { + async allocate(type, length) { if (length > this.maxFileLength) throw new Error('Block length above max file length.'); @@ -185,13 +197,13 @@ class FileBlockStore extends AbstractBlockStore { let filerecord = null; let filepath = null; - const last = await this.db.get(layout.R.encode()); + const last = await this.db.get(layout.F.encode(type)); if (last) fileno = bio.read(last).readU32(); - filepath = this.filepath(fileno); + filepath = this.filepath(type, fileno); - const rec = await this.db.get(layout.f.encode(fileno)); + const rec = await this.db.get(layout.f.encode(type, fileno)); let touch = false; @@ -208,7 +220,7 @@ class FileBlockStore extends AbstractBlockStore { if (filerecord.used + length > filerecord.length) { fileno += 1; - filepath = this.filepath(fileno); + filepath = this.filepath(type, fileno); touch = true; filerecord = new FileRecord({ blocks: 0, @@ -225,6 +237,17 @@ class FileBlockStore extends AbstractBlockStore { return {fileno, filerecord, filepath}; } + /** + * This method stores block undo coin data in files. 
+ * @param {Buffer} hash - The block hash + * @param {Buffer} data - The block data + * @returns {Promise} + */ + + async writeUndo(hash, data) { + return this._write(types.UNDO, hash, data); + } + /** * This method stores block data in files. * @param {Buffer} hash - The block hash @@ -233,6 +256,20 @@ class FileBlockStore extends AbstractBlockStore { */ async write(hash, data) { + return this._write(types.BLOCK, hash, data); + } + + /** + * This method stores block data in files with by appending + * data to the last written file and updating indexes to point + * to the file and position. + * @private + * @param {Buffer} hash - The block hash + * @param {Buffer} data - The block data + * @returns {Promise} + */ + + async _write(type, hash, data) { if (this.writing) throw new Error('Already writing.'); @@ -246,7 +283,7 @@ class FileBlockStore extends AbstractBlockStore { fileno, filerecord, filepath - } = await this.allocate(length); + } = await this.allocate(type, length); const mposition = filerecord.used; const bposition = filerecord.used + mlength; @@ -280,17 +317,27 @@ class FileBlockStore extends AbstractBlockStore { length: blength }); - b.put(layout.b.encode(hash), blockrecord.toRaw()); - b.put(layout.f.encode(fileno), filerecord.toRaw()); + b.put(layout.b.encode(type, hash), blockrecord.toRaw()); + b.put(layout.f.encode(type, fileno), filerecord.toRaw()); - const bw = bio.write(4); - b.put(layout.R.encode(), bw.writeU32(fileno).render()); + const last = bio.write(4).writeU32(fileno).render(); + b.put(layout.F.encode(type), last); await b.write(); this.writing = false; } + /** + * This method will retrieve block undo coin data. + * @param {Buffer} hash - The block hash + * @returns {Promise} + */ + + async readUndo(hash) { + return this._read(types.UNDO, hash); + } + /** * This method will retrieve block data. Smaller portions of the * block (e.g. 
transactions) can be read by using the offset and @@ -302,13 +349,28 @@ class FileBlockStore extends AbstractBlockStore { */ async read(hash, offset, length) { - const raw = await this.db.get(layout.b.encode(hash)); + return this._read(types.BLOCK, hash, offset, length); + } + + /** + * This methods reads data from disk by retrieving the index of + * the data and reading from the correponding file and location. + * @private + * @param {Buffer} type - The data type + * @param {Buffer} hash - The block hash + * @param {Number} offset - The offset within the block + * @param {Number} length - The number of bytes of the data + * @returns {Promise} + */ + + async _read(type, hash, offset, length) { + const raw = await this.db.get(layout.b.encode(type, hash)); if (!raw) return null; const blockrecord = BlockRecord.fromRaw(raw); - const filepath = this.filepath(blockrecord.file); + const filepath = this.filepath(type, blockrecord.file); let position = blockrecord.position; @@ -331,22 +393,43 @@ class FileBlockStore extends AbstractBlockStore { } /** - * This will free resources for storing the block data. The block - * data may not be deleted from disk immediately, the index for - * the block is removed and will not be able to be read. The underlying - * file is unlinked when all blocks in a file have been pruned. + * This will free resources for storing the block undo coin data. + * @param {Buffer} hash - The block hash + * @returns {Promise} + */ + + async pruneUndo(hash) { + return this._prune(types.UNDO, hash); + } + + /** + * This will free resources for storing the block data. * @param {Buffer} hash - The block hash * @returns {Promise} */ async prune(hash) { - const braw = await this.db.get(layout.b.encode(hash)); + return this._prune(types.BLOCK, hash); + } + + /** + * This will free resources for storing the block data. The block + * data may not be deleted from disk immediately, the index for the + * block is removed and will not be able to be read. 
The underlying + * file is unlinked when all blocks in a file have been pruned. + * @private + * @param {Buffer} hash - The block hash + * @returns {Promise} + */ + + async _prune(type, hash) { + const braw = await this.db.get(layout.b.encode(type, hash)); if (!braw) return false; const blockrecord = BlockRecord.fromRaw(braw); - const fraw = await this.db.get(layout.f.encode(blockrecord.file)); + const fraw = await this.db.get(layout.f.encode(type, blockrecord.file)); if (!fraw) return false; @@ -357,20 +440,31 @@ class FileBlockStore extends AbstractBlockStore { const b = this.db.batch(); if (filerecord.blocks === 0) - b.del(layout.f.encode(blockrecord.file)); + b.del(layout.f.encode(type, blockrecord.file)); else - b.put(layout.f.encode(blockrecord.file), filerecord.toRaw()); + b.put(layout.f.encode(type, blockrecord.file), filerecord.toRaw()); - b.del(layout.b.encode(hash)); + b.del(layout.b.encode(type, hash)); await b.write(); if (filerecord.blocks === 0) - await fs.unlink(this.filepath(blockrecord.file)); + await fs.unlink(this.filepath(type, blockrecord.file)); return true; } + /** + * This will check if a block undo coin has been stored + * and is available. + * @param {Buffer} hash - The block hash + * @returns {Promise} + */ + + async hasUndo(hash) { + return await this.db.has(layout.b.encode(types.UNDO, hash)); + } + /** * This will check if a block has been stored and is available. 
* @param {Buffer} hash - The block hash @@ -378,7 +472,7 @@ class FileBlockStore extends AbstractBlockStore { */ async has(hash) { - return await this.db.has(layout.b.encode(hash)); + return await this.db.has(layout.b.encode(types.BLOCK, hash)); } } diff --git a/lib/blockstore/layout.js b/lib/blockstore/layout.js index 5b4d6e39..8b221d5c 100644 --- a/lib/blockstore/layout.js +++ b/lib/blockstore/layout.js @@ -11,16 +11,16 @@ const bdb = require('bdb'); /* * Database Layout: * V -> db version - * R -> last file entry - * f[uint32] -> file entry - * b[hash] -> block entry + * B[type] -> last file record by type + * f[type][fileno] -> file record by type and file number + * b[type][hash] -> block record by type and block hash */ const layout = { V: bdb.key('V'), - R: bdb.key('R'), - f: bdb.key('f', ['uint32']), - b: bdb.key('b', ['hash256']) + F: bdb.key('F', ['uint32']), + f: bdb.key('f', ['uint32', 'uint32']), + b: bdb.key('b', ['uint32', 'hash256']) }; /* diff --git a/lib/blockstore/level.js b/lib/blockstore/level.js index eee7bb2a..bc965c87 100644 --- a/lib/blockstore/level.js +++ b/lib/blockstore/level.js @@ -11,6 +11,7 @@ const bdb = require('bdb'); const assert = require('bsert'); const AbstractBlockStore = require('./abstract'); const layout = require('./layout'); +const {types} = require('./common'); /** * LevelDB Block Store @@ -58,6 +59,17 @@ class LevelBlockStore extends AbstractBlockStore { await this.db.close(); } + /** + * This method stores block undo coin data in LevelDB. + * @param {Buffer} hash - The block hash + * @param {Buffer} data - The block data + * @returns {Promise} + */ + + async writeUndo(hash, data) { + return this.db.put(layout.b.encode(types.UNDO, hash), data); + } + /** * This method stores block data in LevelDB. 
* @param {Buffer} hash - The block hash @@ -66,7 +78,17 @@ class LevelBlockStore extends AbstractBlockStore { */ async write(hash, data) { - return this.db.put(layout.b.encode(hash), data); + return this.db.put(layout.b.encode(types.BLOCK, hash), data); + } + + /** + * This method will retrieve block undo coin data. + * @param {Buffer} hash - The block hash + * @returns {Promise} + */ + + async readUndo(hash) { + return this.db.get(layout.b.encode(types.UNDO, hash)); } /** @@ -81,7 +103,7 @@ class LevelBlockStore extends AbstractBlockStore { */ async read(hash, offset, length) { - let raw = await this.db.get(layout.b.encode(hash)); + let raw = await this.db.get(layout.b.encode(types.BLOCK, hash)); if (offset) { if (offset + length > raw.length) @@ -93,6 +115,23 @@ class LevelBlockStore extends AbstractBlockStore { return raw; } + /** + * This will free resources for storing the block undo coin data. + * The block data may not be immediately removed from disk, and will + * be reclaimed during LevelDB compaction. + * @param {Buffer} hash - The block hash + * @returns {Promise} + */ + + async pruneUndo(hash) { + if (!await this.hasUndo(hash)) + return false; + + await this.db.del(layout.b.encode(types.UNDO, hash)); + + return true; + } + /** * This will free resources for storing the block data. The block * data may not be immediately removed from disk, and will be reclaimed @@ -105,11 +144,22 @@ class LevelBlockStore extends AbstractBlockStore { if (!await this.has(hash)) return false; - await this.db.del(layout.b.encode(hash)); + await this.db.del(layout.b.encode(types.BLOCK, hash)); return true; } + /** + * This will check if a block undo coin data has been stored + * and is available. + * @param {Buffer} hash - The block hash + * @returns {Promise} + */ + + async hasUndo(hash) { + return this.db.has(layout.b.encode(types.UNDO, hash)); + } + /** * This will check if a block has been stored and is available. 
* @param {Buffer} hash - The block hash @@ -117,7 +167,7 @@ class LevelBlockStore extends AbstractBlockStore { */ async has(hash) { - return this.db.has(layout.b.encode(hash)); + return this.db.has(layout.b.encode(types.BLOCK, hash)); } } diff --git a/test/blockstore-test.js b/test/blockstore-test.js index a7bea129..62ef22e1 100644 --- a/test/blockstore-test.js +++ b/test/blockstore-test.js @@ -26,6 +26,7 @@ const { } = require('../lib/blockstore'); const layout = require('../lib/blockstore/layout'); +const {types} = require('../lib/blockstore/common'); const { BlockRecord, @@ -281,7 +282,7 @@ describe('BlockStore', function() { it('will fail with length above file max', async () => { let err = null; try { - await store.allocate(1025); + await store.allocate(types.BLOCK, 1025); } catch (e) { err = e; } @@ -292,39 +293,39 @@ describe('BlockStore', function() { describe('filepath', function() { it('will give correct path (0)', () => { - const filepath = store.filepath(0); + const filepath = store.filepath(types.BLOCK, 0); assert.equal(filepath, '/tmp/.bcoin/blocks/blk00000.dat'); }); it('will give correct path (1)', () => { - const filepath = store.filepath(7); + const filepath = store.filepath(types.BLOCK, 7); assert.equal(filepath, '/tmp/.bcoin/blocks/blk00007.dat'); }); it('will give correct path (2)', () => { - const filepath = store.filepath(23); + const filepath = store.filepath(types.BLOCK, 23); assert.equal(filepath, '/tmp/.bcoin/blocks/blk00023.dat'); }); it('will give correct path (3)', () => { - const filepath = store.filepath(456); + const filepath = store.filepath(types.BLOCK, 456); assert.equal(filepath, '/tmp/.bcoin/blocks/blk00456.dat'); }); it('will give correct path (4)', () => { - const filepath = store.filepath(8999); + const filepath = store.filepath(types.BLOCK, 8999); assert.equal(filepath, '/tmp/.bcoin/blocks/blk08999.dat'); }); it('will give correct path (5)', () => { - const filepath = store.filepath(99999); + const filepath = 
store.filepath(types.BLOCK, 99999); assert.equal(filepath, '/tmp/.bcoin/blocks/blk99999.dat'); }); it('will fail over max size', () => { let err = null; try { - store.filepath(100000); + store.filepath(types.BLOCK, 100000); } catch (e) { err = e; } @@ -332,6 +333,11 @@ describe('BlockStore', function() { assert(err); assert.equal(err.message, 'File number too large.'); }); + + it('will give undo type', () => { + const filepath = store.filepath(types.UNDO, 99999); + assert.equal(filepath, '/tmp/.bcoin/blocks/blu99999.dat'); + }); }); }); @@ -366,6 +372,17 @@ describe('BlockStore', function() { assert.bufferEqual(block1, block2); }); + it('will write and read block undo coins', async () => { + const block1 = random.randomBytes(128); + const hash = random.randomBytes(32); + + await store.writeUndo(hash, block1); + + const block2 = await store.readUndo(hash); + + assert.bufferEqual(block1, block2); + }); + it('will read a block w/ offset and length', async () => { const block1 = random.randomBytes(128); const hash = random.randomBytes(32); @@ -412,9 +429,9 @@ describe('BlockStore', function() { assert.bufferEqual(block2, block); } - const first = await fs.stat(store.filepath(0)); - const second = await fs.stat(store.filepath(1)); - const third = await fs.stat(store.filepath(2)); + const first = await fs.stat(store.filepath(types.BLOCK, 0)); + const second = await fs.stat(store.filepath(types.BLOCK, 1)); + const third = await fs.stat(store.filepath(types.BLOCK, 2)); assert.equal(first.size, 952); assert.equal(second.size, 952); assert.equal(third.size, 272); @@ -429,6 +446,35 @@ describe('BlockStore', function() { } }); + it('will allocate new files with block undo coins', async () => { + const blocks = []; + + for (let i = 0; i < 16; i++) { + const block = random.randomBytes(128); + const hash = random.randomBytes(32); + blocks.push({hash, block}); + await store.writeUndo(hash, block); + const block2 = await store.readUndo(hash); + assert.bufferEqual(block2, block); + 
} + + const first = await fs.stat(store.filepath(types.UNDO, 0)); + const second = await fs.stat(store.filepath(types.UNDO, 1)); + const third = await fs.stat(store.filepath(types.UNDO, 2)); + assert.equal(first.size, 952); + assert.equal(second.size, 952); + assert.equal(third.size, 272); + + const len = first.size + second.size + third.size - (8 * 16); + assert.equal(len, 128 * 16); + + for (let i = 0; i < 16; i++) { + const expect = blocks[i]; + const block = await store.readUndo(expect.hash); + assert.bufferEqual(block, expect.block); + } + }); + it('will recover from interrupt during block write', async () => { { const block = random.randomBytes(128); @@ -445,7 +491,7 @@ describe('BlockStore', function() { // would not be updated to include the used bytes and // thus this data should be overwritten. { - const filepath = store.filepath(0); + const filepath = store.filepath(types.BLOCK, 0); const fd = await fs.open(filepath, 'a'); @@ -522,6 +568,20 @@ describe('BlockStore', function() { assert.strictEqual(exists, true); }); + it('will check if block undo coins exists (false)', async () => { + const hash = random.randomBytes(32); + const exists = await store.hasUndo(hash); + assert.strictEqual(exists, false); + }); + + it('will check if block undo coins exists (true)', async () => { + const block = random.randomBytes(128); + const hash = random.randomBytes(32); + await store.writeUndo(hash, block); + const exists = await store.hasUndo(hash); + assert.strictEqual(exists, true); + }); + it('will prune blocks', async () => { const hashes = []; for (let i = 0; i < 16; i++) { @@ -531,9 +591,9 @@ describe('BlockStore', function() { await store.write(hash, block); } - const first = await fs.stat(store.filepath(0)); - const second = await fs.stat(store.filepath(1)); - const third = await fs.stat(store.filepath(2)); + const first = await fs.stat(store.filepath(types.BLOCK, 0)); + const second = await fs.stat(store.filepath(types.BLOCK, 1)); + const third = await 
fs.stat(store.filepath(types.BLOCK, 2)); const len = first.size + second.size + third.size - (8 * 16); assert.equal(len, 128 * 16); @@ -543,16 +603,50 @@ describe('BlockStore', function() { assert.strictEqual(pruned, true); } - assert.equal(await fs.exists(store.filepath(0)), false); - assert.equal(await fs.exists(store.filepath(1)), false); - assert.equal(await fs.exists(store.filepath(2)), false); + assert.equal(await fs.exists(store.filepath(types.BLOCK, 0)), false); + assert.equal(await fs.exists(store.filepath(types.BLOCK, 1)), false); + assert.equal(await fs.exists(store.filepath(types.BLOCK, 2)), false); for (let i = 0; i < 16; i++) { const exists = await store.has(hashes[i]); assert.strictEqual(exists, false); } - const exists = await store.db.has(layout.f.encode(0)); + const exists = await store.db.has(layout.f.encode(types.BLOCK, 0)); + assert.strictEqual(exists, false); + }); + + it('will prune block undo coins', async () => { + const hashes = []; + for (let i = 0; i < 16; i++) { + const block = random.randomBytes(128); + const hash = random.randomBytes(32); + hashes.push(hash); + await store.writeUndo(hash, block); + } + + const first = await fs.stat(store.filepath(types.UNDO, 0)); + const second = await fs.stat(store.filepath(types.UNDO, 1)); + const third = await fs.stat(store.filepath(types.UNDO, 2)); + + const len = first.size + second.size + third.size - (8 * 16); + assert.equal(len, 128 * 16); + + for (let i = 0; i < 16; i++) { + const pruned = await store.pruneUndo(hashes[i]); + assert.strictEqual(pruned, true); + } + + assert.equal(await fs.exists(store.filepath(types.UNDO, 0)), false); + assert.equal(await fs.exists(store.filepath(types.UNDO, 1)), false); + assert.equal(await fs.exists(store.filepath(types.UNDO, 2)), false); + + for (let i = 0; i < 16; i++) { + const exists = await store.hasUndo(hashes[i]); + assert.strictEqual(exists, false); + } + + const exists = await store.db.has(layout.f.encode(types.UNDO, 0)); assert.strictEqual(exists, 
false); }); }); @@ -639,6 +733,17 @@ describe('BlockStore', function() { assert.bufferEqual(block1, block2); }); + it('will write and read block undo coins', async () => { + const block1 = random.randomBytes(128); + const hash = random.randomBytes(32); + + await store.writeUndo(hash, block1); + + const block2 = await store.readUndo(hash); + + assert.bufferEqual(block1, block2); + }); + it('will read a block w/ offset and length', async () => { const block1 = random.randomBytes(128); const hash = random.randomBytes(32); @@ -687,6 +792,20 @@ describe('BlockStore', function() { assert.strictEqual(exists, true); }); + it('will check if block undo coins exists (false)', async () => { + const hash = random.randomBytes(32); + const exists = await store.has(hash); + assert.strictEqual(exists, false); + }); + + it('will check if block undo coins exists (true)', async () => { + const block = random.randomBytes(128); + const hash = random.randomBytes(32); + await store.writeUndo(hash, block); + const exists = await store.hasUndo(hash); + assert.strictEqual(exists, true); + }); + it('will prune blocks (true)', async () => { const block = random.randomBytes(128); const hash = random.randomBytes(32); @@ -704,5 +823,23 @@ describe('BlockStore', function() { const pruned = await store.prune(hash); assert.strictEqual(pruned, false); }); + + it('will prune block undo coins (true)', async () => { + const block = random.randomBytes(128); + const hash = random.randomBytes(32); + await store.writeUndo(hash, block); + const pruned = await store.pruneUndo(hash); + assert.strictEqual(pruned, true); + const block2 = await store.readUndo(hash); + assert.strictEqual(block2, null); + }); + + it('will prune block undo coins (false)', async () => { + const hash = random.randomBytes(32); + const exists = await store.hasUndo(hash); + assert.strictEqual(exists, false); + const pruned = await store.pruneUndo(hash); + assert.strictEqual(pruned, false); + }); }); }); From 
6be21203af1c942c35b59e80fcb743a713571d87 Mon Sep 17 00:00:00 2001 From: Javed Khan Date: Wed, 6 Mar 2019 15:43:43 -0800 Subject: [PATCH 08/31] blockchain: integrate blockstore into chaindb --- lib/blockchain/chaindb.js | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/lib/blockchain/chaindb.js b/lib/blockchain/chaindb.js index a361df4b..aa6845e4 100644 --- a/lib/blockchain/chaindb.js +++ b/lib/blockchain/chaindb.js @@ -11,6 +11,7 @@ const assert = require('bsert'); const bdb = require('bdb'); const bio = require('bufio'); const LRU = require('blru'); +const {resolve} = require('path'); const {BufferMap, BufferSet} = require('buffer-map'); const Amount = require('../btc/amount'); const Network = require('../protocol/network'); @@ -24,6 +25,7 @@ const Address = require('../primitives/address'); const ChainEntry = require('./chainentry'); const TXMeta = require('../primitives/txmeta'); const CoinEntry = require('../coins/coinentry'); +const FileBlockStore = require('../blockstore/file'); /** * ChainDB @@ -46,6 +48,9 @@ class ChainDB { this.state = new ChainState(); this.pending = null; this.current = null; + this.blockStore = new FileBlockStore({ + location: resolve(options.location, '../blocks') + }); this.cacheHash = new LRU(this.options.entryCache, null, BufferMap); this.cacheHeight = new LRU(this.options.entryCache); @@ -60,7 +65,8 @@ class ChainDB { this.logger.info('Opening ChainDB...'); await this.db.open(); - await this.db.verify(layout.V.encode(), 'chain', 4); + await this.db.verify(layout.V.encode(), 'chain', 5); + await this.blockStore.open(); const state = await this.getState(); @@ -101,7 +107,8 @@ class ChainDB { * @returns {Promise} */ - close() { + async close() { + await this.blockStore.close(); return this.db.close(); } @@ -768,8 +775,8 @@ class ChainDB { if (!hash) throw new Error(`Cannot find hash for ${i}.`); - b.del(layout.b.encode(hash)); b.del(layout.u.encode(hash)); + await this.blockStore.prune(hash); } 
try { @@ -1052,7 +1059,7 @@ class ChainDB { if (!hash) return null; - return this.db.get(layout.b.encode(hash)); + return this.blockStore.read(hash); } /** @@ -1639,7 +1646,7 @@ class ChainDB { this.del(layout.p.encode(tip.hash)); this.del(layout.h.encode(tip.hash)); this.del(layout.e.encode(tip.hash)); - this.del(layout.b.encode(tip.hash)); + await this.blockStore.prune(tip.hash); // Queue up hash to be removed // on successful write. @@ -1667,7 +1674,7 @@ class ChainDB { // Write actual block data (this may be // better suited to flat files in the future). - this.put(layout.b.encode(hash), block.toRaw()); + await this.blockStore.write(hash, block.toRaw()); if (!view) return; @@ -1691,7 +1698,7 @@ class ChainDB { if (!block) throw new Error('Block not found.'); - this.del(layout.b.encode(block.hash())); + await this.blockStore.prune(block.hash()); return this.disconnectBlock(entry, block); } @@ -1851,8 +1858,8 @@ class ChainDB { if (!hash) return; - this.del(layout.b.encode(hash)); this.del(layout.u.encode(hash)); + await this.blockStore.prune(hash); } /** From 8748e4dd2d1a7202e598d3166f5428dc50c73885 Mon Sep 17 00:00:00 2001 From: Javed Khan Date: Wed, 6 Mar 2019 15:46:04 -0800 Subject: [PATCH 09/31] migrate: move blocks to blockstore files --- migrate/chaindb4to5.js | 115 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 115 insertions(+) create mode 100644 migrate/chaindb4to5.js diff --git a/migrate/chaindb4to5.js b/migrate/chaindb4to5.js new file mode 100644 index 00000000..585fc868 --- /dev/null +++ b/migrate/chaindb4to5.js @@ -0,0 +1,115 @@ +'use strict'; + +const assert = require('assert'); +const bdb = require('bdb'); +const layout = require('../lib/blockchain/layout'); +const FileBlockStore = require('../lib/blockstore/file'); +const fs = require('bfile'); +const {resolve} = require('path'); + +assert(process.argv.length > 2, 'Please pass in a database path.'); + +// migration - +// chaindb: leveldb to flat files + +const db = bdb.create({ + 
location: process.argv[2], + memory: false, + compression: true, + cacheSize: 32 << 20, + createIfMissing: false +}); + +async function ensure(location) { + if (fs.unsupported) + return undefined; + + return fs.mkdirp(location); +} + +const location = resolve(process.argv[2], '../blocks'); + +const blockStore = new FileBlockStore({ + location: location +}); + +async function updateVersion() { + const ver = await checkVersion(); + + console.log('Updating version to %d.', ver + 1); + + const buf = Buffer.allocUnsafe(5 + 4); + buf.write('chain', 0, 'ascii'); + buf.writeUInt32LE(5, 5, true); + + const parent = db.batch(); + parent.put(layout.V.encode(), buf); + await parent.write(); +} + +async function checkVersion() { + console.log('Checking version.'); + + const data = await db.get(layout.V.encode()); + assert(data, 'No version.'); + + const ver = data.readUInt32LE(5, true); + + if (ver !== 4) + throw Error(`DB is version ${ver}.`); + + return ver; +} + +async function migrateBlocks() { + console.log('Migrating blocks'); + + let parent = db.batch(); + + const iter = db.iterator({ + gte: layout.b.min(), + lte: layout.b.max(), + keys: true, + values: true + }); + + let total = 0; + await iter.each(async (key, value) => { + const hash = key.slice(1); + await blockStore.write(hash, value); + parent.del(key); + + if (++total % 10000 === 0) { + console.log('Migrated up %d blocks.', total); + await parent.write(); + parent = db.batch(); + } + }); + await parent.write(); +} + +/* + * Execute + */ + +(async () => { + await db.open(); + await ensure(location); + await blockStore.open(); + + console.log('Opened %s.', process.argv[2]); + + await checkVersion(); + await migrateBlocks(); + await updateVersion(); + + await db.compactRange(); + await db.close(); + await blockStore.close(); +})().then(() => { + console.log('Migration complete.'); + process.exit(0); +}).catch((err) => { + console.error(err.stack); + process.exit(1); +}); From 11af5456ce64f016308c083325147c76784e2a64 
Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Thu, 7 Mar 2019 10:55:20 -0800 Subject: [PATCH 10/31] blockstore: additional options and create function --- lib/blockstore/file.js | 7 ++++++- lib/blockstore/index.js | 33 ++++++++++++++++++++++++++++++--- lib/blockstore/level.js | 10 +++++----- 3 files changed, 41 insertions(+), 9 deletions(-) diff --git a/lib/blockstore/file.js b/lib/blockstore/file.js index 1db803d2..9b176575 100644 --- a/lib/blockstore/file.js +++ b/lib/blockstore/file.js @@ -37,9 +37,14 @@ class FileBlockStore extends AbstractBlockStore { assert(isAbsolute(options.location), 'Location not absolute.'); this.location = options.location; + this.indexLocation = resolve(this.location, './index'); + this.db = bdb.create({ - location: resolve(this.location, './index') + location: this.indexLocation, + cacheSize: options.cacheSize, + compression: false }); + this.maxFileLength = options.maxFileLength || 128 * 1024 * 1024; this.network = Network.primary; diff --git a/lib/blockstore/index.js b/lib/blockstore/index.js index 77bf9715..b8e4c634 100644 --- a/lib/blockstore/index.js +++ b/lib/blockstore/index.js @@ -6,10 +6,37 @@ 'use strict'; +const {join} = require('path'); + +const AbstractBlockStore = require('./abstract'); +const LevelBlockStore = require('./level'); +const FileBlockStore = require('./file'); + /** * @module blockstore */ -exports.AbstractBlockStore = require('./abstract'); -exports.FileBlockStore = require('./file'); -exports.LevelBlockStore = require('./level'); +exports.create = (options) => { + const location = join(options.prefix, 'blocks'); + + if (options.memory) { + return new LevelBlockStore({ + network: options.network, + logger: options.logger, + location: location, + cacheSize: options.cacheSize, + memory: options.memory + }); + } + + return new FileBlockStore({ + network: options.network, + logger: options.logger, + location: location, + cacheSize: options.cacheSize + }); +}; + +exports.AbstractBlockStore = 
AbstractBlockStore; +exports.FileBlockStore = FileBlockStore; +exports.LevelBlockStore = LevelBlockStore; diff --git a/lib/blockstore/level.js b/lib/blockstore/level.js index bc965c87..83b9db7d 100644 --- a/lib/blockstore/level.js +++ b/lib/blockstore/level.js @@ -6,9 +6,7 @@ 'use strict'; -const {isAbsolute, resolve} = require('path'); const bdb = require('bdb'); -const assert = require('bsert'); const AbstractBlockStore = require('./abstract'); const layout = require('./layout'); const {types} = require('./common'); @@ -29,11 +27,13 @@ class LevelBlockStore extends AbstractBlockStore { constructor(options) { super(); - assert(isAbsolute(options.location), 'Location not absolute.'); - this.location = options.location; + this.db = bdb.create({ - location: resolve(this.location, './index') + location: this.location, + cacheSize: options.cacheSize, + compression: false, + memory: options.memory }); } From 0b0dd58a913fe5203142527821a7eea57476035f Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Thu, 7 Mar 2019 11:02:22 -0800 Subject: [PATCH 11/31] node: add blockstore to full node --- lib/blockchain/chain.js | 7 +++++++ lib/blockchain/chaindb.js | 21 ++++++++------------- lib/node/fullnode.js | 15 ++++++++++++++- test/chain-test.js | 9 +++++++++ test/mempool-test.js | 10 +++++++++- test/pow-test.js | 10 +++++++++- 6 files changed, 56 insertions(+), 16 deletions(-) diff --git a/lib/blockchain/chain.js b/lib/blockchain/chain.js index 5dab1d9f..08d6dfaa 100644 --- a/lib/blockchain/chain.js +++ b/lib/blockchain/chain.js @@ -49,6 +49,7 @@ class Chain extends AsyncEmitter { this.network = this.options.network; this.logger = this.options.logger.context('chain'); + this.blocks = this.options.blocks; this.workers = this.options.workers; this.db = new ChainDB(this.options); @@ -2662,6 +2663,7 @@ class ChainOptions { constructor(options) { this.network = Network.primary; this.logger = Logger.global; + this.blocks = null; this.workers = null; this.prefix = null; @@ -2695,6 
+2697,11 @@ class ChainOptions { */ fromOptions(options) { + assert(options.blocks && typeof options.blocks === 'object', + 'Chain requires a blockstore.'); + + this.blocks = options.blocks; + if (options.network != null) this.network = Network.get(options.network); diff --git a/lib/blockchain/chaindb.js b/lib/blockchain/chaindb.js index aa6845e4..20eefea1 100644 --- a/lib/blockchain/chaindb.js +++ b/lib/blockchain/chaindb.js @@ -11,7 +11,6 @@ const assert = require('bsert'); const bdb = require('bdb'); const bio = require('bufio'); const LRU = require('blru'); -const {resolve} = require('path'); const {BufferMap, BufferSet} = require('buffer-map'); const Amount = require('../btc/amount'); const Network = require('../protocol/network'); @@ -25,7 +24,6 @@ const Address = require('../primitives/address'); const ChainEntry = require('./chainentry'); const TXMeta = require('../primitives/txmeta'); const CoinEntry = require('../coins/coinentry'); -const FileBlockStore = require('../blockstore/file'); /** * ChainDB @@ -42,15 +40,14 @@ class ChainDB { this.options = options; this.network = this.options.network; this.logger = this.options.logger.context('chaindb'); + this.blocks = this.options.blocks; this.db = bdb.create(this.options); + this.stateCache = new StateCache(this.network); this.state = new ChainState(); this.pending = null; this.current = null; - this.blockStore = new FileBlockStore({ - location: resolve(options.location, '../blocks') - }); this.cacheHash = new LRU(this.options.entryCache, null, BufferMap); this.cacheHeight = new LRU(this.options.entryCache); @@ -66,7 +63,6 @@ class ChainDB { await this.db.open(); await this.db.verify(layout.V.encode(), 'chain', 5); - await this.blockStore.open(); const state = await this.getState(); @@ -108,7 +104,6 @@ class ChainDB { */ async close() { - await this.blockStore.close(); return this.db.close(); } @@ -776,7 +771,7 @@ class ChainDB { throw new Error(`Cannot find hash for ${i}.`); b.del(layout.u.encode(hash)); - 
await this.blockStore.prune(hash); + await this.blocks.prune(hash); } try { @@ -1059,7 +1054,7 @@ class ChainDB { if (!hash) return null; - return this.blockStore.read(hash); + return this.blocks.read(hash); } /** @@ -1646,7 +1641,7 @@ class ChainDB { this.del(layout.p.encode(tip.hash)); this.del(layout.h.encode(tip.hash)); this.del(layout.e.encode(tip.hash)); - await this.blockStore.prune(tip.hash); + await this.blocks.prune(tip.hash); // Queue up hash to be removed // on successful write. @@ -1674,7 +1669,7 @@ class ChainDB { // Write actual block data (this may be // better suited to flat files in the future). - await this.blockStore.write(hash, block.toRaw()); + await this.blocks.write(hash, block.toRaw()); if (!view) return; @@ -1698,7 +1693,7 @@ class ChainDB { if (!block) throw new Error('Block not found.'); - await this.blockStore.prune(block.hash()); + await this.blocks.prune(block.hash()); return this.disconnectBlock(entry, block); } @@ -1859,7 +1854,7 @@ class ChainDB { return; this.del(layout.u.encode(hash)); - await this.blockStore.prune(hash); + await this.blocks.prune(hash); } /** diff --git a/lib/node/fullnode.js b/lib/node/fullnode.js index 0e6a8d0e..99e1ad9b 100644 --- a/lib/node/fullnode.js +++ b/lib/node/fullnode.js @@ -16,6 +16,7 @@ const Miner = require('../mining/miner'); const Node = require('./node'); const HTTP = require('./http'); const RPC = require('./rpc'); +const blockstore = require('../blockstore'); /** * Full Node @@ -40,10 +41,20 @@ class FullNode extends Node { // SPV flag. this.spv = false; - // Instantiate blockchain. + // Instantiate block storage. + this.blocks = blockstore.create({ + network: this.network, + logger: this.logger, + prefix: this.config.prefix, + cacheSize: this.config.mb('block-cache-size'), + memory: this.config.bool('memory') + }); + + // Chain needs access to blocks. 
this.chain = new Chain({ network: this.network, logger: this.logger, + blocks: this.blocks, workers: this.workers, memory: this.config.bool('memory'), prefix: this.config.prefix, @@ -218,6 +229,7 @@ class FullNode extends Node { this.opened = true; await this.handlePreopen(); + await this.blocks.open(); await this.chain.open(); await this.mempool.open(); await this.miner.open(); @@ -250,6 +262,7 @@ class FullNode extends Node { await this.miner.close(); await this.mempool.close(); await this.chain.close(); + await this.blocks.close(); await this.handleClose(); } diff --git a/test/chain-test.js b/test/chain-test.js index d50237ac..9c915031 100644 --- a/test/chain-test.js +++ b/test/chain-test.js @@ -17,6 +17,7 @@ const Output = require('../lib/primitives/output'); const common = require('../lib/blockchain/common'); const nodejsUtil = require('util'); const Opcode = require('../lib/script/opcode'); +const BlockStore = require('../lib/blockstore/level'); const opcodes = Script.opcodes; const ZERO_KEY = Buffer.alloc(33, 0x00); @@ -30,8 +31,14 @@ const workers = new WorkerPool({ enabled: true }); +const blocks = new BlockStore({ + memory: true, + network +}); + const chain = new Chain({ memory: true, + blocks, network, workers }); @@ -115,6 +122,7 @@ describe('Chain', function() { this.timeout(process.browser ? 
1200000 : 60000); it('should open chain and miner', async () => { + await blocks.open(); await chain.open(); await miner.open(); }); @@ -895,5 +903,6 @@ describe('Chain', function() { it('should cleanup', async () => { await miner.close(); await chain.close(); + await blocks.close(); }); }); diff --git a/test/mempool-test.js b/test/mempool-test.js index ed76f869..247720f6 100644 --- a/test/mempool-test.js +++ b/test/mempool-test.js @@ -18,6 +18,7 @@ const Script = require('../lib/script/script'); const opcodes = Script.opcodes; const Witness = require('../lib/script/witness'); const MemWallet = require('./util/memwallet'); +const BlockStore = require('../lib/blockstore/level'); const ALL = Script.hashType.ALL; const ONE_HASH = Buffer.alloc(32, 0x00); @@ -27,9 +28,14 @@ const workers = new WorkerPool({ enabled: true }); +const blocks = new BlockStore({ + memory: true +}); + const chain = new Chain({ memory: true, - workers + workers, + blocks }); const mempool = new Mempool({ @@ -68,6 +74,7 @@ describe('Mempool', function() { it('should open mempool', async () => { await workers.open(); + await blocks.open(); await chain.open(); await mempool.open(); chain.state.flags |= Script.flags.VERIFY_WITNESS; @@ -453,6 +460,7 @@ describe('Mempool', function() { it('should destroy mempool', async () => { await mempool.close(); await chain.close(); + await blocks.close(); await workers.close(); }); }); diff --git a/test/pow-test.js b/test/pow-test.js index bd2707f5..6f54ad5a 100644 --- a/test/pow-test.js +++ b/test/pow-test.js @@ -7,6 +7,7 @@ const assert = require('./util/assert'); const Chain = require('../lib/blockchain/chain'); const ChainEntry = require('../lib/blockchain/chainentry'); const Network = require('../lib/protocol/network'); +const BlockStore = require('../lib/blockstore/level'); const network = Network.get('main'); @@ -14,13 +15,20 @@ function random(max) { return Math.floor(Math.random() * max); } -const chain = new Chain({ +const blocks = new BlockStore({ 
memory: true, network }); +const chain = new Chain({ + memory: true, + network, + blocks +}); + describe('Difficulty', function() { it('should open chain', async () => { + await blocks.open(); await chain.open(); }); From 89d3253f293076961fd6d8a8227ee1a704177594 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Thu, 7 Mar 2019 11:27:15 -0800 Subject: [PATCH 12/31] blockstore: add ensure method to create directories --- lib/blockstore/abstract.js | 10 ++++++++++ lib/blockstore/file.js | 11 +++++++++++ lib/blockstore/level.js | 11 +++++++++++ lib/node/node.js | 4 ++++ test/blockstore-test.js | 7 +++---- 5 files changed, 39 insertions(+), 4 deletions(-) diff --git a/lib/blockstore/abstract.js b/lib/blockstore/abstract.js index 69113e73..e12d0488 100644 --- a/lib/blockstore/abstract.js +++ b/lib/blockstore/abstract.js @@ -30,6 +30,16 @@ class AbstractBlockStore { this.logger = Logger.global.context('blockstore'); } + /** + * This method ensures that resources are available + * before opening. + * @returns {Promise} + */ + + async ensure() { + throw new Error('Abstract method.'); + } + /** * This method opens any necessary resources and * initializes the store to be ready to be queried. diff --git a/lib/blockstore/file.js b/lib/blockstore/file.js index 9b176575..132a388f 100644 --- a/lib/blockstore/file.js +++ b/lib/blockstore/file.js @@ -129,6 +129,17 @@ class FileBlockStore extends AbstractBlockStore { } } + /** + * This method ensures that both the block storage directory + * and index directory exist + * before opening. + * @returns {Promise} + */ + + async ensure() { + return fs.mkdirp(this.indexLocation); + } + /** * Opens the file block store. It will regenerate necessary block * indexing if the index is missing or inconsistent. 
diff --git a/lib/blockstore/level.js b/lib/blockstore/level.js index 83b9db7d..f745e370 100644 --- a/lib/blockstore/level.js +++ b/lib/blockstore/level.js @@ -7,6 +7,7 @@ 'use strict'; const bdb = require('bdb'); +const fs = require('bfile'); const AbstractBlockStore = require('./abstract'); const layout = require('./layout'); const {types} = require('./common'); @@ -37,6 +38,16 @@ class LevelBlockStore extends AbstractBlockStore { }); } + /** + * This method ensures that the storage directory exists + * before opening. + * @returns {Promise} + */ + + async ensure() { + return fs.mkdirp(this.location); + } + /** * Opens the block storage. * @returns {Promise} diff --git a/lib/node/node.js b/lib/node/node.js index 099f4985..b407020e 100644 --- a/lib/node/node.js +++ b/lib/node/node.js @@ -57,6 +57,7 @@ class Node extends EventEmitter { this.workers = null; this.spv = false; + this.blocks = null; this.chain = null; this.fees = null; this.mempool = null; @@ -135,6 +136,9 @@ class Node extends EventEmitter { if (this.memory) return undefined; + if (this.blocks) + await this.blocks.ensure(); + return fs.mkdirp(this.config.prefix); } diff --git a/test/blockstore-test.js b/test/blockstore-test.js index 62ef22e1..fc69db65 100644 --- a/test/blockstore-test.js +++ b/test/blockstore-test.js @@ -10,7 +10,6 @@ const common = require('./util/common'); const {resolve} = require('path'); const fs = require('bfile'); const {rimraf} = require('./util/common'); -const {mkdirp} = require('bfile'); const random = require('bcrypto/lib/random'); const vectors = [ @@ -347,13 +346,13 @@ describe('BlockStore', function() { beforeEach(async () => { await rimraf(location); - await mkdirp(location); store = new FileBlockStore({ location: location, maxFileLength: 1024 }); + await store.ensure(); await store.open(); }); @@ -657,13 +656,13 @@ describe('BlockStore', function() { beforeEach(async () => { await rimraf(location); - await mkdirp(location); store = new FileBlockStore({ location: 
location, maxFileLength: 1024 * 1024 }); + await store.ensure(); await store.open(); }); @@ -709,12 +708,12 @@ describe('BlockStore', function() { beforeEach(async () => { await rimraf(location); - await mkdirp(location); store = new LevelBlockStore({ location: location }); + await store.ensure(); await store.open(); }); From 8b6ec89a642fdb5e5b3c4701fc0971021d43740e Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Thu, 7 Mar 2019 11:54:57 -0800 Subject: [PATCH 13/31] blockchain: use blockstore for undo coins --- lib/blockchain/chaindb.js | 10 +++++----- lib/blockchain/layout.js | 4 ++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/lib/blockchain/chaindb.js b/lib/blockchain/chaindb.js index 20eefea1..96b38493 100644 --- a/lib/blockchain/chaindb.js +++ b/lib/blockchain/chaindb.js @@ -770,7 +770,7 @@ class ChainDB { if (!hash) throw new Error(`Cannot find hash for ${i}.`); - b.del(layout.u.encode(hash)); + await this.blocks.pruneUndo(hash); await this.blocks.prune(hash); } @@ -1016,7 +1016,7 @@ class ChainDB { */ async getUndoCoins(hash) { - const data = await this.db.get(layout.u.encode(hash)); + const data = await this.blocks.readUndo(hash); if (!data) return new UndoCoins(); @@ -1764,7 +1764,7 @@ class ChainDB { // Write undo coins (if there are any). if (!view.undo.isEmpty()) - this.put(layout.u.encode(hash), view.undo.commit()); + await this.blocks.writeUndo(hash, view.undo.commit()); // Prune height-288 if pruning is enabled. return this.pruneBlock(entry); @@ -1823,7 +1823,7 @@ class ChainDB { this.saveView(view); // Remove undo coins. 
- this.del(layout.u.encode(hash)); + await this.blocks.pruneUndo(hash); return view; } @@ -1853,7 +1853,7 @@ class ChainDB { if (!hash) return; - this.del(layout.u.encode(hash)); + await this.blocks.pruneUndo(hash); await this.blocks.prune(hash); } diff --git a/lib/blockchain/layout.js b/lib/blockchain/layout.js index 1bc801d5..01aaa086 100644 --- a/lib/blockchain/layout.js +++ b/lib/blockchain/layout.js @@ -19,10 +19,10 @@ const bdb = require('bdb'); * H[height] -> hash * n[hash] -> next hash * p[hash] -> tip index - * b[hash] -> block + * b[hash] -> block (deprecated) * t[hash] -> extended tx * c[hash] -> coins - * u[hash] -> undo coins + * u[hash] -> undo coins (deprecated) * v[bit][hash] -> versionbits state * T[addr-hash][hash] -> dummy (tx by address) * C[addr-hash][hash][index] -> dummy (coin by address) From 2ba3ee6354574392b20aac60c4549324825512b6 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Thu, 7 Mar 2019 11:56:41 -0800 Subject: [PATCH 14/31] migrate: move block undo coins to blockstore --- migrate/chaindb4to5.js | 44 +++++++++++++++++++++++++++++++++--------- 1 file changed, 35 insertions(+), 9 deletions(-) diff --git a/migrate/chaindb4to5.js b/migrate/chaindb4to5.js index 585fc868..8a2d946e 100644 --- a/migrate/chaindb4to5.js +++ b/migrate/chaindb4to5.js @@ -4,7 +4,6 @@ const assert = require('assert'); const bdb = require('bdb'); const layout = require('../lib/blockchain/layout'); const FileBlockStore = require('../lib/blockstore/file'); -const fs = require('bfile'); const {resolve} = require('path'); assert(process.argv.length > 2, 'Please pass in a database path.'); @@ -20,13 +19,6 @@ const db = bdb.create({ createIfMissing: false }); -async function ensure(location) { - if (fs.unsupported) - return undefined; - - return fs.mkdirp(location); -} - const location = resolve(process.argv[2], '../blocks'); const blockStore = new FileBlockStore({ @@ -61,6 +53,36 @@ async function checkVersion() { return ver; } +async function migrateUndoBlocks() { 
+ console.log('Migrating undo blocks'); + + let parent = db.batch(); + + const iter = db.iterator({ + gte: layout.u.min(), + lte: layout.u.max(), + keys: true, + values: true + }); + + let total = 0; + + await iter.each(async (key, value) => { + const hash = key.slice(1); + await blockStore.writeUndo(hash, value); + parent.del(key); + + if (++total % 10000 === 0) { + console.log('Migrated up %d undo blocks.', total); + await parent.write(); + parent = db.batch(); + } + }); + + console.log('Migrated all %d undo blocks.', total); + await parent.write(); +} + async function migrateBlocks() { console.log('Migrating blocks'); @@ -74,6 +96,7 @@ async function migrateBlocks() { }); let total = 0; + await iter.each(async (key, value) => { const hash = key.slice(1); await blockStore.write(hash, value); @@ -85,6 +108,8 @@ async function migrateBlocks() { parent = db.batch(); } }); + + console.log('Migrated all %d blocks.', total); await parent.write(); } @@ -94,13 +119,14 @@ async function migrateBlocks() { (async () => { await db.open(); - await ensure(location); + await blockStore.ensure(); await blockStore.open(); console.log('Opened %s.', process.argv[2]); await checkVersion(); await migrateBlocks(); + await migrateUndoBlocks(); await updateVersion(); await db.compactRange(); From d1e747cf2f5aa0591aec9ca2c80a4bfcce1f3c9c Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Thu, 7 Mar 2019 15:27:21 -0800 Subject: [PATCH 15/31] blockstore: minor, update comments and docs --- bench/blockstore.js | 7 ++++--- lib/blockchain/chaindb.js | 3 +-- lib/blockstore/abstract.js | 4 ++-- lib/blockstore/common.js | 6 +++--- lib/blockstore/file.js | 13 ++++++++----- lib/blockstore/layout.js | 4 ++-- lib/blockstore/level.js | 2 +- lib/blockstore/records.js | 4 ++-- 8 files changed, 23 insertions(+), 20 deletions(-) diff --git a/bench/blockstore.js b/bench/blockstore.js index 362869ea..817633b0 100644 --- a/bench/blockstore.js +++ b/bench/blockstore.js @@ -1,5 +1,5 @@ /*! 
- * bench/blockstore.js - benchmark block store for bcoin + * bench/blockstore.js - benchmark blockstore for bcoin * * This can be run to benchmark the performance of the blockstore * module for writing, reading and pruning block data. Results are @@ -7,14 +7,15 @@ * * Usage: * node ./blockstore.js [--maxfile=] [--total=] - [--location=] [--store=] [--unsafe] + * [--location=] [--store=] + * [--output=] [--unsafe] * * Options: * - `maxfile` The maximum file size (applies to "file" store). * - `total` The total number of block bytes to write. * - `location` The location to store block data. * - `store` This can be "file" or "level". - * - `output` This can be "json" or "bench". + * - `output` This can be "json", "bench" or "benchjson". * - `unsafe` This will allocate block data directly from memory * instead of random, it is faster. * diff --git a/lib/blockchain/chaindb.js b/lib/blockchain/chaindb.js index 96b38493..eef121c4 100644 --- a/lib/blockchain/chaindb.js +++ b/lib/blockchain/chaindb.js @@ -1667,8 +1667,7 @@ class ChainDB { if (this.options.spv) return; - // Write actual block data (this may be - // better suited to flat files in the future). + // Write actual block data. await this.blocks.write(hash, block.toRaw()); if (!view) diff --git a/lib/blockstore/abstract.js b/lib/blockstore/abstract.js index e12d0488..efde8bd5 100644 --- a/lib/blockstore/abstract.js +++ b/lib/blockstore/abstract.js @@ -1,5 +1,5 @@ /*! - * blockstore/abstract.js - abstract block store for bcoin + * blockstore/abstract.js - abstract blockstore for bcoin * Copyright (c) 2019, Braydon Fuller (MIT License). * https://github.com/bcoin-org/bcoin */ @@ -52,7 +52,7 @@ class AbstractBlockStore { /** * This method closes resources and prepares - * store to be closed. + * the store to be closed. 
* @returns {Promise} */ diff --git a/lib/blockstore/common.js b/lib/blockstore/common.js index 3bfed645..f1cb310f 100644 --- a/lib/blockstore/common.js +++ b/lib/blockstore/common.js @@ -1,5 +1,5 @@ /*! - * common.js - block store constants for bcoin + * common.js - blockstore constants for bcoin * Copyright (c) 2019, Braydon Fuller (MIT License). * https://github.com/bcoin-org/bcoin */ @@ -11,7 +11,7 @@ */ /** - * Data types. + * Block data types. * @enum {Number} */ @@ -21,7 +21,7 @@ exports.types = { }; /** - * File prefixes for data types. + * File prefixes for block data types. * @enum {String} */ diff --git a/lib/blockstore/file.js b/lib/blockstore/file.js index 132a388f..5ebe2aa5 100644 --- a/lib/blockstore/file.js +++ b/lib/blockstore/file.js @@ -1,5 +1,5 @@ /*! - * blockstore/file.js - file block store for bcoin + * blockstore/file.js - file blockstore for bcoin * Copyright (c) 2019, Braydon Fuller (MIT License). * https://github.com/bcoin-org/bcoin */ @@ -157,7 +157,7 @@ class FileBlockStore extends AbstractBlockStore { /** * This closes the file block store and underlying - * databases for indexing. + * indexing databases. */ async close() { @@ -170,6 +170,7 @@ class FileBlockStore extends AbstractBlockStore { * This method will determine the file path based on the file number * and the current block data location. * @private + * @param {Number} type - The type of block data * @param {Number} fileno - The number of the file. * @returns {Promise} */ @@ -199,9 +200,10 @@ class FileBlockStore extends AbstractBlockStore { /** * This method will select and potentially allocate a file to - * write a block based on the size. + * write a block based on the size and type. * @private - * @param {Number} length - The number of bytes of the data to be written. 
+ * @param {Number} type - The type of block data + * @param {Number} length - The number of bytes * @returns {Promise} */ @@ -280,6 +282,7 @@ class FileBlockStore extends AbstractBlockStore { * data to the last written file and updating indexes to point * to the file and position. * @private + * @param {Number} type - The type of block data * @param {Buffer} hash - The block hash * @param {Buffer} data - The block data * @returns {Promise} @@ -372,7 +375,7 @@ class FileBlockStore extends AbstractBlockStore { * This methods reads data from disk by retrieving the index of * the data and reading from the correponding file and location. * @private - * @param {Buffer} type - The data type + * @param {Number} type - The type of block data * @param {Buffer} hash - The block hash * @param {Number} offset - The offset within the block * @param {Number} length - The number of bytes of the data diff --git a/lib/blockstore/layout.js b/lib/blockstore/layout.js index 8b221d5c..ce5cf290 100644 --- a/lib/blockstore/layout.js +++ b/lib/blockstore/layout.js @@ -1,5 +1,5 @@ /*! - * blockstore/layout.js - file block store data layout for bcoin + * blockstore/layout.js - file blockstore data layout for bcoin * Copyright (c) 2019, Braydon Fuller (MIT License). * https://github.com/bcoin-org/bcoin */ @@ -11,7 +11,7 @@ const bdb = require('bdb'); /* * Database Layout: * V -> db version - * B[type] -> last file record by type + * F[type] -> last file record by type * f[type][fileno] -> file record by type and file number * b[type][hash] -> block record by type and block hash */ diff --git a/lib/blockstore/level.js b/lib/blockstore/level.js index f745e370..ebc09675 100644 --- a/lib/blockstore/level.js +++ b/lib/blockstore/level.js @@ -1,5 +1,5 @@ /*! - * blockstore/level.js - leveldb block store for bcoin + * blockstore/level.js - leveldb blockstore for bcoin * Copyright (c) 2019, Braydon Fuller (MIT License). 
* https://github.com/bcoin-org/bcoin */ diff --git a/lib/blockstore/records.js b/lib/blockstore/records.js index 1f75ce00..d387171b 100644 --- a/lib/blockstore/records.js +++ b/lib/blockstore/records.js @@ -1,5 +1,5 @@ /*! - * blockstore/records.js - block store records + * blockstore/records.js - blockstore records * Copyright (c) 2019, Braydon Fuller (MIT License). * https://github.com/bcoin-org/bcoin */ @@ -82,7 +82,7 @@ class BlockRecord { class FileRecord { /** - * Create a chain state. + * Create a file record. * @constructor */ From bdcb392a9c83a4948f429a95b6ad6eeb002908f6 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Tue, 12 Mar 2019 16:44:37 -0700 Subject: [PATCH 16/31] blockchain: do not check blockstore with spv --- lib/blockchain/chain.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/blockchain/chain.js b/lib/blockchain/chain.js index 08d6dfaa..7e94c6a7 100644 --- a/lib/blockchain/chain.js +++ b/lib/blockchain/chain.js @@ -2697,8 +2697,10 @@ class ChainOptions { */ fromOptions(options) { - assert(options.blocks && typeof options.blocks === 'object', - 'Chain requires a blockstore.'); + if (!options.spv) { + assert(options.blocks && typeof options.blocks === 'object', + 'Chain requires a blockstore.'); + } this.blocks = options.blocks; From 0f0cb00c541732f4eca0351fba1871ad1dc9b630 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Thu, 14 Mar 2019 09:01:03 -0700 Subject: [PATCH 17/31] blockstore: minor, spelling --- lib/blockstore/file.js | 2 +- test/blockstore-test.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/blockstore/file.js b/lib/blockstore/file.js index 5ebe2aa5..84d2f856 100644 --- a/lib/blockstore/file.js +++ b/lib/blockstore/file.js @@ -373,7 +373,7 @@ class FileBlockStore extends AbstractBlockStore { /** * This methods reads data from disk by retrieving the index of - * the data and reading from the correponding file and location. 
+ * the data and reading from the corresponding file and location. * @private * @param {Number} type - The type of block data * @param {Buffer} hash - The block hash diff --git a/test/blockstore-test.js b/test/blockstore-test.js index fc69db65..eddd756b 100644 --- a/test/blockstore-test.js +++ b/test/blockstore-test.js @@ -530,7 +530,7 @@ describe('BlockStore', function() { const block = random.randomBytes(128); const hash = random.randomBytes(32); - // Accidently don't use `await` and attempt to + // Accidentally don't use `await` and attempt to // write multiple blocks in parallel and at the // same file position. const promise = store.write(hash, block); From 747a8e707b8fa0e628a728b169e19ca759132312 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Thu, 14 Mar 2019 10:28:51 -0700 Subject: [PATCH 18/31] blockstore: tests and fixes for blockstore error cases --- lib/blockstore/file.js | 11 +++- test/blockstore-test.js | 136 +++++++++++++++++++++++++++++++++++++++- 2 files changed, 143 insertions(+), 4 deletions(-) diff --git a/lib/blockstore/file.js b/lib/blockstore/file.js index 84d2f856..fd31bf76 100644 --- a/lib/blockstore/file.js +++ b/lib/blockstore/file.js @@ -47,6 +47,9 @@ class FileBlockStore extends AbstractBlockStore { this.maxFileLength = options.maxFileLength || 128 * 1024 * 1024; + assert(Number.isSafeInteger(this.maxFileLength), + 'Invalid max file length.'); + this.network = Network.primary; if (options.network != null) @@ -319,11 +322,15 @@ class FileBlockStore extends AbstractBlockStore { await fs.close(fd); - if (mwritten !== mlength) + if (mwritten !== mlength) { + this.writing = false; throw new Error('Could not write block magic.'); + } - if (bwritten !== blength) + if (bwritten !== blength) { + this.writing = false; throw new Error('Could not write block.'); + } filerecord.blocks += 1; filerecord.used += length; diff --git a/test/blockstore-test.js b/test/blockstore-test.js index eddd756b..dd84f159 100644 --- a/test/blockstore-test.js +++ 
b/test/blockstore-test.js @@ -64,8 +64,9 @@ describe('BlockStore', function() { }); it('has unimplemented base methods', async () => { - const methods = ['open', 'close', 'write', 'read', - 'prune', 'has']; + const methods = ['open', 'close', 'write', 'writeUndo', + 'read', 'readUndo', 'prune', 'pruneUndo', + 'has', 'hasUndo', 'ensure']; const store = new AbstractBlockStore(); @@ -277,6 +278,40 @@ describe('BlockStore', function() { }); }); + describe('constructor', function() { + it('will error with invalid location', () => { + let err = null; + + try { + new FileBlockStore({ + location: 'tmp/.bcoin/blocks', + maxFileLength: 1024 + }); + } catch (e) { + err = e; + } + + assert(err); + assert.equal(err.message, 'Location not absolute.'); + }); + + it('will error with invalid max file length', () => { + let err = null; + + try { + new FileBlockStore({ + location: location, + maxFileLength: 'notanumber' + }); + } catch (e) { + err = e; + } + + assert(err); + assert.equal(err.message, 'Invalid max file length.'); + }); + }); + describe('allocate', function() { it('will fail with length above file max', async () => { let err = null; @@ -337,6 +372,103 @@ describe('BlockStore', function() { const filepath = store.filepath(types.UNDO, 99999); assert.equal(filepath, '/tmp/.bcoin/blocks/blu99999.dat'); }); + + it('will fail for unknown prefix', () => { + let err = null; + try { + store.filepath(0, 1234); + } catch (e) { + err = e; + } + + assert(err); + assert.equal(err.message, 'Unknown file prefix.'); + }); + }); + + describe('write', function() { + const write = fs.write; + const open = fs.open; + const close = fs.close; + let allocate = null; + + beforeEach(() => { + allocate = store.allocate; + }); + + afterEach(() => { + // Restore stubbed methods. 
+ fs.write = write; + fs.open = open; + fs.close = close; + store.allocate = allocate; + }); + + it('will error if total magic bytes not written', async () => { + let err = null; + + store.allocate = () => { + return { + fileno: 20, + filerecord: { + used: 0 + }, + filepath: '/tmp/.bcoin/blocks/blk00020.dat' + }; + }; + fs.open = () => 7; + fs.close = () => undefined; + fs.write = () => 0; + + try { + const hash = random.randomBytes(128); + const block = random.randomBytes(32); + await store.write(hash, block); + } catch (e) { + err = e; + } + + assert(err, 'Expected error.'); + assert.equal(err.message, 'Could not write block magic.'); + }); + + it('will error if total block bytes not written', async () => { + let err = null; + + let called = 0; + store.allocate = () => { + return { + fileno: 20, + filerecord: { + used: 0 + }, + filepath: '/tmp/.bcoin/blocks/blk00020.dat' + }; + }; + fs.open = () => 7; + fs.close = () => undefined; + fs.write = (fd, buffer, offset, length, position) => { + let written = 0; + + if (called === 0) + written = length; + + called += 1; + + return written; + }; + + try { + const hash = random.randomBytes(128); + const block = random.randomBytes(32); + await store.write(hash, block); + } catch (e) { + err = e; + } + + assert(err, 'Expected error.'); + assert.equal(err.message, 'Could not write block.'); + }); + }); }); From 58e623e30ac72043a4454aedf1277621aaaa93d3 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Thu, 14 Mar 2019 10:41:11 -0700 Subject: [PATCH 19/31] test: minor, cleanup and clarity --- test/blockstore-test.js | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/test/blockstore-test.js b/test/blockstore-test.js index dd84f159..cc7525c7 100644 --- a/test/blockstore-test.js +++ b/test/blockstore-test.js @@ -492,6 +492,10 @@ describe('BlockStore', function() { await store.close(); }); + after(async () => { + await rimraf(location); + }); + it('will write and read a block', async () => { 
const block1 = random.randomBytes(128); const hash = random.randomBytes(32); @@ -567,7 +571,8 @@ describe('BlockStore', function() { assert.equal(second.size, 952); assert.equal(third.size, 272); - const len = first.size + second.size + third.size - (8 * 16); + const magic = (8 * 16); + const len = first.size + second.size + third.size - magic; assert.equal(len, 128 * 16); for (let i = 0; i < 16; i++) { @@ -596,7 +601,8 @@ describe('BlockStore', function() { assert.equal(second.size, 952); assert.equal(third.size, 272); - const len = first.size + second.size + third.size - (8 * 16); + const magic = (8 * 16); + const len = first.size + second.size + third.size - magic; assert.equal(len, 128 * 16); for (let i = 0; i < 16; i++) { @@ -726,7 +732,8 @@ describe('BlockStore', function() { const second = await fs.stat(store.filepath(types.BLOCK, 1)); const third = await fs.stat(store.filepath(types.BLOCK, 2)); - const len = first.size + second.size + third.size - (8 * 16); + const magic = (8 * 16); + const len = first.size + second.size + third.size - magic; assert.equal(len, 128 * 16); for (let i = 0; i < 16; i++) { @@ -760,7 +767,8 @@ describe('BlockStore', function() { const second = await fs.stat(store.filepath(types.UNDO, 1)); const third = await fs.stat(store.filepath(types.UNDO, 2)); - const len = first.size + second.size + third.size - (8 * 16); + const magic = (8 * 16); + const len = first.size + second.size + third.size - magic; assert.equal(len, 128 * 16); for (let i = 0; i < 16; i++) { @@ -802,6 +810,10 @@ describe('BlockStore', function() { await store.close(); }); + after(async () => { + await rimraf(location); + }); + it('will import from files (e.g. 
db corruption)', async () => { const blocks = []; @@ -853,6 +865,10 @@ describe('BlockStore', function() { await store.close(); }); + after(async () => { + await rimraf(location); + }); + it('will write and read a block', async () => { const block1 = random.randomBytes(128); const hash = random.randomBytes(32); From 5cbbcf5409685ed0436825b58b0cf3053b0308ca Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Fri, 15 Mar 2019 08:54:48 -0700 Subject: [PATCH 20/31] blockstore: do not write block twice --- lib/blockstore/file.js | 7 +++++++ test/blockstore-test.js | 20 ++++++++++++++++++++ 2 files changed, 27 insertions(+) diff --git a/lib/blockstore/file.js b/lib/blockstore/file.js index fd31bf76..ff5feecf 100644 --- a/lib/blockstore/file.js +++ b/lib/blockstore/file.js @@ -297,6 +297,11 @@ class FileBlockStore extends AbstractBlockStore { this.writing = true; + if (await this.db.has(layout.b.encode(type, hash))) { + this.writing = false; + return false; + } + const mlength = 8; const blength = data.length; const length = data.length + mlength; @@ -352,6 +357,8 @@ class FileBlockStore extends AbstractBlockStore { await b.write(); this.writing = false; + + return true; } /** diff --git a/test/blockstore-test.js b/test/blockstore-test.js index cc7525c7..5869a87c 100644 --- a/test/blockstore-test.js +++ b/test/blockstore-test.js @@ -391,9 +391,11 @@ describe('BlockStore', function() { const open = fs.open; const close = fs.close; let allocate = null; + let has = null; beforeEach(() => { allocate = store.allocate; + has = store.db.has; }); afterEach(() => { @@ -402,6 +404,7 @@ describe('BlockStore', function() { fs.open = open; fs.close = close; store.allocate = allocate; + store.db.has = has; }); it('will error if total magic bytes not written', async () => { @@ -416,6 +419,7 @@ describe('BlockStore', function() { filepath: '/tmp/.bcoin/blocks/blk00020.dat' }; }; + store.db.has = () => false; fs.open = () => 7; fs.close = () => undefined; fs.write = () => 0; @@ -445,6 
+449,7 @@ describe('BlockStore', function() { filepath: '/tmp/.bcoin/blocks/blk00020.dat' }; }; + store.db.has = () => false; fs.open = () => 7; fs.close = () => undefined; fs.write = (fd, buffer, offset, length, position) => { @@ -685,6 +690,21 @@ describe('BlockStore', function() { } }); + it('will not duplicate a block on disk', async () => { + const block = random.randomBytes(128); + const hash = random.randomBytes(32); + + const first = await store.write(hash, block); + assert.equal(first, true); + const second = await store.write(hash, block); + assert.equal(second, false); + + const pruned = await store.prune(hash); + assert.equal(pruned, true); + + assert.equal(await fs.exists(store.filepath(types.BLOCK, 0)), false); + }); + it('will return null if block not found', async () => { const hash = random.randomBytes(32); const block = await store.read(hash); From 8fc605c4a92849068675bd4fbbe4677e04584794 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Fri, 15 Mar 2019 12:31:26 -0700 Subject: [PATCH 21/31] blockstore: index after write interrupt and use less memory --- lib/blockstore/file.js | 23 +++++++++++--- test/blockstore-test.js | 70 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 89 insertions(+), 4 deletions(-) diff --git a/lib/blockstore/file.js b/lib/blockstore/file.js index ff5feecf..01c3dbbd 100644 --- a/lib/blockstore/file.js +++ b/lib/blockstore/file.js @@ -12,7 +12,7 @@ const assert = require('bsert'); const fs = require('bfile'); const bio = require('bufio'); const Network = require('../protocol/network'); -const Block = require('../primitives/block'); +const Headers = require('../primitives/headers'); const AbstractBlockStore = require('./abstract'); const {BlockRecord, FileRecord} = require('./records'); const layout = require('./layout'); @@ -97,16 +97,29 @@ class FileBlockStore extends AbstractBlockStore { while (reader.left() >= 4) { magic = reader.readU32(); + + // Move forward a byte from the last read + // if the magic 
doesn't match. if (magic !== this.network.magic) { - reader.seek(4); + reader.seek(-3); continue; } const length = reader.readU32(); const position = reader.offset; - const block = Block.fromReader(reader); - const hash = block.hash(); + let header = null; + + try { + header = Headers.fromReader(reader); + const read = reader.offset - position; + reader.seek(length - read); + } catch (err) { + this.logger.warning( + 'Unknown block in file: %s, reason: %s', + filepath, err.message); + continue; + } const blockrecord = new BlockRecord({ file: fileno, @@ -114,6 +127,8 @@ class FileBlockStore extends AbstractBlockStore { length: length }); + const hash = header.hash(); + blocks += 1; b.put(layout.b.encode(types.BLOCK, hash), blockrecord.toRaw()); } diff --git a/test/blockstore-test.js b/test/blockstore-test.js index 5869a87c..30474dcd 100644 --- a/test/blockstore-test.js +++ b/test/blockstore-test.js @@ -18,6 +18,10 @@ const vectors = [ common.readBlock('block898352') ]; +const extra = [ + common.readBlock('block482683') +]; + const { AbstractBlockStore, FileBlockStore, @@ -864,6 +868,72 @@ describe('BlockStore', function() { assert.bufferEqual(block, expect.block); } }); + + it('will import from files after write interrupt', async () => { + const blocks = []; + + for (let i = 0; i < vectors.length; i++) { + const [block] = vectors[i].getBlock(); + const hash = block.hash(); + const raw = block.toRaw(); + + blocks.push({hash, block: raw}); + await store.write(hash, raw); + } + + await store.close(); + + assert.equal(await fs.exists(store.filepath(types.BLOCK, 0)), true); + assert.equal(await fs.exists(store.filepath(types.BLOCK, 1)), true); + assert.equal(await fs.exists(store.filepath(types.BLOCK, 2)), false); + + // Write partial block as would be the case in a + // block write interrupt. + const [partial] = extra[0].getBlock(); + { + // Include all of the header, but not the block. 
+ let raw = partial.toRaw(); + const actual = raw.length; + const part = raw.length - 1; + raw = raw.slice(0, part); + + const filepath = store.filepath(types.BLOCK, 1); + + const fd = await fs.open(filepath, 'a'); + + const bw = bio.write(8); + bw.writeU32(store.network.magic); + bw.writeU32(actual); + const magic = bw.render(); + + const mwritten = await fs.write(fd, magic, 0, 8); + const bwritten = await fs.write(fd, raw, 0, part); + + await fs.close(fd); + + assert.equal(mwritten, 8); + assert.equal(bwritten, part); + } + + await rimraf(resolve(location, './index')); + + store = new FileBlockStore({ + location: location, + maxFileLength: 1024 + }); + + await store.open(); + + const incomplete = await store.read(partial.hash()); + assert(incomplete === null); + + for (let i = 0; i < vectors.length; i++) { + const expect = blocks[i]; + const block = await store.read(expect.hash); + assert.equal(block.length, expect.block.length); + assert.bufferEqual(block, expect.block); + } + }); }); describe('LevelBlockStore', function() { From 961f6eddb5f77fa287fc4801ad1418151954010e Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Mon, 18 Mar 2019 13:31:30 -0700 Subject: [PATCH 22/31] blockstore: index undo blocks from files --- lib/blockstore/file.js | 96 ++++++++++++++++++++++++++++++----------- test/blockstore-test.js | 47 +++++++++++++++++--- 2 files changed, 114 insertions(+), 29 deletions(-) diff --git a/lib/blockstore/file.js b/lib/blockstore/file.js index 01c3dbbd..a687f224 100644 --- a/lib/blockstore/file.js +++ b/lib/blockstore/file.js @@ -60,14 +60,15 @@ class FileBlockStore extends AbstractBlockStore { /** * Compares the number of files in the directory - * with the recorded number of files. If there are any - * inconsistencies it will reindex all blocks. + * with the recorded number of files. 
+ * @param {Number} type - The type of block data * @private * @returns {Promise} */ - async index() { - const regexp = /^blk(\d{5})\.dat$/; + async check(type) { + const prefix = prefixes[type]; + const regexp = new RegExp(`^${prefix}(\\d{5})\\.dat$`); const all = await fs.readdir(this.location); const dats = all.filter(f => regexp.test(f)); const filenos = dats.map(f => parseInt(f.match(regexp)[1])); @@ -75,21 +76,36 @@ class FileBlockStore extends AbstractBlockStore { let missing = false; for (const fileno of filenos) { - const rec = await this.db.get(layout.f.encode(types.BLOCK, fileno)); + const rec = await this.db.get(layout.f.encode(type, fileno)); if (!rec) { missing = true; break; } } + return {missing, filenos}; + } + + /** + * Creates indexes from files for a block type. Reads the hash of + * the block data from the magic prefix, except for a block which + * the hash is read from the block header. + * @private + * @param {Number} type - The type of block data + * @returns {Promise} + */ + + async _index(type) { + const {missing, filenos} = await this.check(type); + if (!missing) return; - this.logger.info('Indexing FileBlockStore...'); + this.logger.info(`Indexing block type ${type}...`); for (const fileno of filenos) { const b = this.db.batch(); - const filepath = this.filepath(types.BLOCK, fileno); + const filepath = this.filepath(type, fileno); const data = await fs.readFile(filepath); const reader = bio.read(data); let magic = null; @@ -105,15 +121,24 @@ class FileBlockStore extends AbstractBlockStore { continue; } - const length = reader.readU32(); - const position = reader.offset; - - let header = null; + let hash = null; + let position = 0; + let length = 0; try { - header = Headers.fromReader(reader); - const read = reader.offset - position; - reader.seek(length - read); + length = reader.readU32(); + + if (type === types.BLOCK) { + position = reader.offset; + const header = Headers.fromReader(reader); + hash = header.hash(); + const read = 
reader.offset - position; + reader.seek(length - read); + } else { + hash = reader.readHash(); + position = reader.offset; + reader.seek(length); + } } catch (err) { this.logger.warning( 'Unknown block in file: %s, reason: %s', @@ -127,10 +152,8 @@ class FileBlockStore extends AbstractBlockStore { length: length }); - const hash = header.hash(); - blocks += 1; - b.put(layout.b.encode(types.BLOCK, hash), blockrecord.toRaw()); + b.put(layout.b.encode(type, hash), blockrecord.toRaw()); } const filerecord = new FileRecord({ @@ -139,7 +162,7 @@ class FileBlockStore extends AbstractBlockStore { length: this.maxFileLength }); - b.put(layout.f.encode(types.BLOCK, fileno), filerecord.toRaw()); + b.put(layout.f.encode(type, fileno), filerecord.toRaw()); await b.write(); @@ -147,6 +170,19 @@ class FileBlockStore extends AbstractBlockStore { } } + /** + * Compares the number of files in the directory + * with the recorded number of files. If there are any + * inconsistencies it will reindex all blocks. + * @private + * @returns {Promise} + */ + + async index() { + await this._index(types.BLOCK); + await this._index(types.UNDO); + } + /** * This method ensures that both the block storage directory * and index directory exist. @@ -317,10 +353,27 @@ class FileBlockStore extends AbstractBlockStore { return false; } - const mlength = 8; + let mlength = 8; + + // Hash for a block is not stored with + // the magic prefix as it's read from the header + // of the block data. 
+ if (type !== types.BLOCK) + mlength += 32; + const blength = data.length; const length = data.length + mlength; + const bwm = bio.write(mlength); + + bwm.writeU32(this.network.magic); + bwm.writeU32(blength); + + if (type !== types.BLOCK) + bwm.writeHash(hash); + + const magic = bwm.render(); + const { fileno, filerecord, @@ -330,11 +383,6 @@ class FileBlockStore extends AbstractBlockStore { const mposition = filerecord.used; const bposition = filerecord.used + mlength; - const bwm = bio.write(mlength); - bwm.writeU32(this.network.magic); - bwm.writeU32(blength); - const magic = bwm.render(); - const fd = await fs.open(filepath, 'r+'); const mwritten = await fs.write(fd, magic, 0, mlength, mposition); diff --git a/test/blockstore-test.js b/test/blockstore-test.js index 30474dcd..160538de 100644 --- a/test/blockstore-test.js +++ b/test/blockstore-test.js @@ -22,6 +22,15 @@ const extra = [ common.readBlock('block482683') ]; +const undos = [ + common.readBlock('block300025'), + common.readBlock('block928816'), + common.readBlock('block928828'), + common.readBlock('block928831'), + common.readBlock('block928848'), + common.readBlock('block928849') +]; + const { AbstractBlockStore, FileBlockStore, @@ -606,11 +615,8 @@ describe('BlockStore', function() { const first = await fs.stat(store.filepath(types.UNDO, 0)); const second = await fs.stat(store.filepath(types.UNDO, 1)); const third = await fs.stat(store.filepath(types.UNDO, 2)); - assert.equal(first.size, 952); - assert.equal(second.size, 952); - assert.equal(third.size, 272); - const magic = (8 * 16); + const magic = (40 * 16); const len = first.size + second.size + third.size - magic; assert.equal(len, 128 * 16); @@ -791,7 +797,7 @@ describe('BlockStore', function() { const second = await fs.stat(store.filepath(types.UNDO, 1)); const third = await fs.stat(store.filepath(types.UNDO, 2)); - const magic = (8 * 16); + const magic = (40 * 16); const len = first.size + second.size + third.size - magic; assert.equal(len, 
128 * 16); @@ -934,6 +940,37 @@ describe('BlockStore', function() { assert.bufferEqual(block, expect.block); } }); + + it('will import undo blocks from files', async () => { + const blocks = []; + + for (let i = 0; i < undos.length; i++) { + const [block] = undos[i].getBlock(); + const raw = undos[i].undoRaw; + const hash = block.hash(); + + blocks.push({hash, block: raw}); + await store.writeUndo(hash, raw); + } + + await store.close(); + + await rimraf(resolve(location, './index')); + + store = new FileBlockStore({ + location: location, + maxFileLength: 1024 + }); + + await store.open(); + + for (let i = 0; i < undos.length; i++) { + const expect = blocks[i]; + const block = await store.readUndo(expect.hash); + assert.equal(block.length, expect.block.length); + assert.bufferEqual(block, expect.block); + } + }); }); describe('LevelBlockStore', function() { From 6af6353906a01b2011559399ca6121812c0e4269 Mon Sep 17 00:00:00 2001 From: Javed Khan Date: Sat, 16 Mar 2019 10:02:53 +0530 Subject: [PATCH 23/31] blockstore: add README and docs --- lib/blockstore/README.md | 147 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 147 insertions(+) create mode 100644 lib/blockstore/README.md diff --git a/lib/blockstore/README.md b/lib/blockstore/README.md new file mode 100644 index 00000000..37fff012 --- /dev/null +++ b/lib/blockstore/README.md @@ -0,0 +1,147 @@ +# BlockStore + +BlockStore `lib/blockstore` is a bcoin module intended to be used as a backend +for storing block and undo coin data. It includes a backend that uses flat +files for storage. Its key benefit is performance improvements across the +board in disk I/O, which is the major bottleneck for the initial block sync. + +Blocks are stored in wire format directly to the disk, while some additional +metadata is stored in a key-value store, i.e. LevelDB, to help with the data +management. 
Both the flat files and the metadata db, are exposed through a
+unified interface so that the users can simply read and write blocks without
+having to worry about managing data layout on the disk.
+
+In addition to blocks, undo coin data, which is used to revert the changes
+applied by a block (in case of a re-org), is also stored on disk, in a similar
+fashion.
+
+## Interface
+
+The `AbstractBlockStore` interface defines the following abstract methods to be
+defined by concrete implementations:
+
+### Basic housekeeping
+
+* `ensure()`
+* `open()`
+* `close()`
+
+### Block I/O
+
+* `read(hash, offset, size)`
+* `write(hash, data)`
+* `prune(hash)`
+* `has(hash)`
+
+### Undo Coins I/O
+
+* `readUndo(hash)`
+* `writeUndo(hash, data)`
+* `pruneUndo(hash)`
+* `hasUndo(hash)`
+
+The interface is implemented by `FileBlockStore` and `LevelBlockStore`, backed
+by flat files and LevelDB respectively. We will focus here on the
+`FileBlockStore`, which is the backend that implements a flat file based
+storage.
+
+## FileBlockStore
+
+`FileBlockStore` implements the flat file backend for `AbstractBlockStore`. As
+the name suggests, it uses flat files for block/undo data and LevelDB for
+metadata.
+
+Let's create a file blockstore, write a block and walk-through the disk storage:
+
+```js
+// nodejs
+const store = blockstore.create({
+  network: 'regtest',
+  prefix: '/tmp/blockstore'
+});
+await store.ensure();
+await store.open();
+await store.write(hash, block);
+```
+
+```sh
+// shell
+tree /tmp/blockstore/
+/tmp/blockstore/
+└── blocks
+    ├── blk00000.dat
+    └── index
+        ├── LOG
+    ...
+```
+
+As we can see, the store writes to the file `blk00000.dat` in
+`/tmp/blockstore/blocks/`, and the metadata is written to
+`/tmp/blockstore/index`.
+
+Raw blocks are written to the disk in flat files named `blkXXXXX.dat`, where
+`XXXXX` is the number of file being currently written, starting at
+`blk00000.dat`. 
We store the file number as an integer in the metadata db, +expanding the digits to five places. + +The metadata db key `layout.F` tracks the last file used for writing. Each +file in turn tracks the number of blocks in it, the number of bytes used and +its max length. This data is stored in the db key `layout.f`. + + f['block'][0] => [1, 5, 128] // blk00000.dat: 1 block written, 5 bytes used, 128 bytes length + F['block'] => 0 // writing to file blk00000.dat + +Each raw block data is preceded by a magic marker defined as follows, to help +identify data written by us: + + magic (8 bytes) = network.magic (4 bytes) + block data length (4 bytes) + +For raw undo block data, the hash of the block is also included: + + magic (40 bytes) = network.magic (4 bytes) + length (4 bytes) + hash (32 bytes) + +But a marker alone is not sufficient to track the data we write to the files. +For each block we write, we need to store a pointer to the position in the file +where to start reading, and the size of the data we need to seek. This data is +stored in the metadata db using the key `layout.b`: + + b['block']['hash'] => [0, 8, 285] // 'hash' points to file blk00000.dat, position 8, size 285 + +Using this we know that our block is in `blk00000.dat`, bytes 8 through 285. + +Note that the position indicates that the block data is preceded by 8 bytes of +the magic marker. 
+
+
+Examples:
+
+> `store.write('hash', 'block')`
+
+    blk00000:
+    0xfabfb5da05000000 block
+
+    index:
+    b['block']['hash'] => [0, 8, 5]
+    f['block'][0] => [1, 13, 128]
+    F['block'] => 0
+
+> `store.write('hash1', 'block1')`
+
+    blk00000:
+    0xfabfb5da05000000 block 0xfabfb5da06000000 block1
+
+    index:
+    b['block']['hash'] => [0, 8, 5]
+    b['block']['hash1'] => [0, 13, 6]
+    f['block'][0] => [2, 19, 128]
+    F['block'] => 0
+
+> `store.prune('hash1')`
+
+    blk00000:
+    0xfabfb5da05000000 block 0xfabfb5da06000000 block1
+
+    index:
+    b['block']['hash'] => [0, 8, 5]
+    f['block'][0] => [1, 19, 128]
+    F['block'] => 0

From f24f64b48334a032010911617eda72ae39b6af0a Mon Sep 17 00:00:00 2001
From: Braydon Fuller
Date: Mon, 18 Mar 2019 15:23:47 -0700
Subject: [PATCH 24/31] changelog: add blockstore changes and migration notes

---
 CHANGELOG.md | 23 ++++++++++++++++++++++-
 1 file changed, 22 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 19cde791..2b50d535 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,26 @@
 # Bcoin release notes & changelog
 
-## v2.0.0
+## v2.0.0-dev
+
+### How to upgrade
+
+The way that block data is stored has changed for greater performance,
+efficiency, reliability and portability. To upgrade to the new disk layout
+it's necessary to move block data from LevelDB (e.g. `~/.bcoin/chain`) to
+a new file based block storage (e.g. `~/.bcoin/blocks`).
+
+To do this you can run:
+```
+node ./migrate/chaindb4to5.js /path/to/bcoin/chain
+```
+
+The block data will now be stored at `/path/to/bcoin/blocks`, after
+the data has been moved the chain database will be compacted to free
+disk space.
+
+Alternatively, you can also sync the chain again, however the above
+migration will be faster as additional network bandwidth won't be used
+for downloading the blocks again.
 
 ### Wallet API changes
@@ -79,6 +99,7 @@
 
 ### Other changes
 
+- A new module for storing block data in files. 
- Use of `buffer-map` for storing hashes (see https://github.com/bcoin-org/bcoin/issues/533). - Use of `bsert` for assertions. From f3e517c3c0a3a17da4db2d351cb867ef9028ba52 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Tue, 19 Mar 2019 10:34:14 -0700 Subject: [PATCH 25/31] blockstore: fix logger, improve messages --- lib/blockstore/file.js | 6 +++--- lib/blockstore/level.js | 2 +- test/blockstore-test.js | 17 +++++++++++++++++ 3 files changed, 21 insertions(+), 4 deletions(-) diff --git a/lib/blockstore/file.js b/lib/blockstore/file.js index a687f224..51cc31d8 100644 --- a/lib/blockstore/file.js +++ b/lib/blockstore/file.js @@ -32,7 +32,7 @@ class FileBlockStore extends AbstractBlockStore { */ constructor(options) { - super(); + super(options); assert(isAbsolute(options.location), 'Location not absolute.'); @@ -101,7 +101,7 @@ class FileBlockStore extends AbstractBlockStore { if (!missing) return; - this.logger.info(`Indexing block type ${type}...`); + this.logger.info('Indexing block type %d...', type); for (const fileno of filenos) { const b = this.db.batch(); @@ -166,7 +166,7 @@ class FileBlockStore extends AbstractBlockStore { await b.write(); - this.logger.info(`Indexed ${blocks} blocks from ${filepath}...`); + this.logger.info('Indexed %d blocks (file=%s).', blocks, filepath); } } diff --git a/lib/blockstore/level.js b/lib/blockstore/level.js index ebc09675..bb0f7f6c 100644 --- a/lib/blockstore/level.js +++ b/lib/blockstore/level.js @@ -26,7 +26,7 @@ class LevelBlockStore extends AbstractBlockStore { */ constructor(options) { - super(); + super(options); this.location = options.location; diff --git a/test/blockstore-test.js b/test/blockstore-test.js index 160538de..25ed17e9 100644 --- a/test/blockstore-test.js +++ b/test/blockstore-test.js @@ -292,6 +292,23 @@ describe('BlockStore', function() { }); describe('constructor', function() { + it('will pass options to super', () => { + const info = () => 'info'; + const logger = { + context: () => { + return 
{info}; + } + }; + + const store = new FileBlockStore({ + location: '/tmp/.bcoin/blocks', + maxFileLength: 1024, + logger: logger + }); + + assert.strictEqual(store.logger.info, info); + }); + it('will error with invalid location', () => { let err = null; From 8337176e07fe556369aa9979161db2bb8dd56423 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Tue, 2 Apr 2019 20:31:06 -0700 Subject: [PATCH 26/31] migrate: add time to migrate and compaction log message --- CHANGELOG.md | 6 +++--- migrate/chaindb4to5.js | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2b50d535..6821a499 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,9 +14,9 @@ To do this you can run: node ./migrate/chaindb4to5.js /path/to/bcoin/chain ``` -The block data will now be stored at `/path/to/bcoin/blocks`, after -the data has been moved the chain database will be compacted to free -disk space. +The migration will take 1-3 hours, depending on hardware. The block data +will now be stored at `/path/to/bcoin/blocks`, after the data has been moved +the chain database will be compacted to free disk space. 
Alternatively, you can also sync the chain again, however the above migration will be faster as additional network bandwidth won't be used diff --git a/migrate/chaindb4to5.js b/migrate/chaindb4to5.js index 8a2d946e..b9eb1f0b 100644 --- a/migrate/chaindb4to5.js +++ b/migrate/chaindb4to5.js @@ -129,6 +129,7 @@ async function migrateBlocks() { await migrateUndoBlocks(); await updateVersion(); + console.log('Compacting database'); await db.compactRange(); await db.close(); await blockStore.close(); From 41ce504175e578eb535b01ce6e47a6d364b795c4 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Tue, 2 Apr 2019 20:38:12 -0700 Subject: [PATCH 27/31] bcoin: expose blockstore module --- lib/bcoin.js | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/bcoin.js b/lib/bcoin.js index b5e2a018..72ab240b 100644 --- a/lib/bcoin.js +++ b/lib/bcoin.js @@ -55,6 +55,7 @@ bcoin.set = function set(network) { // Blockchain bcoin.define('blockchain', './blockchain'); +bcoin.define('blockstore', './blockstore'); bcoin.define('Chain', './blockchain/chain'); bcoin.define('ChainEntry', './blockchain/chainentry'); From 83824d73b11ef9e8384c60a0dad66415e1d549f4 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Wed, 3 Apr 2019 17:23:47 -0700 Subject: [PATCH 28/31] blockstore: optimize block read and index There is potential for around a 10% to 23% increase to the performance of block reads by using `allocUnsafe`, however there is already around a 3 to 6 times increase to the performance. While it's safe to use `allocUnsafe` as the number of bytes read is checked to be the same size as the buffer allocation, there is a potential for test cases to introduce _other_ behavior for `fs.read` that may not have the same behavior, though this isn't currently the case. 
--- lib/blockstore/file.js | 15 +++++++------ test/blockstore-test.js | 49 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 57 insertions(+), 7 deletions(-) diff --git a/lib/blockstore/file.js b/lib/blockstore/file.js index 51cc31d8..0f6f2c7b 100644 --- a/lib/blockstore/file.js +++ b/lib/blockstore/file.js @@ -11,8 +11,8 @@ const bdb = require('bdb'); const assert = require('bsert'); const fs = require('bfile'); const bio = require('bufio'); +const hash256 = require('bcrypto/lib/hash256'); const Network = require('../protocol/network'); -const Headers = require('../primitives/headers'); const AbstractBlockStore = require('./abstract'); const {BlockRecord, FileRecord} = require('./records'); const layout = require('./layout'); @@ -130,10 +130,8 @@ class FileBlockStore extends AbstractBlockStore { if (type === types.BLOCK) { position = reader.offset; - const header = Headers.fromReader(reader); - hash = header.hash(); - const read = reader.offset - position; - reader.seek(length - read); + hash = hash256.digest(reader.readBytes(80, true)); + reader.seek(length - 80); } else { hash = reader.readHash(); position = reader.offset; @@ -271,7 +269,7 @@ class FileBlockStore extends AbstractBlockStore { const last = await this.db.get(layout.F.encode(type)); if (last) - fileno = bio.read(last).readU32(); + fileno = bio.readU32(last, 0); filepath = this.filepath(type, fileno); @@ -482,9 +480,12 @@ class FileBlockStore extends AbstractBlockStore { const data = Buffer.alloc(length); const fd = await fs.open(filepath, 'r'); - await fs.read(fd, data, 0, length, position); + const bytes = await fs.read(fd, data, 0, length, position); await fs.close(fd); + if (bytes !== length) + throw new Error('Wrong number of bytes read.'); + return data; } diff --git a/test/blockstore-test.js b/test/blockstore-test.js index 25ed17e9..d15c51dc 100644 --- a/test/blockstore-test.js +++ b/test/blockstore-test.js @@ -505,6 +505,55 @@ describe('BlockStore', function() { 
assert.equal(err.message, 'Could not write block.'); }); }); + + describe('read', function() { + const read = fs.read; + const open = fs.open; + const close = fs.close; + let get = null; + let raw = null; + + before(() => { + const record = new BlockRecord({ + file: 1, + position: 8, + length: 100 + }); + raw = record.toRaw(); + }); + + beforeEach(() => { + get = store.db.get; + }); + + afterEach(() => { + // Restore stubbed methods. + store.db.get = get; + fs.read = read; + fs.open = open; + fs.close = close; + }); + + it('will error if total read bytes not correct', async () => { + let err = null; + + store.db.get = () => raw; + fs.open = () => 7; + fs.close = () => undefined; + fs.read = () => 99; + + try { + const hash = random.randomBytes(128); + const block = random.randomBytes(32); + await store.read(hash, block); + } catch (e) { + err = e; + } + + assert(err, 'Expected error.'); + assert.equal(err.message, 'Wrong number of bytes read.'); + }); + }); }); describe('FileBlockStore (Integration 1)', function() { From 3457ccc91b21cb26ce3097fce913c1bcfcb9c784 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Wed, 3 Apr 2019 17:24:54 -0700 Subject: [PATCH 29/31] blockchain: remove blocks after commit --- lib/blockchain/chaindb.js | 41 ++++++++++++++++++++++++++------------- 1 file changed, 28 insertions(+), 13 deletions(-) diff --git a/lib/blockchain/chaindb.js b/lib/blockchain/chaindb.js index eef121c4..f24e6f97 100644 --- a/lib/blockchain/chaindb.js +++ b/lib/blockchain/chaindb.js @@ -762,7 +762,6 @@ class ChainDB { const start = pruneAfter + 1; const end = height - keepBlocks; - const b = this.db.batch(); for (let i = start; i <= end; i++) { const hash = await this.getHash(i); @@ -780,16 +779,12 @@ class ChainDB { const flags = ChainFlags.fromOptions(options); assert(flags.prune); - b.put(layout.O.encode(), flags.toRaw()); - - await b.write(); + await this.db.put(layout.O.encode(), flags.toRaw()); } catch (e) { options.prune = false; throw e; } - await 
this.db.compactRange(); - return true; } @@ -1466,6 +1461,10 @@ class ChainDB { await this.commit(); + // Remove undo data _after_ successful commit. + if (this.blocks) + await this.blocks.pruneUndo(entry.hash); + return view; } @@ -1582,6 +1581,12 @@ class ChainDB { await this.commit(); + // Remove block and undo data _after_ successful commit. + if (this.blocks) { + await this.blocks.pruneUndo(tip.hash); + await this.blocks.prune(tip.hash); + } + // Update caches _after_ successful commit. this.cacheHeight.remove(tip.height); this.cacheHash.remove(tip.hash); @@ -1605,15 +1610,23 @@ class ChainDB { // one giant atomic write! this.start(); + let hashes = []; + try { for (const tip of tips) - await this._removeChain(tip); + hashes = hashes.concat(await this._removeChain(tip)); } catch (e) { this.drop(); throw e; } await this.commit(); + + // SPV doesn't store blocks. + if (this.blocks) { + for (const hash of hashes) + await this.blocks.prune(hash); + } } /** @@ -1631,6 +1644,8 @@ class ChainDB { this.logger.debug('Removing alternate chain: %h.', tip.hash); + const hashes = []; + for (;;) { if (await this.isMainChain(tip)) break; @@ -1641,7 +1656,10 @@ class ChainDB { this.del(layout.p.encode(tip.hash)); this.del(layout.h.encode(tip.hash)); this.del(layout.e.encode(tip.hash)); - await this.blocks.prune(tip.hash); + + // Queue block to be pruned on + // successful write. + hashes.push(tip.hash); // Queue up hash to be removed // on successful write. @@ -1650,6 +1668,8 @@ class ChainDB { tip = await this.getPrevious(tip); assert(tip); } + + return hashes; } /** @@ -1692,8 +1712,6 @@ class ChainDB { if (!block) throw new Error('Block not found.'); - await this.blocks.prune(block.hash()); - return this.disconnectBlock(entry, block); } @@ -1821,9 +1839,6 @@ class ChainDB { // Commit new coin state. this.saveView(view); - // Remove undo coins. 
- await this.blocks.pruneUndo(hash); - return view; } From 50fe51ca3251f13e8f93325cca841e2252fc0961 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Wed, 3 Apr 2019 17:34:35 -0700 Subject: [PATCH 30/31] blockstore: close file if write or read throws --- lib/blockstore/file.js | 20 ++++++++++---- test/blockstore-test.js | 61 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 76 insertions(+), 5 deletions(-) diff --git a/lib/blockstore/file.js b/lib/blockstore/file.js index 0f6f2c7b..02733336 100644 --- a/lib/blockstore/file.js +++ b/lib/blockstore/file.js @@ -383,10 +383,15 @@ class FileBlockStore extends AbstractBlockStore { const fd = await fs.open(filepath, 'r+'); - const mwritten = await fs.write(fd, magic, 0, mlength, mposition); - const bwritten = await fs.write(fd, data, 0, blength, bposition); + let mwritten = 0; + let bwritten = 0; - await fs.close(fd); + try { + mwritten = await fs.write(fd, magic, 0, mlength, mposition); + bwritten = await fs.write(fd, data, 0, blength, bposition); + } finally { + await fs.close(fd); + } if (mwritten !== mlength) { this.writing = false; @@ -480,8 +485,13 @@ class FileBlockStore extends AbstractBlockStore { const data = Buffer.alloc(length); const fd = await fs.open(filepath, 'r'); - const bytes = await fs.read(fd, data, 0, length, position); - await fs.close(fd); + let bytes = 0; + + try { + bytes = await fs.read(fd, data, 0, length, position); + } finally { + await fs.close(fd); + } if (bytes !== length) throw new Error('Wrong number of bytes read.'); diff --git a/test/blockstore-test.js b/test/blockstore-test.js index d15c51dc..048c438b 100644 --- a/test/blockstore-test.js +++ b/test/blockstore-test.js @@ -504,6 +504,41 @@ describe('BlockStore', function() { assert(err, 'Expected error.'); assert.equal(err.message, 'Could not write block.'); }); + + it('will close file if write throws', async () => { + let err = null; + let closed = null; + + store.allocate = () => { + return { + fileno: 20, + filerecord: { + 
used: 0 + }, + filepath: '/tmp/.bcoin/blocks/blk00020.dat' + }; + }; + store.db.has = () => false; + fs.open = () => 7; + fs.close = (fd) => { + closed = fd; + }; + fs.write = () => { + throw new Error('Test.'); + }; + + try { + const hash = random.randomBytes(128); + const block = random.randomBytes(32); + await store.write(hash, block); + } catch (e) { + err = e; + } + + assert(err, 'Expected error.'); + assert.equal(err.message, 'Test.'); + assert.equal(closed, 7); + }); }); describe('read', function() { @@ -553,6 +588,32 @@ describe('BlockStore', function() { assert(err, 'Expected error.'); assert.equal(err.message, 'Wrong number of bytes read.'); }); + + it('will close file if read throws', async () => { + let err = null; + let closed = null; + + store.db.get = () => raw; + fs.open = () => 7; + fs.close = (fd) => { + closed = fd; + }; + fs.read = () => { + throw new Error('Test.'); + }; + + try { + const hash = random.randomBytes(128); + const block = random.randomBytes(32); + await store.read(hash, block); + } catch (e) { + err = e; + } + + assert(err, 'Expected error.'); + assert.equal(err.message, 'Test.'); + assert.equal(closed, 7); + }); }); }); From 54383578fb4788ecffeee04c835354d83f45424a Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Thu, 11 Apr 2019 09:58:20 -0700 Subject: [PATCH 31/31] test: configurable and unique test directory path - Multiple parallel runs of the same test will not conflict as a unique identifier is added to the test directory. - The base test directory can be configured for various environments, and can be changed via the environment variable `TEMP`, see the implementation of `os.tmpdir()` for specific details. 
--- test/blockstore-test.js | 8 ++++---- test/util/common.js | 11 ++++++++++- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/test/blockstore-test.js b/test/blockstore-test.js index 048c438b..39a777dc 100644 --- a/test/blockstore-test.js +++ b/test/blockstore-test.js @@ -9,7 +9,7 @@ const assert = require('./util/assert'); const common = require('./util/common'); const {resolve} = require('path'); const fs = require('bfile'); -const {rimraf} = require('./util/common'); +const {rimraf, testdir} = require('./util/common'); const random = require('bcrypto/lib/random'); const vectors = [ @@ -618,7 +618,7 @@ describe('BlockStore', function() { }); describe('FileBlockStore (Integration 1)', function() { - const location = '/tmp/bcoin-blockstore-test'; + const location = testdir('blockstore'); let store = null; beforeEach(async () => { @@ -948,7 +948,7 @@ describe('BlockStore', function() { }); describe('FileBlockStore (Integration 2)', function() { - const location = '/tmp/bcoin-blockstore-test'; + const location = testdir('blockstore'); let store = null; beforeEach(async () => { @@ -1101,7 +1101,7 @@ describe('BlockStore', function() { }); describe('LevelBlockStore', function() { - const location = '/tmp/bcoin-blockstore-test'; + const location = testdir('blockstore'); let store = null; beforeEach(async () => { diff --git a/test/util/common.js b/test/util/common.js index bdaa8620..39177fc5 100644 --- a/test/util/common.js +++ b/test/util/common.js @@ -1,9 +1,11 @@ 'use strict'; const assert = require('assert'); +const {tmpdir} = require('os'); const path = require('path'); const fs = require('bfile'); const bio = require('bufio'); +const {randomBytes} = require('bcrypto/lib/random'); const Block = require('../../lib/primitives/block'); const MerkleBlock = require('../../lib/primitives/merkleblock'); const Headers = require('../../lib/primitives/headers'); @@ -85,8 +87,15 @@ common.writeTX = function writeTX(name, tx, view) { 
common.writeFile(`${name}-undo.raw`, undoRaw); }; +common.testdir = function(name) { + assert(/^[a-z]+$/.test(name), 'Invalid name'); + + const uniq = randomBytes(4).toString('hex'); + return path.join(tmpdir(), `bcoin-test-${name}-${uniq}`); +}; + common.rimraf = async function(p) { - const allowed = new RegExp('^\/tmp\/(.*)$'); + const allowed = /bcoin\-test\-[a-z]+\-[a-f0-9]{8}(\/[a-z]+)?$/; if (!allowed.test(p)) throw new Error(`Path not allowed: ${p}.`);