blockstore: add file block storage

This commit is contained in:
Braydon Fuller 2019-02-19 17:41:17 -08:00
parent f57bd51187
commit 8435a116f1
No known key found for this signature in database
GPG Key ID: F24F232D108B3AD4
8 changed files with 1435 additions and 0 deletions

100
lib/blockstore/abstract.js Normal file
View File

@@ -0,0 +1,100 @@
/*!
* blockstore/abstract.js - abstract block store for bcoin
* Copyright (c) 2019, Braydon Fuller (MIT License).
* https://github.com/bcoin-org/bcoin
*/
'use strict';
const Logger = require('blgr');
/**
 * Abstract Block Store
 *
 * Defines the interface that every block store implements. Each
 * operation here is a stub that throws; concrete stores override
 * all of them.
 *
 * @alias module:blockstore.AbstractBlockStore
 * @abstract
 */

class AbstractBlockStore {
  /**
   * Create an abstract blockstore.
   * @constructor
   * @param {Object} [options]
   * @param {Logger} [options.logger] - Logger to derive the
   * 'blockstore' context from (falls back to the global logger).
   */
  constructor(options) {
    this.options = options || {};

    const logger = this.options.logger != null
      ? this.options.logger
      : Logger.global;

    this.logger = logger.context('blockstore');
  }

  /**
   * This method opens any necessary resources and
   * initializes the store to be ready to be queried.
   * @returns {Promise}
   */
  async open() {
    throw new Error('Abstract method.');
  }

  /**
   * This method closes resources and prepares
   * store to be closed.
   * @returns {Promise}
   */
  async close() {
    throw new Error('Abstract method.');
  }

  /**
   * This method stores block data. The action should be idempotent.
   * If the data is already stored, the behavior will be the same. Any
   * concurrent requests to store the same data will produce the same
   * result, and will not conflict with each other.
   * @returns {Promise}
   */
  async write(hash, data) {
    throw new Error('Abstract method.');
  }

  /**
   * This method will retrieve block data. Smaller portions of
   * the block can be read by using the offset and size arguments.
   * @returns {Promise}
   */
  async read(hash, offset, size) {
    throw new Error('Abstract method.');
  }

  /**
   * This will free resources for storing the block data. This
   * may not mean that the block is deleted, but that it should
   * no longer consume any local storage resources.
   * @returns {Promise}
   */
  async prune(hash) {
    throw new Error('Abstract method.');
  }

  /**
   * This will check if a block has been stored and is available.
   * @returns {Promise}
   */
  async has(hash) {
    throw new Error('Abstract method.');
  }
}
/*
* Expose
*/
module.exports = AbstractBlockStore;

371
lib/blockstore/file.js Normal file
View File

@@ -0,0 +1,371 @@
/*!
* blockstore/file.js - file block store for bcoin
* Copyright (c) 2019, Braydon Fuller (MIT License).
* https://github.com/bcoin-org/bcoin
*/
'use strict';
const {isAbsolute, resolve, join} = require('path');
const bdb = require('bdb');
const assert = require('bsert');
const fs = require('bfile');
const bio = require('bufio');
const Network = require('../protocol/network');
const Block = require('../primitives/block');
const AbstractBlockStore = require('./abstract');
const {BlockRecord, FileRecord} = require('./records');
const layout = require('./layout');
/**
 * File Block Store
 *
 * Stores raw block data in sequential flat files (blk00000.dat,
 * blk00001.dat, ...). Each block is prefixed by the network magic
 * and its length. A database index maps block hashes to file
 * positions so blocks can be read back without scanning.
 *
 * @alias module:blockstore.FileBlockStore
 */

class FileBlockStore extends AbstractBlockStore {
  /**
   * Create a blockstore that stores blocks in files.
   * @constructor
   * @param {Object} options
   * @param {String} options.location - Absolute directory for block files.
   * @param {Number} [options.maxFileLength] - Maximum bytes per data
   * file (default: 128 MiB).
   * @param {(Network|String)} [options.network] - Network whose magic
   * bytes prefix each stored block (default: primary network).
   * @param {Logger} [options.logger] - Custom logger.
   */
  constructor(options) {
    // Pass options through so a caller-supplied logger is honored
    // (super() without arguments would silently ignore it).
    super(options);

    assert(isAbsolute(options.location), 'Location not absolute.');

    this.location = options.location;

    this.db = bdb.create({
      location: resolve(this.location, './index')
    });

    this.maxFileLength = options.maxFileLength || 128 * 1024 * 1024;

    this.network = Network.primary;
    if (options.network != null)
      this.network = Network.get(options.network);
  }

  /**
   * Compares the number of files in the directory
   * with the recorded number of files. If there are any
   * inconsistencies it will reindex all blocks.
   * @private
   * @returns {Promise}
   */
  async index() {
    const regexp = /^blk(\d{5})\.dat$/;
    const all = await fs.readdir(this.location);
    const dats = all.filter(f => regexp.test(f));
    const filenos = dats.map(f => parseInt(f.match(regexp)[1], 10));

    // Only reindex if a data file on disk has no file record
    // in the database.
    let missing = false;

    for (const fileno of filenos) {
      const rec = await this.db.get(layout.f.encode(fileno));
      if (!rec) {
        missing = true;
        break;
      }
    }

    if (!missing)
      return;

    this.logger.info('Indexing FileBlockStore...');

    for (const fileno of filenos) {
      const b = this.db.batch();
      const filepath = this.filepath(fileno);
      const data = await fs.readFile(filepath);
      const reader = bio.read(data);
      let magic = null;
      let blocks = 0;

      while (reader.left() >= 4) {
        magic = reader.readU32();

        // Scan forward one byte at a time until the network magic
        // is found (bytes before it may be garbage or padding).
        // readU32() advanced 4 bytes, so seek -3 to resume at the
        // byte after the one just examined. Seeking forward here
        // would skip data and miss unaligned blocks.
        if (magic !== this.network.magic) {
          reader.seek(-3);
          continue;
        }

        const length = reader.readU32();
        const position = reader.offset;

        const block = Block.fromReader(reader);
        const hash = block.hash();

        const blockrecord = new BlockRecord({
          file: fileno,
          position: position,
          length: length
        });

        blocks += 1;
        b.put(layout.b.encode(hash), blockrecord.toRaw());
      }

      const filerecord = new FileRecord({
        blocks: blocks,
        used: reader.offset,
        length: this.maxFileLength
      });

      b.put(layout.f.encode(fileno), filerecord.toRaw());

      await b.write();

      this.logger.info(`Indexed ${blocks} blocks from ${filepath}...`);
    }
  }

  /**
   * Opens the file block store. It will regenerate necessary block
   * indexing if the index is missing or inconsistent.
   * @returns {Promise}
   */
  async open() {
    this.logger.info('Opening FileBlockStore...');

    await this.db.open();
    await this.db.verify(layout.V.encode(), 'fileblockstore', 0);

    await this.index();
  }

  /**
   * This closes the file block store and underlying
   * databases for indexing.
   * @returns {Promise}
   */
  async close() {
    this.logger.info('Closing FileBlockStore...');
    await this.db.close();
  }

  /**
   * This method will determine the file path based on the file number
   * and the current block data location.
   * @param {Number} fileno - The number of the file.
   * @returns {String} - e.g. `<location>/blk00012.dat`.
   * @throws {Error} If the number does not fit in five digits.
   */
  filepath(fileno) {
    const pad = 5;

    let num = fileno.toString(10);

    if (num.length > pad)
      throw new Error('File number too large.');

    // Zero-pad to five digits.
    while (num.length < pad)
      num = `0${num}`;

    return join(this.location, `blk${num}.dat`);
  }

  /**
   * This method will select and potentially allocate a file to
   * write a block based on the size.
   * @param {Number} length - The number of bytes of the data to be written.
   * @returns {Promise} - Resolves to `{fileno, filerecord, filepath}`
   * for a file with room for `length` bytes.
   */
  async allocate(length) {
    if (length > this.maxFileLength)
      throw new Error('Block length above max file length.');

    let fileno = 0;
    let filerecord = null;
    let filepath = null;

    // `R` tracks the most recently used file number.
    const last = await this.db.get(layout.R.encode());

    if (last)
      fileno = bio.read(last).readU32();

    filepath = this.filepath(fileno);

    const rec = await this.db.get(layout.f.encode(fileno));

    if (rec) {
      filerecord = FileRecord.fromRaw(rec);
    } else {
      filerecord = new FileRecord({
        blocks: 0,
        used: 0,
        length: this.maxFileLength
      });
    }

    // Roll over to a fresh file if this one cannot fit the block.
    if (filerecord.used + length > filerecord.length) {
      fileno += 1;
      filepath = this.filepath(fileno);
      filerecord = new FileRecord({
        blocks: 0,
        used: 0,
        length: this.maxFileLength
      });
    }

    return {fileno, filerecord, filepath};
  }

  /**
   * This method stores block data in files.
   * @param {Buffer} hash - The block hash
   * @param {Buffer} data - The block data
   * @returns {Promise}
   */
  async write(hash, data) {
    const mlength = 8;
    const blength = data.length;
    const length = data.length + mlength;

    const {
      fileno,
      filerecord,
      filepath
    } = await this.allocate(length);

    const mposition = filerecord.used;
    const bposition = filerecord.used + mlength;

    // 8-byte prefix: network magic followed by the block length.
    const bwm = bio.write(mlength);
    bwm.writeU32(this.network.magic);
    bwm.writeU32(blength);
    const magic = bwm.render();

    const fd = await fs.open(filepath, 'a');

    let mwritten = 0;
    let bwritten = 0;

    try {
      mwritten = await fs.write(fd, magic, 0, mlength, mposition);
      bwritten = await fs.write(fd, data, 0, blength, bposition);
    } finally {
      // Always release the descriptor, even if a write fails.
      await fs.close(fd);
    }

    if (mwritten !== mlength)
      throw new Error('Could not write block magic.');

    if (bwritten !== blength)
      throw new Error('Could not write block.');

    filerecord.blocks += 1;
    filerecord.used += length;

    // Update block, file and last-file records atomically.
    const b = this.db.batch();

    const blockrecord = new BlockRecord({
      file: fileno,
      position: bposition,
      length: blength
    });

    b.put(layout.b.encode(hash), blockrecord.toRaw());
    b.put(layout.f.encode(fileno), filerecord.toRaw());

    const bw = bio.write(4);
    b.put(layout.R.encode(), bw.writeU32(fileno).render());

    await b.write();
  }

  /**
   * This method will retrieve block data. Smaller portions of the
   * block (e.g. transactions) can be read by using the offset and
   * length arguments.
   * @param {Buffer} hash - The block hash
   * @param {Number} [offset] - The offset within the block
   * @param {Number} [length] - The number of bytes of the data
   * @returns {Promise} - Resolves to a Buffer, or null if the block
   * is not stored.
   */
  async read(hash, offset, length) {
    const raw = await this.db.get(layout.b.encode(hash));

    if (!raw)
      return null;

    const blockrecord = BlockRecord.fromRaw(raw);
    const filepath = this.filepath(blockrecord.file);

    let position = blockrecord.position;

    if (offset)
      position += offset;

    if (!length)
      length = blockrecord.length;

    if (offset + length > blockrecord.length)
      throw new Error('Out-of-bounds read.');

    const data = Buffer.alloc(length);

    const fd = await fs.open(filepath, 'r');

    try {
      const bytes = await fs.read(fd, data, 0, length, position);

      // Guard against a short read (e.g. truncated data file).
      if (bytes !== length)
        throw new Error('Could not read block.');
    } finally {
      await fs.close(fd);
    }

    return data;
  }

  /**
   * This will free resources for storing the block data. The block
   * data may not be deleted from disk immediately, the index for
   * the block is removed and will not be able to be read. The underlying
   * file is unlinked when all blocks in a file have been pruned.
   * @param {Buffer} hash - The block hash
   * @returns {Promise} - Resolves to true if the block was pruned,
   * false if it was not found.
   */
  async prune(hash) {
    const braw = await this.db.get(layout.b.encode(hash));
    if (!braw)
      return false;

    const blockrecord = BlockRecord.fromRaw(braw);

    const fraw = await this.db.get(layout.f.encode(blockrecord.file));
    if (!fraw)
      return false;

    const filerecord = FileRecord.fromRaw(fraw);
    filerecord.blocks -= 1;

    const b = this.db.batch();

    if (filerecord.blocks === 0)
      b.del(layout.f.encode(blockrecord.file));
    else
      b.put(layout.f.encode(blockrecord.file), filerecord.toRaw());

    b.del(layout.b.encode(hash));

    await b.write();

    // Unlink only after the index entries are committed, so a
    // crash in between cannot leave dangling block records.
    if (filerecord.blocks === 0)
      await fs.unlink(this.filepath(blockrecord.file));

    return true;
  }

  /**
   * This will check if a block has been stored and is available.
   * @param {Buffer} hash - The block hash
   * @returns {Promise} - Resolves to a Boolean.
   */
  async has(hash) {
    return await this.db.has(layout.b.encode(hash));
  }
}
/*
* Expose
*/
module.exports = FileBlockStore;

15
lib/blockstore/index.js Normal file
View File

@@ -0,0 +1,15 @@
/*!
* blockstore/index.js - bitcoin blockstore for bcoin
* Copyright (c) 2019, Braydon Fuller (MIT License).
* https://github.com/bcoin-org/bcoin
*/
'use strict';
/**
* @module blockstore
*/
exports.AbstractBlockStore = require('./abstract');
exports.FileBlockStore = require('./file');
exports.LevelBlockStore = require('./level');

30
lib/blockstore/layout.js Normal file
View File

@@ -0,0 +1,30 @@
/*!
* blockstore/layout.js - file block store data layout for bcoin
* Copyright (c) 2019, Braydon Fuller (MIT License).
* https://github.com/bcoin-org/bcoin
*/
'use strict';
const bdb = require('bdb');
/*
* Database Layout:
* V -> db version
* R -> last file entry
* f[uint32] -> file entry
* b[hash] -> block entry
*/
const layout = {
  V: bdb.key('V'),             // db version
  R: bdb.key('R'),             // last file entry (uint32 file number)
  f: bdb.key('f', ['uint32']), // file entry: file number -> FileRecord
  b: bdb.key('b', ['hash256']) // block entry: block hash -> BlockRecord
};
/*
* Expose
*/
module.exports = layout;

128
lib/blockstore/level.js Normal file
View File

@@ -0,0 +1,128 @@
/*!
* blockstore/level.js - leveldb block store for bcoin
* Copyright (c) 2019, Braydon Fuller (MIT License).
* https://github.com/bcoin-org/bcoin
*/
'use strict';
const {isAbsolute, resolve} = require('path');
const bdb = require('bdb');
const assert = require('bsert');
const AbstractBlockStore = require('./abstract');
const layout = require('./layout');
/**
 * LevelDB Block Store
 *
 * Stores raw block data directly in a LevelDB database,
 * keyed by block hash.
 *
 * @alias module:blockstore.LevelBlockStore
 */

class LevelBlockStore extends AbstractBlockStore {
  /**
   * Create a blockstore that stores blocks in LevelDB.
   * @constructor
   * @param {Object} options
   * @param {String} options.location - Absolute path for the database.
   * @param {Logger} [options.logger] - Custom logger.
   */
  constructor(options) {
    // Pass options through so a caller-supplied logger is honored
    // (super() without arguments would silently ignore it).
    super(options);

    assert(isAbsolute(options.location), 'Location not absolute.');

    this.location = options.location;

    this.db = bdb.create({
      location: resolve(this.location, './index')
    });
  }

  /**
   * Opens the block storage.
   * @returns {Promise}
   */
  async open() {
    this.logger.info('Opening LevelBlockStore...');

    await this.db.open();
    await this.db.verify(layout.V.encode(), 'levelblockstore', 0);
  }

  /**
   * Closes the block storage.
   * @returns {Promise}
   */
  async close() {
    this.logger.info('Closing LevelBlockStore...');
    await this.db.close();
  }

  /**
   * This method stores block data in LevelDB.
   * @param {Buffer} hash - The block hash
   * @param {Buffer} data - The block data
   * @returns {Promise}
   */
  async write(hash, data) {
    // The put must be awaited/returned: previously it was a floating
    // promise, so callers could observe the write as complete before
    // it was, and any rejection was lost.
    return this.db.put(layout.b.encode(hash), data);
  }

  /**
   * This method will retrieve block data. Smaller portions of the
   * block (e.g. transactions) can be returned using the offset and
   * length arguments. However, the entire block will be read as the
   * data is stored in a key/value database.
   * @param {Buffer} hash - The block hash
   * @param {Number} [offset] - The offset within the block
   * @param {Number} [length] - The number of bytes of the data
   * @returns {Promise} - Resolves to a Buffer, or null if the block
   * is not stored.
   */
  async read(hash, offset, length) {
    let raw = await this.db.get(layout.b.encode(hash));

    // Match FileBlockStore semantics: resolve to null for unknown
    // blocks (this also prevents a TypeError below when an offset
    // is given for a missing block).
    if (!raw)
      return null;

    if (offset) {
      // Default to reading through the end of the block, as the
      // file store does.
      if (!length)
        length = raw.length - offset;

      if (offset + length > raw.length)
        throw new Error('Out-of-bounds read.');

      raw = raw.slice(offset, offset + length);
    }

    return raw;
  }

  /**
   * This will free resources for storing the block data. The block
   * data may not be immediately removed from disk, and will be reclaimed
   * during LevelDB compaction.
   * @param {Buffer} hash - The block hash
   * @returns {Promise} - Resolves to true if the block was pruned,
   * false if it was not found.
   */
  async prune(hash) {
    if (!await this.has(hash))
      return false;

    await this.db.del(layout.b.encode(hash));

    return true;
  }

  /**
   * This will check if a block has been stored and is available.
   * @param {Buffer} hash - The block hash
   * @returns {Promise} - Resolves to a Boolean.
   */
  async has(hash) {
    return this.db.has(layout.b.encode(hash));
  }
}
/*
* Expose
*/
module.exports = LevelBlockStore;

149
lib/blockstore/records.js Normal file
View File

@@ -0,0 +1,149 @@
/*!
* blockstore/records.js - block store records
* Copyright (c) 2019, Braydon Fuller (MIT License).
* https://github.com/bcoin-org/bcoin
*/
'use strict';
const assert = require('bsert');
const bio = require('bufio');
/**
* @module blockstore/records
*/
/**
 * Block Record
 *
 * Locates a block on disk: the file number it lives in, the byte
 * position within that file, and its length in bytes.
 */

class BlockRecord {
  /**
   * Create a block record.
   * @constructor
   * @param {Object} [options]
   * @param {Number} [options.file=0] - File number.
   * @param {Number} [options.position=0] - Byte offset within the file.
   * @param {Number} [options.length=0] - Block size in bytes.
   */
  constructor(options = {}) {
    this.file = options.file || 0;
    this.position = options.position || 0;
    this.length = options.length || 0;

    // Every field must fit in an unsigned 32-bit integer,
    // matching the serialized format.
    assert((this.file >>> 0) === this.file);
    assert((this.position >>> 0) === this.position);
    assert((this.length >>> 0) === this.length);
  }

  /**
   * Inject properties from serialized data.
   * @private
   * @param {Buffer} data
   * @returns {BlockRecord}
   */
  fromRaw(data) {
    const reader = bio.read(data);

    this.file = reader.readU32();
    this.position = reader.readU32();
    this.length = reader.readU32();

    return this;
  }

  /**
   * Instantiate block record from serialized data.
   * @param {Buffer} data
   * @returns {BlockRecord}
   */
  static fromRaw(data) {
    return new this().fromRaw(data);
  }

  /**
   * Serialize the block record as three u32 fields (12 bytes).
   * @returns {Buffer}
   */
  toRaw() {
    const writer = bio.write(12);

    writer.writeU32(this.file);
    writer.writeU32(this.position);
    writer.writeU32(this.length);

    return writer.render();
  }
}
/**
 * File Record
 *
 * Tracks usage of a single block data file: how many blocks it
 * holds, how many bytes are used, and its maximum length.
 */

class FileRecord {
  /**
   * Create a file record.
   * @constructor
   * @param {Object} [options]
   * @param {Number} [options.blocks=0] - Number of blocks in the file.
   * @param {Number} [options.used=0] - Bytes currently used.
   * @param {Number} [options.length=0] - Maximum file length in bytes.
   */
  constructor(options = {}) {
    this.blocks = options.blocks || 0;
    this.used = options.used || 0;
    this.length = options.length || 0;

    // Every field must fit in an unsigned 32-bit integer,
    // matching the serialized format.
    assert((this.blocks >>> 0) === this.blocks);
    assert((this.used >>> 0) === this.used);
    assert((this.length >>> 0) === this.length);
  }

  /**
   * Inject properties from serialized data.
   * @private
   * @param {Buffer} data
   * @returns {FileRecord}
   */
  fromRaw(data) {
    const reader = bio.read(data);

    this.blocks = reader.readU32();
    this.used = reader.readU32();
    this.length = reader.readU32();

    return this;
  }

  /**
   * Instantiate file record from serialized data.
   * @param {Buffer} data
   * @returns {FileRecord}
   */
  static fromRaw(data) {
    return new this().fromRaw(data);
  }

  /**
   * Serialize the file record as three u32 fields (12 bytes).
   * @returns {Buffer}
   */
  toRaw() {
    const writer = bio.write(12);

    writer.writeU32(this.blocks);
    writer.writeU32(this.used);
    writer.writeU32(this.length);

    return writer.render();
  }
}
/*
* Expose
*/
exports.BlockRecord = BlockRecord;
exports.FileRecord = FileRecord;
module.exports = exports;

634
test/blockstore-test.js Normal file
View File

@@ -0,0 +1,634 @@
/* eslint-env mocha */
/* eslint prefer-arrow-callback: "off" */
'use strict';
const Logger = require('blgr');
const assert = require('./util/assert');
const common = require('./util/common');
const {resolve} = require('path');
const fs = require('bfile');
const {rimraf} = require('./util/common');
const {mkdirp} = require('bfile');
const random = require('bcrypto/lib/random');
const vectors = [
common.readBlock('block300025'),
common.readBlock('block426884'),
common.readBlock('block898352')
];
const {
AbstractBlockStore,
FileBlockStore,
LevelBlockStore
} = require('../lib/blockstore');
const layout = require('../lib/blockstore/layout');
const {
BlockRecord,
FileRecord
} = require('../lib/blockstore/records');
// Test suite covering the blockstore module: the abstract interface,
// the serialization records, and both the file-backed and
// LevelDB-backed store implementations.
describe('BlockStore', function() {
  describe('Abstract', function() {
    let logger = null;

    // Stub logger factory: context(name) yields an object whose
    // info() echoes the context name, so the tests can observe
    // which context the store requested.
    function context(ctx) {
      return {info: () => ctx};
    }

    beforeEach(() => {
      // Swap out the global logger so the default-logger path is
      // observable; restored in afterEach.
      logger = Logger.global;
      Logger.global = {context};
    });

    afterEach(() => {
      Logger.global = logger;
    });

    it('construct with custom logger', async () => {
      const store = new AbstractBlockStore({logger: {context}});
      assert(store.logger);
      assert(store.logger.info);
      assert.equal(store.logger.info(), 'blockstore');
    });

    it('construct with default logger', async () => {
      const store = new AbstractBlockStore();
      assert(store.logger);
      assert(store.logger.info);
      assert.equal(store.logger.info(), 'blockstore');
    });

    it('has unimplemented base methods', async () => {
      const methods = ['open', 'close', 'write', 'read',
                       'prune', 'has'];

      const store = new AbstractBlockStore();

      // Every interface method must exist and throw until a
      // concrete store overrides it.
      for (const method of methods) {
        assert(store[method]);

        let err = null;
        try {
          await store[method]();
        } catch (e) {
          err = e;
        }
        assert(err, `Expected unimplemented method ${method}.`);
        assert.equal(err.message, 'Abstract method.');
      }
    });
  });

  describe('Records', function() {
    describe('BlockRecord', function() {
      // Expect the constructor to throw for the given options.
      function constructError(options) {
        let err = null;

        try {
          new BlockRecord({
            file: options.file,
            position: options.position,
            length: options.length
          });
        } catch (e) {
          err = e;
        }

        assert(err);
      }

      // Round-trip a record through toRaw/fromRaw and verify fields.
      function toAndFromRaw(options) {
        const rec1 = new BlockRecord(options);
        assert.equal(rec1.file, options.file);
        assert.equal(rec1.position, options.position);
        assert.equal(rec1.length, options.length);

        const raw = rec1.toRaw();

        const rec2 = BlockRecord.fromRaw(raw);
        assert.equal(rec2.file, options.file);
        assert.equal(rec2.position, options.position);
        assert.equal(rec2.length, options.length);
      }

      it('construct with correct options', () => {
        const rec = new BlockRecord({
          file: 12,
          position: 23392,
          length: 4194304
        });
        assert.equal(rec.file, 12);
        assert.equal(rec.position, 23392);
        assert.equal(rec.length, 4194304);
      });

      it('construct null record', () => {
        const rec = new BlockRecord();
        assert.equal(rec.file, 0);
        assert.equal(rec.position, 0);
        assert.equal(rec.length, 0);
      });

      // Fields serialize as u32, so negative or >32-bit values
      // must be rejected by the constructor.
      it('fail with signed number (file)', () => {
        constructError({file: -1, position: 1, length: 1});
      });

      it('fail with signed number (position)', () => {
        constructError({file: 1, position: -1, length: 1});
      });

      it('fail with signed number (length)', () => {
        constructError({file: 1, position: 1, length: -1});
      });

      it('fail with non-32-bit number (file)', () => {
        constructError({file: Math.pow(2, 32), position: 1, length: 1});
      });

      it('fail with non-32-bit number (position)', () => {
        constructError({file: 1, position: Math.pow(2, 32), length: 1});
      });

      it('fail with non-32-bit number (length)', () => {
        constructError({file: 1, position: 1, length: Math.pow(2, 32)});
      });

      it('construct with max 32-bit numbers', () => {
        const max = Math.pow(2, 32) - 1;

        const rec = new BlockRecord({
          file: max,
          position: max,
          length: max
        });

        assert(rec);
        assert.equal(rec.file, max);
        assert.equal(rec.position, max);
        assert.equal(rec.length, max);
      });

      it('serialize/deserialize file record (min)', () => {
        toAndFromRaw({file: 0, position: 0, length: 0});
      });

      it('serialize/deserialize file record', () => {
        toAndFromRaw({file: 12, position: 23392, length: 4194304});
      });

      it('serialize/deserialize file record (max)', () => {
        const max = Math.pow(2, 32) - 1;
        toAndFromRaw({file: max, position: max, length: max});
      });
    });

    describe('FileRecord', function() {
      // Expect the constructor to throw for the given options.
      function constructError(options) {
        let err = null;

        try {
          new FileRecord({
            blocks: options.blocks,
            used: options.used,
            length: options.length
          });
        } catch (e) {
          err = e;
        }

        assert(err);
      }

      // Round-trip a record through toRaw/fromRaw and verify fields.
      function toAndFromRaw(options) {
        const rec1 = new FileRecord(options);
        assert.equal(rec1.blocks, options.blocks);
        assert.equal(rec1.used, options.used);
        assert.equal(rec1.length, options.length);

        const raw = rec1.toRaw();

        const rec2 = FileRecord.fromRaw(raw);
        assert.equal(rec2.blocks, options.blocks);
        assert.equal(rec2.used, options.used);
        assert.equal(rec2.length, options.length);
      }

      it('construct with correct options', () => {
        const rec = new FileRecord({
          blocks: 1,
          used: 4194304,
          length: 20971520
        });

        assert.equal(rec.blocks, 1);
        assert.equal(rec.used, 4194304);
        assert.equal(rec.length, 20971520);
      });

      it('fail to with signed number (blocks)', () => {
        constructError({blocks: -1, used: 1, length: 1});
      });

      it('fail to with signed number (used)', () => {
        constructError({blocks: 1, used: -1, length: 1});
      });

      it('fail to with signed number (length)', () => {
        constructError({blocks: 1, used: 1, length: -1});
      });

      it('fail to with non-32-bit number (blocks)', () => {
        constructError({blocks: Math.pow(2, 32), used: 1, length: 1});
      });

      it('fail to with non-32-bit number (used)', () => {
        constructError({blocks: 1, used: Math.pow(2, 32), length: 1});
      });

      it('fail to with non-32-bit number (length)', () => {
        constructError({blocks: 1, used: 1, length: Math.pow(2, 32)});
      });

      it('serialize/deserialize block record (min)', () => {
        toAndFromRaw({blocks: 0, used: 0, length: 0});
      });

      it('serialize/deserialize block record', () => {
        toAndFromRaw({blocks: 10, used: 4194304, length: 20971520});
      });

      it('serialize/deserialize block record (max)', () => {
        const max = Math.pow(2, 32) - 1;
        toAndFromRaw({blocks: max, used: max, length: max});
      });
    });
  });

  describe('FileBlockStore (Unit)', function() {
    const location = '/tmp/.bcoin/blocks';

    let store = null;

    before(() => {
      // The store is never opened here, so these unit tests do
      // not touch the filesystem.
      store = new FileBlockStore({
        location: location,
        maxFileLength: 1024
      });
    });

    describe('allocate', function() {
      it('will fail with length above file max', async () => {
        let err = null;
        try {
          await store.allocate(1025);
        } catch (e) {
          err = e;
        }
        assert(err);
        assert.equal(err.message, 'Block length above max file length.');
      });
    });

    describe('filepath', function() {
      it('will give correct path (0)', () => {
        const filepath = store.filepath(0);
        assert.equal(filepath, '/tmp/.bcoin/blocks/blk00000.dat');
      });

      it('will give correct path (1)', () => {
        const filepath = store.filepath(7);
        assert.equal(filepath, '/tmp/.bcoin/blocks/blk00007.dat');
      });

      it('will give correct path (2)', () => {
        const filepath = store.filepath(23);
        assert.equal(filepath, '/tmp/.bcoin/blocks/blk00023.dat');
      });

      it('will give correct path (3)', () => {
        const filepath = store.filepath(456);
        assert.equal(filepath, '/tmp/.bcoin/blocks/blk00456.dat');
      });

      it('will give correct path (4)', () => {
        const filepath = store.filepath(8999);
        assert.equal(filepath, '/tmp/.bcoin/blocks/blk08999.dat');
      });

      it('will give correct path (5)', () => {
        const filepath = store.filepath(99999);
        assert.equal(filepath, '/tmp/.bcoin/blocks/blk99999.dat');
      });

      it('will fail over max size', () => {
        // File numbers are padded to five digits, so six digits
        // must be rejected.
        let err = null;
        try {
          store.filepath(100000);
        } catch (e) {
          err = e;
        }
        assert(err);
        assert.equal(err.message, 'File number too large.');
      });
    });
  });

  describe('FileBlockStore (Integration 1)', function() {
    const location = '/tmp/bcoin-blockstore-test';

    let store = null;

    beforeEach(async () => {
      await rimraf(location);
      await mkdirp(location);

      // Small max file length so a handful of 128-byte blocks
      // forces allocation across multiple files.
      store = new FileBlockStore({
        location: location,
        maxFileLength: 1024
      });

      await store.open();
    });

    afterEach(async () => {
      await store.close();
    });

    it('will write and read a block', async () => {
      const block1 = random.randomBytes(128);
      const hash = random.randomBytes(32);

      await store.write(hash, block1);

      const block2 = await store.read(hash);

      assert.bufferEqual(block1, block2);
    });

    it('will read a block w/ offset and length', async () => {
      const block1 = random.randomBytes(128);
      const hash = random.randomBytes(32);

      await store.write(hash, block1);

      const offset = 79;
      const size = 15;

      const block2 = await store.read(hash, offset, size);

      assert.bufferEqual(block1.slice(offset, offset + size), block2);
    });

    it('will fail to read w/ out-of-bounds length', async () => {
      const block1 = random.randomBytes(128);
      const hash = random.randomBytes(32);

      await store.write(hash, block1);

      const offset = 79;
      const size = 50;

      let err = null;
      try {
        await store.read(hash, offset, size);
      } catch (e) {
        err = e;
      }
      assert(err);
      assert.equal(err.message, 'Out-of-bounds read.');
    });

    it('will allocate new files', async () => {
      const blocks = [];

      for (let i = 0; i < 16; i++) {
        const block = random.randomBytes(128);
        const hash = random.randomBytes(32);
        blocks.push({hash, block});
        await store.write(hash, block);
        const block2 = await store.read(hash);
        assert.bufferEqual(block2, block);
      }

      const first = await fs.stat(store.filepath(0));
      const second = await fs.stat(store.filepath(1));
      const third = await fs.stat(store.filepath(2));

      // Each stored block is 136 bytes (8-byte magic/length header
      // + 128 bytes of data); 7 blocks fit per 1024-byte file
      // (7 * 136 = 952), leaving 2 blocks for the third file.
      assert.equal(first.size, 952);
      assert.equal(second.size, 952);
      assert.equal(third.size, 272);

      const len = first.size + second.size + third.size - (8 * 16);
      assert.equal(len, 128 * 16);

      for (let i = 0; i < 16; i++) {
        const expect = blocks[i];
        const block = await store.read(expect.hash);
        assert.bufferEqual(block, expect.block);
      }
    });

    it('will return null if block not found', async () => {
      const hash = random.randomBytes(32);
      const block = await store.read(hash);
      assert.strictEqual(block, null);
    });

    it('will check if block exists (false)', async () => {
      const hash = random.randomBytes(32);
      const exists = await store.has(hash);
      assert.strictEqual(exists, false);
    });

    it('will check if block exists (true)', async () => {
      const block = random.randomBytes(128);
      const hash = random.randomBytes(32);
      await store.write(hash, block);
      const exists = await store.has(hash);
      assert.strictEqual(exists, true);
    });

    it('will prune blocks', async () => {
      const hashes = [];

      for (let i = 0; i < 16; i++) {
        const block = random.randomBytes(128);
        const hash = random.randomBytes(32);
        hashes.push(hash);
        await store.write(hash, block);
      }

      const first = await fs.stat(store.filepath(0));
      const second = await fs.stat(store.filepath(1));
      const third = await fs.stat(store.filepath(2));

      const len = first.size + second.size + third.size - (8 * 16);
      assert.equal(len, 128 * 16);

      for (let i = 0; i < 16; i++) {
        const pruned = await store.prune(hashes[i]);
        assert.strictEqual(pruned, true);
      }

      // Once every block in a file is pruned, the file itself
      // should be unlinked...
      assert.equal(await fs.exists(store.filepath(0)), false);
      assert.equal(await fs.exists(store.filepath(1)), false);
      assert.equal(await fs.exists(store.filepath(2)), false);

      for (let i = 0; i < 16; i++) {
        const exists = await store.has(hashes[i]);
        assert.strictEqual(exists, false);
      }

      // ...and its file record removed from the index.
      const exists = await store.db.has(layout.f.encode(0));
      assert.strictEqual(exists, false);
    });
  });

  describe('FileBlockStore (Integration 2)', function() {
    const location = '/tmp/bcoin-blockstore-test';

    let store = null;

    beforeEach(async () => {
      await rimraf(location);
      await mkdirp(location);

      store = new FileBlockStore({
        location: location,
        maxFileLength: 1024 * 1024
      });

      await store.open();
    });

    afterEach(async () => {
      await store.close();
    });

    it('will import from files (e.g. db corruption)', async () => {
      const blocks = [];

      for (let i = 0; i < vectors.length; i++) {
        const [block] = vectors[i].getBlock();
        const hash = block.hash();
        const raw = block.toRaw();

        blocks.push({hash, block: raw});

        await store.write(hash, raw);
      }

      await store.close();

      // Simulate a corrupt/lost index: the data files remain but
      // the database is removed, so open() must reindex them.
      await rimraf(resolve(location, './index'));

      store = new FileBlockStore({
        location: location,
        maxFileLength: 1024
      });

      await store.open();

      for (let i = 0; i < vectors.length; i++) {
        const expect = blocks[i];

        const block = await store.read(expect.hash);

        assert.equal(block.length, expect.block.length);
        assert.bufferEqual(block, expect.block);
      }
    });
  });

  describe('LevelBlockStore', function() {
    const location = '/tmp/bcoin-blockstore-test';

    let store = null;

    beforeEach(async () => {
      await rimraf(location);
      await mkdirp(location);

      store = new LevelBlockStore({
        location: location
      });

      await store.open();
    });

    afterEach(async () => {
      await store.close();
    });

    it('will write and read a block', async () => {
      const block1 = random.randomBytes(128);
      const hash = random.randomBytes(32);

      await store.write(hash, block1);

      const block2 = await store.read(hash);

      assert.bufferEqual(block1, block2);
    });

    it('will read a block w/ offset and length', async () => {
      const block1 = random.randomBytes(128);
      const hash = random.randomBytes(32);

      await store.write(hash, block1);

      const offset = 79;
      const size = 15;

      const block2 = await store.read(hash, offset, size);

      assert.bufferEqual(block1.slice(offset, offset + size), block2);
    });

    it('will fail to read w/ out-of-bounds length', async () => {
      const block1 = random.randomBytes(128);
      const hash = random.randomBytes(32);

      await store.write(hash, block1);

      const offset = 79;
      const size = 50;

      let err = null;
      try {
        await store.read(hash, offset, size);
      } catch (e) {
        err = e;
      }
      assert(err);
      assert.equal(err.message, 'Out-of-bounds read.');
    });

    it('will check if block exists (false)', async () => {
      const hash = random.randomBytes(32);
      const exists = await store.has(hash);
      assert.strictEqual(exists, false);
    });

    it('will check if block exists (true)', async () => {
      const block = random.randomBytes(128);
      const hash = random.randomBytes(32);
      await store.write(hash, block);
      const exists = await store.has(hash);
      assert.strictEqual(exists, true);
    });

    it('will prune blocks (true)', async () => {
      const block = random.randomBytes(128);
      const hash = random.randomBytes(32);
      await store.write(hash, block);
      const pruned = await store.prune(hash);
      assert.strictEqual(pruned, true);
      const block2 = await store.read(hash);
      assert.strictEqual(block2, null);
    });

    it('will prune blocks (false)', async () => {
      const hash = random.randomBytes(32);
      const exists = await store.has(hash);
      assert.strictEqual(exists, false);
      const pruned = await store.prune(hash);
      assert.strictEqual(pruned, false);
    });
  });
});

View File

@@ -85,6 +85,14 @@ common.writeTX = function writeTX(name, tx, view) {
common.writeFile(`${name}-undo.raw`, undoRaw);
};
/**
 * Recursively remove a path, restricted to the /tmp jail so a
 * bad test path cannot delete arbitrary files.
 * @param {String} p - Absolute path under /tmp.
 * @returns {Promise}
 * @throws {Error} If the path is outside of /tmp.
 */
common.rimraf = async function(p) {
  // Regex literal (the '\/' escapes in the old string-built RegExp
  // were no-ops). Also reject '..' segments so a path such as
  // '/tmp/../etc' cannot escape the jail.
  const allowed = /^\/tmp\/(.*)$/;

  if (!allowed.test(p) || p.includes('..'))
    throw new Error(`Path not allowed: ${p}.`);

  return await fs.rimraf(p);
};
function parseUndo(data) {
const br = bio.read(data);
const items = [];