blockstore: index undo blocks from files

This commit is contained in:
Braydon Fuller 2019-03-18 13:31:30 -07:00
parent 8fc605c4a9
commit 961f6eddb5
No known key found for this signature in database
GPG Key ID: F24F232D108B3AD4
2 changed files with 114 additions and 29 deletions

View File

@ -60,14 +60,15 @@ class FileBlockStore extends AbstractBlockStore {
/**
* Compares the number of files in the directory
* with the recorded number of files. If there are any
* inconsistencies it will reindex all blocks.
* with the recorded number of files.
* @param {Number} type - The type of block data
* @private
* @returns {Promise}
*/
async index() {
const regexp = /^blk(\d{5})\.dat$/;
async check(type) {
const prefix = prefixes[type];
const regexp = new RegExp(`^${prefix}(\\d{5})\\.dat$`);
const all = await fs.readdir(this.location);
const dats = all.filter(f => regexp.test(f));
const filenos = dats.map(f => parseInt(f.match(regexp)[1]));
@ -75,21 +76,36 @@ class FileBlockStore extends AbstractBlockStore {
let missing = false;
for (const fileno of filenos) {
const rec = await this.db.get(layout.f.encode(types.BLOCK, fileno));
const rec = await this.db.get(layout.f.encode(type, fileno));
if (!rec) {
missing = true;
break;
}
}
return {missing, filenos};
}
/**
* Creates indexes from files for a block type. Reads the hash of
* the block data from the magic prefix, except for a block which
* the hash is read from the block header.
* @private
* @param {Number} type - The type of block data
* @returns {Promise}
*/
async _index(type) {
const {missing, filenos} = await this.check(type);
if (!missing)
return;
this.logger.info('Indexing FileBlockStore...');
this.logger.info(`Indexing block type ${type}...`);
for (const fileno of filenos) {
const b = this.db.batch();
const filepath = this.filepath(types.BLOCK, fileno);
const filepath = this.filepath(type, fileno);
const data = await fs.readFile(filepath);
const reader = bio.read(data);
let magic = null;
@ -105,15 +121,24 @@ class FileBlockStore extends AbstractBlockStore {
continue;
}
const length = reader.readU32();
const position = reader.offset;
let header = null;
let hash = null;
let position = 0;
let length = 0;
try {
header = Headers.fromReader(reader);
const read = reader.offset - position;
reader.seek(length - read);
length = reader.readU32();
if (type === types.BLOCK) {
position = reader.offset;
const header = Headers.fromReader(reader);
hash = header.hash();
const read = reader.offset - position;
reader.seek(length - read);
} else {
hash = reader.readHash();
position = reader.offset;
reader.seek(length);
}
} catch (err) {
this.logger.warning(
'Unknown block in file: %s, reason: %s',
@ -127,10 +152,8 @@ class FileBlockStore extends AbstractBlockStore {
length: length
});
const hash = header.hash();
blocks += 1;
b.put(layout.b.encode(types.BLOCK, hash), blockrecord.toRaw());
b.put(layout.b.encode(type, hash), blockrecord.toRaw());
}
const filerecord = new FileRecord({
@ -139,7 +162,7 @@ class FileBlockStore extends AbstractBlockStore {
length: this.maxFileLength
});
b.put(layout.f.encode(types.BLOCK, fileno), filerecord.toRaw());
b.put(layout.f.encode(type, fileno), filerecord.toRaw());
await b.write();
@ -147,6 +170,19 @@ class FileBlockStore extends AbstractBlockStore {
}
}
/**
 * Creates any missing indexes for all supported types of
 * block data (regular blocks and undo blocks) by delegating
 * to _index for each type. If there are any inconsistencies
 * between the files on disk and the recorded file records,
 * the blocks of that type are reindexed.
 * @private
 * @returns {Promise}
 */
async index() {
  await this._index(types.BLOCK);
  await this._index(types.UNDO);
}
/**
* This method ensures that both the block storage directory
* and index directory exist.
@ -317,10 +353,27 @@ class FileBlockStore extends AbstractBlockStore {
return false;
}
const mlength = 8;
let mlength = 8;
// Hash for a block is not stored with
// the magic prefix as it's read from the header
// of the block data.
if (type !== types.BLOCK)
mlength += 32;
const blength = data.length;
const length = data.length + mlength;
const bwm = bio.write(mlength);
bwm.writeU32(this.network.magic);
bwm.writeU32(blength);
if (type !== types.BLOCK)
bwm.writeHash(hash);
const magic = bwm.render();
const {
fileno,
filerecord,
@ -330,11 +383,6 @@ class FileBlockStore extends AbstractBlockStore {
const mposition = filerecord.used;
const bposition = filerecord.used + mlength;
const bwm = bio.write(mlength);
bwm.writeU32(this.network.magic);
bwm.writeU32(blength);
const magic = bwm.render();
const fd = await fs.open(filepath, 'r+');
const mwritten = await fs.write(fd, magic, 0, mlength, mposition);

View File

@ -22,6 +22,15 @@ const extra = [
common.readBlock('block482683')
];
// Fixture blocks that carry undo data (see `undoRaw` usage in the
// undo block tests below).
const undos = [
  common.readBlock('block300025'),
  common.readBlock('block928816'),
  common.readBlock('block928828'),
  common.readBlock('block928831'),
  common.readBlock('block928848'),
  common.readBlock('block928849')
];
const {
AbstractBlockStore,
FileBlockStore,
@ -606,11 +615,8 @@ describe('BlockStore', function() {
const first = await fs.stat(store.filepath(types.UNDO, 0));
const second = await fs.stat(store.filepath(types.UNDO, 1));
const third = await fs.stat(store.filepath(types.UNDO, 2));
assert.equal(first.size, 952);
assert.equal(second.size, 952);
assert.equal(third.size, 272);
const magic = (8 * 16);
const magic = (40 * 16);
const len = first.size + second.size + third.size - magic;
assert.equal(len, 128 * 16);
@ -791,7 +797,7 @@ describe('BlockStore', function() {
const second = await fs.stat(store.filepath(types.UNDO, 1));
const third = await fs.stat(store.filepath(types.UNDO, 2));
const magic = (8 * 16);
const magic = (40 * 16);
const len = first.size + second.size + third.size - magic;
assert.equal(len, 128 * 16);
@ -934,6 +940,37 @@ describe('BlockStore', function() {
assert.bufferEqual(block, expect.block);
}
});
it('will import undo blocks from files', async () => {
  // Phase 1: write the undo data for each fixture block,
  // keyed by the block's hash, and remember what was written.
  const blocks = [];
  for (let i = 0; i < undos.length; i++) {
    const [block] = undos[i].getBlock();
    const raw = undos[i].undoRaw;
    const hash = block.hash();
    blocks.push({hash, block: raw});
    await store.writeUndo(hash, raw);
  }
  // Phase 2: close the store and delete only the index database,
  // leaving the flat data files on disk.
  await store.close();
  await rimraf(resolve(location, './index'));
  store = new FileBlockStore({
    location: location,
    maxFileLength: 1024
  });
  // NOTE(review): presumably open() rebuilds the index from the
  // data files — confirm against FileBlockStore.open().
  await store.open();
  // Phase 3: every undo block must be readable again through the
  // rebuilt index, byte-for-byte identical to what was written.
  for (let i = 0; i < undos.length; i++) {
    const expect = blocks[i];
    const block = await store.readUndo(expect.hash);
    assert.equal(block.length, expect.block.length);
    assert.bufferEqual(block, expect.block);
  }
});
});
describe('LevelBlockStore', function() {