blockstore: optimize block read and index
Using `allocUnsafe` could yield a further increase of around 10% to 23% in block-read performance; however, this change already improves block-read performance by roughly 3 to 6 times. Although `allocUnsafe` would be safe here — the number of bytes read is checked against the size of the allocated buffer — test cases could stub `fs.read` with _other_ behavior that does not preserve that guarantee, though this isn't currently the case.
This commit is contained in:
parent
41ce504175
commit
83824d73b1
@ -11,8 +11,8 @@ const bdb = require('bdb');
|
||||
const assert = require('bsert');
|
||||
const fs = require('bfile');
|
||||
const bio = require('bufio');
|
||||
const hash256 = require('bcrypto/lib/hash256');
|
||||
const Network = require('../protocol/network');
|
||||
const Headers = require('../primitives/headers');
|
||||
const AbstractBlockStore = require('./abstract');
|
||||
const {BlockRecord, FileRecord} = require('./records');
|
||||
const layout = require('./layout');
|
||||
@ -130,10 +130,8 @@ class FileBlockStore extends AbstractBlockStore {
|
||||
|
||||
if (type === types.BLOCK) {
|
||||
position = reader.offset;
|
||||
const header = Headers.fromReader(reader);
|
||||
hash = header.hash();
|
||||
const read = reader.offset - position;
|
||||
reader.seek(length - read);
|
||||
hash = hash256.digest(reader.readBytes(80, true));
|
||||
reader.seek(length - 80);
|
||||
} else {
|
||||
hash = reader.readHash();
|
||||
position = reader.offset;
|
||||
@ -271,7 +269,7 @@ class FileBlockStore extends AbstractBlockStore {
|
||||
|
||||
const last = await this.db.get(layout.F.encode(type));
|
||||
if (last)
|
||||
fileno = bio.read(last).readU32();
|
||||
fileno = bio.readU32(last, 0);
|
||||
|
||||
filepath = this.filepath(type, fileno);
|
||||
|
||||
@ -482,9 +480,12 @@ class FileBlockStore extends AbstractBlockStore {
|
||||
const data = Buffer.alloc(length);
|
||||
|
||||
const fd = await fs.open(filepath, 'r');
|
||||
await fs.read(fd, data, 0, length, position);
|
||||
const bytes = await fs.read(fd, data, 0, length, position);
|
||||
await fs.close(fd);
|
||||
|
||||
if (bytes !== length)
|
||||
throw new Error('Wrong number of bytes read.');
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
|
||||
@ -505,6 +505,55 @@ describe('BlockStore', function() {
|
||||
assert.equal(err.message, 'Could not write block.');
|
||||
});
|
||||
});
|
||||
|
||||
describe('read', function() {
|
||||
const read = fs.read;
|
||||
const open = fs.open;
|
||||
const close = fs.close;
|
||||
let get = null;
|
||||
let raw = null;
|
||||
|
||||
before(() => {
|
||||
const record = new BlockRecord({
|
||||
file: 1,
|
||||
position: 8,
|
||||
length: 100
|
||||
});
|
||||
raw = record.toRaw();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
get = store.db.get;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore stubbed methods.
|
||||
store.db.get = get;
|
||||
fs.read = read;
|
||||
fs.open = open;
|
||||
fs.close = close;
|
||||
});
|
||||
|
||||
it('will error if total read bytes not correct', async () => {
|
||||
let err = null;
|
||||
|
||||
store.db.get = () => raw;
|
||||
fs.open = () => 7;
|
||||
fs.close = () => undefined;
|
||||
fs.read = () => 99;
|
||||
|
||||
try {
|
||||
const hash = random.randomBytes(128);
|
||||
const block = random.randomBytes(32);
|
||||
await store.read(hash, block);
|
||||
} catch (e) {
|
||||
err = e;
|
||||
}
|
||||
|
||||
assert(err, 'Expected error.');
|
||||
assert.equal(err.message, 'Wrong number of bytes read.');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('FileBlockStore (Integration 1)', function() {
|
||||
|
||||
Loading…
Reference in New Issue
Block a user