mnemonic: refactor parsing and serialization for phrases.

This commit is contained in:
Christopher Jeffrey 2017-08-10 06:55:01 -07:00
parent 50b8dd2b2c
commit 413f26584d
No known key found for this signature in database
GPG Key ID: 8962AB9DE6666BBD
3 changed files with 40 additions and 36 deletions

View File

@@ -41,14 +41,6 @@ common.MIN_ENTROPY = 128;
common.MAX_ENTROPY = 512;
/**
* Seed salt for key derivation ("Bitcoin seed").
* @const {Buffer}
* @default
*/
common.SEED_SALT = Buffer.from('Bitcoin seed', 'ascii');
/**
* LRU cache to avoid deriving keys twice.
* @type {LRU}

View File

@@ -23,7 +23,7 @@ const nfkd = require('../utils/nfkd');
* Constants
*/
const wordlistCache = new Map();
const wordlistCache = Object.create(null);
/**
* HD Mnemonic
@@ -185,29 +185,33 @@ Mnemonic.prototype.getPhrase = function getPhrase() {
// Include the first `ENT / 32` bits
// of the hash (the checksum).
const bits = this.bits + (this.bits / 32);
const wbits = this.bits + (this.bits / 32);
// Get entropy and checksum.
const entropy = this.getEntropy();
const chk = digest.sha256(entropy);
// Append the hash to the entropy to
// make things easy when grabbing
// the checksum bits.
const ent = this.getEntropy();
const entropy = Buffer.allocUnsafe(Math.ceil(bits / 8));
ent.copy(entropy, 0);
digest.sha256(ent).copy(entropy, ent.length);
const size = Math.ceil(wbits / 8);
const data = Buffer.allocUnsafe(size);
entropy.copy(data, 0);
chk.copy(data, entropy.length);
// Build the mnemonic by reading
// 11 bit indexes from the entropy.
const list = Mnemonic.getWordlist(this.language);
let phrase = [];
for (let i = 0; i < bits / 11; i++) {
for (let i = 0; i < wbits / 11; i++) {
let index = 0;
for (let j = 0; j < 11; j++) {
const pos = i * 11 + j;
const bit = pos % 8;
const oct = (pos - bit) / 8;
index <<= 1;
index |= (entropy[oct] >>> (7 - bit)) & 1;
index |= (data[oct] >>> (7 - bit)) & 1;
}
phrase.push(list.words[index]);
}
@@ -233,20 +237,21 @@ Mnemonic.prototype.fromPhrase = function fromPhrase(phrase) {
assert(typeof phrase === 'string');
assert(phrase.length <= 1000);
const words = phrase.split(/[ \u3000]+/);
let bits = words.length * 11;
const cbits = bits % 32;
const cbytes = Math.ceil(cbits / 8);
const words = phrase.trim().split(/[\s\u3000]+/);
const wbits = words.length * 11;
const cbits = wbits % 32;
bits -= cbits;
assert(cbits !== 0, 'Invalid checksum.');
const bits = wbits - cbits;
assert(bits >= common.MIN_ENTROPY);
assert(bits <= common.MAX_ENTROPY);
assert(bits % 32 === 0);
assert(cbits !== 0, 'Invalid checksum.');
const ent = Buffer.allocUnsafe(Math.ceil((bits + cbits) / 8));
ent.fill(0);
const size = Math.ceil(wbits / 8);
const data = Buffer.allocUnsafe(size);
data.fill(0);
const lang = Mnemonic.getLanguage(words[0]);
const list = Mnemonic.getWordlist(lang);
@@ -254,7 +259,7 @@ Mnemonic.prototype.fromPhrase = function fromPhrase(phrase) {
// Rebuild entropy bytes.
for (let i = 0; i < words.length; i++) {
const word = words[i];
const index = list.map.get(word);
const index = list.map[word];
if (index == null)
throw new Error('Could not find word.');
@@ -264,12 +269,13 @@ Mnemonic.prototype.fromPhrase = function fromPhrase(phrase) {
const bit = pos % 8;
const oct = (pos - bit) / 8;
const val = (index >>> (10 - j)) & 1;
ent[oct] |= val << (7 - bit);
data[oct] |= val << (7 - bit);
}
}
const entropy = ent.slice(0, ent.length - cbytes);
const chk1 = ent.slice(ent.length - cbytes);
const cbytes = Math.ceil(cbits / 8);
const entropy = data.slice(0, data.length - cbytes);
const chk1 = data.slice(data.length - cbytes);
const chk2 = digest.sha256(entropy);
// Verify checksum.
@@ -347,7 +353,7 @@ Mnemonic.fromEntropy = function fromEntropy(entropy, lang) {
Mnemonic.getLanguage = function getLanguage(word) {
for (const lang of Mnemonic.languages) {
const list = Mnemonic.getWordlist(lang);
if (list.map.has(word))
if (list.map[word] != null)
return lang;
}
@@ -361,7 +367,7 @@ Mnemonic.getLanguage = function getLanguage(word) {
*/
Mnemonic.getWordlist = function getWordlist(lang) {
const cache = wordlistCache.get(lang);
const cache = wordlistCache[lang];
if (cache)
return cache;
@@ -369,7 +375,7 @@ Mnemonic.getLanguage = function getLanguage(word) {
const words = wordlist.get(lang);
const list = new WordList(words);
wordlistCache.set(lang, list);
wordlistCache[lang] = list;
return list;
};
@@ -562,11 +568,11 @@ Mnemonic.isMnemonic = function isMnemonic(obj) {
/**
 * Word List
 * Wraps a BIP39 wordlist and indexes each word for O(1)
 * reverse lookup (word -> position) during phrase parsing.
 * Uses a null-prototype object rather than a Map so lookups
 * cannot collide with `Object.prototype` names (e.g. `toString`).
 * @constructor
 * @param {String[]} words - Ordered wordlist (2048 entries for BIP39).
 * @property {String[]} words - The original ordered wordlist.
 * @property {Object} map - word -> index lookup table (no prototype).
 */

function WordList(words) {
  this.words = words;
  this.map = Object.create(null);

  // Index every word by its position so fromPhrase()
  // can map words back to 11-bit indexes.
  for (let i = 0; i < words.length; i++) {
    const word = words[i];
    this.map[word] = i;
  }
}

View File

@@ -21,6 +21,12 @@ const common = require('./common');
const Mnemonic = require('./mnemonic');
const HDPublicKey = require('./public');
/*
* Constants
*/
const SEED_SALT = Buffer.from('Bitcoin seed', 'ascii');
/**
* HDPrivateKey
* @alias module:hd.PrivateKey
@@ -451,12 +457,12 @@ HDPrivateKey.prototype.compare = function compare(key) {
HDPrivateKey.prototype.fromSeed = function fromSeed(seed, network) {
assert(Buffer.isBuffer(seed));
if (!(seed.length * 8 >= common.MIN_ENTROPY
&& seed.length * 8 <= common.MAX_ENTROPY)) {
if (seed.length * 8 < common.MIN_ENTROPY
|| seed.length * 8 > common.MAX_ENTROPY) {
throw new Error('Entropy not in range.');
}
const hash = digest.hmac('sha512', seed, common.SEED_SALT);
const hash = digest.hmac('sha512', seed, SEED_SALT);
const left = hash.slice(0, 32);
const right = hash.slice(32, 64);