support BIP-0039

Alexey Karyabkin 2018-06-01 19:54:55 +04:00
parent 4a212c2ccd
commit 8e72b59bde
10 changed files with 16458 additions and 0 deletions

File diff suppressed because it is too large

File diff suppressed because it is too large

pybtc/bip-0039/english.txt (Normal file, 2048 lines)

File diff suppressed because it is too large

pybtc/bip-0039/french.txt (Normal file, 2048 lines)

File diff suppressed because it is too large

pybtc/bip-0039/italian.txt (Normal file, 2048 lines)

File diff suppressed because it is too large

pybtc/bip-0039/japanese.txt (Normal file, 2048 lines)

File diff suppressed because it is too large

pybtc/bip-0039/korean.txt (Normal file, 2048 lines)

File diff suppressed because it is too large

pybtc/bip-0039/spanish.txt (Normal file, 2048 lines)

File diff suppressed because it is too large


@@ -1,6 +1,7 @@
from secp256k1 import lib as secp256k1
from secp256k1 import ffi
import random
import os
SIGHASH_ALL = 0x00000001
SIGHASH_NONE = 0x00000002
@@ -74,3 +75,5 @@ SCRIPT_TYPES = { "P2PKH": 0,
"NON_STANDART": 7
}
ROOT_DIR = os.path.abspath(os.path.dirname(__file__))
BIP0039_DIR = os.path.normpath(os.path.join(ROOT_DIR, 'bip-0039'))
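Each wordlist bundled under BIP0039_DIR is expected to hold exactly 2048 entries (2**11, one per 11-bit index), matching the 2048-line files added above. A quick sanity-check sketch (illustration only, not part of this diff):

import os

for name in os.listdir(BIP0039_DIR):
    with open(os.path.join(BIP0039_DIR, name), encoding='utf-8') as f:
        words = f.read().rstrip('\n').split('\n')
    assert len(words) == 2048, name  # 2**11 words per BIP-0039 list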


@@ -700,3 +700,74 @@ def i2b(i): return bn2vch(i)
def b2i(b): return vch2bn(b)

#
# BIP39
#
def create_mnemonic(bits=256, language='english'):
    passphrase = []
    wordlist = create_wordlist(language)
    entropy = os.urandom(bits // 8)
    # append the BIP-0039 checksum (bits // 32 checksum bits) to the entropy
    entropy_int = add_checksum(entropy)
    # total bit length (entropy + checksum) is always a multiple of 11
    word_count = (bits + bits // 32) // 11
    for _ in range(word_count):
        passphrase.append(wordlist[entropy_int & 0b11111111111])
        entropy_int >>= 11
    return ' '.join(passphrase[::-1])
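A usage sketch (illustration only, not part of this diff; assumes the function is importable from the package): 256 entropy bits plus an 8-bit checksum give 264 bits, i.e. 24 words, and 128 bits give 12.

phrase = create_mnemonic(bits=256, language='english')
assert len(phrase.split()) == 24  # 264 bits / 11 bits per word
phrase = create_mnemonic(bits=128, language='english')
assert len(phrase.split()) == 12  # 132 bits / 11 bits per word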
def create_wordlist(language, wordlist_dir=BIP0039_DIR):
    path = os.path.join(wordlist_dir, '.'.join((language, 'txt')))
    assert os.path.exists(path)
    # several wordlists (e.g. japanese.txt) contain non-ASCII characters
    with open(path, encoding='utf-8') as f:
        content = f.read().rstrip('\n')
    assert content
    return content.split('\n')
def add_checksum(data):
    # the checksum is the first len(data) * 8 // 32 bits of SHA256(data),
    # shifted in after the entropy bits (at most 8, i.e. up to 256-bit entropy)
    mask = 0b10000000
    data_int = int.from_bytes(data, byteorder="big")
    chk_sum_bit_len = len(data) * 8 // 32
    fbyte_hash = hashlib.sha256(data).digest()[0]
    while chk_sum_bit_len:
        chk_sum_bit_len -= 1
        data_int = (data_int << 1) | 1 if fbyte_hash & mask else data_int << 1
        mask >>= 1
    return data_int
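For 16 bytes (128 bits) of entropy the checksum is 128 // 32 = 4 bits, taken from the top of the first SHA256 byte. A worked sketch (illustration only, using all-zero entropy purely as an example):

import hashlib

entropy = bytes(16)                       # 128 zero bits
fb = hashlib.sha256(entropy).digest()[0]
assert add_checksum(entropy) == fb >> 4   # zero entropy bits, then 4 checksum bits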
def mnemonic2bytes(mnemonic, language):
    # mnemonic is a sequence of words; returns the original entropy bytes
    wordlist = create_wordlist(language)
    codes = {word: code for code, word in enumerate(wordlist)}
    word_count = len(mnemonic)
    bit_size = word_count * 11
    chk_sum_bit_len = bit_size % 32
    entropy_int = 0
    for word in mnemonic:
        entropy_int = (entropy_int << 11) | codes[word]
    chk_sum = entropy_int & ((1 << chk_sum_bit_len) - 1)
    entropy_int >>= chk_sum_bit_len
    entropy = entropy_int.to_bytes((bit_size - chk_sum_bit_len) // 8, byteorder="big")
    # the top chk_sum_bit_len bits of SHA256(entropy) must equal the checksum
    fb = hashlib.sha256(entropy).digest()[0]
    assert (fb >> (8 - chk_sum_bit_len)) == chk_sum
    return entropy
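A round-trip sketch tying the pieces together (illustration only, not part of this diff): mnemonic2bytes takes the words as a sequence and re-checks the checksum internally.

phrase = create_mnemonic(bits=256, language='english')
entropy = mnemonic2bytes(phrase.split(), 'english')
assert len(entropy) == 32  # the original 256 entropy bits, checksum verified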