Commit 8d461eaf authored by Holger

Bip39 and Bip32 support has been added

parent 6e25816e
Changelog
=========
0.23.4
------
* Bip39 and Bip32 support has been added
0.23.3
------
* bugfix for beempy post
......
......@@ -182,3 +182,4 @@ Acknowledgements
.. _Anaconda: https://www.continuum.io
.. _beem.readthedocs.io: http://beem.readthedocs.io/en/latest/
.. _beem-discord-channel: https://discord.gg/4HM592V
\ No newline at end of file
"""THIS FILE IS GENERATED FROM beem SETUP.PY."""
version = '0.23.3'
version = '0.23.4'
"""THIS FILE IS GENERATED FROM beem SETUP.PY."""
version = '0.23.3'
version = '0.23.4'
"""THIS FILE IS GENERATED FROM beem SETUP.PY."""
version = '0.23.3'
version = '0.23.4'
......@@ -14,13 +14,28 @@ import os
import codecs
import ecdsa
import ctypes
import binascii
import bisect
import hmac
import itertools
from binascii import hexlify, unhexlify
import unicodedata
from .base58 import ripemd160, Base58
from .dictionary import words as BrainKeyDictionary
from .dictionary import words_bip39 as MnemonicDictionary
from .py23 import py23_bytes, PY2
PBKDF2_ROUNDS = 2048
# From <https://stackoverflow.com/questions/212358/binary-search-bisection-in-python/2233940#2233940>
def binary_search(a, x, lo=0, hi=None): # can't use a to specify default for hi
hi = hi if hi is not None else len(a) # hi defaults to len(a)
pos = bisect.bisect_left(a, x, lo, hi) # find insertion position
return pos if pos != hi and a[pos] == x else -1 # don't walk off the end
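# Editorial example (not part of the original commit), assuming a sorted
# wordlist as used by the Mnemonic class below:
#
#   >>> binary_search(["abandon", "ability", "able"], "able")
#   2
#   >>> binary_search(["abandon", "ability", "able"], "zebra")
#   -1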
class PasswordKey(object):
""" This class derives a private key given the account name, the
role and a password. It leverages the technology of Brainkeys
......@@ -34,9 +49,9 @@ class PasswordKey(object):
self.password = password
self.prefix = prefix
def normalize(self, brainkey):
def normalize(self, seed):
""" Correct formating with single whitespace syntax and no trailing space """
return " ".join(re.compile("[\t\n\v\f\r ]+").split(brainkey))
return " ".join(re.compile("[\t\n\v\f\r ]+").split(seed))
def get_private(self):
""" Derive private key from the account, the role and the password
......@@ -47,8 +62,8 @@ class PasswordKey(object):
seed = self.password
else:
seed = self.account + self.role + self.password
brainkey = self.normalize(seed)
a = py23_bytes(brainkey, 'utf8')
seed = self.normalize(seed)
a = py23_bytes(seed, 'utf8')
s = hashlib.sha256(a).digest()
return PrivateKey(hexlify(s).decode('ascii'), prefix=self.prefix)
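# Editorial sketch (not part of the original commit): the derivation is plain
# SHA256 over the normalized concatenation "<account><role><password>". The
# account name and password below are hypothetical placeholders, assuming the
# usual PasswordKey(account, password, role=...) constructor shown above.
#
#   >>> pk = PasswordKey("alice", "some-master-password", role="active")
#   >>> pk.get_private()   # == PrivateKey(hexlify(sha256(b"aliceactivesome-master-password").digest()))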
......@@ -155,6 +170,156 @@ class BrainKey(object):
return " ".join(brainkey).upper()
# From https://github.com/trezor/python-mnemonic/blob/master/mnemonic/mnemonic.py
#
# Copyright (c) 2013 Pavol Rusnak
# Copyright (c) 2017 mruddy
@python_2_unicode_compatible
class Mnemonic(object):
def __init__(self):
self.wordlist = MnemonicDictionary.split(',')
self.radix = 2048
def generate(self, strength=128):
if strength not in [128, 160, 192, 224, 256]:
raise ValueError(
"Strength should be one of the following [128, 160, 192, 224, 256], but it is not (%d)."
% strength
)
return self.to_mnemonic(os.urandom(strength // 8))
# Adapted from <http://tinyurl.com/oxmn476>
def to_entropy(self, words):
if not isinstance(words, list):
words = words.split(" ")
if len(words) not in [12, 15, 18, 21, 24]:
raise ValueError(
"Number of words must be one of the following: [12, 15, 18, 21, 24], but it is not (%d)."
% len(words)
)
# Look up all the words in the list and construct the
# concatenation of the original entropy and the checksum.
concatLenBits = len(words) * 11
concatBits = [False] * concatLenBits
wordindex = 0
use_binary_search = True
for word in words:
# Find the word's index in the wordlist
ndx = (
binary_search(self.wordlist, word)
if use_binary_search
else self.wordlist.index(word)
)
if ndx < 0:
raise LookupError('Unable to find "%s" in word list.' % word)
# Set the next 11 bits to the value of the index.
for ii in range(11):
concatBits[(wordindex * 11) + ii] = (ndx & (1 << (10 - ii))) != 0
wordindex += 1
checksumLengthBits = concatLenBits // 33
entropyLengthBits = concatLenBits - checksumLengthBits
# Extract original entropy as bytes.
entropy = bytearray(entropyLengthBits // 8)
for ii in range(len(entropy)):
for jj in range(8):
if concatBits[(ii * 8) + jj]:
entropy[ii] |= 1 << (7 - jj)
# Take the digest of the entropy.
hashBytes = hashlib.sha256(entropy).digest()
if sys.version < "3":
hashBits = list(
itertools.chain.from_iterable(
(
[ord(c) & (1 << (7 - i)) != 0 for i in range(8)]
for c in hashBytes
)
)
)
else:
hashBits = list(
itertools.chain.from_iterable(
([c & (1 << (7 - i)) != 0 for i in range(8)] for c in hashBytes)
)
)
# Check all the checksum bits.
for i in range(checksumLengthBits):
if concatBits[entropyLengthBits + i] != hashBits[i]:
raise ValueError("Failed checksum.")
return entropy
def to_mnemonic(self, data):
if len(data) not in [16, 20, 24, 28, 32]:
raise ValueError(
"Data length should be one of the following: [16, 20, 24, 28, 32], but it is not (%d)."
% len(data)
)
h = hashlib.sha256(data).hexdigest()
b = (
bin(int(binascii.hexlify(data), 16))[2:].zfill(len(data) * 8)
+ bin(int(h, 16))[2:].zfill(256)[: len(data) * 8 // 32]
)
result = []
for i in range(len(b) // 11):
idx = int(b[i * 11 : (i + 1) * 11], 2)
result.append(self.wordlist[idx])
result_phrase = " ".join(result)
return result_phrase
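# Worked example (editorial note; the entropy/mnemonic pair is taken from the
# BIP39 test vectors added to the test suite in this commit): 16 bytes of
# entropy give 128 + 4 checksum bits = 132 bits, i.e. twelve 11-bit word
# indices; all-zero entropy selects wordlist[0] eleven times and the 4-bit
# SHA256 checksum supplies the final index ("about").
#
#   >>> Mnemonic().to_mnemonic(unhexlify("00000000000000000000000000000000"))
#   'abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about'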
def check(self, mnemonic):
mnemonic = self.normalize_string(mnemonic).split(" ")
# list of valid mnemonic lengths
if len(mnemonic) not in [12, 15, 18, 21, 24]:
return False
try:
idx = map(lambda x: bin(self.wordlist.index(x))[2:].zfill(11), mnemonic)
b = "".join(idx)
except ValueError:
return False
l = len(b) # noqa: E741
d = b[: l // 33 * 32]
h = b[-l // 33 :]
nd = binascii.unhexlify(hex(int(d, 2))[2:].rstrip("L").zfill(l // 33 * 8))
nh = bin(int(hashlib.sha256(nd).hexdigest(), 16))[2:].zfill(256)[: l // 33]
return h == nh
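# Editorial examples (mnemonics taken from the tests added in this commit):
# check() recomputes the SHA256 checksum bits and compares them with the bits
# encoded in the final word.
#
#   >>> m = Mnemonic()
#   >>> m.check("abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about")
#   True
#   >>> m.check("bless cloud wheel regular tiny venue bird web grief security dignity zoo")
#   False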
def expand_word(self, prefix):
if prefix in self.wordlist:
return prefix
else:
matches = [word for word in self.wordlist if word.startswith(prefix)]
if len(matches) == 1: # matched exactly one word in the wordlist
return matches[0]
else:
# exact match not found.
# this is not a validation routine, just return the input
return prefix
def expand(self, mnemonic):
return " ".join(map(self.expand_word, mnemonic.split(" ")))
@classmethod
def normalize_string(cls, txt):
if isinstance(txt, str if sys.version < "3" else bytes):
utxt = txt.decode("utf8")
elif isinstance(txt, unicode if sys.version < "3" else str): # noqa: F821
utxt = txt
else:
raise TypeError("String value expected")
return unicodedata.normalize("NFKD", utxt)
@classmethod
def to_seed(cls, mnemonic, passphrase=""):
mnemonic = cls.normalize_string(mnemonic)
passphrase = cls.normalize_string(passphrase)
passphrase = "mnemonic" + passphrase
mnemonic = mnemonic.encode("utf-8")
passphrase = passphrase.encode("utf-8")
stretched = hashlib.pbkdf2_hmac("sha512", mnemonic, passphrase, PBKDF2_ROUNDS)
return stretched[:64]
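# Worked example (editorial note; values from the test vectors added in this
# commit, Python 3 output shown): the seed is PBKDF2-HMAC-SHA512 over the
# NFKD-normalized mnemonic, salted with "mnemonic" + passphrase, for
# PBKDF2_ROUNDS (2048) iterations.
#
#   >>> seed = Mnemonic.to_seed("abandon abandon abandon abandon abandon abandon"
#   ...                         " abandon abandon abandon abandon abandon about",
#   ...                         passphrase="TREZOR")
#   >>> hexlify(seed)[:16]
#   b'c55257c360c07c72'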
@python_2_unicode_compatible
class Address(object):
""" Address class
......
......@@ -183,7 +183,10 @@ def b58decode(v):
def base58CheckEncode(version, payload):
s = ('%.2x' % version) + payload
if isinstance(version, string_types):
s = version + payload
else:
s = ('%.2x' % version) + payload
checksum = doublesha256(s)[:4]
result = s + hexlify(checksum).decode('ascii')
return base58encode(result)
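# Editorial note (not part of the original commit): 'version' may now be either
# an int, formatted as two hex digits, or an already hex-encoded string as
# passed by the new bip32 module; both calls below are equivalent ('payload' is
# a hypothetical hex-encoded string).
#
#   >>> base58CheckEncode(0x80, payload) == base58CheckEncode('80', payload)
#   True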
......
#!/usr/bin/env python
#
# Copyright 2014 Corgan Labs
# See LICENSE.txt for distribution terms
# https://github.com/namuyan/bip32nem/blob/master/bip32nem/BIP32Key.py
import os
import hmac
import hashlib
import struct
import codecs
from beemgraphenebase.base58 import base58CheckDecode, base58CheckEncode
from hashlib import sha256
from binascii import hexlify, unhexlify
import ecdsa
from ecdsa.curves import SECP256k1
from ecdsa.numbertheory import square_root_mod_prime as sqrt_mod
VerifyKey = ecdsa.VerifyingKey.from_public_point
SigningKey = ecdsa.SigningKey.from_string
PointObject = ecdsa.ellipticcurve.Point # Point class
CURVE_GEN = ecdsa.ecdsa.generator_secp256k1 # Point class
CURVE_ORDER = CURVE_GEN.order() # int
FIELD_ORDER = SECP256k1.curve.p() # int
INFINITY = ecdsa.ellipticcurve.INFINITY # Point
MIN_ENTROPY_LEN = 128 # bits
BIP32_HARDEN = 0x80000000 # choose from hardened set of child keys
EX_MAIN_PRIVATE = [codecs.decode('0488ade4', 'hex')] # Version strings for mainnet extended private keys
EX_MAIN_PUBLIC = [codecs.decode('0488b21e', 'hex'),
codecs.decode('049d7cb2', 'hex')] # Version strings for mainnet extended public keys
EX_TEST_PRIVATE = [codecs.decode('04358394', 'hex')] # Version strings for testnet extended private keys
EX_TEST_PUBLIC = [codecs.decode('043587CF', 'hex')] # Version strings for testnet extended public keys
def parse_path(nstr):
""""""
r = list()
for s in nstr.split('/'):
if s == 'm':
continue
elif s.endswith("'") or s.endswith('h'):
r.append(int(s[:-1]) + BIP32_HARDEN)
else:
r.append(int(s))
return r
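# Editorial example (not part of the original commit): hardened components
# ("44'" or "44h") are offset by BIP32_HARDEN (0x80000000).
#
#   >>> parse_path("m/44'/0'/0'/0/0")
#   [2147483692, 2147483648, 2147483648, 0, 0]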
class BIP32Key(object):
# Static initializers to create from entropy or external formats
#
@staticmethod
def fromEntropy(entropy, public=False, testnet=False):
"""Create a BIP32Key using supplied entropy >= MIN_ENTROPY_LEN"""
if entropy is None:
entropy = os.urandom(MIN_ENTROPY_LEN // 8) # Python doesn't have os.random()
if len(entropy) < MIN_ENTROPY_LEN // 8:
raise ValueError("Initial entropy (%i bits) must be at least %i bits" %
(len(entropy) * 8, MIN_ENTROPY_LEN))
i64 = hmac.new(b"Bitcoin seed", entropy, hashlib.sha512).digest()
il, ir = i64[:32], i64[32:]
# FIXME test Il for 0 or less than SECP256k1 prime field order
key = BIP32Key(secret=il, chain=ir, depth=0, index=0, fpr=b'\0\0\0\0', public=False, testnet=testnet)
if public:
key.SetPublic()
return key
@staticmethod
def fromExtendedKey(xkey, public=False):
"""
Create a BIP32Key by importing from extended private or public key string
If public is True, return a public-only key regardless of input type.
"""
# Sanity checks
# raw = check_decode(xkey)
raw = b'\x04' + unhexlify(base58CheckDecode(xkey))
if len(raw) != 78:
raise ValueError("extended key format wrong length")
# Verify address version/type
version = raw[:4]
if version in EX_MAIN_PRIVATE:
is_testnet = False
is_pubkey = False
elif version in EX_TEST_PRIVATE:
is_testnet = True
is_pubkey = False
elif version in EX_MAIN_PUBLIC:
is_testnet = False
is_pubkey = True
elif version in EX_TEST_PUBLIC:
is_testnet = True
is_pubkey = True
else:
raise ValueError("unknown extended key version")
# Extract remaining fields
# Python 2.x compatibility
if type(raw[4]) == int:
depth = raw[4]
else:
depth = ord(raw[4])
fpr = raw[5:9]
child = struct.unpack(">L", raw[9:13])[0]
chain = raw[13:45]
secret = raw[45:78]
# Extract private key or public key point
if not is_pubkey:
secret = secret[1:]
else:
# Recover public curve point from compressed key
# Python3 FIX
lsb = secret[0] & 1 if type(secret[0]) == int else ord(secret[0]) & 1
x = int.from_bytes(secret[1:], 'big')
ys = (x ** 3 + 7) % FIELD_ORDER # y^2 = x^3 + 7 mod p
y = sqrt_mod(ys, FIELD_ORDER)
if y & 1 != lsb:
y = FIELD_ORDER - y
point = PointObject(SECP256k1.curve, x, y)
secret = VerifyKey(point, curve=SECP256k1)
key = BIP32Key(secret=secret, chain=chain, depth=depth, index=child, fpr=fpr, public=is_pubkey,
testnet=is_testnet)
if not is_pubkey and public:
key.SetPublic()
return key
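# Editorial note (not part of the original commit): serialization and import
# round-trip, so an xpub exported with ExtendedKey() can be re-imported for
# public-only derivation, as exercised by test_with_pub in the new test module
# further down, e.g.
#
#   >>> key2 = BIP32Key.fromExtendedKey(key.ExtendedKey(encoded=True))   # 'key' is a hypothetical BIP32Key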
# Normal class initializer
def __init__(self, secret, chain, depth, index, fpr, public=False, testnet=False):
"""
Create a public or private BIP32Key using key material and chain code.
secret This is the source material to generate the keypair, either a
32-byte string representation of a private key, or the ECDSA
library object representing a public key.
chain This is a 32-byte string representation of the chain code
depth Child depth; parent increments its own by one when assigning this
index Child index
fpr Parent fingerprint
public If true, this keypair will only contain a public key and can only create
a public key chain.
"""
self.public = public
if public is False:
self.k = SigningKey(secret, curve=SECP256k1)
self.K = self.k.get_verifying_key()
else:
self.k = None
self.K = secret
self.C = chain
self.depth = depth
self.index = index
self.parent_fpr = fpr
self.testnet = testnet
# Internal methods not intended to be called externally
#
def hmac(self, data):
"""
Calculate the HMAC-SHA512 of input data using the chain code as key.
Returns a tuple of the left and right halves of the HMAC
"""
i64 = hmac.new(self.C, data, hashlib.sha512).digest()
return i64[:32], i64[32:]
def CKDpriv(self, i):
"""
Create a child key of index 'i'.
If the most significant bit of 'i' is set, then select from the
hardened key set, otherwise, select a regular child key.
Returns a BIP32Key constructed with the child key parameters,
or None if index 'i' would result in an invalid key.
"""
# Index as bytes, BE
i_str = struct.pack(">L", i)
# Data to HMAC
if i & BIP32_HARDEN:
data = b'\0' + self.k.to_string() + i_str
else:
data = self.PublicKey() + i_str
# Get HMAC of data
(Il, Ir) = self.hmac(data)
# Construct new key material from Il and current private key
Il_int = int.from_bytes(Il, 'big')
if Il_int >= CURVE_ORDER:
return None
pvt_int = int.from_bytes(self.k.to_string(), 'big')
k_int = (Il_int + pvt_int) % CURVE_ORDER
if (k_int == 0):
return None
secret = k_int.to_bytes(32, 'big')
# Construct and return a new BIP32Key
return BIP32Key(secret=secret, chain=Ir, depth=self.depth + 1, index=i, fpr=self.Fingerprint(), public=False,
testnet=self.testnet)
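# Editorial note (not part of the original commit): this implements the BIP32
# private-parent -> private-child rule: the HMAC input is
# 0x00 || ser256(k_par) || ser32(i) for hardened indices and
# serP(K_par) || ser32(i) otherwise, and the child key is
# k_i = (parse256(IL) + k_par) mod n, rejected when parse256(IL) >= n or k_i == 0.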
def CKDpub(self, i):
"""
Create a publicly derived child key of index 'i'.
If the most significant bit of 'i' is set, this is
an error.
Returns a BIP32Key constructed with the child key parameters,
or None if the index would result in an invalid key.
"""
if i & BIP32_HARDEN:
raise Exception("Cannot create a hardened child key using public child derivation")
# Data to HMAC. Same as CKDpriv() for public child key.
data = self.PublicKey() + struct.pack(">L", i)
# Get HMAC of data
(Il, Ir) = self.hmac(data)
# Construct curve point Il*G+K
Il_int = int.from_bytes(Il, 'big')
if Il_int >= CURVE_ORDER:
return None
point = Il_int * CURVE_GEN + self.K.pubkey.point
if point == INFINITY:
return None
# Retrieve public key based on curve point
K_i = VerifyKey(point, curve=SECP256k1)
# Construct and return a new BIP32Key
return BIP32Key(secret=K_i, chain=Ir, depth=self.depth + 1, index=i, fpr=self.Fingerprint(), public=True,
testnet=self.testnet)
# Public methods
#
def ChildKey(self, i):
"""
Create and return a child key of this one at index 'i'.
The index 'i' should be summed with BIP32_HARDEN to request
hardened (private) derivation.
"""
if self.public is False:
return self.CKDpriv(i)
else:
return self.CKDpub(i)
def SetPublic(self):
"""Convert a private BIP32Key into a public one"""
self.k = None
self.public = True
def PrivateKey(self):
"""Return private key as string"""
if self.public:
raise Exception("Publicly derived deterministic keys have no private half")
else:
return self.k.to_string()
def PublicKey(self):
"""Return compressed public key encoding"""
padx = self.K.pubkey.point.x().to_bytes(32, 'big')
if self.K.pubkey.point.y() & 1:
ck = b'\3' + padx
else:
ck = b'\2' + padx
return ck
def ChainCode(self):
"""Return chain code as string"""
return self.C
def Identifier(self):
"""Return key identifier as string"""
cK = self.PublicKey()
return hashlib.new('ripemd160', sha256(cK).digest()).digest()
def Fingerprint(self):
"""Return key fingerprint as string"""
return self.Identifier()[:4]
def Address(self):
"""Return compressed public key address"""
addressversion = b'\x00' if not self.testnet else b'\x6f'
# vh160 = addressversion + self.Identifier()
# return check_encode(vh160)
payload = hexlify(self.Identifier()).decode('ascii')
return base58CheckEncode(hexlify(addressversion).decode('ascii'), payload)
def P2WPKHoP2SHAddress(self):
"""Return P2WPKH over P2SH segwit address"""
pk_bytes = self.PublicKey()
assert len(pk_bytes) == 33 and (pk_bytes.startswith(b"\x02") or pk_bytes.startswith(b"\x03")), \
"Only compressed public keys are compatible with p2sh-p2wpkh addresses. " \
"See https://github.com/bitcoin/bips/blob/master/bip-0049.mediawiki."
pk_hash = hashlib.new('ripemd160', sha256(pk_bytes).digest()).digest()
push_20 = bytes.fromhex('0014')
script_sig = push_20 + pk_hash
address_bytes = hashlib.new('ripemd160', sha256(script_sig).digest()).digest()
prefix = b"\xc4" if self.testnet else b"\x05"
# return check_encode(prefix + address_bytes)
payload = hexlify(address_bytes).decode('ascii')
return base58CheckEncode(hexlify(prefix).decode('ascii'), payload)
def WalletImportFormat(self):
"""Returns private key encoded for wallet import"""
if self.public:
raise Exception("Publicly derived deterministic keys have no private half")
addressversion = b'\x80' if not self.testnet else b'\xef'
raw = self.k.to_string() + b'\x01' # Always compressed
# return check_encode(addressversion + raw)
payload = hexlify(raw).decode('ascii')
return base58CheckEncode(hexlify(addressversion).decode('ascii'), payload)
def ExtendedKey(self, private=True, encoded=True):
"""Return extended private or public key as string, optionally base58 encoded"""
if self.public is True and private is True:
raise Exception("Cannot export an extended private key from a public-only deterministic key")
if not self.testnet:
version = EX_MAIN_PRIVATE[0] if private else EX_MAIN_PUBLIC[0]
else:
version = EX_TEST_PRIVATE[0] if private else EX_TEST_PUBLIC[0]
depth = bytes(bytearray([self.depth]))
fpr = self.parent_fpr
child = struct.pack('>L', self.index)
chain = self.C
if self.public is True or private is False:
data = self.PublicKey()
else:
data = b'\x00' + self.PrivateKey()
raw = version + depth + fpr + child + chain + data
if not encoded:
return raw
else:
# return check_encode(raw)
payload = hexlify(chain + data).decode('ascii')
return base58CheckEncode(hexlify(version + depth + fpr + child).decode('ascii'), payload)
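# Editorial note (not part of the original commit): the raw extended key is the
# 78-byte concatenation
#   version(4) || depth(1) || parent_fpr(4) || child_index(4) || chain_code(32) || key_data(33)
# where key_data is 0x00 || private key for xprv and the compressed public key
# for xpub; the encoded form is the Base58Check of those 78 bytes (the same
# layout that fromExtendedKey() parses above).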
# Debugging methods
#
def dump(self):
"""Dump key fields mimicking the BIP0032 test vector format"""
print(" * Identifier")
print(" * (hex): ", self.Identifier().hex())
print(" * (fpr): ", self.Fingerprint().hex())
print(" * (main addr):", self.Address())
if self.public is False:
print(" * Secret key")
print(" * (hex): ", self.PrivateKey().hex())
print(" * (wif): ", self.WalletImportFormat())
print(" * Public key")
print(" * (hex): ", self.PublicKey().hex())
print(" * Chain code")
print(" * (hex): ", self.C.hex())
print(" * Serialized")
print(" * (pub hex): ", self.ExtendedKey(private=False, encoded=False).hex())
print(" * (pub b58): ", self.ExtendedKey(private=False, encoded=True))
if self.public is False:
print(" * (prv hex): ", self.ExtendedKey(private=True, encoded=False).hex())
print(" * (prv b58): ", self.ExtendedKey(private=True, encoded=True))
def test():
from binascii import a2b_hex
# BIP0032 Test vector 1
entropy = a2b_hex('000102030405060708090A0B0C0D0E0F')
m = BIP32Key.fromEntropy(entropy)
print("Test vector 1:")
print("Master (hex):", entropy.hex())
print("* [Chain m]")
m.dump()
print("* [Chain m/0h]")
m = m.ChildKey(0 + BIP32_HARDEN)
m.dump()
print("* [Chain m/0h/1]")
m = m.ChildKey(1)
m.dump()
print("* [Chain m/0h/1/2h]")
m = m.ChildKey(2 + BIP32_HARDEN)
m.dump()
print("* [Chain m/0h/1/2h/2]")
m = m.ChildKey(2)
m.dump()
print("* [Chain m/0h/1/2h/2/1000000000]")
m = m.ChildKey(1000000000)
m.dump()
# BIP0032 Test vector 2
entropy = a2b_hex('fffcf9f6f3f0edeae7e4e1dedbd8d5d2cfccc9c6c3c0bdbab7b4b1aeaba8a5a29f9c999693908d8a878481'
'7e7b7875726f6c696663605d5a5754514e4b484542')
m = BIP32Key.fromEntropy(entropy)
print("Test vector 2:")
print("Master (hex):", entropy.hex())
print("* [Chain m]")
m.dump()
print("* [Chain m/0]")
m = m.ChildKey(0)
m.dump()
print("* [Chain m/0/2147483647h]")
m = m.ChildKey(2147483647 + BIP32_HARDEN)
m.dump()
print("* [Chain m/0/2147483647h/1]")
m = m.ChildKey(1)
m.dump()
print("* [Chain m/0/2147483647h/1/2147483646h]")
m = m.ChildKey(2147483646 + BIP32_HARDEN)
m.dump()
print("* [Chain m/0/2147483647h/1/2147483646h/2]")
m = m.ChildKey(2)
m.dump()
if __name__ == "__main__":
test()
\ No newline at end of file
Source diff could not be displayed: it is too large.
"""THIS FILE IS GENERATED FROM beem SETUP.PY."""
version = '0.23.3'
version = '0.23.4'
......@@ -16,7 +16,7 @@ except LookupError:
ascii = codecs.lookup('ascii')
codecs.register(lambda name, enc=ascii: {True: enc}.get(name == 'mbcs'))
VERSION = '0.23.3'
VERSION = '0.23.4'
tests_require = ['mock >= 2.0.0', 'pytest', 'pytest-mock', 'parameterized']
......
......@@ -5,8 +5,13 @@ from __future__ import print_function
from __future__ import unicode_literals
from builtins import str
import unittest
from beemgraphenebase.base58 import Base58
from beemgraphenebase.account import BrainKey, Address, PublicKey, PrivateKey, PasswordKey
from beemgraphenebase.base58 import Base58, base58encode
from beemgraphenebase.bip32 import BIP32Key
from beemgraphenebase.account import BrainKey, Address, PublicKey, PrivateKey, PasswordKey, Mnemonic
from binascii import hexlify, unhexlify
import sys
import hashlib
import random
class Testcases(unittest.TestCase):
......@@ -221,3 +226,229 @@ class Testcases(unittest.TestCase):
"STM24hzNSDZYgm9C85yxJqyk32DwjXg8pCgkGVzB77hvP2XxGDdvr",
"STM2e99iqVQUFij7Dk2nWVNC1dL8M86q37Nj4KwPHKBu1Yy49HkwA",
"STMgqaH9RdvUtVk7NFnx4BZJRrNS7Lj35qaueAeYJ3tKEqPaLwa4"])
def test_utf8_nfkd(self):
# The same sentence in various UTF-8 forms
words_nfkd = u"Pr\u030ci\u0301s\u030cerne\u030c z\u030clut\u030couc\u030cky\u0301 ku\u030an\u030c u\u0301pe\u030cl d\u030ca\u0301belske\u0301 o\u0301dy za\u0301ker\u030cny\u0301 uc\u030cen\u030c be\u030cz\u030ci\u0301 pode\u0301l zo\u0301ny u\u0301lu\u030a"
words_nfc = u"P\u0159\xed\u0161ern\u011b \u017elu\u0165ou\u010dk\xfd k\u016f\u0148 \xfap\u011bl \u010f\xe1belsk\xe9 \xf3dy z\xe1ke\u0159n\xfd u\u010de\u0148 b\u011b\u017e\xed pod\xe9l z\xf3ny \xfal\u016f"
words_nfkc = u"P\u0159\xed\u0161ern\u011b \u017elu\u0165ou\u010dk\xfd k\u016f\u0148 \xfap\u011bl \u010f\xe1belsk\xe9 \xf3dy z\xe1ke\u0159n\xfd u\u010de\u0148 b\u011b\u017e\xed pod\xe9l z\xf3ny \xfal\u016f"
words_nfd = u"Pr\u030ci\u0301s\u030cerne\u030c z\u030clut\u030couc\u030cky\u0301 ku\u030an\u030c u\u0301pe\u030cl d\u030ca\u0301belske\u0301 o\u0301dy za\u0301ker\u030cny\u0301 uc\u030cen\u030c be\u030cz\u030ci\u0301 pode\u0301l zo\u0301ny u\u0301lu\u030a"
passphrase_nfkd = (
u"Neuve\u030cr\u030citelne\u030c bezpec\u030cne\u0301 hesli\u0301c\u030cko"
)
passphrase_nfc = (
u"Neuv\u011b\u0159iteln\u011b bezpe\u010dn\xe9 hesl\xed\u010dko"
)
passphrase_nfkc = (
u"Neuv\u011b\u0159iteln\u011b bezpe\u010dn\xe9 hesl\xed\u010dko"
)
passphrase_nfd = (
u"Neuve\u030cr\u030citelne\u030c bezpec\u030cne\u0301 hesli\u0301c\u030cko"
)
seed_nfkd = Mnemonic.to_seed(words_nfkd, passphrase_nfkd)
seed_nfc = Mnemonic.to_seed(words_nfc, passphrase_nfc)
seed_nfkc = Mnemonic.to_seed(words_nfkc, passphrase_nfkc)
seed_nfd = Mnemonic.to_seed(words_nfd, passphrase_nfd)
self.assertEqual(seed_nfkd, seed_nfc)
self.assertEqual(seed_nfkd, seed_nfkc)
self.assertEqual(seed_nfkd, seed_nfd)
def test_expand(self):
m = Mnemonic()
self.assertEqual("access", m.expand("access"))
self.assertEqual(
"access access acb acc act action", m.expand("access acce acb acc act acti")
)
def test_expand_word(self):
m = Mnemonic()
self.assertEqual("", m.expand_word(""))
self.assertEqual(" ", m.expand_word(" "))
self.assertEqual("access", m.expand_word("access")) # word in list
self.assertEqual(
"access", m.expand_word("acce")
) # unique prefix expanded to word in list
self.assertEqual("acb", m.expand_word("acb")) # not found at all
self.assertEqual("acc", m.expand_word("acc")) # multi-prefix match
self.assertEqual("act", m.expand_word("act")) # exact three letter match
self.assertEqual(
"action", m.expand_word("acti")
) # unique prefix expanded to word in list
def test_failed_checksum(self):
code = (
"bless cloud wheel regular tiny venue bird web grief security dignity zoo"
)
mnemo = Mnemonic()
self.assertFalse(mnemo.check(code))
def test_mnemonic(self):
v = [[
"00000000000000000000000000000000",
"abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about",
"c55257c360c07c72029aebc1b53c05ed0362ada38ead3e3e9efa3708e53495531f09a6987599d18264c1e1c92f2cf141630c7a3c4ab7c81b2f001698e7463b04",
"xprv9s21ZrQH143K3h3fDYiay8mocZ3afhfULfb5GX8kCBdno77K4HiA15Tg23wpbeF1pLfs1c5SPmYHrEpTuuRhxMwvKDwqdKiGJS9XFKzUsAF"
],
[
"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
"legal winner thank year wave sausage worth useful legal winner thank yellow",
"2e8905819b8723fe2c1d161860e5ee1830318dbf49a83bd451cfb8440c28bd6fa457fe1296106559a3c80937a1c1069be3a3a5bd381ee6260e8d9739fce1f607",
"xprv9s21ZrQH143K2gA81bYFHqU68xz1cX2APaSq5tt6MFSLeXnCKV1RVUJt9FWNTbrrryem4ZckN8k4Ls1H6nwdvDTvnV7zEXs2HgPezuVccsq"
],
[
"80808080808080808080808080808080",
"letter advice cage absurd amount doctor acoustic avoid letter advice cage above",
"d71de856f81a8acc65e6fc851a38d4d7ec216fd0796d0a6827a3ad6ed5511a30fa280f12eb2e47ed2ac03b5c462a0358d18d69fe4f985ec81778c1b370b652a8",
"xprv9s21ZrQH143K2shfP28KM3nr5Ap1SXjz8gc2rAqqMEynmjt6o1qboCDpxckqXavCwdnYds6yBHZGKHv7ef2eTXy461PXUjBFQg6PrwY4Gzq"
],
[
"ffffffffffffffffffffffffffffffff",
"zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo wrong",
"ac27495480225222079d7be181583751e86f571027b0497b5b5d11218e0a8a13332572917f0f8e5a589620c6f15b11c61dee327651a14c34e18231052e48c069",
"xprv9s21ZrQH143K2V4oox4M8Zmhi2Fjx5XK4Lf7GKRvPSgydU3mjZuKGCTg7UPiBUD7ydVPvSLtg9hjp7MQTYsW67rZHAXeccqYqrsx8LcXnyd"
],
[
"000000000000000000000000000000000000000000000000",
"abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon agent",
"035895f2f481b1b0f01fcf8c289c794660b289981a78f8106447707fdd9666ca06da5a9a565181599b79f53b844d8a71dd9f439c52a3d7b3e8a79c906ac845fa",
"xprv9s21ZrQH143K3mEDrypcZ2usWqFgzKB6jBBx9B6GfC7fu26X6hPRzVjzkqkPvDqp6g5eypdk6cyhGnBngbjeHTe4LsuLG1cCmKJka5SMkmU"
],
[
"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
"legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth useful legal will",
"f2b94508732bcbacbcc020faefecfc89feafa6649a5491b8c952cede496c214a0c7b3c392d168748f2d4a612bada0753b52a1c7ac53c1e93abd5c6320b9e95dd",
"xprv9s21ZrQH143K3Lv9MZLj16np5GzLe7tDKQfVusBni7toqJGcnKRtHSxUwbKUyUWiwpK55g1DUSsw76TF1T93VT4gz4wt5RM23pkaQLnvBh7"
],
[
"808080808080808080808080808080808080808080808080",
"letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic avoid letter always",
"107d7c02a5aa6f38c58083ff74f04c607c2d2c0ecc55501dadd72d025b751bc27fe913ffb796f841c49b1d33b610cf0e91d3aa239027f5e99fe4ce9e5088cd65",
"xprv9s21ZrQH143K3VPCbxbUtpkh9pRG371UCLDz3BjceqP1jz7XZsQ5EnNkYAEkfeZp62cDNj13ZTEVG1TEro9sZ9grfRmcYWLBhCocViKEJae"
],
[
"ffffffffffffffffffffffffffffffffffffffffffffffff",
"zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo when",
"0cd6e5d827bb62eb8fc1e262254223817fd068a74b5b449cc2f667c3f1f985a76379b43348d952e2265b4cd129090758b3e3c2c49103b5051aac2eaeb890a528",
"xprv9s21ZrQH143K36Ao5jHRVhFGDbLP6FCx8BEEmpru77ef3bmA928BxsqvVM27WnvvyfWywiFN8K6yToqMaGYfzS6Db1EHAXT5TuyCLBXUfdm"
],
[
"0000000000000000000000000000000000000000000000000000000000000000",
"abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon art",
"bda85446c68413707090a52022edd26a1c9462295029f2e60cd7c4f2bbd3097170af7a4d73245cafa9c3cca8d561a7c3de6f5d4a10be8ed2a5e608d68f92fcc8",
"xprv9s21ZrQH143K32qBagUJAMU2LsHg3ka7jqMcV98Y7gVeVyNStwYS3U7yVVoDZ4btbRNf4h6ibWpY22iRmXq35qgLs79f312g2kj5539ebPM"
],
[
"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
"legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth title",
"bc09fca1804f7e69da93c2f2028eb238c227f2e9dda30cd63699232578480a4021b146ad717fbb7e451ce9eb835f43620bf5c514db0f8add49f5d121449d3e87",
"xprv9s21ZrQH143K3Y1sd2XVu9wtqxJRvybCfAetjUrMMco6r3v9qZTBeXiBZkS8JxWbcGJZyio8TrZtm6pkbzG8SYt1sxwNLh3Wx7to5pgiVFU"
],
[
"8080808080808080808080808080808080808080808080808080808080808080",
"letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic bless",
"c0c519bd0e91a2ed54357d9d1ebef6f5af218a153624cf4f2da911a0ed8f7a09e2ef61af0aca007096df430022f7a2b6fb91661a9589097069720d015e4e982f",
"xprv9s21ZrQH143K3CSnQNYC3MqAAqHwxeTLhDbhF43A4ss4ciWNmCY9zQGvAKUSqVUf2vPHBTSE1rB2pg4avopqSiLVzXEU8KziNnVPauTqLRo"
],
[
"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
"zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo vote",
"dd48c104698c30cfe2b6142103248622fb7bb0ff692eebb00089b32d22484e1613912f0a5b694407be899ffd31ed3992c456cdf60f5d4564b8ba3f05a69890ad",
"xprv9s21ZrQH143K2WFF16X85T2QCpndrGwx6GueB72Zf3AHwHJaknRXNF37ZmDrtHrrLSHvbuRejXcnYxoZKvRquTPyp2JiNG3XcjQyzSEgqCB"
],
[
"9e885d952ad362caeb4efe34a8e91bd2",
"ozone drill grab fiber curtain grace pudding thank cruise elder eight picnic",
"274ddc525802f7c828d8ef7ddbcdc5304e87ac3535913611fbbfa986d0c9e5476c91689f9c8a54fd55bd38606aa6a8595ad213d4c9c9f9aca3fb217069a41028",
"xprv9s21ZrQH143K2oZ9stBYpoaZ2ktHj7jLz7iMqpgg1En8kKFTXJHsjxry1JbKH19YrDTicVwKPehFKTbmaxgVEc5TpHdS1aYhB2s9aFJBeJH"
],
[
"6610b25967cdcca9d59875f5cb50b0ea75433311869e930b",
"gravity machine north sort system female filter attitude volume fold club stay feature office ecology stable narrow fog",
"628c3827a8823298ee685db84f55caa34b5cc195a778e52d45f59bcf75aba68e4d7590e101dc414bc1bbd5737666fbbef35d1f1903953b66624f910feef245ac",
"xprv9s21ZrQH143K3uT8eQowUjsxrmsA9YUuQQK1RLqFufzybxD6DH6gPY7NjJ5G3EPHjsWDrs9iivSbmvjc9DQJbJGatfa9pv4MZ3wjr8qWPAK"
],
[
"68a79eaca2324873eacc50cb9c6eca8cc68ea5d936f98787c60c7ebc74e6ce7c",
"hamster diagram private dutch cause delay private meat slide toddler razor book happy fancy gospel tennis maple dilemma loan word shrug inflict delay length",
"64c87cde7e12ecf6704ab95bb1408bef047c22db4cc7491c4271d170a1b213d20b385bc1588d9c7b38f1b39d415665b8a9030c9ec653d75e65f847d8fc1fc440",
"xprv9s21ZrQH143K2XTAhys3pMNcGn261Fi5Ta2Pw8PwaVPhg3D8DWkzWQwjTJfskj8ofb81i9NP2cUNKxwjueJHHMQAnxtivTA75uUFqPFeWzk"
],
[
"c0ba5a8e914111210f2bd131f3d5e08d",
"scheme spot photo card baby mountain device kick cradle pact join borrow",
"ea725895aaae8d4c1cf682c1bfd2d358d52ed9f0f0591131b559e2724bb234fca05aa9c02c57407e04ee9dc3b454aa63fbff483a8b11de949624b9f1831a9612",
"xprv9s21ZrQH143K3FperxDp8vFsFycKCRcJGAFmcV7umQmcnMZaLtZRt13QJDsoS5F6oYT6BB4sS6zmTmyQAEkJKxJ7yByDNtRe5asP2jFGhT6"
],
[
"6d9be1ee6ebd27a258115aad99b7317b9c8d28b6d76431c3",
"horn tenant knee talent sponsor spell gate clip pulse soap slush warm silver nephew swap uncle crack brave",
"fd579828af3da1d32544ce4db5c73d53fc8acc4ddb1e3b251a31179cdb71e853c56d2fcb11aed39898ce6c34b10b5382772db8796e52837b54468aeb312cfc3d",
"xprv9s21ZrQH143K3R1SfVZZLtVbXEB9ryVxmVtVMsMwmEyEvgXN6Q84LKkLRmf4ST6QrLeBm3jQsb9gx1uo23TS7vo3vAkZGZz71uuLCcywUkt"
],
[
"9f6a2878b2520799a44ef18bc7df394e7061a224d2c33cd015b157d746869863",
"panda eyebrow bullet gorilla call smoke muffin taste mesh discover soft ostrich alcohol speed nation flash devote level hobby quick inner drive ghost inside",
"72be8e052fc4919d2adf28d5306b5474b0069df35b02303de8c1729c9538dbb6fc2d731d5f832193cd9fb6aeecbc469594a70e3dd50811b5067f3b88b28c3e8d",
"xprv9s21ZrQH143K2WNnKmssvZYM96VAr47iHUQUTUyUXH3sAGNjhJANddnhw3i3y3pBbRAVk5M5qUGFr4rHbEWwXgX4qrvrceifCYQJbbFDems"
],
[
"23db8160a31d3e0dca3688ed941adbf3",
"cat swing flag economy stadium alone churn speed unique patch report train",
"deb5f45449e615feff5640f2e49f933ff51895de3b4381832b3139941c57b59205a42480c52175b6efcffaa58a2503887c1e8b363a707256bdd2b587b46541f5",
"xprv9s21ZrQH143K4G28omGMogEoYgDQuigBo8AFHAGDaJdqQ99QKMQ5J6fYTMfANTJy6xBmhvsNZ1CJzRZ64PWbnTFUn6CDV2FxoMDLXdk95DQ"
],
[
"8197a4a47f0425faeaa69deebc05ca29c0a5b5cc76ceacc0",
"light rule cinnamon wrap drastic word pride squirrel upgrade then income fatal apart sustain crack supply proud access",
"4cbdff1ca2db800fd61cae72a57475fdc6bab03e441fd63f96dabd1f183ef5b782925f00105f318309a7e9c3ea6967c7801e46c8a58082674c860a37b93eda02",
"xprv9s21ZrQH143K3wtsvY8L2aZyxkiWULZH4vyQE5XkHTXkmx8gHo6RUEfH3Jyr6NwkJhvano7Xb2o6UqFKWHVo5scE31SGDCAUsgVhiUuUDyh"
],
[
"066dca1a2bb7e8a1db2832148ce9933eea0f3ac9548d793112d9a95c9407efad",
"all hour make first leader extend hole alien behind guard gospel lava path output census museum junior mass reopen famous sing advance salt reform",
"26e975ec644423f4a4c4f4215ef09b4bd7ef924e85d1d17c4cf3f136c2863cf6df0a475045652c57eb5fb41513ca2a2d67722b77e954b4b3fc11f7590449191d",
"xprv9s21ZrQH143K3rEfqSM4QZRVmiMuSWY9wugscmaCjYja3SbUD3KPEB1a7QXJoajyR2T1SiXU7rFVRXMV9XdYVSZe7JoUXdP4SRHTxsT1nzm"
],
[
"f30f8c1da665478f49b001d94c5fc452",
"vessel ladder alter error federal sibling chat ability sun glass valve picture",
"2aaa9242daafcee6aa9d7269f17d4efe271e1b9a529178d7dc139cd18747090bf9d60295d0ce74309a78852a9caadf0af48aae1c6253839624076224374bc63f",
"xprv9s21ZrQH143K2QWV9Wn8Vvs6jbqfF1YbTCdURQW9dLFKDovpKaKrqS3SEWsXCu6ZNky9PSAENg6c9AQYHcg4PjopRGGKmdD313ZHszymnps"
],
[
"c10ec20dc3cd9f652c7fac2f1230f7a3c828389a14392f05",
"scissors invite lock maple supreme raw rapid void congress muscle digital elegant little brisk hair mango congress clump",
"7b4a10be9d98e6cba265566db7f136718e1398c71cb581e1b2f464cac1ceedf4f3e274dc270003c670ad8d02c4558b2f8e39edea2775c9e232c7cb798b069e88",
"xprv9s21ZrQH143K4aERa2bq7559eMCCEs2QmmqVjUuzfy5eAeDX4mqZffkYwpzGQRE2YEEeLVRoH4CSHxianrFaVnMN2RYaPUZJhJx8S5j6puX"
],
[
"f585c11aec520db57dd353c69554b21a89b20fb0650966fa0a9d6f74fd989d8f",
"void come effort suffer camp survey warrior heavy shoot primary clutch crush open amazing screen patrol group space point ten exist slush involve unfold",
"01f5bced59dec48e362f2c45b5de68b9fd6c92c6634f44d6d40aab69056506f0e35524a518034ddc1192e1dacd32c1ed3eaa3c3b131c88ed8e7e54c49a5d0998",
"xprv9s21ZrQH143K39rnQJknpH1WEPFJrzmAqqasiDcVrNuk926oizzJDDQkdiTvNPr2FYDYzWgiMiC63YmfPAa2oPyNB23r2g7d1yiK6WpqaQS"
]
]
mnemo = Mnemonic()
for i in range(len(v)):
code = mnemo.to_mnemonic(unhexlify(v[i][0]))
seed = Mnemonic.to_seed(code, passphrase="TREZOR")
key = BIP32Key.fromEntropy(seed)
if sys.version >= "3":
seed = hexlify(seed).decode("utf8")
self.assertIs(mnemo.check(v[i][1]), True)
self.assertEqual(v[i][1], code)
self.assertEqual(v[i][2], seed)
xprv = key.ExtendedKey(private=True, encoded=True)
self.assertEqual(v[i][3], xprv)
def test_to_entropy(self):
data = [
bytearray((random.getrandbits(8) for _ in range(32))) for _ in range(1024)
]
data.append(b"Lorem ipsum dolor sit amet amet.")
m = Mnemonic()
for d in data:
self.assertEqual(m.to_entropy(m.to_mnemonic(d).split()), d)
# This Python file uses the following encoding: utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import unittest
import binascii
from binascii import hexlify, unhexlify
from beemgraphenebase.account import Mnemonic
from beemgraphenebase.bip32 import BIP32Key, BIP32_HARDEN, parse_path
words = 'news clever spot drama infant detail sword cover color throw foot primary when slender rhythm clog autumn ecology enough bronze math you modify excuse'
class Testcases(unittest.TestCase):
def test_btc_privkey(self):
mobj = Mnemonic()
mnemonic_words = "aware report movie exile buyer drum poverty supreme gym oppose float elegant"
seed = mobj.to_seed(mnemonic_words)
bip32_root_key_obj = BIP32Key.fromEntropy(seed)
bip32_child_key_obj = bip32_root_key_obj.ChildKey(
44 + BIP32_HARDEN
).ChildKey(
0 + BIP32_HARDEN
).ChildKey(
0 + BIP32_HARDEN
).ChildKey(0).ChildKey(0)
self.assertEqual(bip32_child_key_obj.Address(), '1A9vZ4oPLb29szfRWVFe1VoEe7a2qEMjvJ')
self.assertEqual(binascii.hexlify(bip32_child_key_obj.PublicKey()).decode(), '029dc2912196f2ad7a830747c2490287e4ff3ea52c417598681a955dcdf473b6c0')
self.assertEqual(bip32_child_key_obj.WalletImportFormat(), 'L3g3hhYabnBFbGqd7qReebwCrRkGhAzaX4cBpYSv5S667sWJAn5A')
def test_with_sec(self):
path = "m/44'/0'/0'/0'/0"
m = Mnemonic()
seed = m.to_seed(words)
key = BIP32Key.fromEntropy(seed)
self.assertEqual(key.ExtendedKey(), "xprv9s21ZrQH143K3EGRfjQYhZ6fA3HPPiw6rxopHKXfWTrB66evM4fDRiUScJy5RCCGz98nBaCCtwpwFCTDiFG5tx3mdnyyL1MbHmQQ19BWemo")
m = key
for n in parse_path(path):
m = m.ChildKey(n)
self.assertEqual(m.ExtendedKey(), "xprvA3Fu8ZNFZDn3S24jWzHCLGsX9eSUcpvFY2FFKLzessSEkk1KQLhHpyG7rnfYtx7txBupUY546PT5tjb4kwXghpd1rRw1Xw8nAqS19EZuPSu")
self.assertEqual(m.ExtendedKey(private=False), "xpub6GFFY4u9PbLLeW9Cd1pChQpFhgGy2He6uFAr7jQGSCyDdYLTwt1YNmabi4aRfHRxwiEEhwu2Bjm3ypHaWHXbmr48QP4Fd8PXcw1o9qpdLSQ")
m = key.ChildKey(
44 + BIP32_HARDEN
).ChildKey(
0 + BIP32_HARDEN
).ChildKey(
0 + BIP32_HARDEN
).ChildKey(0 + BIP32_HARDEN).ChildKey(0)
self.assertEqual(m.ExtendedKey(), "xprvA3Fu8ZNFZDn3S24jWzHCLGsX9eSUcpvFY2FFKLzessSEkk1KQLhHpyG7rnfYtx7txBupUY546PT5tjb4kwXghpd1rRw1Xw8nAqS19EZuPSu")
self.assertEqual(m.ExtendedKey(private=False), "xpub6GFFY4u9PbLLeW9Cd1pChQpFhgGy2He6uFAr7jQGSCyDdYLTwt1YNmabi4aRfHRxwiEEhwu2Bjm3ypHaWHXbmr48QP4Fd8PXcw1o9qpdLSQ")
def test_with_pub(self):
path = "m/0/0/0"
m = Mnemonic()
seed = m.to_seed(words)
key = BIP32Key.fromEntropy(seed)
m = key
for n in parse_path(path):
m = m.ChildKey(n)
self.assertEqual(m.ExtendedKey(), "xprv9yK7bXqEnmCpHMV4NM7FKj1vsiXQ14h6W8Bn5jkAHHBqrm2CSy82Wpb3FXHaG39v6zt3YCKiqNz4ydx3BNtgvDmU2bxXz1RJ9TXL7N91bTL")
self.assertEqual(m.ExtendedKey(private=False, encoded=True), "xpub6CJU13N8d8m7VqZXUNeFgrxfRkMtQXQwsM7Nt89mqcipjZMLzWSH4cuX6mWj3XohyuCBRK7cpkAq59XBLRqqjQJGieg2qHaEeRS8dBrGgZu")
path = "m/0/0/0"
key2 = BIP32Key.fromExtendedKey(key.ExtendedKey(encoded=True))
m = key2
for n in parse_path(path):
m = m.ChildKey(n)
self.assertEqual(m.ExtendedKey(), "xprv9yK7bXqEnmCpHMV4NM7FKj1vsiXQ14h6W8Bn5jkAHHBqrm2CSy82Wpb3FXHaG39v6zt3YCKiqNz4ydx3BNtgvDmU2bxXz1RJ9TXL7N91bTL")
self.assertEqual(m.ExtendedKey(private=False, encoded=True), "xpub6CJU13N8d8m7VqZXUNeFgrxfRkMtQXQwsM7Nt89mqcipjZMLzWSH4cuX6mWj3XohyuCBRK7cpkAq59XBLRqqjQJGieg2qHaEeRS8dBrGgZu")
def test_vector1(self):
seed = unhexlify("000102030405060708090a0b0c0d0e0f")
key = BIP32Key.fromEntropy(seed)
self.assertEqual(key.ExtendedKey(), "xprv9s21ZrQH143K3QTDL4LXw2F7HEK3wJUD2nW2nRk4stbPy6cq3jPPqjiChkVvvNKmPGJxWUtg6LnF5kejMRNNU3TGtRBeJgk33yuGBxrMPHi")
self.assertEqual(key.ExtendedKey(private=False), "xpub661MyMwAqRbcFtXgS5sYJABqqG9YLmC4Q1Rdap9gSE8NqtwybGhePY2gZ29ESFjqJoCu1Rupje8YtGqsefD265TMg7usUDFdp6W1EGMcet8")
path = "m/0'"
m = key
for n in parse_path(path):
m = m.ChildKey(n)
self.assertEqual(m.ExtendedKey(), "xprv9uHRZZhk6KAJC1avXpDAp4MDc3sQKNxDiPvvkX8Br5ngLNv1TxvUxt4cV1rGL5hj6KCesnDYUhd7oWgT11eZG7XnxHrnYeSvkzY7d2bhkJ7")
self.assertEqual(m.ExtendedKey(private=False, encoded=True), "xpub68Gmy5EdvgibQVfPdqkBBCHxA5htiqg55crXYuXoQRKfDBFA1WEjWgP6LHhwBZeNK1VTsfTFUHCdrfp1bgwQ9xv5ski8PX9rL2dZXvgGDnw")
path = "m/0'/1"
m = key
for n in parse_path(path):
m = m.ChildKey(n)
self.assertEqual(m.ExtendedKey(), "xprv9wTYmMFdV23N2TdNG573QoEsfRrWKQgWeibmLntzniatZvR9BmLnvSxqu53Kw1UmYPxLgboyZQaXwTCg8MSY3H2EU4pWcQDnRnrVA1xe8fs")
self.assertEqual(m.ExtendedKey(private=False, encoded=True), "xpub6ASuArnXKPbfEwhqN6e3mwBcDTgzisQN1wXN9BJcM47sSikHjJf3UFHKkNAWbWMiGj7Wf5uMash7SyYq527Hqck2AxYysAA7xmALppuCkwQ")
path = "m/0'/1/2'"
m = key
for n in parse_path(path):
m = m.ChildKey(n)
self.assertEqual(m.ExtendedKey(), "xprv9z4pot5VBttmtdRTWfWQmoH1taj2axGVzFqSb8C9xaxKymcFzXBDptWmT7FwuEzG3ryjH4ktypQSAewRiNMjANTtpgP4mLTj34bhnZX7UiM")
self.assertEqual(m.ExtendedKey(private=False, encoded=True), "xpub6D4BDPcP2GT577Vvch3R8wDkScZWzQzMMUm3PWbmWvVJrZwQY4VUNgqFJPMM3No2dFDFGTsxxpG5uJh7n7epu4trkrX7x7DogT5Uv6fcLW5")
path = "m/0'/1/2'/2"
m = key
for n in parse_path(path):
m = m.ChildKey(n)
self.assertEqual(m.ExtendedKey(), "xprvA2JDeKCSNNZky6uBCviVfJSKyQ1mDYahRjijr5idH2WwLsEd4Hsb2Tyh8RfQMuPh7f7RtyzTtdrbdqqsunu5Mm3wDvUAKRHSC34sJ7in334")
self.assertEqual(m.ExtendedKey(private=False, encoded=True), "xpub6FHa3pjLCk84BayeJxFW2SP4XRrFd1JYnxeLeU8EqN3vDfZmbqBqaGJAyiLjTAwm6ZLRQUMv1ZACTj37sR62cfN7fe5JnJ7dh8zL4fiyLHV")
path = "m/0'/1/2'/2/1000000000"
m = key
for n in parse_path(path):
m = m.ChildKey(n)
self.assertEqual(m.ExtendedKey(), "xprvA41z7zogVVwxVSgdKUHDy1SKmdb533PjDz7J6N6mV6uS3ze1ai8FHa8kmHScGpWmj4WggLyQjgPie1rFSruoUihUZREPSL39UNdE3BBDu76")
self.assertEqual(m.ExtendedKey(private=False, encoded=True), "xpub6H1LXWLaKsWFhvm6RVpEL9P4KfRZSW7abD2ttkWP3SSQvnyA8FSVqNTEcYFgJS2UaFcxupHiYkro49S8yGasTvXEYBVPamhGW6cFJodrTHy")
if __name__ == '__main__':
unittest.main()